author	Daniel Baumann <daniel.baumann@progress-linux.org>	2024-04-28 14:29:10 +0000
committer	Daniel Baumann <daniel.baumann@progress-linux.org>	2024-04-28 14:29:10 +0000
commit	2aa4a82499d4becd2284cdb482213d541b8804dd (patch)
tree	b80bf8bf13c3766139fbacc530efd0dd9d54394c /taskcluster/taskgraph/transforms
parent	Initial commit. (diff)
Adding upstream version 86.0.1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'taskcluster/taskgraph/transforms')
107 files changed, 17266 insertions, 0 deletions
diff --git a/taskcluster/taskgraph/transforms/__init__.py b/taskcluster/taskgraph/transforms/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/__init__.py
diff --git a/taskcluster/taskgraph/transforms/balrog_submit.py b/taskcluster/taskgraph/transforms/balrog_submit.py
new file mode 100644
index 0000000000..860d874ac4
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/balrog_submit.py
@@ -0,0 +1,132 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the per-locale balrog task into an actual task description.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from six import text_type
+from taskgraph.loader.single_dep import schema
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.attributes import copy_attributes_from_dependent_job
+from taskgraph.util.schema import (
+    optionally_keyed_by,
+    resolve_keyed_by,
+)
+from taskgraph.util.treeherder import replace_group
+from taskgraph.transforms.task import task_description_schema
+from voluptuous import Optional
+
+
+balrog_description_schema = schema.extend(
+    {
+        # unique label to describe this balrog task, defaults to balrog-{dep.label}
+        Optional("label"): text_type,
+        Optional(
+            "update-no-wnp",
+            description="Whether the parallel `-No-WNP` blob should be updated as well.",
+        ): optionally_keyed_by("release-type", bool),
+        # treeherder is allowed here to override any defaults we use for beetmover. See
+        # taskcluster/taskgraph/transforms/task.py for the schema details, and the
+        # below transforms for defaults of various values.
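+        # For illustration, a hypothetical kind.yml entry keying this field by
+        # release-type (values are examples only) would look like:
+        #     update-no-wnp:
+        #         by-release-type:
+        #             beta: false
+        #             default: true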
+        Optional("treeherder"): task_description_schema["treeherder"],
+        Optional("attributes"): task_description_schema["attributes"],
+        # Shipping product / phase
+        Optional("shipping-product"): task_description_schema["shipping-product"],
+        Optional("shipping-phase"): task_description_schema["shipping-phase"],
+    }
+)
+
+
+transforms = TransformSequence()
+transforms.add_validate(balrog_description_schema)
+
+
+@transforms.add
+def handle_keyed_by(config, jobs):
+    """Resolve fields that can be keyed by platform, etc."""
+    fields = [
+        "update-no-wnp",
+    ]
+    for job in jobs:
+        label = job.get("dependent-task", object).__dict__.get("label", "?no-label?")
+        for field in fields:
+            resolve_keyed_by(
+                item=job,
+                field=field,
+                item_name=label,
+                **{
+                    "project": config.params["project"],
+                    "release-type": config.params["release_type"],
+                }
+            )
+        yield job
+
+
+@transforms.add
+def make_task_description(config, jobs):
+    for job in jobs:
+        dep_job = job["primary-dependency"]
+
+        treeherder = job.get("treeherder", {})
+        treeherder.setdefault("symbol", "c-Up(N)")
+        dep_th_platform = (
+            dep_job.task.get("extra", {})
+            .get("treeherder", {})
+            .get("machine", {})
+            .get("platform", "")
+        )
+        treeherder.setdefault("platform", "{}/opt".format(dep_th_platform))
+        treeherder.setdefault(
+            "tier", dep_job.task.get("extra", {}).get("treeherder", {}).get("tier", 1)
+        )
+        treeherder.setdefault("kind", "build")
+
+        attributes = copy_attributes_from_dependent_job(dep_job)
+
+        treeherder_job_symbol = dep_job.task["extra"]["treeherder"]["symbol"]
+        treeherder["symbol"] = replace_group(treeherder_job_symbol, "c-Up")
+
+        if dep_job.attributes.get("locale"):
+            attributes["locale"] = dep_job.attributes.get("locale")
+
+        label = job["label"]
+
+        description = (
+            "Balrog submission for locale '{locale}' for build '"
+            "{build_platform}/{build_type}'".format(
+                locale=attributes.get("locale", "en-US"),
+                build_platform=attributes.get("build_platform"),
+                build_type=attributes.get("build_type"),
+            )
+        )
+
+        upstream_artifacts = [
+            {
+                "taskId": {"task-reference": "<beetmover>"},
+                "taskType": "beetmover",
+                "paths": ["public/manifest.json"],
+            }
+        ]
+
+        task = {
+            "label": label,
+            "description": description,
+            "worker-type": "balrog",
+            "worker": {
+                "implementation": "balrog",
+                "upstream-artifacts": upstream_artifacts,
+                "balrog-action": "v2-submit-locale",
+                "suffixes": ["", "-No-WNP"] if job.get("update-no-wnp") else [""],
+            },
+            "dependencies": {"beetmover": dep_job.label},
+            "attributes": attributes,
+            "run-on-projects": dep_job.attributes.get("run_on_projects"),
+            "treeherder": treeherder,
+            "shipping-phase": job.get("shipping-phase", "promote"),
+            "shipping-product": job.get("shipping-product"),
+        }
+
+        yield task
diff --git a/taskcluster/taskgraph/transforms/balrog_toplevel.py b/taskcluster/taskgraph/transforms/balrog_toplevel.py
new file mode 100644
index 0000000000..a0795641c3
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/balrog_toplevel.py
@@ -0,0 +1,44 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the balrog toplevel task into an actual task description.
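+
+Each job's worker section gains an `update-line` mapping with one entry per
+blob suffix: "" for the What's New Page (WNP) blob and "-No-WNP" for the
+parallel blob without one. A sketch of the shape (not literal output):
+
+    update-line:
+        "": {... properties generated for the WNP blob ...}
+        "-No-WNP": {... properties for the blob without a WNP ...}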
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.scriptworker import get_release_config
+from taskgraph.util.yaml import load_yaml
+
+from mozrelease.balrog import generate_update_properties
+from mozilla_version.gecko import GeckoVersion
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def generate_update_line(config, jobs):
+    """Generate the update-line property for each blob type."""
+    release_config = get_release_config(config)
+    for job in jobs:
+        config_file = job.pop("whats-new-config")
+        update_config = load_yaml(config_file)
+
+        product = job["shipping-product"]
+        if product == "devedition":
+            product = "firefox"
+        job["worker"]["update-line"] = {}
+        for blob_type, suffix in [("wnp", ""), ("no-wnp", "-No-WNP")]:
+            context = {
+                "release-type": config.params["release_type"],
+                "product": product,
+                "version": GeckoVersion.parse(release_config["appVersion"]),
+                "blob-type": blob_type,
+                "build-id": config.params["moz_build_date"],
+            }
+            job["worker"]["update-line"][suffix] = generate_update_properties(
+                context, update_config
+            )
+
+        yield job
diff --git a/taskcluster/taskgraph/transforms/base.py b/taskcluster/taskgraph/transforms/base.py
new file mode 100644
index 0000000000..fee4bda774
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/base.py
@@ -0,0 +1,93 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import attr
+from six import text_type
+
+from ..config import GraphConfig
+from ..parameters import Parameters
+from ..util.schema import Schema, validate_schema
+
+
+@attr.s(frozen=True)
+class TransformConfig(object):
+    """
+    A container for configuration affecting transforms. The `config` argument
+    to transforms is an instance of this class.
+    """
+
+    # the name of the current kind
+    kind = attr.ib()
+
+    # the path to the kind configuration directory
+    path = attr.ib(type=text_type)
+
+    # the parsed contents of kind.yml
+    config = attr.ib(type=dict)
+
+    # the parameters for this task-graph generation run
+    params = attr.ib(type=Parameters)
+
+    # a list of all the tasks associated with the kind dependencies of the
+    # current kind
+    kind_dependencies_tasks = attr.ib()
+
+    # Global configuration of the taskgraph
+    graph_config = attr.ib(type=GraphConfig)
+
+    # whether to write out artifacts for the decision task
+    write_artifacts = attr.ib(type=bool)
+
+
+@attr.s()
+class TransformSequence(object):
+    """
+    Container for a sequence of transforms. Each transform is represented as a
+    callable taking (config, items) and returning a generator which will yield
+    transformed items. The resulting sequence has the same interface.
+
+    This is convenient to use in a file full of transforms, as it provides a
+    decorator, @transforms.add, that will add the decorated function to the
+    sequence.
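+
+    A minimal usage sketch (the transform shown is hypothetical):
+
+        transforms = TransformSequence()
+
+        @transforms.add
+        def add_defaults(config, tasks):
+            for task in tasks:
+                task.setdefault("tier", 1)
+                yield task
+
+        # kind implementations then call: transforms(config, tasks)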
+    """
+
+    _transforms = attr.ib(factory=list)
+
+    def __call__(self, config, items):
+        for xform in self._transforms:
+            items = xform(config, items)
+            if items is None:
+                raise Exception("Transform {} is not a generator".format(xform))
+        return items
+
+    def add(self, func):
+        self._transforms.append(func)
+        return func
+
+    def add_validate(self, schema):
+        self.add(ValidateSchema(schema))
+
+
+@attr.s
+class ValidateSchema(object):
+    schema = attr.ib(type=Schema)
+
+    def __call__(self, config, tasks):
+        for task in tasks:
+            if "name" in task:
+                error = "In {kind} kind task {name!r}:".format(
+                    kind=config.kind, name=task["name"]
+                )
+            elif "label" in task:
+                error = "In job {label!r}:".format(label=task["label"])
+            elif "primary-dependency" in task:
+                error = "In {kind} kind task for {dependency!r}:".format(
+                    kind=config.kind, dependency=task["primary-dependency"].label
+                )
+            else:
+                error = "In unknown task:"
+            validate_schema(self.schema, task, error)
+            yield task
diff --git a/taskcluster/taskgraph/transforms/beetmover.py b/taskcluster/taskgraph/transforms/beetmover.py
new file mode 100644
index 0000000000..4aef6b5078
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/beetmover.py
@@ -0,0 +1,169 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the beetmover task into an actual task description.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from voluptuous import Optional, Required
+
+from six import text_type
+from taskgraph.loader.single_dep import schema
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.transforms.task import task_description_schema
+from taskgraph.util.attributes import copy_attributes_from_dependent_job
+from taskgraph.util.scriptworker import (
+    generate_beetmover_artifact_map,
+    generate_beetmover_upstream_artifacts,
+    get_beetmover_bucket_scope,
+    get_beetmover_action_scope,
+)
+from taskgraph.util.treeherder import replace_group
+
+
+transforms = TransformSequence()
+
+beetmover_description_schema = schema.extend(
+    {
+        # unique label to describe this beetmover task, defaults to {dep.label}-beetmover
+        Optional("label"): text_type,
+        # treeherder is allowed here to override any defaults we use for beetmover. See
+        # taskcluster/taskgraph/transforms/task.py for the schema details, and the
+        # below transforms for defaults of various values.
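+        # For illustration, a hypothetical kind.yml override (values are
+        # examples only) could look like:
+        #     treeherder:
+        #         symbol: BM(custom)
+        #         tier: 2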
+        Optional("treeherder"): task_description_schema["treeherder"],
+        # locale is passed only for l10n beetmoving
+        Optional("locale"): text_type,
+        Required("shipping-phase"): task_description_schema["shipping-phase"],
+        Optional("shipping-product"): task_description_schema["shipping-product"],
+        Optional("attributes"): task_description_schema["attributes"],
+    }
+)
+
+
+transforms.add_validate(beetmover_description_schema)
+
+
+@transforms.add
+def make_task_description(config, jobs):
+    for job in jobs:
+        dep_job = job["primary-dependency"]
+        attributes = dep_job.attributes
+
+        treeherder = job.get("treeherder", {})
+        treeherder.setdefault(
+            "symbol", replace_group(dep_job.task["extra"]["treeherder"]["symbol"], "BM")
+        )
+        dep_th_platform = (
+            dep_job.task.get("extra", {})
+            .get("treeherder", {})
+            .get("machine", {})
+            .get("platform", "")
+        )
+        treeherder.setdefault("platform", "{}/opt".format(dep_th_platform))
+        treeherder.setdefault(
+            "tier", dep_job.task.get("extra", {}).get("treeherder", {}).get("tier", 1)
+        )
+        treeherder.setdefault("kind", "build")
+        label = job["label"]
+        description = (
+            "Beetmover submission for locale '{locale}' for build '"
+            "{build_platform}/{build_type}'".format(
+                locale=attributes.get("locale", "en-US"),
+                build_platform=attributes.get("build_platform"),
+                build_type=attributes.get("build_type"),
+            )
+        )
+
+        dependencies = {dep_job.kind: dep_job.label}
+
+        # XXX release snap-repackage has a variable number of dependencies, depending on how many
+        # "post-beetmover-dummy" jobs there are in the graph.
+        if dep_job.kind != "release-snap-repackage" and len(dep_job.dependencies) > 1:
+            raise NotImplementedError(
+                "Can't beetmove a signing task with multiple dependencies"
+            )
+        signing_dependencies = dep_job.dependencies
+        dependencies.update(signing_dependencies)
+
+        attributes = copy_attributes_from_dependent_job(dep_job)
+        attributes.update(job.get("attributes", {}))
+
+        if job.get("locale"):
+            attributes["locale"] = job["locale"]
+
+        bucket_scope = get_beetmover_bucket_scope(config)
+        action_scope = get_beetmover_action_scope(config)
+
+        task = {
+            "label": label,
+            "description": description,
+            "worker-type": "beetmover",
+            "scopes": [bucket_scope, action_scope],
+            "dependencies": dependencies,
+            "attributes": attributes,
+            "run-on-projects": dep_job.attributes.get("run_on_projects"),
+            "treeherder": treeherder,
+            "shipping-phase": job["shipping-phase"],
+        }
+
+        yield task
+
+
+def craft_release_properties(config, job):
+    params = config.params
+    build_platform = job["attributes"]["build_platform"]
+    build_platform = build_platform.replace("-shippable", "")
+    if build_platform.endswith("-source"):
+        build_platform = build_platform.replace("-source", "-release")
+
+    # XXX This should be explicitly set via build attributes or something
+    if "android" in job["label"] or "fennec" in job["label"]:
+        app_name = "Fennec"
+    elif config.graph_config["trust-domain"] == "comm":
+        app_name = "Thunderbird"
+    else:
+        # XXX Even DevEdition is called Firefox
+        app_name = "Firefox"
+
+    return {
+        "app-name": app_name,
+        "app-version": params["app_version"],
+        "branch": params["project"],
+        "build-id": params["moz_build_date"],
+        "hash-type": "sha512",
+        "platform": build_platform,
+    }
+
+
+@transforms.add
+def make_task_worker(config, jobs):
+    for job in jobs:
+        valid_beetmover_job = len(job["dependencies"]) == 2 and any(
+            ["signing" in j for j in job["dependencies"]]
+        )
+        # XXX release snap-repackage has a variable number of dependencies, depending on how many
+        # "post-beetmover-dummy" jobs there are in the graph.
+        if "-snap-" not in job["label"] and not valid_beetmover_job:
+            raise NotImplementedError("Beetmover must have two dependencies.")
+
+        locale = job["attributes"].get("locale")
+        platform = job["attributes"]["build_platform"]
+
+        worker = {
+            "implementation": "beetmover",
+            "release-properties": craft_release_properties(config, job),
+            "upstream-artifacts": generate_beetmover_upstream_artifacts(
+                config, job, platform, locale
+            ),
+            "artifact-map": generate_beetmover_artifact_map(
+                config, job, platform=platform, locale=locale
+            ),
+        }
+
+        if locale:
+            worker["locale"] = locale
+        job["worker"] = worker
+
+        yield job
diff --git a/taskcluster/taskgraph/transforms/beetmover_checksums.py b/taskcluster/taskgraph/transforms/beetmover_checksums.py
new file mode 100644
index 0000000000..2dfa0cf079
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/beetmover_checksums.py
@@ -0,0 +1,136 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the checksums signing task into an actual task description.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+
+from six import text_type
+from taskgraph.loader.single_dep import schema
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.transforms.beetmover import craft_release_properties
+from taskgraph.util.attributes import copy_attributes_from_dependent_job
+from taskgraph.util.scriptworker import (
+    generate_beetmover_artifact_map,
+    generate_beetmover_upstream_artifacts,
+    get_beetmover_action_scope,
+    get_beetmover_bucket_scope,
+)
+from voluptuous import Optional, Required
+from taskgraph.util.treeherder import replace_group
+from taskgraph.transforms.task import task_description_schema
+
+beetmover_checksums_description_schema = schema.extend(
+    {
+        Required("attributes"): {text_type: object},
+        Optional("label"): text_type,
+        Optional("treeherder"): task_description_schema["treeherder"],
+        Optional("locale"): text_type,
+        Optional("shipping-phase"): task_description_schema["shipping-phase"],
+        Optional("shipping-product"): task_description_schema["shipping-product"],
+    }
+)
+
+transforms = TransformSequence()
+transforms.add_validate(beetmover_checksums_description_schema)
+
+
+@transforms.add
+def make_beetmover_checksums_description(config, jobs):
+    for job in jobs:
+        dep_job = job["primary-dependency"]
+        attributes = dep_job.attributes
+
+        treeherder = job.get("treeherder", {})
+        treeherder.setdefault(
+            "symbol",
+            replace_group(dep_job.task["extra"]["treeherder"]["symbol"], "BMcs"),
+        )
+        dep_th_platform = (
+            dep_job.task.get("extra", {})
+            .get("treeherder", {})
+            .get("machine", {})
+            .get("platform", "")
+        )
+        treeherder.setdefault("platform", "{}/opt".format(dep_th_platform))
+        treeherder.setdefault(
+            "tier", dep_job.task.get("extra", {}).get("treeherder", {}).get("tier", 1)
+        )
+        treeherder.setdefault("kind", "build")
+
+        label = job["label"]
+        build_platform = attributes.get("build_platform")
+
+        description = (
+            "Beetmover submission of checksums for locale '{locale}' for build '"
+            "{build_platform}/{build_type}'".format(
+                locale=attributes.get("locale", "en-US"),
+                build_platform=build_platform,
+                build_type=attributes.get("build_type"),
+            )
+        )
+
+        extra = {}
+        if "devedition" in build_platform:
+            extra["product"] = "devedition"
+        else:
+            extra["product"] = "firefox"
+
+        dependencies = {dep_job.kind: dep_job.label}
+
+        attributes = copy_attributes_from_dependent_job(dep_job)
+        attributes.update(job.get("attributes", {}))
+
+        if dep_job.attributes.get("locale"):
+            treeherder["symbol"] = "BMcs({})".format(dep_job.attributes.get("locale"))
+            attributes["locale"] = dep_job.attributes.get("locale")
+
+        bucket_scope = get_beetmover_bucket_scope(config)
+        action_scope = get_beetmover_action_scope(config)
+
+        task = {
+            "label": label,
+            "description": description,
+            "worker-type": "beetmover",
+            "scopes": [bucket_scope, action_scope],
+            "dependencies": dependencies,
+            "attributes": attributes,
+            "run-on-projects": dep_job.attributes.get("run_on_projects"),
+            "treeherder": treeherder,
+            "extra": extra,
+        }
+
+        if "shipping-phase" in job:
+            task["shipping-phase"] = job["shipping-phase"]
+
+        if "shipping-product" in job:
+            task["shipping-product"] = job["shipping-product"]
+
+        yield task
+
+
+@transforms.add
+def make_beetmover_checksums_worker(config, jobs):
+    for job in jobs:
+        locale = job["attributes"].get("locale")
+        platform = job["attributes"]["build_platform"]
+
+        worker = {
+            "implementation": "beetmover",
+            "release-properties": craft_release_properties(config, job),
+            "upstream-artifacts": generate_beetmover_upstream_artifacts(
+                config, job, platform, locale
+            ),
+            "artifact-map": generate_beetmover_artifact_map(
+                config, job, platform=platform, locale=locale
+            ),
+        }
+
+        if locale:
+            worker["locale"] = locale
+        job["worker"] = worker
+
+        yield job
diff --git a/taskcluster/taskgraph/transforms/beetmover_emefree_checksums.py b/taskcluster/taskgraph/transforms/beetmover_emefree_checksums.py
new file mode 100644
index 0000000000..02d4b760ab
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/beetmover_emefree_checksums.py
@@ -0,0 +1,141 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform release-beetmover-emefree-checksums into an actual task description.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from six import text_type
+from taskgraph.loader.single_dep import schema
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.transforms.beetmover import craft_release_properties
+from taskgraph.util.attributes import copy_attributes_from_dependent_job
+from taskgraph.transforms.task import task_description_schema
+from voluptuous import Optional
+
+beetmover_checksums_description_schema = schema.extend(
+    {
+        Optional("label"): text_type,
+        Optional("extra"): object,
+        Optional("shipping-phase"): task_description_schema["shipping-phase"],
+        Optional("shipping-product"): task_description_schema["shipping-product"],
+    }
+)
+
+
+transforms = TransformSequence()
+transforms.add_validate(beetmover_checksums_description_schema)
+
+
+@transforms.add
+def make_beetmover_checksums_description(config, jobs):
+    for job in jobs:
+        dep_job = job["primary-dependency"]
+        attributes = dep_job.attributes
+        build_platform = attributes.get("build_platform")
+        if not build_platform:
+            raise Exception("Cannot find build platform!")
+        repack_id = dep_job.task.get("extra", {}).get("repack_id")
+        if not repack_id:
+            raise Exception("Cannot find repack id!")
+
+        label = dep_job.label.replace("beetmover-", "beetmover-checksums-")
+        description = (
+            "Beetmove checksums for repack_id '{repack_id}' for build '"
+            "{build_platform}/{build_type}'".format(
+                repack_id=repack_id,
+                build_platform=build_platform,
+                build_type=attributes.get("build_type"),
+            )
+        )
+
+        extra = {}
+        extra["partner_path"] = dep_job.task["payload"]["upstreamArtifacts"][0][
+            "locale"
+        ]
+        extra["repack_id"] = repack_id
+
+        dependencies = {dep_job.kind: dep_job.label}
+        for k, v in dep_job.dependencies.items():
+            if k.startswith("beetmover"):
+                dependencies[k] = v
+
+        attributes = copy_attributes_from_dependent_job(dep_job)
+
+        task = {
+            "label": label,
+            "description": description,
+            "worker-type": "{}/{}".format(
+                dep_job.task["provisionerId"],
+                dep_job.task["workerType"],
+            ),
+            "scopes": dep_job.task["scopes"],
+            "dependencies": dependencies,
+            "attributes": attributes,
+            "run-on-projects": dep_job.attributes.get("run_on_projects"),
+            "extra": extra,
+        }
+
+        if "shipping-phase" in job:
+            task["shipping-phase"] = job["shipping-phase"]
+
+        if "shipping-product" in job:
+            task["shipping-product"] = job["shipping-product"]
+
+        yield task
+
+
+def generate_upstream_artifacts(refs, partner_path):
+    # Until bug 1331141 is fixed, if you are adding any new artifacts here that
+    # need to be transferred to S3, please be aware you also need to follow up
+    # with a beetmover patch in https://github.com/mozilla-releng/beetmoverscript/.
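+    # For reference, the structure returned below has this shape
+    # (illustrative values, not literal output):
+    #     [{"taskId": {"task-reference": "<beetmover>"},
+    #       "taskType": "signing",
+    #       "paths": ["public/target.checksums"],
+    #       "locale": "beetmover-checksums/<partner_path>"}]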
+    # See example in bug 1348286
+    common_paths = [
+        "public/target.checksums",
+    ]
+
+    upstream_artifacts = [
+        {
+            "taskId": {"task-reference": refs["beetmover"]},
+            "taskType": "signing",
+            "paths": common_paths,
+            "locale": "beetmover-checksums/{}".format(partner_path),
+        }
+    ]
+
+    return upstream_artifacts
+
+
+@transforms.add
+def make_beetmover_checksums_worker(config, jobs):
+    for job in jobs:
+        valid_beetmover_job = len(job["dependencies"]) == 1
+        if not valid_beetmover_job:
+            raise NotImplementedError("Beetmover checksums must have one dependency.")
+
+        refs = {
+            "beetmover": None,
+        }
+        for dependency in job["dependencies"].keys():
+            if dependency.endswith("beetmover"):
+                refs["beetmover"] = "<{}>".format(dependency)
+        if None in refs.values():
+            raise NotImplementedError(
+                "Beetmover checksums must have a beetmover dependency!"
+            )
+
+        worker = {
+            "implementation": "beetmover",
+            "release-properties": craft_release_properties(config, job),
+            "upstream-artifacts": generate_upstream_artifacts(
+                refs,
+                job["extra"]["partner_path"],
+            ),
+            "partner-public": True,
+        }
+
+        job["worker"] = worker
+
+        yield job
diff --git a/taskcluster/taskgraph/transforms/beetmover_geckoview.py b/taskcluster/taskgraph/transforms/beetmover_geckoview.py
new file mode 100644
index 0000000000..d9629f2dba
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/beetmover_geckoview.py
@@ -0,0 +1,157 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the beetmover task into an actual task description.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from copy import deepcopy
+
+from six import text_type
+from taskgraph.loader.single_dep import schema
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.transforms.beetmover import (
+    craft_release_properties as beetmover_craft_release_properties,
+)
+from taskgraph.util.attributes import copy_attributes_from_dependent_job
+from taskgraph.util.declarative_artifacts import (
+    get_geckoview_template_vars,
+    get_geckoview_upstream_artifacts,
+    get_geckoview_artifact_id,
+)
+from taskgraph.util.schema import resolve_keyed_by, optionally_keyed_by
+from taskgraph.util.scriptworker import generate_beetmover_artifact_map
+from taskgraph.transforms.task import task_description_schema
+from voluptuous import Required, Optional
+
+
+beetmover_description_schema = schema.extend(
+    {
+        Optional("label"): text_type,
+        Optional("treeherder"): task_description_schema["treeherder"],
+        Required("run-on-projects"): task_description_schema["run-on-projects"],
+        Required("run-on-hg-branches"): task_description_schema["run-on-hg-branches"],
+        Optional("bucket-scope"): optionally_keyed_by("release-level", text_type),
+        Optional("shipping-phase"): optionally_keyed_by(
+            "project", task_description_schema["shipping-phase"]
+        ),
+        Optional("shipping-product"): task_description_schema["shipping-product"],
+        Optional("attributes"): task_description_schema["attributes"],
+    }
+)
+
+transforms = TransformSequence()
+transforms.add_validate(beetmover_description_schema)
+
+
+@transforms.add
+def resolve_keys(config, jobs):
+    for job in jobs:
+        resolve_keyed_by(
+            job,
+            "run-on-hg-branches",
+            item_name=job["label"],
+            project=config.params["project"],
+        )
+        resolve_keyed_by(
+            job,
+            "shipping-phase",
+            item_name=job["label"],
+            project=config.params["project"],
+        )
+        resolve_keyed_by(
+            job,
+            "bucket-scope",
+            item_name=job["label"],
+            **{"release-level": config.params.release_level()}
+        )
+        yield job
+
+
+@transforms.add
+def make_task_description(config, jobs):
+    for job in jobs:
+        dep_job = job["primary-dependency"]
+        attributes = copy_attributes_from_dependent_job(dep_job)
+        attributes.update(job.get("attributes", {}))
+
+        treeherder = job.get("treeherder", {})
+        treeherder.setdefault("symbol", "BM-gv")
+        dep_th_platform = (
+            dep_job.task.get("extra", {})
+            .get("treeherder", {})
+            .get("machine", {})
+            .get("platform", "")
+        )
+        treeherder.setdefault("platform", "{}/opt".format(dep_th_platform))
+        treeherder.setdefault("tier", 2)
+        treeherder.setdefault("kind", "build")
+        label = job["label"]
+        description = (
+            "Beetmover submission for geckoview '"
+            "{build_platform}/{build_type}'".format(
+                build_platform=attributes.get("build_platform"),
+                build_type=attributes.get("build_type"),
+            )
+        )
+
+        dependencies = deepcopy(dep_job.dependencies)
+        dependencies[dep_job.kind] = dep_job.label
+
+        if job.get("locale"):
+            attributes["locale"] = job["locale"]
+
+        attributes["run_on_hg_branches"] = job["run-on-hg-branches"]
+
+        task = {
+            "label": label,
+            "description": description,
+            "worker-type": "beetmover",
+            "scopes": [
+                job["bucket-scope"],
+                "project:releng:beetmover:action:push-to-maven",
+            ],
+            "dependencies": dependencies,
+            "attributes": attributes,
+            "run-on-projects": job["run-on-projects"],
+            "treeherder": treeherder,
+            "shipping-phase": job["shipping-phase"],
+        }
+
+        yield task
+
+
+@transforms.add
+def make_task_worker(config, jobs):
+    for job in jobs:
+        job["worker"] = {
+            "artifact-map": generate_beetmover_artifact_map(
+                config,
+                job,
+                **get_geckoview_template_vars(
+                    config,
+                    job["attributes"]["build_platform"],
+                    job["attributes"].get("update-channel"),
+                )
+            ),
+            "implementation": "beetmover-maven",
+            "release-properties": craft_release_properties(config, job),
+            "upstream-artifacts": get_geckoview_upstream_artifacts(config, job),
+        }
+
+        yield job
+
+
+def craft_release_properties(config, job):
+    release_properties = beetmover_craft_release_properties(config, job)
+
+    release_properties["artifact-id"] = get_geckoview_artifact_id(
+        config,
+        job["attributes"]["build_platform"],
+        job["attributes"].get("update-channel"),
+    )
+    release_properties["app-name"] = "geckoview"
+
+    return release_properties
diff --git a/taskcluster/taskgraph/transforms/beetmover_langpack_checksums.py b/taskcluster/taskgraph/transforms/beetmover_langpack_checksums.py
new file mode 100644
index 0000000000..f310f29e47
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/beetmover_langpack_checksums.py
@@ -0,0 +1,130 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform release-beetmover-langpack-checksums into an actual task description.
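+
+Checksums are submitted per locale chunk; the Treeherder symbol encodes the
+chunk number, e.g. "BMcslang(N1)" for chunk 1 (an illustrative value).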
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from six import text_type
+from taskgraph.loader.single_dep import schema
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.transforms.beetmover import craft_release_properties
+from taskgraph.util.attributes import copy_attributes_from_dependent_job
+from taskgraph.util.scriptworker import (
+    generate_beetmover_artifact_map,
+    generate_beetmover_upstream_artifacts,
+    get_beetmover_action_scope,
+    get_beetmover_bucket_scope,
+)
+from taskgraph.util.treeherder import inherit_treeherder_from_dep
+from taskgraph.transforms.task import task_description_schema
+from voluptuous import Required, Optional
+
+beetmover_checksums_description_schema = schema.extend(
+    {
+        Required("attributes"): {text_type: object},
+        Optional("label"): text_type,
+        Optional("treeherder"): task_description_schema["treeherder"],
+        Optional("locale"): text_type,
+        Optional("shipping-phase"): task_description_schema["shipping-phase"],
+        Optional("shipping-product"): task_description_schema["shipping-product"],
+    }
+)
+
+transforms = TransformSequence()
+transforms.add_validate(beetmover_checksums_description_schema)
+
+
+@transforms.add
+def make_beetmover_checksums_description(config, jobs):
+    for job in jobs:
+        dep_job = job["primary-dependency"]
+        attributes = dep_job.attributes
+
+        treeherder = inherit_treeherder_from_dep(job, dep_job)
+        treeherder.setdefault(
+            "symbol", "BMcslang(N{})".format(attributes.get("l10n_chunk", ""))
+        )
+
+        label = job["label"]
+        build_platform = attributes.get("build_platform")
+
+        description = "Beetmover submission of checksums for langpack files"
+
+        extra = {}
+        if "devedition" in build_platform:
+            extra["product"] = "devedition"
+        else:
+            extra["product"] = "firefox"
+
+        dependencies = {dep_job.kind: dep_job.label}
+        for k, v in dep_job.dependencies.items():
+            if k.startswith("beetmover"):
+                dependencies[k] = v
+
+        attributes = copy_attributes_from_dependent_job(dep_job)
+        if "chunk_locales" in dep_job.attributes:
+            attributes["chunk_locales"] = dep_job.attributes["chunk_locales"]
+        attributes.update(job.get("attributes", {}))
+
+        bucket_scope = get_beetmover_bucket_scope(config)
+        action_scope = get_beetmover_action_scope(config)
+
+        task = {
+            "label": label,
+            "description": description,
+            "worker-type": "beetmover",
+            "scopes": [bucket_scope, action_scope],
+            "dependencies": dependencies,
+            "attributes": attributes,
+            "run-on-projects": dep_job.attributes.get("run_on_projects"),
+            "treeherder": treeherder,
+            "extra": extra,
+        }
+
+        if "shipping-phase" in job:
+            task["shipping-phase"] = job["shipping-phase"]
+
+        if "shipping-product" in job:
+            task["shipping-product"] = job["shipping-product"]
+
+        yield task
+
+
+@transforms.add
+def make_beetmover_checksums_worker(config, jobs):
+    for job in jobs:
+        valid_beetmover_job = len(job["dependencies"]) == 1
+        if not valid_beetmover_job:
+            raise NotImplementedError("Beetmover checksums must have one dependency.")
+
+        locales = job["attributes"].get("chunk_locales")
+        platform = job["attributes"]["build_platform"]
+
+        refs = {
+            "beetmover": None,
+        }
+        for dependency in job["dependencies"].keys():
+            if dependency.startswith("release-beetmover"):
+                refs["beetmover"] = "<{}>".format(dependency)
+        if None in refs.values():
+            raise NotImplementedError(
+                "Beetmover checksums must have a beetmover dependency!"
+            )
+
+        worker = {
+            "implementation": "beetmover",
+            "release-properties": craft_release_properties(config, job),
+            "upstream-artifacts": generate_beetmover_upstream_artifacts(
+                config, job, platform, locales
+            ),
+            "artifact-map": generate_beetmover_artifact_map(
+                config, job, platform=platform, locale=locales
+            ),
+        }
+
+        job["worker"] = worker
+
+        yield job
diff --git a/taskcluster/taskgraph/transforms/beetmover_push_to_release.py b/taskcluster/taskgraph/transforms/beetmover_push_to_release.py
new file mode 100644
index 0000000000..cbb3fc09e1
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/beetmover_push_to_release.py
@@ -0,0 +1,93 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the beetmover-push-to-release task into a task description.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from six import text_type
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import (
+    Schema,
+    taskref_or_string,
+)
+from taskgraph.util.scriptworker import (
+    get_beetmover_bucket_scope,
+    add_scope_prefix,
+)
+from taskgraph.transforms.task import task_description_schema
+from voluptuous import Required, Optional
+
+
+beetmover_push_to_release_description_schema = Schema(
+    {
+        Required("name"): text_type,
+        Required("product"): text_type,
+        Required("treeherder-platform"): text_type,
+        Optional("attributes"): {text_type: object},
+        Optional("job-from"): task_description_schema["job-from"],
+        Optional("run"): {text_type: object},
+        Optional("run-on-projects"): task_description_schema["run-on-projects"],
+        Optional("dependencies"): {text_type: taskref_or_string},
+        Optional("index"): {text_type: text_type},
+        Optional("routes"): [text_type],
+        Required("shipping-phase"): task_description_schema["shipping-phase"],
+        Required("shipping-product"): task_description_schema["shipping-product"],
+        Optional("extra"): task_description_schema["extra"],
+    }
+)
+
+
+transforms = TransformSequence()
+transforms.add_validate(beetmover_push_to_release_description_schema)
+
+
+@transforms.add
+def make_beetmover_push_to_release_description(config, jobs):
+    for job in jobs:
+        treeherder = job.get("treeherder", {})
+        treeherder.setdefault("symbol", "Rel(BM-C)")
+        treeherder.setdefault("tier", 1)
+        treeherder.setdefault("kind", "build")
+        treeherder.setdefault("platform", job["treeherder-platform"])
+
+        label = job["name"]
+        description = "Beetmover push to release for '{product}'".format(
+            product=job["product"]
+        )
+
+        bucket_scope = get_beetmover_bucket_scope(config)
+        action_scope = add_scope_prefix(config, "beetmover:action:push-to-releases")
+
+        task = {
+            "label": label,
+            "description": description,
+            "worker-type": "beetmover",
+            "scopes": [bucket_scope, action_scope],
+            "product": job["product"],
+            "dependencies": job["dependencies"],
+            "attributes": job.get("attributes", {}),
+            "run-on-projects": job.get("run-on-projects"),
+            "treeherder": treeherder,
+            "shipping-phase": job.get("shipping-phase", "push"),
+            "shipping-product": job.get("shipping-product"),
+            "routes": job.get("routes", []),
+            "extra": job.get("extra", {}),
+        }
+
+        yield task
+
+
+@transforms.add
+def make_beetmover_push_to_release_worker(config, jobs):
+    for job in jobs:
+        worker = {
+            "implementation": "beetmover-push-to-release",
+            "product": job["product"],
+        }
+        job["worker"] = worker
+        del job["product"]
+
+        yield job
diff --git a/taskcluster/taskgraph/transforms/beetmover_repackage.py b/taskcluster/taskgraph/transforms/beetmover_repackage.py
new file mode 100644
index 0000000000..0ebc1bff22
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/beetmover_repackage.py
@@ -0,0 +1,284 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the beetmover task into an actual task description.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from six import text_type
+from taskgraph.loader.multi_dep import schema
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.transforms.beetmover import craft_release_properties
+from taskgraph.util.attributes import copy_attributes_from_dependent_job
+from taskgraph.util.partials import (
+    get_balrog_platform_name,
+    get_partials_artifacts_from_params,
+    get_partials_info_from_params,
+)
+from taskgraph.util.scriptworker import (
+    generate_beetmover_artifact_map,
+    generate_beetmover_upstream_artifacts,
+    generate_beetmover_partials_artifact_map,
+    get_beetmover_bucket_scope,
+    get_beetmover_action_scope,
+)
+from taskgraph.util.taskcluster import get_artifact_prefix
+from taskgraph.util.treeherder import replace_group, inherit_treeherder_from_dep
+from taskgraph.transforms.task import task_description_schema
+from voluptuous import Required, Optional
+
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+beetmover_description_schema = schema.extend(
+    {
+        # unique label to describe this beetmover task, defaults to {dep.label}-beetmover
+        Required("label"): text_type,
+        # treeherder is allowed here to override any defaults we use for beetmover. See
+        # taskcluster/taskgraph/transforms/task.py for the schema details, and the
+        # below transforms for defaults of various values.
+        Optional("treeherder"): task_description_schema["treeherder"],
+        Optional("attributes"): task_description_schema["attributes"],
+        # locale is passed only for l10n beetmoving
+        Optional("locale"): text_type,
+        Required("shipping-phase"): task_description_schema["shipping-phase"],
+        # Optional until we fix asan (run_on_projects?)
+        Optional("shipping-product"): task_description_schema["shipping-product"],
+    }
+)
+
+transforms = TransformSequence()
+transforms.add_validate(beetmover_description_schema)
+
+
+@transforms.add
+def make_task_description(config, jobs):
+    for job in jobs:
+        dep_job = job["primary-dependency"]
+        attributes = dep_job.attributes
+
+        treeherder = inherit_treeherder_from_dep(job, dep_job)
+        upstream_symbol = dep_job.task["extra"]["treeherder"]["symbol"]
+        if "build" in job["dependent-tasks"]:
+            upstream_symbol = job["dependent-tasks"]["build"].task["extra"][
+                "treeherder"
+            ]["symbol"]
+        treeherder.setdefault("symbol", replace_group(upstream_symbol, "BMR"))
+        label = job["label"]
+        description = (
+            "Beetmover submission for locale '{locale}' for build '"
+            "{build_platform}/{build_type}'".format(
+                locale=attributes.get("locale", "en-US"),
+                build_platform=attributes.get("build_platform"),
+                build_type=attributes.get("build_type"),
+            )
+        )
+
+        upstream_deps = job["dependent-tasks"]
+
+        signing_name = "build-signing"
+        build_name = "build"
+        repackage_name = "repackage"
+        repackage_signing_name = "repackage-signing"
+        msi_signing_name = "repackage-signing-msi"
+        mar_signing_name = "mar-signing"
+        if job.get("locale"):
+            signing_name = "shippable-l10n-signing"
+            build_name = "shippable-l10n"
+            repackage_name = "repackage-l10n"
+            repackage_signing_name = "repackage-signing-l10n"
+            mar_signing_name = "mar-signing-l10n"
+        dependencies = {
+            "build": upstream_deps[build_name],
+            "repackage": upstream_deps[repackage_name],
+            "signing": upstream_deps[signing_name],
+            "mar-signing": upstream_deps[mar_signing_name],
+        }
+        if "partials-signing" in upstream_deps:
+            dependencies["partials-signing"] = upstream_deps["partials-signing"]
+        if msi_signing_name in upstream_deps:
+            dependencies[msi_signing_name] = upstream_deps[msi_signing_name]
+        if repackage_signing_name in upstream_deps:
+            dependencies["repackage-signing"] = upstream_deps[repackage_signing_name]
+
+        attributes = copy_attributes_from_dependent_job(dep_job)
+        attributes.update(job.get("attributes", {}))
+        if job.get("locale"):
+            attributes["locale"] = job["locale"]
+
+        bucket_scope = get_beetmover_bucket_scope(config)
+        action_scope = get_beetmover_action_scope(config)
+
+        task = {
+            "label": label,
+            "description": description,
+            "worker-type": "beetmover",
+            "scopes": [bucket_scope, action_scope],
+            "dependencies": dependencies,
+            "attributes": attributes,
+            "run-on-projects": dep_job.attributes.get("run_on_projects"),
+            "treeherder": treeherder,
+            "shipping-phase": job["shipping-phase"],
+            "shipping-product": job.get("shipping-product"),
+        }
+
+        yield task
+
+
+def generate_partials_upstream_artifacts(job, artifacts, platform, locale=None):
+    artifact_prefix = get_artifact_prefix(job)
+    if locale and locale != "en-US":
+        artifact_prefix = "{}/{}".format(artifact_prefix, locale)
+
+    upstream_artifacts = [
+        {
+            "taskId": {"task-reference": "<partials-signing>"},
+            "taskType": "signing",
+            "paths": ["{}/{}".format(artifact_prefix, path) for path, _ in artifacts],
+            "locale": locale or "en-US",
+        }
+    ]
+
+    return upstream_artifacts
+
+
+@transforms.add
+def make_task_worker(config, jobs):
+    for job in jobs:
+        locale = job["attributes"].get("locale")
+        platform = job["attributes"]["build_platform"]
+
+        worker = {
+            "implementation": "beetmover",
+            "release-properties": craft_release_properties(config, job),
+            "upstream-artifacts": generate_beetmover_upstream_artifacts(
+                config, job, platform, locale
+            ),
+            "artifact-map": generate_beetmover_artifact_map(
+                config, job, platform=platform, locale=locale
+            ),
+        }
+
+        if locale:
+            worker["locale"] = locale
+        job["worker"] = worker
+
+        yield job
+
+
+@transforms.add
+def strip_unwanted_langpacks_from_worker(config, jobs):
+    """Strips out langpacks where we didn't sign them.
+
+    This explicitly deletes langpacks from upstream artifacts and from artifact-maps.
+    Due to limitations in declarative artifacts, doing this was our easiest way right now.
+    """
+    ALWAYS_OK_PLATFORMS = {"linux64-shippable", "linux64-devedition"}
+    OSX_OK_PLATFORMS = {"macosx64-shippable", "macosx64-devedition"}
+    for job in jobs:
+        platform = job["attributes"].get("build_platform")
+        if platform in ALWAYS_OK_PLATFORMS:
+            # No need to strip anything
+            yield job
+            continue
+
+        for map in job["worker"].get("artifact-map", [])[:]:
+            if not any([path.endswith("target.langpack.xpi") for path in map["paths"]]):
+                continue
+            if map["locale"] == "ja-JP-mac":
+                # This locale should only exist on mac
+                assert platform in OSX_OK_PLATFORMS
+                continue
+            # map[paths] is being modified while iterating, so we need to resolve the
+            # ".keys()" iterator up front by throwing it into a list.
+            for path in list(map["paths"].keys()):
+                if path.endswith("target.langpack.xpi"):
+                    del map["paths"][path]
+            if map["paths"] == {}:
+                job["worker"]["artifact-map"].remove(map)
+
+        for artifact in job["worker"].get("upstream-artifacts", []):
+            if not any(
+                [path.endswith("target.langpack.xpi") for path in artifact["paths"]]
+            ):
+                continue
+            if artifact["locale"] == "ja-JP-mac":
+                # This locale should only exist on mac
+                assert platform in OSX_OK_PLATFORMS
+                continue
+            artifact["paths"] = [
+                path
+                for path in artifact["paths"]
+                if not path.endswith("target.langpack.xpi")
+            ]
+            if artifact["paths"] == []:
+                job["worker"]["upstream-artifacts"].remove(artifact)
+
+        yield job
+
+
+@transforms.add
+def make_partials_artifacts(config, jobs):
+    for job in jobs:
+        locale = job["attributes"].get("locale")
+        if not locale:
+            locale = "en-US"
+
+        platform = job["attributes"]["build_platform"]
+
+        if "partials-signing" not in job["dependencies"]:
+            yield job
+            continue
+
+        balrog_platform = get_balrog_platform_name(platform)
+        artifacts = get_partials_artifacts_from_params(
+            config.params.get("release_history"), balrog_platform, locale
+        )
+
+        upstream_artifacts = generate_partials_upstream_artifacts(
+            job, artifacts, balrog_platform, locale
+        )
+
+        job["worker"]["upstream-artifacts"].extend(upstream_artifacts)
+
+        extra = list()
+
+        partials_info = get_partials_info_from_params(
+            config.params.get("release_history"), balrog_platform, locale
+        )
+
+        job["worker"]["artifact-map"].extend(
+            generate_beetmover_partials_artifact_map(
+                config, job, partials_info, platform=platform, locale=locale
+            )
+        )
+
+        for artifact in partials_info:
+            artifact_extra = {
+                "locale": locale,
+                "artifact_name": artifact,
+                "buildid": partials_info[artifact]["buildid"],
+                "platform": balrog_platform,
+            }
+            for rel_attr in ("previousBuildNumber", "previousVersion"):
+                if partials_info[artifact].get(rel_attr):
+                    artifact_extra[rel_attr] = partials_info[artifact][rel_attr]
+            extra.append(artifact_extra)
+
+        job.setdefault("extra", {})
+        job["extra"]["partials"] = extra
+
+        yield job
+
+
+@transforms.add
+def convert_deps(config, jobs):
+    for job in jobs:
+        job["dependencies"] = {
+            name: dep_job.label for name, dep_job in job["dependencies"].items()
+        }
+        yield job
diff --git a/taskcluster/taskgraph/transforms/beetmover_repackage_l10n.py b/taskcluster/taskgraph/transforms/beetmover_repackage_l10n.py
new file mode 100644
index 0000000000..e413fee245
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/beetmover_repackage_l10n.py
@@ -0,0 +1,45 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the per-locale beetmover-repackage task into an actual task description.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.treeherder import join_symbol
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def make_beetmover_description(config, jobs):
+    for job in jobs:
+        dep_job = job["primary-dependency"]
+
+        locale = dep_job.attributes.get("locale")
+        if not locale:
+            yield job
+            continue
+
+        group = "BMR"
+
+        # add the locale code
+        symbol = locale
+
+        treeherder = {
+            "symbol": join_symbol(group, symbol),
+        }
+
+        beet_description = {
+            "label": job["label"],
+            "primary-dependency": dep_job,
+            "dependent-tasks": job["dependent-tasks"],
+            "attributes": job["attributes"],
+            "treeherder": treeherder,
+            "locale": locale,
+            "shipping-phase": job["shipping-phase"],
+            "shipping-product": job["shipping-product"],
+        }
+        yield beet_description
diff --git a/taskcluster/taskgraph/transforms/beetmover_repackage_partner.py b/taskcluster/taskgraph/transforms/beetmover_repackage_partner.py
new file mode 100644
index 0000000000..6f5e0f73bd
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/beetmover_repackage_partner.py
@@ -0,0 +1,339 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the beetmover task into an actual task description.
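+
+Throughout this file, `repack_id` is a slash-separated triple of the form
+"partner/subpartner/locale" (e.g. "acme/acme-foo/en-US", a made-up value),
+which the transforms below split into its three components.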
+""" + +from __future__ import absolute_import, print_function, unicode_literals + +from six import text_type +from taskgraph.loader.single_dep import schema +from taskgraph.transforms.base import TransformSequence +from taskgraph.transforms.beetmover import craft_release_properties +from taskgraph.util.attributes import copy_attributes_from_dependent_job +from taskgraph.util.partners import ( + get_ftp_platform, + get_partner_config_by_kind, +) +from taskgraph.util.schema import ( + optionally_keyed_by, + resolve_keyed_by, +) +from taskgraph.util.scriptworker import ( + add_scope_prefix, + get_beetmover_bucket_scope, +) +from taskgraph.util.taskcluster import get_artifact_prefix +from taskgraph.transforms.task import task_description_schema +from voluptuous import Any, Required, Optional + +from copy import deepcopy +import logging + +logger = logging.getLogger(__name__) + + +beetmover_description_schema = schema.extend( + { + # unique label to describe this beetmover task, defaults to {dep.label}-beetmover + Optional("label"): text_type, + Required("partner-bucket-scope"): optionally_keyed_by( + "release-level", text_type + ), + Required("partner-public-path"): Any(None, text_type), + Required("partner-private-path"): Any(None, text_type), + Optional("extra"): object, + Required("shipping-phase"): task_description_schema["shipping-phase"], + Optional("shipping-product"): task_description_schema["shipping-product"], + Optional("priority"): task_description_schema["priority"], + } +) + +transforms = TransformSequence() +transforms.add_validate(beetmover_description_schema) + + +@transforms.add +def resolve_keys(config, jobs): + for job in jobs: + resolve_keyed_by( + job, + "partner-bucket-scope", + item_name=job["label"], + **{"release-level": config.params.release_level()} + ) + yield job + + +@transforms.add +def make_task_description(config, jobs): + for job in jobs: + dep_job = job["primary-dependency"] + repack_id = dep_job.task.get("extra", {}).get("repack_id") + if not repack_id: + raise Exception("Cannot find repack id!") + + attributes = dep_job.attributes + build_platform = attributes.get("build_platform") + if not build_platform: + raise Exception("Cannot find build platform!") + + label = dep_job.label.replace("repackage-signing-l10n", "beetmover-") + label = dep_job.label.replace("repackage-signing-", "beetmover-") + label = label.replace("repackage-", "beetmover-") + label = label.replace("chunking-dummy-", "beetmover-") + description = ( + "Beetmover submission for repack_id '{repack_id}' for build '" + "{build_platform}/{build_type}'".format( + repack_id=repack_id, + build_platform=build_platform, + build_type=attributes.get("build_type"), + ) + ) + + dependencies = {} + + base_label = "release-partner-repack" + if "eme" in config.kind: + base_label = "release-eme-free-repack" + dependencies["build"] = "{}-{}".format(base_label, build_platform) + if "macosx" in build_platform or "win" in build_platform: + dependencies["repackage"] = "{}-repackage-{}-{}".format( + base_label, build_platform, repack_id.replace("/", "-") + ) + dependencies["repackage-signing"] = "{}-repackage-signing-{}-{}".format( + base_label, build_platform, repack_id.replace("/", "-") + ) + + attributes = copy_attributes_from_dependent_job(dep_job) + + task = { + "label": label, + "description": description, + "dependencies": dependencies, + "attributes": attributes, + "run-on-projects": dep_job.attributes.get("run_on_projects"), + "shipping-phase": job["shipping-phase"], + "shipping-product": 
job.get("shipping-product"), + "partner-private-path": job["partner-private-path"], + "partner-public-path": job["partner-public-path"], + "partner-bucket-scope": job["partner-bucket-scope"], + "extra": { + "repack_id": repack_id, + }, + } + # we may have reduced the priority for partner jobs, otherwise task.py will set it + if job.get("priority"): + task["priority"] = job["priority"] + + yield task + + +def populate_scopes_and_worker_type(config, job, bucket_scope, partner_public=False): + action_scope = add_scope_prefix(config, "beetmover:action:push-to-partner") + + task = deepcopy(job) + task["scopes"] = [bucket_scope, action_scope] + task["worker-type"] = "beetmover" + task["partner_public"] = partner_public + if partner_public: + task["label"] = "{}-public".format(task["label"]) + return task + + +@transforms.add +def split_public_and_private(config, jobs): + public_bucket_scope = get_beetmover_bucket_scope(config) + partner_config = get_partner_config_by_kind(config, config.kind) + + for job in jobs: + partner_bucket_scope = add_scope_prefix(config, job["partner-bucket-scope"]) + partner, subpartner, _ = job["extra"]["repack_id"].split("/") + + if partner_config[partner][subpartner].get("upload_to_candidates"): + # public + yield populate_scopes_and_worker_type( + config, job, public_bucket_scope, partner_public=True + ) + else: + # private + yield populate_scopes_and_worker_type( + config, job, partner_bucket_scope, partner_public=False + ) + + +def generate_upstream_artifacts( + job, + build_task_ref, + repackage_task_ref, + repackage_signing_task_ref, + platform, + repack_id, + partner_path, + repack_stub_installer=False, +): + + upstream_artifacts = [] + artifact_prefix = get_artifact_prefix(job) + + if "linux" in platform: + upstream_artifacts.append( + { + "taskId": {"task-reference": build_task_ref}, + "taskType": "build", + "paths": ["{}/{}/target.tar.bz2".format(artifact_prefix, repack_id)], + "locale": partner_path, + } + ) + upstream_artifacts.append( + { + "taskId": {"task-reference": repackage_signing_task_ref}, + "taskType": "repackage", + "paths": [ + "{}/{}/target.tar.bz2.asc".format(artifact_prefix, repack_id) + ], + "locale": partner_path, + } + ) + elif "macosx" in platform: + upstream_artifacts.append( + { + "taskId": {"task-reference": repackage_task_ref}, + "taskType": "repackage", + "paths": ["{}/{}/target.dmg".format(artifact_prefix, repack_id)], + "locale": partner_path, + } + ) + upstream_artifacts.append( + { + "taskId": {"task-reference": repackage_signing_task_ref}, + "taskType": "repackage", + "paths": ["{}/{}/target.dmg.asc".format(artifact_prefix, repack_id)], + "locale": partner_path, + } + ) + elif "win" in platform: + upstream_artifacts.append( + { + "taskId": {"task-reference": repackage_signing_task_ref}, + "taskType": "repackage", + "paths": [ + "{}/{}/target.installer.exe".format(artifact_prefix, repack_id) + ], + "locale": partner_path, + } + ) + upstream_artifacts.append( + { + "taskId": {"task-reference": repackage_signing_task_ref}, + "taskType": "repackage", + "paths": [ + "{}/{}/target.installer.exe.asc".format(artifact_prefix, repack_id) + ], + "locale": partner_path, + } + ) + if platform.startswith("win32") and repack_stub_installer: + upstream_artifacts.append( + { + "taskId": {"task-reference": repackage_signing_task_ref}, + "taskType": "repackage", + "paths": [ + "{}/{}/target.stub-installer.exe".format( + artifact_prefix, repack_id + ) + ], + "locale": partner_path, + } + ) + upstream_artifacts.append( + { + "taskId": 
{"task-reference": repackage_signing_task_ref}, + "taskType": "repackage", + "paths": [ + "{}/{}/target.stub-installer.exe.asc".format( + artifact_prefix, repack_id + ) + ], + "locale": partner_path, + } + ) + + if not upstream_artifacts: + raise Exception("Couldn't find any upstream artifacts.") + + return upstream_artifacts + + +@transforms.add +def make_task_worker(config, jobs): + for job in jobs: + platform = job["attributes"]["build_platform"] + repack_id = job["extra"]["repack_id"] + partner, subpartner, locale = job["extra"]["repack_id"].split("/") + partner_config = get_partner_config_by_kind(config, config.kind) + repack_stub_installer = partner_config[partner][subpartner].get( + "repack_stub_installer" + ) + build_task = None + repackage_task = None + repackage_signing_task = None + + for dependency in job["dependencies"].keys(): + if "repackage-signing" in dependency: + repackage_signing_task = dependency + elif "repackage" in dependency: + repackage_task = dependency + else: + build_task = "build" + + build_task_ref = "<" + str(build_task) + ">" + repackage_task_ref = "<" + str(repackage_task) + ">" + repackage_signing_task_ref = "<" + str(repackage_signing_task) + ">" + + # generate the partner path; we'll send this to beetmover as the "locale" + ftp_platform = get_ftp_platform(platform) + repl_dict = { + "build_number": config.params["build_number"], + "locale": locale, + "partner": partner, + "platform": ftp_platform, + "release_partner_build_number": config.params[ + "release_partner_build_number" + ], + "subpartner": subpartner, + "version": config.params["version"], + } + partner_public = job["partner_public"] + if partner_public: + partner_path_key = "partner-public-path" + else: + partner_path_key = "partner-private-path" + # Kinds can set these to None + if not job[partner_path_key]: + continue + partner_path = job[partner_path_key].format(**repl_dict) + del job["partner_public"] + del job["partner-private-path"] + del job["partner-public-path"] + del job["partner-bucket-scope"] + + worker = { + "implementation": "beetmover", + "release-properties": craft_release_properties(config, job), + "upstream-artifacts": generate_upstream_artifacts( + job, + build_task_ref, + repackage_task_ref, + repackage_signing_task_ref, + platform, + repack_id, + partner_path, + repack_stub_installer, + ), + "partner-public": partner_public, + } + job["worker"] = worker + + yield job diff --git a/taskcluster/taskgraph/transforms/beetmover_snap.py b/taskcluster/taskgraph/transforms/beetmover_snap.py new file mode 100644 index 0000000000..f510d51722 --- /dev/null +++ b/taskcluster/taskgraph/transforms/beetmover_snap.py @@ -0,0 +1,43 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Transform the snap beetmover kind into an actual task description. +""" + +from __future__ import absolute_import, print_function, unicode_literals + +from taskgraph.transforms.base import TransformSequence + +transforms = TransformSequence() + + +@transforms.add +def leave_snap_repackage_dependencies_only(config, jobs): + for job in jobs: + # XXX: We delete the build dependency because, unlike the other beetmover + # tasks, source doesn't depend on any build task at all. 
This hack should
+        # go away when we rewrite beetmover transforms to allow more flexibility in deps
+
+        job["dependencies"] = {
+            key: value
+            for key, value in job["dependencies"].items()
+            if key == "release-snap-repackage"
+        }
+
+        job["worker"]["upstream-artifacts"] = [
+            upstream_artifact
+            for upstream_artifact in job["worker"]["upstream-artifacts"]
+            if upstream_artifact["taskId"]["task-reference"]
+            == "<release-snap-repackage>"
+        ]
+
+        yield job
+
+
+@transforms.add
+def set_custom_treeherder_job_name(config, jobs):
+    for job in jobs:
+        job.get("treeherder", {})["symbol"] = "Snap(BM)"
+
+        yield job
diff --git a/taskcluster/taskgraph/transforms/beetmover_source.py b/taskcluster/taskgraph/transforms/beetmover_source.py
new file mode 100644
index 0000000000..967a501cdc
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/beetmover_source.py
@@ -0,0 +1,36 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the beetmover-source task to remove the `build` dependency.
+"""
+from __future__ import absolute_import

+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def remove_build_dependency_in_beetmover_source(config, jobs):
+    for job in jobs:
+        # XXX: We delete the build dependency because, unlike the other beetmover
+        # tasks, source doesn't depend on any build task at all. This hack should
+        # go away when we rewrite beetmover transforms to allow more flexibility in deps
+        # Essentially, we should use multi_dep for beetmover.
+        for depname in job["dependencies"]:
+            if "signing" not in depname:
+                del job["dependencies"][depname]
+                break
+        else:
+            raise Exception("Can't find build dep in beetmover source!")
+
+        all_upstream_artifacts = job["worker"]["upstream-artifacts"]
+        upstream_artifacts_without_build = [
+            upstream_artifact
+            for upstream_artifact in all_upstream_artifacts
+            if upstream_artifact["taskId"]["task-reference"] != "<{}>".format(depname)
+        ]
+        job["worker"]["upstream-artifacts"] = upstream_artifacts_without_build
+
+        yield job
diff --git a/taskcluster/taskgraph/transforms/beetmover_source_checksums.py b/taskcluster/taskgraph/transforms/beetmover_source_checksums.py
new file mode 100644
index 0000000000..dbf498d6dd
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/beetmover_source_checksums.py
@@ -0,0 +1,139 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform release-beetmover-source-checksums into an actual task description.
+""" + +from __future__ import absolute_import, print_function, unicode_literals + +from six import text_type +from taskgraph.loader.single_dep import schema +from taskgraph.transforms.base import TransformSequence +from taskgraph.transforms.beetmover import craft_release_properties +from taskgraph.util.attributes import copy_attributes_from_dependent_job +from taskgraph.util.scriptworker import ( + generate_beetmover_artifact_map, + generate_beetmover_upstream_artifacts, + get_beetmover_bucket_scope, + get_beetmover_action_scope, +) +from taskgraph.transforms.task import task_description_schema +from voluptuous import Optional + +beetmover_checksums_description_schema = schema.extend( + { + Optional("label"): text_type, + Optional("treeherder"): task_description_schema["treeherder"], + Optional("locale"): text_type, + Optional("shipping-phase"): task_description_schema["shipping-phase"], + Optional("shipping-product"): task_description_schema["shipping-product"], + Optional("attributes"): task_description_schema["attributes"], + } +) + +transforms = TransformSequence() +transforms.add_validate(beetmover_checksums_description_schema) + + +@transforms.add +def make_beetmover_checksums_description(config, jobs): + for job in jobs: + dep_job = job["primary-dependency"] + attributes = dep_job.attributes + + treeherder = job.get("treeherder", {}) + treeherder.setdefault("symbol", "BMcss(N)") + dep_th_platform = ( + dep_job.task.get("extra", {}) + .get("treeherder", {}) + .get("machine", {}) + .get("platform", "") + ) + treeherder.setdefault("platform", "{}/opt".format(dep_th_platform)) + treeherder.setdefault("tier", 1) + treeherder.setdefault("kind", "build") + + label = job["label"] + build_platform = attributes.get("build_platform") + + description = "Beetmover submission of checksums for source file" + + extra = {} + if "devedition" in build_platform: + extra["product"] = "devedition" + else: + extra["product"] = "firefox" + + dependencies = {dep_job.kind: dep_job.label} + for k, v in dep_job.dependencies.items(): + if k.startswith("beetmover"): + dependencies[k] = v + + attributes = copy_attributes_from_dependent_job(dep_job) + attributes.update(job.get("attributes", {})) + + bucket_scope = get_beetmover_bucket_scope(config) + action_scope = get_beetmover_action_scope(config) + + task = { + "label": label, + "description": description, + "worker-type": "beetmover", + "scopes": [bucket_scope, action_scope], + "dependencies": dependencies, + "attributes": attributes, + "run-on-projects": dep_job.attributes.get("run_on_projects"), + "treeherder": treeherder, + "extra": extra, + } + + if "shipping-phase" in job: + task["shipping-phase"] = job["shipping-phase"] + + if "shipping-product" in job: + task["shipping-product"] = job["shipping-product"] + + yield task + + +@transforms.add +def make_beetmover_checksums_worker(config, jobs): + for job in jobs: + valid_beetmover_job = len(job["dependencies"]) == 2 + if not valid_beetmover_job: + raise NotImplementedError("Beetmover checksums must have two dependencies.") + + locale = job["attributes"].get("locale") + platform = job["attributes"]["build_platform"] + + refs = { + "beetmover": None, + "signing": None, + } + for dependency in job["dependencies"].keys(): + if dependency.startswith("beetmover"): + refs["beetmover"] = "<{}>".format(dependency) + else: + refs["signing"] = "<{}>".format(dependency) + if None in refs.values(): + raise NotImplementedError( + "Beetmover checksums must have a beetmover and signing dependency!" 
+ ) + + worker = { + "implementation": "beetmover", + "release-properties": craft_release_properties(config, job), + "upstream-artifacts": generate_beetmover_upstream_artifacts( + config, job, platform, locale + ), + "artifact-map": generate_beetmover_artifact_map( + config, job, platform=platform + ), + } + + if locale: + worker["locale"] = locale + job["worker"] = worker + + yield job diff --git a/taskcluster/taskgraph/transforms/bouncer_aliases.py b/taskcluster/taskgraph/transforms/bouncer_aliases.py new file mode 100644 index 0000000000..776d8aec7d --- /dev/null +++ b/taskcluster/taskgraph/transforms/bouncer_aliases.py @@ -0,0 +1,107 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Add from parameters.yml into bouncer submission tasks. +""" + +from __future__ import absolute_import, print_function, unicode_literals + +import logging + +from taskgraph.transforms.base import TransformSequence +from taskgraph.transforms.bouncer_submission import craft_bouncer_product_name +from taskgraph.transforms.bouncer_submission_partners import ( + craft_partner_bouncer_product_name, +) +from taskgraph.util.partners import get_partners_to_be_published +from taskgraph.util.schema import resolve_keyed_by +from taskgraph.util.scriptworker import get_release_config + +logger = logging.getLogger(__name__) + +transforms = TransformSequence() + + +@transforms.add +def make_task_worker(config, jobs): + for job in jobs: + resolve_keyed_by( + job, + "worker-type", + item_name=job["name"], + **{"release-level": config.params.release_level()} + ) + resolve_keyed_by( + job, + "scopes", + item_name=job["name"], + **{"release-level": config.params.release_level()} + ) + resolve_keyed_by( + job, + "bouncer-products-per-alias", + item_name=job["name"], + project=config.params["project"], + ) + if "partner-bouncer-products-per-alias" in job: + resolve_keyed_by( + job, + "partner-bouncer-products-per-alias", + item_name=job["name"], + project=config.params["project"], + ) + + job["worker"]["entries"] = craft_bouncer_entries(config, job) + + del job["bouncer-products-per-alias"] + if "partner-bouncer-products-per-alias" in job: + del job["partner-bouncer-products-per-alias"] + + if job["worker"]["entries"]: + yield job + else: + logger.warn( + 'No bouncer entries defined in bouncer submission task for "{}". 
\ +Job deleted.'.format( + job["name"] + ) + ) + + +def craft_bouncer_entries(config, job): + release_config = get_release_config(config) + + product = job["shipping-product"] + current_version = release_config["version"] + bouncer_products_per_alias = job["bouncer-products-per-alias"] + + entries = { + bouncer_alias: craft_bouncer_product_name( + product, + bouncer_product, + current_version, + ) + for bouncer_alias, bouncer_product in bouncer_products_per_alias.items() + } + + partner_bouncer_products_per_alias = job.get("partner-bouncer-products-per-alias") + if partner_bouncer_products_per_alias: + partners = get_partners_to_be_published(config) + for partner, sub_config_name, _ in partners: + entries.update( + { + bouncer_alias.replace( + "PARTNER", "{}-{}".format(partner, sub_config_name) + ): craft_partner_bouncer_product_name( + product, + bouncer_product, + current_version, + partner, + sub_config_name, + ) + for bouncer_alias, bouncer_product in partner_bouncer_products_per_alias.items() # NOQA: E501 + } + ) + + return entries diff --git a/taskcluster/taskgraph/transforms/bouncer_check.py b/taskcluster/taskgraph/transforms/bouncer_check.py new file mode 100644 index 0000000000..de495f485b --- /dev/null +++ b/taskcluster/taskgraph/transforms/bouncer_check.py @@ -0,0 +1,112 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +from __future__ import absolute_import, print_function, unicode_literals +import json +from pipes import quote as shell_quote + +import six +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.scriptworker import get_release_config +from taskgraph.util.schema import resolve_keyed_by + +import logging + +logger = logging.getLogger(__name__) + +transforms = TransformSequence() + + +@transforms.add +def add_command(config, jobs): + for job in jobs: + command = [ + "python", + "testing/mozharness/scripts/release/bouncer_check.py", + ] + job["run"].update( + { + "using": "mach", + "mach": command, + } + ) + yield job + + +@transforms.add +def add_previous_versions(config, jobs): + release_config = get_release_config(config) + if not release_config.get("partial_versions"): + for job in jobs: + yield job + else: + extra_params = [] + for partial in release_config["partial_versions"].split(","): + extra_params.append( + "--previous-version={}".format(partial.split("build")[0].strip()) + ) + + for job in jobs: + job["run"]["mach"].extend(extra_params) + yield job + + +@transforms.add +def handle_keyed_by(config, jobs): + """Resolve fields that can be keyed by project, etc.""" + fields = [ + "run.config", + "run.product-field", + "run.extra-config", + ] + + release_config = get_release_config(config) + version = release_config["version"] + + for job in jobs: + for field in fields: + resolve_keyed_by( + item=job, + field=field, + item_name=job["name"], + **{ + "project": config.params["project"], + "release-level": config.params.release_level(), + "release-type": config.params["release_type"], + } + ) + + for cfg in job["run"]["config"]: + job["run"]["mach"].extend(["--config", cfg]) + + if config.kind == "cron-bouncer-check": + job["run"]["mach"].extend( + [ + "--product-field={}".format(job["run"]["product-field"]), + "--products-url={}".format(job["run"]["products-url"]), + ] + ) + del job["run"]["product-field"] + del job["run"]["products-url"] + elif config.kind == "release-bouncer-check": 
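+            # The release flavor only checks the version being shipped, so the
+            # final command resembles (hypothetical config name and version):
+            #   python testing/mozharness/scripts/release/bouncer_check.py \
+            #       --config releases/bouncer_firefox_release.py --version=86.0.1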
+ job["run"]["mach"].append("--version={}".format(version)) + + del job["run"]["config"] + + if "extra-config" in job["run"]: + env = job["worker"].setdefault("env", {}) + env["EXTRA_MOZHARNESS_CONFIG"] = six.ensure_text( + json.dumps(job["run"]["extra-config"], sort_keys=True) + ) + del job["run"]["extra-config"] + + yield job + + +@transforms.add +def command_to_string(config, jobs): + """Convert command to string to make it work properly with run-task""" + for job in jobs: + job["run"]["mach"] = " ".join(map(shell_quote, job["run"]["mach"])) + yield job diff --git a/taskcluster/taskgraph/transforms/bouncer_locations.py b/taskcluster/taskgraph/transforms/bouncer_locations.py new file mode 100644 index 0000000000..21f96b16df --- /dev/null +++ b/taskcluster/taskgraph/transforms/bouncer_locations.py @@ -0,0 +1,36 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +from __future__ import absolute_import, print_function, unicode_literals + +import logging + +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.schema import resolve_keyed_by + +logger = logging.getLogger(__name__) + + +transforms = TransformSequence() + + +@transforms.add +def make_task_worker(config, jobs): + for job in jobs: + resolve_keyed_by( + job, "worker-type", item_name=job["name"], project=config.params["project"] + ) + resolve_keyed_by( + job, "scopes", item_name=job["name"], project=config.params["project"] + ) + resolve_keyed_by( + job, + "bouncer-products", + item_name=job["name"], + project=config.params["project"], + ) + + job["worker"]["bouncer-products"] = job["bouncer-products"] + + del job["bouncer-products"] + yield job diff --git a/taskcluster/taskgraph/transforms/bouncer_submission.py b/taskcluster/taskgraph/transforms/bouncer_submission.py new file mode 100644 index 0000000000..4b1cd3641b --- /dev/null +++ b/taskcluster/taskgraph/transforms/bouncer_submission.py @@ -0,0 +1,325 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Add from parameters.yml into bouncer submission tasks. 
+""" + +from __future__ import absolute_import, print_function, unicode_literals + +import copy +import logging + +import attr + +from taskgraph.transforms.base import TransformSequence +from taskgraph.transforms.l10n import parse_locales_file +from taskgraph.util.schema import resolve_keyed_by +from taskgraph.util.scriptworker import get_release_config + +logger = logging.getLogger(__name__) + + +FTP_PLATFORMS_PER_BOUNCER_PLATFORM = { + "linux": "linux-i686", + "linux64": "linux-x86_64", + "osx": "mac", + "win": "win32", + "win64": "win64", + "win64-aarch64": "win64-aarch64", +} + +# :lang is interpolated by bouncer at runtime +CANDIDATES_PATH_TEMPLATE = "/{ftp_product}/candidates/{version}-candidates/build{build_number}/\ +{update_folder}{ftp_platform}/:lang/{file}" +RELEASES_PATH_TEMPLATE = "/{ftp_product}/releases/{version}/\ +{update_folder}{ftp_platform}/:lang/{file}" + + +CONFIG_PER_BOUNCER_PRODUCT = { + "complete-mar": { + "name_postfix": "-Complete", + "path_template": RELEASES_PATH_TEMPLATE, + "file_names": { + "default": "{product}-{version}.complete.mar", + }, + }, + "complete-mar-candidates": { + "name_postfix": "build{build_number}-Complete", + "path_template": CANDIDATES_PATH_TEMPLATE, + "file_names": { + "default": "{product}-{version}.complete.mar", + }, + }, + "installer": { + "path_template": RELEASES_PATH_TEMPLATE, + "file_names": { + "linux": "{product}-{version}.tar.bz2", + "linux64": "{product}-{version}.tar.bz2", + "osx": "{pretty_product}%20{version}.dmg", + "win": "{pretty_product}%20Setup%20{version}.exe", + "win64": "{pretty_product}%20Setup%20{version}.exe", + "win64-aarch64": "{pretty_product}%20Setup%20{version}.exe", + }, + }, + "partial-mar": { + "name_postfix": "-Partial-{previous_version}", + "path_template": RELEASES_PATH_TEMPLATE, + "file_names": { + "default": "{product}-{previous_version}-{version}.partial.mar", + }, + }, + "partial-mar-candidates": { + "name_postfix": "build{build_number}-Partial-{previous_version}build{previous_build}", + "path_template": CANDIDATES_PATH_TEMPLATE, + "file_names": { + "default": "{product}-{previous_version}-{version}.partial.mar", + }, + }, + "stub-installer": { + "name_postfix": "-stub", + # We currently have a sole win32 stub installer that is to be used + # in all windows platforms to toggle between full installers + "path_template": RELEASES_PATH_TEMPLATE.replace("{ftp_platform}", "win32"), + "file_names": { + "win": "{pretty_product}%20Installer.exe", + "win64": "{pretty_product}%20Installer.exe", + "win64-aarch64": "{pretty_product}%20Installer.exe", + }, + }, + "msi": { + "name_postfix": "-msi-SSL", + "path_template": RELEASES_PATH_TEMPLATE, + "file_names": { + "win": "{pretty_product}%20Setup%20{version}.msi", + "win64": "{pretty_product}%20Setup%20{version}.msi", + }, + }, + "pkg": { + "name_postfix": "-pkg-SSL", + "path_template": RELEASES_PATH_TEMPLATE, + "file_names": { + "osx": "{pretty_product}%20{version}.pkg", + }, + }, +} +CONFIG_PER_BOUNCER_PRODUCT["installer-ssl"] = copy.deepcopy( + CONFIG_PER_BOUNCER_PRODUCT["installer"] +) +CONFIG_PER_BOUNCER_PRODUCT["installer-ssl"]["name_postfix"] = "-SSL" + +transforms = TransformSequence() + + +@transforms.add +def make_task_worker(config, jobs): + for job in jobs: + resolve_keyed_by( + job, + "worker-type", + item_name=job["name"], + **{"release-level": config.params.release_level()} + ) + resolve_keyed_by( + job, + "scopes", + item_name=job["name"], + **{"release-level": config.params.release_level()} + ) + resolve_keyed_by( + job, + "bouncer-products", + 
item_name=job["name"], + **{"release-type": config.params["release_type"]} + ) + + # No need to filter out ja-JP-mac, we need to upload both; but we do + # need to filter out the platforms they come with + all_locales = sorted( + [ + locale + for locale in parse_locales_file(job["locales-file"]).keys() + if locale not in ("linux", "win32", "osx") + ] + ) + + job["worker"]["locales"] = all_locales + job["worker"]["entries"] = craft_bouncer_entries(config, job) + + del job["locales-file"] + del job["bouncer-platforms"] + del job["bouncer-products"] + + if job["worker"]["entries"]: + yield job + else: + logger.warn( + 'No bouncer entries defined in bouncer submission task for "{}". \ +Job deleted.'.format( + job["name"] + ) + ) + + +def craft_bouncer_entries(config, job): + release_config = get_release_config(config) + + product = job["shipping-product"] + bouncer_platforms = job["bouncer-platforms"] + + current_version = release_config["version"] + current_build_number = release_config["build_number"] + + bouncer_products = job["bouncer-products"] + previous_versions_string = release_config.get("partial_versions", None) + if previous_versions_string: + previous_versions = previous_versions_string.split(", ") + else: + logger.warn( + 'No partials defined! Bouncer submission task won\'t send any \ +partial-related entry for "{}"'.format( + job["name"] + ) + ) + bouncer_products = [ + bouncer_product + for bouncer_product in bouncer_products + if "partial" not in bouncer_product + ] + previous_versions = [None] + + project = config.params["project"] + + return { + craft_bouncer_product_name( + product, + bouncer_product, + current_version, + current_build_number, + previous_version, + ): { + "options": { + "add_locales": True, + "ssl_only": craft_ssl_only(bouncer_product, project), + }, + "paths_per_bouncer_platform": craft_paths_per_bouncer_platform( + product, + bouncer_product, + bouncer_platforms, + current_version, + current_build_number, + previous_version, + ), + } + for bouncer_product in bouncer_products + for previous_version in previous_versions + } + + +def craft_paths_per_bouncer_platform( + product, + bouncer_product, + bouncer_platforms, + current_version, + current_build_number, + previous_version=None, +): + paths_per_bouncer_platform = {} + for bouncer_platform in bouncer_platforms: + file_names_per_platform = CONFIG_PER_BOUNCER_PRODUCT[bouncer_product][ + "file_names" + ] + file_name_template = file_names_per_platform.get( + bouncer_platform, file_names_per_platform.get("default", None) + ) + if not file_name_template: + # Some bouncer product like stub-installer are only meant to be on Windows. 
+ # Thus no default value is defined there + continue + + file_name_product = _craft_filename_product(product) + file_name = file_name_template.format( + product=file_name_product, + pretty_product=file_name_product.capitalize(), + version=current_version, + previous_version=split_build_data(previous_version)[0], + ) + + path_template = CONFIG_PER_BOUNCER_PRODUCT[bouncer_product]["path_template"] + file_relative_location = path_template.format( + ftp_product=_craft_ftp_product(product), + version=current_version, + build_number=current_build_number, + update_folder="update/" if "-mar" in bouncer_product else "", + ftp_platform=FTP_PLATFORMS_PER_BOUNCER_PLATFORM[bouncer_platform], + file=file_name, + ) + + paths_per_bouncer_platform[bouncer_platform] = file_relative_location + + return paths_per_bouncer_platform + + +def _craft_ftp_product(product): + return product.lower() + + +def _craft_filename_product(product): + return "firefox" if product == "devedition" else product + + +@attr.s +class InvalidSubstitution(object): + error = attr.ib(type=str) + + def __str__(self): + raise Exception("Partial is being processed, but no previous version defined.") + + +def craft_bouncer_product_name( + product, + bouncer_product, + current_version, + current_build_number=None, + previous_version=None, +): + if previous_version is None: + previous_version = previous_build = InvalidSubstitution( + "Partial is being processed, but no previous version defined." + ) + else: + previous_version, previous_build = split_build_data(previous_version) + postfix = ( + CONFIG_PER_BOUNCER_PRODUCT[bouncer_product] + .get("name_postfix", "") + .format( + build_number=current_build_number, + previous_version=previous_version, + previous_build=previous_build, + ) + ) + + return "{product}-{version}{postfix}".format( + product=product.capitalize(), version=current_version, postfix=postfix + ) + + +def craft_ssl_only(bouncer_product, project): + # XXX ESR is the only channel where we force serve the installer over SSL + if "-esr" in project and bouncer_product == "installer": + return True + + return bouncer_product not in ( + "complete-mar", + "complete-mar-candidates", + "installer", + "partial-mar", + "partial-mar-candidates", + ) + + +def split_build_data(version): + if version and "build" in version: + return version.split("build") + else: + return version, InvalidSubstitution("k") diff --git a/taskcluster/taskgraph/transforms/bouncer_submission_partners.py b/taskcluster/taskgraph/transforms/bouncer_submission_partners.py new file mode 100644 index 0000000000..5128eb6cf5 --- /dev/null +++ b/taskcluster/taskgraph/transforms/bouncer_submission_partners.py @@ -0,0 +1,190 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Add from parameters.yml into bouncer submission tasks. 
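+Partner repacks get one bouncer product per (partner, sub-config) pair, e.g.
+"Firefox-86.0.1-acme-mini" for a hypothetical partner "acme" with sub-config
+"mini".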
+""" + +from __future__ import absolute_import, print_function, unicode_literals + +import logging + +from taskgraph.transforms.base import TransformSequence +from taskgraph.transforms.bouncer_submission import ( + FTP_PLATFORMS_PER_BOUNCER_PLATFORM, + CONFIG_PER_BOUNCER_PRODUCT as CONFIG_PER_BOUNCER_PRODUCT_VANILLA, + _craft_ftp_product, + _craft_filename_product, +) +from taskgraph.util.partners import ( + check_if_partners_enabled, + get_partners_to_be_published, +) +from taskgraph.util.schema import resolve_keyed_by +from taskgraph.util.scriptworker import get_release_config + +logger = logging.getLogger(__name__) + + +PARTNER_PLATFORMS_TO_BOUNCER = { + "linux-shippable": "linux", + "linux64-shippable": "linux64", + "macosx64-shippable": "osx", + "win32-shippable": "win", + "win64-shippable": "win64", + "win64-aarch64-shippable": "win64-aarch64", +} + +# :lang is interpolated by bouncer at runtime +RELEASES_PARTNERS_PATH_TEMPLATE = "/{ftp_product}/releases/partners/{partner}/{sub_config}/\ +{version}/{ftp_platform}/:lang/{file}" + +CONFIG_PER_BOUNCER_PRODUCT = { + "installer": { + "name_postfix": "-{partner}-{sub_config}", + "path_template": RELEASES_PARTNERS_PATH_TEMPLATE, + "file_names": CONFIG_PER_BOUNCER_PRODUCT_VANILLA["installer"]["file_names"], + }, + "stub-installer": { + "name_postfix": "-{partner}-{sub_config}-stub", + # We currently have a sole win32 stub installer that is to be used + # in all windows platforms to toggle between full installers + "path_template": RELEASES_PARTNERS_PATH_TEMPLATE.replace( + "{ftp_platform}", "win32" + ), + "file_names": CONFIG_PER_BOUNCER_PRODUCT_VANILLA["stub-installer"][ + "file_names" + ], + }, +} + +transforms = TransformSequence() +transforms.add(check_if_partners_enabled) + + +@transforms.add +def make_task_worker(config, jobs): + for job in jobs: + resolve_keyed_by( + job, + "worker-type", + item_name=job["name"], + **{"release-level": config.params.release_level()} + ) + resolve_keyed_by( + job, + "scopes", + item_name=job["name"], + **{"release-level": config.params.release_level()} + ) + resolve_keyed_by( + job, + "bouncer-products", + item_name=job["name"], + **{"release-type": config.params["release_type"]} + ) + + # the schema requires at least one locale but this will not be used + job["worker"]["locales"] = ["fake"] + job["worker"]["entries"] = craft_bouncer_entries(config, job) + + del job["locales-file"] + del job["bouncer-platforms"] + del job["bouncer-products"] + + if job["worker"]["entries"]: + yield job + + +def craft_bouncer_entries(config, job): + release_config = get_release_config(config) + + product = job["shipping-product"] + current_version = release_config["version"] + bouncer_products = job["bouncer-products"] + + partners = get_partners_to_be_published(config) + entries = {} + for partner, sub_config_name, platforms in partners: + platforms = [PARTNER_PLATFORMS_TO_BOUNCER[p] for p in platforms] + entries.update( + { + craft_partner_bouncer_product_name( + product, bouncer_product, current_version, partner, sub_config_name + ): { + "options": { + "add_locales": False, # partners may use different sets of locales + "ssl_only": craft_ssl_only(bouncer_product), + }, + "paths_per_bouncer_platform": craft_paths_per_bouncer_platform( + product, + bouncer_product, + platforms, + current_version, + partner, + sub_config_name, + ), + } + for bouncer_product in bouncer_products + } + ) + return entries + + +def craft_paths_per_bouncer_platform( + product, bouncer_product, bouncer_platforms, current_version, partner, 
sub_config +): + paths_per_bouncer_platform = {} + for bouncer_platform in bouncer_platforms: + file_names_per_platform = CONFIG_PER_BOUNCER_PRODUCT[bouncer_product][ + "file_names" + ] + file_name_template = file_names_per_platform.get( + bouncer_platform, file_names_per_platform.get("default", None) + ) + if not file_name_template: + # Some bouncer product like stub-installer are only meant to be on Windows. + # Thus no default value is defined there + continue + + file_name_product = _craft_filename_product(product) + file_name = file_name_template.format( + product=file_name_product, + pretty_product=file_name_product.capitalize(), + version=current_version, + ) + + path_template = CONFIG_PER_BOUNCER_PRODUCT[bouncer_product]["path_template"] + file_relative_location = path_template.format( + ftp_product=_craft_ftp_product(product), + version=current_version, + ftp_platform=FTP_PLATFORMS_PER_BOUNCER_PLATFORM[bouncer_platform], + partner=partner, + sub_config=sub_config, + file=file_name, + ) + + paths_per_bouncer_platform[bouncer_platform] = file_relative_location + + return paths_per_bouncer_platform + + +def craft_partner_bouncer_product_name( + product, bouncer_product, current_version, partner, sub_config +): + postfix = ( + CONFIG_PER_BOUNCER_PRODUCT[bouncer_product] + .get("name_postfix", "") + .format( + partner=partner, + sub_config=sub_config, + ) + ) + + return "{product}-{version}{postfix}".format( + product=product.capitalize(), version=current_version, postfix=postfix + ) + + +def craft_ssl_only(bouncer_product): + return bouncer_product == "stub-installer" diff --git a/taskcluster/taskgraph/transforms/build.py b/taskcluster/taskgraph/transforms/build.py new file mode 100644 index 0000000000..ac6f65d2ac --- /dev/null +++ b/taskcluster/taskgraph/transforms/build.py @@ -0,0 +1,215 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Apply some defaults and minor modifications to the jobs defined in the build +kind. 
+""" + +from __future__ import absolute_import, print_function, unicode_literals + +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.attributes import RELEASE_PROJECTS +from taskgraph.util.schema import resolve_keyed_by +from taskgraph.util.treeherder import add_suffix +from taskgraph.util.workertypes import worker_type_implementation + +from mozbuild.artifact_builds import JOB_CHOICES as ARTIFACT_JOBS + +import logging + +logger = logging.getLogger(__name__) + +transforms = TransformSequence() + + +@transforms.add +def set_defaults(config, jobs): + """Set defaults, including those that differ per worker implementation""" + for job in jobs: + job["treeherder"].setdefault("kind", "build") + job["treeherder"].setdefault("tier", 1) + _, worker_os = worker_type_implementation( + config.graph_config, job["worker-type"] + ) + worker = job.setdefault("worker", {}) + worker.setdefault("env", {}) + worker["chain-of-trust"] = True + if worker_os == "linux": + worker.setdefault("docker-image", {"in-tree": "debian8-amd64-build"}) + + yield job + + +@transforms.add +def stub_installer(config, jobs): + for job in jobs: + resolve_keyed_by( + job, + "stub-installer", + item_name=job["name"], + project=config.params["project"], + **{ + "release-type": config.params["release_type"], + } + ) + job.setdefault("attributes", {}) + if job.get("stub-installer"): + job["attributes"]["stub-installer"] = job["stub-installer"] + job["worker"]["env"].update({"USE_STUB_INSTALLER": "1"}) + if "stub-installer" in job: + del job["stub-installer"] + yield job + + +@transforms.add +def resolve_shipping_product(config, jobs): + for job in jobs: + resolve_keyed_by( + job, + "shipping-product", + item_name=job["name"], + **{ + "release-type": config.params["release_type"], + } + ) + yield job + + +@transforms.add +def update_channel(config, jobs): + keys = [ + "run.update-channel", + "run.mar-channel-id", + "run.accepted-mar-channel-ids", + ] + for job in jobs: + job["worker"].setdefault("env", {}) + for key in keys: + resolve_keyed_by( + job, + key, + item_name=job["name"], + **{ + "project": config.params["project"], + "release-type": config.params["release_type"], + } + ) + update_channel = job["run"].pop("update-channel", None) + if update_channel: + job["run"].setdefault("extra-config", {})["update_channel"] = update_channel + job["attributes"]["update-channel"] = update_channel + mar_channel_id = job["run"].pop("mar-channel-id", None) + if mar_channel_id: + job["attributes"]["mar-channel-id"] = mar_channel_id + job["worker"]["env"]["MAR_CHANNEL_ID"] = mar_channel_id + accepted_mar_channel_ids = job["run"].pop("accepted-mar-channel-ids", None) + if accepted_mar_channel_ids: + job["attributes"]["accepted-mar-channel-ids"] = accepted_mar_channel_ids + job["worker"]["env"]["ACCEPTED_MAR_CHANNEL_IDS"] = accepted_mar_channel_ids + + yield job + + +@transforms.add +def mozconfig(config, jobs): + for job in jobs: + resolve_keyed_by( + job, + "run.mozconfig-variant", + item_name=job["name"], + **{ + "release-type": config.params["release_type"], + } + ) + mozconfig_variant = job["run"].pop("mozconfig-variant", None) + if mozconfig_variant: + job["run"].setdefault("extra-config", {})[ + "mozconfig_variant" + ] = mozconfig_variant + yield job + + +@transforms.add +def use_artifact(config, jobs): + if config.params.is_try(): + use_artifact = config.params["try_task_config"].get( + "use-artifact-builds", False + ) + else: + use_artifact = False + for job in jobs: + if ( + config.kind == "build" + and 
use_artifact + and job.get("index", {}).get("job-name") in ARTIFACT_JOBS + # If tests aren't packaged, then we are not able to rebuild all the packages + and job["worker"]["env"].get("MOZ_AUTOMATION_PACKAGE_TESTS") == "1" + ): + job["treeherder"]["symbol"] = add_suffix(job["treeherder"]["symbol"], "a") + job["worker"]["env"]["USE_ARTIFACT"] = "1" + job["attributes"]["artifact-build"] = True + yield job + + +@transforms.add +def use_profile_data(config, jobs): + for job in jobs: + use_pgo = job.pop("use-pgo", False) + disable_pgo = config.params["try_task_config"].get("disable-pgo", False) + artifact_build = job["attributes"].get("artifact-build") + if not use_pgo or disable_pgo or artifact_build: + yield job + continue + + # If use_pgo is True, the task uses the generate-profile task of the + # same name. Otherwise a task can specify a specific generate-profile + # task to use in the use_pgo field. + if use_pgo is True: + name = job["name"] + else: + name = use_pgo + dependencies = "generate-profile-{}".format(name) + job.setdefault("dependencies", {})["generate-profile"] = dependencies + job.setdefault("fetches", {})["generate-profile"] = ["profdata.tar.xz"] + job["worker"]["env"].update({"TASKCLUSTER_PGO_PROFILE_USE": "1"}) + + _, worker_os = worker_type_implementation( + config.graph_config, job["worker-type"] + ) + if worker_os == "linux": + # LTO linkage needs more open files than the default from run-task. + job["worker"]["env"].update({"MOZ_LIMIT_NOFILE": "8192"}) + + yield job + + +@transforms.add +def resolve_keys(config, jobs): + for job in jobs: + resolve_keyed_by( + job, + "use-sccache", + item_name=job["name"], + **{"release-level": config.params.release_level()} + ) + yield job + + +@transforms.add +def enable_full_crashsymbols(config, jobs): + """Enable full crashsymbols on jobs with + 'enable-full-crashsymbols' set to True and on release branches, or + on try""" + branches = RELEASE_PROJECTS | { + "try", + } + for job in jobs: + enable_full_crashsymbols = job["attributes"].get("enable-full-crashsymbols") + if enable_full_crashsymbols and config.params["project"] in branches: + logger.debug("Enabling full symbol generation for %s", job["name"]) + job["worker"]["env"]["MOZ_ENABLE_FULL_SYMBOLS"] = "1" + else: + logger.debug("Disabling full symbol generation for %s", job["name"]) + job["attributes"].pop("enable-full-crashsymbols", None) + yield job diff --git a/taskcluster/taskgraph/transforms/build_attrs.py b/taskcluster/taskgraph/transforms/build_attrs.py new file mode 100644 index 0000000000..c468020c2c --- /dev/null +++ b/taskcluster/taskgraph/transforms/build_attrs.py @@ -0,0 +1,50 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +from __future__ import absolute_import, print_function, unicode_literals + +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.platforms import platform_family + +transforms = TransformSequence() + + +@transforms.add +def set_build_attributes(config, jobs): + """ + Set the build_platform and build_type attributes based on the job name. + Although not all jobs using this transform are actual "builds", the try + option syntax treats them as such, and this arranges the attributes + appropriately for that purpose. 
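+
+    For example, a job named "linux64/pgo" yields build_platform
+    "linux64-pgo" and build_type "opt".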
+ """ + for job in jobs: + build_platform, build_type = job["name"].split("/") + + # pgo builds are represented as a different platform, type opt + if build_type == "pgo": + build_platform = build_platform + "-pgo" + build_type = "opt" + + attributes = job.setdefault("attributes", {}) + attributes.update( + { + "build_platform": build_platform, + "build_type": build_type, + } + ) + + yield job + + +@transforms.add +def set_schedules_optimization(config, jobs): + """Set the `skip-unless-affected` optimization based on the build platform.""" + for job in jobs: + # don't add skip-unless-schedules if there's already a when defined + if "when" in job: + yield job + continue + + build_platform = job["attributes"]["build_platform"] + job.setdefault("optimization", {"build": [platform_family(build_platform)]}) + yield job diff --git a/taskcluster/taskgraph/transforms/build_fat_aar.py b/taskcluster/taskgraph/transforms/build_fat_aar.py new file mode 100644 index 0000000000..2cd111040c --- /dev/null +++ b/taskcluster/taskgraph/transforms/build_fat_aar.py @@ -0,0 +1,73 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +from __future__ import absolute_import, print_function, unicode_literals + +import copy + +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.declarative_artifacts import get_geckoview_upstream_artifacts +from taskgraph.util.taskcluster import get_artifact_prefix + + +transforms = TransformSequence() + + +MOZ_ANDROID_FAT_AAR_ENV_MAP = { + "android-api-16-shippable": "MOZ_ANDROID_FAT_AAR_ARMEABI_V7A", + "android-aarch64-shippable": "MOZ_ANDROID_FAT_AAR_ARM64_V8A", + "android-x86-shippable": "MOZ_ANDROID_FAT_AAR_X86", + "android-x86_64-shippable": "MOZ_ANDROID_FAT_AAR_X86_64", + "android-api-16-opt": "MOZ_ANDROID_FAT_AAR_ARMEABI_V7A", + "android-aarch64-opt": "MOZ_ANDROID_FAT_AAR_ARM64_V8A", + "android-x86-opt": "MOZ_ANDROID_FAT_AAR_X86", + "android-x86_64-opt": "MOZ_ANDROID_FAT_AAR_X86_64", +} + + +@transforms.add +def set_fetches_and_locations(config, jobs): + """Set defaults, including those that differ per worker implementation""" + for job in jobs: + dependencies = copy.deepcopy(job["dependencies"]) + + for platform, label in dependencies.items(): + job["dependencies"] = {"build": label} + + aar_location = _get_aar_location(config, job, platform) + prefix = get_artifact_prefix(job) + if not prefix.endswith("/"): + prefix = prefix + "/" + if aar_location.startswith(prefix): + aar_location = aar_location[len(prefix) :] + + job.setdefault("fetches", {}).setdefault(platform, []).append( + { + "artifact": aar_location, + "extract": False, + } + ) + + aar_file_name = aar_location.split("/")[-1] + env_var = MOZ_ANDROID_FAT_AAR_ENV_MAP[platform] + job["worker"]["env"][env_var] = aar_file_name + + job["dependencies"] = dependencies + + yield job + + +def _get_aar_location(config, job, platform): + artifacts_locations = get_geckoview_upstream_artifacts( + config, job, platform=platform + ) + aar_locations = [ + path for path in artifacts_locations[0]["paths"] if path.endswith(".aar") + ] + if len(aar_locations) != 1: + raise ValueError( + "Only a single AAR must be given. 
Got: {}".format(aar_locations) + ) + + return aar_locations[0] diff --git a/taskcluster/taskgraph/transforms/build_lints.py b/taskcluster/taskgraph/transforms/build_lints.py new file mode 100644 index 0000000000..4286e0ef31 --- /dev/null +++ b/taskcluster/taskgraph/transforms/build_lints.py @@ -0,0 +1,61 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Apply some defaults and minor modifications to the jobs defined in the build +kind. +""" + +from __future__ import absolute_import, print_function, unicode_literals + +from taskgraph.transforms.base import TransformSequence + +transforms = TransformSequence() + +SEEN_CONFIGS = {} + + +@transforms.add +def check_mozharness_perfherder_options(config, jobs): + """Verify that multiple jobs don't use the same perfherder bucket. + + Build jobs record perfherder metrics by default. Perfherder metrics go + to a bucket derived by the platform by default. The name can further be + customized by the presence of "extra options" either defined in + mozharness sub-configs or in an environment variable. + + This linter tries to verify that no 2 jobs will send Perfherder metrics + to the same bucket by looking for jobs not defining extra options when + their platform or mozharness config are otherwise similar. + """ + + for job in jobs: + if job["run"]["using"] != "mozharness": + yield job + continue + + worker = job.get("worker", {}) + + platform = job["treeherder"]["platform"] + primary_config = job["run"]["config"][0] + options = worker.get("env", {}).get("PERFHERDER_EXTRA_OPTIONS") + shippable = job.get("attributes", {}).get("shippable", False) + + # This isn't strictly necessary. But the Perfherder code looking at the + # values we care about is only active on builds. So it doesn't make + # sense to run this linter elsewhere. + assert primary_config.startswith("builds/") + + key = (platform, primary_config, shippable, options) + + if key in SEEN_CONFIGS: + raise Exception( + "Non-unique Perfherder data collection for jobs %s-%s and %s: " + "set PERFHERDER_EXTRA_OPTIONS in worker environment variables " + "or use different mozconfigs" + % (config.kind, job["name"], SEEN_CONFIGS[key]) + ) + + SEEN_CONFIGS[key] = "{}-{}".format(config.kind, job["name"]) + + yield job diff --git a/taskcluster/taskgraph/transforms/build_signing.py b/taskcluster/taskgraph/transforms/build_signing.py new file mode 100644 index 0000000000..d87f84277a --- /dev/null +++ b/taskcluster/taskgraph/transforms/build_signing.py @@ -0,0 +1,70 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Transform the signing task into an actual task description. 
+""" + +from __future__ import absolute_import, print_function, unicode_literals + +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.attributes import copy_attributes_from_dependent_job +from taskgraph.util.signed_artifacts import generate_specifications_of_artifacts_to_sign + + +transforms = TransformSequence() + + +@transforms.add +def add_signed_routes(config, jobs): + """Add routes corresponding to the routes of the build task + this corresponds to, with .signed inserted, for all gecko.v2 routes""" + + for job in jobs: + dep_job = job["primary-dependency"] + enable_signing_routes = job.pop("enable-signing-routes", True) + + job["routes"] = [] + if dep_job.attributes.get("shippable") and enable_signing_routes: + for dep_route in dep_job.task.get("routes", []): + if not dep_route.startswith("index.gecko.v2"): + continue + branch = dep_route.split(".")[3] + rest = ".".join(dep_route.split(".")[4:]) + job["routes"].append("index.gecko.v2.{}.signed.{}".format(branch, rest)) + + yield job + + +@transforms.add +def define_upstream_artifacts(config, jobs): + for job in jobs: + dep_job = job["primary-dependency"] + upstream_artifact_task = job.pop("upstream-artifact-task", dep_job) + + job["attributes"] = copy_attributes_from_dependent_job(dep_job) + + artifacts_specifications = generate_specifications_of_artifacts_to_sign( + config, + job, + keep_locale_template=False, + kind=config.kind, + dep_kind=upstream_artifact_task.kind, + ) + + task_ref = "<{}>".format(upstream_artifact_task.kind) + task_type = "build" + if "notarization" in upstream_artifact_task.kind: + task_type = "scriptworker" + + job["upstream-artifacts"] = [ + { + "taskId": {"task-reference": task_ref}, + "taskType": task_type, + "paths": spec["artifacts"], + "formats": spec["formats"], + } + for spec in artifacts_specifications + ] + + yield job diff --git a/taskcluster/taskgraph/transforms/cached_tasks.py b/taskcluster/taskgraph/transforms/cached_tasks.py new file mode 100644 index 0000000000..283ee823cf --- /dev/null +++ b/taskcluster/taskgraph/transforms/cached_tasks.py @@ -0,0 +1,87 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +from __future__ import absolute_import, print_function, unicode_literals + +from collections import deque +import taskgraph +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.cached_tasks import add_optimization + +transforms = TransformSequence() + + +def order_tasks(config, tasks): + """Iterate image tasks in an order where parent tasks come first.""" + kind_prefix = config.kind + "-" + + pending = deque(tasks) + task_labels = {task["label"] for task in pending} + emitted = set() + while True: + try: + task = pending.popleft() + except IndexError: + break + parents = { + task + for task in task.get("dependencies", {}).values() + if task.startswith(kind_prefix) + } + if parents and not emitted.issuperset(parents & task_labels): + pending.append(task) + continue + emitted.add(task["label"]) + yield task + + +def format_task_digest(cached_task): + return "/".join( + [ + cached_task["type"], + cached_task["name"], + cached_task["digest"], + ] + ) + + +@transforms.add +def cache_task(config, tasks): + if taskgraph.fast: + for task in tasks: + yield task + return + + digests = {} + for task in config.kind_dependencies_tasks.values(): + if "cached_task" in task.attributes: + digests[task.label] = format_task_digest(task.attributes["cached_task"]) + + for task in order_tasks(config, tasks): + cache = task.pop("cache", None) + if cache is None: + yield task + continue + + dependency_digests = [] + for p in task.get("dependencies", {}).values(): + if p in digests: + dependency_digests.append(digests[p]) + else: + raise Exception( + "Cached task {} has uncached parent task: {}".format( + task["label"], p + ) + ) + digest_data = cache["digest-data"] + sorted(dependency_digests) + add_optimization( + config, + task, + cache_type=cache["type"], + cache_name=cache["name"], + digest_data=digest_data, + ) + digests[task["label"]] = format_task_digest(task["attributes"]["cached_task"]) + + yield task diff --git a/taskcluster/taskgraph/transforms/chunk_partners.py b/taskcluster/taskgraph/transforms/chunk_partners.py new file mode 100644 index 0000000000..411006214c --- /dev/null +++ b/taskcluster/taskgraph/transforms/chunk_partners.py @@ -0,0 +1,75 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
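+#
+# A repack_id has the form "<partner>/<sub-partner>/<locale>", e.g.
+# "acme/acme-mini/en-US" (hypothetical names); the transforms below fan tasks
+# out over these ids.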
+""" +Chunk the partner repack tasks by subpartner and locale +""" + +from __future__ import absolute_import, print_function, unicode_literals + +import copy + +from mozbuild.chunkify import chunkify +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.partners import ( + get_repack_ids_by_platform, + apply_partner_priority, +) + +transforms = TransformSequence() +transforms.add(apply_partner_priority) + + +@transforms.add +def chunk_partners(config, jobs): + for job in jobs: + dep_job = job["primary-dependency"] + build_platform = dep_job.attributes["build_platform"] + repack_id = dep_job.task.get("extra", {}).get("repack_id") + repack_ids = dep_job.task.get("extra", {}).get("repack_ids") + copy_repack_ids = job.pop("copy-repack-ids", False) + + if copy_repack_ids: + assert repack_ids, "dep_job {} doesn't have repack_ids!".format( + dep_job.label + ) + job.setdefault("extra", {})["repack_ids"] = repack_ids + yield job + # first downstream of the repack task, no chunking or fanout has been done yet + elif not any([repack_id, repack_ids]): + platform_repack_ids = get_repack_ids_by_platform(config, build_platform) + # we chunk mac signing + if config.kind in ( + "release-partner-repack-signing", + "release-eme-free-repack-signing", + "release-partner-repack-notarization-part-1", + "release-eme-free-repack-notarization-part-1", + ): + repacks_per_chunk = job.get("repacks-per-chunk") + chunks, remainder = divmod(len(platform_repack_ids), repacks_per_chunk) + if remainder: + chunks = int(chunks + 1) + for this_chunk in range(1, chunks + 1): + chunk = chunkify(platform_repack_ids, this_chunk, chunks) + partner_job = copy.deepcopy(job) + partner_job.setdefault("extra", {}).setdefault("repack_ids", chunk) + partner_job["extra"]["repack_suffix"] = str(this_chunk) + yield partner_job + # linux and windows we fan out immediately to one task per partner-sub_partner-locale + else: + for repack_id in platform_repack_ids: + partner_job = copy.deepcopy(job) # don't overwrite dict values here + partner_job.setdefault("extra", {}) + partner_job["extra"]["repack_id"] = repack_id + yield partner_job + # fan out chunked mac signing for repackage + elif repack_ids: + for repack_id in repack_ids: + partner_job = copy.deepcopy(job) + partner_job.setdefault("extra", {}).setdefault("repack_id", repack_id) + yield partner_job + # otherwise we've fully fanned out already, continue by passing repack_id on + else: + partner_job = copy.deepcopy(job) + partner_job.setdefault("extra", {}).setdefault("repack_id", repack_id) + yield partner_job diff --git a/taskcluster/taskgraph/transforms/code_review.py b/taskcluster/taskgraph/transforms/code_review.py new file mode 100644 index 0000000000..53e01d3683 --- /dev/null +++ b/taskcluster/taskgraph/transforms/code_review.py @@ -0,0 +1,34 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Add soft dependencies and configuration to code-review tasks. 
+""" + +from __future__ import absolute_import, print_function, unicode_literals + +from taskgraph.transforms.base import TransformSequence + +transforms = TransformSequence() + + +@transforms.add +def add_dependencies(config, jobs): + for job in jobs: + job.setdefault("soft-dependencies", []) + job["soft-dependencies"] += [ + dep_task.label + for dep_task in config.kind_dependencies_tasks.values() + if dep_task.attributes.get("code-review") is True + ] + yield job + + +@transforms.add +def add_phabricator_config(config, jobs): + for job in jobs: + diff = config.params.get("phabricator_diff") + if diff is not None: + code_review = job.setdefault("extra", {}).setdefault("code-review", {}) + code_review["phabricator-diff"] = diff + yield job diff --git a/taskcluster/taskgraph/transforms/copy_attributes_from_dependent_task.py b/taskcluster/taskgraph/transforms/copy_attributes_from_dependent_task.py new file mode 100644 index 0000000000..78c93b40c3 --- /dev/null +++ b/taskcluster/taskgraph/transforms/copy_attributes_from_dependent_task.py @@ -0,0 +1,24 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Transform the repackage task into an actual task description. +""" + +from __future__ import absolute_import, print_function, unicode_literals + +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.attributes import copy_attributes_from_dependent_job + +transforms = TransformSequence() + + +@transforms.add +def copy_attributes(config, jobs): + for job in jobs: + job.setdefault("attributes", {}) + job["attributes"].update( + copy_attributes_from_dependent_job(job["primary-dependency"]) + ) + + yield job diff --git a/taskcluster/taskgraph/transforms/diffoscope.py b/taskcluster/taskgraph/transforms/diffoscope.py new file mode 100644 index 0000000000..d0e313a386 --- /dev/null +++ b/taskcluster/taskgraph/transforms/diffoscope.py @@ -0,0 +1,178 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +This transform construct tasks to perform diffs between builds, as +defined in kind.yml +""" + +from __future__ import absolute_import, print_function, unicode_literals + +from six import text_type +from taskgraph.transforms.base import TransformSequence +from taskgraph.transforms.task import task_description_schema +from taskgraph.util.schema import Schema +from taskgraph.util.taskcluster import get_artifact_path +from voluptuous import ( + Any, + Optional, + Required, +) + +index_or_string = Any( + text_type, + {Required("index-search"): text_type}, +) + +diff_description_schema = Schema( + { + # Name of the diff task. + Required("name"): text_type, + # Treeherder tier. + Required("tier"): int, + # Treeherder symbol. + Required("symbol"): text_type, + # relative path (from config.path) to the file the task was defined in. + Optional("job-from"): text_type, + # Original and new builds to compare. + Required("original"): index_or_string, + Required("new"): index_or_string, + # Arguments to pass to diffoscope, used for job-defaults in + # taskcluster/ci/diffoscope/kind.yml + Optional("args"): text_type, + # Extra arguments to pass to diffoscope, that can be set per job. + Optional("extra-args"): text_type, + # Fail the task when differences are detected. 
+ Optional("fail-on-diff"): bool, + # What artifact to check the differences of. Defaults to target.tar.bz2 + # for Linux, target.dmg for Mac, target.zip for Windows, target.apk for + # Android. + Optional("artifact"): text_type, + # Whether to unpack first. Diffoscope can normally work without unpacking, + # but when one needs to --exclude some contents, that doesn't work out well + # if said content is packed (e.g. in omni.ja). + Optional("unpack"): bool, + # Commands to run before performing the diff. + Optional("pre-diff-commands"): [text_type], + # Only run the task on a set of projects/branches. + Optional("run-on-projects"): task_description_schema["run-on-projects"], + Optional("optimization"): task_description_schema["optimization"], + } +) + +transforms = TransformSequence() +transforms.add_validate(diff_description_schema) + + +@transforms.add +def fill_template(config, tasks): + dummy_tasks = {} + + for task in tasks: + name = task["name"] + + deps = {} + urls = {} + previous_artifact = None + artifact = task.get("artifact") + for k in ("original", "new"): + value = task[k] + if isinstance(value, text_type): + deps[k] = value + dep_name = k + os_hint = value + else: + index = value["index-search"] + if index not in dummy_tasks: + dummy_tasks[index] = { + "label": "index-search-" + index, + "description": index, + "worker-type": "invalid/always-optimized", + "run": { + "using": "always-optimized", + }, + "optimization": { + "index-search": [index], + }, + } + yield dummy_tasks[index] + deps[index] = "index-search-" + index + dep_name = index + os_hint = index.split(".")[-1] + if artifact: + pass + elif "linux" in os_hint: + artifact = "target.tar.bz2" + elif "macosx" in os_hint: + artifact = "target.dmg" + elif "android" in os_hint: + artifact = "target.apk" + elif "win" in os_hint: + artifact = "target.zip" + else: + raise Exception("Cannot figure out the OS for {!r}".format(value)) + if previous_artifact is not None and previous_artifact != artifact: + raise Exception("Cannot compare builds from different OSes") + urls[k] = { + "artifact-reference": "<{}/{}>".format( + dep_name, get_artifact_path(task, artifact) + ), + } + previous_artifact = artifact + + taskdesc = { + "label": "diff-" + name, + "description": name, + "treeherder": { + "symbol": task["symbol"], + "platform": "diff/opt", + "kind": "other", + "tier": task["tier"], + }, + "worker-type": "b-linux", + "worker": { + "docker-image": {"in-tree": "diffoscope"}, + "artifacts": [ + { + "type": "file", + "path": "/builds/worker/{}".format(f), + "name": "public/{}".format(f), + } + for f in ( + "diff.html", + "diff.txt", + ) + ], + "env": { + "ORIG_URL": urls["original"], + "NEW_URL": urls["new"], + "DIFFOSCOPE_ARGS": " ".join( + task[k] for k in ("args", "extra-args") if k in task + ), + "PRE_DIFF": "; ".join(task.get("pre-diff-commands", [])), + }, + "max-run-time": 1800, + }, + "run": { + "using": "run-task", + "checkout": task.get("unpack", False), + "command": "/builds/worker/bin/get_and_diffoscope{}{}".format( + " --unpack" if task.get("unpack") else "", + " --fail" if task.get("fail-on-diff") else "", + ), + }, + "dependencies": deps, + "optimization": task.get("optimization"), + } + if "run-on-projects" in task: + taskdesc["run-on-projects"] = task["run-on-projects"] + + if artifact.endswith(".dmg"): + taskdesc.setdefault("fetches", {}).setdefault("toolchain", []).extend( + [ + "linux64-cctools-port", + "linux64-libdmg", + ] + ) + + yield taskdesc diff --git a/taskcluster/taskgraph/transforms/docker_image.py 
b/taskcluster/taskgraph/transforms/docker_image.py new file mode 100644 index 0000000000..e57098df1b --- /dev/null +++ b/taskcluster/taskgraph/transforms/docker_image.py @@ -0,0 +1,219 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +from __future__ import absolute_import, print_function, unicode_literals + +import logging +import os +import re +import json + +import six +from six import text_type +import mozpack.path as mozpath +import taskgraph +from taskgraph.transforms.base import TransformSequence +from .. import GECKO +from taskgraph.util.docker import ( + create_context_tar, + generate_context_hash, + image_path, +) +from taskgraph.util.schema import Schema +from voluptuous import ( + Optional, + Required, +) +from .task import task_description_schema + +logger = logging.getLogger(__name__) + +CONTEXTS_DIR = "docker-contexts" + +DIGEST_RE = re.compile("^[0-9a-f]{64}$") + +IMAGE_BUILDER_IMAGE = ( + "mozillareleases/image_builder:5.0.0" + "@sha256:" + "e510a9a9b80385f71c112d61b2f2053da625aff2b6d430411ac42e424c58953f" +) + +transforms = TransformSequence() + +docker_image_schema = Schema( + { + # Name of the docker image. + Required("name"): text_type, + # Name of the parent docker image. + Optional("parent"): text_type, + # Treeherder symbol. + Required("symbol"): text_type, + # relative path (from config.path) to the file the docker image was defined + # in. + Optional("job-from"): text_type, + # Arguments to use for the Dockerfile. + Optional("args"): {text_type: text_type}, + # Name of the docker image definition under taskcluster/docker, when + # different from the docker image name. + Optional("definition"): text_type, + # List of package tasks this docker image depends on. 
+ Optional("packages"): [text_type], + Optional( + "index", + description="information for indexing this build so its artifacts can be discovered", + ): task_description_schema["index"], + Optional( + "cache", + description="Whether this image should be cached based on inputs.", + ): bool, + } +) + + +transforms.add_validate(docker_image_schema) + + +@transforms.add +def fill_template(config, tasks): + if not taskgraph.fast and config.write_artifacts: + if not os.path.isdir(CONTEXTS_DIR): + os.makedirs(CONTEXTS_DIR) + + for task in tasks: + image_name = task.pop("name") + job_symbol = task.pop("symbol") + args = task.pop("args", {}) + packages = task.pop("packages", []) + parent = task.pop("parent", None) + + for p in packages: + if "packages-{}".format(p) not in config.kind_dependencies_tasks: + raise Exception( + "Missing package job for {}-{}: {}".format( + config.kind, image_name, p + ) + ) + + if not taskgraph.fast: + context_path = mozpath.relpath(image_path(image_name), GECKO) + if config.write_artifacts: + context_file = os.path.join( + CONTEXTS_DIR, "{}.tar.gz".format(image_name) + ) + logger.info( + "Writing {} for docker image {}".format(context_file, image_name) + ) + context_hash = create_context_tar( + GECKO, context_path, context_file, image_name, args + ) + else: + context_hash = generate_context_hash( + GECKO, context_path, image_name, args + ) + else: + if config.write_artifacts: + raise Exception("Can't write artifacts if `taskgraph.fast` is set.") + context_hash = "0" * 40 + digest_data = [context_hash] + digest_data += [json.dumps(args, sort_keys=True)] + + description = "Build the docker image {} for use by dependent tasks".format( + image_name + ) + + args["DOCKER_IMAGE_PACKAGES"] = " ".join("<{}>".format(p) for p in packages) + + # Adjust the zstandard compression level based on the execution level. + # We use faster compression for level 1 because we care more about + # end-to-end times. We use slower/better compression for other levels + # because images are read more often and it is worth the trade-off to + # burn more CPU once to reduce image size. 
+ zstd_level = "3" if int(config.params["level"]) == 1 else "10" + + # include some information that is useful in reconstructing this task + # from JSON + taskdesc = { + "label": "{}-{}".format(config.kind, image_name), + "description": description, + "attributes": { + "image_name": image_name, + "artifact_prefix": "public", + }, + "expires-after": "1 year", + "scopes": [], + "treeherder": { + "symbol": job_symbol, + "platform": "taskcluster-images/opt", + "kind": "other", + "tier": 1, + }, + "run-on-projects": [], + "worker-type": "images", + "worker": { + "implementation": "docker-worker", + "os": "linux", + "artifacts": [ + { + "type": "file", + "path": "/workspace/image.tar.zst", + "name": "public/image.tar.zst", + } + ], + "env": { + "CONTEXT_TASK_ID": {"task-reference": "<decision>"}, + "CONTEXT_PATH": "public/docker-contexts/{}.tar.gz".format( + image_name + ), + "HASH": context_hash, + "PROJECT": config.params["project"], + "IMAGE_NAME": image_name, + "DOCKER_IMAGE_ZSTD_LEVEL": zstd_level, + "DOCKER_BUILD_ARGS": { + "task-reference": six.ensure_text(json.dumps(args)) + }, + "GECKO_BASE_REPOSITORY": config.params["base_repository"], + "GECKO_HEAD_REPOSITORY": config.params["head_repository"], + "GECKO_HEAD_REV": config.params["head_rev"], + }, + "chain-of-trust": True, + "max-run-time": 7200, + # FIXME: We aren't currently propagating the exit code + }, + } + # Retry for 'funsize-update-generator' if exit status code is -1 + if image_name in ["funsize-update-generator"]: + taskdesc["worker"]["retry-exit-status"] = [-1] + + worker = taskdesc["worker"] + + if image_name == "image_builder": + worker["docker-image"] = IMAGE_BUILDER_IMAGE + digest_data.append("image-builder-image:{}".format(IMAGE_BUILDER_IMAGE)) + else: + worker["docker-image"] = {"in-tree": "image_builder"} + deps = taskdesc.setdefault("dependencies", {}) + deps["docker-image"] = "{}-image_builder".format(config.kind) + + if packages: + deps = taskdesc.setdefault("dependencies", {}) + for p in sorted(packages): + deps[p] = "packages-{}".format(p) + + if parent: + deps = taskdesc.setdefault("dependencies", {}) + deps["parent"] = "{}-{}".format(config.kind, parent) + worker["env"]["PARENT_TASK_ID"] = { + "task-reference": "<parent>", + } + if "index" in task: + taskdesc["index"] = task["index"] + + if task.get("cache", True) and not taskgraph.fast: + taskdesc["cache"] = { + "type": "docker-images.v2", + "name": image_name, + "digest-data": digest_data, + } + + yield taskdesc diff --git a/taskcluster/taskgraph/transforms/fetch.py b/taskcluster/taskgraph/transforms/fetch.py new file mode 100644 index 0000000000..dd64da5d54 --- /dev/null +++ b/taskcluster/taskgraph/transforms/fetch.py @@ -0,0 +1,377 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# Support for running tasks that download remote content and re-export +# it as task artifacts. 
+ +from __future__ import absolute_import, unicode_literals + +import attr + +from mozbuild.shellutil import quote as shell_quote + +import io +import os +import re +from six import text_type + +from voluptuous import ( + Optional, + Required, + Extra, +) + +from mozpack import path as mozpath + +import taskgraph + +from .base import TransformSequence +from ..util.cached_tasks import add_optimization +from ..util.schema import Schema, validate_schema +from ..util.treeherder import join_symbol + + +CACHE_TYPE = "content.v1" + +FETCH_SCHEMA = Schema( + { + # Name of the task. + Required("name"): text_type, + # Relative path (from config.path) to the file the task was defined + # in. + Optional("job-from"): text_type, + # Description of the task. + Required("description"): text_type, + Optional( + "fetch-alias", + description="An alias that can be used instead of the real fetch job name in " + "fetch stanzas for jobs.", + ): text_type, + Optional( + "artifact-prefix", + description="The prefix of the taskcluster artifact being uploaded. " + "Defaults to `public/`; if it starts with something other than " + "`public/` the artifact will require scopes to access.", + ): text_type, + Optional("attributes"): {text_type: object}, + Required("fetch"): { + Required("type"): text_type, + Extra: object, + }, + } +) + + +# define a collection of payload builders, depending on the worker implementation +fetch_builders = {} + + +@attr.s(frozen=True) +class FetchBuilder(object): + schema = attr.ib(type=Schema) + builder = attr.ib() + + +def fetch_builder(name, schema): + schema = Schema({Required("type"): name}).extend(schema) + + def wrap(func): + fetch_builders[name] = FetchBuilder(schema, func) + return func + + return wrap + + +transforms = TransformSequence() +transforms.add_validate(FETCH_SCHEMA) + + +@transforms.add +def process_fetch_job(config, jobs): + # Converts fetch-url entries to the job schema. + for job in jobs: + typ = job["fetch"]["type"] + name = job["name"] + fetch = job.pop("fetch") + + if typ not in fetch_builders: + raise Exception("Unknown fetch type {} in fetch {}".format(typ, name)) + validate_schema( + fetch_builders[typ].schema, fetch, "In task.fetch {!r}:".format(name) + ) + + job.update(configure_fetch(config, typ, name, fetch)) + + yield job + + +def configure_fetch(config, typ, name, fetch): + if typ not in fetch_builders: + raise Exception("No fetch type {} in fetch {}".format(typ, name)) + validate_schema( + fetch_builders[typ].schema, fetch, "In task.fetch {!r}:".format(name) + ) + + return fetch_builders[typ].builder(config, name, fetch) + + +@transforms.add +def make_task(config, jobs): + # Fetch tasks are idempotent and immutable. Have them live for + # essentially forever. 
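+    # (Level 3 is the most trusted level, used by the production branches;
+    # lower levels are short-lived trees, so their fetches expire quickly.)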
+ if config.params["level"] == "3": + expires = "1000 years" + else: + expires = "28 days" + + for job in jobs: + name = job["name"] + artifact_prefix = job.get("artifact-prefix", "public") + env = job.get("env", {}) + env.update({"UPLOAD_DIR": "/builds/worker/artifacts"}) + attributes = job.get("attributes", {}) + attributes["fetch-artifact"] = mozpath.join( + artifact_prefix, job["artifact_name"] + ) + alias = job.get("fetch-alias") + if alias: + attributes["fetch-alias"] = alias + + task = { + "attributes": attributes, + "name": name, + "description": job["description"], + "expires-after": expires, + "label": "fetch-%s" % name, + "run-on-projects": [], + "treeherder": { + "symbol": join_symbol("Fetch", name), + "kind": "build", + "platform": "fetch/opt", + "tier": 1, + }, + "run": { + "using": "run-task", + "checkout": False, + "command": job["command"], + }, + "worker-type": "images", + "worker": { + "chain-of-trust": True, + "docker-image": {"in-tree": "fetch"}, + "env": env, + "max-run-time": 900, + "artifacts": [ + { + "type": "directory", + "name": artifact_prefix, + "path": "/builds/worker/artifacts", + } + ], + }, + } + + if job.get("secret", None): + task["scopes"] = ["secrets:get:" + job.get("secret")] + task["worker"]["taskcluster-proxy"] = True + + if not taskgraph.fast: + cache_name = task["label"].replace("{}-".format(config.kind), "", 1) + + # This adds the level to the index path automatically. + add_optimization( + config, + task, + cache_type=CACHE_TYPE, + cache_name=cache_name, + digest_data=job["digest_data"], + ) + yield task + + +@fetch_builder( + "static-url", + schema={ + # The URL to download. + Required("url"): text_type, + # The SHA-256 of the downloaded content. + Required("sha256"): text_type, + # Size of the downloaded entity, in bytes. + Required("size"): int, + # GPG signature verification. + Optional("gpg-signature"): { + # URL where GPG signature document can be obtained. Can contain the + # value ``{url}``, which will be substituted with the value from + # ``url``. + Required("sig-url"): text_type, + # Path to file containing GPG public key(s) used to validate + # download. + Required("key-path"): text_type, + }, + # The name to give to the generated artifact. Defaults to the file + # portion of the URL. Using a different extension converts the + # archive to the given type. Only conversion to .tar.zst is + # supported. + Optional("artifact-name"): text_type, + # Strip the given number of path components at the beginning of + # each file entry in the archive. + # Requires an artifact-name ending with .tar.zst. + Optional("strip-components"): int, + # Add the given prefix to each file entry in the archive. + # Requires an artifact-name ending with .tar.zst. + Optional("add-prefix"): text_type, + # IMPORTANT: when adding anything that changes the behavior of the task, + # it is important to update the digest data used to compute cache hits. 
+ }, +) +def create_fetch_url_task(config, name, fetch): + artifact_name = fetch.get("artifact-name") + if not artifact_name: + artifact_name = fetch["url"].split("/")[-1] + + command = [ + "/builds/worker/bin/fetch-content", + "static-url", + ] + + # Arguments that matter to the cache digest + args = [ + "--sha256", + fetch["sha256"], + "--size", + "%d" % fetch["size"], + ] + + if fetch.get("strip-components"): + args.extend(["--strip-components", "%d" % fetch["strip-components"]]) + + if fetch.get("add-prefix"): + args.extend(["--add-prefix", fetch["add-prefix"]]) + + command.extend(args) + + env = {} + + if "gpg-signature" in fetch: + sig_url = fetch["gpg-signature"]["sig-url"].format(url=fetch["url"]) + key_path = os.path.join(taskgraph.GECKO, fetch["gpg-signature"]["key-path"]) + + with io.open(key_path, "r") as fh: + gpg_key = fh.read() + + env["FETCH_GPG_KEY"] = gpg_key + command.extend( + [ + "--gpg-sig-url", + sig_url, + "--gpg-key-env", + "FETCH_GPG_KEY", + ] + ) + + command.extend( + [ + fetch["url"], + "/builds/worker/artifacts/%s" % artifact_name, + ] + ) + + return { + "command": command, + "artifact_name": artifact_name, + "env": env, + # We don't include the GPG signature in the digest because it isn't + # materially important for caching: GPG signatures are supplemental + # trust checking beyond what the shasum already provides. + "digest_data": args + [artifact_name], + } + + +@fetch_builder( + "git", + schema={ + Required("repo"): text_type, + Required("revision"): text_type, + Optional("artifact-name"): text_type, + Optional("path-prefix"): text_type, + # ssh-key is a taskcluster secret path (e.g. project/civet/github-deploy-key) + # In the secret dictionary, the key should be specified as + # "ssh_privkey": "-----BEGIN OPENSSH PRIVATE KEY-----\nkfksnb3jc..." + # n.b. The OpenSSH private key file format requires a newline at the end of the file. + Optional("ssh-key"): text_type, + }, +) +def create_git_fetch_task(config, name, fetch): + path_prefix = fetch.get("path-prefix") + if not path_prefix: + path_prefix = fetch["repo"].rstrip("/").rsplit("/", 1)[-1] + artifact_name = fetch.get("artifact-name") + if not artifact_name: + artifact_name = "{}.tar.zst".format(path_prefix) + + if not re.match(r"[0-9a-fA-F]{40}", fetch["revision"]): + raise Exception('Revision is not a sha1 in fetch task "{}"'.format(name)) + + args = [ + "/builds/worker/bin/fetch-content", + "git-checkout-archive", + "--path-prefix", + path_prefix, + fetch["repo"], + fetch["revision"], + "/builds/worker/artifacts/%s" % artifact_name, + ] + + ssh_key = fetch.get("ssh-key") + if ssh_key: + args.append("--ssh-key-secret") + args.append(ssh_key) + + return { + "command": args, + "artifact_name": artifact_name, + "digest_data": [fetch["revision"], path_prefix, artifact_name], + "secret": ssh_key, + } + + +@fetch_builder( + "chromium-fetch", + schema={ + Required("script"): text_type, + # Platform type for chromium build + Required("platform"): text_type, + # Chromium revision to obtain + Optional("revision"): text_type, + # The name to give to the generated artifact. 
+ Required("artifact-name"): text_type, + }, +) +def create_chromium_fetch_task(config, name, fetch): + artifact_name = fetch.get("artifact-name") + + workdir = "/builds/worker" + + platform = fetch.get("platform") + revision = fetch.get("revision") + + args = "--platform " + shell_quote(platform) + if revision: + args += " --revision " + shell_quote(revision) + + cmd = [ + "bash", + "-c", + "cd {} && " "/usr/bin/python3 {} {}".format(workdir, fetch["script"], args), + ] + + return { + "command": cmd, + "artifact_name": artifact_name, + "digest_data": [ + "revision={}".format(revision), + "platform={}".format(platform), + "artifact_name={}".format(artifact_name), + ], + } diff --git a/taskcluster/taskgraph/transforms/final_verify.py b/taskcluster/taskgraph/transforms/final_verify.py new file mode 100644 index 0000000000..3edf163bf7 --- /dev/null +++ b/taskcluster/taskgraph/transforms/final_verify.py @@ -0,0 +1,36 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Transform the beetmover task into an actual task description. +""" + +from __future__ import absolute_import, print_function, unicode_literals + +from taskgraph.transforms.base import TransformSequence + +transforms = TransformSequence() + + +@transforms.add +def add_command(config, tasks): + for task in tasks: + if not task["worker"].get("env"): + task["worker"]["env"] = {} + + final_verify_configs = [] + for upstream in sorted(task.get("dependencies", {}).keys()): + if "update-verify-config" in upstream: + final_verify_configs.append( + "<{}/public/build/update-verify.cfg>".format(upstream), + ) + task["run"] = { + "using": "run-task", + "cwd": "{checkout}", + "command": { + "artifact-reference": "tools/update-verify/release/final-verification.sh " + + " ".join(final_verify_configs), + }, + "sparse-profile": "update-verify", + } + yield task diff --git a/taskcluster/taskgraph/transforms/geckodriver_signing.py b/taskcluster/taskgraph/transforms/geckodriver_signing.py new file mode 100644 index 0000000000..6f14393a11 --- /dev/null +++ b/taskcluster/taskgraph/transforms/geckodriver_signing.py @@ -0,0 +1,126 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Transform the repackage signing task into an actual task description. 
+""" + +from __future__ import absolute_import, print_function, unicode_literals + +from six import text_type +from taskgraph.loader.single_dep import schema +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.attributes import copy_attributes_from_dependent_job +from taskgraph.util.scriptworker import get_signing_cert_scope_per_platform +from taskgraph.transforms.task import task_description_schema +from voluptuous import Optional + +repackage_signing_description_schema = schema.extend( + { + Optional("label"): text_type, + Optional("treeherder"): task_description_schema["treeherder"], + Optional("shipping-phase"): task_description_schema["shipping-phase"], + } +) + +transforms = TransformSequence() +transforms.add_validate(repackage_signing_description_schema) + + +@transforms.add +def make_signing_description(config, jobs): + for job in jobs: + dep_job = job["primary-dependency"] + + attributes = copy_attributes_from_dependent_job(dep_job) + attributes["repackage_type"] = "repackage-signing" + + treeherder = job.get("treeherder", {}) + dep_treeherder = dep_job.task.get("extra", {}).get("treeherder", {}) + treeherder.setdefault( + "symbol", "{}(gd-s)".format(dep_treeherder["groupSymbol"]) + ) + treeherder.setdefault( + "platform", dep_job.task.get("extra", {}).get("treeherder-platform") + ) + treeherder.setdefault("tier", dep_treeherder.get("tier", 1)) + treeherder.setdefault("kind", "build") + + dependencies = {dep_job.kind: dep_job.label} + signing_dependencies = dep_job.dependencies + dependencies.update( + {k: v for k, v in signing_dependencies.items() if k != "docker-image"} + ) + + description = "Signing Geckodriver for build '{}'".format( + attributes.get("build_platform"), + ) + + build_platform = dep_job.attributes.get("build_platform") + is_shippable = dep_job.attributes.get("shippable") + signing_cert_scope = get_signing_cert_scope_per_platform( + build_platform, is_shippable, config + ) + + upstream_artifacts = _craft_upstream_artifacts( + dep_job, dep_job.kind, build_platform + ) + + scopes = [signing_cert_scope] + + platform = build_platform.split("-")[0] + + task = { + "label": job["label"], + "description": description, + "worker-type": "linux-signing", + "worker": { + "implementation": "scriptworker-signing", + "upstream-artifacts": upstream_artifacts, + }, + "scopes": scopes, + "dependencies": dependencies, + "attributes": attributes, + "treeherder": treeherder, + "run-on-projects": ["mozilla-central"], + "index": {"product": "geckodriver", "job-name": platform}, + } + + if build_platform.startswith("macosx"): + worker_type = task["worker-type"] + worker_type_alias_map = { + "linux-depsigning": "mac-depsigning", + "linux-signing": "mac-signing", + } + + assert worker_type in worker_type_alias_map, ( + "Make sure to adjust the below worker_type_alias logic for " + "mac if you change the signing workerType aliases!" 
+ " ({} not found in mapping)".format(worker_type) + ) + worker_type = worker_type_alias_map[worker_type] + + task["worker-type"] = worker_type_alias_map[task["worker-type"]] + task["worker"]["mac-behavior"] = "mac_geckodriver" + + yield task + + +def _craft_upstream_artifacts(dep_job, dependency_kind, build_platform): + if build_platform.startswith("win"): + signing_format = "autograph_authenticode" + elif build_platform.startswith("linux"): + signing_format = "autograph_gpg" + elif build_platform.startswith("macosx"): + signing_format = "mac_geckodriver" + else: + raise ValueError('Unsupported build platform "{}"'.format(build_platform)) + + return [ + { + "taskId": {"task-reference": "<{}>".format(dependency_kind)}, + "taskType": "build", + "paths": [dep_job.attributes["toolchain-artifact"]], + "formats": [signing_format], + } + ] diff --git a/taskcluster/taskgraph/transforms/github_sync.py b/taskcluster/taskgraph/transforms/github_sync.py new file mode 100644 index 0000000000..afaf80be2b --- /dev/null +++ b/taskcluster/taskgraph/transforms/github_sync.py @@ -0,0 +1,25 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +from __future__ import absolute_import, print_function, unicode_literals + +from taskgraph.transforms.base import TransformSequence + + +transforms = TransformSequence() + + +@transforms.add +def sync_github(config, tasks): + """Do transforms specific to github-sync tasks.""" + for task in tasks: + # Add the secret to the scopes, only in m-c. + # Doing this on any other tree will result in decision task failure + # because m-c is the only one allowed to have that scope. + secret = task["secret"] + if config.params["project"] == "mozilla-central": + task.setdefault("scopes", []) + task["scopes"].append("secrets:get:" + secret) + task["worker"].setdefault("env", {})["GITHUB_SECRET"] = secret + del task["secret"] + yield task diff --git a/taskcluster/taskgraph/transforms/iris.py b/taskcluster/taskgraph/transforms/iris.py new file mode 100644 index 0000000000..799f7e63cf --- /dev/null +++ b/taskcluster/taskgraph/transforms/iris.py @@ -0,0 +1,125 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Take the base iris task definition and generate all of the actual test chunks +for all combinations of test categories and test platforms. 
+""" + +from __future__ import absolute_import, print_function, unicode_literals + +from copy import deepcopy + +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.schema import resolve_keyed_by + +transforms = TransformSequence() + + +@transforms.add +def make_iris_tasks(config, jobs): + # Each platform will get a copy of the test categories + platforms = config.config.get("iris-build-platforms") + + # The fields needing to be resolve_keyed_by'd + fields = [ + "dependencies.build", + "fetches.build", + "run.command", + "run-on-projects", + "treeherder.platform", + "worker.docker-image", + "worker.artifacts", + "worker.env.PATH", + "worker.max-run-time", + "worker-type", + ] + + for job in jobs: + for platform in platforms: + # Make platform-specific clones of each iris task + clone = deepcopy(job) + + basename = clone["name"] + clone["description"] = clone["description"].format(basename) + clone["name"] = clone["name"] + "-" + platform + + # resolve_keyed_by picks the correct values based on + # the `by-platform` keys in the task definitions + for field in fields: + resolve_keyed_by( + clone, + field, + clone["name"], + **{ + "platform": platform, + } + ) + + # iris uses this to select the tests to run in this chunk + clone["worker"]["env"]["CURRENT_TEST_DIR"] = basename + + # Clean up some entries when they aren't needed + if clone["worker"]["docker-image"] is None: + del clone["worker"]["docker-image"] + if clone["worker"]["env"]["PATH"] is None: + del clone["worker"]["env"]["PATH"] + + yield clone + + +@transforms.add +def fill_email_data(config, tasks): + format_kwargs = { + "head_rev": config.params["head_rev"], + "project": config.params["project"], + "th_root": "https://treeherder.mozilla.org/#/", + "tiers": "&tier=1%2C2%2C3", + } + + for task in tasks: + format_kwargs["task_name"] = task["name"] + format_kwargs["filterstring"] = "&searchStr=iris%20{}".format(task["name"]) + format_kwargs["chunk"] = task["worker"]["env"]["CURRENT_TEST_DIR"] + + resolve_keyed_by( + task, + "notify.email", + item_name=task["name"], + **{ + "project": config.params["project"], + } + ) + + email = task["notify"].get("email") + if email: + email["link"]["href"] = email["link"]["href"].format(**format_kwargs) + email["subject"] = email["subject"].format(**format_kwargs) + + yield task + + +@transforms.add +def add_notify_email(config, tasks): + for task in tasks: + notify = task.pop("notify", {}) + email_config = notify.get("email") + if email_config: + extra = task.setdefault("extra", {}) + notify = extra.setdefault("notify", {}) + notify["email"] = { + "subject": email_config["subject"], + "content": email_config["message"], + "link": email_config.get("link", None), + } + + routes = task.setdefault("routes", []) + routes.extend( + [ + "notify.email.{}.on-{}".format(address, reason) + for address in email_config["emails"] + for reason in email_config["on-reasons"] + ] + ) + + yield task diff --git a/taskcluster/taskgraph/transforms/job/__init__.py b/taskcluster/taskgraph/transforms/job/__init__.py new file mode 100644 index 0000000000..e0fd3e8dd3 --- /dev/null +++ b/taskcluster/taskgraph/transforms/job/__init__.py @@ -0,0 +1,464 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Convert a job description into a task description. 
+
+Job descriptions are similar to task descriptions, but they specify how to run
+the job at a higher level, using a "run" field that can be interpreted by
+run-using handlers in `taskcluster/taskgraph/transforms/job`.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import copy
+import logging
+import json
+import six
+from six import text_type
+
+import mozpack.path as mozpath
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.transforms.cached_tasks import order_tasks
+from taskgraph.util.schema import (
+    validate_schema,
+    Schema,
+)
+from taskgraph.util.python_path import import_sibling_modules
+from taskgraph.util.taskcluster import get_artifact_prefix
+from taskgraph.util.workertypes import worker_type_implementation
+from taskgraph.transforms.task import task_description_schema
+from voluptuous import (
+    Extra,
+    Optional,
+    Required,
+    Exclusive,
+)
+
+logger = logging.getLogger(__name__)
+
+# Schema for a build description
+job_description_schema = Schema(
+    {
+        # The name of the job and the job's label. At least one must be specified,
+        # and the label will be generated from the name if necessary, by prepending
+        # the kind.
+        Optional("name"): text_type,
+        Optional("label"): text_type,
+        # the following fields are passed directly through to the task description,
+        # possibly modified by the run implementation. See
+        # taskcluster/taskgraph/transforms/task.py for the schema details.
+        Required("description"): task_description_schema["description"],
+        Optional("attributes"): task_description_schema["attributes"],
+        Optional("job-from"): task_description_schema["job-from"],
+        Optional("dependencies"): task_description_schema["dependencies"],
+        Optional("if-dependencies"): task_description_schema["if-dependencies"],
+        Optional("soft-dependencies"): task_description_schema["soft-dependencies"],
+        Optional("requires"): task_description_schema["requires"],
+        Optional("expires-after"): task_description_schema["expires-after"],
+        Optional("routes"): task_description_schema["routes"],
+        Optional("scopes"): task_description_schema["scopes"],
+        Optional("tags"): task_description_schema["tags"],
+        Optional("extra"): task_description_schema["extra"],
+        Optional("treeherder"): task_description_schema["treeherder"],
+        Optional("index"): task_description_schema["index"],
+        Optional("run-on-projects"): task_description_schema["run-on-projects"],
+        Optional("shipping-phase"): task_description_schema["shipping-phase"],
+        Optional("shipping-product"): task_description_schema["shipping-product"],
+        Optional("always-target"): task_description_schema["always-target"],
+        Exclusive("optimization", "optimization"): task_description_schema[
+            "optimization"
+        ],
+        Optional("use-sccache"): task_description_schema["use-sccache"],
+        Optional("release-artifacts"): task_description_schema["release-artifacts"],
+        Optional("priority"): task_description_schema["priority"],
+        # The "when" section contains descriptions of the circumstances under which
+        # this task should be included in the task graph. This will be converted
+        # into an optimization, so it cannot be specified in a job description that
+        # also gives 'optimization'.
+        Exclusive("when", "optimization"): {
+            # This task only needs to be run if a file matching one of the given
+            # patterns has changed in the push. The patterns use the mozpack
+            # match function (python/mozbuild/mozpack/path.py).
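+            # (Illustrative patterns: "browser/**" or "taskcluster/**/*.py";
+            # "**" matches across directory separators.)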
+ Optional("files-changed"): [text_type], + }, + # A list of artifacts to install from 'fetch' tasks. + Optional("fetches"): { + text_type: [ + text_type, + { + Required("artifact"): text_type, + Optional("dest"): text_type, + Optional("extract"): bool, + Optional("verify-hash"): bool, + }, + ], + }, + # A description of how to run this job. + "run": { + # The key to a job implementation in a peer module to this one + "using": text_type, + # Base work directory used to set up the task. + Optional("workdir"): text_type, + # Any remaining content is verified against that job implementation's + # own schema. + Extra: object, + }, + Required("worker-type"): task_description_schema["worker-type"], + # This object will be passed through to the task description, with additions + # provided by the job's run-using function + Optional("worker"): dict, + } +) + +transforms = TransformSequence() +transforms.add_validate(job_description_schema) + + +@transforms.add +def rewrite_when_to_optimization(config, jobs): + for job in jobs: + when = job.pop("when", {}) + if not when: + yield job + continue + + files_changed = when.get("files-changed") + + # implicitly add task config directory. + files_changed.append("{}/**".format(config.path)) + + # "only when files changed" implies "skip if files have not changed" + job["optimization"] = {"skip-unless-changed": files_changed} + + assert "when" not in job + yield job + + +@transforms.add +def set_implementation(config, jobs): + for job in jobs: + impl, os = worker_type_implementation(config.graph_config, job["worker-type"]) + if os: + job.setdefault("tags", {})["os"] = os + if impl: + job.setdefault("tags", {})["worker-implementation"] = impl + worker = job.setdefault("worker", {}) + assert "implementation" not in worker + worker["implementation"] = impl + if os: + worker["os"] = os + yield job + + +@transforms.add +def set_label(config, jobs): + for job in jobs: + if "label" not in job: + if "name" not in job: + raise Exception("job has neither a name nor a label") + job["label"] = "{}-{}".format(config.kind, job["name"]) + if job.get("name"): + del job["name"] + yield job + + +@transforms.add +def add_resource_monitor(config, jobs): + for job in jobs: + if job.get("attributes", {}).get("resource-monitor"): + worker_implementation, worker_os = worker_type_implementation( + config.graph_config, job["worker-type"] + ) + # Normalise worker os so that linux-bitbar and similar use linux tools. 
+ worker_os = worker_os.split("-")[0] + # We don't currently support an Arm worker, due to gopsutil's indirect + # dependencies (go-ole) + if "aarch64" in job["worker-type"]: + yield job + continue + elif "win7" in job["worker-type"]: + arch = "32" + else: + arch = "64" + job.setdefault("fetches", {}) + job["fetches"].setdefault("toolchain", []) + job["fetches"]["toolchain"].append( + "{}{}-resource-monitor".format(worker_os, arch) + ) + + if worker_implementation == "docker-worker": + artifact_source = "/builds/worker/monitoring/resource-monitor.json" + else: + artifact_source = "monitoring/resource-monitor.json" + job["worker"].setdefault("artifacts", []) + job["worker"]["artifacts"].append( + { + "name": "public/monitoring/resource-monitor.json", + "type": "file", + "path": artifact_source, + } + ) + # Set env for output file + job["worker"].setdefault("env", {}) + job["worker"]["env"]["RESOURCE_MONITOR_OUTPUT"] = artifact_source + + yield job + + +def get_attribute(dict, key, attributes, attribute_name): + """Get `attribute_name` from the given `attributes` dict, and if there + is a corresponding value, set `key` in `dict` to that value.""" + value = attributes.get(attribute_name) + if value: + dict[key] = value + + +@transforms.add +def use_fetches(config, jobs): + artifact_names = {} + aliases = {} + + if config.kind in ("toolchain", "fetch"): + jobs = list(jobs) + for job in jobs: + run = job.get("run", {}) + label = job["label"] + get_attribute(artifact_names, label, run, "toolchain-artifact") + value = run.get("{}-alias".format(config.kind)) + if value: + aliases["{}-{}".format(config.kind, value)] = label + + for task in config.kind_dependencies_tasks.values(): + if task.kind in ("fetch", "toolchain"): + get_attribute( + artifact_names, + task.label, + task.attributes, + "{kind}-artifact".format(kind=task.kind), + ) + value = task.attributes.get("{}-alias".format(task.kind)) + if value: + aliases["{}-{}".format(task.kind, value)] = task.label + + artifact_prefixes = {} + for job in order_tasks(config, jobs): + artifact_prefixes[job["label"]] = get_artifact_prefix(job) + + fetches = job.pop("fetches", None) + if not fetches: + yield job + continue + + job_fetches = [] + name = job.get("name", job.get("label")) + dependencies = job.setdefault("dependencies", {}) + worker = job.setdefault("worker", {}) + prefix = get_artifact_prefix(job) + has_sccache = False + for kind, artifacts in fetches.items(): + if kind in ("fetch", "toolchain"): + for fetch_name in artifacts: + label = "{kind}-{name}".format(kind=kind, name=fetch_name) + label = aliases.get(label, label) + if label not in artifact_names: + raise Exception( + "Missing fetch job for {kind}-{name}: {fetch}".format( + kind=config.kind, name=name, fetch=fetch_name + ) + ) + + path = artifact_names[label] + + dependencies[label] = label + job_fetches.append( + { + "artifact": path, + "task": "<{label}>".format(label=label), + "extract": True, + } + ) + + if kind == "toolchain" and fetch_name.endswith("-sccache"): + has_sccache = True + else: + if kind not in dependencies: + raise Exception( + "{name} can't fetch {kind} artifacts because " + "it has no {kind} dependencies!".format(name=name, kind=kind) + ) + dep_label = dependencies[kind] + if dep_label in artifact_prefixes: + prefix = artifact_prefixes[dep_label] + else: + if dep_label not in config.kind_dependencies_tasks: + raise Exception( + "{name} can't fetch {kind} artifacts because " + "there are no tasks with label {label} in kind dependencies!".format( + name=name, + 
kind=kind, + label=dependencies[kind], + ) + ) + + prefix = get_artifact_prefix( + config.kind_dependencies_tasks[dep_label] + ) + + for artifact in artifacts: + if isinstance(artifact, text_type): + path = artifact + dest = None + extract = True + verify_hash = False + else: + path = artifact["artifact"] + dest = artifact.get("dest") + extract = artifact.get("extract", True) + verify_hash = artifact.get("verify-hash", False) + + fetch = { + "artifact": "{prefix}/{path}".format(prefix=prefix, path=path) + if not path.startswith("/") + else path[1:], + "task": "<{dep}>".format(dep=kind), + "extract": extract, + } + if dest is not None: + fetch["dest"] = dest + if verify_hash: + fetch["verify-hash"] = verify_hash + job_fetches.append(fetch) + + if job.get("use-sccache") and not has_sccache: + raise Exception("Must provide an sccache toolchain if using sccache.") + + job_artifact_prefixes = { + mozpath.dirname(fetch["artifact"]) + for fetch in job_fetches + if not fetch["artifact"].startswith("public/") + } + if job_artifact_prefixes: + # Use taskcluster-proxy and request appropriate scope. For example, add + # 'scopes: [queue:get-artifact:path/to/*]' for 'path/to/artifact.tar.xz'. + worker["taskcluster-proxy"] = True + for prefix in sorted(job_artifact_prefixes): + scope = "queue:get-artifact:{}/*".format(prefix) + if scope not in job.setdefault("scopes", []): + job["scopes"].append(scope) + + env = worker.setdefault("env", {}) + env["MOZ_FETCHES"] = { + "task-reference": six.ensure_text( + json.dumps( + sorted(job_fetches, key=lambda x: sorted(x.items())), sort_keys=True + ) + ) + } + # The path is normalized to an absolute path in run-task + env.setdefault("MOZ_FETCHES_DIR", "fetches") + + yield job + + +@transforms.add +def make_task_description(config, jobs): + """Given a build description, create a task description""" + # import plugin modules first, before iterating over jobs + import_sibling_modules(exceptions=("common.py",)) + + for job in jobs: + # only docker-worker uses a fixed absolute path to find directories + if job["worker"]["implementation"] == "docker-worker": + job["run"].setdefault("workdir", "/builds/worker") + + taskdesc = copy.deepcopy(job) + + # fill in some empty defaults to make run implementations easier + taskdesc.setdefault("attributes", {}) + taskdesc.setdefault("dependencies", {}) + taskdesc.setdefault("if-dependencies", []) + taskdesc.setdefault("soft-dependencies", []) + taskdesc.setdefault("routes", []) + taskdesc.setdefault("scopes", []) + taskdesc.setdefault("extra", {}) + + # give the function for job.run.using on this worker implementation a + # chance to set up the task description. + configure_taskdesc_for_run( + config, job, taskdesc, job["worker"]["implementation"] + ) + del taskdesc["run"] + + # yield only the task description, discarding the job description + yield taskdesc + + +# A registry of all functions decorated with run_job_using +registry = {} + + +def run_job_using(worker_implementation, run_using, schema=None, defaults={}): + """Register the decorated function as able to set up a task description for + jobs with the given worker implementation and `run.using` property. If + `schema` is given, the job's run field will be verified to match it. + + The decorated function should have the signature `using_foo(config, job, taskdesc)` + and should modify the task description in-place. 
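+
+    For example, a (hypothetical) handler for a custom runner might be
+    registered as::
+
+        @run_job_using("docker-worker", "custom-script", schema=custom_schema)
+        def docker_worker_custom_script(config, job, taskdesc):
+            taskdesc["worker"]["command"] = ["bash", "-c", job["run"]["script"]]
+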
The skeleton of + the task description is already set up, but without a payload.""" + + def wrap(func): + for_run_using = registry.setdefault(run_using, {}) + if worker_implementation in for_run_using: + raise Exception( + "run_job_using({!r}, {!r}) already exists: {!r}".format( + run_using, worker_implementation, for_run_using[run_using] + ) + ) + for_run_using[worker_implementation] = (func, schema, defaults) + return func + + return wrap + + +@run_job_using( + "always-optimized", "always-optimized", Schema({"using": "always-optimized"}) +) +def always_optimized(config, job, taskdesc): + pass + + +def configure_taskdesc_for_run(config, job, taskdesc, worker_implementation): + """ + Run the appropriate function for this job against the given task + description. + + This will raise an appropriate error if no function exists, or if the job's + run is not valid according to the schema. + """ + run_using = job["run"]["using"] + if run_using not in registry: + raise Exception("no functions for run.using {!r}".format(run_using)) + + if worker_implementation not in registry[run_using]: + raise Exception( + "no functions for run.using {!r} on {!r}".format( + run_using, worker_implementation + ) + ) + + func, schema, defaults = registry[run_using][worker_implementation] + for k, v in defaults.items(): + job["run"].setdefault(k, v) + + if schema: + validate_schema( + schema, + job["run"], + "In job.run using {!r}/{!r} for job {!r}:".format( + job["run"]["using"], worker_implementation, job["label"] + ), + ) + func(config, job, taskdesc) diff --git a/taskcluster/taskgraph/transforms/job/common.py b/taskcluster/taskgraph/transforms/job/common.py new file mode 100644 index 0000000000..4a63655105 --- /dev/null +++ b/taskcluster/taskgraph/transforms/job/common.py @@ -0,0 +1,254 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Common support for various job types. These functions are all named after the +worker implementation they operate on, and take the same three parameters, for +consistency. +""" + +from __future__ import absolute_import, print_function, unicode_literals + +from taskgraph.util.taskcluster import get_artifact_prefix + +SECRET_SCOPE = "secrets:get:project/releng/{trust_domain}/{kind}/level-{level}/{secret}" + + +def add_cache(job, taskdesc, name, mount_point, skip_untrusted=False): + """Adds a cache based on the worker's implementation. + + Args: + job (dict): Task's job description. + taskdesc (dict): Target task description to modify. + name (str): Name of the cache. + mount_point (path): Path on the host to mount the cache. + skip_untrusted (bool): Whether cache is used in untrusted environments + (default: False). Only applies to docker-worker. 
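+
+    A typical call (mirroring ``support_vcs_checkout`` in this module) is::
+
+        add_cache(job, taskdesc, "checkouts", checkoutdir)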
+ """ + if not job["run"].get("use-caches", True): + return + + worker = job["worker"] + + if worker["implementation"] == "docker-worker": + taskdesc["worker"].setdefault("caches", []).append( + { + "type": "persistent", + "name": name, + "mount-point": mount_point, + "skip-untrusted": skip_untrusted, + } + ) + + elif worker["implementation"] == "generic-worker": + taskdesc["worker"].setdefault("mounts", []).append( + { + "cache-name": name, + "directory": mount_point, + } + ) + + else: + # Caches not implemented + pass + + +def add_artifacts(config, job, taskdesc, path): + taskdesc["worker"].setdefault("artifacts", []).append( + { + "name": get_artifact_prefix(taskdesc), + "path": path, + "type": "directory", + } + ) + + +def docker_worker_add_artifacts(config, job, taskdesc): + """ Adds an artifact directory to the task """ + path = "{workdir}/artifacts/".format(**job["run"]) + taskdesc["worker"].setdefault("env", {})["UPLOAD_DIR"] = path + add_artifacts(config, job, taskdesc, path) + + +def generic_worker_add_artifacts(config, job, taskdesc): + """ Adds an artifact directory to the task """ + # The path is the location on disk; it doesn't necessarily + # mean the artifacts will be public or private; that is set via the name + # attribute in add_artifacts. + path = get_artifact_prefix(taskdesc) + taskdesc["worker"].setdefault("env", {})["UPLOAD_DIR"] = path + add_artifacts(config, job, taskdesc, path=path) + + +def support_vcs_checkout(config, job, taskdesc, sparse=False): + """Update a job/task with parameters to enable a VCS checkout. + + This can only be used with ``run-task`` tasks, as the cache name is + reserved for ``run-task`` tasks. + """ + worker = job["worker"] + is_mac = worker["os"] == "macosx" + is_win = worker["os"] == "windows" + is_linux = worker["os"] == "linux" or "linux-bitbar" + is_docker = worker["implementation"] == "docker-worker" + assert is_mac or is_win or is_linux + + if is_win: + checkoutdir = "./build" + geckodir = "{}/src".format(checkoutdir) + hgstore = "y:/hg-shared" + elif is_docker: + checkoutdir = "{workdir}/checkouts".format(**job["run"]) + geckodir = "{}/gecko".format(checkoutdir) + hgstore = "{}/hg-store".format(checkoutdir) + else: + checkoutdir = "./checkouts" + geckodir = "{}/gecko".format(checkoutdir) + hgstore = "{}/hg-shared".format(checkoutdir) + + cache_name = "checkouts" + + # Sparse checkouts need their own cache because they can interfere + # with clients that aren't sparse aware. + if sparse: + cache_name += "-sparse" + + add_cache(job, taskdesc, cache_name, checkoutdir) + + taskdesc["worker"].setdefault("env", {}).update( + { + "GECKO_BASE_REPOSITORY": config.params["base_repository"], + "GECKO_HEAD_REPOSITORY": config.params["head_repository"], + "GECKO_HEAD_REV": config.params["head_rev"], + "HG_STORE_PATH": hgstore, + } + ) + taskdesc["worker"]["env"].setdefault("GECKO_PATH", geckodir) + + if "comm_base_repository" in config.params: + taskdesc["worker"]["env"].update( + { + "COMM_BASE_REPOSITORY": config.params["comm_base_repository"], + "COMM_HEAD_REPOSITORY": config.params["comm_head_repository"], + "COMM_HEAD_REV": config.params["comm_head_rev"], + } + ) + elif job["run"].get("comm-checkout", False): + raise Exception( + "Can't checkout from comm-* repository if not given a repository." + ) + + # Give task access to hgfingerprint secret so it can pin the certificate + # for hg.mozilla.org. 
+ taskdesc["scopes"].append("secrets:get:project/taskcluster/gecko/hgfingerprint") + taskdesc["scopes"].append("secrets:get:project/taskcluster/gecko/hgmointernal") + + # only some worker platforms have taskcluster-proxy enabled + if job["worker"]["implementation"] in ("docker-worker",): + taskdesc["worker"]["taskcluster-proxy"] = True + + +def generic_worker_hg_commands( + base_repo, head_repo, head_rev, path, sparse_profile=None +): + """Obtain commands needed to obtain a Mercurial checkout on generic-worker. + + Returns two command strings. One performs the checkout. Another logs. + """ + args = [ + r'"c:\Program Files\Mercurial\hg.exe"', + "robustcheckout", + "--sharebase", + r"y:\hg-shared", + "--purge", + "--upstream", + base_repo, + "--revision", + head_rev, + ] + + if sparse_profile: + args.extend(["--config", "extensions.sparse="]) + args.extend(["--sparseprofile", sparse_profile]) + + args.extend( + [ + head_repo, + path, + ] + ) + + logging_args = [ + b":: TinderboxPrint:<a href={source_repo}/rev/{revision} " + b"title='Built from {repo_name} revision {revision}'>{revision}</a>" + b"\n".format( + revision=head_rev, source_repo=head_repo, repo_name=head_repo.split("/")[-1] + ), + ] + + return [" ".join(args), " ".join(logging_args)] + + +def setup_secrets(config, job, taskdesc): + """Set up access to secrets via taskcluster-proxy. The value of + run['secrets'] should be a boolean or a list of secret names that + can be accessed.""" + if not job["run"].get("secrets"): + return + + taskdesc["worker"]["taskcluster-proxy"] = True + secrets = job["run"]["secrets"] + if secrets is True: + secrets = ["*"] + for secret in secrets: + taskdesc["scopes"].append( + SECRET_SCOPE.format( + trust_domain=config.graph_config["trust-domain"], + kind=job["treeherder"]["kind"], + level=config.params["level"], + secret=secret, + ) + ) + + +def add_tooltool(config, job, taskdesc, internal=False): + """Give the task access to tooltool. + + Enables the tooltool cache. Adds releng proxy. Configures scopes. + + By default, only public tooltool access will be granted. Access to internal + tooltool can be enabled via ``internal=True``. + + This can only be used with ``run-task`` tasks, as the cache name is + reserved for use with ``run-task``. + """ + + if job["worker"]["implementation"] in ("docker-worker",): + add_cache( + job, + taskdesc, + "tooltool-cache", + "{workdir}/tooltool-cache".format(**job["run"]), + ) + + taskdesc["worker"].setdefault("env", {}).update( + { + "TOOLTOOL_CACHE": "{workdir}/tooltool-cache".format(**job["run"]), + } + ) + elif not internal: + return + + taskdesc["worker"]["taskcluster-proxy"] = True + taskdesc["scopes"].extend( + [ + "project:releng:services/tooltool/api/download/public", + ] + ) + + if internal: + taskdesc["scopes"].extend( + [ + "project:releng:services/tooltool/api/download/internal", + ] + ) diff --git a/taskcluster/taskgraph/transforms/job/debian_package.py b/taskcluster/taskgraph/transforms/job/debian_package.py new file mode 100644 index 0000000000..1d7bafee93 --- /dev/null +++ b/taskcluster/taskgraph/transforms/job/debian_package.py @@ -0,0 +1,215 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+""" +Support for running spidermonkey jobs via dedicated scripts +""" + +from __future__ import absolute_import, print_function, unicode_literals + +import os +import re +from six import text_type + +from taskgraph.util.schema import Schema +from voluptuous import Any, Optional, Required + +from taskgraph.transforms.job import run_job_using +from taskgraph.transforms.job.common import add_artifacts + +from taskgraph.util.hash import hash_path +from taskgraph.util.taskcluster import get_root_url +from taskgraph import GECKO +import taskgraph + +DSC_PACKAGE_RE = re.compile(".*(?=_)") +SOURCE_PACKAGE_RE = re.compile(".*(?=[-_]\d)") + +source_definition = { + Required("url"): text_type, + Required("sha256"): text_type, +} + +run_schema = Schema( + { + Required("using"): "debian-package", + # Debian distribution + Required("dist"): text_type, + # Date of the snapshot (from snapshot.debian.org) to use, in the format + # YYYYMMDDTHHMMSSZ. The same date is used for the base docker-image name + # (only the YYYYMMDD part). + Required("snapshot"): text_type, + # URL/SHA256 of a source file to build, which can either be a source + # control (.dsc), or a tarball. + Required(Any("dsc", "tarball")): source_definition, + # Package name. Normally derived from the source control or tarball file + # name. Use in case the name doesn't match DSC_PACKAGE_RE or + # SOURCE_PACKAGE_RE. + Optional("name"): text_type, + # Patch to apply to the extracted source. + Optional("patch"): text_type, + # Command to run before dpkg-buildpackage. + Optional("pre-build-command"): text_type, + # Architecture to build the package for. + Optional("arch"): text_type, + # List of package tasks to get build dependencies from. + Optional("packages"): [text_type], + # What resolver to use to install build dependencies. The default + # (apt-get) is good in most cases, but in subtle cases involving + # a *-backports archive, its solver might not be able to find a + # solution that satisfies the build dependencies. + Optional("resolver"): Any("apt-get", "aptitude"), + # Base work directory used to set up the task. 
+ Required("workdir"): text_type, + } +) + + +@run_job_using("docker-worker", "debian-package", schema=run_schema) +def docker_worker_debian_package(config, job, taskdesc): + run = job["run"] + + name = taskdesc["label"].replace("{}-".format(config.kind), "", 1) + + arch = run.get("arch", "amd64") + + worker = taskdesc["worker"] + worker.setdefault("artifacts", []) + version = { + "wheezy": 7, + "jessie": 8, + "stretch": 9, + "buster": 10, + }[run["dist"]] + image = "debian%d" % version + if arch != "amd64": + image += "-" + arch + image += "-packages" + worker["docker-image"] = {"in-tree": image} + + add_artifacts(config, job, taskdesc, path="/tmp/artifacts") + + env = worker.setdefault("env", {}) + env["DEBFULLNAME"] = "Mozilla build team" + env["DEBEMAIL"] = "dev-builds@lists.mozilla.org" + + if "dsc" in run: + src = run["dsc"] + unpack = "dpkg-source -x {src_file} {package}" + package_re = DSC_PACKAGE_RE + elif "tarball" in run: + src = run["tarball"] + unpack = ( + "mkdir {package} && " + "tar -C {package} -axf {src_file} --strip-components=1" + ) + package_re = SOURCE_PACKAGE_RE + else: + raise RuntimeError("Unreachable") + src_url = src["url"] + src_file = os.path.basename(src_url) + src_sha256 = src["sha256"] + package = run.get("name") + if not package: + package = package_re.match(src_file).group(0) + unpack = unpack.format(src_file=src_file, package=package) + + resolver = run.get("resolver", "apt-get") + if resolver == "apt-get": + resolver = "apt-get -yyq --no-install-recommends" + elif resolver == "aptitude": + resolver = ( + "aptitude -y --without-recommends -o " + "Aptitude::ProblemResolver::Hints::KeepBuildDeps=" + '"reject {}-build-deps :UNINST"' + ).format(package) + else: + raise RuntimeError("Unreachable") + + adjust = "" + if "patch" in run: + # We don't use robustcheckout or run-task to get a checkout. So for + # this one file we'd need from a checkout, download it. + env["PATCH_URL"] = config.params.file_url( + "build/debian-packages/{patch}".format(patch=run["patch"]), + ) + adjust += "curl -sL $PATCH_URL | patch -p1 && " + if "pre-build-command" in run: + adjust += run["pre-build-command"] + " && " + if "tarball" in run: + adjust += "mv ../{src_file} ../{package}_{ver}.orig.tar.gz && ".format( + src_file=src_file, + package=package, + ver="$(dpkg-parsechangelog | awk '$1==\"Version:\"{print $2}' | cut -f 1 -d -)", + ) + if "patch" not in run and "pre-build-command" not in run: + adjust += ( + 'debchange -l ".{prefix}moz" --distribution "{dist}"' + ' "Mozilla backport for {dist}." < /dev/null && ' + ).format( + prefix=name.split("-", 1)[0], + dist=run["dist"], + ) + + worker["command"] = [ + "sh", + "-x", + "-c", + # Add sources for packages coming from other package tasks. + "/usr/local/sbin/setup_packages.sh {root_url} $PACKAGES && " + "apt-get update && " + # Upgrade packages that might have new versions in package tasks. + "apt-get dist-upgrade && " "cd /tmp && " + # Get, validate and extract the package source. + "(dget -d -u {src_url} || exit 100) && " + 'echo "{src_sha256} {src_file}" | sha256sum -c && ' + "{unpack} && " + "cd {package} && " + # Optionally apply patch and/or pre-build command. + "{adjust}" + # Install the necessary build dependencies. 
+ "(mk-build-deps -i -r debian/control -t '{resolver}' || exit 100) && " + # Build the package + 'DEB_BUILD_OPTIONS="parallel=$(nproc) nocheck" dpkg-buildpackage && ' + # Copy the artifacts + "mkdir -p {artifacts}/debian && " + "dcmd cp ../{package}_*.changes {artifacts}/debian/ && " + "cd {artifacts} && " + # Make the artifacts directory usable as an APT repository. + "apt-ftparchive sources debian | gzip -c9 > debian/Sources.gz && " + "apt-ftparchive packages debian | gzip -c9 > debian/Packages.gz".format( + root_url=get_root_url(False), + package=package, + src_url=src_url, + src_file=src_file, + src_sha256=src_sha256, + unpack=unpack, + adjust=adjust, + artifacts="/tmp/artifacts", + resolver=resolver, + ), + ] + + if run.get("packages"): + env = worker.setdefault("env", {}) + env["PACKAGES"] = { + "task-reference": " ".join("<{}>".format(p) for p in run["packages"]) + } + deps = taskdesc.setdefault("dependencies", {}) + for p in run["packages"]: + deps[p] = "packages-{}".format(p) + + # Use the command generated above as the base for the index hash. + # We rely on it not varying depending on the head_repository or head_rev. + digest_data = list(worker["command"]) + if "patch" in run: + digest_data.append( + hash_path(os.path.join(GECKO, "build", "debian-packages", run["patch"])) + ) + + if not taskgraph.fast: + taskdesc["cache"] = { + "type": "packages.v1", + "name": name, + "digest-data": digest_data, + } diff --git a/taskcluster/taskgraph/transforms/job/hazard.py b/taskcluster/taskgraph/transforms/job/hazard.py new file mode 100644 index 0000000000..66a33a2001 --- /dev/null +++ b/taskcluster/taskgraph/transforms/job/hazard.py @@ -0,0 +1,71 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Support for running hazard jobs via dedicated scripts +""" + +from __future__ import absolute_import, print_function, unicode_literals + +from six import text_type +from taskgraph.util.schema import Schema +from voluptuous import Required, Optional, Any + +from taskgraph.transforms.job import ( + run_job_using, + configure_taskdesc_for_run, +) +from taskgraph.transforms.job.common import ( + setup_secrets, + docker_worker_add_artifacts, + add_tooltool, +) + +haz_run_schema = Schema( + { + Required("using"): "hazard", + # The command to run within the task image (passed through to the worker) + Required("command"): text_type, + # The mozconfig to use; default in the script is used if omitted + Optional("mozconfig"): text_type, + # The set of secret names to which the task has access; these are prefixed + # with `project/releng/gecko/{treeherder.kind}/level-{level}/`. Setting + # this will enable any worker features required and set the task's scopes + # appropriately. `true` here means ['*'], all secrets. Not supported on + # Windows + Optional("secrets"): Any(bool, [text_type]), + # Base work directory used to set up the task. 
+ Optional("workdir"): text_type, + } +) + + +@run_job_using("docker-worker", "hazard", schema=haz_run_schema) +def docker_worker_hazard(config, job, taskdesc): + run = job["run"] + + worker = taskdesc["worker"] = job["worker"] + worker.setdefault("artifacts", []) + + docker_worker_add_artifacts(config, job, taskdesc) + worker.setdefault("required-volumes", []).append( + "{workdir}/workspace".format(**run) + ) + add_tooltool(config, job, taskdesc) + setup_secrets(config, job, taskdesc) + + env = worker["env"] + env.update( + { + "MOZ_BUILD_DATE": config.params["moz_build_date"], + "MOZ_SCM_LEVEL": config.params["level"], + } + ) + + # script parameters + if run.get("mozconfig"): + env["MOZCONFIG"] = run.pop("mozconfig") + + run["using"] = "run-task" + run["cwd"] = run["workdir"] + configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"]) diff --git a/taskcluster/taskgraph/transforms/job/mach.py b/taskcluster/taskgraph/transforms/job/mach.py new file mode 100644 index 0000000000..5a771a743c --- /dev/null +++ b/taskcluster/taskgraph/transforms/job/mach.py @@ -0,0 +1,84 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Support for running mach tasks (via run-task) +""" + +from __future__ import absolute_import, print_function, unicode_literals + +from six import text_type +from taskgraph.transforms.job import run_job_using, configure_taskdesc_for_run +from taskgraph.util.schema import ( + Schema, + taskref_or_string, +) +from voluptuous import Required, Optional, Any + +mach_schema = Schema( + { + Required("using"): "mach", + # The mach command (omitting `./mach`) to run + Required("mach"): taskref_or_string, + # The version of Python to run with. Either an absolute path to the binary + # on the worker, a version identifier (e.g python2.7 or 3.8). There is no + # validation performed to ensure the specified binaries actually exist. + Optional("python-version"): Any(text_type, int, float), + # The sparse checkout profile to use. Value is the filename relative to the + # directory where sparse profiles are defined (build/sparse-profiles/). + Optional("sparse-profile"): Any(text_type, None), + # if true, perform a checkout of a comm-central based branch inside the + # gecko checkout + Required("comm-checkout"): bool, + # Base work directory used to set up the task. 
+ Optional("workdir"): text_type, + } +) + + +defaults = { + "comm-checkout": False, +} + + +@run_job_using("docker-worker", "mach", schema=mach_schema, defaults=defaults) +@run_job_using("generic-worker", "mach", schema=mach_schema, defaults=defaults) +def configure_mach(config, job, taskdesc): + run = job["run"] + worker = job["worker"] + + additional_prefix = [] + if worker["os"] == "macosx": + additional_prefix = ["LC_ALL=en_US.UTF-8", "LANG=en_US.UTF-8"] + + python = run.get("python-version") + if python: + del run["python-version"] + + if worker["os"] == "macosx" and python == 3: + python = "/usr/local/bin/python3" + + python = str(python) + try: + float(python) + python = "python" + python + except ValueError: + pass + + additional_prefix.append(python) + + command_prefix = " ".join(additional_prefix + ["./mach "]) + + mach = run["mach"] + if isinstance(mach, dict): + ref, pattern = next(iter(mach.items())) + command = {ref: command_prefix + pattern} + else: + command = command_prefix + mach + + # defer to the run_task implementation + run["command"] = command + run["cwd"] = "{checkout}" + run["using"] = "run-task" + del run["mach"] + configure_taskdesc_for_run(config, job, taskdesc, job["worker"]["implementation"]) diff --git a/taskcluster/taskgraph/transforms/job/mozharness.py b/taskcluster/taskgraph/transforms/job/mozharness.py new file mode 100644 index 0000000000..0b1da061cd --- /dev/null +++ b/taskcluster/taskgraph/transforms/job/mozharness.py @@ -0,0 +1,369 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" + +Support for running jobs via mozharness. Ideally, most stuff gets run this +way, and certainly anything using mozharness should use this approach. + +""" + +from __future__ import absolute_import, print_function, unicode_literals +import json + +import six +from six import text_type +from textwrap import dedent + +from taskgraph.util.schema import Schema +from voluptuous import Required, Optional, Any +from voluptuous.validators import Match + +from mozpack import path as mozpath + +from taskgraph.transforms.job import ( + configure_taskdesc_for_run, + run_job_using, +) +from taskgraph.transforms.job.common import ( + setup_secrets, + docker_worker_add_artifacts, + generic_worker_add_artifacts, +) +from taskgraph.transforms.task import ( + get_branch_repo, + get_branch_rev, +) + +mozharness_run_schema = Schema( + { + Required("using"): "mozharness", + # the mozharness script used to run this task, relative to the testing/ + # directory and using forward slashes even on Windows + Required("script"): text_type, + # Additional paths to look for mozharness configs in. 
+ # These should be
+ # relative to the base of the source checkout
+ Optional("config-paths"): [text_type],
+ # the config files required for the task, relative to
+ # testing/mozharness/configs or one of the paths specified in
+ # `config-paths` and using forward slashes even on Windows
+ Required("config"): [text_type],
+ # any additional actions to pass to the mozharness command
+ Optional("actions"): [
+ Match("^[a-z0-9-]+$", "actions must be `-` separated alphanumeric strings")
+ ],
+ # any additional options (without leading --) to be passed to mozharness
+ Optional("options"): [
+ Match(
+ "^[a-z0-9-]+(=[^ ]+)?$",
+ "options must be `-` separated alphanumeric strings (with optional argument)",
+ )
+ ],
+ # --custom-build-variant-cfg value
+ Optional("custom-build-variant-cfg"): text_type,
+ # Extra configuration options to pass to mozharness.
+ Optional("extra-config"): dict,
+ # If not false, tooltool downloads will be enabled via relengAPIProxy
+ # for either just public files, or all files. Not supported on Windows
+ Required("tooltool-downloads"): Any(
+ False,
+ "public",
+ "internal",
+ ),
+ # The set of secret names to which the task has access; these are prefixed
+ # with `project/releng/gecko/{treeherder.kind}/level-{level}/`. Setting
+ # this will enable any worker features required and set the task's scopes
+ # appropriately. `true` here means ['*'], all secrets. Not supported on
+ # Windows
+ Required("secrets"): Any(bool, [text_type]),
+ # If true, taskcluster proxy will be enabled; note that it may also be enabled
+ # automatically e.g., for secrets support. Not supported on Windows.
+ Required("taskcluster-proxy"): bool,
+ # If true, the build scripts will start Xvfb. Not supported on Windows.
+ Required("need-xvfb"): bool,
+ # If false, indicate that builds should skip producing artifacts. Not
+ # supported on Windows.
+ Required("keep-artifacts"): bool,
+ # If specified, use the given in-tree job script.
+ Optional("job-script"): text_type,
+ Required("requires-signed-builds"): bool,
+ # Whether or not to use caches.
+ Optional("use-caches"): bool,
+ # If false, don't set MOZ_SIMPLE_PACKAGE_NAME
+ # Only disableable on windows
+ Required("use-simple-package"): bool,
+ # If false, don't pass --branch to the mozharness script
+ # Only disableable on windows
+ Required("use-magic-mh-args"): bool,
+ # if true, perform a checkout of a comm-central based branch inside the
+ # gecko checkout
+ Required("comm-checkout"): bool,
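+ # As a point of reference, a build kind might instantiate this schema with
+ # a stanza along these lines (an illustrative sketch, not part of this
+ # patch; the config file name is only an example):
+ #
+ # run:
+ # using: mozharness
+ # script: mozharness/scripts/fx_desktop_build.py
+ # config: ["builds/releng_base_firefox.py"]
+ # actions: [build]
+ # need-xvfb: true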
+ Optional("workdir"): text_type, + } +) + + +mozharness_defaults = { + "tooltool-downloads": False, + "secrets": False, + "taskcluster-proxy": False, + "need-xvfb": False, + "keep-artifacts": True, + "requires-signed-builds": False, + "use-simple-package": True, + "use-magic-mh-args": True, + "comm-checkout": False, +} + + +@run_job_using( + "docker-worker", + "mozharness", + schema=mozharness_run_schema, + defaults=mozharness_defaults, +) +def mozharness_on_docker_worker_setup(config, job, taskdesc): + run = job["run"] + + worker = taskdesc["worker"] = job["worker"] + + if not run.pop("use-simple-package", None): + raise NotImplementedError( + "Simple packaging cannot be disabled via" + "'use-simple-package' on docker-workers" + ) + if not run.pop("use-magic-mh-args", None): + raise NotImplementedError( + "Cannot disabled mh magic arg passing via" + "'use-magic-mh-args' on docker-workers" + ) + + # Running via mozharness assumes an image that contains build.sh: + # by default, debian8-amd64-build, but it could be another image (like + # android-build). + worker.setdefault("docker-image", {"in-tree": "debian8-amd64-build"}) + + worker.setdefault("artifacts", []).append( + { + "name": "public/logs", + "path": "{workdir}/logs/".format(**run), + "type": "directory", + } + ) + worker["taskcluster-proxy"] = run.pop("taskcluster-proxy", None) + docker_worker_add_artifacts(config, job, taskdesc) + + env = worker.setdefault("env", {}) + env.update( + { + "WORKSPACE": "{workdir}/workspace".format(**run), + "MOZHARNESS_CONFIG": " ".join(run.pop("config")), + "MOZHARNESS_SCRIPT": run.pop("script"), + "MH_BRANCH": config.params["project"], + "MOZ_SOURCE_CHANGESET": get_branch_rev(config), + "MOZ_SOURCE_REPO": get_branch_repo(config), + "MH_BUILD_POOL": "taskcluster", + "MOZ_BUILD_DATE": config.params["moz_build_date"], + "MOZ_SCM_LEVEL": config.params["level"], + "PYTHONUNBUFFERED": "1", + } + ) + + worker.setdefault("required-volumes", []).append(env["WORKSPACE"]) + + if "actions" in run: + env["MOZHARNESS_ACTIONS"] = " ".join(run.pop("actions")) + + if "options" in run: + env["MOZHARNESS_OPTIONS"] = " ".join(run.pop("options")) + + if "config-paths" in run: + env["MOZHARNESS_CONFIG_PATHS"] = " ".join(run.pop("config-paths")) + + if "custom-build-variant-cfg" in run: + env["MH_CUSTOM_BUILD_VARIANT_CFG"] = run.pop("custom-build-variant-cfg") + + extra_config = run.pop("extra-config", {}) + extra_config["objdir"] = "obj-build" + env["EXTRA_MOZHARNESS_CONFIG"] = six.ensure_text( + json.dumps(extra_config, sort_keys=True) + ) + + if "job-script" in run: + env["JOB_SCRIPT"] = run["job-script"] + + if config.params.is_try(): + env["TRY_COMMIT_MSG"] = config.params["message"] + + # if we're not keeping artifacts, set some env variables to empty values + # that will cause the build process to skip copying the results to the + # artifacts directory. This will have no effect for operations that are + # not builds. 
+ # if we're not keeping artifacts, set some env variables to empty values
+ # that will cause the build process to skip copying the results to the
+ # artifacts directory. This will have no effect for operations that are
+ # not builds.
+ if not run.pop("keep-artifacts"):
+ env["DIST_TARGET_UPLOADS"] = ""
+ env["DIST_UPLOADS"] = ""
+
+ # Xvfb
+ if run.pop("need-xvfb"):
+ env["NEED_XVFB"] = "true"
+ else:
+ env["NEED_XVFB"] = "false"
+
+ # Retry if mozharness returns TBPL_RETRY
+ worker["retry-exit-status"] = [4]
+
+ setup_secrets(config, job, taskdesc)
+
+ run["using"] = "run-task"
+ run["command"] = mozpath.join(
+ "${GECKO_PATH}",
+ run.pop("job-script", "taskcluster/scripts/builder/build-linux.sh"),
+ )
+ run.pop("secrets")
+ run.pop("requires-signed-builds")
+
+ configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
+
+
+@run_job_using(
+ "generic-worker",
+ "mozharness",
+ schema=mozharness_run_schema,
+ defaults=mozharness_defaults,
+)
+def mozharness_on_generic_worker(config, job, taskdesc):
+ assert job["worker"]["os"] in (
+ "windows",
+ "macosx",
+ ), "only supports windows and macOS right now: {}".format(job["label"])
+
+ run = job["run"]
+
+ # fail if invalid run options are included
+ invalid = []
+ for prop in ["need-xvfb"]:
+ if prop in run and run.pop(prop):
+ invalid.append(prop)
+ if not run.pop("keep-artifacts", True):
+ invalid.append("keep-artifacts")
+ if invalid:
+ raise Exception(
+ "Jobs run using mozharness on Windows do not support properties "
+ + ", ".join(invalid)
+ )
+
+ worker = taskdesc["worker"] = job["worker"]
+
+ worker["taskcluster-proxy"] = run.pop("taskcluster-proxy", None)
+
+ setup_secrets(config, job, taskdesc)
+
+ taskdesc["worker"].setdefault("artifacts", []).append(
+ {"name": "public/logs", "path": "logs", "type": "directory"}
+ )
+ if not worker.get("skip-artifacts", False):
+ generic_worker_add_artifacts(config, job, taskdesc)
+
+ env = worker["env"]
+ env.update(
+ {
+ "MOZ_BUILD_DATE": config.params["moz_build_date"],
+ "MOZ_SCM_LEVEL": config.params["level"],
+ "MH_BRANCH": config.params["project"],
+ "MOZ_SOURCE_CHANGESET": get_branch_rev(config),
+ "MOZ_SOURCE_REPO": get_branch_repo(config),
+ }
+ )
+ if run.pop("use-simple-package"):
+ env.update({"MOZ_SIMPLE_PACKAGE_NAME": "target"})
+
+ extra_config = run.pop("extra-config", {})
+ extra_config["objdir"] = "obj-build"
+ env["EXTRA_MOZHARNESS_CONFIG"] = six.ensure_text(
+ json.dumps(extra_config, sort_keys=True)
+ )
+
+ # The windows generic worker uses batch files to pass environment variables
+ # to commands. Setting a variable to empty in a batch file unsets it, so if
+ # there is no commit message, pass a placeholder value instead, so that
+ # mozharness doesn't try to find the commit message on its own.
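+ # (Illustrative cmd.exe behaviour, not code from this patch: "set FOO="
+ # deletes FOO entirely, whereas "set FOO=no commit message" keeps it
+ # defined with that placeholder.)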
+ if config.params.is_try(): + env["TRY_COMMIT_MSG"] = config.params["message"] or "no commit message" + + if not job["attributes"]["build_platform"].startswith(("win", "macosx")): + raise Exception( + "Task generation for mozharness build jobs currently only supported on " + "Windows and macOS" + ) + + mh_command = [] + if job["worker"]["os"] == "windows": + mh_command.append("c:/mozilla-build/python3/python3.exe") + gecko_path = "%GECKO_PATH%" + else: + gecko_path = "$GECKO_PATH" + + mh_command += [ + "{}/mach".format(gecko_path), + "python", + "--no-activate", + "{}/testing/{}".format(gecko_path, run.pop("script")), + ] + + for path in run.pop("config-paths", []): + mh_command.append("--extra-config-path {}/{}".format(gecko_path, path)) + + for cfg in run.pop("config"): + mh_command.extend(("--config", cfg)) + if run.pop("use-magic-mh-args"): + mh_command.extend(("--branch", config.params["project"])) + if job["worker"]["os"] == "windows": + mh_command.extend(("--work-dir", r"%cd:Z:=z:%\workspace")) + for action in run.pop("actions", []): + mh_command.append("--" + action) + + for option in run.pop("options", []): + mh_command.append("--" + option) + if run.get("custom-build-variant-cfg"): + mh_command.append("--custom-build-variant") + mh_command.append(run.pop("custom-build-variant-cfg")) + + if job["worker"]["os"] == "macosx": + # Ideally, we'd use shellutil.quote, but that would single-quote + # $GECKO_PATH, which would defeat having the variable in the command + # in the first place, as it wouldn't be expanded. + # In practice, arguments are expected not to contain characters that + # would require quoting. + mh_command = " ".join(mh_command) + + run["using"] = "run-task" + run["command"] = mh_command + run.pop("secrets") + run.pop("requires-signed-builds") + run.pop("job-script", None) + configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"]) + + # Everything past this point is Windows-specific. + if job["worker"]["os"] == "macosx": + return + + if taskdesc.get("use-sccache"): + worker["command"] = ( + [ + # Make the comment part of the first command, as it will help users to + # understand what is going on, and why these steps are implemented. + dedent( + """\ + :: sccache currently uses the full compiler commandline as input to the + :: cache hash key, so create a symlink to the task dir and build from + :: the symlink dir to get consistent paths. + if exist z:\\build rmdir z:\\build""" + ), + r"mklink /d z:\build %cd%", + # Grant delete permission on the link to everyone. + r"icacls z:\build /grant *S-1-1-0:D /L", + r"cd /d z:\build", + ] + + worker["command"] + ) diff --git a/taskcluster/taskgraph/transforms/job/mozharness_test.py b/taskcluster/taskgraph/transforms/job/mozharness_test.py new file mode 100644 index 0000000000..ff29dcc9fb --- /dev/null +++ b/taskcluster/taskgraph/transforms/job/mozharness_test.py @@ -0,0 +1,452 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
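+#
+# A task reaches this transform with a run stanza along these lines (an
+# illustrative sketch; the `test` payload is the full test description
+# produced by the test transforms and is abbreviated here):
+#
+# run:
+# using: mozharness-test
+# test:
+# suite: mochitest
+# mozharness:
+# script: desktop_unittest.py
+# config: [unittests/linux_unittest.py]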
+ +from __future__ import absolute_import, print_function, unicode_literals + +import json +import os +import re + +import six +from six import text_type +from voluptuous import Required, Optional + +from taskgraph.util.taskcluster import get_artifact_url +from taskgraph.transforms.job import ( + configure_taskdesc_for_run, + run_job_using, +) +from taskgraph.util.schema import Schema +from taskgraph.util.taskcluster import get_artifact_path +from taskgraph.transforms.tests import test_description_schema, normpath +from taskgraph.transforms.job.common import support_vcs_checkout + +VARIANTS = [ + "shippable", + "shippable-qr", + "devedition", + "pgo", + "asan", + "stylo", + "qr", + "ccov", +] + + +def get_variant(test_platform): + for v in VARIANTS: + if "-{}/".format(v) in test_platform: + return v + return "" + + +mozharness_test_run_schema = Schema( + { + Required("using"): "mozharness-test", + Required("test"): test_description_schema, + # Base work directory used to set up the task. + Optional("workdir"): text_type, + } +) + + +def test_packages_url(taskdesc): + """Account for different platforms that name their test packages differently""" + artifact_url = get_artifact_url( + "<build>", get_artifact_path(taskdesc, "target.test_packages.json") + ) + # for android shippable we need to add 'en-US' to the artifact url + test = taskdesc["run"]["test"] + if "android" in test["test-platform"] and ( + get_variant(test["test-platform"]) in ("shippable", "shippable-qr") + ): + head, tail = os.path.split(artifact_url) + artifact_url = os.path.join(head, "en-US", tail) + return artifact_url + + +def installer_url(taskdesc): + test = taskdesc["run"]["test"] + mozharness = test["mozharness"] + + if "installer-url" in mozharness: + installer_url = mozharness["installer-url"] + else: + upstream_task = ( + "<build-signing>" if mozharness["requires-signed-builds"] else "<build>" + ) + installer_url = get_artifact_url( + upstream_task, mozharness["build-artifact-name"] + ) + + return installer_url + + +@run_job_using("docker-worker", "mozharness-test", schema=mozharness_test_run_schema) +def mozharness_test_on_docker(config, job, taskdesc): + run = job["run"] + test = taskdesc["run"]["test"] + mozharness = test["mozharness"] + worker = taskdesc["worker"] = job["worker"] + + # apply some defaults + worker["docker-image"] = test["docker-image"] + worker["allow-ptrace"] = True # required for all tests, for crashreporter + worker["loopback-video"] = test["loopback-video"] + worker["loopback-audio"] = test["loopback-audio"] + worker["max-run-time"] = test["max-run-time"] + worker["retry-exit-status"] = test["retry-exit-status"] + if "android-em-7.0-x86" in test["test-platform"]: + worker["privileged"] = True + + artifacts = [ + # (artifact name prefix, in-image path) + ("public/logs/", "{workdir}/workspace/logs/".format(**run)), + ("public/test", "{workdir}/artifacts/".format(**run)), + ( + "public/test_info/", + "{workdir}/workspace/build/blobber_upload_dir/".format(**run), + ), + ] + + installer = installer_url(taskdesc) + + mozharness_url = get_artifact_url( + "<build>", get_artifact_path(taskdesc, "mozharness.zip") + ) + + worker.setdefault("artifacts", []) + worker["artifacts"].extend( + [ + { + "name": prefix, + "path": os.path.join("{workdir}/workspace".format(**run), path), + "type": "directory", + } + for (prefix, path) in artifacts + ] + ) + + env = worker.setdefault("env", {}) + env.update( + { + "MOZHARNESS_CONFIG": " ".join(mozharness["config"]), + "MOZHARNESS_SCRIPT": mozharness["script"], + 
"MOZILLA_BUILD_URL": {"task-reference": installer}, + "NEED_PULSEAUDIO": "true", + "NEED_WINDOW_MANAGER": "true", + "ENABLE_E10S": text_type(bool(test.get("e10s"))).lower(), + "WORKING_DIR": "/builds/worker", + } + ) + + if test.get("python-3"): + env["PYTHON"] = "python3" + + # Legacy linux64 tests rely on compiz. + if test.get("docker-image", {}).get("in-tree") == "desktop1604-test": + env.update({"NEED_COMPIZ": "true"}) + + # Bug 1602701/1601828 - use compiz on ubuntu1804 due to GTK asynchiness + # when manipulating windows. + if test.get("docker-image", {}).get("in-tree") == "ubuntu1804-test": + if "wdspec" in job["run"]["test"]["suite"] or ( + "marionette" in job["run"]["test"]["suite"] + and "headless" not in job["label"] + ): + env.update({"NEED_COMPIZ": "true"}) + + if mozharness.get("mochitest-flavor"): + env["MOCHITEST_FLAVOR"] = mozharness["mochitest-flavor"] + + if mozharness["set-moz-node-path"]: + env["MOZ_NODE_PATH"] = "/usr/local/bin/node" + + if "actions" in mozharness: + env["MOZHARNESS_ACTIONS"] = " ".join(mozharness["actions"]) + + if config.params.is_try(): + env["TRY_COMMIT_MSG"] = config.params["message"] + + # handle some of the mozharness-specific options + if test["reboot"]: + raise Exception( + "reboot: {} not supported on generic-worker".format(test["reboot"]) + ) + + # Support vcs checkouts regardless of whether the task runs from + # source or not in case it is needed on an interactive loaner. + support_vcs_checkout(config, job, taskdesc) + + # If we have a source checkout, run mozharness from it instead of + # downloading a zip file with the same content. + if test["checkout"]: + env["MOZHARNESS_PATH"] = "{workdir}/checkouts/gecko/testing/mozharness".format( + **run + ) + else: + env["MOZHARNESS_URL"] = {"task-reference": mozharness_url} + + extra_config = { + "installer_url": installer, + "test_packages_url": test_packages_url(taskdesc), + } + env["EXTRA_MOZHARNESS_CONFIG"] = { + "task-reference": six.ensure_text(json.dumps(extra_config, sort_keys=True)) + } + + # Bug 1634554 - pass in decision task artifact URL to mozharness for WPT. + # Bug 1645974 - test-verify-wpt and test-coverage-wpt need artifact URL. 
+ if "web-platform-tests" in test["suite"] or re.match( + "test-(coverage|verify)-wpt", test["suite"] + ): + env["TESTS_BY_MANIFEST_URL"] = { + "artifact-reference": "<decision/public/tests-by-manifest.json.gz>" + } + + command = [ + "{workdir}/bin/test-linux.sh".format(**run), + ] + command.extend(mozharness.get("extra-options", [])) + + if test.get("test-manifests"): + env["MOZHARNESS_TEST_PATHS"] = six.ensure_text( + json.dumps({test["suite"]: test["test-manifests"]}, sort_keys=True) + ) + + # TODO: remove the need for run['chunked'] + elif mozharness.get("chunked") or test["chunks"] > 1: + command.append("--total-chunk={}".format(test["chunks"])) + command.append("--this-chunk={}".format(test["this-chunk"])) + + if "download-symbols" in mozharness: + download_symbols = mozharness["download-symbols"] + download_symbols = {True: "true", False: "false"}.get( + download_symbols, download_symbols + ) + command.append("--download-symbols=" + download_symbols) + + job["run"] = { + "workdir": run["workdir"], + "tooltool-downloads": mozharness["tooltool-downloads"], + "checkout": test["checkout"], + "command": command, + "using": "run-task", + } + configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"]) + + +@run_job_using("generic-worker", "mozharness-test", schema=mozharness_test_run_schema) +def mozharness_test_on_generic_worker(config, job, taskdesc): + test = taskdesc["run"]["test"] + mozharness = test["mozharness"] + worker = taskdesc["worker"] = job["worker"] + + bitbar_script = "test-linux.sh" + + is_macosx = worker["os"] == "macosx" + is_windows = worker["os"] == "windows" + is_linux = worker["os"] == "linux" or worker["os"] == "linux-bitbar" + is_bitbar = worker["os"] == "linux-bitbar" + assert is_macosx or is_windows or is_linux + + artifacts = [ + {"name": "public/logs", "path": "logs", "type": "directory"}, + ] + + # jittest doesn't have blob_upload_dir + if test["test-name"] != "jittest": + artifacts.append( + { + "name": "public/test_info", + "path": "build/blobber_upload_dir", + "type": "directory", + } + ) + + if is_bitbar: + artifacts = [ + {"name": "public/test/", "path": "artifacts/public", "type": "directory"}, + {"name": "public/logs/", "path": "workspace/logs", "type": "directory"}, + { + "name": "public/test_info/", + "path": "workspace/build/blobber_upload_dir", + "type": "directory", + }, + ] + + installer = installer_url(taskdesc) + + worker["os-groups"] = test["os-groups"] + + # run-as-administrator is a feature for workers with UAC enabled and as such should not be + # included in tasks on workers that have UAC disabled. Currently UAC is only enabled on + # gecko Windows 10 workers, however this may be subject to change. Worker type + # environment definitions can be found in https://github.com/mozilla-releng/OpenCloudConfig + # See https://docs.microsoft.com/en-us/windows/desktop/secauthz/user-account-control + # for more information about UAC. 
+ if test.get("run-as-administrator", False): + if job["worker-type"].startswith("t-win10-64"): + worker["run-as-administrator"] = True + else: + raise Exception( + "run-as-administrator not supported on {}".format(job["worker-type"]) + ) + + if test["reboot"]: + raise Exception( + "reboot: {} not supported on generic-worker".format(test["reboot"]) + ) + + worker["max-run-time"] = test["max-run-time"] + worker["retry-exit-status"] = test["retry-exit-status"] + worker.setdefault("artifacts", []) + worker["artifacts"].extend(artifacts) + + env = worker.setdefault("env", {}) + env["GECKO_HEAD_REPOSITORY"] = config.params["head_repository"] + env["GECKO_HEAD_REV"] = config.params["head_rev"] + + # this list will get cleaned up / reduced / removed in bug 1354088 + if is_macosx: + env.update( + { + "LC_ALL": "en_US.UTF-8", + "LANG": "en_US.UTF-8", + "MOZ_NODE_PATH": "/usr/local/bin/node", + "PATH": "/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin", + "SHELL": "/bin/bash", + } + ) + elif is_bitbar: + env.update( + { + "MOZHARNESS_CONFIG": " ".join(mozharness["config"]), + "MOZHARNESS_SCRIPT": mozharness["script"], + "MOZHARNESS_URL": { + "artifact-reference": "<build/public/build/mozharness.zip>" + }, + "MOZILLA_BUILD_URL": {"task-reference": installer}, + "MOZ_NO_REMOTE": "1", + "NEED_XVFB": "false", + "XPCOM_DEBUG_BREAK": "warn", + "NO_FAIL_ON_TEST_ERRORS": "1", + "MOZ_HIDE_RESULTS_TABLE": "1", + "MOZ_NODE_PATH": "/usr/local/bin/node", + "TASKCLUSTER_WORKER_TYPE": job["worker-type"], + } + ) + + extra_config = { + "installer_url": installer, + "test_packages_url": test_packages_url(taskdesc), + } + env["EXTRA_MOZHARNESS_CONFIG"] = { + "task-reference": six.ensure_text(json.dumps(extra_config, sort_keys=True)) + } + + # Bug 1634554 - pass in decision task artifact URL to mozharness for WPT. + # Bug 1645974 - test-verify-wpt and test-coverage-wpt need artifact URL. + if "web-platform-tests" in test["suite"] or re.match( + "test-(coverage|verify)-wpt", test["suite"] + ): + env["TESTS_BY_MANIFEST_URL"] = { + "artifact-reference": "<decision/public/tests-by-manifest.json.gz>" + } + + py_3 = test.get("python-3", False) + + if is_windows: + py_binary = "c:\\mozilla-build\\{python}\\{python}.exe".format( + python="python3" if py_3 else "python" + ) + mh_command = [ + py_binary, + "-u", + "mozharness\\scripts\\" + normpath(mozharness["script"]), + ] + elif is_bitbar: + py_binary = "python3" if py_3 else "python" + mh_command = ["bash", "./{}".format(bitbar_script)] + elif is_macosx and "macosx1014-64" in test["test-platform"]: + py_binary = "/usr/local/bin/{}".format("python3" if py_3 else "python2") + mh_command = [ + py_binary, + "-u", + "mozharness/scripts/" + mozharness["script"], + ] + else: + # is_linux or is_macosx + py_binary = "/usr/bin/{}".format("python3" if py_3 else "python2") + mh_command = [ + # Using /usr/bin/python2.7 rather than python2.7 because + # /usr/local/bin/python2.7 is broken on the mac workers. + # See bug #1547903. 
+ py_binary, + "-u", + "mozharness/scripts/" + mozharness["script"], + ] + + if py_3: + env["PYTHON"] = py_binary + + for mh_config in mozharness["config"]: + cfg_path = "mozharness/configs/" + mh_config + if is_windows: + cfg_path = normpath(cfg_path) + mh_command.extend(["--cfg", cfg_path]) + mh_command.extend(mozharness.get("extra-options", [])) + if mozharness.get("download-symbols"): + if isinstance(mozharness["download-symbols"], text_type): + mh_command.extend(["--download-symbols", mozharness["download-symbols"]]) + else: + mh_command.extend(["--download-symbols", "true"]) + if mozharness.get("include-blob-upload-branch"): + mh_command.append("--blob-upload-branch=" + config.params["project"]) + + if test.get("test-manifests"): + env["MOZHARNESS_TEST_PATHS"] = six.ensure_text( + json.dumps({test["suite"]: test["test-manifests"]}, sort_keys=True) + ) + + # TODO: remove the need for run['chunked'] + elif mozharness.get("chunked") or test["chunks"] > 1: + mh_command.append("--total-chunk={}".format(test["chunks"])) + mh_command.append("--this-chunk={}".format(test["this-chunk"])) + + if config.params.is_try(): + env["TRY_COMMIT_MSG"] = config.params["message"] + + worker["mounts"] = [ + { + "directory": "mozharness", + "content": { + "artifact": get_artifact_path(taskdesc, "mozharness.zip"), + "task-id": {"task-reference": "<build>"}, + }, + "format": "zip", + } + ] + if is_bitbar: + a_url = config.params.file_url( + "taskcluster/scripts/tester/{}".format(bitbar_script), + ) + worker["mounts"] = [ + { + "file": bitbar_script, + "content": { + "url": a_url, + }, + } + ] + + job["run"] = { + "tooltool-downloads": mozharness["tooltool-downloads"], + "checkout": test["checkout"], + "command": mh_command, + "using": "run-task", + } + if is_bitbar: + job["run"]["run-as-root"] = True + configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"]) diff --git a/taskcluster/taskgraph/transforms/job/python_test.py b/taskcluster/taskgraph/transforms/job/python_test.py new file mode 100644 index 0000000000..b998102573 --- /dev/null +++ b/taskcluster/taskgraph/transforms/job/python_test.py @@ -0,0 +1,48 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Support for running mach python-test tasks (via run-task) +""" + +from __future__ import absolute_import, print_function, unicode_literals + +from six import text_type +from taskgraph.transforms.job import run_job_using, configure_taskdesc_for_run +from taskgraph.util.schema import Schema +from voluptuous import Required, Optional + +python_test_schema = Schema( + { + Required("using"): "python-test", + # Python version to use + Required("python-version"): int, + # The subsuite to run + Required("subsuite"): text_type, + # Base work directory used to set up the task. 
+ Optional("workdir"): text_type, + } +) + + +defaults = { + "python-version": 2, + "subsuite": "default", +} + + +@run_job_using( + "docker-worker", "python-test", schema=python_test_schema, defaults=defaults +) +@run_job_using( + "generic-worker", "python-test", schema=python_test_schema, defaults=defaults +) +def configure_python_test(config, job, taskdesc): + run = job["run"] + worker = job["worker"] + + # defer to the mach implementation + run["mach"] = ("python-test --subsuite {subsuite} --run-slow").format(**run) + run["using"] = "mach" + del run["subsuite"] + configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"]) diff --git a/taskcluster/taskgraph/transforms/job/run_task.py b/taskcluster/taskgraph/transforms/job/run_task.py new file mode 100644 index 0000000000..aab2bde7af --- /dev/null +++ b/taskcluster/taskgraph/transforms/job/run_task.py @@ -0,0 +1,238 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Support for running jobs that are invoked via the `run-task` script. +""" + +from __future__ import absolute_import, print_function, unicode_literals + +from six import text_type + +from mozpack import path + +from taskgraph.transforms.task import taskref_or_string +from taskgraph.transforms.job import run_job_using +from taskgraph.util.schema import Schema +from taskgraph.transforms.job.common import add_tooltool, support_vcs_checkout +from voluptuous import Any, Optional, Required + +run_task_schema = Schema( + { + Required("using"): "run-task", + # if true, add a cache at ~worker/.cache, which is where things like pip + # tend to hide their caches. This cache is never added for level-1 jobs. + # TODO Once bug 1526028 is fixed, this and 'use-caches' should be merged. + Required("cache-dotcache"): bool, + # Whether or not to use caches. + Optional("use-caches"): bool, + # if true (the default), perform a checkout of gecko on the worker + Required("checkout"): bool, + Optional( + "cwd", + description="Path to run command in. If a checkout is present, the path " + "to the checkout will be interpolated with the key `checkout`", + ): text_type, + # The sparse checkout profile to use. Value is the filename relative to the + # directory where sparse profiles are defined (build/sparse-profiles/). + Required("sparse-profile"): Any(text_type, None), + # if true, perform a checkout of a comm-central based branch inside the + # gecko checkout + Required("comm-checkout"): bool, + # The command arguments to pass to the `run-task` script, after the + # checkout arguments. If a list, it will be passed directly; otherwise + # it will be included in a single argument to `bash -cx`. + Required("command"): Any([taskref_or_string], taskref_or_string), + # Base work directory used to set up the task. + Optional("workdir"): text_type, + # If not false, tooltool downloads will be enabled via relengAPIProxy + # for either just public files, or all files. Only supported on + # docker-worker. + Required("tooltool-downloads"): Any( + False, + "public", + "internal", + ), + # Whether to run as root. 
(defaults to False) + Optional("run-as-root"): bool, + } +) + + +def common_setup(config, job, taskdesc, command): + run = job["run"] + if run["checkout"]: + support_vcs_checkout(config, job, taskdesc, sparse=bool(run["sparse-profile"])) + command.append( + "--gecko-checkout={}".format(taskdesc["worker"]["env"]["GECKO_PATH"]) + ) + + if run["sparse-profile"]: + command.append( + "--gecko-sparse-profile=build/sparse-profiles/%s" % run["sparse-profile"] + ) + + taskdesc["worker"].setdefault("env", {})["MOZ_SCM_LEVEL"] = config.params["level"] + + +worker_defaults = { + "cache-dotcache": False, + "checkout": True, + "comm-checkout": False, + "sparse-profile": None, + "tooltool-downloads": False, + "run-as-root": False, +} + + +def script_url(config, script): + return config.params.file_url( + "taskcluster/scripts/{}".format(script), + ) + + +@run_job_using( + "docker-worker", "run-task", schema=run_task_schema, defaults=worker_defaults +) +def docker_worker_run_task(config, job, taskdesc): + run = job["run"] + worker = taskdesc["worker"] = job["worker"] + command = ["/builds/worker/bin/run-task"] + common_setup(config, job, taskdesc, command) + + if run["tooltool-downloads"]: + internal = run["tooltool-downloads"] == "internal" + add_tooltool(config, job, taskdesc, internal=internal) + + if run.get("cache-dotcache"): + worker["caches"].append( + { + "type": "persistent", + "name": "{project}-dotcache".format(**config.params), + "mount-point": "{workdir}/.cache".format(**run), + "skip-untrusted": True, + } + ) + + run_command = run["command"] + run_cwd = run.get("cwd") + if run_cwd and run["checkout"]: + run_cwd = path.normpath( + run_cwd.format(checkout=taskdesc["worker"]["env"]["GECKO_PATH"]) + ) + elif run_cwd and "{checkout}" in run_cwd: + raise Exception( + "Found `{{checkout}}` interpolation in `cwd` for task {name} " + "but the task doesn't have a checkout: {cwd}".format( + cwd=run_cwd, name=job.get("name", job.get("label")) + ) + ) + + # dict is for the case of `{'task-reference': text_type}`. 
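+ # For instance (illustrative): a command given as "echo hello", or as
+ # {"task-reference": "unpack <build>"}, is wrapped into
+ # ["bash", "-cx", <command>] so the whole string runs through one shell.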
+ if isinstance(run_command, (text_type, dict)): + run_command = ["bash", "-cx", run_command] + if run["comm-checkout"]: + command.append( + "--comm-checkout={}/comm".format(taskdesc["worker"]["env"]["GECKO_PATH"]) + ) + command.append("--fetch-hgfingerprint") + if run["run-as-root"]: + command.extend(("--user", "root", "--group", "root")) + if run_cwd: + command.extend(("--task-cwd", run_cwd)) + command.append("--") + command.extend(run_command) + worker["command"] = command + + +@run_job_using( + "generic-worker", "run-task", schema=run_task_schema, defaults=worker_defaults +) +def generic_worker_run_task(config, job, taskdesc): + run = job["run"] + worker = taskdesc["worker"] = job["worker"] + is_win = worker["os"] == "windows" + is_mac = worker["os"] == "macosx" + is_bitbar = worker["os"] == "linux-bitbar" + + if run["tooltool-downloads"]: + internal = run["tooltool-downloads"] == "internal" + add_tooltool(config, job, taskdesc, internal=internal) + + if is_win: + command = ["C:/mozilla-build/python3/python3.exe", "run-task"] + elif is_mac: + command = ["/usr/local/bin/python3", "run-task"] + else: + command = ["./run-task"] + + common_setup(config, job, taskdesc, command) + + worker.setdefault("mounts", []) + if run.get("cache-dotcache"): + worker["mounts"].append( + { + "cache-name": "{project}-dotcache".format(**config.params), + "directory": "{workdir}/.cache".format(**run), + } + ) + worker["mounts"].append( + { + "content": { + "url": script_url(config, "run-task"), + }, + "file": "./run-task", + } + ) + if worker.get("env", {}).get("MOZ_FETCHES"): + worker["mounts"].append( + { + "content": { + "url": script_url(config, "misc/fetch-content"), + }, + "file": "./fetch-content", + } + ) + + run_command = run["command"] + run_cwd = run.get("cwd") + if run_cwd and run["checkout"]: + run_cwd = path.normpath( + run_cwd.format(checkout=taskdesc["worker"]["env"]["GECKO_PATH"]) + ) + elif run_cwd and "{checkout}" in run_cwd: + raise Exception( + "Found `{{checkout}}` interpolation in `cwd` for task {name} " + "but the task doesn't have a checkout: {cwd}".format( + cwd=run_cwd, name=job.get("name", job.get("label")) + ) + ) + + if isinstance(run_command, text_type): + if is_win: + run_command = '"{}"'.format(run_command) + run_command = ["bash", "-cx", run_command] + + if run["comm-checkout"]: + command.append( + "--comm-checkout={}/comm".format(taskdesc["worker"]["env"]["GECKO_PATH"]) + ) + + if run["run-as-root"]: + command.extend(("--user", "root", "--group", "root")) + if run_cwd: + command.extend(("--task-cwd", run_cwd)) + command.append("--") + if is_bitbar: + # Use the bitbar wrapper script which sets up the device and adb + # environment variables + command.append("/builds/taskcluster/script.py") + command.extend(run_command) + + if is_win: + worker["command"] = [" ".join(command)] + else: + worker["command"] = [ + ["chmod", "+x", "run-task"], + command, + ] diff --git a/taskcluster/taskgraph/transforms/job/spidermonkey.py b/taskcluster/taskgraph/transforms/job/spidermonkey.py new file mode 100644 index 0000000000..82c9303db1 --- /dev/null +++ b/taskcluster/taskgraph/transforms/job/spidermonkey.py @@ -0,0 +1,132 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
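+#
+# A kind selects this transform with a run stanza such as the following (an
+# illustrative sketch; variant names differ per platform):
+#
+# run:
+# using: spidermonkey
+# spidermonkey-variant: plain
+# tooltool-downloads: public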
+""" +Support for running spidermonkey jobs via dedicated scripts +""" + +from __future__ import absolute_import, print_function, unicode_literals + +from six import text_type +from taskgraph.util.schema import Schema +from voluptuous import Required, Any, Optional + +from taskgraph.transforms.job import ( + run_job_using, + configure_taskdesc_for_run, +) +from taskgraph.transforms.job.common import ( + docker_worker_add_artifacts, + generic_worker_add_artifacts, +) + +sm_run_schema = Schema( + { + Required("using"): Any( + "spidermonkey", + "spidermonkey-package", + "spidermonkey-mozjs-crate", + "spidermonkey-rust-bindings", + ), + # SPIDERMONKEY_VARIANT and SPIDERMONKEY_PLATFORM + Required("spidermonkey-variant"): text_type, + Optional("spidermonkey-platform"): text_type, + # Base work directory used to set up the task. + Optional("workdir"): text_type, + Required("tooltool-downloads"): Any( + False, + "public", + "internal", + ), + } +) + + +@run_job_using("docker-worker", "spidermonkey", schema=sm_run_schema) +@run_job_using("docker-worker", "spidermonkey-package", schema=sm_run_schema) +@run_job_using("docker-worker", "spidermonkey-mozjs-crate", schema=sm_run_schema) +@run_job_using("docker-worker", "spidermonkey-rust-bindings", schema=sm_run_schema) +def docker_worker_spidermonkey(config, job, taskdesc): + run = job["run"] + + worker = taskdesc["worker"] = job["worker"] + worker.setdefault("artifacts", []) + + docker_worker_add_artifacts(config, job, taskdesc) + + env = worker.setdefault("env", {}) + env.update( + { + "MOZHARNESS_DISABLE": "true", + "SPIDERMONKEY_VARIANT": run.pop("spidermonkey-variant"), + "MOZ_BUILD_DATE": config.params["moz_build_date"], + "MOZ_SCM_LEVEL": config.params["level"], + } + ) + if "spidermonkey-platform" in run: + env["SPIDERMONKEY_PLATFORM"] = run.pop("spidermonkey-platform") + + script = "build-sm.sh" + if run["using"] == "spidermonkey-package": + script = "build-sm-package.sh" + elif run["using"] == "spidermonkey-mozjs-crate": + script = "build-sm-mozjs-crate.sh" + elif run["using"] == "spidermonkey-rust-bindings": + script = "build-sm-rust-bindings.sh" + + run["using"] = "run-task" + run["cwd"] = run["workdir"] + run["command"] = [ + "./checkouts/gecko/taskcluster/scripts/builder/{script}".format(script=script) + ] + + configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"]) + + +@run_job_using("generic-worker", "spidermonkey", schema=sm_run_schema) +def generic_worker_spidermonkey(config, job, taskdesc): + assert job["worker"]["os"] == "windows", "only supports windows right now" + + run = job["run"] + + worker = taskdesc["worker"] = job["worker"] + + generic_worker_add_artifacts(config, job, taskdesc) + + env = worker.setdefault("env", {}) + env.update( + { + "MOZHARNESS_DISABLE": "true", + "SPIDERMONKEY_VARIANT": run.pop("spidermonkey-variant"), + "MOZ_BUILD_DATE": config.params["moz_build_date"], + "MOZ_SCM_LEVEL": config.params["level"], + "SCCACHE_DISABLE": "1", + "WORK": ".", # Override the defaults in build scripts + "GECKO_PATH": "./src", # with values suiteable for windows generic worker + "UPLOAD_DIR": "./public/build", + } + ) + if "spidermonkey-platform" in run: + env["SPIDERMONKEY_PLATFORM"] = run.pop("spidermonkey-platform") + + script = "build-sm.sh" + if run["using"] == "spidermonkey-package": + script = "build-sm-package.sh" + # Don't allow untested configurations yet + raise Exception("spidermonkey-package is not a supported configuration") + elif run["using"] == "spidermonkey-mozjs-crate": + script = 
"build-sm-mozjs-crate.sh" + # Don't allow untested configurations yet + raise Exception("spidermonkey-mozjs-crate is not a supported configuration") + elif run["using"] == "spidermonkey-rust-bindings": + script = "build-sm-rust-bindings.sh" + # Don't allow untested configurations yet + raise Exception("spidermonkey-rust-bindings is not a supported configuration") + + run["using"] = "run-task" + run["command"] = [ + "c:\\mozilla-build\\msys\\bin\\bash.exe " # string concat + '"./src/taskcluster/scripts/builder/%s"' % script + ] + + configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"]) diff --git a/taskcluster/taskgraph/transforms/job/toolchain.py b/taskcluster/taskgraph/transforms/job/toolchain.py new file mode 100644 index 0000000000..4197517898 --- /dev/null +++ b/taskcluster/taskgraph/transforms/job/toolchain.py @@ -0,0 +1,234 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Support for running toolchain-building jobs via dedicated scripts +""" + +from __future__ import absolute_import, print_function, unicode_literals + +from mozbuild.shellutil import quote as shell_quote + +from six import text_type +from taskgraph.util.schema import Schema +from voluptuous import Optional, Required, Any + +from taskgraph.transforms.job import ( + configure_taskdesc_for_run, + run_job_using, +) +from taskgraph.transforms.job.common import ( + docker_worker_add_artifacts, + generic_worker_add_artifacts, +) +from taskgraph.util.hash import hash_paths +from taskgraph.util.attributes import RELEASE_PROJECTS +from taskgraph import GECKO +import taskgraph + + +CACHE_TYPE = "toolchains.v3" + +toolchain_run_schema = Schema( + { + Required("using"): "toolchain-script", + # The script (in taskcluster/scripts/misc) to run. + # Python scripts are invoked with `mach python` so vendored libraries + # are available. + Required("script"): text_type, + # Arguments to pass to the script. + Optional("arguments"): [text_type], + # If not false, tooltool downloads will be enabled via relengAPIProxy + # for either just public files, or all files. Not supported on Windows + Required("tooltool-downloads"): Any( + False, + "public", + "internal", + ), + # Sparse profile to give to checkout using `run-task`. If given, + # a filename in `build/sparse-profiles`. Defaults to + # "toolchain-build", i.e., to + # `build/sparse-profiles/toolchain-build`. If `None`, instructs + # `run-task` to not use a sparse profile at all. + Required("sparse-profile"): Any(text_type, None), + # Paths/patterns pointing to files that influence the outcome of a + # toolchain build. + Optional("resources"): [text_type], + # Path to the artifact produced by the toolchain job + Required("toolchain-artifact"): text_type, + Optional( + "toolchain-alias", + description="An alias that can be used instead of the real toolchain job name in " + "fetch stanzas for jobs.", + ): text_type, + # Base work directory used to set up the task. + Optional("workdir"): text_type, + } +) + + +def get_digest_data(config, run, taskdesc): + files = list(run.pop("resources", [])) + # The script + files.append("taskcluster/scripts/misc/{}".format(run["script"])) + # Tooltool manifest if any is defined: + tooltool_manifest = taskdesc["worker"]["env"].get("TOOLTOOL_MANIFEST") + if tooltool_manifest: + files.append(tooltool_manifest) + + # Accumulate dependency hashes for index generation. 
+ data = [hash_paths(GECKO, files)] + + data.append(taskdesc["attributes"]["toolchain-artifact"]) + + # If the task uses an in-tree docker image, we want it to influence + # the index path as well. Ideally, the content of the docker image itself + # should have an influence, but at the moment, we can't get that + # information here. So use the docker image name as a proxy. Not a lot of + # changes to docker images actually have an impact on the resulting + # toolchain artifact, so we'll just rely on such important changes to be + # accompanied with a docker image name change. + image = taskdesc["worker"].get("docker-image", {}).get("in-tree") + if image: + data.append(image) + + # Likewise script arguments should influence the index. + args = run.get("arguments") + if args: + data.extend(args) + + if taskdesc["attributes"].get("rebuild-on-release"): + # Add whether this is a release branch or not + data.append(str(config.params["project"] in RELEASE_PROJECTS)) + return data + + +toolchain_defaults = { + "tooltool-downloads": False, + "sparse-profile": "toolchain-build", +} + + +@run_job_using( + "docker-worker", + "toolchain-script", + schema=toolchain_run_schema, + defaults=toolchain_defaults, +) +def docker_worker_toolchain(config, job, taskdesc): + run = job["run"] + + worker = taskdesc["worker"] = job["worker"] + worker["chain-of-trust"] = True + + # If the task doesn't have a docker-image, set a default + worker.setdefault("docker-image", {"in-tree": "deb8-toolchain-build"}) + + # Allow the job to specify where artifacts come from, but add + # public/build if it's not there already. + artifacts = worker.setdefault("artifacts", []) + if not artifacts: + docker_worker_add_artifacts(config, job, taskdesc) + + # Toolchain checkouts don't live under {workdir}/checkouts + workspace = "{workdir}/workspace/build".format(**run) + gecko_path = "{}/src".format(workspace) + + env = worker.setdefault("env", {}) + env.update( + { + "MOZ_BUILD_DATE": config.params["moz_build_date"], + "MOZ_SCM_LEVEL": config.params["level"], + "GECKO_PATH": gecko_path, + } + ) + + attributes = taskdesc.setdefault("attributes", {}) + attributes["toolchain-artifact"] = run.pop("toolchain-artifact") + if "toolchain-alias" in run: + attributes["toolchain-alias"] = run.pop("toolchain-alias") + + if not taskgraph.fast: + name = taskdesc["label"].replace("{}-".format(config.kind), "", 1) + taskdesc["cache"] = { + "type": CACHE_TYPE, + "name": name, + "digest-data": get_digest_data(config, run, taskdesc), + } + + run["using"] = "run-task" + run["cwd"] = run["workdir"] + run["command"] = [ + "workspace/build/src/taskcluster/scripts/misc/{}".format(run.pop("script")) + ] + run.pop("arguments", []) + + configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"]) + + +@run_job_using( + "generic-worker", + "toolchain-script", + schema=toolchain_run_schema, + defaults=toolchain_defaults, +) +def generic_worker_toolchain(config, job, taskdesc): + run = job["run"] + + worker = taskdesc["worker"] = job["worker"] + worker["chain-of-trust"] = True + + # Allow the job to specify where artifacts come from, but add + # public/build if it's not there already. + artifacts = worker.setdefault("artifacts", []) + if not artifacts: + generic_worker_add_artifacts(config, job, taskdesc) + + if job["worker"]["os"] == "windows": + # There were no caches on generic-worker before bug 1519472, and they cause + # all sorts of problems with Windows toolchain tasks, disable them until + # tasks are ready. 
+ run["use-caches"] = False + + env = worker.setdefault("env", {}) + env.update( + { + "MOZ_BUILD_DATE": config.params["moz_build_date"], + "MOZ_SCM_LEVEL": config.params["level"], + } + ) + + # Use `mach` to invoke python scripts so in-tree libraries are available. + if run["script"].endswith(".py"): + raise NotImplementedError( + "Python toolchain scripts aren't supported on generic-worker" + ) + + attributes = taskdesc.setdefault("attributes", {}) + attributes["toolchain-artifact"] = run.pop("toolchain-artifact") + if "toolchain-alias" in run: + attributes["toolchain-alias"] = run.pop("toolchain-alias") + + if not taskgraph.fast: + name = taskdesc["label"].replace("{}-".format(config.kind), "", 1) + taskdesc["cache"] = { + "type": CACHE_TYPE, + "name": name, + "digest-data": get_digest_data(config, run, taskdesc), + } + + run["using"] = "run-task" + + args = run.pop("arguments", "") + if args: + args = " " + shell_quote(*args) + + if job["worker"]["os"] == "windows": + gecko_path = "%GECKO_PATH%" + else: + gecko_path = "$GECKO_PATH" + + run["command"] = "{}/taskcluster/scripts/misc/{}{}".format( + gecko_path, run.pop("script"), args + ) + + configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"]) diff --git a/taskcluster/taskgraph/transforms/l10n.py b/taskcluster/taskgraph/transforms/l10n.py new file mode 100644 index 0000000000..ffa8319b4d --- /dev/null +++ b/taskcluster/taskgraph/transforms/l10n.py @@ -0,0 +1,424 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Do transforms specific to l10n kind +""" + +from __future__ import absolute_import, print_function, unicode_literals + +import copy +import io +import json +import six + +from mozbuild.chunkify import chunkify +from six import text_type +from taskgraph.loader.multi_dep import schema +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.schema import ( + optionally_keyed_by, + resolve_keyed_by, + taskref_or_string, +) +from taskgraph.util.attributes import copy_attributes_from_dependent_job +from taskgraph.util.taskcluster import get_artifact_prefix +from taskgraph.util.treeherder import add_suffix +from taskgraph.transforms.job import job_description_schema +from taskgraph.transforms.task import task_description_schema +from voluptuous import ( + Any, + Optional, + Required, +) + + +def _by_platform(arg): + return optionally_keyed_by("build-platform", arg) + + +l10n_description_schema = schema.extend( + { + # Name for this job, inferred from the dependent job before validation + Required("name"): text_type, + # build-platform, inferred from dependent job before validation + Required("build-platform"): text_type, + # max run time of the task + Required("run-time"): _by_platform(int), + # Locales not to repack for + Required("ignore-locales"): _by_platform([text_type]), + # All l10n jobs use mozharness + Required("mozharness"): { + # Script to invoke for mozharness + Required("script"): _by_platform(text_type), + # Config files passed to the mozharness script + Required("config"): _by_platform([text_type]), + # Additional paths to look for mozharness configs in. 
+ # These should be
+ # relative to the base of the source checkout
+ Optional("config-paths"): [text_type],
+ # Options to pass to the mozharness script
+ Optional("options"): _by_platform([text_type]),
+ # Action commands to provide to mozharness script
+ Required("actions"): _by_platform([text_type]),
+ # if true, perform a checkout of a comm-central based branch inside the
+ # gecko checkout
+ Optional("comm-checkout"): bool,
+ },
+ # Items for the taskcluster index
+ Optional("index"): {
+ # Product to identify as in the taskcluster index
+ Required("product"): _by_platform(text_type),
+ # Job name to identify as in the taskcluster index
+ Required("job-name"): _by_platform(text_type),
+ # Type of index
+ Optional("type"): _by_platform(text_type),
+ },
+ # Description of the localized task
+ Required("description"): _by_platform(text_type),
+ Optional("run-on-projects"): job_description_schema["run-on-projects"],
+ # worker-type to utilize
+ Required("worker-type"): _by_platform(text_type),
+ # File which contains the used locales
+ Required("locales-file"): _by_platform(text_type),
+ # Tooltool visibility required for task.
+ Required("tooltool"): _by_platform(Any("internal", "public")),
+ # Docker image required for task. We accept only in-tree images
+ # -- generally desktop-build or android-build -- for now.
+ Optional("docker-image"): _by_platform(
+ # an in-tree generated docker image (from `taskcluster/docker/<name>`)
+ {"in-tree": text_type},
+ ),
+ Optional("fetches"): {
+ text_type: _by_platform([text_type]),
+ },
+ # The set of secret names to which the task has access; these are prefixed
+ # with `project/releng/gecko/{treeherder.kind}/level-{level}/`. Setting
+ # this will enable any worker features required and set the task's scopes
+ # appropriately. `true` here means ['*'], all secrets. Not supported on
+ # Windows
+ Optional("secrets"): _by_platform(Any(bool, [text_type])),
+ # Information for treeherder
+ Required("treeherder"): {
+ # Platform to display the task on in treeherder
+ Required("platform"): _by_platform(text_type),
+ # Symbol to use
+ Required("symbol"): text_type,
+ # Tier this task is reported at
+ Required("tier"): _by_platform(int),
+ },
+ # Extra environment values to pass to the worker
+ Optional("env"): _by_platform({text_type: taskref_or_string}),
+ # Max number of locales per chunk
+ Optional("locales-per-chunk"): _by_platform(int),
+ # Task deps to chain this task with, added in transforms from primary-dependency
+ # if this is a shippable-style build
+ Optional("dependencies"): {text_type: text_type},
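+ # Any `_by_platform` field accepts either a plain value or a value keyed
+ # by build-platform; a hypothetical illustration:
+ #
+ # run-time:
+ # by-build-platform:
+ # android.*: 5400
+ # default: 3600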
+ Optional("when"): {"files-changed": [text_type]}, + # passed through directly to the job description + Optional("attributes"): job_description_schema["attributes"], + Optional("extra"): job_description_schema["extra"], + # Shipping product and phase + Optional("shipping-product"): task_description_schema["shipping-product"], + Optional("shipping-phase"): task_description_schema["shipping-phase"], + } +) + +transforms = TransformSequence() + + +def parse_locales_file(locales_file, platform=None): + """Parse the passed locales file for a list of locales.""" + locales = [] + + with io.open(locales_file, mode="r") as f: + if locales_file.endswith("json"): + all_locales = json.load(f) + # XXX Only single locales are fetched + locales = { + locale: data["revision"] + for locale, data in all_locales.items() + if platform is None or platform in data["platforms"] + } + else: + all_locales = f.read().split() + # 'default' is the hg revision at the top of hg repo, in this context + locales = {locale: "default" for locale in all_locales} + return locales + + +def _remove_locales(locales, to_remove=None): + # ja-JP-mac is a mac-only locale, but there are no mac builds being repacked, + # so just omit it unconditionally + return { + locale: revision + for locale, revision in locales.items() + if locale not in to_remove + } + + +@transforms.add +def setup_name(config, jobs): + for job in jobs: + dep = job["primary-dependency"] + # Set the name to the same as the dep task, without kind name. + # Label will get set automatically with this kinds name. + job["name"] = job.get("name", dep.name) + yield job + + +@transforms.add +def copy_in_useful_magic(config, jobs): + for job in jobs: + dep = job["primary-dependency"] + attributes = copy_attributes_from_dependent_job(dep) + attributes.update(job.get("attributes", {})) + # build-platform is needed on `job` for by-build-platform + job["build-platform"] = attributes.get("build_platform") + job["attributes"] = attributes + yield job + + +transforms.add_validate(l10n_description_schema) + + +@transforms.add +def setup_shippable_dependency(config, jobs): + """ Sets up a task dependency to the signing job this relates to """ + for job in jobs: + job["dependencies"] = {"build": job["dependent-tasks"]["build"].label} + if job["attributes"]["build_platform"].startswith("win") or job["attributes"][ + "build_platform" + ].startswith("linux"): + job["dependencies"].update( + { + "build-signing": job["dependent-tasks"]["build-signing"].label, + } + ) + if job["attributes"]["build_platform"].startswith("macosx"): + job["dependencies"].update( + {"repackage": job["dependent-tasks"]["repackage"].label} + ) + yield job + + +@transforms.add +def handle_keyed_by(config, jobs): + """Resolve fields that can be keyed by platform, etc.""" + fields = [ + "locales-file", + "locales-per-chunk", + "worker-type", + "description", + "run-time", + "docker-image", + "secrets", + "fetches.toolchain", + "fetches.fetch", + "tooltool", + "env", + "ignore-locales", + "mozharness.config", + "mozharness.options", + "mozharness.actions", + "mozharness.script", + "treeherder.tier", + "treeherder.platform", + "index.type", + "index.product", + "index.job-name", + "when.files-changed", + ] + for job in jobs: + job = copy.deepcopy(job) # don't overwrite dict values here + for field in fields: + resolve_keyed_by(item=job, field=field, item_name=job["name"]) + yield job + + +@transforms.add +def handle_artifact_prefix(config, jobs): + """Resolve ``artifact_prefix`` in env vars""" + for job in jobs: + 
artifact_prefix = get_artifact_prefix(job) + for k1, v1 in six.iteritems(job.get("env", {})): + if isinstance(v1, text_type): + job["env"][k1] = v1.format(artifact_prefix=artifact_prefix) + elif isinstance(v1, dict): + for k2, v2 in six.iteritems(v1): + job["env"][k1][k2] = v2.format(artifact_prefix=artifact_prefix) + yield job + + +@transforms.add +def all_locales_attribute(config, jobs): + for job in jobs: + locales_platform = job["attributes"]["build_platform"].replace("-shippable", "") + locales_platform = locales_platform.replace("-pgo", "") + locales_with_changesets = parse_locales_file( + job["locales-file"], platform=locales_platform + ) + locales_with_changesets = _remove_locales( + locales_with_changesets, to_remove=job["ignore-locales"] + ) + + locales = sorted(locales_with_changesets.keys()) + attributes = job.setdefault("attributes", {}) + attributes["all_locales"] = locales + attributes["all_locales_with_changesets"] = locales_with_changesets + if job.get("shipping-product"): + attributes["shipping_product"] = job["shipping-product"] + yield job + + +@transforms.add +def chunk_locales(config, jobs): + """ Utilizes chunking for l10n stuff """ + for job in jobs: + locales_per_chunk = job.get("locales-per-chunk") + locales_with_changesets = job["attributes"]["all_locales_with_changesets"] + if locales_per_chunk: + chunks, remainder = divmod(len(locales_with_changesets), locales_per_chunk) + if remainder: + chunks = int(chunks + 1) + for this_chunk in range(1, chunks + 1): + chunked = copy.deepcopy(job) + chunked["name"] = chunked["name"].replace( + "/", "-{}/".format(this_chunk), 1 + ) + chunked["mozharness"]["options"] = chunked["mozharness"].get( + "options", [] + ) + # chunkify doesn't work with dicts + locales_with_changesets_as_list = sorted( + locales_with_changesets.items() + ) + chunked_locales = chunkify( + locales_with_changesets_as_list, this_chunk, chunks + ) + chunked["mozharness"]["options"].extend( + [ + "locale={}:{}".format(locale, changeset) + for locale, changeset in chunked_locales + ] + ) + chunked["attributes"]["l10n_chunk"] = str(this_chunk) + # strip revision + chunked["attributes"]["chunk_locales"] = [ + locale for locale, _ in chunked_locales + ] + + # add the chunk number to the TH symbol + chunked["treeherder"]["symbol"] = add_suffix( + chunked["treeherder"]["symbol"], this_chunk + ) + yield chunked + else: + job["mozharness"]["options"] = job["mozharness"].get("options", []) + job["mozharness"]["options"].extend( + [ + "locale={}:{}".format(locale, changeset) + for locale, changeset in sorted(locales_with_changesets.items()) + ] + ) + yield job + + +transforms.add_validate(l10n_description_schema) + + +@transforms.add +def stub_installer(config, jobs): + for job in jobs: + job.setdefault("attributes", {}) + job.setdefault("env", {}) + if job["attributes"].get("stub-installer"): + job["env"].update({"USE_STUB_INSTALLER": "1"}) + yield job + + +@transforms.add +def set_extra_config(config, jobs): + for job in jobs: + job["mozharness"].setdefault("extra-config", {})["branch"] = config.params[ + "project" + ] + if "update-channel" in job["attributes"]: + job["mozharness"]["extra-config"]["update_channel"] = job["attributes"][ + "update-channel" + ] + yield job + + +@transforms.add +def make_job_description(config, jobs): + for job in jobs: + job["mozharness"].update( + { + "using": "mozharness", + "job-script": "taskcluster/scripts/builder/build-l10n.sh", + "secrets": job.get("secrets", False), + } + ) + job_description = { + "name": job["name"], + 
"worker-type": job["worker-type"], + "description": job["description"], + "run": job["mozharness"], + "attributes": job["attributes"], + "treeherder": { + "kind": "build", + "tier": job["treeherder"]["tier"], + "symbol": job["treeherder"]["symbol"], + "platform": job["treeherder"]["platform"], + }, + "run-on-projects": job.get("run-on-projects") + if job.get("run-on-projects") + else [], + } + if job.get("extra"): + job_description["extra"] = job["extra"] + + job_description["run"]["tooltool-downloads"] = job["tooltool"] + + job_description["worker"] = { + "max-run-time": job["run-time"], + "chain-of-trust": True, + } + if job["worker-type"] == "b-win2012": + job_description["worker"]["os"] = "windows" + job_description["run"]["use-simple-package"] = False + job_description["run"]["use-magic-mh-args"] = False + else: + job_description["run"]["need-xvfb"] = True + + if job.get("docker-image"): + job_description["worker"]["docker-image"] = job["docker-image"] + + if job.get("fetches"): + job_description["fetches"] = job["fetches"] + + if job.get("index"): + job_description["index"] = { + "product": job["index"]["product"], + "job-name": job["index"]["job-name"], + "type": job["index"].get("type", "generic"), + } + + if job.get("dependencies"): + job_description["dependencies"] = job["dependencies"] + if job.get("env"): + job_description["worker"]["env"] = job["env"] + if job.get("when", {}).get("files-changed"): + job_description.setdefault("when", {}) + job_description["when"]["files-changed"] = [job["locales-file"]] + job[ + "when" + ]["files-changed"] + + if "shipping-phase" in job: + job_description["shipping-phase"] = job["shipping-phase"] + + if "shipping-product" in job: + job_description["shipping-product"] = job["shipping-product"] + + yield job_description diff --git a/taskcluster/taskgraph/transforms/mac_notarization_poller.py b/taskcluster/taskgraph/transforms/mac_notarization_poller.py new file mode 100644 index 0000000000..1f9887d6bf --- /dev/null +++ b/taskcluster/taskgraph/transforms/mac_notarization_poller.py @@ -0,0 +1,100 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Transform the mac notarization poller task into an actual task description. 
+""" + +from __future__ import absolute_import, print_function, unicode_literals + +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.attributes import copy_attributes_from_dependent_job +from taskgraph.util.taskcluster import get_artifact_url +from taskgraph.util.treeherder import add_suffix, join_symbol + + +transforms = TransformSequence() + + +@transforms.add +def make_task_description(config, jobs): + for job in jobs: + dep_job = job["primary-dependency"] + attributes = dep_job.attributes + + build_platform = dep_job.attributes.get("build_platform") + treeherder = None + if "partner" not in config.kind and "eme-free" not in config.kind: + treeherder = job.get("treeherder", {}) + + dep_th_platform = ( + dep_job.task.get("extra", {}) + .get("treeherder", {}) + .get("machine", {}) + .get("platform", "") + ) + build_type = dep_job.attributes.get("build_type") + treeherder.setdefault( + "platform", "{}/{}".format(dep_th_platform, build_type) + ) + + dep_treeherder = dep_job.task.get("extra", {}).get("treeherder", {}) + treeherder.setdefault("tier", dep_treeherder.get("tier", 1)) + treeherder.setdefault( + "symbol", + _generate_treeherder_symbol( + dep_treeherder.get("groupSymbol", "?"), dep_treeherder.get("symbol") + ), + ) + treeherder.setdefault("kind", "build") + + label = dep_job.label.replace("part-1", "poller") + description = ( + "Mac Notarization Poller for build '" + "{build_platform}/{build_type}'".format( + build_platform=build_platform, build_type=attributes.get("build_type") + ) + ) + + attributes = ( + job["attributes"] + if job.get("attributes") + else copy_attributes_from_dependent_job(dep_job) + ) + attributes["signed"] = True + + if dep_job.attributes.get("chunk_locales"): + # Used for l10n attribute passthrough + attributes["chunk_locales"] = dep_job.attributes.get("chunk_locales") + + uuid_manifest_url = get_artifact_url("<part1>", "public/uuid_manifest.json") + task = { + "label": label, + "description": description, + "worker": { + "implementation": "notarization-poller", + "uuid-manifest": {"task-reference": uuid_manifest_url}, + }, + "worker-type": "mac-notarization-poller", + "dependencies": {"part1": dep_job.label}, + "attributes": attributes, + "run-on-projects": dep_job.attributes.get("run_on_projects"), + "optimization": dep_job.optimization, + "routes": job.get("routes", []), + "shipping-product": job.get("shipping-product"), + "shipping-phase": job.get("shipping-phase"), + } + + if treeherder: + task["treeherder"] = treeherder + if job.get("extra"): + task["extra"] = job["extra"] + # we may have reduced the priority for partner jobs, otherwise task.py will set it + if job.get("priority"): + task["priority"] = job["priority"] + + yield task + + +def _generate_treeherder_symbol(group_symbol, build_symbol): + return join_symbol(group_symbol, add_suffix(build_symbol, "-poll")) diff --git a/taskcluster/taskgraph/transforms/mar_signing.py b/taskcluster/taskgraph/transforms/mar_signing.py new file mode 100644 index 0000000000..0622d35b2a --- /dev/null +++ b/taskcluster/taskgraph/transforms/mar_signing.py @@ -0,0 +1,165 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Transform the {partials,mar}-signing task into an actual task description. 
+""" +from __future__ import absolute_import, print_function, unicode_literals + +import os + +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.attributes import ( + copy_attributes_from_dependent_job, + sorted_unique_list, +) +from taskgraph.util.scriptworker import ( + get_signing_cert_scope_per_platform, +) +from taskgraph.util.partials import get_partials_artifacts_from_params +from taskgraph.util.taskcluster import get_artifact_prefix +from taskgraph.util.treeherder import join_symbol, inherit_treeherder_from_dep + +import logging + +logger = logging.getLogger(__name__) + +SIGNING_FORMATS = { + "mar-signing-autograph-stage": { + "target.complete.mar": ["autograph_stage_mar384"], + }, + "default": { + "target.complete.mar": ["autograph_hash_only_mar384"], + }, +} + +transforms = TransformSequence() + + +def generate_partials_artifacts(job, release_history, platform, locale=None): + artifact_prefix = get_artifact_prefix(job) + if locale: + artifact_prefix = "{}/{}".format(artifact_prefix, locale) + else: + locale = "en-US" + + artifacts = get_partials_artifacts_from_params(release_history, platform, locale) + + upstream_artifacts = [ + { + "taskId": {"task-reference": "<partials>"}, + "taskType": "partials", + "paths": [ + "{}/{}".format(artifact_prefix, path) + for path, version in artifacts + # TODO Use mozilla-version to avoid comparing strings. Otherwise Firefox 100 will + # be considered smaller than Firefox 56 + if version is None or version >= "56" + ], + "formats": ["autograph_hash_only_mar384"], + } + ] + + old_mar_upstream_artifacts = { + "taskId": {"task-reference": "<partials>"}, + "taskType": "partials", + "paths": [ + "{}/{}".format(artifact_prefix, path) + for path, version in artifacts + # TODO Use mozilla-version to avoid comparing strings. Otherwise Firefox 100 will be + # considered smaller than Firefox 56 + if version is not None and version < "56" + ], + "formats": ["mar"], + } + + if old_mar_upstream_artifacts["paths"]: + upstream_artifacts.append(old_mar_upstream_artifacts) + + return upstream_artifacts + + +def generate_complete_artifacts(job, kind): + upstream_artifacts = [] + if kind not in SIGNING_FORMATS: + kind = "default" + for artifact in job.release_artifacts: + basename = os.path.basename(artifact) + if basename in SIGNING_FORMATS[kind]: + upstream_artifacts.append( + { + "taskId": {"task-reference": "<{}>".format(job.kind)}, + "taskType": "build", + "paths": [artifact], + "formats": SIGNING_FORMATS[kind][basename], + } + ) + + return upstream_artifacts + + +@transforms.add +def make_task_description(config, jobs): + for job in jobs: + dep_job = job["primary-dependency"] + locale = dep_job.attributes.get("locale") + + treeherder = inherit_treeherder_from_dep(job, dep_job) + treeherder.setdefault( + "symbol", join_symbol(job.get("treeherder-group", "ms"), locale or "N") + ) + + label = job.get("label", "{}-{}".format(config.kind, dep_job.label)) + + dependencies = {dep_job.kind: dep_job.label} + signing_dependencies = dep_job.dependencies + # This is so we get the build task etc in our dependencies to + # have better beetmover support. 
+ dependencies.update(signing_dependencies) + + attributes = copy_attributes_from_dependent_job(dep_job) + attributes["required_signoffs"] = sorted_unique_list( + attributes.get("required_signoffs", []), job.pop("required_signoffs") + ) + attributes["shipping_phase"] = job["shipping-phase"] + if locale: + attributes["locale"] = locale + + build_platform = attributes.get("build_platform") + if config.kind == "partials-signing": + upstream_artifacts = generate_partials_artifacts( + dep_job, config.params["release_history"], build_platform, locale + ) + else: + upstream_artifacts = generate_complete_artifacts(dep_job, config.kind) + + is_shippable = job.get( + "shippable", dep_job.attributes.get("shippable") # First check current job + ) # Then dep job for 'shippable' + signing_cert_scope = get_signing_cert_scope_per_platform( + build_platform, is_shippable, config + ) + + scopes = [signing_cert_scope] + + task = { + "label": label, + "description": "{} {}".format( + dep_job.description, job["description-suffix"] + ), + "worker-type": job.get("worker-type", "linux-signing"), + "worker": { + "implementation": "scriptworker-signing", + "upstream-artifacts": upstream_artifacts, + "max-run-time": 3600, + }, + "dependencies": dependencies, + "attributes": attributes, + "scopes": scopes, + "run-on-projects": job.get( + "run-on-projects", dep_job.attributes.get("run_on_projects") + ), + "treeherder": treeherder, + } + + yield task diff --git a/taskcluster/taskgraph/transforms/maybe_release.py b/taskcluster/taskgraph/transforms/maybe_release.py new file mode 100644 index 0000000000..134b5c46b1 --- /dev/null +++ b/taskcluster/taskgraph/transforms/maybe_release.py @@ -0,0 +1,23 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +from __future__ import absolute_import, print_function, unicode_literals + +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.schema import resolve_keyed_by + +transforms = TransformSequence() + + +@transforms.add +def make_task_description(config, jobs): + for job in jobs: + for key in ["worker-type", "scopes"]: + resolve_keyed_by( + job, + key, + item_name=job["name"], + **{"release-level": config.params.release_level()} + ) + yield job diff --git a/taskcluster/taskgraph/transforms/merge_automation.py b/taskcluster/taskgraph/transforms/merge_automation.py new file mode 100644 index 0000000000..7367e43785 --- /dev/null +++ b/taskcluster/taskgraph/transforms/merge_automation.py @@ -0,0 +1,83 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Transform the update generation task into an actual task description. 
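+
+The transforms below consume a merge_config parameter. A minimal sketch of its
+assumed shape (field names taken from the code, values hypothetical):
+
+    {
+        "behavior": "central-to-beta",
+        "force-dry-run": True,
+        "push": False,
+        "ssh-user-alias": "merge_user",
+        # optional overrides, mainly useful for testing: "from-repo",
+        # "from-branch", "to-repo", "to-branch", "fetch-version-from"
+    }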
+""" + +from __future__ import absolute_import, print_function, unicode_literals + +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.schema import resolve_keyed_by + +transforms = TransformSequence() + + +@transforms.add +def handle_keyed_by(config, tasks): + """Resolve fields that can be keyed by platform, etc.""" + if "merge_config" not in config.params: + return + merge_config = config.params["merge_config"] + fields = [ + "worker.push", + "worker-type", + "worker.l10n-bump-info", + "worker.source-repo", + ] + for task in tasks: + for field in fields: + resolve_keyed_by( + task, + field, + item_name=task["name"], + **{ + "project": config.params["project"], + "release-type": config.params["release_type"], + "behavior": merge_config["behavior"], + } + ) + yield task + + +@transforms.add +def update_labels(config, tasks): + for task in tasks: + merge_config = config.params["merge_config"] + task["label"] = "merge-{}".format(merge_config["behavior"]) + treeherder = task.get("treeherder", {}) + treeherder["symbol"] = "Rel({})".format(merge_config["behavior"]) + task["treeherder"] = treeherder + yield task + + +@transforms.add +def add_payload_config(config, tasks): + for task in tasks: + if "merge_config" not in config.params: + break + merge_config = config.params["merge_config"] + worker = task["worker"] + worker["merge-info"] = config.graph_config["merge-automation"]["behaviors"][ + merge_config["behavior"] + ] + + if "l10n-bump-info" in worker and worker["l10n-bump-info"] is None: + del worker["l10n-bump-info"] + + # Override defaults, useful for testing. + for field in [ + "from-repo", + "from-branch", + "to-repo", + "to-branch", + "fetch-version-from", + ]: + if merge_config.get(field): + worker["merge-info"][field] = merge_config[field] + + worker["force-dry-run"] = merge_config["force-dry-run"] + worker["ssh-user"] = merge_config.get("ssh-user-alias", "merge_user") + if merge_config.get("push"): + worker["push"] = merge_config["push"] + yield task diff --git a/taskcluster/taskgraph/transforms/name_sanity.py b/taskcluster/taskgraph/transforms/name_sanity.py new file mode 100644 index 0000000000..de0b4c7c02 --- /dev/null +++ b/taskcluster/taskgraph/transforms/name_sanity.py @@ -0,0 +1,48 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Generate labels for tasks without names, consistently. +Uses attributes from `primary-dependency`. 
+""" + +from __future__ import absolute_import, print_function, unicode_literals + +from taskgraph.transforms.base import TransformSequence + + +transforms = TransformSequence() + + +@transforms.add +def make_label(config, jobs): + """Generate a sane label for a new task constructed from a dependency + Using attributes from the dependent job and the current task kind""" + for job in jobs: + dep_job = job["primary-dependency"] + attr = dep_job.attributes.get + + if attr("locale", job.get("locale")): + template = "{kind}-{locale}-{build_platform}/{build_type}" + elif attr("l10n_chunk"): + template = "{kind}-{build_platform}-{l10n_chunk}/{build_type}" + elif config.kind.startswith("release-eme-free") or config.kind.startswith( + "release-partner-repack" + ): + suffix = job.get("extra", {}).get("repack_suffix", None) or job.get( + "extra", {} + ).get("repack_id", None) + template = "{kind}-{build_platform}" + if suffix: + template += "-{}".format(suffix.replace("/", "-")) + else: + template = "{kind}-{build_platform}/{build_type}" + job["label"] = template.format( + kind=config.kind, + build_platform=attr("build_platform"), + build_type=attr("build_type"), + locale=attr("locale", job.get("locale", "")), # Locale can be absent + l10n_chunk=attr("l10n_chunk", ""), # Can be empty + ) + + yield job diff --git a/taskcluster/taskgraph/transforms/openh264.py b/taskcluster/taskgraph/transforms/openh264.py new file mode 100644 index 0000000000..20a0527139 --- /dev/null +++ b/taskcluster/taskgraph/transforms/openh264.py @@ -0,0 +1,28 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +This transform is used to help populate mozharness options for openh264 jobs +""" + +from __future__ import absolute_import, print_function, unicode_literals + +from taskgraph.transforms.base import TransformSequence + +transforms = TransformSequence() + + +@transforms.add +def set_mh_options(config, jobs): + """ + This transform sets the 'openh264_rev' attribute. + """ + for job in jobs: + repo = job.pop("repo") + rev = job.pop("revision") + attributes = job.setdefault("attributes", {}) + attributes["openh264_rev"] = rev + run = job.setdefault("run", {}) + options = run.setdefault("options", []) + options.extend(["repo={}".format(repo), "rev={}".format(rev)]) + yield job diff --git a/taskcluster/taskgraph/transforms/openh264_signing.py b/taskcluster/taskgraph/transforms/openh264_signing.py new file mode 100644 index 0000000000..877651f34d --- /dev/null +++ b/taskcluster/taskgraph/transforms/openh264_signing.py @@ -0,0 +1,107 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Transform the repackage signing task into an actual task description. 
+""" + +from __future__ import absolute_import, print_function, unicode_literals + +from six import text_type +from taskgraph.loader.single_dep import schema +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.attributes import copy_attributes_from_dependent_job +from taskgraph.util.scriptworker import get_signing_cert_scope_per_platform +from taskgraph.util.treeherder import inherit_treeherder_from_dep +from taskgraph.transforms.task import task_description_schema +from voluptuous import Optional + +transforms = TransformSequence() + +signing_description_schema = schema.extend( + { + Optional("label"): text_type, + Optional("extra"): object, + Optional("shipping-product"): task_description_schema["shipping-product"], + Optional("shipping-phase"): task_description_schema["shipping-phase"], + } +) + +transforms.add_validate(signing_description_schema) + + +@transforms.add +def make_signing_description(config, jobs): + for job in jobs: + dep_job = job["primary-dependency"] + attributes = dep_job.attributes + build_platform = dep_job.attributes.get("build_platform") + is_nightly = True # cert_scope_per_platform uses this to choose the right cert + + description = ( + "Signing of OpenH264 Binaries for '" + "{build_platform}/{build_type}'".format( + build_platform=attributes.get("build_platform"), + build_type=attributes.get("build_type"), + ) + ) + + # we have a genuine repackage job as our parent + dependencies = {"openh264": dep_job.label} + + my_attributes = copy_attributes_from_dependent_job(dep_job) + + signing_cert_scope = get_signing_cert_scope_per_platform( + build_platform, is_nightly, config + ) + + scopes = [signing_cert_scope] + + if "win" in build_platform: + # job['primary-dependency'].task['payload']['command'] + formats = ["autograph_authenticode"] + else: + formats = ["autograph_gpg"] + + rev = attributes["openh264_rev"] + upstream_artifacts = [ + { + "taskId": {"task-reference": "<openh264>"}, + "taskType": "build", + "paths": [ + "private/openh264/openh264-{}-{}.zip".format(build_platform, rev), + ], + "formats": formats, + } + ] + + treeherder = inherit_treeherder_from_dep(job, dep_job) + treeherder.setdefault( + "symbol", + _generate_treeherder_symbol( + dep_job.task.get("extra", {}).get("treeherder", {}).get("symbol") + ), + ) + + task = { + "label": job["label"], + "description": description, + "worker-type": "linux-signing", + "worker": { + "implementation": "scriptworker-signing", + "upstream-artifacts": upstream_artifacts, + "max-run-time": 3600, + }, + "scopes": scopes, + "dependencies": dependencies, + "attributes": my_attributes, + "run-on-projects": dep_job.attributes.get("run_on_projects"), + "treeherder": treeherder, + } + + yield task + + +def _generate_treeherder_symbol(build_symbol): + symbol = build_symbol + "s" + return symbol diff --git a/taskcluster/taskgraph/transforms/partials.py b/taskcluster/taskgraph/transforms/partials.py new file mode 100644 index 0000000000..8c2e014aa1 --- /dev/null +++ b/taskcluster/taskgraph/transforms/partials.py @@ -0,0 +1,169 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Transform the partials task into an actual task description. 
+""" +from __future__ import absolute_import, print_function, unicode_literals + +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.attributes import copy_attributes_from_dependent_job +from taskgraph.util.partials import get_builds +from taskgraph.util.platforms import architecture +from taskgraph.util.taskcluster import get_artifact_prefix +from taskgraph.util.treeherder import inherit_treeherder_from_dep + +import logging + +logger = logging.getLogger(__name__) + +transforms = TransformSequence() + + +def _generate_task_output_files(job, filenames, locale=None): + locale_output_path = "{}/".format(locale) if locale else "" + artifact_prefix = get_artifact_prefix(job) + + data = list() + for filename in filenames: + data.append( + { + "type": "file", + "path": "/home/worker/artifacts/{}".format(filename), + "name": "{}/{}{}".format(artifact_prefix, locale_output_path, filename), + } + ) + data.append( + { + "type": "file", + "path": "/home/worker/artifacts/manifest.json", + "name": "{}/{}manifest.json".format(artifact_prefix, locale_output_path), + } + ) + return data + + +def identify_desired_signing_keys(project, product): + if project in ["mozilla-central", "comm-central", "oak"]: + return "nightly" + elif project == "mozilla-beta": + if product == "devedition": + return "nightly" + return "release" + elif ( + project in ["mozilla-release", "comm-beta"] + or project.startswith("mozilla-esr") + or project.startswith("comm-esr") + ): + return "release" + return "dep1" + + +@transforms.add +def make_task_description(config, jobs): + # If no balrog release history, then don't generate partials + if not config.params.get("release_history"): + return + for job in jobs: + dep_job = job["primary-dependency"] + + treeherder = inherit_treeherder_from_dep(job, dep_job) + treeherder.setdefault("symbol", "p(N)") + + label = job.get("label", "partials-{}".format(dep_job.label)) + + dependencies = {dep_job.kind: dep_job.label} + + attributes = copy_attributes_from_dependent_job(dep_job) + locale = dep_job.attributes.get("locale") + if locale: + attributes["locale"] = locale + treeherder["symbol"] = "p({})".format(locale) + attributes["shipping_phase"] = job["shipping-phase"] + + build_locale = locale or "en-US" + + build_platform = attributes["build_platform"] + builds = get_builds( + config.params["release_history"], build_platform, build_locale + ) + + # If the list is empty there's no available history for this platform + # and locale combination, so we can't build any partials. 
+ if not builds: + continue + + extra = {"funsize": {"partials": list()}} + update_number = 1 + + locale_suffix = "" + if locale: + locale_suffix = "{}/".format(locale) + artifact_path = "<{}/{}/{}target.complete.mar>".format( + dep_job.kind, + get_artifact_prefix(dep_job), + locale_suffix, + ) + for build in sorted(builds): + partial_info = { + "locale": build_locale, + "from_mar": builds[build]["mar_url"], + "to_mar": {"artifact-reference": artifact_path}, + "branch": config.params["project"], + "update_number": update_number, + "dest_mar": build, + } + if "product" in builds[build]: + partial_info["product"] = builds[build]["product"] + if "previousVersion" in builds[build]: + partial_info["previousVersion"] = builds[build]["previousVersion"] + if "previousBuildNumber" in builds[build]: + partial_info["previousBuildNumber"] = builds[build][ + "previousBuildNumber" + ] + extra["funsize"]["partials"].append(partial_info) + update_number += 1 + + level = config.params["level"] + + worker = { + "artifacts": _generate_task_output_files(dep_job, builds.keys(), locale), + "implementation": "docker-worker", + "docker-image": {"in-tree": "funsize-update-generator"}, + "os": "linux", + "max-run-time": 3600 if "asan" in dep_job.label else 900, + "chain-of-trust": True, + "taskcluster-proxy": True, + "env": { + "SIGNING_CERT": identify_desired_signing_keys( + config.params["project"], config.params["release_product"] + ), + "EXTRA_PARAMS": "--arch={}".format(architecture(build_platform)), + "MAR_CHANNEL_ID": attributes["mar-channel-id"], + }, + } + if config.params.release_level() == "staging": + worker["env"]["FUNSIZE_ALLOW_STAGING_PREFIXES"] = "true" + + task = { + "label": label, + "description": "{} Partials".format(dep_job.description), + "worker-type": "b-linux", + "dependencies": dependencies, + "scopes": [], + "attributes": attributes, + "run-on-projects": dep_job.attributes.get("run_on_projects"), + "treeherder": treeherder, + "extra": extra, + "worker": worker, + } + + # We only want caching on linux/windows due to bug 1436977 + if int(level) == 3 and any( + [build_platform.startswith(prefix) for prefix in ["linux", "win"]] + ): + task["scopes"].append( + "auth:aws-s3:read-write:tc-gp-private-1d-us-east-1/releng/mbsdiff-cache/" + ) + + yield task diff --git a/taskcluster/taskgraph/transforms/partner_attribution.py b/taskcluster/taskgraph/transforms/partner_attribution.py new file mode 100644 index 0000000000..5396d733d5 --- /dev/null +++ b/taskcluster/taskgraph/transforms/partner_attribution.py @@ -0,0 +1,133 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Transform the partner attribution task into an actual task description. 
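+
+Rough shape of the partner attribution config walked below (field names from
+the code, values hypothetical):
+
+    {
+        "defaults": {...},  # baseline attribution parameters
+        "configs": [{
+            "campaign": "acme",
+            "content": "acme-free",
+            "platforms": ["win64-shippable"],
+            "locales": ["en-US", "de"],
+        }],
+    }
+
+Each (platform, locale) pair is resolved to an upstream repackage-signing task
+whose target.installer.exe gets the generated attribution code stamped in.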
+""" + +from __future__ import absolute_import, print_function, unicode_literals + +from collections import defaultdict +import json +import logging + +import six + +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.partners import ( + apply_partner_priority, + check_if_partners_enabled, + get_partner_config_by_kind, + generate_attribution_code, +) + +log = logging.getLogger(__name__) + +transforms = TransformSequence() +transforms.add(check_if_partners_enabled) +transforms.add(apply_partner_priority) + + +@transforms.add +def add_command_arguments(config, tasks): + enabled_partners = config.params.get("release_partners") + dependencies = {} + fetches = defaultdict(set) + attributions = [] + release_artifacts = [] + attribution_config = get_partner_config_by_kind(config, config.kind) + + for partner_config in attribution_config.get("configs", []): + # we might only be interested in a subset of all partners, eg for a respin + if enabled_partners and partner_config["campaign"] not in enabled_partners: + continue + attribution_code = generate_attribution_code( + attribution_config["defaults"], partner_config + ) + for platform in partner_config["platforms"]: + stage_platform = platform.replace("-shippable", "") + for locale in partner_config["locales"]: + # find the upstream, throw away locales we don't have, somehow. Skip ? + if locale == "en-US": + upstream_label = "repackage-signing-{platform}/opt".format( + platform=platform + ) + upstream_artifact = "target.installer.exe" + else: + upstream_label = ( + "repackage-signing-l10n-{locale}-{platform}/opt".format( + locale=locale, platform=platform + ) + ) + upstream_artifact = "{locale}/target.installer.exe".format( + locale=locale + ) + if upstream_label not in config.kind_dependencies_tasks: + raise Exception( + "Can't find upstream task for {} {}".format(platform, locale) + ) + upstream = config.kind_dependencies_tasks[upstream_label] + + # set the dependencies to just what we need rather than all of l10n + dependencies.update({upstream.label: upstream.label}) + + fetches[upstream_label].add((upstream_artifact, stage_platform, locale)) + + artifact_part = "{platform}/{locale}/target.installer.exe".format( + platform=stage_platform, locale=locale + ) + artifact = ( + "releng/partner/{partner}/{sub_partner}/{artifact_part}".format( + partner=partner_config["campaign"], + sub_partner=partner_config["content"], + artifact_part=artifact_part, + ) + ) + # config for script + # TODO - generalise input & output ?? 
+ # add releng/partner prefix via get_artifact_prefix..() + attributions.append( + { + "input": "/builds/worker/fetches/{}".format(artifact_part), + "output": "/builds/worker/artifacts/{}".format(artifact), + "attribution": attribution_code, + } + ) + release_artifacts.append(artifact) + + # bail-out early if we don't have any attributions to do + if not attributions: + return + + for task in tasks: + worker = task.get("worker", {}) + worker["chain-of-trust"] = True + + task.setdefault("dependencies", {}).update(dependencies) + task.setdefault("fetches", {}) + for upstream_label, upstream_artifacts in fetches.items(): + task["fetches"][upstream_label] = [ + { + "artifact": upstream_artifact, + "dest": "{platform}/{locale}".format( + platform=platform, locale=locale + ), + "extract": False, + "verify-hash": True, + } + for upstream_artifact, platform, locale in upstream_artifacts + ] + worker.setdefault("env", {})["ATTRIBUTION_CONFIG"] = six.ensure_text( + json.dumps(attributions, sort_keys=True) + ) + worker["artifacts"] = [ + { + "name": "releng/partner", + "path": "/builds/worker/artifacts/releng/partner", + "type": "directory", + } + ] + task["release-artifacts"] = release_artifacts + task["label"] = config.kind + + yield task diff --git a/taskcluster/taskgraph/transforms/partner_attribution_beetmover.py b/taskcluster/taskgraph/transforms/partner_attribution_beetmover.py new file mode 100644 index 0000000000..268dc967a1 --- /dev/null +++ b/taskcluster/taskgraph/transforms/partner_attribution_beetmover.py @@ -0,0 +1,207 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Transform the beetmover task into an actual task description. 
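+
+The destination paths come from the kind's partner-public-path and
+partner-private-path templates. They are filled in two passes (see
+split_public_and_private below): release-wide values first, per-artifact
+values second, e.g. for a hypothetical template:
+
+    "partner-repacks/{partner}/{subpartner}/v{release_partner_build_number}/{platform}/{locale}"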
+""" + +from __future__ import absolute_import, print_function, unicode_literals + +from six import text_type +from taskgraph.loader.single_dep import schema +from taskgraph.transforms.base import TransformSequence +from taskgraph.transforms.beetmover import craft_release_properties +from taskgraph.util.attributes import copy_attributes_from_dependent_job +from taskgraph.util.partners import ( + get_partner_config_by_kind, + apply_partner_priority, +) +from taskgraph.util.schema import ( + optionally_keyed_by, + resolve_keyed_by, +) +from taskgraph.util.scriptworker import ( + add_scope_prefix, + get_beetmover_bucket_scope, +) +from taskgraph.util.taskcluster import get_artifact_prefix +from taskgraph.transforms.task import task_description_schema +from voluptuous import Any, Required, Optional + +from collections import defaultdict +from copy import deepcopy + + +beetmover_description_schema = schema.extend( + { + # depname is used in taskref's to identify the taskID of the unsigned things + Required("depname", default="build"): text_type, + # unique label to describe this beetmover task, defaults to {dep.label}-beetmover + Optional("label"): text_type, + Required("partner-bucket-scope"): optionally_keyed_by( + "release-level", text_type + ), + Required("partner-public-path"): Any(None, text_type), + Required("partner-private-path"): Any(None, text_type), + Optional("extra"): object, + Required("shipping-phase"): task_description_schema["shipping-phase"], + Optional("shipping-product"): task_description_schema["shipping-product"], + Optional("priority"): task_description_schema["priority"], + } +) + +transforms = TransformSequence() +transforms.add_validate(beetmover_description_schema) +transforms.add(apply_partner_priority) + + +@transforms.add +def resolve_keys(config, jobs): + for job in jobs: + resolve_keyed_by( + job, + "partner-bucket-scope", + item_name=job["label"], + **{"release-level": config.params.release_level()} + ) + yield job + + +@transforms.add +def split_public_and_private(config, jobs): + # we need to separate private vs public destinations because beetmover supports one + # in a single task. Only use a single task for each type though. 
+ partner_config = get_partner_config_by_kind(config, config.kind) + for job in jobs: + upstream_artifacts = job["primary-dependency"].release_artifacts + attribution_task_ref = "<{}>".format(job["primary-dependency"].label) + prefix = get_artifact_prefix(job["primary-dependency"]) + artifacts = defaultdict(list) + for artifact in upstream_artifacts: + partner, sub_partner, platform, locale, _ = artifact.replace( + prefix + "/", "" + ).split("/", 4) + destination = "private" + this_config = [ + p + for p in partner_config["configs"] + if (p["campaign"] == partner and p["content"] == sub_partner) + ] + if this_config[0].get("upload_to_candidates"): + destination = "public" + artifacts[destination].append( + (artifact, partner, sub_partner, platform, locale) + ) + + action_scope = add_scope_prefix(config, "beetmover:action:push-to-partner") + public_bucket_scope = get_beetmover_bucket_scope(config) + partner_bucket_scope = add_scope_prefix(config, job["partner-bucket-scope"]) + repl_dict = { + "build_number": config.params["build_number"], + "release_partner_build_number": config.params[ + "release_partner_build_number" + ], + "version": config.params["version"], + "partner": "{partner}", # we'll replace these later, per artifact + "subpartner": "{subpartner}", + "platform": "{platform}", + "locale": "{locale}", + } + for destination, destination_artifacts in artifacts.items(): + this_job = deepcopy(job) + + if destination == "public": + this_job["scopes"] = [public_bucket_scope, action_scope] + this_job["partner_public"] = True + else: + this_job["scopes"] = [partner_bucket_scope, action_scope] + this_job["partner_public"] = False + + partner_path_key = "partner-{destination}-path".format( + destination=destination + ) + partner_path = this_job[partner_path_key].format(**repl_dict) + this_job.setdefault("worker", {})[ + "upstream-artifacts" + ] = generate_upstream_artifacts( + attribution_task_ref, destination_artifacts, partner_path + ) + + yield this_job + + +@transforms.add +def make_task_description(config, jobs): + for job in jobs: + dep_job = job["primary-dependency"] + + attributes = dep_job.attributes + build_platform = attributes.get("build_platform") + if not build_platform: + raise Exception("Cannot find build platform!") + + label = config.kind + description = "Beetmover for partner attribution" + if job["partner_public"]: + label = "{}-public".format(label) + description = "{} public".format(description) + else: + label = "{}-private".format(label) + description = "{} private".format(description) + attributes = copy_attributes_from_dependent_job(dep_job) + + task = { + "label": label, + "description": description, + "dependencies": {dep_job.kind: dep_job.label}, + "attributes": attributes, + "run-on-projects": dep_job.attributes.get("run_on_projects"), + "shipping-phase": job["shipping-phase"], + "shipping-product": job.get("shipping-product"), + "partner_public": job["partner_public"], + "worker": job["worker"], + "scopes": job["scopes"], + } + # we may have reduced the priority for partner jobs, otherwise task.py will set it + if job.get("priority"): + task["priority"] = job["priority"] + + yield task + + +def generate_upstream_artifacts(attribution_task, artifacts, partner_path): + upstream_artifacts = [] + for artifact, partner, subpartner, platform, locale in artifacts: + upstream_artifacts.append( + { + "taskId": {"task-reference": attribution_task}, + "taskType": "repackage", + "paths": [artifact], + "locale": partner_path.format( + partner=partner, + 
subpartner=subpartner, + platform=platform, + locale=locale, + ), + } + ) + + if not upstream_artifacts: + raise Exception("Couldn't find any upstream artifacts.") + + return upstream_artifacts + + +@transforms.add +def make_task_worker(config, jobs): + for job in jobs: + job["worker-type"] = "beetmover" + worker = { + "implementation": "beetmover", + "release-properties": craft_release_properties(config, job), + "partner-public": job["partner_public"], + } + job["worker"].update(worker) + del job["partner_public"] + + yield job diff --git a/taskcluster/taskgraph/transforms/partner_repack.py b/taskcluster/taskgraph/transforms/partner_repack.py new file mode 100644 index 0000000000..1de66a16fa --- /dev/null +++ b/taskcluster/taskgraph/transforms/partner_repack.py @@ -0,0 +1,122 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Transform the partner repack task into an actual task description. +""" + +from __future__ import absolute_import, print_function, unicode_literals + +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.schema import resolve_keyed_by +from taskgraph.util.scriptworker import get_release_config +from taskgraph.util.partners import ( + check_if_partners_enabled, + get_partner_config_by_kind, + get_partner_url_config, + get_repack_ids_by_platform, + apply_partner_priority, +) + + +transforms = TransformSequence() +transforms.add(check_if_partners_enabled) +transforms.add(apply_partner_priority) + + +@transforms.add +def skip_unnecessary_platforms(config, tasks): + for task in tasks: + if config.kind == "release-partner-repack": + platform = task["attributes"]["build_platform"] + repack_ids = get_repack_ids_by_platform(config, platform) + if not repack_ids: + continue + yield task + + +@transforms.add +def populate_repack_manifests_url(config, tasks): + for task in tasks: + partner_url_config = get_partner_url_config(config.params, config.graph_config) + + for k in partner_url_config: + if config.kind.startswith(k): + task["worker"].setdefault("env", {})[ + "REPACK_MANIFESTS_URL" + ] = partner_url_config[k] + break + else: + raise Exception("Can't find partner REPACK_MANIFESTS_URL") + + for property in ("limit-locales",): + property = "extra.{}".format(property) + resolve_keyed_by( + task, + property, + property, + **{"release-level": config.params.release_level()} + ) + + if task["worker"]["env"]["REPACK_MANIFESTS_URL"].startswith("git@"): + task.setdefault("scopes", []).append( + "secrets:get:project/releng/gecko/build/level-{level}/partner-github-ssh".format( + **config.params + ) + ) + + yield task + + +@transforms.add +def make_label(config, tasks): + for task in tasks: + task["label"] = "{}-{}".format(config.kind, task["name"]) + yield task + + +@transforms.add +def add_command_arguments(config, tasks): + release_config = get_release_config(config) + + # staging releases - pass reduced set of locales to the repacking script + all_locales = set() + partner_config = get_partner_config_by_kind(config, config.kind) + for partner in partner_config.values(): + for sub_partner in partner.values(): + all_locales.update(sub_partner.get("locales", [])) + + for task in tasks: + # add the MOZHARNESS_OPTIONS, eg version=61.0, build-number=1, platform=win64 + if not task["attributes"]["build_platform"].endswith("-shippable"): + raise Exception( + "Unexpected partner repack platform: {}".format( + 
task["attributes"]["build_platform"], + ), + ) + platform = task["attributes"]["build_platform"].partition("-shippable")[0] + task["run"]["options"] = [ + "version={}".format(release_config["version"]), + "build-number={}".format(release_config["build_number"]), + "platform={}".format(platform), + ] + if task["extra"]["limit-locales"]: + for locale in all_locales: + task["run"]["options"].append("limit-locale={}".format(locale)) + if "partner" in config.kind and config.params["release_partners"]: + for partner in config.params["release_partners"]: + task["run"]["options"].append("partner={}".format(partner)) + + # The upstream taskIds are stored a special environment variable, because we want to use + # task-reference's to resolve dependencies, but the string handling of MOZHARNESS_OPTIONS + # blocks that. It's space-separated string of ids in the end. + task["worker"]["env"]["UPSTREAM_TASKIDS"] = { + "task-reference": " ".join( + ["<{}>".format(dep) for dep in task["dependencies"]] + ) + } + + # Forward the release type for bouncer product construction + task["worker"]["env"]["RELEASE_TYPE"] = config.params["release_type"] + + yield task diff --git a/taskcluster/taskgraph/transforms/partner_signing.py b/taskcluster/taskgraph/transforms/partner_signing.py new file mode 100644 index 0000000000..7e934c3580 --- /dev/null +++ b/taskcluster/taskgraph/transforms/partner_signing.py @@ -0,0 +1,64 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Transform the signing task into an actual task description. +""" + +from __future__ import absolute_import, print_function, unicode_literals + +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.attributes import copy_attributes_from_dependent_job +from taskgraph.util.partners import get_partner_config_by_kind +from taskgraph.util.signed_artifacts import generate_specifications_of_artifacts_to_sign + +transforms = TransformSequence() + + +@transforms.add +def set_mac_label(config, jobs): + for job in jobs: + dep_job = job["primary-dependency"] + job.setdefault("label", dep_job.label.replace("notarization-part-1", "signing")) + assert job["label"] != dep_job.label, "Unable to determine label for {}".format( + config.kind + ) + yield job + + +@transforms.add +def define_upstream_artifacts(config, jobs): + partner_configs = get_partner_config_by_kind(config, config.kind) + if not partner_configs: + return + + for job in jobs: + dep_job = job["primary-dependency"] + job["depname"] = dep_job.label + job["attributes"] = copy_attributes_from_dependent_job(dep_job) + + repack_ids = job["extra"]["repack_ids"] + artifacts_specifications = generate_specifications_of_artifacts_to_sign( + config, + job, + keep_locale_template=True, + kind=config.kind, + ) + task_type = "build" + if "notarization" in job["depname"]: + task_type = "scriptworker" + job["upstream-artifacts"] = [ + { + "taskId": {"task-reference": "<{}>".format(dep_job.kind)}, + "taskType": task_type, + "paths": [ + path_template.format(locale=repack_id) + for path_template in spec["artifacts"] + for repack_id in repack_ids + ], + "formats": spec["formats"], + } + for spec in artifacts_specifications + ] + + yield job diff --git a/taskcluster/taskgraph/transforms/per_platform_dummy.py b/taskcluster/taskgraph/transforms/per_platform_dummy.py new file mode 100644 index 0000000000..f2732c9f2c --- /dev/null +++ 
b/taskcluster/taskgraph/transforms/per_platform_dummy.py @@ -0,0 +1,34 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Transform the repackage task into an actual task description. +""" + +from __future__ import absolute_import, print_function, unicode_literals + +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.attributes import copy_attributes_from_dependent_job + +transforms = TransformSequence() + + +@transforms.add +def one_task_per_product_and_platform(config, jobs): + unique_products_and_platforms = set() + for job in jobs: + dep_task = job["primary-dependency"] + if "primary-dependency" in job: + del job["primary-dependency"] + product = dep_task.attributes.get("shipping_product") + platform = dep_task.attributes.get("build_platform") + if (product, platform) not in unique_products_and_platforms: + attr_denylist = ("l10n_chunk", "locale", "artifact_map", "artifact_prefix") + attributes = copy_attributes_from_dependent_job( + dep_task, denylist=attr_denylist + ) + attributes.update(job.get("attributes", {})) + job["attributes"] = attributes + job["name"] = "{}-{}".format(product, platform) + yield job + unique_products_and_platforms.add((product, platform)) diff --git a/taskcluster/taskgraph/transforms/perftest.py b/taskcluster/taskgraph/transforms/perftest.py new file mode 100644 index 0000000000..dcbb9c5bcc --- /dev/null +++ b/taskcluster/taskgraph/transforms/perftest.py @@ -0,0 +1,296 @@ +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +This transform passes options from `mach perftest` to the corresponding task. +""" + +from __future__ import absolute_import, print_function, unicode_literals + +from copy import deepcopy +from datetime import date, timedelta +import json + +from six import ensure_text, text_type + +from voluptuous import ( + Any, + Optional, + Extra, +) + +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.schema import optionally_keyed_by, resolve_keyed_by, Schema +from taskgraph.util.treeherder import split_symbol, join_symbol + + +transforms = TransformSequence() + + +perftest_description_schema = Schema( + { + # The test names and the symbols to use for them: [test-symbol, test-path] + Optional("perftest"): [[text_type]], + # Metrics to gather for the test. These will be merged + # with options specified through perftest-perfherder-global + Optional("perftest-metrics"): optionally_keyed_by( + "perftest", + Any( + [text_type], + {text_type: Any(None, {text_type: Any(None, text_type, [text_type])})}, + ), + ), + # Perfherder data options that will be applied to + # all metrics gathered. + Optional("perftest-perfherder-global"): optionally_keyed_by( + "perftest", {text_type: Any(None, text_type, [text_type])} + ), + # Extra options to add to the test's command + Optional("perftest-extra-options"): optionally_keyed_by( + "perftest", [text_type] + ), + # Variants of the test to make based on extra browsertime + # arguments. Expecting: + # [variant-suffix, options-to-use] + # If variant-suffix is `null` then the options will be added + # to the existing task. Otherwise, a new variant is created + # with the given suffix and with its options replaced. 
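+        # Illustrative value (hypothetical browsertime arguments):
+        #   [["http3", "firefox.preference=network.http.http3.enabled:true"],
+        #    [None, "browsertime.iterations=5"]]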
+ Optional("perftest-btime-variants"): optionally_keyed_by( + "perftest", [[Any(None, text_type)]] + ), + # These options will be parsed in the next schemas + Extra: object, + } +) + + +transforms.add_validate(perftest_description_schema) + + +@transforms.add +def split_tests(config, jobs): + for job in jobs: + if job.get("perftest") is None: + yield job + continue + + for test_symbol, test_name in job.pop("perftest"): + job_new = deepcopy(job) + + job_new["perftest"] = test_symbol + job_new["name"] += "-" + test_symbol + job_new["treeherder"]["symbol"] = job["treeherder"]["symbol"].format( + symbol=test_symbol + ) + job_new["run"]["command"] = job["run"]["command"].replace( + "{perftest_testname}", test_name + ) + + yield job_new + + +@transforms.add +def handle_keyed_by_perftest(config, jobs): + fields = ["perftest-metrics", "perftest-extra-options", "perftest-btime-variants"] + for job in jobs: + if job.get("perftest") is None: + yield job + continue + + for field in fields: + resolve_keyed_by(job, field, item_name=job["name"]) + + job.pop("perftest") + yield job + + +@transforms.add +def parse_perftest_metrics(config, jobs): + """Parse the metrics into a dictionary immediately. + + This way we can modify the extraOptions field (and others) entry through the + transforms that come later. The metrics aren't formatted until the end of the + transforms. + """ + for job in jobs: + if job.get("perftest-metrics") is None: + yield job + continue + perftest_metrics = job.pop("perftest-metrics") + + # If perftest metrics is a string, split it up first + if isinstance(perftest_metrics, list): + new_metrics_info = [{"name": metric} for metric in perftest_metrics] + else: + new_metrics_info = [] + for metric, options in perftest_metrics.items(): + entry = {"name": metric} + entry.update(options) + new_metrics_info.append(entry) + + job["perftest-metrics"] = new_metrics_info + yield job + + +@transforms.add +def split_perftest_variants(config, jobs): + for job in jobs: + if job.get("variants") is None: + yield job + continue + + for variant in job.pop("variants"): + job_new = deepcopy(job) + + group, symbol = split_symbol(job_new["treeherder"]["symbol"]) + group += "-" + variant + job_new["treeherder"]["symbol"] = join_symbol(group, symbol) + job_new["name"] += "-" + variant + job_new.setdefault("perftest-perfherder-global", {}).setdefault( + "extraOptions", [] + ).append(variant) + job_new[variant] = True + + yield job_new + + yield job + + +@transforms.add +def split_btime_variants(config, jobs): + for job in jobs: + if job.get("perftest-btime-variants") is None: + yield job + continue + + variants = job.pop("perftest-btime-variants") + if not variants: + yield job + continue + + yield_existing = False + for suffix, options in variants: + if suffix is None: + # Append options to the existing job + job.setdefault("perftest-btime-variants", []).append(options) + yield_existing = True + else: + job_new = deepcopy(job) + group, symbol = split_symbol(job_new["treeherder"]["symbol"]) + symbol += "-" + suffix + job_new["treeherder"]["symbol"] = join_symbol(group, symbol) + job_new["name"] += "-" + suffix + job_new.setdefault("perftest-perfherder-global", {}).setdefault( + "extraOptions", [] + ).append(suffix) + # Replace the existing options with the new ones + job_new["perftest-btime-variants"] = [options] + yield job_new + + # The existing job has been modified so we should also return it + if yield_existing: + yield job + + +@transforms.add +def setup_http3_tests(config, jobs): + for job in jobs: + if 
job.get("http3") is None or not job.pop("http3"): + yield job + continue + job.setdefault("perftest-btime-variants", []).append( + "firefox.preference=network.http.http3.enabled:true" + ) + yield job + + +@transforms.add +def setup_perftest_metrics(config, jobs): + for job in jobs: + if job.get("perftest-metrics") is None: + yield job + continue + perftest_metrics = job.pop("perftest-metrics") + + # Options to apply to each metric + global_options = job.pop("perftest-perfherder-global", {}) + for metric_info in perftest_metrics: + for opt, val in global_options.items(): + if isinstance(val, list) and opt in metric_info: + metric_info[opt].extend(val) + elif not (isinstance(val, list) and len(val) == 0): + metric_info[opt] = val + + quote_escape = '\\"' + if "win" in job.get("platform", ""): + # Escaping is a bit different on windows platforms + quote_escape = '\\\\\\"' + + job["run"]["command"] = job["run"]["command"].replace( + "{perftest_metrics}", + " ".join( + [ + ",".join( + [ + ":".join( + [ + option, + str(value) + .replace(" ", "") + .replace("'", quote_escape), + ] + ) + for option, value in metric_info.items() + ] + ) + for metric_info in perftest_metrics + ] + ), + ) + + yield job + + +@transforms.add +def setup_perftest_browsertime_variants(config, jobs): + for job in jobs: + if job.get("perftest-btime-variants") is None: + yield job + continue + + job["run"]["command"] += " --browsertime-extra-options %s" % ",".join( + [opt.strip() for opt in job.pop("perftest-btime-variants")] + ) + + yield job + + +@transforms.add +def setup_perftest_extra_options(config, jobs): + for job in jobs: + if job.get("perftest-extra-options") is None: + yield job + continue + job["run"]["command"] += " " + " ".join(job.pop("perftest-extra-options")) + yield job + + +@transforms.add +def pass_perftest_options(config, jobs): + for job in jobs: + env = job.setdefault("worker", {}).setdefault("env", {}) + env["PERFTEST_OPTIONS"] = ensure_text( + json.dumps(config.params["try_task_config"].get("perftest-options")) + ) + yield job + + +@transforms.add +def setup_perftest_test_date(config, jobs): + for job in jobs: + if ( + job.get("attributes", {}).get("batch", False) + and "--test-date" not in job["run"]["command"] + ): + yesterday = (date.today() - timedelta(1)).strftime("%Y.%m.%d") + job["run"]["command"] += " --test-date %s" % yesterday + yield job diff --git a/taskcluster/taskgraph/transforms/python_update.py b/taskcluster/taskgraph/transforms/python_update.py new file mode 100644 index 0000000000..a0000e0c78 --- /dev/null +++ b/taskcluster/taskgraph/transforms/python_update.py @@ -0,0 +1,27 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Transform the repo-update task into an actual task description. 
+""" + +from __future__ import absolute_import, print_function, unicode_literals + +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.schema import resolve_keyed_by + +transforms = TransformSequence() + + +@transforms.add +def resolve_keys(config, tasks): + for task in tasks: + env = task["worker"].setdefault("env", {}) + env["BRANCH"] = config.params["project"] + for envvar in env: + resolve_keyed_by(env, envvar, envvar, **config.params) + + for envvar in list(env.keys()): + if not env.get(envvar): + del env[envvar] + yield task diff --git a/taskcluster/taskgraph/transforms/raptor.py b/taskcluster/taskgraph/transforms/raptor.py new file mode 100644 index 0000000000..48300c1ec9 --- /dev/null +++ b/taskcluster/taskgraph/transforms/raptor.py @@ -0,0 +1,359 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +from __future__ import absolute_import, print_function, unicode_literals + +from copy import deepcopy +from six import text_type + +from voluptuous import ( + Any, + Optional, + Required, + Extra, +) + +from taskgraph.transforms.base import TransformSequence +from taskgraph.transforms.tests import test_description_schema +from taskgraph.util.schema import optionally_keyed_by, resolve_keyed_by, Schema +from taskgraph.util.treeherder import split_symbol, join_symbol + +transforms = TransformSequence() + + +raptor_description_schema = Schema( + { + # Raptor specific configs. + Optional("apps"): optionally_keyed_by("test-platform", "subtest", [text_type]), + Optional("raptor-test"): text_type, + Optional("raptor-subtests"): optionally_keyed_by("app", "test-platform", list), + Optional("activity"): optionally_keyed_by("app", text_type), + Optional("binary-path"): optionally_keyed_by("app", text_type), + Optional("pageload"): optionally_keyed_by( + "test-platform", + "app", + Any("cold", "warm", "both"), + ), + # Configs defined in the 'test_description_schema'. + Optional("max-run-time"): optionally_keyed_by( + "app", test_description_schema["max-run-time"] + ), + Optional("run-on-projects"): optionally_keyed_by( + "app", + "pageload", + "test-name", + "raptor-test", + "subtest", + test_description_schema["run-on-projects"], + ), + Optional("webrender-run-on-projects"): optionally_keyed_by( + "app", + "pageload", + "test-name", + "raptor-test", + "subtest", + "test-platform", + test_description_schema["webrender-run-on-projects"], + ), + Optional("variants"): optionally_keyed_by( + "app", "subtest", test_description_schema["variants"] + ), + Optional("target"): optionally_keyed_by( + "app", test_description_schema["target"] + ), + Optional("tier"): optionally_keyed_by( + "app", "raptor-test", "subtest", test_description_schema["tier"] + ), + Optional("test-url-param"): optionally_keyed_by( + "subtest", "test-platform", text_type + ), + Optional("run-visual-metrics"): optionally_keyed_by("app", bool), + Required("test-name"): test_description_schema["test-name"], + Required("test-platform"): test_description_schema["test-platform"], + Required("require-signed-extensions"): test_description_schema[ + "require-signed-extensions" + ], + Required("treeherder-symbol"): test_description_schema["treeherder-symbol"], + # Any unrecognized keys will be validated against the test_description_schema. 
+ Extra: object, + } +) + +transforms.add_validate(raptor_description_schema) + + +@transforms.add +def set_defaults(config, tests): + for test in tests: + test.setdefault("pageload", None) + test.setdefault("run-visual-metrics", False) + yield test + + +@transforms.add +def split_apps(config, tests): + app_symbols = { + "chrome": "ChR", + "chrome-m": "ChR", + "chromium": "Cr", + "fenix": "fenix", + "refbrow": "refbrow", + } + + for test in tests: + apps = test.pop("apps", None) + if not apps: + yield test + continue + + for app in apps: + atest = deepcopy(test) + suffix = "-{}".format(app) + atest["app"] = app + atest["description"] += " on {}".format(app.capitalize()) + + name = atest["test-name"] + if name.endswith("-cold"): + name = atest["test-name"][: -len("-cold")] + suffix + "-cold" + else: + name += suffix + + atest["test-name"] = name + atest["try-name"] = name + + if app in app_symbols: + group, symbol = split_symbol(atest["treeherder-symbol"]) + group += "-{}".format(app_symbols[app]) + atest["treeherder-symbol"] = join_symbol(group, symbol) + + yield atest + + +@transforms.add +def handle_keyed_by_prereqs(config, tests): + """ + Only resolve keys for prerequisite fields here, since these + keyed-by options might have keyed-by fields + as well. + """ + fields = ["raptor-subtests", "pageload"] + for test in tests: + for field in fields: + resolve_keyed_by(test, field, item_name=test["test-name"]) + + # We need to make the split immediately so that we can split + # task configurations by pageload type; the `both` condition is + # the same as not having a by-pageload split. + if test["pageload"] == "both": + test["pageload"] = "cold" + + warmtest = deepcopy(test) + warmtest["pageload"] = "warm" + yield warmtest + + yield test + + +@transforms.add +def split_raptor_subtests(config, tests): + for test in tests: + # For tests that have 'raptor-subtests' listed, we want to create a separate + # test job for every subtest (i.e.
split out each page-load URL into its own job) + subtests = test.pop("raptor-subtests", None) + if not subtests: + yield test + continue + + chunk_number = 0 + + for subtest in subtests: + chunk_number += 1 + + # Create new test job + chunked = deepcopy(test) + chunked["chunk-number"] = chunk_number + chunked["subtest"] = subtest + chunked["subtest-symbol"] = subtest + if isinstance(chunked["subtest"], list): + chunked["subtest"] = subtest[0] + chunked["subtest-symbol"] = subtest[1] + chunked = resolve_keyed_by(chunked, "tier", chunked["subtest"]) + yield chunked + + +@transforms.add +def handle_keyed_by(config, tests): + fields = [ + "test-url-param", + "variants", + "limit-platforms", + "activity", + "binary-path", + "fetches.fetch", + "fission-run-on-projects", + "max-run-time", + "run-on-projects", + "target", + "tier", + "run-visual-metrics", + "webrender-run-on-projects", + ] + for test in tests: + for field in fields: + resolve_keyed_by(test, field, item_name=test["test-name"]) + yield test + + +@transforms.add +def split_pageload(config, tests): + # Split test by pageload type (cold, warm) + for test in tests: + mozharness = test.setdefault("mozharness", {}) + extra_options = mozharness.setdefault("extra-options", []) + + pageload = test.pop("pageload", None) + + if not pageload or "--chimera" in extra_options: + yield test + continue + + if pageload in ("warm", "both"): + # make a deepcopy if 'both', otherwise use the test object itself + warmtest = deepcopy(test) if pageload == "both" else test + + warmtest["warm"] = True + group, symbol = split_symbol(warmtest["treeherder-symbol"]) + symbol += "-w" + warmtest["treeherder-symbol"] = join_symbol(group, symbol) + yield warmtest + + if pageload in ("cold", "both"): + assert "subtest" in test + + test["description"] += " using cold pageload" + test["cold"] = True + test["max-run-time"] = 3000 + test["test-name"] += "-cold" + test["try-name"] += "-cold" + + group, symbol = split_symbol(test["treeherder-symbol"]) + symbol += "-c" + test["treeherder-symbol"] = join_symbol(group, symbol) + yield test + + +@transforms.add +def split_page_load_by_url(config, tests): + for test in tests: + # `chunk-number` and `subtest` only exist when the task had a + # definition for `raptor-subtests` + chunk_number = test.pop("chunk-number", None) + subtest = test.pop("subtest", None) + subtest_symbol = test.pop("subtest-symbol", None) + + if not chunk_number or not subtest: + yield test + continue + + if len(subtest_symbol) > 10 and "ytp" not in subtest_symbol: + raise Exception( + "Treeherder symbol %s is longer than 10 characters! Please use a different symbol."
+ % subtest_symbol + ) + + if test["test-name"].startswith("browsertime-"): + test["raptor-test"] = subtest + + # Remove youtube-playback in the test name to avoid duplication + test["test-name"] = test["test-name"].replace("youtube-playback-", "") + else: + # Use full test name if running on webextension + test["raptor-test"] = "raptor-tp6-" + subtest + "-{}".format(test["app"]) + + # Only run the subtest/single URL + test["test-name"] += "-{}".format(subtest) + test["try-name"] += "-{}".format(subtest) + + # Set treeherder symbol and description + group, symbol = split_symbol(test["treeherder-symbol"]) + + symbol = subtest_symbol + if test.get("cold"): + symbol += "-c" + elif test.pop("warm", False): + symbol += "-w" + + test["treeherder-symbol"] = join_symbol(group, symbol) + test["description"] += " on {}".format(subtest) + + yield test + + +@transforms.add +def add_extra_options(config, tests): + for test in tests: + mozharness = test.setdefault("mozharness", {}) + if test.get("app", "") == "chrome-m": + mozharness["tooltool-downloads"] = "internal" + + extra_options = mozharness.setdefault("extra-options", []) + + # Adding device name if we're on android + test_platform = test["test-platform"] + if test_platform.startswith("android-hw-g5"): + extra_options.append("--device-name=g5") + elif test_platform.startswith("android-hw-p2"): + extra_options.append("--device-name=p2_aarch64") + + if test.pop("run-visual-metrics", False): + extra_options.append("--browsertime-video") + test["attributes"]["run-visual-metrics"] = True + + if test.get("app", "") == "fennec" and test["test-name"].startswith( + "browsertime" + ): + # Bug 1645181: Conditioned profiles cause problems + extra_options.append("--no-conditioned-profile") + + if "app" in test: + extra_options.append("--app={}".format(test.pop("app"))) + + if test.pop("cold", False) is True: + extra_options.append("--cold") + + if "activity" in test: + extra_options.append("--activity={}".format(test.pop("activity"))) + + if "binary-path" in test: + extra_options.append("--binary-path={}".format(test.pop("binary-path"))) + + if "raptor-test" in test: + extra_options.append("--test={}".format(test.pop("raptor-test"))) + + if test["require-signed-extensions"]: + extra_options.append("--is-release-build") + + if "test-url-param" in test: + param = test.pop("test-url-param") + if not param == []: + extra_options.append( + "--test-url-params={}".format(param.replace(" ", "")) + ) + + extra_options.append("--project={}".format(config.params.get("project"))) + + yield test + + +@transforms.add +def apply_tier_optimization(config, tests): + for test in tests: + if test["test-platform"].startswith("android-hw"): + yield test + continue + + test["optimization"] = {"skip-unless-expanded": None} + if test["tier"] > 1: + test["optimization"] = {"skip-unless-backstop": None} + yield test diff --git a/taskcluster/taskgraph/transforms/release.py b/taskcluster/taskgraph/transforms/release.py new file mode 100644 index 0000000000..b91f350127 --- /dev/null +++ b/taskcluster/taskgraph/transforms/release.py @@ -0,0 +1,22 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +""" +Transforms for release tasks +""" + +from __future__ import absolute_import, print_function, unicode_literals + + +def run_on_releases(config, jobs): + """ + Filter out jobs with `run-on-releases` set, and that don't match the + `release_type` paramater. + """ + for job in jobs: + release_type = config.params["release_type"] + run_on_release_types = job.pop("run-on-releases", None) + + if run_on_release_types is None or release_type in run_on_release_types: + yield job diff --git a/taskcluster/taskgraph/transforms/release_beetmover_signed_addons.py b/taskcluster/taskgraph/transforms/release_beetmover_signed_addons.py new file mode 100644 index 0000000000..7ed9a11f33 --- /dev/null +++ b/taskcluster/taskgraph/transforms/release_beetmover_signed_addons.py @@ -0,0 +1,243 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Transform the beetmover task into an actual task description. +""" + +from __future__ import absolute_import, print_function, unicode_literals + +import six +from six import text_type +from taskgraph.loader.single_dep import schema +from taskgraph.transforms.base import TransformSequence +from taskgraph.transforms.beetmover import craft_release_properties +from taskgraph.util.attributes import copy_attributes_from_dependent_job +from taskgraph.util.schema import optionally_keyed_by, resolve_keyed_by +from taskgraph.util.scriptworker import ( + get_beetmover_bucket_scope, + get_beetmover_action_scope, + generate_beetmover_upstream_artifacts, + generate_beetmover_artifact_map, +) +from taskgraph.util.treeherder import inherit_treeherder_from_dep +from taskgraph.transforms.task import task_description_schema +from voluptuous import Required, Optional + +import logging +import copy + +logger = logging.getLogger(__name__) + + +transforms = TransformSequence() + + +beetmover_description_schema = schema.extend( + { + # attributes is used for enabling artifact-map by declarative artifacts + Required("attributes"): {text_type: object}, + # unique label to describe this beetmover task, defaults to {dep.label}-beetmover + Optional("label"): text_type, + # treeherder is allowed here to override any defaults we use for beetmover. See + # taskcluster/taskgraph/transforms/task.py for the schema details, and the + # below transforms for defaults of various values. 
+ Optional("treeherder"): task_description_schema["treeherder"], + Required("description"): text_type, + Required("worker-type"): optionally_keyed_by("release-level", text_type), + Required("run-on-projects"): [], + # locale is passed only for l10n beetmoving + Optional("locale"): text_type, + Optional("shipping-phase"): task_description_schema["shipping-phase"], + Optional("shipping-product"): task_description_schema["shipping-product"], + } +) + + +transforms.add_validate(beetmover_description_schema) + + +@transforms.add +def resolve_keys(config, jobs): + for job in jobs: + for field in ("worker-type", "attributes.artifact_map"): + resolve_keyed_by( + job, + field, + item_name=job["label"], + **{ + "release-level": config.params.release_level(), + "release-type": config.params["release_type"], + "project": config.params["project"], + } + ) + yield job + + +@transforms.add +def make_task_description(config, jobs): + for job in jobs: + dep_job = job["primary-dependency"] + attributes = dep_job.attributes + + treeherder = inherit_treeherder_from_dep(job, dep_job) + treeherder.setdefault( + "symbol", "langpack(BM{})".format(attributes.get("l10n_chunk", "")) + ) + + job["attributes"].update(copy_attributes_from_dependent_job(dep_job)) + job["attributes"]["chunk_locales"] = dep_job.attributes.get( + "chunk_locales", ["en-US"] + ) + + job["description"] = job["description"].format( + locales="/".join(job["attributes"]["chunk_locales"]), + platform=job["attributes"]["build_platform"], + ) + + job["scopes"] = [ + get_beetmover_bucket_scope(config), + get_beetmover_action_scope(config), + ] + + job["dependencies"] = {"langpack-copy": dep_job.label} + + job["run-on-projects"] = job.get( + "run_on_projects", dep_job.attributes["run_on_projects"] + ) + job["treeherder"] = treeherder + job["shipping-phase"] = job.get( + "shipping-phase", dep_job.attributes["shipping_phase"] + ) + job["shipping-product"] = dep_job.attributes["shipping_product"] + + yield job + + +@transforms.add +def make_task_worker(config, jobs): + for job in jobs: + platform = job["attributes"]["build_platform"] + locale = job["attributes"]["chunk_locales"] + + job["worker"] = { + "implementation": "beetmover", + "release-properties": craft_release_properties(config, job), + "upstream-artifacts": generate_beetmover_upstream_artifacts( + config, + job, + platform, + locale, + ), + "artifact-map": generate_beetmover_artifact_map( + config, job, platform=platform, locale=locale + ), + } + + yield job + + +@transforms.add +def strip_unused_data(config, jobs): + for job in jobs: + del job["primary-dependency"] + + yield job + + +@transforms.add +def yield_all_platform_jobs(config, jobs): + # Even though langpacks are now platform independent, we keep beetmoving them at old + # platform-specific locations. 
That's why this transform exists. + # The linux64 and mac-specific ja-JP-mac are beetmoved along with the signing beetmover. + # So while the dependent jobs are linux here, we only yield jobs for other platforms. + for job in jobs: + platforms = ("linux", "macosx64", "win32", "win64") + if "devedition" in job["attributes"]["build_platform"]: + platforms = ("{}-devedition".format(plat) for plat in platforms) + for platform in platforms: + platform_job = copy.deepcopy(job) + if "ja" in platform_job["attributes"]["chunk_locales"] and platform in ( + "macosx64", + "macosx64-devedition", + ): + platform_job = _strip_ja_data_from_linux_job(platform_job) + + platform_job = _change_platform_data(config, platform_job, platform) + + yield platform_job + + +def _strip_ja_data_from_linux_job(platform_job): + # Let's take "ja" out of the description. This locale is in a substring like "aa/bb/cc/dd", where + # "ja" could be any of "aa", "bb", "cc", "dd" + platform_job["description"] = platform_job["description"].replace("ja/", "") + platform_job["description"] = platform_job["description"].replace("/ja", "") + + platform_job["worker"]["upstream-artifacts"] = [ + artifact + for artifact in platform_job["worker"]["upstream-artifacts"] + if artifact["locale"] != "ja" + ] + + return platform_job + + +def _change_platform_in_artifact_map_paths(paths, orig_platform, new_platform): + amended_paths = {} + for artifact, artifact_info in six.iteritems(paths): + amended_artifact_info = { + "checksums_path": artifact_info["checksums_path"].replace( + orig_platform, new_platform + ), + "destinations": [ + d.replace(orig_platform, new_platform) + for d in artifact_info["destinations"] + ], + } + amended_paths[artifact] = amended_artifact_info + + return amended_paths + + +def _change_platform_data(config, platform_job, platform): + orig_platform = "linux64" + if "devedition" in platform: + orig_platform = "linux64-devedition" + platform_job["attributes"]["build_platform"] = platform + platform_job["label"] = platform_job["label"].replace(orig_platform, platform) + platform_job["description"] = platform_job["description"].replace( + orig_platform, platform + ) + platform_job["treeherder"]["platform"] = platform_job["treeherder"][ + "platform" + ].replace(orig_platform, platform) + platform_job["worker"]["release-properties"]["platform"] = platform + + # amend artifactMap entries as well + platform_mapping = { + "linux64": "linux-x86_64", + "linux": "linux-i686", + "macosx64": "mac", + "win32": "win32", + "win64": "win64", + "linux64-devedition": "linux-x86_64", + "linux-devedition": "linux-i686", + "macosx64-devedition": "mac", + "win32-devedition": "win32", + "win64-devedition": "win64", + } + orig_platform = platform_mapping.get(orig_platform, orig_platform) + platform = platform_mapping.get(platform, platform) + platform_job["worker"]["artifact-map"] = [ + { + "locale": entry["locale"], + "taskId": entry["taskId"], + "paths": _change_platform_in_artifact_map_paths( + entry["paths"], orig_platform, platform + ), + } + for entry in platform_job["worker"]["artifact-map"] + ] + + return platform_job diff --git a/taskcluster/taskgraph/transforms/release_deps.py b/taskcluster/taskgraph/transforms/release_deps.py new file mode 100644 index 0000000000..749f251ba4 --- /dev/null +++ b/taskcluster/taskgraph/transforms/release_deps.py @@ -0,0 +1,63 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0.
If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Add dependencies to release tasks. +""" + +from __future__ import absolute_import, print_function, unicode_literals + +from taskgraph.transforms.base import TransformSequence + +PHASES = ["build", "promote", "push", "ship"] + +transforms = TransformSequence() + + +@transforms.add +def add_dependencies(config, jobs): + for job in jobs: + dependencies = {} + # Add any kind_dependencies_tasks with matching product as dependencies + product = job.get("shipping-product") + phase = job.get("shipping-phase") + if product is None: + continue + + required_signoffs = set( + job.setdefault("attributes", {}).get("required_signoffs", []) + ) + for dep_task in config.kind_dependencies_tasks.values(): + # Weed out unwanted tasks. + # XXX we have run-on-projects which specifies the on-push behavior; + # we need another attribute that specifies release promotion, + # possibly which action(s) each task belongs in. + + # We can only depend on tasks in the current or previous phases + dep_phase = dep_task.attributes.get("shipping_phase") + if dep_phase and PHASES.index(dep_phase) > PHASES.index(phase): + continue + + if dep_task.attributes.get("build_platform") and job.get( + "attributes", {} + ).get("build_platform"): + if ( + dep_task.attributes["build_platform"] + != job["attributes"]["build_platform"] + ): + continue + # Add matching product tasks to deps + if ( + dep_task.task.get("shipping-product") == product + or dep_task.attributes.get("shipping_product") == product + ): + dependencies[dep_task.label] = dep_task.label + required_signoffs.update( + dep_task.attributes.get("required_signoffs", []) + ) + + job.setdefault("dependencies", {}).update(dependencies) + if required_signoffs: + job["attributes"]["required_signoffs"] = sorted(required_signoffs) + + yield job diff --git a/taskcluster/taskgraph/transforms/release_flatpak_push.py b/taskcluster/taskgraph/transforms/release_flatpak_push.py new file mode 100644 index 0000000000..1b914fe730 --- /dev/null +++ b/taskcluster/taskgraph/transforms/release_flatpak_push.py @@ -0,0 +1,81 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Transform the release-flatpak-push kind into an actual task description. 
+""" + +from __future__ import absolute_import, print_function, unicode_literals + +from six import text_type +from taskgraph.transforms.base import TransformSequence +from taskgraph.transforms.task import task_description_schema +from taskgraph.util.schema import optionally_keyed_by, resolve_keyed_by, Schema +from taskgraph.util.scriptworker import add_scope_prefix + +from voluptuous import Optional, Required + +push_flatpak_description_schema = Schema( + { + Required("name"): text_type, + Required("job-from"): task_description_schema["job-from"], + Required("dependencies"): task_description_schema["dependencies"], + Required("description"): task_description_schema["description"], + Required("treeherder"): task_description_schema["treeherder"], + Required("run-on-projects"): task_description_schema["run-on-projects"], + Required("worker-type"): optionally_keyed_by("release-level", text_type), + Required("worker"): object, + Optional("scopes"): [text_type], + Required("shipping-phase"): task_description_schema["shipping-phase"], + Required("shipping-product"): task_description_schema["shipping-product"], + Optional("extra"): task_description_schema["extra"], + Optional("attributes"): task_description_schema["attributes"], + } +) + +transforms = TransformSequence() +transforms.add_validate(push_flatpak_description_schema) + + +@transforms.add +def make_task_description(config, jobs): + for job in jobs: + if len(job["dependencies"]) != 1: + raise Exception("Exactly 1 dependency is required") + + job["worker"]["upstream-artifacts"] = generate_upstream_artifacts( + job["dependencies"] + ) + + resolve_keyed_by( + job, + "worker.channel", + item_name=job["name"], + **{"release-type": config.params["release_type"]} + ) + resolve_keyed_by( + job, + "worker-type", + item_name=job["name"], + **{"release-level": config.params.release_level()} + ) + if config.params.release_level() == "production": + job.setdefault("scopes", []).append( + add_scope_prefix( + config, + "flathub:firefox:{}".format(job["worker"]["channel"]), + ) + ) + + yield job + + +def generate_upstream_artifacts(dependencies): + return [ + { + "taskId": {"task-reference": "<{}>".format(task_kind)}, + "taskType": "build", + "paths": ["public/build/target.flatpak.tar.xz"], + } + for task_kind in dependencies.keys() + ] diff --git a/taskcluster/taskgraph/transforms/release_flatpak_repackage.py b/taskcluster/taskgraph/transforms/release_flatpak_repackage.py new file mode 100644 index 0000000000..714a0a21c5 --- /dev/null +++ b/taskcluster/taskgraph/transforms/release_flatpak_repackage.py @@ -0,0 +1,43 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +from __future__ import absolute_import, print_function, unicode_literals + +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.scriptworker import get_release_config +from taskgraph.util.schema import resolve_keyed_by + + +transforms = TransformSequence() + + +@transforms.add +def format(config, tasks): + """Apply format substitution to worker.env and worker.command.""" + + format_params = { + "release_config": get_release_config(config), + "config_params": config.params, + } + + for task in tasks: + format_params["task"] = task + + command = task.get("worker", {}).get("command", []) + task["worker"]["command"] = [x.format(**format_params) for x in command] + + env = task.get("worker", {}).get("env", {}) + for k in env.keys(): + resolve_keyed_by( + env, + k, + "flatpak envs", + **{ + "release-level": config.params.release_level(), + "project": config.params["project"], + } + ) + task["worker"]["env"][k] = env[k].format(**format_params) + + yield task diff --git a/taskcluster/taskgraph/transforms/release_generate_checksums.py b/taskcluster/taskgraph/transforms/release_generate_checksums.py new file mode 100644 index 0000000000..db273bc87e --- /dev/null +++ b/taskcluster/taskgraph/transforms/release_generate_checksums.py @@ -0,0 +1,53 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Transform the checksums task into an actual task description. +""" + +from __future__ import absolute_import, print_function, unicode_literals +import copy + +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.scriptworker import get_release_config +from taskgraph.util.schema import resolve_keyed_by + +import logging + +logger = logging.getLogger(__name__) + +transforms = TransformSequence() + + +@transforms.add +def handle_keyed_by(config, jobs): + """Resolve fields that can be keyed by project, etc.""" + fields = [ + "run.config", + "run.extra-config", + ] + for job in jobs: + job = copy.deepcopy(job) + for field in fields: + resolve_keyed_by( + item=job, + field=field, + item_name=job["name"], + **{"release-level": config.params.release_level()} + ) + yield job + + +@transforms.add +def interpolate(config, jobs): + release_config = get_release_config(config) + for job in jobs: + mh_options = list(job["run"]["options"]) + job["run"]["options"] = [ + option.format( + version=release_config["version"], + build_number=release_config["build_number"], + ) + for option in mh_options + ] + yield job diff --git a/taskcluster/taskgraph/transforms/release_generate_checksums_beetmover.py b/taskcluster/taskgraph/transforms/release_generate_checksums_beetmover.py new file mode 100644 index 0000000000..43c912f901 --- /dev/null +++ b/taskcluster/taskgraph/transforms/release_generate_checksums_beetmover.py @@ -0,0 +1,119 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+""" +Transform the `release-generate-checksums-beetmover` task to also append `build` as dependency +""" +from __future__ import absolute_import, print_function, unicode_literals + +from six import text_type +from taskgraph.loader.single_dep import schema +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.attributes import copy_attributes_from_dependent_job +from taskgraph.util.scriptworker import ( + generate_beetmover_artifact_map, + generate_beetmover_upstream_artifacts, + get_beetmover_bucket_scope, + get_beetmover_action_scope, +) +from taskgraph.transforms.beetmover import craft_release_properties +from taskgraph.transforms.task import task_description_schema +from voluptuous import Optional + +transforms = TransformSequence() + + +release_generate_checksums_beetmover_schema = schema.extend( + { + # unique label to describe this beetmover task, defaults to {dep.label}-beetmover + Optional("label"): text_type, + # treeherder is allowed here to override any defaults we use for beetmover. See + # taskcluster/taskgraph/transforms/task.py for the schema details, and the + # below transforms for defaults of various values. + Optional("treeherder"): task_description_schema["treeherder"], + Optional("shipping-phase"): task_description_schema["shipping-phase"], + Optional("shipping-product"): task_description_schema["shipping-product"], + Optional("attributes"): task_description_schema["attributes"], + } +) + +transforms = TransformSequence() +transforms.add_validate(release_generate_checksums_beetmover_schema) + + +@transforms.add +def make_task_description(config, jobs): + for job in jobs: + dep_job = job["primary-dependency"] + attributes = copy_attributes_from_dependent_job(dep_job) + attributes.update(job.get("attributes", {})) + + treeherder = job.get("treeherder", {}) + treeherder.setdefault("symbol", "BM-SGenChcks") + dep_th_platform = ( + dep_job.task.get("extra", {}) + .get("treeherder", {}) + .get("machine", {}) + .get("platform", "") + ) + treeherder.setdefault("platform", "{}/opt".format(dep_th_platform)) + treeherder.setdefault("tier", 1) + treeherder.setdefault("kind", "build") + + job_template = "{}".format(dep_job.label) + label = job_template.replace("signing", "beetmover") + + description = "Transfer *SUMS and *SUMMARY checksums file to S3." 
+ + # first dependency is the signing task for the *SUMS files + dependencies = {dep_job.kind: dep_job.label} + + if len(dep_job.dependencies) > 1: + raise NotImplementedError( + "Can't beetmove a signing task with multiple dependencies" + ) + # update the dependencies with the dependencies of the signing task + dependencies.update(dep_job.dependencies) + + bucket_scope = get_beetmover_bucket_scope(config) + action_scope = get_beetmover_action_scope(config) + + task = { + "label": label, + "description": description, + "worker-type": "beetmover", + "scopes": [bucket_scope, action_scope], + "dependencies": dependencies, + "attributes": attributes, + "run-on-projects": dep_job.attributes.get("run_on_projects"), + "treeherder": treeherder, + "shipping-phase": "promote", + } + + yield task + + +@transforms.add +def make_task_worker(config, jobs): + for job in jobs: + valid_beetmover_job = len(job["dependencies"]) == 2 and any( + ["signing" in j for j in job["dependencies"]] + ) + if not valid_beetmover_job: + raise NotImplementedError("Beetmover must have two dependencies.") + + platform = job["attributes"]["build_platform"] + worker = { + "implementation": "beetmover", + "release-properties": craft_release_properties(config, job), + "upstream-artifacts": generate_beetmover_upstream_artifacts( + config, job, platform=None, locale=None + ), + "artifact-map": generate_beetmover_artifact_map( + config, job, platform=platform + ), + } + + job["worker"] = worker + + yield job diff --git a/taskcluster/taskgraph/transforms/release_generate_checksums_signing.py b/taskcluster/taskgraph/transforms/release_generate_checksums_signing.py new file mode 100644 index 0000000000..e2b9bb95a8 --- /dev/null +++ b/taskcluster/taskgraph/transforms/release_generate_checksums_signing.py @@ -0,0 +1,88 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Transform the release-generate-checksums-signing task into task description. 
+""" + +from __future__ import absolute_import, print_function, unicode_literals + +from six import text_type +from taskgraph.loader.single_dep import schema +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.attributes import copy_attributes_from_dependent_job +from taskgraph.util.scriptworker import get_signing_cert_scope +from taskgraph.util.taskcluster import get_artifact_path +from taskgraph.transforms.task import task_description_schema +from voluptuous import Optional + +release_generate_checksums_signing_schema = schema.extend( + { + Optional("label"): text_type, + Optional("treeherder"): task_description_schema["treeherder"], + Optional("shipping-product"): task_description_schema["shipping-product"], + Optional("shipping-phase"): task_description_schema["shipping-phase"], + } +) + +transforms = TransformSequence() +transforms.add_validate(release_generate_checksums_signing_schema) + + +@transforms.add +def make_release_generate_checksums_signing_description(config, jobs): + for job in jobs: + dep_job = job["primary-dependency"] + attributes = copy_attributes_from_dependent_job(dep_job) + + treeherder = job.get("treeherder", {}) + treeherder.setdefault("symbol", "SGenChcks") + dep_th_platform = ( + dep_job.task.get("extra", {}) + .get("treeherder", {}) + .get("machine", {}) + .get("platform", "") + ) + treeherder.setdefault("platform", "{}/opt".format(dep_th_platform)) + treeherder.setdefault("tier", 1) + treeherder.setdefault("kind", "build") + + job_template = "{}-{}".format(dep_job.label, "signing") + label = job.get("label", job_template) + description = "Signing of the overall release-related checksums" + + dependencies = {dep_job.kind: dep_job.label} + + upstream_artifacts = [ + { + "taskId": {"task-reference": "<{}>".format(str(dep_job.kind))}, + "taskType": "build", + "paths": [ + get_artifact_path(dep_job, "SHA256SUMS"), + get_artifact_path(dep_job, "SHA512SUMS"), + ], + "formats": ["autograph_gpg"], + } + ] + + signing_cert_scope = get_signing_cert_scope(config) + + task = { + "label": label, + "description": description, + "worker-type": "linux-signing", + "worker": { + "implementation": "scriptworker-signing", + "upstream-artifacts": upstream_artifacts, + "max-run-time": 3600, + }, + "scopes": [ + signing_cert_scope, + ], + "dependencies": dependencies, + "attributes": attributes, + "run-on-projects": dep_job.attributes.get("run_on_projects"), + "treeherder": treeherder, + } + + yield task diff --git a/taskcluster/taskgraph/transforms/release_mark_as_shipped.py b/taskcluster/taskgraph/transforms/release_mark_as_shipped.py new file mode 100644 index 0000000000..4566a9f66d --- /dev/null +++ b/taskcluster/taskgraph/transforms/release_mark_as_shipped.py @@ -0,0 +1,39 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +from __future__ import absolute_import, print_function, unicode_literals + +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.schema import resolve_keyed_by +from taskgraph.util.scriptworker import get_release_config + +transforms = TransformSequence() + + +@transforms.add +def make_task_description(config, jobs): + release_config = get_release_config(config) + for job in jobs: + resolve_keyed_by( + job, + "worker-type", + item_name=job["name"], + **{"release-level": config.params.release_level()} + ) + resolve_keyed_by( + job, + "scopes", + item_name=job["name"], + **{"release-level": config.params.release_level()} + ) + + job["worker"][ + "release-name" + ] = "{product}-{version}-build{build_number}".format( + product=job["shipping-product"].capitalize(), + version=release_config["version"], + build_number=release_config["build_number"], + ) + + yield job diff --git a/taskcluster/taskgraph/transforms/release_notifications.py b/taskcluster/taskgraph/transforms/release_notifications.py new file mode 100644 index 0000000000..623232109e --- /dev/null +++ b/taskcluster/taskgraph/transforms/release_notifications.py @@ -0,0 +1,75 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Add notifications via taskcluster-notify for release tasks +""" + +from __future__ import absolute_import, print_function, unicode_literals + +from string import Formatter +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.scriptworker import get_release_config +from taskgraph.util.schema import resolve_keyed_by + + +transforms = TransformSequence() + + +class TitleCaseFormatter(Formatter): + """Support title formatter for strings""" + + def convert_field(self, value, conversion): + if conversion == "t": + return str(value).title() + super(TitleCaseFormatter, self).convert_field(value, conversion) + return value + + +titleformatter = TitleCaseFormatter() + + +@transforms.add +def add_notifications(config, jobs): + release_config = get_release_config(config) + + for job in jobs: + label = "{}-{}".format(config.kind, job["name"]) + + notifications = job.pop("notifications", None) + if notifications: + resolve_keyed_by( + notifications, "emails", label, project=config.params["project"] + ) + emails = notifications["emails"] + format_kwargs = dict( + task=job, + config=config.__dict__, + release_config=release_config, + ) + subject = titleformatter.format(notifications["subject"], **format_kwargs) + message = titleformatter.format(notifications["message"], **format_kwargs) + emails = [email.format(**format_kwargs) for email in emails] + + # By default, we only send mail on success to avoid messages like 'blah is in the + # candidates dir' when cancelling graphs, dummy job failure, etc + status_types = notifications.get("status-types", ["on-completed"]) + for s in status_types: + job.setdefault("routes", []).extend( + ["notify.email.{}.{}".format(email, s) for email in emails] + ) + + # Customize the email subject to include release name and build number + job.setdefault("extra", {}).update( + { + "notify": { + "email": { + "subject": subject, + } + } + } + ) + if message: + job["extra"]["notify"]["email"]["content"] = message + + yield job diff --git a/taskcluster/taskgraph/transforms/release_sign_and_push_langpacks.py b/taskcluster/taskgraph/transforms/release_sign_and_push_langpacks.py new file mode 100644 
index 0000000000..b297476a79 --- /dev/null +++ b/taskcluster/taskgraph/transforms/release_sign_and_push_langpacks.py @@ -0,0 +1,179 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Transform the release-sign-and-push task into an actual task description. +""" + +from __future__ import absolute_import, print_function, unicode_literals + +from six import text_type +from taskgraph.loader.single_dep import schema +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.attributes import copy_attributes_from_dependent_job +from taskgraph.util.schema import resolve_keyed_by, optionally_keyed_by +from taskgraph.util.treeherder import inherit_treeherder_from_dep +from taskgraph.transforms.task import task_description_schema +from voluptuous import Any, Required + +transforms = TransformSequence() + +langpack_sign_push_description_schema = schema.extend( + { + Required("label"): text_type, + Required("description"): text_type, + Required("worker-type"): optionally_keyed_by("release-level", text_type), + Required("worker"): { + Required("implementation"): "push-addons", + Required("channel"): optionally_keyed_by( + "project", "platform", Any("listed", "unlisted") + ), + Required("upstream-artifacts"): None, # Processed here below + }, + Required("run-on-projects"): [], + Required("scopes"): optionally_keyed_by("release-level", [text_type]), + Required("shipping-phase"): task_description_schema["shipping-phase"], + Required("shipping-product"): task_description_schema["shipping-product"], + } +) + + +@transforms.add +def set_label(config, jobs): + for job in jobs: + label = "push-langpacks-{}".format(job["primary-dependency"].label) + job["label"] = label + + yield job + + +transforms.add_validate(langpack_sign_push_description_schema) + + +@transforms.add +def resolve_keys(config, jobs): + for job in jobs: + resolve_keyed_by( + job, + "worker-type", + item_name=job["label"], + **{"release-level": config.params.release_level()} + ) + resolve_keyed_by( + job, + "scopes", + item_name=job["label"], + **{"release-level": config.params.release_level()} + ) + resolve_keyed_by( + job, + "worker.channel", + item_name=job["label"], + project=config.params["project"], + platform=job["primary-dependency"].attributes["build_platform"], + ) + + yield job + + +@transforms.add +def copy_attributes(config, jobs): + for job in jobs: + dep_job = job["primary-dependency"] + job["attributes"] = copy_attributes_from_dependent_job(dep_job) + job["attributes"]["chunk_locales"] = dep_job.attributes.get( + "chunk_locales", ["en-US"] + ) + + yield job + + +@transforms.add +def filter_out_macos_jobs_but_mac_only_locales(config, jobs): + for job in jobs: + build_platform = job["primary-dependency"].attributes.get("build_platform") + + if build_platform in ("linux64-devedition", "linux64-shippable"): + yield job + elif ( + build_platform in ("macosx64-devedition", "macosx64-shippable") + and "ja-JP-mac" in job["attributes"]["chunk_locales"] + ): + # Other locales of the same job shouldn't be processed + job["attributes"]["chunk_locales"] = ["ja-JP-mac"] + job["label"] = job["label"].replace( + # Guard against a chunk 10 or chunk 1 (latter on try) weird munging + "-{}/".format(job["attributes"]["l10n_chunk"]), + "-ja-JP-mac/", + ) + yield job + + +@transforms.add +def make_task_description(config, jobs): + for job in jobs: + dep_job = 
job["primary-dependency"] + + treeherder = inherit_treeherder_from_dep(job, dep_job) + treeherder.setdefault( + "symbol", "langpack(SnP{})".format(job["attributes"].get("l10n_chunk", "")) + ) + + job["description"] = job["description"].format( + locales="/".join(job["attributes"]["chunk_locales"]), + ) + + job["dependencies"] = {dep_job.kind: dep_job.label} + job["treeherder"] = treeherder + + yield job + + +def generate_upstream_artifacts(upstream_task_ref, locales): + return [ + { + "taskId": {"task-reference": upstream_task_ref}, + "taskType": "build", + "paths": [ + "public/build{locale}/target.langpack.xpi".format( + locale="" if locale == "en-US" else "/" + locale + ) + for locale in locales + ], + } + ] + + +@transforms.add +def make_task_worker(config, jobs): + for job in jobs: + upstream_task_ref = get_upstream_task_ref( + job, expected_kinds=("build", "shippable-l10n") + ) + + job["worker"]["upstream-artifacts"] = generate_upstream_artifacts( + upstream_task_ref, job["attributes"]["chunk_locales"] + ) + + yield job + + +def get_upstream_task_ref(job, expected_kinds): + upstream_tasks = [ + job_kind + for job_kind in job["dependencies"].keys() + if job_kind in expected_kinds + ] + + if len(upstream_tasks) > 1: + raise Exception("Only one dependency expected") + + return "<{}>".format(upstream_tasks[0]) + + +@transforms.add +def strip_unused_data(config, jobs): + for job in jobs: + del job["primary-dependency"] + + yield job diff --git a/taskcluster/taskgraph/transforms/release_snap_push.py b/taskcluster/taskgraph/transforms/release_snap_push.py new file mode 100644 index 0000000000..8dff720ea7 --- /dev/null +++ b/taskcluster/taskgraph/transforms/release_snap_push.py @@ -0,0 +1,84 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Transform the release-snap-push kind into an actual task description. 
+""" + +from __future__ import absolute_import, print_function, unicode_literals + +from six import text_type +from taskgraph.transforms.base import TransformSequence +from taskgraph.transforms.task import task_description_schema +from taskgraph.util.schema import optionally_keyed_by, resolve_keyed_by, Schema +from taskgraph.util.scriptworker import add_scope_prefix + +from voluptuous import Optional, Required + +push_snap_description_schema = Schema( + { + Required("name"): text_type, + Required("job-from"): task_description_schema["job-from"], + Required("dependencies"): task_description_schema["dependencies"], + Required("description"): task_description_schema["description"], + Required("treeherder"): task_description_schema["treeherder"], + Required("run-on-projects"): task_description_schema["run-on-projects"], + Required("worker-type"): optionally_keyed_by("release-level", text_type), + Required("worker"): object, + Optional("scopes"): [text_type], + Required("shipping-phase"): task_description_schema["shipping-phase"], + Required("shipping-product"): task_description_schema["shipping-product"], + Optional("extra"): task_description_schema["extra"], + Optional("attributes"): task_description_schema["attributes"], + } +) + +transforms = TransformSequence() +transforms.add_validate(push_snap_description_schema) + + +@transforms.add +def make_task_description(config, jobs): + for job in jobs: + if len(job["dependencies"]) != 1: + raise Exception("Exactly 1 dependency is required") + + job["worker"]["upstream-artifacts"] = generate_upstream_artifacts( + job["dependencies"] + ) + + resolve_keyed_by( + job, + "worker.channel", + item_name=job["name"], + **{"release-type": config.params["release_type"]} + ) + resolve_keyed_by( + job, + "worker-type", + item_name=job["name"], + **{"release-level": config.params.release_level()} + ) + if config.params.release_level() == "production": + job.setdefault("scopes", []).append( + add_scope_prefix( + config, + "snapcraft:firefox:{}".format( + job["worker"]["channel"].split("/")[0] + ), + ) + ) + + yield job + + +def generate_upstream_artifacts(dependencies): + return [ + { + "taskId": {"task-reference": "<{}>".format(task_kind)}, + # TODO bug 1417960 + "taskType": "build", + "paths": ["public/build/target.snap"], + } + for task_kind in dependencies.keys() + ] diff --git a/taskcluster/taskgraph/transforms/release_snap_repackage.py b/taskcluster/taskgraph/transforms/release_snap_repackage.py new file mode 100644 index 0000000000..78266a89ff --- /dev/null +++ b/taskcluster/taskgraph/transforms/release_snap_repackage.py @@ -0,0 +1,37 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +from __future__ import absolute_import, print_function, unicode_literals + +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.scriptworker import get_release_config +from taskgraph.util.schema import resolve_keyed_by + + +transforms = TransformSequence() + + +@transforms.add +def format(config, tasks): + """Apply format substitution to worker.env and worker.command.""" + + format_params = { + "release_config": get_release_config(config), + "config_params": config.params, + } + + for task in tasks: + format_params["task"] = task + + command = task.get("worker", {}).get("command", []) + task["worker"]["command"] = [x.format(**format_params) for x in command] + + env = task.get("worker", {}).get("env", {}) + for k in env.keys(): + resolve_keyed_by( + env, k, "snap envs", **{"release-level": config.params.release_level()} + ) + task["worker"]["env"][k] = env[k].format(**format_params) + + yield task diff --git a/taskcluster/taskgraph/transforms/release_started.py b/taskcluster/taskgraph/transforms/release_started.py new file mode 100644 index 0000000000..a48de6f124 --- /dev/null +++ b/taskcluster/taskgraph/transforms/release_started.py @@ -0,0 +1,56 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Add notifications via taskcluster-notify for release tasks +""" + +from __future__ import absolute_import, print_function, unicode_literals + +from pipes import quote as shell_quote + +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.schema import resolve_keyed_by + + +transforms = TransformSequence() + + +@transforms.add +def add_notifications(config, jobs): + for job in jobs: + label = "{}-{}".format(config.kind, job["name"]) + + resolve_keyed_by(job, "emails", label, project=config.params["project"]) + emails = [email.format(config=config.__dict__) for email in job.pop("emails")] + + command = [ + "release", + "send-buglist-email", + "--version", + config.params["version"], + "--product", + job["shipping-product"], + "--revision", + config.params["head_rev"], + "--build-number", + str(config.params["build_number"]), + "--repo", + config.params["head_repository"], + ] + for address in emails: + command += ["--address", address] + command += [ + # We wrap this in `{'task-reference': ...}` below + "--task-group-id", + "<decision>", + ] + + job["scopes"] = ["notify:email:{}".format(address) for address in emails] + job["run"] = { + "using": "mach", + "sparse-profile": "mach", + "mach": {"task-reference": " ".join(map(shell_quote, command))}, + } + + yield job diff --git a/taskcluster/taskgraph/transforms/release_version_bump.py b/taskcluster/taskgraph/transforms/release_version_bump.py new file mode 100644 index 0000000000..95c5c033ea --- /dev/null +++ b/taskcluster/taskgraph/transforms/release_version_bump.py @@ -0,0 +1,39 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Transform the update generation task into an actual task description. 
+""" + +from __future__ import absolute_import, print_function, unicode_literals + +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.schema import resolve_keyed_by + +transforms = TransformSequence() + + +@transforms.add +def handle_keyed_by(config, tasks): + """Resolve fields that can be keyed by platform, etc.""" + default_fields = [ + "worker.push", + "worker.bump-files", + "worker-type", + ] + for task in tasks: + fields = default_fields[:] + for additional_field in ("l10n-bump-info", "source-repo", "dontbuild"): + if additional_field in task["worker"]: + fields.append("worker.{}".format(additional_field)) + for field in fields: + resolve_keyed_by( + task, + field, + item_name=task["name"], + **{ + "project": config.params["project"], + "release-type": config.params["release_type"], + } + ) + yield task diff --git a/taskcluster/taskgraph/transforms/repackage.py b/taskcluster/taskgraph/transforms/repackage.py new file mode 100644 index 0000000000..5bedad96c7 --- /dev/null +++ b/taskcluster/taskgraph/transforms/repackage.py @@ -0,0 +1,435 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Transform the repackage task into an actual task description. +""" + +from __future__ import absolute_import, print_function, unicode_literals + +import copy + +from six import text_type +from taskgraph.loader.single_dep import schema +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.attributes import copy_attributes_from_dependent_job +from taskgraph.util.schema import ( + optionally_keyed_by, + resolve_keyed_by, +) +from taskgraph.util.taskcluster import get_artifact_prefix +from taskgraph.util.platforms import archive_format, architecture +from taskgraph.util.workertypes import worker_type_implementation +from taskgraph.transforms.job import job_description_schema +from voluptuous import Required, Optional, Extra + + +packaging_description_schema = schema.extend( + { + # unique label to describe this repackaging task + Optional("label"): text_type, + Optional("worker-type"): text_type, + Optional("worker"): object, + # treeherder is allowed here to override any defaults we use for repackaging. See + # taskcluster/taskgraph/transforms/task.py for the schema details, and the + # below transforms for defaults of various values. + Optional("treeherder"): job_description_schema["treeherder"], + # If a l10n task, the corresponding locale + Optional("locale"): text_type, + # Routes specific to this task, if defined + Optional("routes"): [text_type], + # passed through directly to the job description + Optional("extra"): job_description_schema["extra"], + # passed through to job description + Optional("fetches"): job_description_schema["fetches"], + # Shipping product and phase + Optional("shipping-product"): job_description_schema["shipping-product"], + Optional("shipping-phase"): job_description_schema["shipping-phase"], + Required("package-formats"): optionally_keyed_by( + "build-platform", "release-type", [text_type] + ), + # All l10n jobs use mozharness + Required("mozharness"): { + Extra: object, + # Config files passed to the mozharness script + Required("config"): optionally_keyed_by("build-platform", [text_type]), + # Additional paths to look for mozharness configs in. 
These should be + # relative to the base of the source checkout + Optional("config-paths"): [text_type], + # if true, perform a checkout of a comm-central based branch inside the + # gecko checkout + Optional("comm-checkout"): bool, + }, + } +) + +# The configuration passed to the mozharness repackage script. This defines the +# arguments passed to `mach repackage` +# - `args` is interpolated by mozharness (`{package-name}`, `{installer-tag}`, +# `{stub-installer-tag}`, `{sfx-stub}`, `{wsx-stub}`, `{fetch-dir}`), with values +# from mozharness. +# - `inputs` are passed as long-options, with the filename prefixed by +# `MOZ_FETCH_DIR`. The filename is interpolated by taskgraph +# (`{archive_format}`). +# - `output` is passed to `--output`, with the filename prefixed by the output +# directory. +PACKAGE_FORMATS = { + "mar": { + "args": [ + "mar", + "--arch", + "{architecture}", + "--mar-channel-id", + "{mar-channel-id}", + ], + "inputs": { + "input": "target{archive_format}", + "mar": "mar-tools/mar", + }, + "output": "target.complete.mar", + }, + "msi": { + "args": [ + "msi", + "--wsx", + "{wsx-stub}", + "--version", + "{version_display}", + "--locale", + "{_locale}", + "--arch", + "{architecture}", + "--candle", + "{fetch-dir}/candle.exe", + "--light", + "{fetch-dir}/light.exe", + ], + "inputs": { + "setupexe": "target.installer.exe", + }, + "output": "target.installer.msi", + }, + "dmg": { + "args": ["dmg"], + "inputs": { + "input": "target{archive_format}", + }, + "output": "target.dmg", + }, + "installer": { + "args": [ + "installer", + "--package-name", + "{package-name}", + "--tag", + "{installer-tag}", + "--sfx-stub", + "{sfx-stub}", + ], + "inputs": { + "package": "target{archive_format}", + "setupexe": "setup.exe", + }, + "output": "target.installer.exe", + }, + "installer-stub": { + "args": [ + "installer", + "--tag", + "{stub-installer-tag}", + "--sfx-stub", + "{sfx-stub}", + ], + "inputs": { + "setupexe": "setup-stub.exe", + }, + "output": "target.stub-installer.exe", + }, +} +MOZHARNESS_EXPANSIONS = [ + "package-name", + "installer-tag", + "fetch-dir", + "stub-installer-tag", + "sfx-stub", + "wsx-stub", +] + +transforms = TransformSequence() +transforms.add_validate(packaging_description_schema) + + +@transforms.add +def copy_in_useful_magic(config, jobs): + """Copy attributes from upstream task to be used for keyed configuration.""" + for job in jobs: + dep = job["primary-dependency"] + job["build-platform"] = dep.attributes.get("build_platform") + yield job + + +@transforms.add +def handle_keyed_by(config, jobs): + """Resolve fields that can be keyed by platform, etc.""" + fields = [ + "mozharness.config", + "package-formats", + ] + for job in jobs: + job = copy.deepcopy(job) # don't overwrite dict values here + for field in fields: + resolve_keyed_by( + item=job, + field=field, + item_name="?", + **{ + "release-type": config.params["release_type"], + } + ) + yield job + + +@transforms.add +def make_repackage_description(config, jobs): + for job in jobs: + dep_job = job["primary-dependency"] + + label = job.get("label", dep_job.label.replace("signing-", "repackage-")) + job["label"] = label + + yield job + + +@transforms.add +def make_job_description(config, jobs): + for job in jobs: + dep_job = job["primary-dependency"] + dependencies = {dep_job.kind: dep_job.label} + + attributes = copy_attributes_from_dependent_job(dep_job) + attributes["repackage_type"] = "repackage" + + locale = attributes.get("locale", job.get("locale")) + if locale: + attributes["locale"] = locale + + 
treeherder = job.get("treeherder", {}) + treeherder.setdefault("symbol", "Rpk") + dep_th_platform = dep_job.task.get("extra", {}).get("treeherder-platform") + treeherder.setdefault("platform", dep_th_platform) + treeherder.setdefault("tier", 1) + treeherder.setdefault("kind", "build") + + if config.kind == "repackage-msi": + treeherder["symbol"] = "MSI({})".format(locale or "N") + + signing_task = None + repackage_signing_task = None + for dependency in dependencies.keys(): + if "repackage-signing" in dependency: + repackage_signing_task = dependency + elif "signing" in dependency: + signing_task = dependency + + _fetch_subst_locale = "en-US" + if locale: + _fetch_subst_locale = locale + + worker_type = job["worker-type"] + build_platform = attributes["build_platform"] + + use_stub = attributes.get("stub-installer") + + repackage_config = [] + package_formats = job.get("package-formats") + if use_stub and not repackage_signing_task: + # if repackage_signing_task doesn't exists, generate the stub installer + package_formats += ["installer-stub"] + for format in package_formats: + command = copy.deepcopy(PACKAGE_FORMATS[format]) + substs = { + "archive_format": archive_format(build_platform), + "_locale": _fetch_subst_locale, + "architecture": architecture(build_platform), + "version_display": config.params["version"], + "mar-channel-id": attributes["mar-channel-id"], + } + # Allow us to replace args a well, but specifying things expanded in mozharness + # Without breaking .format and without allowing unknown through + substs.update( + {name: "{{{}}}".format(name) for name in MOZHARNESS_EXPANSIONS} + ) + command["inputs"] = { + name: filename.format(**substs) + for name, filename in command["inputs"].items() + } + command["args"] = [arg.format(**substs) for arg in command["args"]] + if "installer" in format and "aarch64" not in build_platform: + command["args"].append("--use-upx") + repackage_config.append(command) + + run = job.get("mozharness", {}) + run.update( + { + "using": "mozharness", + "script": "mozharness/scripts/repackage.py", + "job-script": "taskcluster/scripts/builder/repackage.sh", + "actions": ["setup", "repackage"], + "extra-config": { + "repackage_config": repackage_config, + }, + } + ) + + worker = job.get("worker", {}) + worker.update( + { + "chain-of-trust": True, + "max-run-time": 7200 if build_platform.startswith("win") else 3600, + # Don't add generic artifact directory. 
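+ # (Outputs are declared explicitly via _generate_task_output_files + # below, so the generic artifact directory adds nothing.)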
+ "skip-artifacts": True, + } + ) + + if locale: + # Make sure we specify the locale-specific upload dir + worker.setdefault("env", {})["LOCALE"] = locale + + worker["artifacts"] = _generate_task_output_files( + dep_job, + worker_type_implementation(config.graph_config, worker_type), + repackage_config=repackage_config, + locale=locale, + ) + + description = ( + "Repackaging for locale '{locale}' for build '" + "{build_platform}/{build_type}'".format( + locale=attributes.get("locale", "en-US"), + build_platform=attributes.get("build_platform"), + build_type=attributes.get("build_type"), + ) + ) + + task = { + "label": job["label"], + "description": description, + "worker-type": worker_type, + "dependencies": dependencies, + "if-dependencies": [dep_job.kind], + "attributes": attributes, + "run-on-projects": dep_job.attributes.get("run_on_projects"), + "optimization": dep_job.optimization, + "treeherder": treeherder, + "routes": job.get("routes", []), + "extra": job.get("extra", {}), + "worker": worker, + "run": run, + "fetches": _generate_download_config( + dep_job, + build_platform, + signing_task, + repackage_signing_task, + locale=locale, + project=config.params["project"], + existing_fetch=job.get("fetches"), + ), + "release-artifacts": [artifact["name"] for artifact in worker["artifacts"]], + } + + if build_platform.startswith("macosx"): + task.setdefault("fetches", {}).setdefault("toolchain", []).extend( + [ + "linux64-libdmg", + "linux64-hfsplus", + "linux64-node", + ] + ) + yield task + + +def _generate_download_config( + task, + build_platform, + signing_task, + repackage_signing_task, + locale=None, + project=None, + existing_fetch=None, +): + locale_path = "{}/".format(locale) if locale else "" + fetch = {} + if existing_fetch: + fetch.update(existing_fetch) + + if repackage_signing_task and build_platform.startswith("win"): + fetch.update( + { + repackage_signing_task: ["{}target.installer.exe".format(locale_path)], + } + ) + elif build_platform.startswith("linux") or build_platform.startswith("macosx"): + fetch.update( + { + signing_task: [ + { + "artifact": "{}target{}".format( + locale_path, archive_format(build_platform) + ), + "extract": False, + }, + ], + } + ) + elif build_platform.startswith("win"): + fetch.update( + { + signing_task: [ + { + "artifact": "{}target.zip".format(locale_path), + "extract": False, + }, + "{}setup.exe".format(locale_path), + ], + } + ) + + use_stub = task.attributes.get("stub-installer") + if use_stub: + fetch[signing_task].append("{}setup-stub.exe".format(locale_path)) + + if fetch: + return fetch + + raise NotImplementedError('Unsupported build_platform: "{}"'.format(build_platform)) + + +def _generate_task_output_files( + task, worker_implementation, repackage_config, locale=None +): + locale_output_path = "{}/".format(locale) if locale else "" + artifact_prefix = get_artifact_prefix(task) + + if worker_implementation == ("docker-worker", "linux"): + local_prefix = "/builds/worker/workspace/" + elif worker_implementation == ("generic-worker", "windows"): + local_prefix = "workspace/" + else: + raise NotImplementedError( + 'Unsupported worker implementation: "{}"'.format(worker_implementation) + ) + + output_files = [] + for config in repackage_config: + output_files.append( + { + "type": "file", + "path": "{}outputs/{}{}".format( + local_prefix, locale_output_path, config["output"] + ), + "name": "{}/{}{}".format( + artifact_prefix, locale_output_path, config["output"] + ), + } + ) + return output_files diff --git 
a/taskcluster/taskgraph/transforms/repackage_l10n.py b/taskcluster/taskgraph/transforms/repackage_l10n.py new file mode 100644 index 0000000000..598228fed7 --- /dev/null +++ b/taskcluster/taskgraph/transforms/repackage_l10n.py @@ -0,0 +1,26 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Transform the repackage task into an actual task description. +""" + +from __future__ import absolute_import, print_function, unicode_literals + +import copy + +from taskgraph.transforms.base import TransformSequence + +transforms = TransformSequence() + + +@transforms.add +def split_locales(config, jobs): + for job in jobs: + dep_job = job["primary-dependency"] + for locale in dep_job.attributes.get("chunk_locales", []): + locale_job = copy.deepcopy(job) # don't overwrite dict values here + treeherder = locale_job.setdefault("treeherder", {}) + treeherder["symbol"] = "L10n-Rpk({})".format(locale) + locale_job["locale"] = locale + yield locale_job diff --git a/taskcluster/taskgraph/transforms/repackage_partner.py b/taskcluster/taskgraph/transforms/repackage_partner.py new file mode 100644 index 0000000000..697e788f95 --- /dev/null +++ b/taskcluster/taskgraph/transforms/repackage_partner.py @@ -0,0 +1,304 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Transform the repackage task into an actual task description. +""" + +from __future__ import absolute_import, print_function, unicode_literals + +import copy + +from six import text_type +from taskgraph.loader.single_dep import schema +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.attributes import copy_attributes_from_dependent_job +from taskgraph.util.schema import ( + optionally_keyed_by, + resolve_keyed_by, +) +from taskgraph.util.taskcluster import get_artifact_prefix +from taskgraph.util.partners import get_partner_config_by_kind +from taskgraph.util.platforms import archive_format, executable_extension +from taskgraph.util.workertypes import worker_type_implementation +from taskgraph.transforms.task import task_description_schema +from taskgraph.transforms.repackage import PACKAGE_FORMATS as PACKAGE_FORMATS_VANILLA +from voluptuous import Required, Optional + + +def _by_platform(arg): + return optionally_keyed_by("build-platform", arg) + + +# When repacking the stub installer we need to pass a zip file and package name to the +# repackage task. This is not needed for vanilla stub but analogous to the full installer. 
+PACKAGE_FORMATS = copy.deepcopy(PACKAGE_FORMATS_VANILLA) +PACKAGE_FORMATS["installer-stub"]["inputs"]["package"] = "target-stub{archive_format}" +PACKAGE_FORMATS["installer-stub"]["args"].extend(["--package-name", "{package-name}"]) + +packaging_description_schema = schema.extend( + { + # unique label to describe this repackaging task + Optional("label"): text_type, + # Routes specific to this task, if defined + Optional("routes"): [text_type], + # passed through directly to the job description + Optional("extra"): task_description_schema["extra"], + # Shipping product and phase + Optional("shipping-product"): task_description_schema["shipping-product"], + Optional("shipping-phase"): task_description_schema["shipping-phase"], + Required("package-formats"): _by_platform([text_type]), + # All l10n jobs use mozharness + Required("mozharness"): { + # Config files passed to the mozharness script + Required("config"): _by_platform([text_type]), + # Additional paths to look for mozharness configs in. These should be + # relative to the base of the source checkout + Optional("config-paths"): [text_type], + # if true, perform a checkout of a comm-central based branch inside the + # gecko checkout + Optional("comm-checkout"): bool, + }, + # Override the default priority for the project + Optional("priority"): task_description_schema["priority"], + } +) + +transforms = TransformSequence() +transforms.add_validate(packaging_description_schema) + + +@transforms.add +def copy_in_useful_magic(config, jobs): + """Copy attributes from upstream task to be used for keyed configuration.""" + for job in jobs: + dep = job["primary-dependency"] + job["build-platform"] = dep.attributes.get("build_platform") + yield job + + +@transforms.add +def handle_keyed_by(config, jobs): + """Resolve fields that can be keyed by platform, etc.""" + fields = [ + "mozharness.config", + "package-formats", + ] + for job in jobs: + job = copy.deepcopy(job) # don't overwrite dict values here + for field in fields: + resolve_keyed_by(item=job, field=field, item_name="?") + yield job + + +@transforms.add +def make_repackage_description(config, jobs): + for job in jobs: + dep_job = job["primary-dependency"] + + label = job.get("label", dep_job.label.replace("signing-", "repackage-")) + job["label"] = label + + yield job + + +@transforms.add +def make_job_description(config, jobs): + for job in jobs: + dep_job = job["primary-dependency"] + attributes = copy_attributes_from_dependent_job(dep_job) + build_platform = attributes["build_platform"] + + if job["build-platform"].startswith("win"): + if dep_job.kind.endswith("signing"): + continue + if job["build-platform"].startswith("macosx"): + if dep_job.kind.endswith("repack"): + continue + dependencies = {dep_job.attributes.get("kind"): dep_job.label} + dependencies.update(dep_job.dependencies) + + signing_task = None + for dependency in dependencies.keys(): + if build_platform.startswith("macosx") and dependency.endswith("signing"): + signing_task = dependency + elif build_platform.startswith("win") and dependency.endswith("repack"): + signing_task = dependency + + attributes["repackage_type"] = "repackage" + + repack_id = job["extra"]["repack_id"] + + partner_config = get_partner_config_by_kind(config, config.kind) + partner, subpartner, _ = repack_id.split("/") + repack_stub_installer = partner_config[partner][subpartner].get( + "repack_stub_installer" + ) + if build_platform.startswith("win32") and repack_stub_installer: + job["package-formats"].append("installer-stub") + + 
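+        # Hedged sketch of the module-level override above: relative to the
+        # vanilla formats, PACKAGE_FORMATS["installer-stub"] for partner repacks
+        # looks roughly like
+        #     {"args": ["installer", "--tag", "{stub-installer-tag}",
+        #               "--sfx-stub", "{sfx-stub}",
+        #               "--package-name", "{package-name}"],
+        #      "inputs": {"setupexe": "setup-stub.exe",
+        #                 "package": "target-stub{archive_format}"},
+        #      "output": "target.stub-installer.exe"}
+        # i.e. unlike the vanilla stub, it is fed a zip and a package name.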
repackage_config = [] + for format in job.get("package-formats"): + command = copy.deepcopy(PACKAGE_FORMATS[format]) + substs = { + "archive_format": archive_format(build_platform), + "executable_extension": executable_extension(build_platform), + } + command["inputs"] = { + name: filename.format(**substs) + for name, filename in command["inputs"].items() + } + repackage_config.append(command) + + run = job.get("mozharness", {}) + run.update( + { + "using": "mozharness", + "script": "mozharness/scripts/repackage.py", + "job-script": "taskcluster/scripts/builder/repackage.sh", + "actions": ["setup", "repackage"], + "extra-config": { + "repackage_config": repackage_config, + }, + } + ) + + worker = { + "chain-of-trust": True, + "max-run-time": 7200 if build_platform.startswith("win") else 3600, + "taskcluster-proxy": True if get_artifact_prefix(dep_job) else False, + "env": { + "REPACK_ID": repack_id, + }, + # Don't add generic artifact directory. + "skip-artifacts": True, + } + + worker_type = "b-linux" + worker["docker-image"] = {"in-tree": "debian8-amd64-build"} + + worker["artifacts"] = _generate_task_output_files( + dep_job, + worker_type_implementation(config.graph_config, worker_type), + repackage_config, + partner=repack_id, + ) + + description = ( + "Repackaging for repack_id '{repack_id}' for build '" + "{build_platform}/{build_type}'".format( + repack_id=job["extra"]["repack_id"], + build_platform=attributes.get("build_platform"), + build_type=attributes.get("build_type"), + ) + ) + + task = { + "label": job["label"], + "description": description, + "worker-type": worker_type, + "dependencies": dependencies, + "attributes": attributes, + "scopes": ["queue:get-artifact:releng/partner/*"], + "run-on-projects": dep_job.attributes.get("run_on_projects"), + "routes": job.get("routes", []), + "extra": job.get("extra", {}), + "worker": worker, + "run": run, + "fetches": _generate_download_config( + dep_job, + build_platform, + signing_task, + partner=repack_id, + project=config.params["project"], + repack_stub_installer=repack_stub_installer, + ), + } + + # we may have reduced the priority for partner jobs, otherwise task.py will set it + if job.get("priority"): + task["priority"] = job["priority"] + if build_platform.startswith("macosx"): + task.setdefault("fetches", {}).setdefault("toolchain", []).extend( + [ + "linux64-libdmg", + "linux64-hfsplus", + "linux64-node", + ] + ) + yield task + + +def _generate_download_config( + task, + build_platform, + signing_task, + partner=None, + project=None, + repack_stub_installer=False, +): + locale_path = "{}/".format(partner) if partner else "" + + if build_platform.startswith("macosx"): + return { + signing_task: [ + { + "artifact": "{}target.tar.gz".format(locale_path), + "extract": False, + }, + ], + } + elif build_platform.startswith("win"): + download_config = [ + { + "artifact": "{}target.zip".format(locale_path), + "extract": False, + }, + "{}setup.exe".format(locale_path), + ] + if build_platform.startswith("win32") and repack_stub_installer: + download_config.extend( + [ + { + "artifact": "{}target-stub.zip".format(locale_path), + "extract": False, + }, + "{}setup-stub.exe".format(locale_path), + ] + ) + return {signing_task: download_config} + + raise NotImplementedError('Unsupported build_platform: "{}"'.format(build_platform)) + + +def _generate_task_output_files(task, worker_implementation, repackage_config, partner): + """We carefully generate an explicit list here, but there's an artifacts directory + too, courtesy of 
generic_worker_add_artifacts() (windows) or docker_worker_add_artifacts(). + Any errors here are likely masked by that. + """ + partner_output_path = "{}/".format(partner) + artifact_prefix = get_artifact_prefix(task) + + if worker_implementation == ("docker-worker", "linux"): + local_prefix = "/builds/worker/workspace/" + elif worker_implementation == ("generic-worker", "windows"): + local_prefix = "workspace/" + else: + raise NotImplementedError( + 'Unsupported worker implementation: "{}"'.format(worker_implementation) + ) + + output_files = [] + for config in repackage_config: + output_files.append( + { + "type": "file", + "path": "{}outputs/{}{}".format( + local_prefix, partner_output_path, config["output"] + ), + "name": "{}/{}{}".format( + artifact_prefix, partner_output_path, config["output"] + ), + } + ) + return output_files diff --git a/taskcluster/taskgraph/transforms/repackage_routes.py b/taskcluster/taskgraph/transforms/repackage_routes.py new file mode 100644 index 0000000000..a6ab3af793 --- /dev/null +++ b/taskcluster/taskgraph/transforms/repackage_routes.py @@ -0,0 +1,36 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Add indexes to repackage kinds +""" + +from __future__ import absolute_import, print_function, unicode_literals + +from taskgraph.transforms.base import TransformSequence + +transforms = TransformSequence() + + +@transforms.add +def add_indexes(config, jobs): + for job in jobs: + repackage_type = job["attributes"].get("repackage_type") + if repackage_type: + build_platform = job["attributes"]["build_platform"] + job_name = "{}-{}".format(build_platform, repackage_type) + product = job.get("index", {}).get("product", "firefox") + index_type = "generic" + if job["attributes"].get("shippable") and job["attributes"].get("locale"): + index_type = "shippable-l10n" + elif job["attributes"].get("shippable"): + index_type = "shippable" + elif job["attributes"].get("locale"): + index_type = "l10n" + job["index"] = { + "job-name": job_name, + "product": product, + "type": index_type, + } + + yield job diff --git a/taskcluster/taskgraph/transforms/repackage_signing.py b/taskcluster/taskgraph/transforms/repackage_signing.py new file mode 100644 index 0000000000..6c9fa64d8d --- /dev/null +++ b/taskcluster/taskgraph/transforms/repackage_signing.py @@ -0,0 +1,123 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Transform the repackage signing task into an actual task description.
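+
+For example (illustrative only): if the dependent repackage task produced
+"target.installer.exe", the transform below emits one upstream-artifacts entry
+shaped roughly like
+
+    {"taskId": {"task-reference": "<repackage>"},
+     "taskType": "repackage",
+     "paths": ["<artifact-path>/target.installer.exe"],
+     "formats": ["autograph_authenticode_stub"]}
+
+with the formats taken from the SIGNING_FORMATS table.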
+""" + +from __future__ import absolute_import, print_function, unicode_literals + +import os + +from six import text_type +from taskgraph.loader.single_dep import schema +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.attributes import copy_attributes_from_dependent_job +from taskgraph.util.scriptworker import get_signing_cert_scope_per_platform +from taskgraph.transforms.task import task_description_schema +from voluptuous import Optional + +repackage_signing_description_schema = schema.extend( + { + Optional("label"): text_type, + Optional("treeherder"): task_description_schema["treeherder"], + Optional("shipping-product"): task_description_schema["shipping-product"], + Optional("shipping-phase"): task_description_schema["shipping-phase"], + } +) + +SIGNING_FORMATS = { + "target.installer.exe": ["autograph_authenticode_stub"], + "target.stub-installer.exe": ["autograph_authenticode_stub"], + "target.installer.msi": ["autograph_authenticode"], +} + +transforms = TransformSequence() +transforms.add_validate(repackage_signing_description_schema) + + +@transforms.add +def make_repackage_signing_description(config, jobs): + for job in jobs: + dep_job = job["primary-dependency"] + attributes = copy_attributes_from_dependent_job(dep_job) + locale = attributes.get("locale", dep_job.attributes.get("locale")) + attributes["repackage_type"] = "repackage-signing" + + treeherder = job.get("treeherder", {}) + treeherder.setdefault("symbol", "rs(B)") + dep_th_platform = dep_job.task.get("extra", {}).get("treeherder-platform") + treeherder.setdefault("platform", dep_th_platform) + treeherder.setdefault( + "tier", dep_job.task.get("extra", {}).get("treeherder", {}).get("tier", 1) + ) + treeherder.setdefault("kind", "build") + + if locale: + treeherder["symbol"] = "rs({})".format(locale) + + if config.kind == "repackage-signing-msi": + treeherder["symbol"] = "MSIs({})".format(locale or "N") + + label = job["label"] + + dep_kind = dep_job.kind + if "l10n" in dep_kind: + dep_kind = "repackage" + + dependencies = {dep_kind: dep_job.label} + + signing_dependencies = dep_job.dependencies + # This is so we get the build task etc in our dependencies to + # have better beetmover support. 
+ dependencies.update( + {k: v for k, v in signing_dependencies.items() if k != "docker-image"} + ) + + description = ( + "Signing of repackaged artifacts for locale '{locale}' for build '" + "{build_platform}/{build_type}'".format( + locale=attributes.get("locale", "en-US"), + build_platform=attributes.get("build_platform"), + build_type=attributes.get("build_type"), + ) + ) + + build_platform = dep_job.attributes.get("build_platform") + is_shippable = dep_job.attributes.get("shippable") + signing_cert_scope = get_signing_cert_scope_per_platform( + build_platform, is_shippable, config + ) + scopes = [signing_cert_scope] + + upstream_artifacts = [] + for artifact in sorted(dep_job.release_artifacts): + basename = os.path.basename(artifact) + if basename in SIGNING_FORMATS: + upstream_artifacts.append( + { + "taskId": {"task-reference": "<{}>".format(dep_kind)}, + "taskType": "repackage", + "paths": [artifact], + "formats": SIGNING_FORMATS[os.path.basename(artifact)], + } + ) + + task = { + "label": label, + "description": description, + "worker-type": "linux-signing", + "worker": { + "implementation": "scriptworker-signing", + "upstream-artifacts": upstream_artifacts, + "max-run-time": 3600, + }, + "scopes": scopes, + "dependencies": dependencies, + "attributes": attributes, + "run-on-projects": dep_job.attributes.get("run_on_projects"), + "optimization": dep_job.optimization, + "treeherder": treeherder, + } + + yield task diff --git a/taskcluster/taskgraph/transforms/repackage_signing_partner.py b/taskcluster/taskgraph/transforms/repackage_signing_partner.py new file mode 100644 index 0000000000..7f93216c4c --- /dev/null +++ b/taskcluster/taskgraph/transforms/repackage_signing_partner.py @@ -0,0 +1,151 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Transform the repackage signing task into an actual task description. 
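+
+A hedged sketch of the per-platform behaviour below (repack_id values are
+hypothetical, e.g. "partner-a/sub-1/win64"): Windows installers are signed
+with ["autograph_authenticode", "autograph_gpg"], while mac "target.dmg" and
+Linux "target.tar.bz2" get ["autograph_gpg"], each artifact path prefixed
+with the repack_id.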
+""" + +from __future__ import absolute_import, print_function, unicode_literals + +from six import text_type +from taskgraph.loader.single_dep import schema +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.attributes import copy_attributes_from_dependent_job +from taskgraph.util.partners import get_partner_config_by_kind +from taskgraph.util.scriptworker import get_signing_cert_scope_per_platform +from taskgraph.util.taskcluster import get_artifact_path +from taskgraph.transforms.task import task_description_schema +from voluptuous import Optional + +transforms = TransformSequence() + +repackage_signing_description_schema = schema.extend( + { + Optional("label"): text_type, + Optional("extra"): object, + Optional("shipping-product"): task_description_schema["shipping-product"], + Optional("shipping-phase"): task_description_schema["shipping-phase"], + Optional("priority"): task_description_schema["priority"], + } +) + +transforms.add_validate(repackage_signing_description_schema) + + +@transforms.add +def make_repackage_signing_description(config, jobs): + for job in jobs: + dep_job = job["primary-dependency"] + repack_id = dep_job.task["extra"]["repack_id"] + attributes = dep_job.attributes + build_platform = dep_job.attributes.get("build_platform") + is_shippable = dep_job.attributes.get("shippable") + + # Mac & windows + label = dep_job.label.replace("repackage-", "repackage-signing-") + # Linux + label = label.replace("chunking-dummy-", "repackage-signing-") + description = "Signing of repackaged artifacts for partner repack id '{repack_id}' for build '" "{build_platform}/{build_type}'".format( # NOQA: E501 + repack_id=repack_id, + build_platform=attributes.get("build_platform"), + build_type=attributes.get("build_type"), + ) + + if "linux" in build_platform: + # we want the repack job, via the dependencies for the the chunking-dummy dep_job + for dep in dep_job.dependencies.values(): + if dep.startswith("release-partner-repack"): + dependencies = {"repack": dep} + break + else: + # we have a genuine repackage job as our parent + dependencies = {"repackage": dep_job.label} + + attributes = copy_attributes_from_dependent_job(dep_job) + attributes["repackage_type"] = "repackage-signing" + + signing_cert_scope = get_signing_cert_scope_per_platform( + build_platform, is_shippable, config + ) + scopes = [signing_cert_scope] + + if "win" in build_platform: + upstream_artifacts = [ + { + "taskId": {"task-reference": "<repackage>"}, + "taskType": "repackage", + "paths": [ + get_artifact_path( + dep_job, "{}/target.installer.exe".format(repack_id) + ), + ], + "formats": ["autograph_authenticode", "autograph_gpg"], + } + ] + + partner_config = get_partner_config_by_kind(config, config.kind) + partner, subpartner, _ = repack_id.split("/") + repack_stub_installer = partner_config[partner][subpartner].get( + "repack_stub_installer" + ) + if build_platform.startswith("win32") and repack_stub_installer: + upstream_artifacts.append( + { + "taskId": {"task-reference": "<repackage>"}, + "taskType": "repackage", + "paths": [ + get_artifact_path( + dep_job, + "{}/target.stub-installer.exe".format(repack_id), + ), + ], + "formats": ["autograph_authenticode", "autograph_gpg"], + } + ) + elif "mac" in build_platform: + upstream_artifacts = [ + { + "taskId": {"task-reference": "<repackage>"}, + "taskType": "repackage", + "paths": [ + get_artifact_path(dep_job, "{}/target.dmg".format(repack_id)), + ], + "formats": ["autograph_gpg"], + } + ] + elif "linux" in build_platform: + 
upstream_artifacts = [ + { + "taskId": {"task-reference": "<repack>"}, + "taskType": "repackage", + "paths": [ + get_artifact_path( + dep_job, "{}/target.tar.bz2".format(repack_id) + ), + ], + "formats": ["autograph_gpg"], + } + ] + + task = { + "label": label, + "description": description, + "worker-type": "linux-signing", + "worker": { + "implementation": "scriptworker-signing", + "upstream-artifacts": upstream_artifacts, + "max-run-time": 3600, + }, + "scopes": scopes, + "dependencies": dependencies, + "attributes": attributes, + "run-on-projects": dep_job.attributes.get("run_on_projects"), + "extra": { + "repack_id": repack_id, + }, + } + # we may have reduced the priority for partner jobs, otherwise task.py will set it + if job.get("priority"): + task["priority"] = job["priority"] + + yield task diff --git a/taskcluster/taskgraph/transforms/repo_update.py b/taskcluster/taskgraph/transforms/repo_update.py new file mode 100644 index 0000000000..a0000e0c78 --- /dev/null +++ b/taskcluster/taskgraph/transforms/repo_update.py @@ -0,0 +1,27 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Transform the repo-update task into an actual task description. +""" + +from __future__ import absolute_import, print_function, unicode_literals + +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.schema import resolve_keyed_by + +transforms = TransformSequence() + + +@transforms.add +def resolve_keys(config, tasks): + for task in tasks: + env = task["worker"].setdefault("env", {}) + env["BRANCH"] = config.params["project"] + for envvar in env: + resolve_keyed_by(env, envvar, envvar, **config.params) + + for envvar in list(env.keys()): + if not env.get(envvar): + del env[envvar] + yield task diff --git a/taskcluster/taskgraph/transforms/reverse_chunk_deps.py b/taskcluster/taskgraph/transforms/reverse_chunk_deps.py new file mode 100644 index 0000000000..0fd8ba38c0 --- /dev/null +++ b/taskcluster/taskgraph/transforms/reverse_chunk_deps.py @@ -0,0 +1,46 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+""" +Adjust dependencies to not exceed MAX_DEPENDENCIES +""" + +from __future__ import absolute_import, print_function, unicode_literals +from copy import deepcopy + +from taskgraph.transforms.base import TransformSequence +import taskgraph.transforms.release_deps as release_deps +from taskgraph.util.treeherder import add_suffix +from taskgraph import MAX_DEPENDENCIES + +transforms = TransformSequence() + + +def yield_job(orig_job, deps, count): + job = deepcopy(orig_job) + job["dependencies"] = deps + job["name"] = "{}-{}".format(orig_job["name"], count) + if "treeherder" in job: + job["treeherder"]["symbol"] = add_suffix( + job["treeherder"]["symbol"], "-{}".format(count) + ) + + return job + + +@transforms.add +def add_dependencies(config, jobs): + for job in release_deps.add_dependencies(config, jobs): + count = 1 + deps = {} + + # sort for deterministic chunking + for dep_label in sorted(job["dependencies"].keys()): + deps[dep_label] = dep_label + if len(deps) == MAX_DEPENDENCIES: + yield yield_job(job, deps, count) + deps = {} + count += 1 + if deps: + yield yield_job(job, deps, count) + count += 1 diff --git a/taskcluster/taskgraph/transforms/run_pgo_profile.py b/taskcluster/taskgraph/transforms/run_pgo_profile.py new file mode 100644 index 0000000000..d21b4033ec --- /dev/null +++ b/taskcluster/taskgraph/transforms/run_pgo_profile.py @@ -0,0 +1,36 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Apply some defaults and minor modifications to the pgo jobs. +""" + +from __future__ import absolute_import, print_function, unicode_literals + +from taskgraph.transforms.base import TransformSequence + +import logging + +logger = logging.getLogger(__name__) + +transforms = TransformSequence() + + +@transforms.add +def run_profile_data(config, jobs): + for job in jobs: + build_platform = job["attributes"].get("build_platform") + instr = "instrumented-build-{}".format(job["name"]) + if "android" in build_platform: + artifact = "geckoview-androidTest.apk" + elif "macosx64" in build_platform: + artifact = "target.dmg" + elif "win" in build_platform: + artifact = "target.zip" + else: + artifact = "target.tar.bz2" + job.setdefault("fetches", {})[instr] = [ + artifact, + "target.crashreporter-symbols.zip", + ] + yield job diff --git a/taskcluster/taskgraph/transforms/scriptworker.py b/taskcluster/taskgraph/transforms/scriptworker.py new file mode 100644 index 0000000000..abc0b86244 --- /dev/null +++ b/taskcluster/taskgraph/transforms/scriptworker.py @@ -0,0 +1,25 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +""" +Transforms for adding appropriate scopes to scriptworker tasks. 
+""" + +from __future__ import absolute_import, print_function, unicode_literals + +from taskgraph.util.scriptworker import ( + get_balrog_action_scope, + get_balrog_server_scope, +) + + +def add_balrog_scopes(config, jobs): + for job in jobs: + worker = job["worker"] + + server_scope = get_balrog_server_scope(config) + action_scope = get_balrog_action_scope(config, action=worker["balrog-action"]) + job["scopes"] = [server_scope, action_scope] + + yield job diff --git a/taskcluster/taskgraph/transforms/scriptworker_canary.py b/taskcluster/taskgraph/transforms/scriptworker_canary.py new file mode 100644 index 0000000000..76734a3fa6 --- /dev/null +++ b/taskcluster/taskgraph/transforms/scriptworker_canary.py @@ -0,0 +1,48 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Build a command to run `mach release push-scriptworker-canaries`. +""" + +from __future__ import absolute_import, print_function, unicode_literals + +from pipes import quote as shell_quote + +from mozrelease.scriptworker_canary import TASK_TYPES + +from taskgraph.transforms.base import TransformSequence + +transforms = TransformSequence() + + +@transforms.add +def build_command(config, jobs): + scriptworkers = config.params["try_task_config"].get( + "scriptworker-canary-workers", [] + ) + # Filter the list of workers to those we have configured a set of canary + # tasks for. + scriptworkers = [ + scriptworker for scriptworker in scriptworkers if scriptworker in TASK_TYPES + ] + + if not scriptworkers: + return + + for job in jobs: + + command = ["release", "push-scriptworker-canary"] + for scriptworker in scriptworkers: + command.extend(["--scriptworker", scriptworker]) + for address in job.pop("addresses"): + command.extend(["--address", address]) + if "ssh-key-secret" in job: + ssh_key_secret = job.pop("ssh-key-secret") + command.extend(["--ssh-key-secret", ssh_key_secret]) + job.setdefault("scopes", []).append("secrets:get:{}".format(ssh_key_secret)) + + job.setdefault("run", {}).update( + {"using": "mach", "mach": " ".join(map(shell_quote, command))} + ) + yield job diff --git a/taskcluster/taskgraph/transforms/shippable_l10n_signing.py b/taskcluster/taskgraph/transforms/shippable_l10n_signing.py new file mode 100644 index 0000000000..6d986d355e --- /dev/null +++ b/taskcluster/taskgraph/transforms/shippable_l10n_signing.py @@ -0,0 +1,87 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Transform the signing task into an actual task description. 
+""" + +from __future__ import absolute_import, print_function, unicode_literals + +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.attributes import copy_attributes_from_dependent_job +from taskgraph.util.signed_artifacts import generate_specifications_of_artifacts_to_sign +from taskgraph.util.treeherder import join_symbol + +transforms = TransformSequence() + + +@transforms.add +def make_signing_description(config, jobs): + for job in jobs: + + dep_job = job["primary-dependency"] + job["depname"] = dep_job.label + + # add the chunk number to the TH symbol + symbol = job.get("treeherder", {}).get("symbol", "Bs") + symbol = "{}{}".format(symbol, dep_job.attributes.get("l10n_chunk")) + group = "L10n" + + job["treeherder"] = { + "symbol": join_symbol(group, symbol), + } + + yield job + + +@transforms.add +def define_upstream_artifacts(config, jobs): + for job in jobs: + dep_job = job["primary-dependency"] + upstream_artifact_task = job.pop("upstream-artifact-task", dep_job) + + job["attributes"] = copy_attributes_from_dependent_job(dep_job) + if dep_job.attributes.get("chunk_locales"): + # Used for l10n attribute passthrough + job["attributes"]["chunk_locales"] = dep_job.attributes.get("chunk_locales") + + locale_specifications = generate_specifications_of_artifacts_to_sign( + config, + job, + keep_locale_template=True, + dep_kind=upstream_artifact_task.kind, + ) + + upstream_artifacts = [] + for spec in locale_specifications: + task_type = "l10n" + if "notarization" in upstream_artifact_task.kind: + task_type = "scriptworker" + upstream_artifacts.append( + { + "taskId": { + "task-reference": "<{}>".format(upstream_artifact_task.kind) + }, + "taskType": task_type, + # Set paths based on artifacts in the specs (above) one per + # locale present in the chunk this is signing stuff for. + # Pass paths through set and sorted() so we get a list back + # and we remove any duplicates (e.g. hardcoded ja-JP-mac langpack) + "paths": sorted( + set( + [ + path_template.format(locale=locale) + for locale in upstream_artifact_task.attributes.get( + "chunk_locales", [] + ) + for path_template in spec["artifacts"] + ] + ) + ), + "formats": spec["formats"], + } + ) + + job["upstream-artifacts"] = upstream_artifacts + + yield job diff --git a/taskcluster/taskgraph/transforms/signing.py b/taskcluster/taskgraph/transforms/signing.py new file mode 100644 index 0000000000..619a7a6cfd --- /dev/null +++ b/taskcluster/taskgraph/transforms/signing.py @@ -0,0 +1,259 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Transform the signing task into an actual task description. 
+""" + +from __future__ import absolute_import, print_function, unicode_literals + +from six import text_type +from taskgraph.loader.single_dep import schema +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.attributes import copy_attributes_from_dependent_job +from taskgraph.util.keyed_by import evaluate_keyed_by +from taskgraph.util.schema import taskref_or_string +from taskgraph.util.scriptworker import get_signing_cert_scope_per_platform +from taskgraph.transforms.task import task_description_schema +from voluptuous import Required, Optional + + +transforms = TransformSequence() + +signing_description_schema = schema.extend( + { + # Artifacts from dep task to sign - Sync with taskgraph/transforms/task.py + # because this is passed directly into the signingscript worker + Required("upstream-artifacts"): [ + { + # taskId of the task with the artifact + Required("taskId"): taskref_or_string, + # type of signing task (for CoT) + Required("taskType"): text_type, + # Paths to the artifacts to sign + Required("paths"): [text_type], + # Signing formats to use on each of the paths + Required("formats"): [text_type], + } + ], + # depname is used in taskref's to identify the taskID of the unsigned things + Required("depname"): text_type, + # attributes for this task + Optional("attributes"): {text_type: object}, + # unique label to describe this signing task, defaults to {dep.label}-signing + Optional("label"): text_type, + # treeherder is allowed here to override any defaults we use for signing. See + # taskcluster/taskgraph/transforms/task.py for the schema details, and the + # below transforms for defaults of various values. + Optional("treeherder"): task_description_schema["treeherder"], + # Routes specific to this task, if defined + Optional("routes"): [text_type], + Optional("shipping-phase"): task_description_schema["shipping-phase"], + Optional("shipping-product"): task_description_schema["shipping-product"], + Optional("dependent-tasks"): {text_type: object}, + # Optional control for how long a task may run (aka maxRunTime) + Optional("max-run-time"): int, + Optional("extra"): {text_type: object}, + # Max number of partner repacks per chunk + Optional("repacks-per-chunk"): int, + # Override the default priority for the project + Optional("priority"): task_description_schema["priority"], + } +) + + +@transforms.add +def set_defaults(config, jobs): + for job in jobs: + if not job.get("depname"): + dep_job = job["primary-dependency"] + job["depname"] = dep_job.kind + yield job + + +transforms.add_validate(signing_description_schema) + + +@transforms.add +def add_entitlements_link(config, jobs): + for job in jobs: + entitlements_path = evaluate_keyed_by( + config.graph_config["mac-notarization"]["mac-entitlements"], + "mac entitlements", + { + "platform": job["primary-dependency"].attributes.get("build_platform"), + "release-level": config.params.release_level(), + }, + ) + if entitlements_path: + job["entitlements-url"] = config.params.file_url( + entitlements_path, + ) + yield job + + +@transforms.add +def make_task_description(config, jobs): + for job in jobs: + dep_job = job["primary-dependency"] + attributes = dep_job.attributes + + signing_format_scopes = [] + formats = set([]) + for artifacts in job["upstream-artifacts"]: + for f in artifacts["formats"]: + formats.add(f) # Add each format only once + + is_shippable = dep_job.attributes.get("shippable", False) + build_platform = dep_job.attributes.get("build_platform") + treeherder = None + if "partner" not 
in config.kind and "eme-free" not in config.kind: + treeherder = job.get("treeherder", {}) + + dep_th_platform = ( + dep_job.task.get("extra", {}) + .get("treeherder", {}) + .get("machine", {}) + .get("platform", "") + ) + build_type = dep_job.attributes.get("build_type") + treeherder.setdefault( + "platform", + _generate_treeherder_platform( + dep_th_platform, build_platform, build_type + ), + ) + + # ccov builds are tier 2, so they cannot have tier 1 tasks + # depending on them. + treeherder.setdefault( + "tier", + dep_job.task.get("extra", {}).get("treeherder", {}).get("tier", 1), + ) + treeherder.setdefault( + "symbol", + _generate_treeherder_symbol( + dep_job.task.get("extra", {}).get("treeherder", {}).get("symbol") + ), + ) + treeherder.setdefault("kind", "build") + + label = job["label"] + description = ( + "Initial Signing for locale '{locale}' for build '" + "{build_platform}/{build_type}'".format( + locale=attributes.get("locale", "en-US"), + build_platform=build_platform, + build_type=attributes.get("build_type"), + ) + ) + + attributes = ( + job["attributes"] + if job.get("attributes") + else copy_attributes_from_dependent_job(dep_job) + ) + attributes["signed"] = True + + if dep_job.attributes.get("chunk_locales"): + # Used for l10n attribute passthrough + attributes["chunk_locales"] = dep_job.attributes.get("chunk_locales") + + signing_cert_scope = get_signing_cert_scope_per_platform( + build_platform, is_shippable, config + ) + worker_type_alias = "linux-signing" if is_shippable else "linux-depsigning" + mac_behavior = None + task = { + "label": label, + "description": description, + "worker": { + "implementation": "scriptworker-signing", + "upstream-artifacts": job["upstream-artifacts"], + "max-run-time": job.get("max-run-time", 3600), + }, + "scopes": [signing_cert_scope] + signing_format_scopes, + "dependencies": _generate_dependencies(job), + "attributes": attributes, + "run-on-projects": dep_job.attributes.get("run_on_projects"), + "optimization": dep_job.optimization, + "routes": job.get("routes", []), + "shipping-product": job.get("shipping-product"), + "shipping-phase": job.get("shipping-phase"), + } + if dep_job.kind in task["dependencies"]: + task["if-dependencies"] = [dep_job.kind] + + if "macosx" in build_platform: + shippable = "false" + if "shippable" in attributes and attributes["shippable"]: + shippable = "true" + mac_behavior = evaluate_keyed_by( + config.graph_config["mac-notarization"]["mac-behavior"], + "mac behavior", + { + "project": config.params["project"], + "shippable": shippable, + }, + ) + if mac_behavior == "mac_notarize": + if "part-1" in config.kind: + mac_behavior = "mac_notarize_part_1" + elif config.kind.endswith("signing"): + mac_behavior = "mac_notarize_part_3" + else: + raise Exception( + "Unknown kind {} for mac_behavior!".format(config.kind) + ) + else: + if "part-1" in config.kind: + continue + task["worker"]["mac-behavior"] = mac_behavior + worker_type_alias_map = { + "linux-depsigning": "mac-depsigning", + "linux-signing": "mac-signing", + } + + assert worker_type_alias in worker_type_alias_map, ( + "Make sure to adjust the below worker_type_alias logic for " + "mac if you change the signing workerType aliases!" 
+ " ({} not found in mapping)".format(worker_type_alias) + ) + worker_type_alias = worker_type_alias_map[worker_type_alias] + if job.get("entitlements-url"): + task["worker"]["entitlements-url"] = job["entitlements-url"] + + task["worker-type"] = worker_type_alias + if treeherder: + task["treeherder"] = treeherder + if job.get("extra"): + task["extra"] = job["extra"] + # we may have reduced the priority for partner jobs, otherwise task.py will set it + if job.get("priority"): + task["priority"] = job["priority"] + + yield task + + +def _generate_dependencies(job): + if isinstance(job.get("dependent-tasks"), dict): + deps = {} + for k, v in job["dependent-tasks"].items(): + deps[k] = v.label + return deps + return {job["depname"]: job["primary-dependency"].label} + + +def _generate_treeherder_platform(dep_th_platform, build_platform, build_type): + if "-pgo" in build_platform: + actual_build_type = "pgo" + elif "-ccov" in build_platform: + actual_build_type = "ccov" + else: + actual_build_type = build_type + return "{}/{}".format(dep_th_platform, actual_build_type) + + +def _generate_treeherder_symbol(build_symbol): + symbol = build_symbol + "s" + return symbol diff --git a/taskcluster/taskgraph/transforms/source_checksums_signing.py b/taskcluster/taskgraph/transforms/source_checksums_signing.py new file mode 100644 index 0000000000..6bdf8beba2 --- /dev/null +++ b/taskcluster/taskgraph/transforms/source_checksums_signing.py @@ -0,0 +1,85 @@ +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Transform the checksums signing task into an actual task description. +""" + +from __future__ import absolute_import, print_function, unicode_literals + +from six import text_type +from taskgraph.loader.single_dep import schema +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.attributes import copy_attributes_from_dependent_job +from taskgraph.util.scriptworker import get_signing_cert_scope +from taskgraph.transforms.task import task_description_schema +from voluptuous import Optional + +checksums_signing_description_schema = schema.extend( + { + Optional("label"): text_type, + Optional("treeherder"): task_description_schema["treeherder"], + Optional("shipping-product"): task_description_schema["shipping-product"], + Optional("shipping-phase"): task_description_schema["shipping-phase"], + } +) + +transforms = TransformSequence() +transforms.add_validate(checksums_signing_description_schema) + + +@transforms.add +def make_checksums_signing_description(config, jobs): + for job in jobs: + dep_job = job["primary-dependency"] + attributes = dep_job.attributes + + treeherder = job.get("treeherder", {}) + treeherder.setdefault("symbol", "css(N)") + dep_th_platform = ( + dep_job.task.get("extra", {}) + .get("treeherder", {}) + .get("machine", {}) + .get("platform", "") + ) + treeherder.setdefault("platform", "{}/opt".format(dep_th_platform)) + treeherder.setdefault("tier", 1) + treeherder.setdefault("kind", "build") + + label = job["label"] + description = "Signing of release-source checksums file" + dependencies = {"beetmover": dep_job.label} + + attributes = copy_attributes_from_dependent_job(dep_job) + + upstream_artifacts = [ + { + "taskId": {"task-reference": "<beetmover>"}, + "taskType": "beetmover", + "paths": [ + "public/target-source.checksums", + ], + "formats": ["autograph_gpg"], + } + ] + + signing_cert_scope = get_signing_cert_scope(config) + + task = { + "label": label, + 
"description": description, + "worker-type": "linux-signing", + "worker": { + "implementation": "scriptworker-signing", + "upstream-artifacts": upstream_artifacts, + "max-run-time": 3600, + }, + "scopes": [ + signing_cert_scope, + ], + "dependencies": dependencies, + "attributes": attributes, + "run-on-projects": dep_job.attributes.get("run_on_projects"), + "treeherder": treeherder, + } + + yield task diff --git a/taskcluster/taskgraph/transforms/source_test.py b/taskcluster/taskgraph/transforms/source_test.py new file mode 100644 index 0000000000..7060b4f025 --- /dev/null +++ b/taskcluster/taskgraph/transforms/source_test.py @@ -0,0 +1,237 @@ +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Source-test jobs can run on multiple platforms. These transforms allow jobs +with either `platform` or a list of `platforms`, and set the appropriate +treeherder configuration and attributes for that platform. +""" + +from __future__ import absolute_import, print_function, unicode_literals + +import copy +import os +from six import text_type + +from taskgraph.transforms.base import TransformSequence +from taskgraph.transforms.job import job_description_schema +from taskgraph.util.attributes import keymatch +from taskgraph.util.schema import ( + resolve_keyed_by, + optionally_keyed_by, + Schema, +) +from taskgraph.util.treeherder import join_symbol, split_symbol + +from voluptuous import ( + Any, + Extra, + Optional, + Required, +) + +source_test_description_schema = Schema( + { + # most fields are passed directly through as job fields, and are not + # repeated here + Extra: object, + # The platform on which this task runs. This will be used to set up attributes + # (for try selection) and treeherder metadata (for display). If given as a list, + # the job will be "split" into multiple tasks, one with each platform. + Required("platform"): Any(text_type, [text_type]), + # Build labels required for the task. If this key is provided it must + # contain a build label for the task platform. + # The task will then depend on a build task, and the installer url will be + # saved to the GECKO_INSTALLER_URL environment variable. + Optional("require-build"): optionally_keyed_by( + "project", {text_type: text_type} + ), + # These fields can be keyed by "platform", and are otherwise identical to + # job descriptions. + Required("worker-type"): optionally_keyed_by( + "platform", job_description_schema["worker-type"] + ), + Required("worker"): optionally_keyed_by( + "platform", job_description_schema["worker"] + ), + Optional("python-version"): [int], + # A list of artifacts to install from 'fetch' tasks. 
+ Optional("fetches"): { + text_type: optionally_keyed_by( + "platform", job_description_schema["fetches"][text_type] + ), + }, + } +) + +transforms = TransformSequence() + +transforms.add_validate(source_test_description_schema) + + +@transforms.add +def set_job_name(config, jobs): + for job in jobs: + if "job-from" in job and job["job-from"] != "kind.yml": + from_name = os.path.splitext(job["job-from"])[0] + job["name"] = "{}-{}".format(from_name, job["name"]) + yield job + + +@transforms.add +def expand_platforms(config, jobs): + for job in jobs: + if isinstance(job["platform"], text_type): + yield job + continue + + for platform in job["platform"]: + pjob = copy.deepcopy(job) + pjob["platform"] = platform + + if "name" in pjob: + pjob["name"] = "{}-{}".format(pjob["name"], platform) + else: + pjob["label"] = "{}-{}".format(pjob["label"], platform) + yield pjob + + +@transforms.add +def split_python(config, jobs): + for job in jobs: + key = "python-version" + versions = job.pop(key, []) + if not versions: + yield job + continue + for version in versions: + group = "py{0}".format(version) + pyjob = copy.deepcopy(job) + if "name" in pyjob: + pyjob["name"] += "-{0}".format(group) + else: + pyjob["label"] += "-{0}".format(group) + symbol = split_symbol(pyjob["treeherder"]["symbol"])[1] + pyjob["treeherder"]["symbol"] = join_symbol(group, symbol) + pyjob["run"][key] = version + yield pyjob + + +@transforms.add +def split_jsshell(config, jobs): + all_shells = {"sm": "Spidermonkey", "v8": "Google V8"} + + for job in jobs: + if not job["name"].startswith("jsshell"): + yield job + continue + + test = job.pop("test") + for shell in job.get("shell", all_shells.keys()): + assert shell in all_shells + + new_job = copy.deepcopy(job) + new_job["name"] = "{}-{}".format(new_job["name"], shell) + new_job["description"] = "{} on {}".format( + new_job["description"], all_shells[shell] + ) + new_job["shell"] = shell + + group = "js-bench-{}".format(shell) + symbol = split_symbol(new_job["treeherder"]["symbol"])[1] + new_job["treeherder"]["symbol"] = join_symbol(group, symbol) + + run = new_job["run"] + run["mach"] = run["mach"].format( + shell=shell, SHELL=shell.upper(), test=test + ) + yield new_job + + +def add_build_dependency(config, job): + """ + Add build dependency to the job and installer_url to env. + """ + key = job["platform"] + build_labels = job.pop("require-build", {}) + matches = keymatch(build_labels, key) + if not matches: + raise Exception( + "No build platform found. " + "Define 'require-build' for {} in the task config.".format(key) + ) + + if len(matches) > 1: + raise Exception("More than one build platform found for '{}'.".format(key)) + + label = matches[0] + deps = job.setdefault("dependencies", {}) + deps.update({"build": label}) + + +@transforms.add +def handle_platform(config, jobs): + """ + Handle the 'platform' property, setting up treeherder context as well as + try-related attributes. + """ + fields = [ + "fetches.toolchain", + "require-build", + "worker-type", + "worker", + ] + + for job in jobs: + platform = job["platform"] + + for field in fields: + resolve_keyed_by( + job, field, item_name=job["name"], project=config.params["project"] + ) + + if "treeherder" in job: + job["treeherder"]["platform"] = platform + + if "require-build" in job: + add_build_dependency(config, job) + + del job["platform"] + yield job + + +@transforms.add +def handle_shell(config, jobs): + """ + Handle the 'shell' property. 
+ """ + fields = [ + "run-on-projects", + "worker.env", + ] + + for job in jobs: + if not job.get("shell"): + yield job + continue + + for field in fields: + resolve_keyed_by(job, field, item_name=job["name"]) + + del job["shell"] + yield job + + +@transforms.add +def set_code_review_env(config, jobs): + """ + Add a CODE_REVIEW environment variable when running in code-review bot mode + """ + is_code_review = config.params["target_tasks_method"] == "codereview" + + for job in jobs: + attrs = job.get("attributes", {}) + if is_code_review and attrs.get("code-review") is True: + env = job["worker"].setdefault("env", {}) + env["CODE_REVIEW"] = "1" + + yield job diff --git a/taskcluster/taskgraph/transforms/spidermonkey.py b/taskcluster/taskgraph/transforms/spidermonkey.py new file mode 100644 index 0000000000..dff29a6526 --- /dev/null +++ b/taskcluster/taskgraph/transforms/spidermonkey.py @@ -0,0 +1,22 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +from __future__ import absolute_import, print_function, unicode_literals + +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.schema import resolve_keyed_by +import copy + +transforms = TransformSequence() + + +@transforms.add +def handle_keyed_by(config, jobs): + """Resolve fields that can be keyed by platform, etc.""" + fields = ["fetches.toolchain"] + for job in jobs: + job = copy.deepcopy(job) # don't overwrite dict values here + for field in fields: + resolve_keyed_by(item=job, field=field, item_name=job["name"]) + + yield job diff --git a/taskcluster/taskgraph/transforms/strip_dependent_task.py b/taskcluster/taskgraph/transforms/strip_dependent_task.py new file mode 100644 index 0000000000..f153a750cc --- /dev/null +++ b/taskcluster/taskgraph/transforms/strip_dependent_task.py @@ -0,0 +1,19 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +FIXME +""" + +from __future__ import absolute_import, print_function, unicode_literals + +from taskgraph.transforms.base import TransformSequence + +transforms = TransformSequence() + + +@transforms.add +def strip_dependent_task(config, jobs): + for job in jobs: + del job["primary-dependency"] + yield job diff --git a/taskcluster/taskgraph/transforms/task.py b/taskcluster/taskgraph/transforms/task.py new file mode 100644 index 0000000000..72e65a9981 --- /dev/null +++ b/taskcluster/taskgraph/transforms/task.py @@ -0,0 +1,2184 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +These transformations take a task description and turn it into a TaskCluster +task definition (along with attributes, label, etc.). The input to these +transformations is generic to any kind of task, but abstracts away some of the +complexities of worker implementations, scopes, and treeherder annotations. 
+""" + +from __future__ import absolute_import, print_function, unicode_literals + +import hashlib +import os +import re +import time +from copy import deepcopy +import six +from six import text_type + +import attr + +from mozbuild.util import memoize +from taskgraph.util.attributes import TRUNK_PROJECTS +from taskgraph.util.hash import hash_path +from taskgraph.util.treeherder import split_symbol +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.keyed_by import evaluate_keyed_by +from taskgraph.util.schema import ( + validate_schema, + Schema, + optionally_keyed_by, + resolve_keyed_by, + taskref_or_string, +) +from taskgraph.optimize.schema import OptimizationSchema +from taskgraph.util.partners import get_partners_to_be_published +from taskgraph.util.scriptworker import ( + BALROG_ACTIONS, + get_release_config, +) +from taskgraph.util.signed_artifacts import get_signed_artifacts +from taskgraph.util.time import value_of +from taskgraph.util.workertypes import worker_type_implementation +from voluptuous import Any, Required, Optional, Extra, Match, All, NotIn +from taskgraph import GECKO, MAX_DEPENDENCIES +from ..util import docker as dockerutil +from ..util.workertypes import get_worker_type + +RUN_TASK = os.path.join(GECKO, "taskcluster", "scripts", "run-task") + +SCCACHE_GCS_PROJECT = "sccache-3" + + +@memoize +def _run_task_suffix(): + """String to append to cache names under control of run-task.""" + return hash_path(RUN_TASK)[0:20] + + +def _compute_geckoview_version(app_version, moz_build_date): + """Geckoview version string that matches geckoview gradle configuration""" + # Must be synchronized with /mobile/android/geckoview/build.gradle computeVersionCode(...) + version_without_milestone = re.sub(r"a[0-9]", "", app_version, 1) + parts = version_without_milestone.split(".") + return "%s.%s.%s" % (parts[0], parts[1], moz_build_date) + + +# A task description is a general description of a TaskCluster task +task_description_schema = Schema( + { + # the label for this task + Required("label"): text_type, + # description of the task (for metadata) + Required("description"): text_type, + # attributes for this task + Optional("attributes"): {text_type: object}, + # relative path (from config.path) to the file task was defined in + Optional("job-from"): text_type, + # dependencies of this task, keyed by name; these are passed through + # verbatim and subject to the interpretation of the Task's get_dependencies + # method. + Optional("dependencies"): { + All( + text_type, + NotIn( + ["self", "decision"], + "Can't use 'self` or 'decision' as depdency names.", + ), + ): object, + }, + # Soft dependencies of this task, as a list of tasks labels + Optional("soft-dependencies"): [text_type], + # Dependencies that must be scheduled in order for this task to run. + Optional("if-dependencies"): [text_type], + Optional("requires"): Any("all-completed", "all-resolved"), + # expiration and deadline times, relative to task creation, with units + # (e.g., "14 days"). Defaults are set based on the project. + Optional("expires-after"): text_type, + Optional("deadline-after"): text_type, + # custom routes for this task; the default treeherder routes will be added + # automatically + Optional("routes"): [text_type], + # custom scopes for this task; any scopes required for the worker will be + # added automatically. 
The following parameters will be substituted in each + # scope: + # {level} -- the scm level of this push + # {project} -- the project of this push + Optional("scopes"): [text_type], + # Tags + Optional("tags"): {text_type: text_type}, + # custom "task.extra" content + Optional("extra"): {text_type: object}, + # treeherder-related information; see + # https://firefox-ci-tc.services.mozilla.com/schemas/taskcluster-treeherder/v1/task-treeherder-config.json + # If not specified, no treeherder extra information or routes will be + # added to the task + Optional("treeherder"): { + # either a bare symbol, or "grp(sym)". + "symbol": text_type, + # the job kind + "kind": Any("build", "test", "other"), + # tier for this task + "tier": int, + # task platform, in the form platform/collection, used to set + # treeherder.machine.platform and treeherder.collection or + # treeherder.labels + "platform": Match("^[A-Za-z0-9_-]{1,50}/[A-Za-z0-9_-]{1,50}$"), + }, + # information for indexing this build so its artifacts can be discovered; + # if omitted, the build will not be indexed. + Optional("index"): { + # the name of the product this build produces + "product": text_type, + # the names to use for this job in the TaskCluster index + "job-name": text_type, + # Type of gecko v2 index to use + "type": Any( + "generic", + "l10n", + "shippable", + "shippable-l10n", + "android-shippable", + "android-shippable-with-multi-l10n", + "shippable-with-multi-l10n", + ), + # The rank that the task will receive in the TaskCluster + # index. A newly completed task supercedes the currently + # indexed task iff it has a higher rank. If unspecified, + # 'by-tier' behavior will be used. + "rank": Any( + # Rank is equal the timestamp of the build_date for tier-1 + # tasks, and zero for non-tier-1. This sorts tier-{2,3} + # builds below tier-1 in the index. + "by-tier", + # Rank is given as an integer constant (e.g. zero to make + # sure a task is last in the index). + int, + # Rank is equal to the timestamp of the build_date. This + # option can be used to override the 'by-tier' behavior + # for non-tier-1 tasks. + "build_date", + ), + }, + # The `run_on_projects` attribute, defaulting to "all". This dictates the + # projects on which this task should be included in the target task set. + # See the attributes documentation for details. + Optional("run-on-projects"): optionally_keyed_by("build-platform", [text_type]), + # Like `run_on_projects`, `run-on-hg-branches` defaults to "all". + Optional("run-on-hg-branches"): optionally_keyed_by("project", [text_type]), + # The `shipping_phase` attribute, defaulting to None. This specifies the + # release promotion phase that this task belongs to. + Required("shipping-phase"): Any( + None, + "build", + "promote", + "push", + "ship", + ), + # The `shipping_product` attribute, defaulting to None. This specifies the + # release promotion product that this task belongs to. + Required("shipping-product"): Any(None, text_type), + # The `always-target` attribute will cause the task to be included in the + # target_task_graph regardless of filtering. Tasks included in this manner + # will be candidates for optimization even when `optimize_target_tasks` is + # False, unless the task was also explicitly chosen by the target_tasks + # method. + Required("always-target"): bool, + # Optimization to perform on this task during the optimization phase. + # Optimizations are defined in taskcluster/taskgraph/optimize.py. 
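+ # An optimization is either None or a one-key dict naming a strategy
+ # and its argument; for instance (the strategy name and values here
+ # are illustrative, not a complete list):
+ #
+ #     optimization:
+ #         skip-unless-schedules: [linux, firefox]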
+ Required("optimization"): OptimizationSchema, + # the provisioner-id/worker-type for the task. The following parameters will + # be substituted in this string: + # {level} -- the scm level of this push + "worker-type": text_type, + # Whether the job should use sccache compiler caching. + Required("use-sccache"): bool, + # Set of artifacts relevant to release tasks + Optional("release-artifacts"): [text_type], + # information specific to the worker implementation that will run this task + Optional("worker"): { + Required("implementation"): text_type, + Extra: object, + }, + # Override the default priority for the project + Optional("priority"): text_type, + } +) + +TC_TREEHERDER_SCHEMA_URL = ( + "https://github.com/taskcluster/taskcluster-treeherder/" + "blob/master/schemas/task-treeherder-config.yml" +) + + +UNKNOWN_GROUP_NAME = ( + "Treeherder group {} (from {}) has no name; " "add it to taskcluster/ci/config.yml" +) + +V2_ROUTE_TEMPLATES = [ + "index.{trust-domain}.v2.{project}.latest.{product}.{job-name}", + "index.{trust-domain}.v2.{project}.pushdate.{build_date_long}.{product}.{job-name}", + "index.{trust-domain}.v2.{project}.pushdate.{build_date}.latest.{product}.{job-name}", + "index.{trust-domain}.v2.{project}.pushlog-id.{pushlog_id}.{product}.{job-name}", + "index.{trust-domain}.v2.{project}.revision.{branch_rev}.{product}.{job-name}", +] + +# {central, inbound, autoland} write to a "trunk" index prefix. This facilitates +# walking of tasks with similar configurations. +V2_TRUNK_ROUTE_TEMPLATES = [ + "index.{trust-domain}.v2.trunk.revision.{branch_rev}.{product}.{job-name}", +] + +V2_SHIPPABLE_TEMPLATES = [ + "index.{trust-domain}.v2.{project}.shippable.latest.{product}.{job-name}", + "index.{trust-domain}.v2.{project}.shippable.{build_date}.revision.{branch_rev}.{product}.{job-name}", # noqa - too long + "index.{trust-domain}.v2.{project}.shippable.{build_date}.latest.{product}.{job-name}", + "index.{trust-domain}.v2.{project}.shippable.revision.{branch_rev}.{product}.{job-name}", +] + +V2_SHIPPABLE_L10N_TEMPLATES = [ + "index.{trust-domain}.v2.{project}.shippable.latest.{product}-l10n.{job-name}.{locale}", + "index.{trust-domain}.v2.{project}.shippable.{build_date}.revision.{branch_rev}.{product}-l10n.{job-name}.{locale}", # noqa - too long + "index.{trust-domain}.v2.{project}.shippable.{build_date}.latest.{product}-l10n.{job-name}.{locale}", # noqa - too long + "index.{trust-domain}.v2.{project}.shippable.revision.{branch_rev}.{product}-l10n.{job-name}.{locale}", # noqa - too long +] + +V2_L10N_TEMPLATES = [ + "index.{trust-domain}.v2.{project}.revision.{branch_rev}.{product}-l10n.{job-name}.{locale}", + "index.{trust-domain}.v2.{project}.pushdate.{build_date_long}.{product}-l10n.{job-name}.{locale}", # noqa - too long + "index.{trust-domain}.v2.{project}.pushlog-id.{pushlog_id}.{product}-l10n.{job-name}.{locale}", + "index.{trust-domain}.v2.{project}.latest.{product}-l10n.{job-name}.{locale}", +] + +# This index is specifically for builds that include geckoview releases, +# so we can hard-code the project to "geckoview" +V2_GECKOVIEW_RELEASE = "index.{trust-domain}.v2.{project}.geckoview-version.{geckoview-version}.{product}.{job-name}" # noqa - too long + +# the roots of the treeherder routes +TREEHERDER_ROUTE_ROOT = "tc-treeherder" + + +def get_branch_rev(config): + return config.params[ + "{}head_rev".format(config.graph_config["project-repo-param-prefix"]) + ] + + +def get_branch_repo(config): + return config.params[ + "{}head_repository".format( + 
config.graph_config["project-repo-param-prefix"], + ) + ] + + +@memoize +def get_default_priority(graph_config, project): + return evaluate_keyed_by( + graph_config["task-priority"], "Graph Config", {"project": project} + ) + + +# define a collection of payload builders, depending on the worker implementation +payload_builders = {} + + +@attr.s(frozen=True) +class PayloadBuilder(object): + schema = attr.ib(type=Schema) + builder = attr.ib() + + +def payload_builder(name, schema): + schema = Schema( + {Required("implementation"): name, Optional("os"): text_type} + ).extend(schema) + + def wrap(func): + payload_builders[name] = PayloadBuilder(schema, func) + return func + + return wrap + + +# define a collection of index builders, depending on the type implementation +index_builders = {} + + +def index_builder(name): + def wrap(func): + index_builders[name] = func + return func + + return wrap + + +UNSUPPORTED_INDEX_PRODUCT_ERROR = """\ +The gecko-v2 product {product} is not in the list of configured products in +`taskcluster/ci/config.yml'. +""" + + +def verify_index(config, index): + product = index["product"] + if product not in config.graph_config["index"]["products"]: + raise Exception(UNSUPPORTED_INDEX_PRODUCT_ERROR.format(product=product)) + + +@payload_builder( + "docker-worker", + schema={ + Required("os"): "linux", + # For tasks that will run in docker-worker, this is the + # name of the docker image or in-tree docker image to run the task in. If + # in-tree, then a dependency will be created automatically. This is + # generally `desktop-test`, or an image that acts an awful lot like it. + Required("docker-image"): Any( + # a raw Docker image path (repo/image:tag) + text_type, + # an in-tree generated docker image (from `taskcluster/docker/<name>`) + {"in-tree": text_type}, + # an indexed docker image + {"indexed": text_type}, + ), + # worker features that should be enabled + Required("chain-of-trust"): bool, + Required("taskcluster-proxy"): bool, + Required("allow-ptrace"): bool, + Required("loopback-video"): bool, + Required("loopback-audio"): bool, + Required("docker-in-docker"): bool, # (aka 'dind') + Required("privileged"): bool, + # Paths to Docker volumes. + # + # For in-tree Docker images, volumes can be parsed from Dockerfile. + # This only works for the Dockerfile itself: if a volume is defined in + # a base image, it will need to be declared here. Out-of-tree Docker + # images will also require explicit volume annotation. + # + # Caches are often mounted to the same path as Docker volumes. In this + # case, they take precedence over a Docker volume. But a volume still + # needs to be declared for the path. + Optional("volumes"): [text_type], + Optional( + "required-volumes", + description=( + "Paths that are required to be volumes for performance reasons. " + "For in-tree images, these paths will be checked to verify that they " + "are defined as volumes." + ), + ): [text_type], + # caches to set up for the task + Optional("caches"): [ + { + # only one type is supported by any of the workers right now + "type": "persistent", + # name of the cache, allowing re-use by subsequent tasks naming the + # same cache + "name": text_type, + # location in the task image where the cache will be mounted + "mount-point": text_type, + # Whether the cache is not used in untrusted environments + # (like the Try repo). 
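+ # An illustrative cache entry, as a kind might declare it (the name
+ # and mount point are examples only):
+ #
+ #     {
+ #         "type": "persistent",
+ #         "name": "checkouts",
+ #         "mount-point": "/builds/worker/checkouts",
+ #         "skip-untrusted": True,
+ #     }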
+ Optional("skip-untrusted"): bool, + } + ], + # artifacts to extract from the task image after completion + Optional("artifacts"): [ + { + # type of artifact -- simple file, or recursive directory + "type": Any("file", "directory"), + # task image path from which to read artifact + "path": text_type, + # name of the produced artifact (root of the names for + # type=directory) + "name": text_type, + } + ], + # environment variables + Required("env"): {text_type: taskref_or_string}, + # the command to run; if not given, docker-worker will default to the + # command in the docker image + Optional("command"): [taskref_or_string], + # the maximum time to run, in seconds + Required("max-run-time"): int, + # the exit status code(s) that indicates the task should be retried + Optional("retry-exit-status"): [int], + # the exit status code(s) that indicates the caches used by the task + # should be purged + Optional("purge-caches-exit-status"): [int], + # Wether any artifacts are assigned to this worker + Optional("skip-artifacts"): bool, + }, +) +def build_docker_worker_payload(config, task, task_def): + worker = task["worker"] + level = int(config.params["level"]) + + image = worker["docker-image"] + if isinstance(image, dict): + if "in-tree" in image: + name = image["in-tree"] + docker_image_task = "docker-image-" + image["in-tree"] + task.setdefault("dependencies", {})["docker-image"] = docker_image_task + + image = { + "path": "public/image.tar.zst", + "taskId": {"task-reference": "<docker-image>"}, + "type": "task-image", + } + + # Find VOLUME in Dockerfile. + volumes = dockerutil.parse_volumes(name) + for v in sorted(volumes): + if v in worker["volumes"]: + raise Exception( + "volume %s already defined; " + "if it is defined in a Dockerfile, " + "it does not need to be specified in the " + "worker definition" % v + ) + + worker["volumes"].append(v) + + elif "indexed" in image: + image = { + "path": "public/image.tar.zst", + "namespace": image["indexed"], + "type": "indexed-image", + } + else: + raise Exception("unknown docker image type") + + features = {} + + if worker.get("taskcluster-proxy"): + features["taskclusterProxy"] = True + + if worker.get("allow-ptrace"): + features["allowPtrace"] = True + task_def["scopes"].append("docker-worker:feature:allowPtrace") + + if worker.get("chain-of-trust"): + features["chainOfTrust"] = True + + if worker.get("docker-in-docker"): + features["dind"] = True + + if task.get("use-sccache"): + features["taskclusterProxy"] = True + task_def["scopes"].append( + "assume:project:taskcluster:{trust_domain}:level-{level}-sccache-buckets".format( + trust_domain=config.graph_config["trust-domain"], + level=config.params["level"], + ) + ) + worker["env"]["USE_SCCACHE"] = "1" + worker["env"]["SCCACHE_GCS_PROJECT"] = SCCACHE_GCS_PROJECT + # Disable sccache idle shutdown. 
+ worker["env"]["SCCACHE_IDLE_TIMEOUT"] = "0" + else: + worker["env"]["SCCACHE_DISABLE"] = "1" + + capabilities = {} + + for lo in "audio", "video": + if worker.get("loopback-" + lo): + capitalized = "loopback" + lo.capitalize() + devices = capabilities.setdefault("devices", {}) + devices[capitalized] = True + task_def["scopes"].append("docker-worker:capability:device:" + capitalized) + + if worker.get("privileged"): + capabilities["privileged"] = True + task_def["scopes"].append("docker-worker:capability:privileged") + + task_def["payload"] = payload = { + "image": image, + "env": worker["env"], + } + if "command" in worker: + payload["command"] = worker["command"] + + if "max-run-time" in worker: + payload["maxRunTime"] = worker["max-run-time"] + + run_task = payload.get("command", [""])[0].endswith("run-task") + + # run-task exits EXIT_PURGE_CACHES if there is a problem with caches. + # Automatically retry the tasks and purge caches if we see this exit + # code. + # TODO move this closer to code adding run-task once bug 1469697 is + # addressed. + if run_task: + worker.setdefault("retry-exit-status", []).append(72) + worker.setdefault("purge-caches-exit-status", []).append(72) + + payload["onExitStatus"] = {} + if "retry-exit-status" in worker: + payload["onExitStatus"]["retry"] = worker["retry-exit-status"] + if "purge-caches-exit-status" in worker: + payload["onExitStatus"]["purgeCaches"] = worker["purge-caches-exit-status"] + + if "artifacts" in worker: + artifacts = {} + for artifact in worker["artifacts"]: + artifacts[artifact["name"]] = { + "path": artifact["path"], + "type": artifact["type"], + "expires": task_def["expires"], # always expire with the task + } + payload["artifacts"] = artifacts + + if isinstance(worker.get("docker-image"), text_type): + out_of_tree_image = worker["docker-image"] + else: + out_of_tree_image = None + image = worker.get("docker-image", {}).get("in-tree") + + if "caches" in worker: + caches = {} + + # run-task knows how to validate caches. + # + # To help ensure new run-task features and bug fixes don't interfere + # with existing caches, we seed the hash of run-task into cache names. + # So, any time run-task changes, we should get a fresh set of caches. + # This means run-task can make changes to cache interaction at any time + # without regards for backwards or future compatibility. + # + # But this mechanism only works for in-tree Docker images that are built + # with the current run-task! For out-of-tree Docker images, we have no + # way of knowing their content of run-task. So, in addition to varying + # cache names by the contents of run-task, we also take the Docker image + # name into consideration. This means that different Docker images will + # never share the same cache. This is a bit unfortunate. But it is the + # safest thing to do. Fortunately, most images are defined in-tree. + # + # For out-of-tree Docker images, we don't strictly need to incorporate + # the run-task content into the cache name. However, doing so preserves + # the mechanism whereby changing run-task results in new caches + # everywhere. + + # As an additional mechanism to force the use of different caches, the + # string literal in the variable below can be changed. This is + # preferred to changing run-task because it doesn't require images + # to be rebuilt. 
+ cache_version = "v3" + + if run_task: + suffix = "{}-{}".format(cache_version, _run_task_suffix()) + + if out_of_tree_image: + name_hash = hashlib.sha256( + six.ensure_binary(out_of_tree_image) + ).hexdigest() + suffix += name_hash[0:12] + + else: + suffix = cache_version + + skip_untrusted = config.params.is_try() or level == 1 + + for cache in worker["caches"]: + # Some caches aren't enabled in environments where we can't + # guarantee certain behavior. Filter those out. + if cache.get("skip-untrusted") and skip_untrusted: + continue + + name = "{trust_domain}-level-{level}-{name}-{suffix}".format( + trust_domain=config.graph_config["trust-domain"], + level=config.params["level"], + name=cache["name"], + suffix=suffix, + ) + + caches[name] = cache["mount-point"] + task_def["scopes"].append("docker-worker:cache:%s" % name) + + # Assertion: only run-task is interested in this. + if run_task: + payload["env"]["TASKCLUSTER_CACHES"] = ";".join(sorted(caches.values())) + + payload["cache"] = caches + + # And send down volumes information to run-task as well. + if run_task and worker.get("volumes"): + payload["env"]["TASKCLUSTER_VOLUMES"] = ";".join( + [six.ensure_text(s) for s in sorted(worker["volumes"])] + ) + + if payload.get("cache") and skip_untrusted: + payload["env"]["TASKCLUSTER_UNTRUSTED_CACHES"] = "1" + + if features: + payload["features"] = features + if capabilities: + payload["capabilities"] = capabilities + + check_caches_are_volumes(task) + check_required_volumes(task) + + +@payload_builder( + "generic-worker", + schema={ + Required("os"): Any("windows", "macosx", "linux", "linux-bitbar"), + # see http://schemas.taskcluster.net/generic-worker/v1/payload.json + # and https://docs.taskcluster.net/reference/workers/generic-worker/payload + # command is a list of commands to run, sequentially + # on Windows, each command is a string, on OS X and Linux, each command is + # a string array + Required("command"): Any( + [taskref_or_string], [[taskref_or_string]] # Windows # Linux / OS X + ), + # artifacts to extract from the task image after completion; note that artifacts + # for the generic worker cannot have names + Optional("artifacts"): [ + { + # type of artifact -- simple file, or recursive directory + "type": Any("file", "directory"), + # filesystem path from which to read artifact + "path": text_type, + # if not specified, path is used for artifact name + Optional("name"): text_type, + } + ], + # Directories and/or files to be mounted. + # The actual allowed combinations are stricter than the model below, + # but this provides a simple starting point. + # See https://docs.taskcluster.net/reference/workers/generic-worker/payload + Optional("mounts"): [ + { + # A unique name for the cache volume, implies writable cache directory + # (otherwise mount is a read-only file or directory). + Optional("cache-name"): text_type, + # Optional content for pre-loading cache, or mandatory content for + # read-only file or directory. Pre-loaded content can come from either + # a task artifact or from a URL. + Optional("content"): { + # *** Either (artifact and task-id) or url must be specified. *** + # Artifact name that contains the content. + Optional("artifact"): text_type, + # Task ID that has the artifact that contains the content. + Optional("task-id"): taskref_or_string, + # URL that supplies the content in response to an unauthenticated + # GET request. + Optional("url"): text_type, + }, + # *** Either file or directory must be specified. 
***
+ # If mounting a cache or read-only directory, the filesystem location of
+ # the directory should be specified as a relative path to the task
+ # directory here.
+ Optional("directory"): text_type,
+ # If mounting a file, specify the relative path within the task
+ # directory to mount the file (the file will be read only).
+ Optional("file"): text_type,
+ # Required if and only if `content` is specified and mounting a
+ # directory (not a file). This should be the archive format of the
+ # content (either pre-loaded cache or read-only directory).
+ Optional("format"): Any("rar", "tar.bz2", "tar.gz", "zip"),
+ }
+ ],
+ # environment variables
+ Required("env"): {text_type: taskref_or_string},
+ # the maximum time to run, in seconds
+ Required("max-run-time"): int,
+ # os user groups for test task workers
+ Optional("os-groups"): [text_type],
+ # feature for test tasks to run as administrator
+ Optional("run-as-administrator"): bool,
+ # optional features
+ Required("chain-of-trust"): bool,
+ Optional("taskcluster-proxy"): bool,
+ # the exit status code(s) that indicate the task should be retried
+ Optional("retry-exit-status"): [int],
+ # Whether any artifacts are assigned to this worker
+ Optional("skip-artifacts"): bool,
+ },
+)
+def build_generic_worker_payload(config, task, task_def):
+ worker = task["worker"]
+ features = {}
+
+ task_def["payload"] = {
+ "command": worker["command"],
+ "maxRunTime": worker["max-run-time"],
+ }
+
+ if worker["os"] == "windows":
+ task_def["payload"]["onExitStatus"] = {
+ "retry": [
+ # These codes (on windows) indicate a process interruption,
+ # rather than a task run failure. See bug 1544403.
+ 1073807364, # process force-killed due to system shutdown
+ 3221225786, # sigint (any interrupt)
+ ]
+ }
+ if "retry-exit-status" in worker:
+ task_def["payload"].setdefault("onExitStatus", {}).setdefault(
+ "retry", []
+ ).extend(worker["retry-exit-status"])
+ if worker["os"] == "linux-bitbar":
+ task_def["payload"].setdefault("onExitStatus", {}).setdefault("retry", [])
+ # exit code 4 is used to indicate an intermittent android device error
+ if 4 not in task_def["payload"]["onExitStatus"]["retry"]:
+ task_def["payload"]["onExitStatus"]["retry"].extend([4])
+
+ env = worker.get("env", {})
+
+ if task.get("use-sccache"):
+ features["taskclusterProxy"] = True
+ task_def["scopes"].append(
+ "assume:project:taskcluster:{trust_domain}:level-{level}-sccache-buckets".format(
+ trust_domain=config.graph_config["trust-domain"],
+ level=config.params["level"],
+ )
+ )
+ env["USE_SCCACHE"] = "1"
+ worker["env"]["SCCACHE_GCS_PROJECT"] = SCCACHE_GCS_PROJECT
+ # Disable sccache idle shutdown.
+ env["SCCACHE_IDLE_TIMEOUT"] = "0"
+ else:
+ env["SCCACHE_DISABLE"] = "1"
+
+ if env:
+ task_def["payload"]["env"] = env
+
+ artifacts = []
+
+ for artifact in worker.get("artifacts", []):
+ a = {
+ "path": artifact["path"],
+ "type": artifact["type"],
+ }
+ if "name" in artifact:
+ a["name"] = artifact["name"]
+ artifacts.append(a)
+
+ if artifacts:
+ task_def["payload"]["artifacts"] = artifacts
+
+ # Need to copy over mounts, but rename keys to respect naming convention
+ # * 'cache-name' -> 'cacheName'
+ # * 'task-id' -> 'taskId'
+ # All other key names are already suitable, and don't need renaming.
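+ # For illustration (the names are made up): a mount declared as
+ #     {"cache-name": "cargo", "directory": "cache/cargo"}
+ # becomes, after the renaming below (at level 3 in the gecko trust
+ # domain):
+ #     {"cacheName": "gecko-level-3-cargo", "directory": "cache/cargo"}
+ # plus a matching generic-worker:cache:... scope on the task.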
+ mounts = deepcopy(worker.get("mounts", [])) + for mount in mounts: + if "cache-name" in mount: + mount["cacheName"] = "{trust_domain}-level-{level}-{name}".format( + trust_domain=config.graph_config["trust-domain"], + level=config.params["level"], + name=mount.pop("cache-name"), + ) + task_def["scopes"].append( + "generic-worker:cache:{}".format(mount["cacheName"]) + ) + if "content" in mount: + if "task-id" in mount["content"]: + mount["content"]["taskId"] = mount["content"].pop("task-id") + if "artifact" in mount["content"]: + if not mount["content"]["artifact"].startswith("public/"): + task_def["scopes"].append( + "queue:get-artifact:{}".format(mount["content"]["artifact"]) + ) + + if mounts: + task_def["payload"]["mounts"] = mounts + + if worker.get("os-groups"): + task_def["payload"]["osGroups"] = worker["os-groups"] + task_def["scopes"].extend( + [ + "generic-worker:os-group:{}/{}".format(task["worker-type"], group) + for group in worker["os-groups"] + ] + ) + + if worker.get("chain-of-trust"): + features["chainOfTrust"] = True + + if worker.get("taskcluster-proxy"): + features["taskclusterProxy"] = True + + if worker.get("run-as-administrator", False): + features["runAsAdministrator"] = True + task_def["scopes"].append( + "generic-worker:run-as-administrator:{}".format(task["worker-type"]), + ) + + if features: + task_def["payload"]["features"] = features + + +@payload_builder( + "scriptworker-signing", + schema={ + # the maximum time to run, in seconds + Required("max-run-time"): int, + # list of artifact URLs for the artifacts that should be signed + Required("upstream-artifacts"): [ + { + # taskId of the task with the artifact + Required("taskId"): taskref_or_string, + # type of signing task (for CoT) + Required("taskType"): text_type, + # Paths to the artifacts to sign + Required("paths"): [text_type], + # Signing formats to use on each of the paths + Required("formats"): [text_type], + } + ], + # behavior for mac iscript + Optional("mac-behavior"): Any( + "mac_notarize_part_1", + "mac_notarize_part_3", + "mac_sign_and_pkg", + "mac_geckodriver", + ), + Optional("entitlements-url"): text_type, + }, +) +def build_scriptworker_signing_payload(config, task, task_def): + worker = task["worker"] + + task_def["payload"] = { + "maxRunTime": worker["max-run-time"], + "upstreamArtifacts": worker["upstream-artifacts"], + } + if worker.get("mac-behavior"): + task_def["payload"]["behavior"] = worker["mac-behavior"] + if worker.get("entitlements-url"): + task_def["payload"]["entitlements-url"] = worker["entitlements-url"] + artifacts = set(task.get("release-artifacts", [])) + for upstream_artifact in worker["upstream-artifacts"]: + for path in upstream_artifact["paths"]: + artifacts.update( + get_signed_artifacts( + input=path, + formats=upstream_artifact["formats"], + behavior=worker.get("mac-behavior"), + ) + ) + task["release-artifacts"] = list(artifacts) + + +@payload_builder( + "notarization-poller", + schema={ + Required("uuid-manifest"): taskref_or_string, + }, +) +def notarization_poller_payload(config, task, task_def): + worker = task["worker"] + task_def["payload"] = {"uuid_manifest": worker["uuid-manifest"]} + + +@payload_builder( + "beetmover", + schema={ + # the maximum time to run, in seconds + Required("max-run-time"): int, + # locale key, if this is a locale beetmover job + Optional("locale"): text_type, + Optional("partner-public"): bool, + Required("release-properties"): { + "app-name": text_type, + "app-version": text_type, + "branch": text_type, + "build-id": text_type, 
+ "hash-type": text_type, + "platform": text_type, + }, + # list of artifact URLs for the artifacts that should be beetmoved + Required("upstream-artifacts"): [ + { + # taskId of the task with the artifact + Required("taskId"): taskref_or_string, + # type of signing task (for CoT) + Required("taskType"): text_type, + # Paths to the artifacts to sign + Required("paths"): [text_type], + # locale is used to map upload path and allow for duplicate simple names + Required("locale"): text_type, + } + ], + Optional("artifact-map"): object, + }, +) +def build_beetmover_payload(config, task, task_def): + worker = task["worker"] + release_config = get_release_config(config) + release_properties = worker["release-properties"] + + task_def["payload"] = { + "maxRunTime": worker["max-run-time"], + "releaseProperties": { + "appName": release_properties["app-name"], + "appVersion": release_properties["app-version"], + "branch": release_properties["branch"], + "buildid": release_properties["build-id"], + "hashType": release_properties["hash-type"], + "platform": release_properties["platform"], + }, + "upload_date": config.params["build_date"], + "upstreamArtifacts": worker["upstream-artifacts"], + } + if worker.get("locale"): + task_def["payload"]["locale"] = worker["locale"] + if worker.get("artifact-map"): + task_def["payload"]["artifactMap"] = worker["artifact-map"] + if worker.get("partner-public"): + task_def["payload"]["is_partner_repack_public"] = worker["partner-public"] + if release_config: + task_def["payload"].update(release_config) + + +@payload_builder( + "beetmover-push-to-release", + schema={ + # the maximum time to run, in seconds + Required("max-run-time"): int, + Required("product"): text_type, + }, +) +def build_beetmover_push_to_release_payload(config, task, task_def): + worker = task["worker"] + release_config = get_release_config(config) + partners = [ + "{}/{}".format(p, s) for p, s, _ in get_partners_to_be_published(config) + ] + + task_def["payload"] = { + "maxRunTime": worker["max-run-time"], + "product": worker["product"], + "version": release_config["version"], + "build_number": release_config["build_number"], + "partners": partners, + } + + +@payload_builder( + "beetmover-maven", + schema={ + Required("max-run-time"): int, + Required("release-properties"): { + "app-name": text_type, + "app-version": text_type, + "branch": text_type, + "build-id": text_type, + "artifact-id": text_type, + "hash-type": text_type, + "platform": text_type, + }, + Required("upstream-artifacts"): [ + { + Required("taskId"): taskref_or_string, + Required("taskType"): text_type, + Required("paths"): [text_type], + Optional("zipExtract"): bool, + } + ], + Optional("artifact-map"): object, + }, +) +def build_beetmover_maven_payload(config, task, task_def): + build_beetmover_payload(config, task, task_def) + + task_def["payload"]["artifact_id"] = task["worker"]["release-properties"][ + "artifact-id" + ] + if task["worker"].get("artifact-map"): + task_def["payload"]["artifactMap"] = task["worker"]["artifact-map"] + + task_def["payload"]["version"] = _compute_geckoview_version( + task["worker"]["release-properties"]["app-version"], + task["worker"]["release-properties"]["build-id"], + ) + + del task_def["payload"]["releaseProperties"]["hashType"] + del task_def["payload"]["releaseProperties"]["platform"] + + +@payload_builder( + "balrog", + schema={ + Required("balrog-action"): Any(*BALROG_ACTIONS), + Optional("product"): text_type, + Optional("platforms"): [text_type], + Optional("release-eta"): text_type, 
+ Optional("channel-names"): optionally_keyed_by("release-type", [text_type]), + Optional("require-mirrors"): bool, + Optional("publish-rules"): optionally_keyed_by( + "release-type", "release-level", [int] + ), + Optional("rules-to-update"): optionally_keyed_by( + "release-type", "release-level", [text_type] + ), + Optional("archive-domain"): optionally_keyed_by("release-level", text_type), + Optional("download-domain"): optionally_keyed_by("release-level", text_type), + Optional("blob-suffix"): text_type, + Optional("complete-mar-filename-pattern"): text_type, + Optional("complete-mar-bouncer-product-pattern"): text_type, + Optional("update-line"): object, + Optional("suffixes"): [text_type], + Optional("background-rate"): optionally_keyed_by( + "release-type", "beta-number", Any(int, None) + ), + Optional("force-fallback-mapping-update"): optionally_keyed_by( + "release-type", "beta-number", bool + ), + # list of artifact URLs for the artifacts that should be beetmoved + Optional("upstream-artifacts"): [ + { + # taskId of the task with the artifact + Required("taskId"): taskref_or_string, + # type of signing task (for CoT) + Required("taskType"): text_type, + # Paths to the artifacts to sign + Required("paths"): [text_type], + } + ], + }, +) +def build_balrog_payload(config, task, task_def): + worker = task["worker"] + release_config = get_release_config(config) + beta_number = None + if "b" in release_config["version"]: + beta_number = release_config["version"].split("b")[-1] + + if ( + worker["balrog-action"] == "submit-locale" + or worker["balrog-action"] == "v2-submit-locale" + ): + task_def["payload"] = { + "upstreamArtifacts": worker["upstream-artifacts"], + "suffixes": worker["suffixes"], + } + else: + for prop in ( + "archive-domain", + "channel-names", + "download-domain", + "publish-rules", + "rules-to-update", + "background-rate", + "force-fallback-mapping-update", + ): + if prop in worker: + resolve_keyed_by( + worker, + prop, + task["description"], + **{ + "release-type": config.params["release_type"], + "release-level": config.params.release_level(), + "beta-number": beta_number, + } + ) + task_def["payload"] = { + "build_number": release_config["build_number"], + "product": worker["product"], + "version": release_config["version"], + } + for prop in ( + "blob-suffix", + "complete-mar-filename-pattern", + "complete-mar-bouncer-product-pattern", + ): + if prop in worker: + task_def["payload"][prop.replace("-", "_")] = worker[prop] + if ( + worker["balrog-action"] == "submit-toplevel" + or worker["balrog-action"] == "v2-submit-toplevel" + ): + task_def["payload"].update( + { + "app_version": release_config["appVersion"], + "archive_domain": worker["archive-domain"], + "channel_names": worker["channel-names"], + "download_domain": worker["download-domain"], + "partial_versions": release_config.get("partial_versions", ""), + "platforms": worker["platforms"], + "rules_to_update": worker["rules-to-update"], + "require_mirrors": worker["require-mirrors"], + "update_line": worker["update-line"], + } + ) + else: # schedule / ship + task_def["payload"].update( + { + "publish_rules": worker["publish-rules"], + "release_eta": worker.get( + "release-eta", config.params.get("release_eta") + ) + or "", + } + ) + if worker.get("force-fallback-mapping-update"): + task_def["payload"]["force_fallback_mapping_update"] = worker[ + "force-fallback-mapping-update" + ] + if worker.get("background-rate"): + task_def["payload"]["background_rate"] = worker["background-rate"] + + +@payload_builder( + 
"bouncer-aliases", + schema={ + Required("entries"): object, + }, +) +def build_bouncer_aliases_payload(config, task, task_def): + worker = task["worker"] + + task_def["payload"] = {"aliases_entries": worker["entries"]} + + +@payload_builder( + "bouncer-locations", + schema={ + Required("implementation"): "bouncer-locations", + Required("bouncer-products"): [text_type], + }, +) +def build_bouncer_locations_payload(config, task, task_def): + worker = task["worker"] + release_config = get_release_config(config) + + task_def["payload"] = { + "bouncer_products": worker["bouncer-products"], + "version": release_config["version"], + "product": task["shipping-product"], + } + + +@payload_builder( + "bouncer-submission", + schema={ + Required("locales"): [text_type], + Required("entries"): object, + }, +) +def build_bouncer_submission_payload(config, task, task_def): + worker = task["worker"] + + task_def["payload"] = { + "locales": worker["locales"], + "submission_entries": worker["entries"], + } + + +@payload_builder( + "push-snap", + schema={ + Required("channel"): text_type, + Required("upstream-artifacts"): [ + { + Required("taskId"): taskref_or_string, + Required("taskType"): text_type, + Required("paths"): [text_type], + } + ], + }, +) +def build_push_snap_payload(config, task, task_def): + worker = task["worker"] + + task_def["payload"] = { + "channel": worker["channel"], + "upstreamArtifacts": worker["upstream-artifacts"], + } + + +@payload_builder( + "push-flatpak", + schema={ + Required("channel"): text_type, + Required("upstream-artifacts"): [ + { + Required("taskId"): taskref_or_string, + Required("taskType"): text_type, + Required("paths"): [text_type], + } + ], + }, +) +def build_push_flatpak_payload(config, task, task_def): + worker = task["worker"] + + task_def["payload"] = { + "channel": worker["channel"], + "upstreamArtifacts": worker["upstream-artifacts"], + } + + +@payload_builder( + "shipit-shipped", + schema={ + Required("release-name"): text_type, + }, +) +def build_ship_it_shipped_payload(config, task, task_def): + worker = task["worker"] + + task_def["payload"] = {"release_name": worker["release-name"]} + + +@payload_builder( + "shipit-maybe-release", + schema={ + Required("phase"): text_type, + }, +) +def build_ship_it_maybe_release_payload(config, task, task_def): + # expect branch name, including path + branch = config.params["head_repository"][len("https://hg.mozilla.org/") :] + # 'version' is e.g. 
'71.0b13' (app_version doesn't have beta number) + version = config.params["version"] + + task_def["payload"] = { + "product": task["shipping-product"], + "branch": branch, + "phase": task["worker"]["phase"], + "version": version, + "cron_revision": config.params["head_rev"], + } + + +@payload_builder( + "push-addons", + schema={ + Required("channel"): Any("listed", "unlisted"), + Required("upstream-artifacts"): [ + { + Required("taskId"): taskref_or_string, + Required("taskType"): text_type, + Required("paths"): [text_type], + } + ], + }, +) +def build_push_addons_payload(config, task, task_def): + worker = task["worker"] + + task_def["payload"] = { + "channel": worker["channel"], + "upstreamArtifacts": worker["upstream-artifacts"], + } + + +@payload_builder( + "treescript", + schema={ + Required("tags"): [Any("buildN", "release", None)], + Required("bump"): bool, + Optional("bump-files"): [text_type], + Optional("repo-param-prefix"): text_type, + Optional("dontbuild"): bool, + Optional("ignore-closed-tree"): bool, + Optional("force-dry-run"): bool, + Optional("push"): bool, + Optional("source-repo"): text_type, + Optional("ssh-user"): text_type, + Optional("l10n-bump-info"): { + Required("name"): text_type, + Required("path"): text_type, + Required("version-path"): text_type, + Optional("l10n-repo-url"): text_type, + Optional("ignore-config"): object, + Required("platform-configs"): [ + { + Required("platforms"): [text_type], + Required("path"): text_type, + Optional("format"): text_type, + } + ], + }, + Optional("merge-info"): object, + }, +) +def build_treescript_payload(config, task, task_def): + worker = task["worker"] + release_config = get_release_config(config) + + task_def["payload"] = {"actions": []} + actions = task_def["payload"]["actions"] + if worker["tags"]: + tag_names = [] + product = task["shipping-product"].upper() + version = release_config["version"].replace(".", "_") + buildnum = release_config["build_number"] + if "buildN" in worker["tags"]: + tag_names.extend( + [ + "{}_{}_BUILD{}".format(product, version, buildnum), + ] + ) + if "release" in worker["tags"]: + tag_names.extend(["{}_{}_RELEASE".format(product, version)]) + tag_info = { + "tags": tag_names, + "revision": config.params[ + "{}head_rev".format(worker.get("repo-param-prefix", "")) + ], + } + task_def["payload"]["tag_info"] = tag_info + actions.append("tag") + + if worker["bump"]: + if not worker["bump-files"]: + raise Exception("Version Bump requested without bump-files") + + bump_info = {} + bump_info["next_version"] = release_config["next_version"] + bump_info["files"] = worker["bump-files"] + task_def["payload"]["version_bump_info"] = bump_info + actions.append("version_bump") + + if worker.get("l10n-bump-info"): + l10n_bump_info = {} + for k, v in worker["l10n-bump-info"].items(): + l10n_bump_info[k.replace("-", "_")] = worker["l10n-bump-info"][k] + task_def["payload"]["l10n_bump_info"] = [l10n_bump_info] + actions.append("l10n_bump") + + if worker.get("merge-info"): + merge_info = { + merge_param_name.replace("-", "_"): merge_param_value + for merge_param_name, merge_param_value in worker["merge-info"].items() + if merge_param_name != "version-files" + } + merge_info["version_files"] = [ + { + file_param_name.replace("-", "_"): file_param_value + for file_param_name, file_param_value in file_entry.items() + } + for file_entry in worker["merge-info"]["version-files"] + ] + task_def["payload"]["merge_info"] = merge_info + actions.append("merge_day") + + if worker["push"]: + actions.append("push") + + 
if worker.get("force-dry-run"): + task_def["payload"]["dry_run"] = True + + if worker.get("dontbuild"): + task_def["payload"]["dontbuild"] = True + + if worker.get("ignore-closed-tree") is not None: + task_def["payload"]["ignore_closed_tree"] = worker["ignore-closed-tree"] + + if worker.get("source-repo"): + task_def["payload"]["source_repo"] = worker["source-repo"] + + if worker.get("ssh-user"): + task_def["payload"]["ssh_user"] = worker["ssh-user"] + + +@payload_builder( + "invalid", + schema={ + # an invalid task is one which should never actually be created; this is used in + # release automation on branches where the task just doesn't make sense + Extra: object, + }, +) +def build_invalid_payload(config, task, task_def): + task_def["payload"] = "invalid task - should never be created" + + +@payload_builder( + "always-optimized", + schema={ + Extra: object, + }, +) +@payload_builder("succeed", schema={}) +def build_dummy_payload(config, task, task_def): + task_def["payload"] = {} + + +transforms = TransformSequence() + + +@transforms.add +def set_implementation(config, tasks): + """ + Set the worker implementation based on the worker-type alias. + """ + for task in tasks: + if "implementation" in task["worker"]: + yield task + continue + + impl, os = worker_type_implementation(config.graph_config, task["worker-type"]) + + tags = task.setdefault("tags", {}) + tags["worker-implementation"] = impl + if os: + task["tags"]["os"] = os + worker = task.setdefault("worker", {}) + worker["implementation"] = impl + if os: + worker["os"] = os + + yield task + + +@transforms.add +def set_defaults(config, tasks): + for task in tasks: + task.setdefault("shipping-phase", None) + task.setdefault("shipping-product", None) + task.setdefault("always-target", False) + task.setdefault("optimization", None) + task.setdefault("use-sccache", False) + + worker = task["worker"] + if worker["implementation"] in ("docker-worker",): + worker.setdefault("chain-of-trust", False) + worker.setdefault("taskcluster-proxy", False) + worker.setdefault("allow-ptrace", True) + worker.setdefault("loopback-video", False) + worker.setdefault("loopback-audio", False) + worker.setdefault("docker-in-docker", False) + worker.setdefault("privileged", False) + worker.setdefault("volumes", []) + worker.setdefault("env", {}) + if "caches" in worker: + for c in worker["caches"]: + c.setdefault("skip-untrusted", False) + elif worker["implementation"] == "generic-worker": + worker.setdefault("env", {}) + worker.setdefault("os-groups", []) + if worker["os-groups"] and worker["os"] != "windows": + raise Exception( + "os-groups feature of generic-worker is only supported on " + "Windows, not on {}".format(worker["os"]) + ) + worker.setdefault("chain-of-trust", False) + elif worker["implementation"] in ( + "scriptworker-signing", + "beetmover", + "beetmover-push-to-release", + "beetmover-maven", + ): + worker.setdefault("max-run-time", 600) + elif worker["implementation"] == "push-apk": + worker.setdefault("commit", False) + + yield task + + +@transforms.add +def task_name_from_label(config, tasks): + for task in tasks: + if "label" not in task: + if "name" not in task: + raise Exception("task has neither a name nor a label") + task["label"] = "{}-{}".format(config.kind, task["name"]) + if task.get("name"): + del task["name"] + yield task + + +UNSUPPORTED_SHIPPING_PRODUCT_ERROR = """\ +The shipping product {product} is not in the list of configured products in +`taskcluster/ci/config.yml'. 
+""" + + +def validate_shipping_product(config, product): + if product not in config.graph_config["release-promotion"]["products"]: + raise Exception(UNSUPPORTED_SHIPPING_PRODUCT_ERROR.format(product=product)) + + +@transforms.add +def validate(config, tasks): + for task in tasks: + validate_schema( + task_description_schema, + task, + "In task {!r}:".format(task.get("label", "?no-label?")), + ) + validate_schema( + payload_builders[task["worker"]["implementation"]].schema, + task["worker"], + "In task.run {!r}:".format(task.get("label", "?no-label?")), + ) + if task["shipping-product"] is not None: + validate_shipping_product(config, task["shipping-product"]) + yield task + + +@index_builder("generic") +def add_generic_index_routes(config, task): + index = task.get("index") + routes = task.setdefault("routes", []) + + verify_index(config, index) + + subs = config.params.copy() + subs["job-name"] = index["job-name"] + subs["build_date_long"] = time.strftime( + "%Y.%m.%d.%Y%m%d%H%M%S", time.gmtime(config.params["build_date"]) + ) + subs["build_date"] = time.strftime( + "%Y.%m.%d", time.gmtime(config.params["build_date"]) + ) + subs["product"] = index["product"] + subs["trust-domain"] = config.graph_config["trust-domain"] + subs["branch_rev"] = get_branch_rev(config) + + project = config.params.get("project") + + for tpl in V2_ROUTE_TEMPLATES: + routes.append(tpl.format(**subs)) + + # Additionally alias all tasks for "trunk" repos into a common + # namespace. + if project and project in TRUNK_PROJECTS: + for tpl in V2_TRUNK_ROUTE_TEMPLATES: + routes.append(tpl.format(**subs)) + + return task + + +@index_builder("shippable") +def add_shippable_index_routes(config, task): + index = task.get("index") + routes = task.setdefault("routes", []) + + verify_index(config, index) + + subs = config.params.copy() + subs["job-name"] = index["job-name"] + subs["build_date_long"] = time.strftime( + "%Y.%m.%d.%Y%m%d%H%M%S", time.gmtime(config.params["build_date"]) + ) + subs["build_date"] = time.strftime( + "%Y.%m.%d", time.gmtime(config.params["build_date"]) + ) + subs["product"] = index["product"] + subs["trust-domain"] = config.graph_config["trust-domain"] + subs["branch_rev"] = get_branch_rev(config) + + for tpl in V2_SHIPPABLE_TEMPLATES: + routes.append(tpl.format(**subs)) + + # Also add routes for en-US + task = add_shippable_l10n_index_routes(config, task, force_locale="en-US") + + return task + + +@index_builder("shippable-with-multi-l10n") +def add_shippable_multi_index_routes(config, task): + task = add_shippable_index_routes(config, task) + task = add_l10n_index_routes(config, task, force_locale="multi") + return task + + +@index_builder("l10n") +def add_l10n_index_routes(config, task, force_locale=None): + index = task.get("index") + routes = task.setdefault("routes", []) + + verify_index(config, index) + + subs = config.params.copy() + subs["job-name"] = index["job-name"] + subs["build_date_long"] = time.strftime( + "%Y.%m.%d.%Y%m%d%H%M%S", time.gmtime(config.params["build_date"]) + ) + subs["product"] = index["product"] + subs["trust-domain"] = config.graph_config["trust-domain"] + subs["branch_rev"] = get_branch_rev(config) + + locales = task["attributes"].get( + "chunk_locales", task["attributes"].get("all_locales") + ) + # Some tasks has only one locale set + if task["attributes"].get("locale"): + locales = [task["attributes"]["locale"]] + + if force_locale: + # Used for en-US and multi-locale + locales = [force_locale] + + if not locales: + raise Exception("Error: Unable to use l10n index 
for tasks without locales") + + # If there are too many locales, we can't write a route for all of them + # See Bug 1323792 + if len(locales) > 18: # 18 * 3 = 54, max routes = 64 + return task + + for locale in locales: + for tpl in V2_L10N_TEMPLATES: + routes.append(tpl.format(locale=locale, **subs)) + + return task + + +@index_builder("shippable-l10n") +def add_shippable_l10n_index_routes(config, task, force_locale=None): + index = task.get("index") + routes = task.setdefault("routes", []) + + verify_index(config, index) + + subs = config.params.copy() + subs["job-name"] = index["job-name"] + subs["build_date_long"] = time.strftime( + "%Y.%m.%d.%Y%m%d%H%M%S", time.gmtime(config.params["build_date"]) + ) + subs["product"] = index["product"] + subs["trust-domain"] = config.graph_config["trust-domain"] + subs["branch_rev"] = get_branch_rev(config) + + locales = task["attributes"].get( + "chunk_locales", task["attributes"].get("all_locales") + ) + # Some tasks has only one locale set + if task["attributes"].get("locale"): + locales = [task["attributes"]["locale"]] + + if force_locale: + # Used for en-US and multi-locale + locales = [force_locale] + + if not locales: + raise Exception("Error: Unable to use l10n index for tasks without locales") + + # If there are too many locales, we can't write a route for all of them + # See Bug 1323792 + if len(locales) > 18: # 18 * 3 = 54, max routes = 64 + return task + + for locale in locales: + for tpl in V2_SHIPPABLE_L10N_TEMPLATES: + routes.append(tpl.format(locale=locale, **subs)) + + return task + + +def add_geckoview_index_routes(config, task): + index = task.get("index") + routes = task.setdefault("routes", []) + geckoview_version = _compute_geckoview_version( + config.params["app_version"], config.params["moz_build_date"] + ) + + subs = { + "geckoview-version": geckoview_version, + "job-name": index["job-name"], + "product": index["product"], + "project": config.params["project"], + "trust-domain": config.graph_config["trust-domain"], + } + routes.append(V2_GECKOVIEW_RELEASE.format(**subs)) + + return task + + +@index_builder("android-shippable") +def add_android_shippable_index_routes(config, task): + task = add_shippable_index_routes(config, task) + task = add_geckoview_index_routes(config, task) + + return task + + +@index_builder("android-shippable-with-multi-l10n") +def add_android_shippable_multi_index_routes(config, task): + task = add_shippable_multi_index_routes(config, task) + task = add_geckoview_index_routes(config, task) + + return task + + +@transforms.add +def add_index_routes(config, tasks): + for task in tasks: + index = task.get("index", {}) + + # The default behavior is to rank tasks according to their tier + extra_index = task.setdefault("extra", {}).setdefault("index", {}) + rank = index.get("rank", "by-tier") + + if rank == "by-tier": + # rank is zero for non-tier-1 tasks and based on pushid for others; + # this sorts tier-{2,3} builds below tier-1 in the index + tier = task.get("treeherder", {}).get("tier", 3) + extra_index["rank"] = 0 if tier > 1 else int(config.params["build_date"]) + elif rank == "build_date": + extra_index["rank"] = int(config.params["build_date"]) + else: + extra_index["rank"] = rank + + if not index: + yield task + continue + + index_type = index.get("type", "generic") + task = index_builders[index_type](config, task) + + del task["index"] + yield task + + +@transforms.add +def try_task_config_env(config, tasks): + """Set environment variables in the task.""" + env = 
config.params["try_task_config"].get("env") + # Find all implementations that have an 'env' key. + implementations = { + name + for name, builder in payload_builders.items() + if "env" in builder.schema.schema + } + for task in tasks: + if env and task["worker"]["implementation"] in implementations: + task["worker"]["env"].update(env) + yield task + + +@transforms.add +def try_task_config_chemspill_prio(config, tasks): + """Increase the priority from lowest and very-low -> low, but leave others unchanged.""" + chemspill_prio = config.params["try_task_config"].get("chemspill-prio") + for task in tasks: + if chemspill_prio and task["priority"] in ("lowest", "very-low"): + task["priority"] = "low" + yield task + + +@transforms.add +def try_task_config_routes(config, tasks): + """Set routes in the task.""" + routes = config.params["try_task_config"].get("routes") + for task in tasks: + if routes: + task_routes = task.setdefault("routes", []) + task_routes.extend(routes) + yield task + + +@transforms.add +def build_task(config, tasks): + for task in tasks: + level = str(config.params["level"]) + + if task["worker-type"] in config.params["try_task_config"].get( + "worker-overrides", {} + ): + worker_pool = config.params["try_task_config"]["worker-overrides"][ + task["worker-type"] + ] + provisioner_id, worker_type = worker_pool.split("/", 1) + else: + provisioner_id, worker_type = get_worker_type( + config.graph_config, + task["worker-type"], + level=level, + release_level=config.params.release_level(), + ) + task["worker-type"] = "/".join([provisioner_id, worker_type]) + project = config.params["project"] + + routes = task.get("routes", []) + scopes = [ + s.format(level=level, project=project) for s in task.get("scopes", []) + ] + + # set up extra + extra = task.get("extra", {}) + extra["parent"] = {"task-reference": "<decision>"} + task_th = task.get("treeherder") + if task_th: + extra.setdefault("treeherder-platform", task_th["platform"]) + treeherder = extra.setdefault("treeherder", {}) + + machine_platform, collection = task_th["platform"].split("/", 1) + treeherder["machine"] = {"platform": machine_platform} + treeherder["collection"] = {collection: True} + + group_names = config.graph_config["treeherder"]["group-names"] + groupSymbol, symbol = split_symbol(task_th["symbol"]) + if groupSymbol != "?": + treeherder["groupSymbol"] = groupSymbol + if groupSymbol not in group_names: + path = os.path.join(config.path, task.get("job-from", "")) + raise Exception(UNKNOWN_GROUP_NAME.format(groupSymbol, path)) + treeherder["groupName"] = group_names[groupSymbol] + treeherder["symbol"] = symbol + if len(symbol) > 25 or len(groupSymbol) > 25: + raise RuntimeError( + "Treeherder group and symbol names must not be longer than " + "25 characters: {} (see {})".format( + task_th["symbol"], + TC_TREEHERDER_SCHEMA_URL, + ) + ) + treeherder["jobKind"] = task_th["kind"] + treeherder["tier"] = task_th["tier"] + + branch_rev = get_branch_rev(config) + + routes.append( + "{}.v2.{}.{}".format( + TREEHERDER_ROUTE_ROOT, + config.params["project"], + branch_rev, + ) + ) + + if "expires-after" in task: + if config.params.is_try(): + delta = value_of(task["expires-after"]) + if delta.days >= 28: + task["expires-after"] = "28 days" + else: + task["expires-after"] = "28 days" if config.params.is_try() else "1 year" + + if "deadline-after" not in task: + task["deadline-after"] = "1 day" + + if "priority" not in task: + task["priority"] = get_default_priority( + config.graph_config, config.params["project"] + ) + + tags = 
task.get("tags", {}) + attributes = task.get("attributes", {}) + + tags.update( + { + "createdForUser": config.params["owner"], + "kind": config.kind, + "label": task["label"], + "retrigger": "true" if attributes.get("retrigger", False) else "false", + } + ) + + task_def = { + "provisionerId": provisioner_id, + "workerType": worker_type, + "routes": routes, + "created": {"relative-datestamp": "0 seconds"}, + "deadline": {"relative-datestamp": task["deadline-after"]}, + "expires": {"relative-datestamp": task["expires-after"]}, + "scopes": scopes, + "metadata": { + "description": task["description"], + "name": task["label"], + "owner": config.params["owner"], + "source": config.params.file_url(config.path, pretty=True), + }, + "extra": extra, + "tags": tags, + "priority": task["priority"], + } + + if task.get("requires", None): + task_def["requires"] = task["requires"] + + if task_th: + # link back to treeherder in description + th_job_link = ( + "https://treeherder.mozilla.org/#/jobs?repo={}&revision={}&selectedTaskRun=<self>" + ).format(config.params["project"], branch_rev) + task_def["metadata"]["description"] = { + "task-reference": "{description} ([Treeherder job]({th_job_link}))".format( + description=task_def["metadata"]["description"], + th_job_link=th_job_link, + ) + } + + # add the payload and adjust anything else as required (e.g., scopes) + payload_builders[task["worker"]["implementation"]].builder( + config, task, task_def + ) + + # Resolve run-on-projects + build_platform = attributes.get("build_platform") + resolve_keyed_by( + task, + "run-on-projects", + item_name=task["label"], + **{"build-platform": build_platform} + ) + attributes["run_on_projects"] = task.get("run-on-projects", ["all"]) + attributes["always_target"] = task["always-target"] + # This logic is here since downstream tasks don't always match their + # upstream dependency's shipping_phase. + # A text_type task['shipping-phase'] takes precedence, then + # an existing attributes['shipping_phase'], then fall back to None. + if task.get("shipping-phase") is not None: + attributes["shipping_phase"] = task["shipping-phase"] + else: + attributes.setdefault("shipping_phase", None) + # shipping_product will always match the upstream task's + # shipping_product, so a pre-set existing attributes['shipping_product'] + # takes precedence over task['shipping-product']. However, make sure + # we don't have conflicting values. + if task.get("shipping-product") and attributes.get("shipping_product") not in ( + None, + task["shipping-product"], + ): + raise Exception( + "{} shipping_product {} doesn't match task shipping-product {}!".format( + task["label"], + attributes["shipping_product"], + task["shipping-product"], + ) + ) + attributes.setdefault("shipping_product", task["shipping-product"]) + + # Set MOZ_AUTOMATION on all jobs. 
+ if task["worker"]["implementation"] in ( + "generic-worker", + "docker-worker", + ): + payload = task_def.get("payload") + if payload: + env = payload.setdefault("env", {}) + env["MOZ_AUTOMATION"] = "1" + + dependencies = task.get("dependencies", {}) + if_dependencies = task.get("if-dependencies", []) + if if_dependencies: + for i, dep in enumerate(if_dependencies): + if dep in dependencies: + if_dependencies[i] = dependencies[dep] + continue + + raise Exception( + "{label} specifies '{dep}' in if-dependencies, " + "but {dep} is not a dependency!".format( + label=task["label"], dep=dep + ) + ) + + yield { + "label": task["label"], + "description": task["description"], + "task": task_def, + "dependencies": dependencies, + "if-dependencies": if_dependencies, + "soft-dependencies": task.get("soft-dependencies", []), + "attributes": attributes, + "optimization": task.get("optimization", None), + "release-artifacts": task.get("release-artifacts", []), + } + + +@transforms.add +def chain_of_trust(config, tasks): + for task in tasks: + if task["task"].get("payload", {}).get("features", {}).get("chainOfTrust"): + image = task.get("dependencies", {}).get("docker-image") + if image: + cot = ( + task["task"].setdefault("extra", {}).setdefault("chainOfTrust", {}) + ) + cot.setdefault("inputs", {})["docker-image"] = { + "task-reference": "<docker-image>" + } + yield task + + +@transforms.add +def check_task_identifiers(config, tasks): + """Ensures that all tasks have well defined identifiers: + ^[a-zA-Z0-9_-]{1,38}$ + """ + e = re.compile("^[a-zA-Z0-9_-]{1,38}$") + for task in tasks: + for attrib in ("workerType", "provisionerId"): + if not e.match(task["task"][attrib]): + raise Exception( + "task {}.{} is not a valid identifier: {}".format( + task["label"], attrib, task["task"][attrib] + ) + ) + yield task + + +@transforms.add +def check_task_dependencies(config, tasks): + """Ensures that tasks don't have more than 100 dependencies.""" + for task in tasks: + if len(task["dependencies"]) > MAX_DEPENDENCIES: + raise Exception( + "task {}/{} has too many dependencies ({} > {})".format( + config.kind, + task["label"], + len(task["dependencies"]), + MAX_DEPENDENCIES, + ) + ) + yield task + + +def check_caches_are_volumes(task): + """Ensures that all cache paths are defined as volumes. + + Caches and volumes are the only filesystem locations whose content + isn't defined by the Docker image itself. Some caches are optional + depending on the job environment. We want paths that are potentially + caches to have as similar behavior regardless of whether a cache is + used. To help enforce this, we require that all paths used as caches + to be declared as Docker volumes. This check won't catch all offenders. + But it is better than nothing. + """ + volumes = set(six.ensure_text(s) for s in task["worker"]["volumes"]) + paths = set( + six.ensure_text(c["mount-point"]) for c in task["worker"].get("caches", []) + ) + missing = paths - volumes + + if not missing: + return + + raise Exception( + "task %s (image %s) has caches that are not declared as " + "Docker volumes: %s " + "(have you added them as VOLUMEs in the Dockerfile?)" + % (task["label"], task["worker"]["docker-image"], ", ".join(sorted(missing))) + ) + + +def check_required_volumes(task): + """ + Ensures that all paths that are required to be volumes are defined as volumes. + + Performance of writing to files in poor in directories not marked as + volumes, in docker. Ensure that paths that are often written to are marked + as volumes. 
+ """ + volumes = set(task["worker"]["volumes"]) + paths = set(task["worker"].get("required-volumes", [])) + missing = paths - volumes + + if not missing: + return + + raise Exception( + "task %s (image %s) has paths that should be volumes for peformance " + "that are not declared as Docker volumes: %s " + "(have you added them as VOLUMEs in the Dockerfile?)" + % (task["label"], task["worker"]["docker-image"], ", ".join(sorted(missing))) + ) + + +@transforms.add +def check_run_task_caches(config, tasks): + """Audit for caches requiring run-task. + + run-task manages caches in certain ways. If a cache managed by run-task + is used by a non run-task task, it could cause problems. So we audit for + that and make sure certain cache names are exclusive to run-task. + + IF YOU ARE TEMPTED TO MAKE EXCLUSIONS TO THIS POLICY, YOU ARE LIKELY + CONTRIBUTING TECHNICAL DEBT AND WILL HAVE TO SOLVE MANY OF THE PROBLEMS + THAT RUN-TASK ALREADY SOLVES. THINK LONG AND HARD BEFORE DOING THAT. + """ + re_reserved_caches = re.compile( + """^ + (checkouts|tooltool-cache) + """, + re.VERBOSE, + ) + + re_sparse_checkout_cache = re.compile("^checkouts-sparse") + + cache_prefix = "{trust_domain}-level-{level}-".format( + trust_domain=config.graph_config["trust-domain"], + level=config.params["level"], + ) + + suffix = _run_task_suffix() + + for task in tasks: + payload = task["task"].get("payload", {}) + command = payload.get("command") or [""] + + main_command = command[0] if isinstance(command[0], text_type) else "" + run_task = main_command.endswith("run-task") + + require_sparse_cache = False + have_sparse_cache = False + + if run_task: + for arg in command[1:]: + if not isinstance(arg, text_type): + continue + + if arg == "--": + break + + if arg.startswith("--gecko-sparse-profile"): + if "=" not in arg: + raise Exception( + "{} is specifying `--gecko-sparse-profile` to run-task " + "as two arguments. Unable to determine if the sparse " + "profile exists.".format(task["label"]) + ) + _, sparse_profile = arg.split("=", 1) + if not os.path.exists(os.path.join(GECKO, sparse_profile)): + raise Exception( + "{} is using non-existant sparse profile {}.".format( + task["label"], sparse_profile + ) + ) + require_sparse_cache = True + break + + for cache in payload.get("cache", {}): + if not cache.startswith(cache_prefix): + raise Exception( + "{} is using a cache ({}) which is not appropriate " + "for its trust-domain and level. 
It should start with {}.".format(
+ task["label"], cache, cache_prefix
+ )
+ )
+
+ cache = cache[len(cache_prefix) :]
+
+ if re_sparse_checkout_cache.match(cache):
+ have_sparse_cache = True
+
+ if not re_reserved_caches.match(cache):
+ continue
+
+ if not run_task:
+ raise Exception(
+ "%s is using a cache (%s) reserved for run-task; "
+ "change the task to use run-task or use a different "
+ "cache name" % (task["label"], cache)
+ )
+
+ if not cache.endswith(suffix):
+ raise Exception(
+ "%s is using a cache (%s) reserved for run-task "
+ "but the cache name is not dependent on the contents "
+ "of run-task; change the cache name to conform to the "
+ "naming requirements" % (task["label"], cache)
+ )
+
+ if require_sparse_cache and not have_sparse_cache:
+ raise Exception(
+ "%s is using a sparse checkout but not using "
+ "a sparse checkout cache; change the checkout "
+ "cache name so it is sparse aware" % task["label"]
+ )
+
+ yield task
diff --git a/taskcluster/taskgraph/transforms/tests.py b/taskcluster/taskgraph/transforms/tests.py
new file mode 100644
index 0000000000..4891351cfe
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/tests.py
@@ -0,0 +1,1949 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+These transforms construct a task description to run the given test, based on a
+test description. The implementation here is shared among all test kinds, but
+contains specific support for how we run tests in Gecko (via mozharness,
+invoked in particular ways).
+
+This is a good place to translate a test-description option such as
+`single-core: true` to the implementation of that option in a task description
+(worker options, mozharness commandline, environment variables, etc.)
+
+The test description should be fully formed by the time it reaches these
+transforms, and these transforms should not embody any specific knowledge about
+what should run where. This is the wrong place for special-casing platforms,
+for example; use `all_tests.py` for that instead.
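+
+For instance (an illustrative note, based on the tables below): an option
+like `instance-size: xlarge` is resolved by these transforms into a
+concrete worker type via LINUX_WORKER_TYPES, and `virtualization:
+virtual-with-gpu` selects a GPU-enabled Windows worker from
+WINDOWS_WORKER_TYPES.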
+""" + +from __future__ import absolute_import, print_function, unicode_literals + +import copy +import logging +import re + +from mozbuild.schedules import INCLUSIVE_COMPONENTS +from six import string_types, text_type +from voluptuous import ( + Any, + Optional, + Required, + Exclusive, +) + +import taskgraph +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.attributes import match_run_on_projects, keymatch +from taskgraph.util.keyed_by import evaluate_keyed_by +from taskgraph.util.templates import merge +from taskgraph.util.treeherder import split_symbol, join_symbol +from taskgraph.util.platforms import platform_family +from taskgraph.util.schema import ( + resolve_keyed_by, + optionally_keyed_by, + Schema, +) +from taskgraph.optimize.schema import OptimizationSchema +from taskgraph.util.chunking import ( + chunk_manifests, + get_manifest_loader, + get_runtimes, + guess_mozinfo_from_task, + manifest_loaders, + DefaultLoader, +) +from taskgraph.util.taskcluster import ( + get_artifact_path, + get_index_url, +) +from taskgraph.util.perfile import perfile_number_of_chunks + + +# default worker types keyed by instance-size +LINUX_WORKER_TYPES = { + "large": "t-linux-large", + "xlarge": "t-linux-xlarge", + "default": "t-linux-large", +} + +# windows worker types keyed by test-platform and virtualization +WINDOWS_WORKER_TYPES = { + "windows7-32": { + "virtual": "t-win7-32", + "virtual-with-gpu": "t-win7-32-gpu", + "hardware": "t-win10-64-1803-hw", + }, + "windows7-32-shippable": { + "virtual": "t-win7-32", + "virtual-with-gpu": "t-win7-32-gpu", + "hardware": "t-win10-64-1803-hw", + }, + "windows7-32-devedition": { # build only, tests have no value + "virtual": "t-win7-32", + "virtual-with-gpu": "t-win7-32-gpu", + "hardware": "t-win10-64-1803-hw", + }, + "windows7-32-mingwclang": { + "virtual": "t-win7-32", + "virtual-with-gpu": "t-win7-32-gpu", + "hardware": "t-win10-64-1803-hw", + }, + "windows7-32-qr": { + "virtual": "t-win7-32", + "virtual-with-gpu": "t-win7-32-gpu", + "hardware": "t-win10-64-1803-hw", + }, + "windows10-64": { + "virtual": "t-win10-64", + "virtual-with-gpu": "t-win10-64-gpu-s", + "hardware": "t-win10-64-1803-hw", + }, + "windows10-aarch64": { + "virtual": "t-win64-aarch64-laptop", + "virtual-with-gpu": "t-win64-aarch64-laptop", + "hardware": "t-win64-aarch64-laptop", + }, + "windows10-64-ccov": { + "virtual": "t-win10-64", + "virtual-with-gpu": "t-win10-64-gpu-s", + "hardware": "t-win10-64-1803-hw", + }, + "windows10-64-ccov-qr": { + "virtual": "t-win10-64", + "virtual-with-gpu": "t-win10-64-gpu-s", + "hardware": "t-win10-64-1803-hw", + }, + "windows10-64-devedition": { + "virtual": "t-win10-64", + "virtual-with-gpu": "t-win10-64-gpu-s", + "hardware": "t-win10-64-1803-hw", + }, + "windows10-64-shippable": { + "virtual": "t-win10-64", + "virtual-with-gpu": "t-win10-64-gpu-s", + "hardware": "t-win10-64-1803-hw", + }, + "windows10-64-asan": { + "virtual": "t-win10-64", + "virtual-with-gpu": "t-win10-64-gpu-s", + "hardware": "t-win10-64-1803-hw", + }, + "windows10-64-qr": { + "virtual": "t-win10-64", + "virtual-with-gpu": "t-win10-64-gpu-s", + "hardware": "t-win10-64-1803-hw", + }, + "windows10-64-shippable-qr": { + "virtual": "t-win10-64", + "virtual-with-gpu": "t-win10-64-gpu-s", + "hardware": "t-win10-64-1803-hw", + }, + "windows10-64-mingwclang": { + "virtual": "t-win10-64", + "virtual-with-gpu": "t-win10-64-gpu-s", + "hardware": "t-win10-64-1803-hw", + }, + "windows10-64-ref-hw-2017": { + "virtual": "t-win10-64", + "virtual-with-gpu": 
"t-win10-64-gpu-s", + "hardware": "t-win10-64-ref-hw", + }, +} + +# os x worker types keyed by test-platform +MACOSX_WORKER_TYPES = { + "macosx1014-64": "t-osx-1014", + "macosx1014-64-power": "t-osx-1014-power", +} + + +def runs_on_central(task): + return match_run_on_projects("mozilla-central", task["run-on-projects"]) + + +def gv_e10s_filter(task): + return get_mobile_project(task) == "geckoview" and task["e10s"] + + +def fission_filter(task): + return ( + runs_on_central(task) + and task.get("e10s") in (True, "both") + and get_mobile_project(task) != "fennec" + ) + + +TEST_VARIANTS = { + "a11y-checks": { + "description": "{description} with accessibility checks enabled", + "suffix": "a11y-checks", + "replace": { + "run-on-projects": { + "by-test-platform": { + "linux.*64(-shippable)?/opt": ["trunk"], + "default": [], + }, + }, + "tier": 2, + }, + "merge": { + "mozharness": { + "extra-options": [ + "--enable-a11y-checks", + ], + }, + }, + }, + "geckoview-e10s-single": { + "description": "{description} with single-process e10s", + "filterfn": gv_e10s_filter, + "replace": { + "run-on-projects": ["trunk"], + }, + "suffix": "e10s-single", + "merge": { + "mozharness": { + "extra-options": [ + "--setpref=dom.ipc.processCount=1", + ], + }, + }, + }, + "geckoview-fission": { + "description": "{description} with fission enabled", + "filterfn": gv_e10s_filter, + "suffix": "fis", + "merge": { + # Ensures the default state is to not run anywhere. + "fission-run-on-projects": [], + "mozharness": { + "extra-options": [ + "--enable-fission", + ], + }, + }, + }, + "fission": { + "description": "{description} with fission enabled", + "filterfn": fission_filter, + "suffix": "fis", + "replace": { + "e10s": True, + }, + "merge": { + # Ensures the default state is to not run anywhere. + "fission-run-on-projects": [], + "mozharness": { + "extra-options": [ + "--setpref=fission.autostart=true", + "--setpref=dom.serviceWorkers.parent_intercept=true", + ], + }, + }, + }, + "fission-xorigin": { + "description": "{description} with cross-origin and fission enabled", + "filterfn": fission_filter, + "suffix": "fis-xorig", + "replace": { + "e10s": True, + }, + "merge": { + # Ensures the default state is to not run anywhere. 
+ "fission-run-on-projects": [], + "mozharness": { + "extra-options": [ + "--setpref=fission.autostart=true", + "--setpref=dom.serviceWorkers.parent_intercept=true", + "--enable-xorigin-tests", + ], + }, + }, + }, + "socketprocess": { + "description": "{description} with socket process enabled", + "suffix": "spi", + "merge": { + "mozharness": { + "extra-options": [ + "--setpref=media.peerconnection.mtransport_process=true", + "--setpref=network.process.enabled=true", + ], + } + }, + }, + "socketprocess_networking": { + "description": "{description} with networking on socket process enabled", + "suffix": "spi-nw", + "merge": { + "mozharness": { + "extra-options": [ + "--setpref=network.process.enabled=true", + "--setpref=network.http.network_access_on_socket_process.enabled=true", + "--setpref=network.ssl_tokens_cache_enabled=true", + ], + } + }, + }, + "webrender": { + "description": "{description} with webrender enabled", + "suffix": "wr", + "merge": { + "webrender": True, + }, + }, + "webrender-sw": { + "description": "{description} with software webrender enabled", + "suffix": "swr", + "merge": { + "webrender": True, + "mozharness": { + "extra-options": [ + "--setpref=gfx.webrender.software=true", + ], + }, + }, + }, + "webgl-ipc": { + # TODO: After 2021-02-01, verify this variant is still needed. + "description": "{description} with WebGL IPC process enabled", + "suffix": "gli", + "replace": { + "run-on-projects": { + "by-test-platform": { + "linux.*-64.*": ["trunk"], + "mac.*": ["trunk"], + "win.*": ["trunk"], + "default": [], + }, + }, + }, + "merge": { + "mozharness": { + "extra-options": [ + "--setpref=webgl.out-of-process=true", + ], + }, + }, + }, +} + + +DYNAMIC_CHUNK_DURATION = 20 * 60 # seconds +"""The approximate time each test chunk should take to run.""" + + +DYNAMIC_CHUNK_MULTIPLIER = { + # Desktop xpcshell tests run in parallel. Reduce the total runtime to + # compensate. + "^(?!android).*-xpcshell.*": 0.2, +} +"""A multiplication factor to tweak the total duration per platform / suite.""" + + +logger = logging.getLogger(__name__) + +transforms = TransformSequence() + +# Schema for a test description +# +# *****WARNING***** +# +# This is a great place for baffling cruft to accumulate, and that makes +# everyone move more slowly. Be considerate of your fellow hackers! +# See the warnings in taskcluster/docs/how-tos.rst +# +# *****WARNING***** +test_description_schema = Schema( + { + # description of the suite, for the task metadata + Required("description"): text_type, + # test suite category and name + Optional("suite"): Any( + text_type, + {Optional("category"): text_type, Optional("name"): text_type}, + ), + # base work directory used to set up the task. + Optional("workdir"): optionally_keyed_by( + "test-platform", Any(text_type, "default") + ), + # the name by which this test suite is addressed in try syntax; defaults to + # the test-name. This will translate to the `unittest_try_name` or + # `talos_try_name` attribute. + Optional("try-name"): text_type, + # additional tags to mark up this type of test + Optional("tags"): {text_type: object}, + # the symbol, or group(symbol), under which this task should appear in + # treeherder. + Required("treeherder-symbol"): text_type, + # the value to place in task.extra.treeherder.machine.platform; ideally + # this is the same as build-platform, and that is the default, but in + # practice it's not always a match. 
+ Optional("treeherder-machine-platform"): text_type, + # attributes to appear in the resulting task (later transforms will add the + # common attributes) + Optional("attributes"): {text_type: object}, + # relative path (from config.path) to the file task was defined in + Optional("job-from"): text_type, + # The `run_on_projects` attribute, defaulting to "all". This dictates the + # projects on which this task should be included in the target task set. + # See the attributes documentation for details. + # + # Note that the special case 'built-projects', the default, uses the parent + # build task's run-on-projects, meaning that tests run only on platforms + # that are built. + Optional("run-on-projects"): optionally_keyed_by( + "test-platform", "test-name", "variant", Any([text_type], "built-projects") + ), + # When set only run on projects where the build would already be running. + # This ensures tasks where this is True won't be the cause of the build + # running on a project it otherwise wouldn't have. + Optional("built-projects-only"): bool, + # Same as `run-on-projects` except it only applies to Fission tasks. Fission + # tasks will ignore `run_on_projects` and non-Fission tasks will ignore + # `fission-run-on-projects`. + Optional("fission-run-on-projects"): optionally_keyed_by( + "test-platform", Any([text_type], "built-projects") + ), + # the sheriffing tier for this task (default: set based on test platform) + Optional("tier"): optionally_keyed_by("test-platform", Any(int, "default")), + # Same as `tier` except it only applies to Fission tasks. Fission tasks + # will ignore `tier` and non-Fission tasks will ignore `fission-tier`. + Optional("fission-tier"): optionally_keyed_by( + "test-platform", Any(int, "default") + ), + # number of chunks to create for this task. This can be keyed by test + # platform by passing a dictionary in the `by-test-platform` key. If the + # test platform is not found, the key 'default' will be tried. + Required("chunks"): optionally_keyed_by("test-platform", Any(int, "dynamic")), + # Custom 'test_manifest_loader' to use, overriding the one configured in the + # parameters. When 'null', no test chunking will be performed. Can also + # be used to disable "manifest scheduling". + Optional("test-manifest-loader"): Any(None, *list(manifest_loaders)), + # the time (with unit) after which this task is deleted; default depends on + # the branch (see below) + Optional("expires-after"): text_type, + # The different configurations that should be run against this task, defined + # in the TEST_VARIANTS object. + Optional("variants"): optionally_keyed_by( + "test-platform", "project", Any(list(TEST_VARIANTS)) + ), + # Whether to run this task with e10s. If false, run + # without e10s; if true, run with e10s; if 'both', run one task with and + # one task without e10s. E10s tasks have "-e10s" appended to the test name + # and treeherder group. + Required("e10s"): optionally_keyed_by( + "test-platform", "project", Any(bool, "both") + ), + # Whether the task should run with WebRender enabled or not. + Optional("webrender"): bool, + Optional("webrender-run-on-projects"): optionally_keyed_by( + "app", Any([text_type], "default") + ), + # The EC2 instance size to run these tests on. + Required("instance-size"): optionally_keyed_by( + "test-platform", Any("default", "large", "xlarge") + ), + # type of virtualization or hardware required by test. 
+ Required("virtualization"): optionally_keyed_by( + "test-platform", Any("virtual", "virtual-with-gpu", "hardware") + ), + # Whether the task requires loopback audio or video (whatever that may mean + # on the platform) + Required("loopback-audio"): bool, + Required("loopback-video"): bool, + # Whether the test can run using a software GL implementation on Linux + # using the GL compositor. May not be used with "legacy" sized instances + # due to poor LLVMPipe performance (bug 1296086). Defaults to true for + # unit tests on linux platforms and false otherwise + Optional("allow-software-gl-layers"): bool, + # For tasks that will run in docker-worker, this is the + # name of the docker image or in-tree docker image to run the task in. If + # in-tree, then a dependency will be created automatically. This is + # generally `desktop-test`, or an image that acts an awful lot like it. + Required("docker-image"): optionally_keyed_by( + "test-platform", + Any( + # a raw Docker image path (repo/image:tag) + text_type, + # an in-tree generated docker image (from `taskcluster/docker/<name>`) + {"in-tree": text_type}, + # an indexed docker image + {"indexed": text_type}, + ), + ), + # seconds of runtime after which the task will be killed. Like 'chunks', + # this can be keyed by test pltaform. + Required("max-run-time"): optionally_keyed_by("test-platform", int), + # the exit status code that indicates the task should be retried + Optional("retry-exit-status"): [int], + # Whether to perform a gecko checkout. + Required("checkout"): bool, + # Wheter to perform a machine reboot after test is done + Optional("reboot"): Any(False, "always", "on-exception", "on-failure"), + # What to run + Required("mozharness"): { + # the mozharness script used to run this task + Required("script"): optionally_keyed_by("test-platform", text_type), + # the config files required for the task + Required("config"): optionally_keyed_by("test-platform", [text_type]), + # mochitest flavor for mochitest runs + Optional("mochitest-flavor"): text_type, + # any additional actions to pass to the mozharness command + Optional("actions"): [text_type], + # additional command-line options for mozharness, beyond those + # automatically added + Required("extra-options"): optionally_keyed_by( + "test-platform", [text_type] + ), + # the artifact name (including path) to test on the build task; this is + # generally set in a per-kind transformation + Optional("build-artifact-name"): text_type, + Optional("installer-url"): text_type, + # If not false, tooltool downloads will be enabled via relengAPIProxy + # for either just public files, or all files. Not supported on Windows + Required("tooltool-downloads"): Any( + False, + "public", + "internal", + ), + # Add --blob-upload-branch=<project> mozharness parameter + Optional("include-blob-upload-branch"): bool, + # The setting for --download-symbols (if omitted, the option will not + # be passed to mozharness) + Optional("download-symbols"): Any(True, "ondemand"), + # If set, then MOZ_NODE_PATH=/usr/local/bin/node is included in the + # environment. This is more than just a helpful path setting -- it + # causes xpcshell tests to start additional servers, and runs + # additional tests. 
+ Required("set-moz-node-path"): bool, + # If true, include chunking information in the command even if the number + # of chunks is 1 + Required("chunked"): optionally_keyed_by("test-platform", bool), + Required("requires-signed-builds"): optionally_keyed_by( + "test-platform", bool + ), + }, + # The set of test manifests to run. + Optional("test-manifests"): Any( + [text_type], + {"active": [text_type], "skipped": [text_type]}, + ), + # The current chunk (if chunking is enabled). + Optional("this-chunk"): int, + # os user groups for test task workers; required scopes, will be + # added automatically + Optional("os-groups"): optionally_keyed_by("test-platform", [text_type]), + Optional("run-as-administrator"): optionally_keyed_by("test-platform", bool), + # -- values supplied by the task-generation infrastructure + # the platform of the build this task is testing + Required("build-platform"): text_type, + # the label of the build task generating the materials to test + Required("build-label"): text_type, + # the label of the signing task generating the materials to test. + # Signed builds are used in xpcshell tests on Windows, for instance. + Optional("build-signing-label"): text_type, + # the build's attributes + Required("build-attributes"): {text_type: object}, + # the platform on which the tests will run + Required("test-platform"): text_type, + # limit the test-platforms (as defined in test-platforms.yml) + # that the test will run on + Optional("limit-platforms"): optionally_keyed_by("app", [text_type]), + # the name of the test (the key in tests.yml) + Required("test-name"): text_type, + # the product name, defaults to firefox + Optional("product"): text_type, + # conditional files to determine when these tests should be run + Exclusive("when", "optimization"): { + Optional("files-changed"): [text_type], + }, + # Optimization to perform on this task during the optimization phase. + # Optimizations are defined in taskcluster/taskgraph/optimize.py. + Exclusive("optimization", "optimization"): OptimizationSchema, + # The SCHEDULES component for this task; this defaults to the suite + # (not including the flavor) but can be overridden here. + Exclusive("schedules-component", "optimization"): Any( + text_type, + [text_type], + ), + Optional("worker-type"): optionally_keyed_by( + "test-platform", + Any(text_type, None), + ), + Optional( + "require-signed-extensions", + description="Whether the build being tested requires extensions be signed.", + ): optionally_keyed_by("release-type", "test-platform", bool), + # The target name, specifying the build artifact to be tested. + # If None or not specified, a transform sets the target based on OS: + # target.dmg (Mac), target.apk (Android), target.tar.bz2 (Linux), + # or target.zip (Windows). + Optional("target"): optionally_keyed_by( + "test-platform", + Any( + text_type, + None, + {Required("index"): text_type, Required("name"): text_type}, + ), + ), + # A list of artifacts to install from 'fetch' tasks. 
+ Optional("fetches"): { + text_type: optionally_keyed_by("test-platform", [text_type]) + }, + # Opt-in to Python 3 support + Optional("python-3"): bool, + } +) + + +@transforms.add +def handle_keyed_by_mozharness(config, tasks): + """Resolve a mozharness field if it is keyed by something""" + fields = [ + "mozharness", + "mozharness.chunked", + "mozharness.config", + "mozharness.extra-options", + "mozharness.requires-signed-builds", + "mozharness.script", + ] + for task in tasks: + for field in fields: + resolve_keyed_by(task, field, item_name=task["test-name"]) + yield task + + +@transforms.add +def set_defaults(config, tasks): + for task in tasks: + build_platform = task["build-platform"] + if build_platform.startswith("android"): + # all Android test tasks download internal objects from tooltool + task["mozharness"]["tooltool-downloads"] = "internal" + task["mozharness"]["actions"] = ["get-secrets"] + + # loopback-video is always true for Android, but false for other + # platform phyla + task["loopback-video"] = True + task["mozharness"]["set-moz-node-path"] = True + + # software-gl-layers is only meaningful on linux unittests, where it defaults to True + if task["test-platform"].startswith("linux") and task["suite"] not in [ + "talos", + "raptor", + ]: + task.setdefault("allow-software-gl-layers", True) + else: + task["allow-software-gl-layers"] = False + + # Enable WebRender by default on the QuantumRender test platforms, since + # the whole point of QuantumRender is to run with WebRender enabled. + # This currently matches linux64-qr and windows10-64-qr; both of these + # have /opt and /debug variants. + if "-qr/" in task["test-platform"]: + task["webrender"] = True + else: + task.setdefault("webrender", False) + + task.setdefault("e10s", True) + task.setdefault("try-name", task["test-name"]) + task.setdefault("os-groups", []) + task.setdefault("run-as-administrator", False) + task.setdefault("chunks", 1) + task.setdefault("run-on-projects", "built-projects") + task.setdefault("built-projects-only", False) + task.setdefault("instance-size", "default") + task.setdefault("max-run-time", 3600) + task.setdefault("reboot", False) + task.setdefault("virtualization", "virtual") + task.setdefault("loopback-audio", False) + task.setdefault("loopback-video", False) + task.setdefault("limit-platforms", []) + task.setdefault("docker-image", {"in-tree": "ubuntu1804-test"}) + task.setdefault("checkout", False) + task.setdefault("require-signed-extensions", False) + task.setdefault("variants", []) + + task["mozharness"].setdefault("extra-options", []) + task["mozharness"].setdefault("requires-signed-builds", False) + task["mozharness"].setdefault("tooltool-downloads", "public") + task["mozharness"].setdefault("set-moz-node-path", False) + task["mozharness"].setdefault("chunked", False) + yield task + + +@transforms.add +def resolve_keys(config, tasks): + for task in tasks: + resolve_keyed_by( + task, + "require-signed-extensions", + item_name=task["test-name"], + **{ + "release-type": config.params["release_type"], + } + ) + yield task + + +@transforms.add +def setup_raptor(config, tasks): + """Add options that are specific to raptor jobs (identified by suite=raptor)""" + from taskgraph.transforms.raptor import transforms as raptor_transforms + + for task in tasks: + if task["suite"] != "raptor": + yield task + continue + + for t in raptor_transforms(config, [task]): + yield t + + +@transforms.add +def limit_platforms(config, tasks): + for task in tasks: + if not task["limit-platforms"]: + yield 
task
+ continue
+
+ limited_platforms = {key: key for key in task["limit-platforms"]}
+ if keymatch(limited_platforms, task["test-platform"]):
+ yield task
+
+
+transforms.add_validate(test_description_schema)
+
+
+@transforms.add
+def handle_suite_category(config, tasks):
+ for task in tasks:
+ task.setdefault("suite", {})
+
+ if isinstance(task["suite"], text_type):
+ task["suite"] = {"name": task["suite"]}
+
+ suite = task["suite"].setdefault("name", task["test-name"])
+ category = task["suite"].setdefault("category", suite)
+
+ task.setdefault("attributes", {})
+ task["attributes"]["unittest_suite"] = suite
+ task["attributes"]["unittest_category"] = category
+
+ script = task["mozharness"]["script"]
+ category_arg = None
+ if suite.startswith("test-verify") or suite.startswith("test-coverage"):
+ pass
+ elif script in ("android_emulator_unittest.py", "android_hardware_unittest.py"):
+ category_arg = "--test-suite"
+ elif script == "desktop_unittest.py":
+ category_arg = "--{}-suite".format(category)
+
+ if category_arg:
+ task["mozharness"].setdefault("extra-options", [])
+ extra = task["mozharness"]["extra-options"]
+ if not any(arg.startswith(category_arg) for arg in extra):
+ extra.append("{}={}".format(category_arg, suite))
+
+ # From here on out we only use the suite name.
+ task["suite"] = suite
+ yield task
+
+
+@transforms.add
+def setup_talos(config, tasks):
+ """Add options that are specific to talos jobs (identified by suite=talos)"""
+ for task in tasks:
+ if task["suite"] != "talos":
+ yield task
+ continue
+
+ extra_options = task.setdefault("mozharness", {}).setdefault(
+ "extra-options", []
+ )
+ extra_options.append("--use-talos-json")
+
+ # win7 needs Direct2D disabled when running tests
+ if task["build-platform"].startswith("win32"):
+ extra_options.append("--add-option")
+ extra_options.append("--setpref,gfx.direct2d.disabled=true")
+
+ yield task
+
+
+@transforms.add
+def setup_browsertime_flag(config, tasks):
+ """Optionally add the `--browsertime` flag to Raptor pageload tests."""
+
+ browsertime_flag = config.params["try_task_config"].get("browsertime", False)
+
+ for task in tasks:
+ if not browsertime_flag or task["suite"] != "raptor":
+ yield task
+ continue
+
+ if task["treeherder-symbol"].startswith("Rap"):
+ # The Rap group is subdivided as Rap{-fenix,-refbrow,-fennec}(...),
+ # so `taskgraph.util.treeherder.replace_group` isn't appropriate.
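+ # A plain replace of the first "Rap" is enough, e.g. (illustrative):
+ # "Rap(tp6)" -> "Btime(tp6)", "Rap-fenix(tp6m)" -> "Btime-fenix(tp6m)".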
+ task["treeherder-symbol"] = task["treeherder-symbol"].replace( + "Rap", "Btime", 1 + ) + + extra_options = task.setdefault("mozharness", {}).setdefault( + "extra-options", [] + ) + extra_options.append("--browsertime") + + yield task + + +@transforms.add +def handle_artifact_prefix(config, tasks): + """Handle translating `artifact_prefix` appropriately""" + for task in tasks: + if task["build-attributes"].get("artifact_prefix"): + task.setdefault("attributes", {}).setdefault( + "artifact_prefix", task["build-attributes"]["artifact_prefix"] + ) + yield task + + +@transforms.add +def set_target(config, tasks): + for task in tasks: + build_platform = task["build-platform"] + target = None + if "target" in task: + resolve_keyed_by(task, "target", item_name=task["test-name"]) + target = task["target"] + if not target: + if build_platform.startswith("macosx"): + target = "target.dmg" + elif build_platform.startswith("android"): + target = "target.apk" + elif build_platform.startswith("win"): + target = "target.zip" + else: + target = "target.tar.bz2" + + if isinstance(target, dict): + # TODO Remove hardcoded mobile artifact prefix + index_url = get_index_url(target["index"]) + installer_url = "{}/artifacts/public/{}".format(index_url, target["name"]) + task["mozharness"]["installer-url"] = installer_url + else: + task["mozharness"]["build-artifact-name"] = get_artifact_path(task, target) + + yield task + + +@transforms.add +def set_treeherder_machine_platform(config, tasks): + """Set the appropriate task.extra.treeherder.machine.platform""" + translation = { + # Linux64 build platform for asan is specified differently to + # treeherder. + "macosx1014-64/debug": "osx-10-14/debug", + "macosx1014-64/opt": "osx-10-14/opt", + "macosx1014-64-shippable/opt": "osx-10-14-shippable/opt", + "win64-asan/opt": "windows10-64/asan", + "win64-aarch64/opt": "windows10-aarch64/opt", + } + for task in tasks: + # For most desktop platforms, the above table is not used for "regular" + # builds, so we'll always pick the test platform here. + # On macOS though, the regular builds are in the table. This causes a + # conflict in `verify_task_graph_symbol` once you add a new test + # platform based on regular macOS builds, such as for QR. + # Since it's unclear if the regular macOS builds can be removed from + # the table, workaround the issue for QR. + if "android" in task["test-platform"] and "pgo/opt" in task["test-platform"]: + platform_new = task["test-platform"].replace("-pgo/opt", "/pgo") + task["treeherder-machine-platform"] = platform_new + elif "android-em-7.0-x86_64-qr" in task["test-platform"]: + task["treeherder-machine-platform"] = task["test-platform"].replace( + ".", "-" + ) + elif "android-em-7.0-x86_64-shippable-qr" in task["test-platform"]: + task["treeherder-machine-platform"] = task["test-platform"].replace( + ".", "-" + ) + elif "-qr" in task["test-platform"]: + task["treeherder-machine-platform"] = task["test-platform"] + elif "android-hw" in task["test-platform"]: + task["treeherder-machine-platform"] = task["test-platform"] + elif "android-em-7.0-x86_64" in task["test-platform"]: + task["treeherder-machine-platform"] = task["test-platform"].replace( + ".", "-" + ) + elif "android-em-7.0-x86" in task["test-platform"]: + task["treeherder-machine-platform"] = task["test-platform"].replace( + ".", "-" + ) + # Bug 1602863 - must separately define linux64/asan and linux1804-64/asan + # otherwise causes an exception during taskgraph generation about + # duplicate treeherder platform/symbol. 
+ elif "linux64-asan/opt" in task["test-platform"]: + task["treeherder-machine-platform"] = "linux64/asan" + elif "linux1804-asan/opt" in task["test-platform"]: + task["treeherder-machine-platform"] = "linux1804-64/asan" + else: + task["treeherder-machine-platform"] = translation.get( + task["build-platform"], task["test-platform"] + ) + yield task + + +@transforms.add +def set_tier(config, tasks): + """Set the tier based on policy for all test descriptions that do not + specify a tier otherwise.""" + for task in tasks: + if "tier" in task: + resolve_keyed_by(task, "tier", item_name=task["test-name"]) + + if "fission-tier" in task: + resolve_keyed_by(task, "fission-tier", item_name=task["test-name"]) + + # only override if not set for the test + if "tier" not in task or task["tier"] == "default": + if task["test-platform"] in [ + "linux64/opt", + "linux64/debug", + "linux64-shippable/opt", + "linux64-devedition/opt", + "linux64-asan/opt", + "linux64-qr/opt", + "linux64-qr/debug", + "linux64-shippable-qr/opt", + "linux1804-64/opt", + "linux1804-64/debug", + "linux1804-64-shippable/opt", + "linux1804-64-devedition/opt", + "linux1804-64-qr/opt", + "linux1804-64-qr/debug", + "linux1804-64-shippable-qr/opt", + "linux1804-64-asan/opt", + "linux1804-64-tsan/opt", + "windows7-32/debug", + "windows7-32/opt", + "windows7-32-devedition/opt", + "windows7-32-shippable/opt", + "windows10-aarch64/opt", + "windows10-64/debug", + "windows10-64/opt", + "windows10-64-shippable/opt", + "windows10-64-devedition/opt", + "windows10-64-asan/opt", + "windows10-64-qr/opt", + "windows10-64-qr/debug", + "windows10-64-shippable-qr/opt", + "macosx1014-64/opt", + "macosx1014-64/debug", + "macosx1014-64-shippable/opt", + "macosx1014-64-devedition/opt", + "macosx1014-64-devedition-qr/opt", + "macosx1014-64-qr/opt", + "macosx1014-64-shippable-qr/opt", + "macosx1014-64-qr/debug", + "android-em-7.0-x86_64-shippable/opt", + "android-em-7.0-x86_64/debug", + "android-em-7.0-x86_64/opt", + "android-em-7.0-x86-shippable/opt", + "android-em-7.0-x86_64-shippable-qr/opt", + "android-em-7.0-x86_64-qr/debug", + "android-em-7.0-x86_64-qr/opt", + ]: + task["tier"] = 1 + else: + task["tier"] = 2 + + yield task + + +@transforms.add +def set_download_symbols(config, tasks): + """In general, we download symbols immediately for debug builds, but only + on demand for everything else. 
ASAN builds shouldn't download
+ symbols since they don't produce symbol zips; see bug 1283879."""
+ for task in tasks:
+ if task["test-platform"].split("/")[-1] == "debug":
+ task["mozharness"]["download-symbols"] = True
+ elif (
+ task["build-platform"] == "linux64-asan/opt"
+ or task["build-platform"] == "windows10-64-asan/opt"
+ ):
+ if "download-symbols" in task["mozharness"]:
+ del task["mozharness"]["download-symbols"]
+ else:
+ task["mozharness"]["download-symbols"] = "ondemand"
+ yield task
+
+
+@transforms.add
+def handle_keyed_by(config, tasks):
+ """Resolve fields that can be keyed by platform, etc."""
+ fields = [
+ "instance-size",
+ "docker-image",
+ "max-run-time",
+ "chunks",
+ "variants",
+ "e10s",
+ "suite",
+ "run-on-projects",
+ "fission-run-on-projects",
+ "os-groups",
+ "run-as-administrator",
+ "workdir",
+ "worker-type",
+ "virtualization",
+ "fetches.fetch",
+ "fetches.toolchain",
+ "target",
+ "webrender-run-on-projects",
+ ]
+ for task in tasks:
+ for field in fields:
+ resolve_keyed_by(
+ task,
+ field,
+ item_name=task["test-name"],
+ defer=["variant"],
+ project=config.params["project"],
+ )
+ yield task
+
+
+@transforms.add
+def setup_browsertime(config, tasks):
+ """Configure browsertime dependencies for Raptor pageload tests that have
+ the `--browsertime` extra option."""
+
+ for task in tasks:
+ # We need to make non-trivial changes to various fetches, and our
+ # `by-test-platform` may not be "compatible" with existing
+ # `by-test-platform` filters. Therefore we do everything after
+ # `handle_keyed_by` so that existing fields have been resolved down to
+ # simple lists. But we use the `by-test-platform` machinery to express
+ # filters so that when the time comes to move browsertime into YAML
+ # files, the transition is straightforward.
+ extra_options = task.get("mozharness", {}).get("extra-options", [])
+
+ if task["suite"] != "raptor" or "--browsertime" not in extra_options:
+ yield task
+ continue
+
+ # This is appropriate as the browsertime task variants mature.
+ task["tier"] = max(task["tier"], 1) + + ts = { + "by-test-platform": { + "android.*": ["browsertime", "linux64-geckodriver", "linux64-node"], + "linux.*": ["browsertime", "linux64-geckodriver", "linux64-node"], + "macosx.*": ["browsertime", "macosx64-geckodriver", "macosx64-node"], + "windows.*aarch64.*": [ + "browsertime", + "win32-geckodriver", + "win32-node", + ], + "windows.*-32.*": ["browsertime", "win32-geckodriver", "win32-node"], + "windows.*-64.*": ["browsertime", "win64-geckodriver", "win64-node"], + }, + } + + task.setdefault("fetches", {}).setdefault("toolchain", []).extend( + evaluate_keyed_by(ts, "fetches.toolchain", task) + ) + + fs = { + "by-test-platform": { + "android.*": ["linux64-ffmpeg-4.1.4"], + "linux.*": ["linux64-ffmpeg-4.1.4"], + "macosx.*": ["mac64-ffmpeg-4.1.1"], + "windows.*aarch64.*": ["win64-ffmpeg-4.1.1"], + "windows.*-32.*": ["win64-ffmpeg-4.1.1"], + "windows.*-64.*": ["win64-ffmpeg-4.1.1"], + }, + } + + cd_fetches = { + "android.*": [ + "linux64-chromedriver-85", + "linux64-chromedriver-86", + "linux64-chromedriver-87", + ], + "linux.*": [ + "linux64-chromedriver-85", + "linux64-chromedriver-86", + "linux64-chromedriver-87", + ], + "macosx.*": [ + "mac64-chromedriver-85", + "mac64-chromedriver-86", + "mac64-chromedriver-87", + ], + "windows.*aarch64.*": [ + "win32-chromedriver-85", + "win32-chromedriver-86", + "win32-chromedriver-87", + ], + "windows.*-32.*": [ + "win32-chromedriver-85", + "win32-chromedriver-86", + "win32-chromedriver-87", + ], + "windows.*-64.*": [ + "win32-chromedriver-85", + "win32-chromedriver-86", + "win32-chromedriver-87", + ], + } + + chromium_fetches = { + "linux.*": ["linux64-chromium"], + "macosx.*": ["mac-chromium"], + "windows.*aarch64.*": ["win32-chromium"], + "windows.*-32.*": ["win32-chromium"], + "windows.*-64.*": ["win64-chromium"], + } + + cd_extracted_name = { + "windows": "{}chromedriver.exe", + "mac": "{}chromedriver", + "default": "{}chromedriver", + } + + if "--app=chrome" in extra_options or "--app=chrome-m" in extra_options: + # Only add the chromedriver fetches when chrome is running + for platform in cd_fetches: + fs["by-test-platform"][platform].extend(cd_fetches[platform]) + if "--app=chromium" in extra_options: + for platform in chromium_fetches: + fs["by-test-platform"][platform].extend(chromium_fetches[platform]) + + # The chromedrivers for chromium are repackaged into the archives + # that we get the chromium binary from so we always have a compatible + # version. 
+ cd_extracted_name = { + "windows": "chrome-win/chromedriver.exe", + "mac": "chrome-mac/chromedriver", + "default": "chrome-linux/chromedriver", + } + + # Disable the Raptor install step + if "--app=chrome-m" in extra_options: + extra_options.append("--noinstall") + + task.setdefault("fetches", {}).setdefault("fetch", []).extend( + evaluate_keyed_by(fs, "fetches.fetch", task) + ) + + extra_options.extend( + ( + "--browsertime-browsertimejs", + "$MOZ_FETCHES_DIR/browsertime/node_modules/browsertime/bin/browsertime.js", + ) + ) # noqa: E501 + + eos = { + "by-test-platform": { + "windows.*": [ + "--browsertime-node", + "$MOZ_FETCHES_DIR/node/node.exe", + "--browsertime-geckodriver", + "$MOZ_FETCHES_DIR/geckodriver.exe", + "--browsertime-chromedriver", + "$MOZ_FETCHES_DIR/" + cd_extracted_name["windows"], + "--browsertime-ffmpeg", + "$MOZ_FETCHES_DIR/ffmpeg-4.1.1-win64-static/bin/ffmpeg.exe", + ], + "macosx.*": [ + "--browsertime-node", + "$MOZ_FETCHES_DIR/node/bin/node", + "--browsertime-geckodriver", + "$MOZ_FETCHES_DIR/geckodriver", + "--browsertime-chromedriver", + "$MOZ_FETCHES_DIR/" + cd_extracted_name["mac"], + "--browsertime-ffmpeg", + "$MOZ_FETCHES_DIR/ffmpeg-4.1.1-macos64-static/bin/ffmpeg", + ], + "default": [ + "--browsertime-node", + "$MOZ_FETCHES_DIR/node/bin/node", + "--browsertime-geckodriver", + "$MOZ_FETCHES_DIR/geckodriver", + "--browsertime-chromedriver", + "$MOZ_FETCHES_DIR/" + cd_extracted_name["default"], + "--browsertime-ffmpeg", + "$MOZ_FETCHES_DIR/ffmpeg-4.1.4-i686-static/ffmpeg", + ], + } + } + + extra_options.extend(evaluate_keyed_by(eos, "mozharness.extra-options", task)) + + yield task + + +def get_mobile_project(task): + """Returns the mobile project of the specified task or None.""" + + if not task["build-platform"].startswith("android"): + return + + mobile_projects = ("fenix", "fennec", "geckoview", "refbrow", "chrome-m") + + for name in mobile_projects: + if name in task["test-name"]: + return name + + target = task.get("target") + if target: + if isinstance(target, dict): + target = target["name"] + + for name in mobile_projects: + if name in target: + return name + + return "fennec" + + +@transforms.add +def adjust_mobile_e10s(config, tasks): + for task in tasks: + project = get_mobile_project(task) + if project == "geckoview": + # Geckoview is always-e10s + task["e10s"] = True + elif project == "fennec": + # Fennec is non-e10s + task["e10s"] = False + yield task + + +@transforms.add +def disable_wpt_timeouts_on_autoland(config, tasks): + """do not run web-platform-tests that are expected TIMEOUT on autoland""" + for task in tasks: + if ( + "web-platform-tests" in task["test-name"] + and config.params["project"] == "autoland" + ): + task["mozharness"].setdefault("extra-options", []).append("--skip-timeout") + yield task + + +@transforms.add +def enable_code_coverage(config, tasks): + """Enable code coverage for the ccov build-platforms""" + for task in tasks: + if "ccov" in task["build-platform"]: + # Do not run tests on fuzzing builds + if "fuzzing" in task["build-platform"]: + task["run-on-projects"] = [] + continue + + # Skip this transform for android code coverage builds. 
+ if "android" in task["build-platform"]: + task.setdefault("fetches", {}).setdefault("toolchain", []).append( + "linux64-grcov" + ) + task["mozharness"].setdefault("extra-options", []).append( + "--java-code-coverage" + ) + yield task + continue + task["mozharness"].setdefault("extra-options", []).append("--code-coverage") + task["instance-size"] = "xlarge" + + # Temporarily disable Mac tests on mozilla-central + if "mac" in task["build-platform"]: + task["run-on-projects"] = [] + + # Ensure we always run on the projects defined by the build, unless the test + # is try only or shouldn't run at all. + if task["run-on-projects"] not in [[]]: + task["run-on-projects"] = "built-projects" + + # Ensure we don't optimize test suites out. + # We always want to run all test suites for coverage purposes. + task.pop("schedules-component", None) + task.pop("when", None) + task["optimization"] = None + + # Add a toolchain and a fetch task for the grcov binary. + if any(p in task["build-platform"] for p in ("linux", "osx", "win")): + task.setdefault("fetches", {}) + task["fetches"].setdefault("fetch", []) + task["fetches"].setdefault("toolchain", []) + + if "linux" in task["build-platform"]: + task["fetches"]["toolchain"].append("linux64-grcov") + elif "osx" in task["build-platform"]: + task["fetches"]["fetch"].append("grcov-osx-x86_64") + elif "win" in task["build-platform"]: + task["fetches"]["toolchain"].append("win64-grcov") + + if "talos" in task["test-name"]: + task["max-run-time"] = 7200 + if "linux" in task["build-platform"]: + task["docker-image"] = {"in-tree": "ubuntu1804-test"} + task["mozharness"]["extra-options"].append("--add-option") + task["mozharness"]["extra-options"].append("--cycles,1") + task["mozharness"]["extra-options"].append("--add-option") + task["mozharness"]["extra-options"].append("--tppagecycles,1") + task["mozharness"]["extra-options"].append("--add-option") + task["mozharness"]["extra-options"].append("--no-upload-results") + task["mozharness"]["extra-options"].append("--add-option") + task["mozharness"]["extra-options"].append("--tptimeout,15000") + if "raptor" in task["test-name"]: + task["max-run-time"] = 1800 + yield task + + +@transforms.add +def handle_run_on_projects(config, tasks): + """Handle translating `built-projects` appropriately""" + for task in tasks: + if task["run-on-projects"] == "built-projects": + task["run-on-projects"] = task["build-attributes"].get( + "run_on_projects", ["all"] + ) + + if task.pop("built-projects-only", False): + built_projects = set( + task["build-attributes"].get("run_on_projects", {"all"}) + ) + run_on_projects = set(task.get("run-on-projects", set())) + + # If 'all' exists in run-on-projects, then the intersection of both + # is built-projects. Similarly if 'all' exists in built-projects, + # the intersection is run-on-projects (so do nothing). When neither + # contains 'all', take the actual set intersection. 
+ if "all" in run_on_projects: + task["run-on-projects"] = sorted(built_projects) + elif "all" not in built_projects: + task["run-on-projects"] = sorted(run_on_projects & built_projects) + yield task + + +@transforms.add +def split_variants(config, tasks): + for task in tasks: + variants = task.pop("variants", []) + + yield copy.deepcopy(task) + + for name in variants: + taskv = copy.deepcopy(task) + variant = TEST_VARIANTS[name] + + if "filterfn" in variant and not variant["filterfn"](taskv): + continue + + taskv["attributes"]["unittest_variant"] = name + taskv["description"] = variant["description"].format(**taskv) + + suffix = "-" + variant["suffix"] + taskv["test-name"] += suffix + taskv["try-name"] += suffix + + group, symbol = split_symbol(taskv["treeherder-symbol"]) + if group != "?": + group += suffix + else: + symbol += suffix + taskv["treeherder-symbol"] = join_symbol(group, symbol) + + taskv.update(variant.get("replace", {})) + + if task["suite"] == "raptor": + taskv["tier"] = max(taskv["tier"], 2) + + yield merge(taskv, variant.get("merge", {})) + + +@transforms.add +def handle_keyed_by_variant(config, tasks): + """Resolve fields that can be keyed by platform, etc.""" + fields = [ + "run-on-projects", + ] + for task in tasks: + for field in fields: + resolve_keyed_by( + task, + field, + item_name=task["test-name"], + variant=task["attributes"].get("unittest_variant"), + ) + yield task + + +@transforms.add +def handle_fission_attributes(config, tasks): + """Handle run_on_projects for fission tasks.""" + for task in tasks: + for attr in ("run-on-projects", "tier"): + fission_attr = task.pop("fission-{}".format(attr), None) + + if ( + task["attributes"].get("unittest_variant") + not in ("fission", "geckoview-fission", "fission-xorigin") + ) or fission_attr is None: + continue + + task[attr] = fission_attr + + yield task + + +@transforms.add +def disable_try_only_platforms(config, tasks): + """Turns off platforms that should only run on try.""" + try_only_platforms = ("windows7-32-qr/.*",) + for task in tasks: + if any(re.match(k + "$", task["test-platform"]) for k in try_only_platforms): + task["run-on-projects"] = [] + if "fission-run-on-projects" in task: + task["fission-run-on-projects"] = [] + yield task + + +@transforms.add +def ensure_spi_disabled_on_all_but_spi(config, tasks): + for task in tasks: + variant = task["attributes"].get("unittest_variant", "") + has_setpref = ( + "gtest" not in task["suite"] + and "cppunit" not in task["suite"] + and "jittest" not in task["suite"] + and "junit" not in task["suite"] + and "raptor" not in task["suite"] + ) + + if ( + has_setpref + and variant != "socketprocess" + and variant != "socketprocess_networking" + ): + task["mozharness"]["extra-options"].append( + "--setpref=media.peerconnection.mtransport_process=false" + ) + task["mozharness"]["extra-options"].append( + "--setpref=network.process.enabled=false" + ) + + yield task + + +@transforms.add +def split_e10s(config, tasks): + for task in tasks: + e10s = task["e10s"] + + if e10s: + task_copy = copy.deepcopy(task) + task_copy["test-name"] += "-e10s" + task_copy["e10s"] = True + task_copy["attributes"]["e10s"] = True + yield task_copy + + if not e10s or e10s == "both": + task["test-name"] += "-1proc" + task["try-name"] += "-1proc" + task["e10s"] = False + task["attributes"]["e10s"] = False + group, symbol = split_symbol(task["treeherder-symbol"]) + if group != "?": + group += "-1proc" + task["treeherder-symbol"] = join_symbol(group, symbol) + 
task["mozharness"]["extra-options"].append("--disable-e10s") + yield task + + +@transforms.add +def set_test_verify_chunks(config, tasks): + """Set the number of chunks we use for test-verify.""" + for task in tasks: + if any(task["suite"].startswith(s) for s in ("test-verify", "test-coverage")): + env = config.params.get("try_task_config", {}) or {} + env = env.get("templates", {}).get("env", {}) + task["chunks"] = perfile_number_of_chunks( + config.params.is_try(), + env.get("MOZHARNESS_TEST_PATHS", ""), + config.params.get("head_repository", ""), + config.params.get("head_rev", ""), + task["test-name"], + ) + + # limit the number of chunks we run for test-verify mode because + # test-verify is comprehensive and takes a lot of time, if we have + # >30 tests changed, this is probably an import of external tests, + # or a patch renaming/moving files in bulk + maximum_number_verify_chunks = 3 + if task["chunks"] > maximum_number_verify_chunks: + task["chunks"] = maximum_number_verify_chunks + + yield task + + +@transforms.add +def set_test_manifests(config, tasks): + """Determine the set of test manifests that should run in this task.""" + + for task in tasks: + # When a task explicitly requests no 'test_manifest_loader', test + # resolving will happen at test runtime rather than in the taskgraph. + if "test-manifest-loader" in task and task["test-manifest-loader"] is None: + yield task + continue + + # Set 'tests_grouped' to "1", so we can differentiate between suites that are + # chunked at the test runtime and those that are chunked in the taskgraph. + task.setdefault("tags", {})["tests_grouped"] = "1" + + if taskgraph.fast: + # We want to avoid evaluating manifests when taskgraph.fast is set. But + # manifests are required for dynamic chunking. Just set the number of + # chunks to one in this case. + if task["chunks"] == "dynamic": + task["chunks"] = 1 + yield task + continue + + manifests = task.get("test-manifests") + if manifests: + if isinstance(manifests, list): + task["test-manifests"] = {"active": manifests, "skipped": []} + yield task + continue + + mozinfo = guess_mozinfo_from_task(task) + + loader_name = task.pop( + "test-manifest-loader", config.params["test_manifest_loader"] + ) + loader = get_manifest_loader(loader_name, config.params) + + task["test-manifests"] = loader.get_manifests( + task["suite"], + frozenset(mozinfo.items()), + ) + + # The default loader loads all manifests. If we use a non-default + # loader, we'll only run some subset of manifests and the hardcoded + # chunk numbers will no longer be valid. Dynamic chunking should yield + # better results. + if not isinstance(loader, DefaultLoader): + task["chunks"] = "dynamic" + + yield task + + +@transforms.add +def resolve_dynamic_chunks(config, tasks): + """Determine how many chunks are needed to handle the given set of manifests.""" + + for task in tasks: + if task["chunks"] != "dynamic": + yield task + continue + + if not task.get("test-manifests"): + raise Exception( + "{} must define 'test-manifests' to use dynamic chunking!".format( + task["test-name"] + ) + ) + + runtimes = { + m: r + for m, r in get_runtimes(task["test-platform"], task["suite"]).items() + if m in task["test-manifests"]["active"] + } + + # Truncate runtimes that are above the desired chunk duration. They + # will be assigned to a chunk on their own and the excess duration + # shouldn't cause additional chunks to be needed. 
+ times = [min(DYNAMIC_CHUNK_DURATION, r) for r in runtimes.values()] + avg = round(sum(times) / len(times), 2) if times else 0 + total = sum(times) + + # If there are manifests missing from the runtimes data, fill them in + # with the average of all present manifests. + missing = [m for m in task["test-manifests"]["active"] if m not in runtimes] + total += avg * len(missing) + + # Apply any chunk multipliers if found. + key = "{}-{}".format(task["test-platform"], task["test-name"]) + matches = keymatch(DYNAMIC_CHUNK_MULTIPLIER, key) + if len(matches) > 1: + raise Exception( + "Multiple matching values for {} found while " + "determining dynamic chunk multiplier!".format(key) + ) + elif matches: + total = total * matches[0] + + chunks = int(round(total / DYNAMIC_CHUNK_DURATION)) + + # Make sure we never exceed the number of manifests, nor have a chunk + # length of 0. + task["chunks"] = min(chunks, len(task["test-manifests"]["active"])) or 1 + yield task + + +@transforms.add +def split_chunks(config, tasks): + """Based on the 'chunks' key, split tests up into chunks by duplicating + them and assigning 'this-chunk' appropriately and updating the treeherder + symbol. + """ + + for task in tasks: + # If test-manifests are set, chunk them ahead of time to avoid running + # the algorithm more than once. + chunked_manifests = None + if "test-manifests" in task: + manifests = task["test-manifests"] + chunked_manifests = chunk_manifests( + task["suite"], + task["test-platform"], + task["chunks"], + manifests["active"], + ) + + # Add all skipped manifests to the first chunk of backstop pushes + # so they still show up in the logs. They won't impact runtime much + # and this way tools like ActiveData are still aware that they + # exist. + if config.params["backstop"] and manifests["active"]: + chunked_manifests[0].extend(manifests["skipped"]) + + for i in range(task["chunks"]): + this_chunk = i + 1 + + # copy the test and update with the chunk number + chunked = copy.deepcopy(task) + chunked["this-chunk"] = this_chunk + + if chunked_manifests is not None: + chunked["test-manifests"] = sorted(chunked_manifests[i]) + + group, symbol = split_symbol(chunked["treeherder-symbol"]) + if task["chunks"] > 1 or not symbol: + # add the chunk number to the TH symbol + symbol += str(this_chunk) + chunked["treeherder-symbol"] = join_symbol(group, symbol) + + yield chunked + + +@transforms.add +def allow_software_gl_layers(config, tasks): + """ + Handle the "allow-software-gl-layers" property for platforms where it + applies. + """ + for task in tasks: + if task.get("allow-software-gl-layers"): + # This should be set always once bug 1296086 is resolved. + task["mozharness"].setdefault("extra-options", []).append( + "--allow-software-gl-layers" + ) + + yield task + + +@transforms.add +def enable_webrender(config, tasks): + """ + Handle the "webrender" property by passing a flag to mozharness if it is + enabled. 
+    """
+    for task in tasks:
+        if task.get("webrender"):
+            extra_options = task["mozharness"].setdefault("extra-options", [])
+            extra_options.append("--enable-webrender")
+            # We only want to 'setpref' on tests that have a profile.
+            if task["attributes"]["unittest_category"] not in [
+                "cppunittest",
+                "geckoview-junit",
+                "gtest",
+                "jittest",
+                "raptor",
+            ]:
+                extra_options.append("--setpref=layers.d3d11.enable-blacklist=false")
+
+            # run webrender variants on the projects specified in webrender-run-on-projects
+            if task.get("webrender-run-on-projects") is not None:
+                task["run-on-projects"] = task["webrender-run-on-projects"]
+
+        yield task
+
+
+@transforms.add
+def set_schedules_for_webrender_android(config, tasks):
+    """android-hw has limited resources; we need webrender on phones."""
+    for task in tasks:
+        if task["suite"] in ["crashtest", "reftest"] and task[
+            "test-platform"
+        ].startswith("android-hw"):
+            task["schedules-component"] = "android-hw-gfx"
+        yield task
+
+
+@transforms.add
+def set_retry_exit_status(config, tasks):
+    """Set the retry exit status to TBPL_RETRY, the value returned by mozharness
+    scripts to indicate a transient failure that should be retried."""
+    for task in tasks:
+        task["retry-exit-status"] = [4]
+        yield task
+
+
+@transforms.add
+def set_profile(config, tasks):
+    """Set profiling mode for tests."""
+    profile = config.params["try_task_config"].get("gecko-profile", False)
+
+    for task in tasks:
+        if profile and task["suite"] in ["talos", "raptor"]:
+            task["mozharness"]["extra-options"].append("--gecko-profile")
+        yield task
+
+
+@transforms.add
+def set_tag(config, tasks):
+    """Restrict tests to a specific tag."""
+    tag = None
+    if config.params["try_mode"] == "try_option_syntax":
+        tag = config.params["try_options"]["tag"]
+    for task in tasks:
+        if tag:
+            task["mozharness"]["extra-options"].extend(["--tag", tag])
+        yield task
+
+
+@transforms.add
+def set_test_type(config, tasks):
+    types = ["mochitest", "reftest", "talos", "raptor", "geckoview-junit", "gtest"]
+    for task in tasks:
+        for test_type in types:
+            if test_type in task["suite"] and "web-platform" not in task["suite"]:
+                task.setdefault("tags", {})["test-type"] = test_type
+        yield task
+
+
+@transforms.add
+def set_worker_type(config, tasks):
+    """Set the worker type based on the test platform."""
+    for task in tasks:
+        # during the taskcluster migration, this is a bit tortured, but it
+        # will get simpler eventually!
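+        # Resolution order: an explicit worker-type on the task wins; otherwise
+        # dispatch on the platform prefix: macOS (power vs. regular pools),
+        # Windows (hardware vs. VM pools), android-hw (per-device pools),
+        # the Android emulator, then Linux (talos/raptor hardware, else pools
+        # sized by instance-size).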
+        test_platform = task["test-platform"]
+        if task.get("worker-type"):
+            # This test already has its worker type defined, so just use that (yields below)
+            pass
+        elif test_platform.startswith("macosx1014-64"):
+            if "--power-test" in task["mozharness"]["extra-options"]:
+                task["worker-type"] = MACOSX_WORKER_TYPES["macosx1014-64-power"]
+            else:
+                task["worker-type"] = MACOSX_WORKER_TYPES["macosx1014-64"]
+        elif test_platform.startswith("win"):
+            # figure out what platform the job needs to run on
+            if task["virtualization"] == "hardware":
+                # some jobs like talos and reftest run on real h/w - those are all win10
+                if test_platform.startswith("windows10-64-ref-hw-2017"):
+                    win_worker_type_platform = WINDOWS_WORKER_TYPES[
+                        "windows10-64-ref-hw-2017"
+                    ]
+                elif test_platform.startswith("windows10-aarch64"):
+                    win_worker_type_platform = WINDOWS_WORKER_TYPES["windows10-aarch64"]
+                else:
+                    win_worker_type_platform = WINDOWS_WORKER_TYPES["windows10-64"]
+            else:
+                # the other jobs run on a vm which may or may not be a win10 vm
+                win_worker_type_platform = WINDOWS_WORKER_TYPES[
+                    test_platform.split("/")[0]
+                ]
+            # now that we have the right platform, set the worker type accordingly
+            task["worker-type"] = win_worker_type_platform[task["virtualization"]]
+        elif test_platform.startswith("android-hw-g5"):
+            if task["suite"] != "raptor":
+                task["worker-type"] = "t-bitbar-gw-unit-g5"
+            else:
+                task["worker-type"] = "t-bitbar-gw-perf-g5"
+        elif test_platform.startswith("android-hw-p2"):
+            if task["suite"] != "raptor":
+                task["worker-type"] = "t-bitbar-gw-unit-p2"
+            else:
+                task["worker-type"] = "t-bitbar-gw-perf-p2"
+        elif test_platform.startswith("android-hw-s7"):
+            if task["suite"] != "raptor":
+                task["worker-type"] = "t-bitbar-gw-unit-s7"
+            else:
+                task["worker-type"] = "t-bitbar-gw-perf-s7"
+        elif test_platform.startswith("android-em-7.0-x86"):
+            task["worker-type"] = "t-linux-metal"
+        elif test_platform.startswith("linux") or test_platform.startswith("android"):
+            if task.get("suite", "") in ["talos", "raptor"] and not task[
+                "build-platform"
+            ].startswith("linux64-ccov"):
+                task["worker-type"] = "t-linux-talos"
+            else:
+                task["worker-type"] = LINUX_WORKER_TYPES[task["instance-size"]]
+        else:
+            raise Exception("unknown test_platform {}".format(test_platform))
+
+        yield task
+
+
+@transforms.add
+def set_schedules_components(config, tasks):
+    for task in tasks:
+        if "optimization" in task or "when" in task:
+            yield task
+            continue
+
+        category = task["attributes"]["unittest_category"]
+        schedules = task.get("schedules-component", category)
+        if isinstance(schedules, string_types):
+            schedules = [schedules]
+
+        schedules = set(schedules)
+        if schedules & set(INCLUSIVE_COMPONENTS):
+            # if this is an "inclusive" test, then all files which might
+            # cause it to run are annotated with SCHEDULES in moz.build,
+            # so do not include the platform or any other components here
+            task["schedules-component"] = sorted(schedules)
+            yield task
+            continue
+
+        schedules.add(category)
+        schedules.add(platform_family(task["build-platform"]))
+
+        if task["webrender"]:
+            schedules.add("webrender")
+
+        task["schedules-component"] = sorted(schedules)
+        yield task
+
+
+@transforms.add
+def make_job_description(config, tasks):
+    """Convert *test* descriptions to *job* descriptions (input to
+    taskgraph.transforms.job)"""
+
+    for task in tasks:
+        mobile = get_mobile_project(task)
+        if mobile and (mobile not in task["test-name"]):
+            label = "{}-{}-{}-{}".format(
+                config.kind, task["test-platform"], mobile, task["test-name"]
+            )
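+            # i.e. {kind}-{test-platform}-{mobile}-{test-name}; a hypothetical
+            # example: "test-android-hw-p2-8-0-android-aarch64/opt-fenix-crashtest"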
+        else:
+            label = "{}-{}-{}".format(
+                config.kind, task["test-platform"], task["test-name"]
+            )
+        if task["chunks"] > 1:
+            label += "-{}".format(task["this-chunk"])
+
+        build_label = task["build-label"]
+
+        try_name = task["try-name"]
+        if task["suite"] == "talos":
+            attr_try_name = "talos_try_name"
+        elif task["suite"] == "raptor":
+            attr_try_name = "raptor_try_name"
+        else:
+            attr_try_name = "unittest_try_name"
+
+        attr_build_platform, attr_build_type = task["build-platform"].split("/", 1)
+
+        attributes = task.get("attributes", {})
+        attributes.update(
+            {
+                "build_platform": attr_build_platform,
+                "build_type": attr_build_type,
+                "test_platform": task["test-platform"],
+                "test_chunk": str(task["this-chunk"]),
+                attr_try_name: try_name,
+            }
+        )
+
+        if "test-manifests" in task:
+            attributes["test_manifests"] = task["test-manifests"]
+
+        jobdesc = {}
+        name = "{}-{}".format(task["test-platform"], task["test-name"])
+        jobdesc["name"] = name
+        jobdesc["label"] = label
+        jobdesc["description"] = task["description"]
+        jobdesc["attributes"] = attributes
+        jobdesc["dependencies"] = {"build": build_label}
+        jobdesc["job-from"] = task["job-from"]
+
+        if task["mozharness"]["requires-signed-builds"] is True:
+            jobdesc["dependencies"]["build-signing"] = task["build-signing-label"]
+
+        if "expires-after" in task:
+            jobdesc["expires-after"] = task["expires-after"]
+
+        jobdesc["routes"] = []
+        jobdesc["run-on-projects"] = sorted(task["run-on-projects"])
+        jobdesc["scopes"] = []
+        jobdesc["tags"] = task.get("tags", {})
+        jobdesc["extra"] = {
+            "chunks": {
+                "current": task["this-chunk"],
+                "total": task["chunks"],
+            },
+            "suite": attributes["unittest_suite"],
+        }
+        jobdesc["treeherder"] = {
+            "symbol": task["treeherder-symbol"],
+            "kind": "test",
+            "tier": task["tier"],
+            "platform": task.get("treeherder-machine-platform", task["build-platform"]),
+        }
+
+        schedules = task.get("schedules-component", [])
+        if task.get("when"):
+            # This may still be used by comm-central.
+            jobdesc["when"] = task["when"]
+        elif "optimization" in task:
+            jobdesc["optimization"] = task["optimization"]
+        elif set(schedules) & set(INCLUSIVE_COMPONENTS):
+            jobdesc["optimization"] = {"test-inclusive": schedules}
+        else:
+            jobdesc["optimization"] = {"test": schedules}
+
+        run = jobdesc["run"] = {}
+        run["using"] = "mozharness-test"
+        run["test"] = task
+
+        if "workdir" in task:
+            run["workdir"] = task.pop("workdir")
+
+        jobdesc["worker-type"] = task.pop("worker-type")
+        if task.get("fetches"):
+            jobdesc["fetches"] = task.pop("fetches")
+
+        yield jobdesc
+
+
+def normpath(path):
+    return path.replace("/", "\\")
+
+
+def get_firefox_version():
+    with open("browser/config/version.txt", "r") as f:
+        return f.readline().strip()
diff --git a/taskcluster/taskgraph/transforms/try_job.py b/taskcluster/taskgraph/transforms/try_job.py
new file mode 100644
index 0000000000..c840c6f8e6
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/try_job.py
@@ -0,0 +1,19 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
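+"""
+Set the `job_try_name` attribute, based on the job name, for tasks selected
+via `-j` in try syntax.
+"""
+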
+from __future__ import absolute_import, print_function, unicode_literals
+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def set_job_try_name(config, jobs):
+    """
+    For a task which is governed by `-j` in try syntax, set the `job_try_name`
+    attribute based on the job name.
+    """
+    for job in jobs:
+        job.setdefault("attributes", {}).setdefault("job_try_name", job["name"])
+        yield job
diff --git a/taskcluster/taskgraph/transforms/update_verify.py b/taskcluster/taskgraph/transforms/update_verify.py
new file mode 100644
index 0000000000..1044edf7a6
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/update_verify.py
@@ -0,0 +1,60 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the update-verify task into an actual task description.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from copy import deepcopy
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.treeherder import add_suffix, inherit_treeherder_from_dep
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def add_command(config, tasks):
+    config_tasks = {}
+    for dep in config.kind_dependencies_tasks.values():
+        if (
+            "update-verify-config" in dep.kind
+            or "update-verify-next-config" in dep.kind
+        ):
+            config_tasks[dep.name] = dep
+
+    for task in tasks:
+        config_task = config_tasks[task["name"]]
+        total_chunks = task["extra"]["chunks"]
+        task["worker"].setdefault("env", {})["CHANNEL"] = config_task.task["extra"][
+            "channel"
+        ]
+        task.setdefault("fetches", {})[config_task.label] = [
+            "update-verify.cfg",
+        ]
+        task["treeherder"] = inherit_treeherder_from_dep(task, config_task)
+
+        for this_chunk in range(1, total_chunks + 1):
+            chunked = deepcopy(task)
+            chunked["treeherder"]["symbol"] = add_suffix(
+                chunked["treeherder"]["symbol"], this_chunk
+            )
+            chunked["label"] = "release-update-verify-{}-{}/{}".format(
+                chunked["name"], this_chunk, total_chunks
+            )
+            if not chunked["worker"].get("env"):
+                chunked["worker"]["env"] = {}
+            chunked["run"] = {
+                "using": "run-task",
+                "cwd": "{checkout}",
+                "command": "tools/update-verify/scripts/chunked-verify.sh "
+                "{} {}".format(
+                    total_chunks,
+                    this_chunk,
+                ),
+                "sparse-profile": "update-verify",
+            }
+
+            yield chunked
diff --git a/taskcluster/taskgraph/transforms/update_verify_config.py b/taskcluster/taskgraph/transforms/update_verify_config.py
new file mode 100644
index 0000000000..3f71bb5d47
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/update_verify_config.py
@@ -0,0 +1,137 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the update-verify-config task into an actual task description.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import six.moves.urllib.parse as urlparse
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+from taskgraph.util.scriptworker import get_release_config
+from taskgraph.transforms.task import (
+    get_branch_repo,
+    get_branch_rev,
+)
+
+transforms = TransformSequence()
+
+
+# The beta regexes do not match point releases.
+# In the rare event that we do ship a point
+# release to beta, we need to either:
+# 1) update these regexes to match that specific version
+# 2) pass a second include version that matches that specific version
+INCLUDE_VERSION_REGEXES = {
+    "beta": r"'^(\d+\.\d+(b\d+)?)$'",
+    "nonbeta": r"'^\d+\.\d+(\.\d+)?$'",
+    # Same as beta, except it excludes 58.0b1 due to issues with it not being
+    # able to update to the latest version
+    "devedition_hack": r"'^((?!58\.0b1$)\d+\.\d+(b\d+)?)$'",
+    # Same as nonbeta, except for the esr suffix
+    "esr": r"'^\d+\.\d+(\.\d+)?esr$'",
+    # Previous esr versions, for update testing before we update users to esr78
+    "esr78-next": r"'^(52|60|68)+\.\d+(\.\d+)?esr$'",
+}
+
+MAR_CHANNEL_ID_OVERRIDE_REGEXES = {
+    "beta": r"'^\d+\.\d+(\.\d+)?$$,firefox-mozilla-beta,firefox-mozilla-release'",
+}
+
+
+@transforms.add
+def add_command(config, tasks):
+    keyed_by_args = [
+        "channel",
+        "archive-prefix",
+        "previous-archive-prefix",
+        "aus-server",
+        "override-certs",
+        "include-version",
+        "mar-channel-id-override",
+        "last-watershed",
+    ]
+    optional_args = [
+        "updater-platform",
+    ]
+
+    release_config = get_release_config(config)
+
+    for task in tasks:
+        task["description"] = "generate update verify config for {}".format(
+            task["attributes"]["build_platform"]
+        )
+
+        command = [
+            "python",
+            "testing/mozharness/scripts/release/update-verify-config-creator.py",
+            "--product",
+            task["extra"]["product"],
+            "--stage-product",
+            task["shipping-product"],
+            "--app-name",
+            task["extra"]["app-name"],
+            "--branch-prefix",
+            task["extra"]["branch-prefix"],
+            "--platform",
+            task["extra"]["platform"],
+            "--to-version",
+            release_config["version"],
+            "--to-app-version",
+            release_config["appVersion"],
+            "--to-build-number",
+            str(release_config["build_number"]),
+            "--to-buildid",
+            config.params["moz_build_date"],
+            "--to-revision",
+            get_branch_rev(config),
+            "--output-file",
+            "update-verify.cfg",
+        ]
+
+        repo_path = urlparse.urlsplit(get_branch_repo(config)).path.lstrip("/")
+        command.extend(["--repo-path", repo_path])
+
+        if release_config.get("partial_versions"):
+            for partial in release_config["partial_versions"].split(","):
+                command.extend(["--partial-version", partial.split("build")[0]])
+
+        for arg in optional_args:
+            if task["extra"].get(arg):
+                command.append("--{}".format(arg))
+                command.append(task["extra"][arg])
+
+        for arg in keyed_by_args:
+            thing = "extra.{}".format(arg)
+            resolve_keyed_by(
+                task,
+                thing,
+                item_name=task["name"],
+                platform=task["attributes"]["build_platform"],
+                **{
+                    "release-type": config.params["release_type"],
+                    "release-level": config.params.release_level(),
+                }
+            )
+            # ignore things that resolved to null
+            if not task["extra"].get(arg):
+                continue
+            if arg == "include-version":
+                task["extra"][arg] = INCLUDE_VERSION_REGEXES[task["extra"][arg]]
+            if arg == "mar-channel-id-override":
+                task["extra"][arg] = MAR_CHANNEL_ID_OVERRIDE_REGEXES[task["extra"][arg]]
+
+            command.append("--{}".format(arg))
+            command.append(task["extra"][arg])
+
+        task["run"].update(
+            {
+                "using": "mach",
+                "mach": " ".join(command),
+            }
+        )
+
+        yield task
diff --git a/taskcluster/taskgraph/transforms/upload_generated_sources.py b/taskcluster/taskgraph/transforms/upload_generated_sources.py
new file mode 100644
index 0000000000..e8a53dac9f
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/upload_generated_sources.py
@@ -0,0 +1,43 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the upload-generated-sources task description template,
+taskcluster/ci/upload-generated-sources/kind.yml, into an actual task description.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from taskgraph.transforms.base import TransformSequence
+
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def add_task_info(config, jobs):
+    for job in jobs:
+        dep_task = job["primary-dependency"]
+        del job["primary-dependency"]
+
+        # Add a dependency on the build task.
+        job["dependencies"] = {"build": dep_task.label}
+        # Label the job to match the build task it's uploading from.
+        job["label"] = dep_task.label.replace("build-", "upload-generated-sources-")
+        # Copy over some bits of metadata from the build task.
+        dep_th = dep_task.task["extra"]["treeherder"]
+        job.setdefault("attributes", {})
+        job["attributes"]["build_platform"] = dep_task.attributes.get("build_platform")
+        if dep_task.attributes.get("shippable"):
+            job["attributes"]["shippable"] = True
+        plat = "{}/{}".format(
+            dep_th["machine"]["platform"], dep_task.attributes.get("build_type")
+        )
+        job["treeherder"]["platform"] = plat
+        job["treeherder"]["tier"] = dep_th["tier"]
+        if dep_th["symbol"] != "N":
+            job["treeherder"]["symbol"] = "Ugs{}".format(dep_th["symbol"])
+        job["run-on-projects"] = dep_task.attributes.get("run_on_projects")
+        job["optimization"] = dep_task.optimization
+
+        yield job
diff --git a/taskcluster/taskgraph/transforms/upload_symbols.py b/taskcluster/taskgraph/transforms/upload_symbols.py
new file mode 100644
index 0000000000..244b2174a8
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/upload_symbols.py
@@ -0,0 +1,93 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the upload-symbols task description template,
+taskcluster/ci/upload-symbols/job-template.yml into an actual task description.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.attributes import RELEASE_PROJECTS
+from taskgraph.util.treeherder import join_symbol, inherit_treeherder_from_dep
+from taskgraph.util.attributes import copy_attributes_from_dependent_job
+
+import logging
+
+logger = logging.getLogger(__name__)
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def check_nightlies(config, tasks):
+    """Ensure that we upload symbols for all shippable builds, so that crash-stats can
+    resolve any reports sent to it. Try may enable full symbols but not upload them.
+
+    Putting this check here (instead of the transforms for the build kind) lets us
+    leverage any not-for-build-platforms set in the upload-symbols kind."""
+    for task in tasks:
+        dep = task["primary-dependency"]
+        if (
+            config.params["project"] in RELEASE_PROJECTS
+            and dep.attributes.get("shippable")
+            and not dep.attributes.get("enable-full-crashsymbols")
+            and not dep.attributes.get("skip-upload-crashsymbols")
+        ):
+            raise Exception(
+                "Shippable job %s should have enable-full-crashsymbols attribute "
+                "set to true to enable symbol upload to crash-stats" % dep.label
+            )
+        yield task
+
+
+@transforms.add
+def fill_template(config, tasks):
+    for task in tasks:
+        dep = task["primary-dependency"]
+        task.pop("dependent-tasks", None)
+
+        # Fill out the dynamic fields in the task description
+        task["label"] = dep.label + "-upload-symbols"
+
+        # Skip tasks where we don't have the full crashsymbols enabled
+        if not dep.attributes.get("enable-full-crashsymbols") or dep.attributes.get(
+            "skip-upload-crashsymbols"
+        ):
+            logger.debug("Skipping upload symbols task for %s", task["label"])
+            continue
+
+        task["dependencies"] = {"build": dep.label}
+        task["worker"]["env"]["GECKO_HEAD_REPOSITORY"] = config.params[
+            "head_repository"
+        ]
+        task["worker"]["env"]["GECKO_HEAD_REV"] = config.params["head_rev"]
+        task["worker"]["env"]["SYMBOL_SECRET"] = task["worker"]["env"][
+            "SYMBOL_SECRET"
+        ].format(level=config.params["level"])
+
+        attributes = copy_attributes_from_dependent_job(dep)
+        attributes.update(task.get("attributes", {}))
+        task["attributes"] = attributes
+
+        treeherder = inherit_treeherder_from_dep(task, dep)
+        th = dep.task.get("extra")["treeherder"]
+        th_symbol = th.get("symbol")
+        th_groupsymbol = th.get("groupSymbol", "?")
+
+        # Disambiguate the treeherder symbol.
+        sym = "Sym" + (th_symbol[1:] if th_symbol.startswith("B") else th_symbol)
+        treeherder.setdefault("symbol", join_symbol(th_groupsymbol, sym))
+        task["treeherder"] = treeherder
+
+        # We only want to run these tasks if the build is run.
+        # XXX Better to run this on promote phase instead?
+        task["run-on-projects"] = dep.attributes.get("run_on_projects")
+        task["optimization"] = {"upload-symbols": None}
+        task["if-dependencies"] = ["build"]
+
+        # clear out the stuff that's not part of a task description
+        del task["primary-dependency"]
+
+        yield task
diff --git a/taskcluster/taskgraph/transforms/upstream_artifact_task.py b/taskcluster/taskgraph/transforms/upstream_artifact_task.py
new file mode 100644
index 0000000000..e5865742a6
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/upstream_artifact_task.py
@@ -0,0 +1,32 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Find upstream artifact task.
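+
+Searches a job's dependent tasks for a `notarization-part-1` task and, when
+one is found, records it on the job as `upstream-artifact-task`.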
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from taskgraph.transforms.base import TransformSequence
+
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def find_upstream_artifact_task(config, jobs):
+    for job in jobs:
+        dep_job = None
+        if job.get("dependent-tasks"):
+            dep_labels = list(job["dependent-tasks"])
+            for label in dep_labels:
+                if "notarization-part-1" in label:
+                    assert (
+                        dep_job is None
+                    ), "Can't determine whether {} or {} is dep_job!".format(
+                        dep_job.label, label
+                    )
+                    dep_job = job["dependent-tasks"][label]
+        if dep_job is not None:
+            job["upstream-artifact-task"] = dep_job
+        yield job
diff --git a/taskcluster/taskgraph/transforms/visual_metrics.py b/taskcluster/taskgraph/transforms/visual_metrics.py
new file mode 100644
index 0000000000..b2b2b2fa32
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/visual_metrics.py
@@ -0,0 +1,32 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+These transformations take a task description for a visual metrics task and
+add the necessary environment variables to run on the given inputs.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import json
+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def set_visual_metrics_jobs(config, jobs):
+    """Set the visual metrics configuration for the given jobs."""
+    vismet_jobs = config.params["try_task_config"].get("visual-metrics-jobs")
+
+    if vismet_jobs:
+        vismet_jobs = json.dumps(vismet_jobs)
+
+    for job in jobs:
+        if vismet_jobs:
+            job["task"]["payload"].setdefault("env", {}).update(
+                VISUAL_METRICS_JOBS_JSON=vismet_jobs
+            )
+
+        yield job
diff --git a/taskcluster/taskgraph/transforms/visual_metrics_dep.py b/taskcluster/taskgraph/transforms/visual_metrics_dep.py
new file mode 100644
index 0000000000..909d6ba730
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/visual_metrics_dep.py
@@ -0,0 +1,54 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+These transformations take a task description for a visual metrics task and
+add the necessary environment variables to run on the given inputs.
+"""
+from __future__ import absolute_import, print_function, unicode_literals
+import os
+
+from taskgraph.transforms.base import TransformSequence
+
+
+transforms = TransformSequence()
+
+SYMBOL = "%(groupSymbol)s(%(symbol)s-vismet)"
+# the test- prefix makes the task SETA-optimized.
+LABEL = "test-vismet-%(platform)s-%(raptor_try_name)s"
+
+
+@transforms.add
+def run_visual_metrics(config, jobs):
+    for job in jobs:
+        dep_job = job.pop("primary-dependency", None)
+        if dep_job is not None:
+            platform = dep_job.task["extra"]["treeherder-platform"]
+            job["dependencies"] = {dep_job.label: dep_job.label}
+            job["fetches"][dep_job.label] = [
+                "/public/test_info/browsertime-results.tgz"
+            ]
+            attributes = dict(dep_job.attributes)
+            attributes["platform"] = platform
+            job["label"] = LABEL % attributes
+            treeherder_info = dict(dep_job.task["extra"]["treeherder"])
+            job["treeherder"]["symbol"] = SYMBOL % treeherder_info
+
+            # Store the platform name so we can use it to calculate
+            # the similarity metric against other tasks
+            job["worker"].setdefault("env", {})["TC_LABEL"] = dep_job.label
+
+            # Setting the `TC_GROUP_ID` environment variable to a task group ID
+            # is a simple way to compare videos to a specific task group
+            job["worker"]["env"]["TC_GROUP_ID"] = os.getenv("TC_GROUP_ID", "")
+
+            # vismet runs on Linux, but we want it displayed alongside the job
+            # that triggered it, to make it easier for people to find.
+            job["treeherder"]["platform"] = platform
+            job["treeherder"]["tier"] = treeherder_info["tier"]
+
+            # run-on-projects needs to be set based on the dependent task
+            job["run-on-projects"] = attributes["run_on_projects"]
+
+        yield job
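All of the modules above share one pattern: a module-level TransformSequence registers generator functions via transforms.add, and each task description is piped through them in registration order. The sketch below illustrates that pattern in isolation; the transform bodies and the sample task are hypothetical stand-ins, not code from this tree.

from __future__ import absolute_import, print_function, unicode_literals

from taskgraph.transforms.base import TransformSequence

transforms = TransformSequence()


@transforms.add
def add_description(config, tasks):
    # Transforms are generators: they consume task dicts, adjust them, and
    # yield them onward to the next registered transform.
    for task in tasks:
        task.setdefault("description", "example task")
        yield task


@transforms.add
def fan_out(config, tasks):
    # A transform may also yield more tasks than it received, the way
    # split_chunks does above.
    for task in tasks:
        for chunk in (1, 2):
            chunked = dict(task, **{"this-chunk": chunk})
            yield chunked

Calling transforms(config, [task]) runs every registered transform in order, so this hypothetical sequence would yield two chunked copies of each input task.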