summaryrefslogtreecommitdiffstats
path: root/taskcluster/gecko_taskgraph/transforms
diff options
context:
space:
mode:
Diffstat (limited to 'taskcluster/gecko_taskgraph/transforms')
-rw-r--r--taskcluster/gecko_taskgraph/transforms/__init__.py0
-rw-r--r--taskcluster/gecko_taskgraph/transforms/artifact.py116
-rw-r--r--taskcluster/gecko_taskgraph/transforms/artifacts.yml16
-rw-r--r--taskcluster/gecko_taskgraph/transforms/attribution.py32
-rw-r--r--taskcluster/gecko_taskgraph/transforms/balrog_submit.py138
-rw-r--r--taskcluster/gecko_taskgraph/transforms/balrog_toplevel.py42
-rw-r--r--taskcluster/gecko_taskgraph/transforms/beetmover.py165
-rw-r--r--taskcluster/gecko_taskgraph/transforms/beetmover_apt.py114
-rw-r--r--taskcluster/gecko_taskgraph/transforms/beetmover_checksums.py133
-rw-r--r--taskcluster/gecko_taskgraph/transforms/beetmover_emefree_checksums.py139
-rw-r--r--taskcluster/gecko_taskgraph/transforms/beetmover_geckoview.py166
-rw-r--r--taskcluster/gecko_taskgraph/transforms/beetmover_langpack_checksums.py128
-rw-r--r--taskcluster/gecko_taskgraph/transforms/beetmover_push_to_release.py93
-rw-r--r--taskcluster/gecko_taskgraph/transforms/beetmover_repackage.py327
-rw-r--r--taskcluster/gecko_taskgraph/transforms/beetmover_repackage_l10n.py44
-rw-r--r--taskcluster/gecko_taskgraph/transforms/beetmover_repackage_partner.py326
-rw-r--r--taskcluster/gecko_taskgraph/transforms/beetmover_snap.py42
-rw-r--r--taskcluster/gecko_taskgraph/transforms/beetmover_source.py35
-rw-r--r--taskcluster/gecko_taskgraph/transforms/beetmover_source_checksums.py137
-rw-r--r--taskcluster/gecko_taskgraph/transforms/bootstrap.py132
-rw-r--r--taskcluster/gecko_taskgraph/transforms/bouncer_aliases.py108
-rw-r--r--taskcluster/gecko_taskgraph/transforms/bouncer_check.py111
-rw-r--r--taskcluster/gecko_taskgraph/transforms/bouncer_locations.py35
-rw-r--r--taskcluster/gecko_taskgraph/transforms/bouncer_submission.py335
-rw-r--r--taskcluster/gecko_taskgraph/transforms/bouncer_submission_partners.py193
-rw-r--r--taskcluster/gecko_taskgraph/transforms/build.py238
-rw-r--r--taskcluster/gecko_taskgraph/transforms/build_attrs.py50
-rw-r--r--taskcluster/gecko_taskgraph/transforms/build_fat_aar.py78
-rw-r--r--taskcluster/gecko_taskgraph/transforms/build_lints.py59
-rw-r--r--taskcluster/gecko_taskgraph/transforms/build_signing.py71
-rw-r--r--taskcluster/gecko_taskgraph/transforms/cached_tasks.py101
-rw-r--r--taskcluster/gecko_taskgraph/transforms/chunk_partners.py75
-rw-r--r--taskcluster/gecko_taskgraph/transforms/code_review.py33
-rw-r--r--taskcluster/gecko_taskgraph/transforms/condprof.py85
-rw-r--r--taskcluster/gecko_taskgraph/transforms/copy_attributes_from_dependent_task.py23
-rw-r--r--taskcluster/gecko_taskgraph/transforms/cross_channel.py44
-rw-r--r--taskcluster/gecko_taskgraph/transforms/diffoscope.py172
-rw-r--r--taskcluster/gecko_taskgraph/transforms/docker_image.py209
-rw-r--r--taskcluster/gecko_taskgraph/transforms/fetch.py387
-rw-r--r--taskcluster/gecko_taskgraph/transforms/final_verify.py35
-rw-r--r--taskcluster/gecko_taskgraph/transforms/fxrecord.py22
-rw-r--r--taskcluster/gecko_taskgraph/transforms/geckodriver_mac_notarization.py68
-rw-r--r--taskcluster/gecko_taskgraph/transforms/geckodriver_signing.py124
-rw-r--r--taskcluster/gecko_taskgraph/transforms/github_sync.py23
-rw-r--r--taskcluster/gecko_taskgraph/transforms/job/__init__.py504
-rw-r--r--taskcluster/gecko_taskgraph/transforms/job/common.py269
-rw-r--r--taskcluster/gecko_taskgraph/transforms/job/distro_package.py238
-rw-r--r--taskcluster/gecko_taskgraph/transforms/job/hazard.py66
-rw-r--r--taskcluster/gecko_taskgraph/transforms/job/mach.py83
-rw-r--r--taskcluster/gecko_taskgraph/transforms/job/mozharness.py366
-rw-r--r--taskcluster/gecko_taskgraph/transforms/job/mozharness_test.py477
-rw-r--r--taskcluster/gecko_taskgraph/transforms/job/python_test.py47
-rw-r--r--taskcluster/gecko_taskgraph/transforms/job/run_task.py308
-rw-r--r--taskcluster/gecko_taskgraph/transforms/job/spidermonkey.py109
-rw-r--r--taskcluster/gecko_taskgraph/transforms/job/toolchain.py257
-rw-r--r--taskcluster/gecko_taskgraph/transforms/l10n.py416
-rw-r--r--taskcluster/gecko_taskgraph/transforms/mac_dummy.py40
-rw-r--r--taskcluster/gecko_taskgraph/transforms/mac_notarization.py19
-rw-r--r--taskcluster/gecko_taskgraph/transforms/mar_signing.py140
-rw-r--r--taskcluster/gecko_taskgraph/transforms/maybe_release.py23
-rw-r--r--taskcluster/gecko_taskgraph/transforms/merge_automation.py81
-rw-r--r--taskcluster/gecko_taskgraph/transforms/name_sanity.py45
-rw-r--r--taskcluster/gecko_taskgraph/transforms/openh264.py26
-rw-r--r--taskcluster/gecko_taskgraph/transforms/openh264_signing.py109
-rw-r--r--taskcluster/gecko_taskgraph/transforms/partials.py172
-rw-r--r--taskcluster/gecko_taskgraph/transforms/partner_attribution.py129
-rw-r--r--taskcluster/gecko_taskgraph/transforms/partner_attribution_beetmover.py202
-rw-r--r--taskcluster/gecko_taskgraph/transforms/partner_repack.py136
-rw-r--r--taskcluster/gecko_taskgraph/transforms/partner_signing.py66
-rw-r--r--taskcluster/gecko_taskgraph/transforms/per_platform_dummy.py33
-rw-r--r--taskcluster/gecko_taskgraph/transforms/perftest.py351
-rw-r--r--taskcluster/gecko_taskgraph/transforms/python_update.py25
-rw-r--r--taskcluster/gecko_taskgraph/transforms/release.py20
-rw-r--r--taskcluster/gecko_taskgraph/transforms/release_beetmover_signed_addons.py243
-rw-r--r--taskcluster/gecko_taskgraph/transforms/release_deps.py61
-rw-r--r--taskcluster/gecko_taskgraph/transforms/release_flatpak_push.py79
-rw-r--r--taskcluster/gecko_taskgraph/transforms/release_flatpak_repackage.py42
-rw-r--r--taskcluster/gecko_taskgraph/transforms/release_generate_checksums.py53
-rw-r--r--taskcluster/gecko_taskgraph/transforms/release_generate_checksums_beetmover.py118
-rw-r--r--taskcluster/gecko_taskgraph/transforms/release_generate_checksums_signing.py86
-rw-r--r--taskcluster/gecko_taskgraph/transforms/release_mark_as_shipped.py39
-rw-r--r--taskcluster/gecko_taskgraph/transforms/release_msix_push.py88
-rw-r--r--taskcluster/gecko_taskgraph/transforms/release_notifications.py73
-rw-r--r--taskcluster/gecko_taskgraph/transforms/release_sign_and_push_langpacks.py180
-rw-r--r--taskcluster/gecko_taskgraph/transforms/release_snap_repackage.py39
-rw-r--r--taskcluster/gecko_taskgraph/transforms/release_started.py52
-rw-r--r--taskcluster/gecko_taskgraph/transforms/release_version_bump.py42
-rw-r--r--taskcluster/gecko_taskgraph/transforms/repackage.py684
-rw-r--r--taskcluster/gecko_taskgraph/transforms/repackage_l10n.py26
-rw-r--r--taskcluster/gecko_taskgraph/transforms/repackage_partner.py302
-rw-r--r--taskcluster/gecko_taskgraph/transforms/repackage_routes.py34
-rw-r--r--taskcluster/gecko_taskgraph/transforms/repackage_set_upstream_mac_kind.py39
-rw-r--r--taskcluster/gecko_taskgraph/transforms/repackage_signing.py137
-rw-r--r--taskcluster/gecko_taskgraph/transforms/repackage_signing_partner.py145
-rw-r--r--taskcluster/gecko_taskgraph/transforms/repo_update.py25
-rw-r--r--taskcluster/gecko_taskgraph/transforms/reprocess_symbols.py67
-rw-r--r--taskcluster/gecko_taskgraph/transforms/reverse_chunk_deps.py45
-rw-r--r--taskcluster/gecko_taskgraph/transforms/run_pgo_profile.py34
-rw-r--r--taskcluster/gecko_taskgraph/transforms/scriptworker.py18
-rw-r--r--taskcluster/gecko_taskgraph/transforms/scriptworker_canary.py46
-rw-r--r--taskcluster/gecko_taskgraph/transforms/sentry.py30
-rw-r--r--taskcluster/gecko_taskgraph/transforms/shippable_l10n_signing.py86
-rw-r--r--taskcluster/gecko_taskgraph/transforms/signing.py266
-rw-r--r--taskcluster/gecko_taskgraph/transforms/source_checksums_signing.py83
-rw-r--r--taskcluster/gecko_taskgraph/transforms/source_test.py270
-rw-r--r--taskcluster/gecko_taskgraph/transforms/spidermonkey.py21
-rw-r--r--taskcluster/gecko_taskgraph/transforms/split_by_locale.py79
-rw-r--r--taskcluster/gecko_taskgraph/transforms/startup_test.py40
-rw-r--r--taskcluster/gecko_taskgraph/transforms/strip_dependent_task.py17
-rw-r--r--taskcluster/gecko_taskgraph/transforms/task.py2266
-rw-r--r--taskcluster/gecko_taskgraph/transforms/test/__init__.py538
-rw-r--r--taskcluster/gecko_taskgraph/transforms/test/chunk.py262
-rw-r--r--taskcluster/gecko_taskgraph/transforms/test/other.py1081
-rw-r--r--taskcluster/gecko_taskgraph/transforms/test/raptor.py317
-rw-r--r--taskcluster/gecko_taskgraph/transforms/test/variant.py128
-rw-r--r--taskcluster/gecko_taskgraph/transforms/test/worker.py201
-rw-r--r--taskcluster/gecko_taskgraph/transforms/try_job.py18
-rw-r--r--taskcluster/gecko_taskgraph/transforms/update_verify.py58
-rw-r--r--taskcluster/gecko_taskgraph/transforms/update_verify_config.py148
-rw-r--r--taskcluster/gecko_taskgraph/transforms/upload_generated_sources.py40
-rw-r--r--taskcluster/gecko_taskgraph/transforms/upload_symbols.py95
-rw-r--r--taskcluster/gecko_taskgraph/transforms/upstream_artifact_task.py29
122 files changed, 18631 insertions, 0 deletions
diff --git a/taskcluster/gecko_taskgraph/transforms/__init__.py b/taskcluster/gecko_taskgraph/transforms/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/__init__.py
diff --git a/taskcluster/gecko_taskgraph/transforms/artifact.py b/taskcluster/gecko_taskgraph/transforms/artifact.py
new file mode 100644
index 0000000000..559148f7b4
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/artifact.py
@@ -0,0 +1,116 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Apply different expiration dates to different artifacts based on a manifest file (artifacts.yml)
+"""
+import logging
+import os
+import sys
+
+import yaml
+from taskgraph.transforms.base import TransformSequence
+from yaml import YAMLError
+
+from gecko_taskgraph.transforms.job.common import get_expiration
+from gecko_taskgraph.util.workertypes import worker_type_implementation
+
+logger = logging.getLogger(__name__)
+
+transforms = TransformSequence()
+
+
+def read_artifact_manifest(manifest_path):
+ """Read the artifacts.yml manifest file and return it."""
+ # logger.info(f"The current directory is {os.getcwd()}")
+ try:
+ with open(manifest_path, "r") as ymlf:
+ yml = yaml.safe_load(ymlf.read())
+ return yml
+ except YAMLError as ye:
+        err = f'Failed to parse manifest "{manifest_path}". Invalid Yaml:'
+        err += str(ye)
+ raise SystemExit(err)
+ except FileNotFoundError:
+ err = f'Failed to load manifest "{manifest_path}". File not found'
+ raise SystemExit(err)
+ except PermissionError:
+ err = f'Failed to load manifest "{manifest_path}". Permission Error'
+ raise SystemExit(err)
+
+
+@transforms.add
+def set_artifact_expiration(config, jobs):
+ """Set the expiration for certain artifacts based on a manifest file."""
+ """---
+ win:
+ - build_resources.json: short
+
+ linux:
+ - target.crashreporter-symbols-full.tar.zst: medium
+ """
+ transform_dir = os.path.dirname(__file__)
+ manifest = read_artifact_manifest(os.path.join(transform_dir, "artifacts.yml"))
+
+ for job in jobs:
+ try:
+ platform = job["attributes"]["build_platform"]
+ except KeyError:
+            err = "Tried to get build_platform for job, but it does not exist. Exiting."
+ raise SystemExit(err)
+ if "worker" in job:
+ if "env" in job["worker"]:
+ if isinstance(job["worker"]["env"], dict):
+ job["worker"]["env"]["MOZ_ARTIFACT_PLATFORM"] = platform
+ else:
+ raise SystemExit(
+ f"Expected env to be a dict, but it was {type(job['worker']['env'])}"
+ )
+ if "artifacts" in job["worker"]:
+ plat = platform.lower()
+ if "plain" in plat or "ccov" in plat or "rusttest" in plat:
+ art_dict = None
+ elif (
+ plat == "toolchain-wasm32-wasi-compiler-rt-trunk"
+ or plat == "toolchain-linux64-x64-compiler-rt-trunk"
+ or plat == "toolchain-linux64-x86-compiler-rt-trunk"
+ or plat == "android-geckoview-docs"
+ ):
+ art_dict = None
+ elif plat.startswith("win"):
+ art_dict = manifest["win"]
+ elif plat.startswith("linux"):
+ art_dict = manifest["linux"]
+ elif plat.startswith("mac"):
+ art_dict = manifest["macos"]
+ elif plat.startswith("android"):
+ art_dict = manifest["android"]
+ else:
+ print(
+                    f'The platform name "{plat}" didn\'t start with',
+ '"win", "mac", "android", or "linux".',
+ file=sys.stderr,
+ )
+ art_dict = None
+ worker_implementation, _ = worker_type_implementation(
+ config.graph_config, config.params, job["worker-type"]
+ )
+ if worker_implementation == "docker-worker":
+ artifact_dest = "/builds/worker/cidata/{}"
+ else:
+ artifact_dest = "cidata/{}"
+
+ if art_dict is not None:
+ for art_name in art_dict.keys():
+ # The 'artifacts' key of a job is a list at this stage.
+ # So, must append a new dict to the list
+ expiry_policy = art_dict[art_name]
+ expires = get_expiration(config, policy=expiry_policy)
+ new_art = {
+ "name": f"public/cidata/{art_name}",
+ "path": artifact_dest.format(art_name),
+ "type": "file",
+ "expires-after": expires,
+ }
+ job["worker"]["artifacts"].append(new_art)
+ yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/artifacts.yml b/taskcluster/gecko_taskgraph/transforms/artifacts.yml
new file mode 100644
index 0000000000..b41c657283
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/artifacts.yml
@@ -0,0 +1,16 @@
+---
+win:
+ target.crashreporter-symbols-full.tar.zst: shortest
+ sccache.log: shortest
+
+linux:
+ target.crashreporter-symbols-full.tar.zst: shortest
+ sccache.log: shortest
+
+macos:
+ target.crashreporter-symbols-full.tar.zst: shortest
+ sccache.log: shortest
+
+android:
+ target.crashreporter-symbols-full.tar.zst: shortest
+ sccache.log: shortest
diff --git a/taskcluster/gecko_taskgraph/transforms/attribution.py b/taskcluster/gecko_taskgraph/transforms/attribution.py
new file mode 100644
index 0000000000..935c274c03
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/attribution.py
@@ -0,0 +1,32 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def stub_installer(config, jobs):
+ """Not all windows builds come with a stub installer (only win32, and not
+ on esr), so conditionally add it here based on our dependency's
+ stub-installer attribute."""
+ for job in jobs:
+ dep_name, dep_label = next(iter(job["dependencies"].items()))
+ dep_task = config.kind_dependencies_tasks[dep_label]
+ if dep_task.attributes.get("stub-installer"):
+ locale = job["attributes"].get("locale")
+ if locale:
+ artifact = f"{locale}/target.stub-installer.exe"
+ else:
+ artifact = "target.stub-installer.exe"
+ job["fetches"][dep_name].append(artifact)
+ job["run"]["command"] += [
+ "--input",
+ "/builds/worker/fetches/target.stub-installer.exe",
+ ]
+ job["attributes"]["release_artifacts"].append(
+ "public/build/target.stub-installer.exe"
+ )
+ yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/balrog_submit.py b/taskcluster/gecko_taskgraph/transforms/balrog_submit.py
new file mode 100644
index 0000000000..067600f0cb
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/balrog_submit.py
@@ -0,0 +1,138 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the per-locale balrog task into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import optionally_keyed_by, resolve_keyed_by
+from taskgraph.util.treeherder import replace_group
+from voluptuous import Optional
+
+from gecko_taskgraph.loader.single_dep import schema
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
+
+balrog_description_schema = schema.extend(
+ {
+ # unique label to describe this balrog task, defaults to balrog-{dep.label}
+ Optional("label"): str,
+ Optional(
+ "update-no-wnp",
+ description="Whether the parallel `-No-WNP` blob should be updated as well.",
+ ): optionally_keyed_by("release-type", bool),
+ # treeherder is allowed here to override any defaults we use for beetmover. See
+ # taskcluster/gecko_taskgraph/transforms/task.py for the schema details, and the
+ # below transforms for defaults of various values.
+ Optional("treeherder"): task_description_schema["treeherder"],
+ Optional("attributes"): task_description_schema["attributes"],
+ # Shipping product / phase
+ Optional("shipping-product"): task_description_schema["shipping-product"],
+ Optional("shipping-phase"): task_description_schema["shipping-phase"],
+ }
+)
+
+
+transforms = TransformSequence()
+transforms.add_validate(balrog_description_schema)
+
+
+@transforms.add
+def handle_keyed_by(config, jobs):
+ """Resolve fields that can be keyed by platform, etc."""
+ fields = [
+ "update-no-wnp",
+ ]
+ for job in jobs:
+ label = job.get("dependent-task", object).__dict__.get("label", "?no-label?")
+ for field in fields:
+ resolve_keyed_by(
+ item=job,
+ field=field,
+ item_name=label,
+ **{
+ "project": config.params["project"],
+ "release-type": config.params["release_type"],
+ },
+ )
+ yield job
+
+
+@transforms.add
+def make_task_description(config, jobs):
+ for job in jobs:
+ dep_job = job["primary-dependency"]
+
+ treeherder = job.get("treeherder", {})
+ treeherder.setdefault("symbol", "c-Up(N)")
+ dep_th_platform = (
+ dep_job.task.get("extra", {})
+ .get("treeherder", {})
+ .get("machine", {})
+ .get("platform", "")
+ )
+ treeherder.setdefault("platform", f"{dep_th_platform}/opt")
+ treeherder.setdefault(
+ "tier", dep_job.task.get("extra", {}).get("treeherder", {}).get("tier", 1)
+ )
+ treeherder.setdefault("kind", "build")
+
+ attributes = copy_attributes_from_dependent_job(dep_job)
+
+ treeherder_job_symbol = dep_job.task["extra"]["treeherder"]["symbol"]
+ treeherder["symbol"] = replace_group(treeherder_job_symbol, "c-Up")
+
+ if dep_job.attributes.get("locale"):
+ attributes["locale"] = dep_job.attributes.get("locale")
+
+ label = job["label"]
+
+ description = (
+ "Balrog submission for locale '{locale}' for build '"
+ "{build_platform}/{build_type}'".format(
+ locale=attributes.get("locale", "en-US"),
+ build_platform=attributes.get("build_platform"),
+ build_type=attributes.get("build_type"),
+ )
+ )
+
+ upstream_artifacts = [
+ {
+ "taskId": {"task-reference": "<beetmover>"},
+ "taskType": "beetmover",
+ "paths": ["public/manifest.json"],
+ }
+ ]
+
+ dependencies = {"beetmover": dep_job.label}
+ for kind_dep in config.kind_dependencies_tasks.values():
+ if (
+ kind_dep.kind == "startup-test"
+ and kind_dep.attributes["build_platform"]
+ == attributes.get("build_platform")
+ and kind_dep.attributes["build_type"] == attributes.get("build_type")
+ and kind_dep.attributes.get("shipping_product")
+ == job.get("shipping-product")
+ ):
+ dependencies["startup-test"] = kind_dep.label
+
+ task = {
+ "label": label,
+ "description": description,
+ "worker-type": "balrog",
+ "worker": {
+ "implementation": "balrog",
+ "upstream-artifacts": upstream_artifacts,
+ "balrog-action": "v2-submit-locale",
+ "suffixes": ["", "-No-WNP"] if job.get("update-no-wnp") else [""],
+ },
+ "dependencies": dependencies,
+ "attributes": attributes,
+ "run-on-projects": dep_job.attributes.get("run_on_projects"),
+ "treeherder": treeherder,
+ "shipping-phase": job.get("shipping-phase", "promote"),
+ "shipping-product": job.get("shipping-product"),
+ }
+
+ yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/balrog_toplevel.py b/taskcluster/gecko_taskgraph/transforms/balrog_toplevel.py
new file mode 100644
index 0000000000..6b06758f69
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/balrog_toplevel.py
@@ -0,0 +1,42 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the beetmover task into an actual task description.
+"""
+
+from mozilla_version.gecko import GeckoVersion
+from mozrelease.balrog import generate_update_properties
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.yaml import load_yaml
+
+from gecko_taskgraph.util.scriptworker import get_release_config
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def generate_update_line(config, jobs):
+ """Resolve fields that can be keyed by platform, etc."""
+ release_config = get_release_config(config)
+ for job in jobs:
+ config_file = job.pop("whats-new-config")
+ update_config = load_yaml(config_file)
+
+ product = job["shipping-product"]
+ if product == "devedition":
+ product = "firefox"
+ job["worker"]["update-line"] = {}
+ for blob_type, suffix in [("wnp", ""), ("no-wnp", "-No-WNP")]:
+ context = {
+ "release-type": config.params["release_type"],
+ "product": product,
+ "version": GeckoVersion.parse(release_config["appVersion"]),
+ "blob-type": blob_type,
+ "build-id": config.params["moz_build_date"],
+ }
+ job["worker"]["update-line"][suffix] = generate_update_properties(
+ context, update_config
+ )
+
+ yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/beetmover.py b/taskcluster/gecko_taskgraph/transforms/beetmover.py
new file mode 100644
index 0000000000..93818707dd
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/beetmover.py
@@ -0,0 +1,165 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the beetmover task into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.treeherder import replace_group
+from voluptuous import Optional, Required
+
+from gecko_taskgraph.loader.single_dep import schema
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
+from gecko_taskgraph.util.scriptworker import (
+ generate_beetmover_artifact_map,
+ generate_beetmover_upstream_artifacts,
+ get_beetmover_action_scope,
+ get_beetmover_bucket_scope,
+)
+
+transforms = TransformSequence()
+
+beetmover_description_schema = schema.extend(
+ {
+ # unique label to describe this beetmover task, defaults to {dep.label}-beetmover
+ Optional("label"): str,
+ # treeherder is allowed here to override any defaults we use for beetmover. See
+ # taskcluster/gecko_taskgraph/transforms/task.py for the schema details, and the
+ # below transforms for defaults of various values.
+ Optional("treeherder"): task_description_schema["treeherder"],
+ # locale is passed only for l10n beetmoving
+ Optional("locale"): str,
+ Required("shipping-phase"): task_description_schema["shipping-phase"],
+ Optional("shipping-product"): task_description_schema["shipping-product"],
+ Optional("attributes"): task_description_schema["attributes"],
+ }
+)
+
+
+transforms.add_validate(beetmover_description_schema)
+
+
+@transforms.add
+def make_task_description(config, jobs):
+ for job in jobs:
+ dep_job = job["primary-dependency"]
+ attributes = dep_job.attributes
+
+ treeherder = job.get("treeherder", {})
+ treeherder.setdefault(
+ "symbol", replace_group(dep_job.task["extra"]["treeherder"]["symbol"], "BM")
+ )
+ dep_th_platform = (
+ dep_job.task.get("extra", {})
+ .get("treeherder", {})
+ .get("machine", {})
+ .get("platform", "")
+ )
+ treeherder.setdefault("platform", f"{dep_th_platform}/opt")
+ treeherder.setdefault(
+ "tier", dep_job.task.get("extra", {}).get("treeherder", {}).get("tier", 1)
+ )
+ treeherder.setdefault("kind", "build")
+ label = job["label"]
+ description = (
+ "Beetmover submission for locale '{locale}' for build '"
+ "{build_platform}/{build_type}'".format(
+ locale=attributes.get("locale", "en-US"),
+ build_platform=attributes.get("build_platform"),
+ build_type=attributes.get("build_type"),
+ )
+ )
+
+ dependencies = {dep_job.kind: dep_job.label}
+
+ # XXX release snap-repackage has a variable number of dependencies, depending on how many
+ # "post-beetmover-dummy" jobs there are in the graph.
+ if dep_job.kind != "release-snap-repackage" and len(dep_job.dependencies) > 1:
+ raise NotImplementedError(
+ "Can't beetmove a signing task with multiple dependencies"
+ )
+ signing_dependencies = dep_job.dependencies
+ dependencies.update(signing_dependencies)
+
+ attributes = copy_attributes_from_dependent_job(dep_job)
+ attributes.update(job.get("attributes", {}))
+
+ if job.get("locale"):
+ attributes["locale"] = job["locale"]
+
+ bucket_scope = get_beetmover_bucket_scope(config)
+ action_scope = get_beetmover_action_scope(config)
+
+ task = {
+ "label": label,
+ "description": description,
+ "worker-type": "beetmover",
+ "scopes": [bucket_scope, action_scope],
+ "dependencies": dependencies,
+ "attributes": attributes,
+ "run-on-projects": dep_job.attributes.get("run_on_projects"),
+ "treeherder": treeherder,
+ "shipping-phase": job["shipping-phase"],
+ }
+
+ yield task
+
+
+def craft_release_properties(config, job):
+ params = config.params
+ build_platform = job["attributes"]["build_platform"]
+ build_platform = build_platform.replace("-shippable", "")
+ if build_platform.endswith("-source"):
+ build_platform = build_platform.replace("-source", "-release")
+
+ # XXX This should be explicitly set via build attributes or something
+ if "android" in job["label"] or "fennec" in job["label"]:
+ app_name = "Fennec"
+ elif config.graph_config["trust-domain"] == "comm":
+ app_name = "Thunderbird"
+ else:
+ # XXX Even DevEdition is called Firefox
+ app_name = "Firefox"
+
+ return {
+ "app-name": app_name,
+ "app-version": params["app_version"],
+ "branch": params["project"],
+ "build-id": params["moz_build_date"],
+ "hash-type": "sha512",
+ "platform": build_platform,
+ }
+
+
+@transforms.add
+def make_task_worker(config, jobs):
+ for job in jobs:
+ valid_beetmover_job = len(job["dependencies"]) == 2 and any(
+ ["signing" in j for j in job["dependencies"]]
+ )
+ # XXX release snap-repackage has a variable number of dependencies, depending on how many
+ # "post-beetmover-dummy" jobs there are in the graph.
+ if "-snap-" not in job["label"] and not valid_beetmover_job:
+ raise NotImplementedError("Beetmover must have two dependencies.")
+
+ locale = job["attributes"].get("locale")
+ platform = job["attributes"]["build_platform"]
+
+ worker = {
+ "implementation": "beetmover",
+ "release-properties": craft_release_properties(config, job),
+ "upstream-artifacts": generate_beetmover_upstream_artifacts(
+ config, job, platform, locale
+ ),
+ "artifact-map": generate_beetmover_artifact_map(
+ config, job, platform=platform, locale=locale
+ ),
+ }
+
+ if locale:
+ worker["locale"] = locale
+ job["worker"] = worker
+
+ yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/beetmover_apt.py b/taskcluster/gecko_taskgraph/transforms/beetmover_apt.py
new file mode 100644
index 0000000000..8c56f1a968
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/beetmover_apt.py
@@ -0,0 +1,114 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+from itertools import islice
+
+from taskgraph import MAX_DEPENDENCIES
+from taskgraph.transforms.base import TransformSequence
+
+from gecko_taskgraph.util.platforms import architecture
+from gecko_taskgraph.util.scriptworker import (
+ generate_artifact_registry_gcs_sources,
+ get_beetmover_apt_repo_scope,
+ get_beetmover_repo_action_scope,
+)
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def beetmover_apt(config, tasks):
+ product = (
+ "firefox"
+ if not config.params["release_type"] # try
+ or config.params["release_type"] == "nightly"
+ else config.params["release_product"]
+ )
+ filtered_tasks = filter_beetmover_apt_tasks(tasks, product)
+ # There are too many beetmover-repackage dependencies for a single task
+ # and we hit the taskgraph dependencies limit.
+ # To work around this limitation, we chunk the would be task
+ # into tasks dependendent on, at most, half of MAX_DEPENDENCIES.
+ batches = batched(filtered_tasks, MAX_DEPENDENCIES // 2)
+ for index, batch in enumerate(batches):
+ dependencies = {}
+ gcs_sources = []
+ for task in batch:
+ dep = task["primary-dependency"]
+ dependencies[dep.label] = dep.label
+ gcs_sources.extend(generate_artifact_registry_gcs_sources(dep))
+ description = f"Batch {index + 1} of beetmover APT submissions for the {config.params['release_type']} .deb packages"
+ platform = "firefox-release/opt"
+ treeherder = {
+ "platform": platform,
+ "tier": 1,
+ "kind": "other",
+ "symbol": f"BM-apt(batch-{index + 1})",
+ }
+ apt_repo_scope = get_beetmover_apt_repo_scope(config)
+ repo_action_scope = get_beetmover_repo_action_scope(config)
+ attributes = {
+ "required_signoffs": ["mar-signing"],
+ "shippable": True,
+ "shipping_product": product,
+ }
+ task = {
+ "label": f"{config.kind}-{index + 1}-{platform}",
+ "description": description,
+ "worker-type": "beetmover",
+ "treeherder": treeherder,
+ "scopes": [apt_repo_scope, repo_action_scope],
+ "attributes": attributes,
+ "shipping-phase": "ship",
+ "shipping-product": product,
+ "dependencies": dependencies,
+ }
+ worker = {
+ "implementation": "beetmover-import-from-gcs-to-artifact-registry",
+ "product": product,
+ "gcs-sources": gcs_sources,
+ }
+ task["worker"] = worker
+ yield task
+
+
+def batched(iterable, n):
+ "Batch data into tuples of length n. The last batch may be shorter."
+ # batched('ABCDEFG', 3) --> ABC DEF G
+ if n < 1:
+ raise ValueError("n must be at least one")
+ it = iter(iterable)
+ batch = tuple(islice(it, n))
+ while batch:
+ yield batch
+ batch = tuple(islice(it, n))
+
+
+def filter_beetmover_apt_tasks(tasks, product):
+ return (task for task in tasks if filter_beetmover_apt_task(task, product))
+
+
+def filter_beetmover_apt_task(task, product):
+ # We only create beetmover-apt tasks for l10n beetmover-repackage tasks that
+ # beetmove langpack .deb packages. The langpack .deb packages support all
+ # architectures, so we generate them only on x86_64 tasks.
+ return (
+ is_x86_64_l10n_task(task) or is_not_l10n_task(task)
+ ) and is_task_for_product(task, product)
+
+
+def is_x86_64_l10n_task(task):
+ dep = task["primary-dependency"]
+ locale = dep.attributes.get("locale")
+ return locale and architecture(dep.attributes["build_platform"]) == "x86_64"
+
+
+def is_not_l10n_task(task):
+ dep = task["primary-dependency"]
+ locale = dep.attributes.get("locale")
+ return not locale
+
+
+def is_task_for_product(task, product):
+ dep = task["primary-dependency"]
+ return dep.attributes.get("shipping_product") == product
diff --git a/taskcluster/gecko_taskgraph/transforms/beetmover_checksums.py b/taskcluster/gecko_taskgraph/transforms/beetmover_checksums.py
new file mode 100644
index 0000000000..f595854a80
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/beetmover_checksums.py
@@ -0,0 +1,133 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the checksums signing task into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.treeherder import replace_group
+from voluptuous import Optional, Required
+
+from gecko_taskgraph.loader.single_dep import schema
+from gecko_taskgraph.transforms.beetmover import craft_release_properties
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
+from gecko_taskgraph.util.scriptworker import (
+ generate_beetmover_artifact_map,
+ generate_beetmover_upstream_artifacts,
+ get_beetmover_action_scope,
+ get_beetmover_bucket_scope,
+)
+
# Schema for beetmover-checksums jobs: extends the single-dep loader schema
# with labelling, treeherder, locale and shipping metadata.
beetmover_checksums_description_schema = schema.extend(
    {
        Required("attributes"): {str: object},
        Optional("label"): str,
        Optional("treeherder"): task_description_schema["treeherder"],
        Optional("locale"): str,
        Optional("shipping-phase"): task_description_schema["shipping-phase"],
        Optional("shipping-product"): task_description_schema["shipping-product"],
    }
)

# Validate incoming jobs against the schema before any transform runs.
transforms = TransformSequence()
transforms.add_validate(beetmover_checksums_description_schema)
+
+
@transforms.add
def make_beetmover_checksums_description(config, jobs):
    """Turn each checksums-signing job into a beetmover task description.

    Builds treeherder metadata (group "BMcs"), a description, dependencies on
    the signing task, and the beetmover bucket/action scopes.
    """
    for job in jobs:
        dep_job = job["primary-dependency"]
        attributes = dep_job.attributes

        # Derive treeherder info from the dependency, replacing its group
        # with "BMcs" but keeping the rest of the symbol.
        treeherder = job.get("treeherder", {})
        treeherder.setdefault(
            "symbol",
            replace_group(dep_job.task["extra"]["treeherder"]["symbol"], "BMcs"),
        )
        dep_th_platform = (
            dep_job.task.get("extra", {})
            .get("treeherder", {})
            .get("machine", {})
            .get("platform", "")
        )
        treeherder.setdefault("platform", f"{dep_th_platform}/opt")
        treeherder.setdefault(
            "tier", dep_job.task.get("extra", {}).get("treeherder", {}).get("tier", 1)
        )
        treeherder.setdefault("kind", "build")

        label = job["label"]
        build_platform = attributes.get("build_platform")

        description = (
            "Beetmover submission of checksums for locale '{locale}' for build '"
            "{build_platform}/{build_type}'".format(
                locale=attributes.get("locale", "en-US"),
                build_platform=build_platform,
                build_type=attributes.get("build_type"),
            )
        )

        # Product is inferred from the platform name.
        extra = {}
        if "devedition" in build_platform:
            extra["product"] = "devedition"
        else:
            extra["product"] = "firefox"

        dependencies = {dep_job.kind: dep_job.label}

        # NOTE: `attributes` is rebound here — from this point on it is the
        # copied/merged attribute set, not the dependency's raw attributes.
        attributes = copy_attributes_from_dependent_job(dep_job)
        attributes.update(job.get("attributes", {}))

        # l10n repacks override the symbol with the locale they ship.
        if dep_job.attributes.get("locale"):
            treeherder["symbol"] = "BMcs({})".format(dep_job.attributes.get("locale"))
            attributes["locale"] = dep_job.attributes.get("locale")

        bucket_scope = get_beetmover_bucket_scope(config)
        action_scope = get_beetmover_action_scope(config)

        task = {
            "label": label,
            "description": description,
            "worker-type": "beetmover",
            "scopes": [bucket_scope, action_scope],
            "dependencies": dependencies,
            "attributes": attributes,
            "run-on-projects": dep_job.attributes.get("run_on_projects"),
            "treeherder": treeherder,
            "extra": extra,
        }

        # Shipping metadata is only forwarded when the job declares it.
        if "shipping-phase" in job:
            task["shipping-phase"] = job["shipping-phase"]

        if "shipping-product" in job:
            task["shipping-product"] = job["shipping-product"]

        yield task
+
+
@transforms.add
def make_beetmover_checksums_worker(config, jobs):
    """Attach the beetmover scriptworker payload to each checksums task."""
    for task in jobs:
        task_locale = task["attributes"].get("locale")
        task_platform = task["attributes"]["build_platform"]

        # Build the payload pieces in the same order as the other beetmover
        # transforms: release properties first, then the artifact listings.
        release_props = craft_release_properties(config, task)
        upstream = generate_beetmover_upstream_artifacts(
            config, task, task_platform, task_locale
        )
        artifact_map = generate_beetmover_artifact_map(
            config, task, platform=task_platform, locale=task_locale
        )

        worker_payload = {
            "implementation": "beetmover",
            "release-properties": release_props,
            "upstream-artifacts": upstream,
            "artifact-map": artifact_map,
        }
        if task_locale:
            worker_payload["locale"] = task_locale
        task["worker"] = worker_payload

        yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/beetmover_emefree_checksums.py b/taskcluster/gecko_taskgraph/transforms/beetmover_emefree_checksums.py
new file mode 100644
index 0000000000..9c2b49b6c5
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/beetmover_emefree_checksums.py
@@ -0,0 +1,139 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform release-beetmover-source-checksums into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from voluptuous import Optional
+
+from gecko_taskgraph.loader.single_dep import schema
+from gecko_taskgraph.transforms.beetmover import craft_release_properties
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
+
# Schema for EME-free checksums beetmover jobs: extends the single-dep loader
# schema with labelling, extra data and shipping metadata.
beetmover_checksums_description_schema = schema.extend(
    {
        Optional("label"): str,
        Optional("extra"): object,
        Optional("shipping-phase"): task_description_schema["shipping-phase"],
        Optional("shipping-product"): task_description_schema["shipping-product"],
    }
)


# Validate incoming jobs against the schema before any transform runs.
transforms = TransformSequence()
transforms.add_validate(beetmover_checksums_description_schema)
+
+
@transforms.add
def make_beetmover_checksums_description(config, jobs):
    """Turn each partner-repack beetmover job into a checksums task description.

    Reuses the dependency's worker type and scopes, and records the partner
    repack id and partner path in `extra` for the worker transform below.
    """
    for job in jobs:
        dep_job = job["primary-dependency"]
        attributes = dep_job.attributes
        build_platform = attributes.get("build_platform")
        if not build_platform:
            raise Exception("Cannot find build platform!")
        repack_id = dep_job.task.get("extra", {}).get("repack_id")
        if not repack_id:
            raise Exception("Cannot find repack id!")

        # Derive the checksums label from the upstream beetmover label.
        label = dep_job.label.replace("beetmover-", "beetmover-checksums-")
        description = (
            "Beetmove checksums for repack_id '{repack_id}' for build '"
            "{build_platform}/{build_type}'".format(
                repack_id=repack_id,
                build_platform=build_platform,
                build_type=attributes.get("build_type"),
            )
        )

        # The partner path lives in the `locale` field of the upstream
        # beetmover task's first upstreamArtifacts entry.
        extra = {}
        extra["partner_path"] = dep_job.task["payload"]["upstreamArtifacts"][0][
            "locale"
        ]
        extra["repack_id"] = repack_id

        dependencies = {dep_job.kind: dep_job.label}
        # Also inherit any beetmover dependencies of the upstream task.
        for k, v in dep_job.dependencies.items():
            if k.startswith("beetmover"):
                dependencies[k] = v

        attributes = copy_attributes_from_dependent_job(dep_job)

        task = {
            "label": label,
            "description": description,
            # Reuse the exact worker pool of the upstream beetmover task.
            "worker-type": "{}/{}".format(
                dep_job.task["provisionerId"],
                dep_job.task["workerType"],
            ),
            "scopes": dep_job.task["scopes"],
            "dependencies": dependencies,
            "attributes": attributes,
            "run-on-projects": dep_job.attributes.get("run_on_projects"),
            "extra": extra,
        }

        # Shipping metadata is only forwarded when the job declares it.
        if "shipping-phase" in job:
            task["shipping-phase"] = job["shipping-phase"]

        if "shipping-product" in job:
            task["shipping-product"] = job["shipping-product"]

        yield task
+
+
def generate_upstream_artifacts(refs, partner_path):
    """Return the upstreamArtifacts payload for a partner checksums task.

    *refs* maps "beetmover" to a task reference string; *partner_path* is the
    partner-specific destination path appended to the locale field.
    """
    # Until bug 1331141 is fixed, any new artifact added here that needs to be
    # transferred to S3 also requires a follow-up beetmover patch in
    # https://github.com/mozilla-releng/beetmoverscript/. See the example in
    # bug 1348286.
    checksum_paths = [
        "public/target.checksums",
    ]

    return [
        {
            "taskId": {"task-reference": refs["beetmover"]},
            "taskType": "signing",
            "paths": checksum_paths,
            "locale": f"beetmover-checksums/{partner_path}",
        }
    ]
+
+
@transforms.add
def make_beetmover_checksums_worker(config, jobs):
    """Attach the beetmover payload to each partner checksums task.

    Requires exactly one dependency, which must be a beetmover task.
    """
    for task in jobs:
        if len(task["dependencies"]) != 1:
            raise NotImplementedError("Beetmover checksums must have one dependency.")

        # Locate the beetmover dependency and build a task reference to it.
        beetmover_ref = None
        for dep_label in task["dependencies"]:
            if dep_label.endswith("beetmover"):
                beetmover_ref = f"<{dep_label}>"
        if beetmover_ref is None:
            raise NotImplementedError(
                "Beetmover checksums must have a beetmover dependency!"
            )

        task["worker"] = {
            "implementation": "beetmover",
            "release-properties": craft_release_properties(config, task),
            "upstream-artifacts": generate_upstream_artifacts(
                {"beetmover": beetmover_ref},
                task["extra"]["partner_path"],
            ),
            # Partner checksums are publicly readable.
            "partner-public": True,
        }

        yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/beetmover_geckoview.py b/taskcluster/gecko_taskgraph/transforms/beetmover_geckoview.py
new file mode 100644
index 0000000000..6eee5954e3
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/beetmover_geckoview.py
@@ -0,0 +1,166 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the beetmover task into an actual task description.
+"""
+
+
+from copy import deepcopy
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import optionally_keyed_by, resolve_keyed_by
+from voluptuous import Optional, Required
+
+from gecko_taskgraph.loader.single_dep import schema
+from gecko_taskgraph.transforms.beetmover import (
+ craft_release_properties as beetmover_craft_release_properties,
+)
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import (
+ copy_attributes_from_dependent_job,
+ release_level,
+)
+from gecko_taskgraph.util.declarative_artifacts import (
+ get_geckoview_artifact_id,
+ get_geckoview_artifact_map,
+ get_geckoview_upstream_artifacts,
+)
+
# Schema for geckoview (maven) beetmover jobs. Several fields are keyed-by
# and resolved in the `resolve_keys` transform below.
beetmover_description_schema = schema.extend(
    {
        Optional("label"): str,
        Optional("treeherder"): task_description_schema["treeherder"],
        Required("run-on-projects"): task_description_schema["run-on-projects"],
        Required("run-on-hg-branches"): task_description_schema["run-on-hg-branches"],
        # Keyed by release level (production vs staging).
        Optional("bucket-scope"): optionally_keyed_by("release-level", str),
        # Keyed by repository project.
        Optional("shipping-phase"): optionally_keyed_by(
            "project", task_description_schema["shipping-phase"]
        ),
        Optional("shipping-product"): task_description_schema["shipping-product"],
        Optional("attributes"): task_description_schema["attributes"],
    }
)

# Validate incoming jobs against the schema before any transform runs.
transforms = TransformSequence()
transforms.add_validate(beetmover_description_schema)
+
+
@transforms.add
def resolve_keys(config, jobs):
    """Resolve the by-project and by-release-level keyed fields on each job."""
    project = config.params["project"]
    for job in jobs:
        label = job["label"]
        # These two fields are keyed by repository project.
        for field in ("run-on-hg-branches", "shipping-phase"):
            resolve_keyed_by(job, field, item_name=label, project=project)
        # The bucket scope is keyed by release level instead.
        resolve_keyed_by(
            job,
            "bucket-scope",
            item_name=label,
            **{"release-level": release_level(project)},
        )
        yield job
+
+
@transforms.add
def split_maven_packages(config, jobs):
    """Fan each job out into one copy per maven package it publishes."""
    for job in jobs:
        dep_attributes = copy_attributes_from_dependent_job(
            job["primary-dependency"]
        )
        for maven_package in dep_attributes["maven_packages"]:
            package_copy = deepcopy(job)
            package_copy["maven-package"] = maven_package
            yield package_copy
+
+
@transforms.add
def make_task_description(config, jobs):
    """Turn each geckoview maven job into a full beetmover task description.

    Sets treeherder defaults (symbol "BM-<package>"), dependencies on the
    upstream build, and the push-to-maven scopes.
    """
    for job in jobs:
        dep_job = job["primary-dependency"]
        attributes = copy_attributes_from_dependent_job(dep_job)
        attributes.update(job.get("attributes", {}))

        treeherder = job.get("treeherder", {})
        dep_th_platform = (
            dep_job.task.get("extra", {})
            .get("treeherder", {})
            .get("machine", {})
            .get("platform", "")
        )
        treeherder.setdefault("platform", f"{dep_th_platform}/opt")
        treeherder.setdefault("tier", 2)
        treeherder.setdefault("kind", "build")
        package = job["maven-package"]
        treeherder.setdefault("symbol", f"BM-{package}")
        label = job["label"]
        # Fix: the original concatenated "for geckoview" directly onto
        # "{build_platform}/..." with no separator and an unmatched closing
        # quote, producing e.g. "for geckoviewandroid/opt'". Add the opening
        # quote to match the phrasing of the other beetmover transforms.
        description = (
            "Beetmover submission for geckoview '"
            "{build_platform}/{build_type}'".format(
                build_platform=attributes.get("build_platform"),
                build_type=attributes.get("build_type"),
            )
        )

        # Inherit the dependency's own dependencies as well as the dependency
        # itself.
        dependencies = deepcopy(dep_job.dependencies)
        dependencies[dep_job.kind] = dep_job.label

        if job.get("locale"):
            attributes["locale"] = job["locale"]

        attributes["run_on_hg_branches"] = job["run-on-hg-branches"]

        task = {
            "label": f"{package}-{label}",
            "description": description,
            "worker-type": "beetmover",
            "scopes": [
                job["bucket-scope"],
                "project:releng:beetmover:action:push-to-maven",
            ],
            "dependencies": dependencies,
            "attributes": attributes,
            "run-on-projects": job["run-on-projects"],
            "treeherder": treeherder,
            "shipping-phase": job["shipping-phase"],
            # Carried forward for the worker transform; deleted there.
            "maven-package": package,
        }

        yield task
+
+
@transforms.add
def make_task_worker(config, jobs):
    """Replace the task's worker stanza with a beetmover-maven payload."""
    for task in jobs:
        maven_package = task["maven-package"]
        task["worker"] = {
            "artifact-map": get_geckoview_artifact_map(config, task),
            "implementation": "beetmover-maven",
            "release-properties": craft_release_properties(config, task),
            "upstream-artifacts": get_geckoview_upstream_artifacts(
                config, task, maven_package
            ),
        }
        # The package name was only needed while building the payload above.
        del task["maven-package"]

        yield task
+
+
def craft_release_properties(config, job):
    """Return beetmover release properties customised for geckoview."""
    props = beetmover_craft_release_properties(config, job)
    attrs = job["attributes"]

    props["artifact-id"] = get_geckoview_artifact_id(
        config,
        attrs["build_platform"],
        job["maven-package"],
        attrs.get("update-channel"),
    )
    props["app-name"] = "geckoview"

    return props
diff --git a/taskcluster/gecko_taskgraph/transforms/beetmover_langpack_checksums.py b/taskcluster/gecko_taskgraph/transforms/beetmover_langpack_checksums.py
new file mode 100644
index 0000000000..9318ed29d4
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/beetmover_langpack_checksums.py
@@ -0,0 +1,128 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform release-beetmover-langpack-checksums into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.treeherder import inherit_treeherder_from_dep
+from voluptuous import Optional, Required
+
+from gecko_taskgraph.loader.single_dep import schema
+from gecko_taskgraph.transforms.beetmover import craft_release_properties
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
+from gecko_taskgraph.util.scriptworker import (
+ generate_beetmover_artifact_map,
+ generate_beetmover_upstream_artifacts,
+ get_beetmover_action_scope,
+ get_beetmover_bucket_scope,
+)
+
# Schema for langpack checksums beetmover jobs: extends the single-dep loader
# schema with labelling, treeherder, locale and shipping metadata.
beetmover_checksums_description_schema = schema.extend(
    {
        Required("attributes"): {str: object},
        Optional("label"): str,
        Optional("treeherder"): task_description_schema["treeherder"],
        Optional("locale"): str,
        Optional("shipping-phase"): task_description_schema["shipping-phase"],
        Optional("shipping-product"): task_description_schema["shipping-product"],
    }
)

# Validate incoming jobs against the schema before any transform runs.
transforms = TransformSequence()
transforms.add_validate(beetmover_checksums_description_schema)
+
+
@transforms.add
def make_beetmover_checksums_description(config, jobs):
    """Turn each langpack job into a beetmover checksums task description.

    Inherits treeherder info from the dependency (symbol "BMcslang(N<chunk>)")
    and depends on the upstream task plus its beetmover dependencies.
    """
    for job in jobs:
        dep_job = job["primary-dependency"]
        attributes = dep_job.attributes

        treeherder = inherit_treeherder_from_dep(job, dep_job)
        treeherder.setdefault(
            "symbol", "BMcslang(N{})".format(attributes.get("l10n_chunk", ""))
        )

        label = job["label"]
        build_platform = attributes.get("build_platform")

        description = "Beetmover submission of checksums for langpack files"

        # Product is inferred from the platform name.
        extra = {}
        if "devedition" in build_platform:
            extra["product"] = "devedition"
        else:
            extra["product"] = "firefox"

        dependencies = {dep_job.kind: dep_job.label}
        # Also inherit any beetmover dependencies of the upstream task.
        for k, v in dep_job.dependencies.items():
            if k.startswith("beetmover"):
                dependencies[k] = v

        # NOTE: `attributes` is rebound here — from this point on it is the
        # copied/merged attribute set, not the dependency's raw attributes.
        attributes = copy_attributes_from_dependent_job(dep_job)
        if "chunk_locales" in dep_job.attributes:
            attributes["chunk_locales"] = dep_job.attributes["chunk_locales"]
        attributes.update(job.get("attributes", {}))

        bucket_scope = get_beetmover_bucket_scope(config)
        action_scope = get_beetmover_action_scope(config)

        task = {
            "label": label,
            "description": description,
            "worker-type": "beetmover",
            "scopes": [bucket_scope, action_scope],
            "dependencies": dependencies,
            "attributes": attributes,
            "run-on-projects": dep_job.attributes.get("run_on_projects"),
            "treeherder": treeherder,
            "extra": extra,
        }

        # Shipping metadata is only forwarded when the job declares it.
        if "shipping-phase" in job:
            task["shipping-phase"] = job["shipping-phase"]

        if "shipping-product" in job:
            task["shipping-product"] = job["shipping-product"]

        yield task
+
+
@transforms.add
def make_beetmover_checksums_worker(config, jobs):
    """Attach the beetmover payload to each langpack checksums task.

    Requires exactly one dependency, which must be a release-beetmover task;
    the reference lookup only validates the dependency, the payload itself is
    built from the scriptworker helpers.
    """
    for task in jobs:
        if len(task["dependencies"]) != 1:
            raise NotImplementedError("Beetmover checksums must have one dependency.")

        chunk_locales = task["attributes"].get("chunk_locales")
        build_platform = task["attributes"]["build_platform"]

        # Validate that the single dependency really is a beetmover task.
        beetmover_ref = None
        for dep_label in task["dependencies"]:
            if dep_label.startswith("release-beetmover"):
                beetmover_ref = f"<{dep_label}>"
        if beetmover_ref is None:
            raise NotImplementedError(
                "Beetmover checksums must have a beetmover dependency!"
            )

        task["worker"] = {
            "implementation": "beetmover",
            "release-properties": craft_release_properties(config, task),
            "upstream-artifacts": generate_beetmover_upstream_artifacts(
                config, task, build_platform, chunk_locales
            ),
            "artifact-map": generate_beetmover_artifact_map(
                config, task, platform=build_platform, locale=chunk_locales
            ),
        }

        yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/beetmover_push_to_release.py b/taskcluster/gecko_taskgraph/transforms/beetmover_push_to_release.py
new file mode 100644
index 0000000000..b6307d93cf
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/beetmover_push_to_release.py
@@ -0,0 +1,93 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the beetmover-push-to-release task into a task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import Schema, taskref_or_string
+from voluptuous import Optional, Required
+
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.scriptworker import (
+ add_scope_prefix,
+ get_beetmover_bucket_scope,
+)
+
# Schema for the push-to-release beetmover task (copies candidates to the
# release directory). This kind has no primary dependency, hence a plain
# Schema rather than the single-dep loader schema.
beetmover_push_to_release_description_schema = Schema(
    {
        Required("name"): str,
        Required("product"): str,
        Required("treeherder-platform"): str,
        Optional("attributes"): {str: object},
        Optional("job-from"): task_description_schema["job-from"],
        Optional("run"): {str: object},
        Optional("run-on-projects"): task_description_schema["run-on-projects"],
        Optional("dependencies"): {str: taskref_or_string},
        Optional("index"): {str: str},
        Optional("routes"): [str],
        Required("shipping-phase"): task_description_schema["shipping-phase"],
        Required("shipping-product"): task_description_schema["shipping-product"],
        Optional("worker"): {
            Optional("max-run-time"): int,
        },
    }
)


# Validate incoming jobs against the schema before any transform runs.
transforms = TransformSequence()
transforms.add_validate(beetmover_push_to_release_description_schema)
+
+
@transforms.add
def make_beetmover_push_to_release_description(config, jobs):
    """Build the task description for the push-to-releases beetmover job."""
    for job in jobs:
        treeherder = job.get("treeherder", {})
        treeherder.setdefault("symbol", "Rel(BM-C)")
        treeherder.setdefault("tier", 1)
        treeherder.setdefault("kind", "build")
        treeherder.setdefault("platform", job["treeherder-platform"])

        label = job["name"]
        description = "Beetmover push to release for '{product}'".format(
            product=job["product"]
        )

        # Bucket scope depends on release level; the action scope is fixed.
        bucket_scope = get_beetmover_bucket_scope(config)
        action_scope = add_scope_prefix(config, "beetmover:action:push-to-releases")

        task = {
            "label": label,
            "description": description,
            "worker-type": "beetmover",
            "scopes": [bucket_scope, action_scope],
            # `product` is consumed (and removed) by the worker transform.
            "product": job["product"],
            "dependencies": job["dependencies"],
            "attributes": job.get("attributes", {}),
            "run-on-projects": job.get("run-on-projects"),
            "treeherder": treeherder,
            "shipping-phase": job.get("shipping-phase", "push"),
            "shipping-product": job.get("shipping-product"),
            "routes": job.get("routes", []),
            "extra": job.get("extra", {}),
            "worker": job.get("worker", {}),
        }

        yield task
+
+
@transforms.add
def make_beetmover_push_to_release_worker(config, jobs):
    """Swap the description's worker stanza for the scriptworker payload."""
    for task in jobs:
        max_run_time = task.get("worker", {}).get("max-run-time")

        payload = {
            "implementation": "beetmover-push-to-release",
            "product": task["product"],
        }
        if max_run_time:
            payload["max-run-time"] = max_run_time

        task["worker"] = payload
        # `product` was only a staging field from the description transform.
        del task["product"]

        yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/beetmover_repackage.py b/taskcluster/gecko_taskgraph/transforms/beetmover_repackage.py
new file mode 100644
index 0000000000..a1ce911b9f
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/beetmover_repackage.py
@@ -0,0 +1,327 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the beetmover task into an actual task description.
+"""
+
+import logging
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.taskcluster import get_artifact_prefix
+from taskgraph.util.treeherder import inherit_treeherder_from_dep, replace_group
+from voluptuous import Optional, Required
+
+from gecko_taskgraph.loader.multi_dep import schema
+from gecko_taskgraph.transforms.beetmover import craft_release_properties
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
+from gecko_taskgraph.util.partials import (
+ get_balrog_platform_name,
+ get_partials_artifacts_from_params,
+ get_partials_info_from_params,
+)
+from gecko_taskgraph.util.scriptworker import (
+ generate_beetmover_artifact_map,
+ generate_beetmover_partials_artifact_map,
+ generate_beetmover_upstream_artifacts,
+ get_beetmover_action_scope,
+ get_beetmover_bucket_scope,
+)
+
+logger = logging.getLogger(__name__)
+
+
# Schema for beetmover-repackage jobs: extends the multi-dep loader schema.
beetmover_description_schema = schema.extend(
    {
        # unique label to describe this beetmover task, defaults to {dep.label}-beetmover
        Required("label"): str,
        # treeherder is allowed here to override any defaults we use for beetmover. See
        # taskcluster/gecko_taskgraph/transforms/task.py for the schema details, and the
        # below transforms for defaults of various values.
        Optional("treeherder"): task_description_schema["treeherder"],
        Optional("attributes"): task_description_schema["attributes"],
        # locale is passed only for l10n beetmoving
        Optional("locale"): str,
        Required("shipping-phase"): task_description_schema["shipping-phase"],
        # Optional until we fix asan (run_on_projects?)
        Optional("shipping-product"): task_description_schema["shipping-product"],
    }
)

# Validate incoming jobs against the schema before any transform runs.
transforms = TransformSequence()
transforms.add_validate(beetmover_description_schema)
+
+
def get_task_by_suffix(tasks, suffix):
    """
    Given tasks<dict>, returns the key to the task with provided suffix<str>
    Raises exception if more than one task is found

    Args:
        tasks (Dict): Map of labels to tasks
        suffix (str): Suffix for the desired task

    Returns
        str or None: The key to the desired task, or None when nothing matches
    """
    labels = [label for label in tasks if label.endswith(suffix)]
    if len(labels) > 1:
        raise Exception(
            f"There should only be a single task with suffix: {suffix} - found {len(labels)}"
        )
    # Fix: the original unconditionally returned labels[0], raising an
    # IndexError when nothing matched and making the caller's
    # `if not signing_name:` guard unreachable. Return None instead so the
    # caller can produce its intended error message.
    return labels[0] if labels else None
+
+
@transforms.add
def make_task_description(config, jobs):
    """Turn each repackage beetmover job into a full task description.

    Picks the correct upstream kinds (en-US vs l10n variants, mac signing vs
    notarization), builds the dependency map, and attaches beetmover scopes.
    """
    for job in jobs:
        dep_job = job["primary-dependency"]
        attributes = dep_job.attributes

        treeherder = inherit_treeherder_from_dep(job, dep_job)
        # Prefer the build task's symbol when available so the BMR symbol
        # lines up with the build it moves.
        upstream_symbol = dep_job.task["extra"]["treeherder"]["symbol"]
        if "build" in job["dependent-tasks"]:
            upstream_symbol = job["dependent-tasks"]["build"].task["extra"][
                "treeherder"
            ]["symbol"]
        treeherder.setdefault("symbol", replace_group(upstream_symbol, "BMR"))
        label = job["label"]
        description = (
            "Beetmover submission for locale '{locale}' for build '"
            "{build_platform}/{build_type}'".format(
                locale=attributes.get("locale", "en-US"),
                build_platform=attributes.get("build_platform"),
                build_type=attributes.get("build_type"),
            )
        )

        upstream_deps = job["dependent-tasks"]

        # Default (en-US) upstream kind names; overridden below for l10n.
        signing_name = "build-signing"
        build_name = "build"
        repackage_name = "repackage"
        repackage_signing_name = "repackage-signing"
        msi_signing_name = "repackage-signing-msi"
        msix_signing_name = "repackage-signing-shippable-l10n-msix"
        mar_signing_name = "mar-signing"
        attribution_name = "attribution"
        repackage_deb_name = "repackage-deb"
        if job.get("locale"):
            signing_name = "shippable-l10n-signing"
            build_name = "shippable-l10n"
            repackage_name = "repackage-l10n"
            repackage_signing_name = "repackage-signing-l10n"
            mar_signing_name = "mar-signing-l10n"
            attribution_name = "attribution-l10n"
            repackage_deb_name = "repackage-deb-l10n"

        # The upstream "signing" task for macosx is either *-mac-signing or *-mac-notarization
        if attributes.get("build_platform", "").startswith("macosx"):
            # We use the signing task on level 1 and notarization on level 3
            if int(config.params.get("level", 0)) < 3:
                signing_name = get_task_by_suffix(upstream_deps, "-mac-signing")
            else:
                signing_name = get_task_by_suffix(upstream_deps, "-mac-notarization")
            if not signing_name:
                raise Exception("Could not find upstream kind for mac signing.")

        # Mandatory dependencies; the optional ones are added below only when
        # the corresponding upstream kind exists.
        dependencies = {
            "build": upstream_deps[build_name],
            "repackage": upstream_deps[repackage_name],
            "signing": upstream_deps[signing_name],
            "mar-signing": upstream_deps[mar_signing_name],
        }
        if "partials-signing" in upstream_deps:
            dependencies["partials-signing"] = upstream_deps["partials-signing"]
        if msi_signing_name in upstream_deps:
            dependencies[msi_signing_name] = upstream_deps[msi_signing_name]
        if msix_signing_name in upstream_deps:
            dependencies[msix_signing_name] = upstream_deps[msix_signing_name]
        if repackage_signing_name in upstream_deps:
            dependencies["repackage-signing"] = upstream_deps[repackage_signing_name]
        if attribution_name in upstream_deps:
            dependencies[attribution_name] = upstream_deps[attribution_name]
        if repackage_deb_name in upstream_deps:
            dependencies[repackage_deb_name] = upstream_deps[repackage_deb_name]

        # NOTE: `attributes` is rebound here — from this point on it is the
        # copied/merged attribute set, not the dependency's raw attributes.
        attributes = copy_attributes_from_dependent_job(dep_job)
        attributes.update(job.get("attributes", {}))
        if job.get("locale"):
            attributes["locale"] = job["locale"]

        bucket_scope = get_beetmover_bucket_scope(config)
        action_scope = get_beetmover_action_scope(config)

        task = {
            "label": label,
            "description": description,
            "worker-type": "beetmover",
            "scopes": [bucket_scope, action_scope],
            "dependencies": dependencies,
            "attributes": attributes,
            "run-on-projects": dep_job.attributes.get("run_on_projects"),
            "treeherder": treeherder,
            "shipping-phase": job["shipping-phase"],
            "shipping-product": job.get("shipping-product"),
        }

        yield task
+
+
def generate_partials_upstream_artifacts(job, artifacts, platform, locale=None):
    """Build the upstreamArtifacts entry for partial-update MAR files.

    NOTE(review): `platform` is currently unused here; it is kept to preserve
    the call signature.
    """
    prefix = get_artifact_prefix(job)
    # Non-en-US locales live in a per-locale subdirectory.
    if locale and locale != "en-US":
        prefix = f"{prefix}/{locale}"

    partial_paths = [f"{prefix}/{path}" for path, _ in artifacts]

    return [
        {
            "taskId": {"task-reference": "<partials-signing>"},
            "taskType": "signing",
            "paths": partial_paths,
            "locale": locale or "en-US",
        }
    ]
+
+
@transforms.add
def make_task_worker(config, jobs):
    """Attach the beetmover scriptworker payload to each repackage task."""
    for task in jobs:
        task_locale = task["attributes"].get("locale")
        task_platform = task["attributes"]["build_platform"]

        # Keep the helper call order of the sibling transforms: release
        # properties, then upstream artifacts, then the artifact map.
        payload = {
            "implementation": "beetmover",
            "release-properties": craft_release_properties(config, task),
        }
        payload["upstream-artifacts"] = generate_beetmover_upstream_artifacts(
            config, task, task_platform, task_locale
        )
        payload["artifact-map"] = generate_beetmover_artifact_map(
            config, task, platform=task_platform, locale=task_locale
        )
        if task_locale:
            payload["locale"] = task_locale

        task["worker"] = payload

        yield task
+
+
@transforms.add
def strip_unwanted_langpacks_from_worker(config, jobs):
    """Strips out langpacks where we didn't sign them.

    This explicitly deletes langpacks from upstream artifacts and from artifact-maps.
    Due to limitations in declarative artifacts, doing this was our easiest way right now.
    """
    # Platforms whose langpacks are signed and therefore kept as-is.
    ALWAYS_OK_PLATFORMS = {"linux64-shippable", "linux64-devedition"}
    OSX_OK_PLATFORMS = {"macosx64-shippable", "macosx64-devedition"}
    for job in jobs:
        platform = job["attributes"].get("build_platform")
        if platform in ALWAYS_OK_PLATFORMS:
            # No need to strip anything
            yield job
            continue

        # NOTE(review): `map` shadows the builtin here; artifact-map entries
        # have dict-valued "paths". Iterate over a slice copy ([:]) because
        # entries may be removed from the list mid-loop.
        for map in job["worker"].get("artifact-map", [])[:]:
            if not any([path.endswith("target.langpack.xpi") for path in map["paths"]]):
                continue
            if map["locale"] == "ja-JP-mac":
                # This locale should only exist on mac
                # NOTE(review): stripped under `python -O`; relies on asserts
                # being enabled in CI.
                assert platform in OSX_OK_PLATFORMS
                continue
            # map[paths] is being modified while iterating, so we need to resolve the
            # ".keys()" iterator up front by throwing it into a list.
            for path in list(map["paths"].keys()):
                if path.endswith("target.langpack.xpi"):
                    del map["paths"][path]
            if map["paths"] == {}:
                job["worker"]["artifact-map"].remove(map)

        # Upstream-artifact entries have list-valued "paths"; rebuild the list
        # without langpack entries and drop the entry when it becomes empty.
        for artifact in job["worker"].get("upstream-artifacts", []):
            if not any(
                [path.endswith("target.langpack.xpi") for path in artifact["paths"]]
            ):
                continue
            if artifact["locale"] == "ja-JP-mac":
                # This locale should only exist on mac
                assert platform in OSX_OK_PLATFORMS
                continue
            artifact["paths"] = [
                path
                for path in artifact["paths"]
                if not path.endswith("target.langpack.xpi")
            ]
            if artifact["paths"] == []:
                job["worker"]["upstream-artifacts"].remove(artifact)

        yield job
+
+
@transforms.add
def make_partials_artifacts(config, jobs):
    """Extend the worker payload with partial-update (MAR) artifacts.

    Jobs without a partials-signing dependency pass through untouched. The
    partials metadata is also recorded under task `extra` for downstream use.
    """
    for job in jobs:
        locale = job["attributes"].get("locale")
        if not locale:
            locale = "en-US"

        platform = job["attributes"]["build_platform"]

        # Only jobs that depend on partials-signing carry partial updates.
        if "partials-signing" not in job["dependencies"]:
            yield job
            continue

        balrog_platform = get_balrog_platform_name(platform)
        artifacts = get_partials_artifacts_from_params(
            config.params.get("release_history"), balrog_platform, locale
        )

        upstream_artifacts = generate_partials_upstream_artifacts(
            job, artifacts, balrog_platform, locale
        )

        job["worker"]["upstream-artifacts"].extend(upstream_artifacts)

        extra = list()

        partials_info = get_partials_info_from_params(
            config.params.get("release_history"), balrog_platform, locale
        )

        job["worker"]["artifact-map"].extend(
            generate_beetmover_partials_artifact_map(
                config, job, partials_info, platform=platform, locale=locale
            )
        )

        # Record per-partial metadata (buildid, previous version/build) in
        # `extra` for consumers of the task definition.
        for artifact in partials_info:
            artifact_extra = {
                "locale": locale,
                "artifact_name": artifact,
                "buildid": partials_info[artifact]["buildid"],
                "platform": balrog_platform,
            }
            for rel_attr in ("previousBuildNumber", "previousVersion"):
                if partials_info[artifact].get(rel_attr):
                    artifact_extra[rel_attr] = partials_info[artifact][rel_attr]
            extra.append(artifact_extra)

        job.setdefault("extra", {})
        job["extra"]["partials"] = extra

        yield job
+
+
@transforms.add
def convert_deps(config, jobs):
    """Flatten dependency Task objects into their plain labels."""
    for task in jobs:
        converted = {}
        for dep_name, dep_task in task["dependencies"].items():
            converted[dep_name] = dep_task.label
        task["dependencies"] = converted
        yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/beetmover_repackage_l10n.py b/taskcluster/gecko_taskgraph/transforms/beetmover_repackage_l10n.py
new file mode 100644
index 0000000000..da2a41ccb3
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/beetmover_repackage_l10n.py
@@ -0,0 +1,44 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the signing task into an actual task description.
+"""
+
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.treeherder import join_symbol
+
+transforms = TransformSequence()
+
+
@transforms.add
def make_beetmover_description(config, jobs):
    """Build the l10n beetmover description for localized repack jobs.

    Jobs whose primary dependency has no locale attribute are passed
    through unchanged; localized jobs get a treeherder symbol made of the
    locale code inside the "BMR" group.
    """
    for job in jobs:
        dep_job = job["primary-dependency"]
        locale = dep_job.attributes.get("locale")

        if not locale:
            # Not a localized repack; nothing to annotate.
            yield job
            continue

        # Treeherder symbol: locale code within the BMR group.
        treeherder = {"symbol": join_symbol("BMR", locale)}

        yield {
            "label": job["label"],
            "primary-dependency": dep_job,
            "dependent-tasks": job["dependent-tasks"],
            "attributes": job["attributes"],
            "treeherder": treeherder,
            "locale": locale,
            "shipping-phase": job["shipping-phase"],
            "shipping-product": job["shipping-product"],
        }
diff --git a/taskcluster/gecko_taskgraph/transforms/beetmover_repackage_partner.py b/taskcluster/gecko_taskgraph/transforms/beetmover_repackage_partner.py
new file mode 100644
index 0000000000..40dc370f33
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/beetmover_repackage_partner.py
@@ -0,0 +1,326 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the beetmover task into an actual task description.
+"""
+
+import logging
+from copy import deepcopy
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import optionally_keyed_by, resolve_keyed_by
+from taskgraph.util.taskcluster import get_artifact_prefix
+from voluptuous import Any, Optional, Required
+
+from gecko_taskgraph.loader.single_dep import schema
+from gecko_taskgraph.transforms.beetmover import craft_release_properties
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import (
+ copy_attributes_from_dependent_job,
+ release_level,
+)
+from gecko_taskgraph.util.partners import get_ftp_platform, get_partner_config_by_kind
+from gecko_taskgraph.util.scriptworker import (
+ add_scope_prefix,
+ get_beetmover_bucket_scope,
+)
+
+logger = logging.getLogger(__name__)
+
+
# Schema for beetmover-repackage-partner task definitions.  Extends the
# single-dep loader schema with partner-specific destination paths and the
# (release-level keyed) scope for the private partner bucket.
beetmover_description_schema = schema.extend(
    {
        # unique label to describe this beetmover task, defaults to {dep.label}-beetmover
        Optional("label"): str,
        # Scope granting access to the partner bucket; may be keyed by release-level.
        Required("partner-bucket-scope"): optionally_keyed_by("release-level", str),
        # Destination path templates; kinds may set either to None to skip that side.
        Required("partner-public-path"): Any(None, str),
        Required("partner-private-path"): Any(None, str),
        Optional("extra"): object,
        Required("shipping-phase"): task_description_schema["shipping-phase"],
        Optional("shipping-product"): task_description_schema["shipping-product"],
        Optional("priority"): task_description_schema["priority"],
    }
)

transforms = TransformSequence()
# Validate incoming jobs against the schema before any transform runs.
transforms.add_validate(beetmover_description_schema)
+
+
@transforms.add
def resolve_keys(config, jobs):
    """Resolve the release-level keyed-by value of partner-bucket-scope."""
    for job in jobs:
        context = {"release-level": release_level(config.params["project"])}
        resolve_keyed_by(
            job, "partner-bucket-scope", item_name=job["label"], **context
        )
        yield job
+
+
@transforms.add
def make_task_description(config, jobs):
    """Turn a partner repackage(-signing) dependency into a beetmover task
    description.

    The dependency must carry a ``repack_id`` ("partner/subpartner/locale")
    in its task.extra and a ``build_platform`` attribute; otherwise an
    Exception is raised.
    """
    for job in jobs:
        dep_job = job["primary-dependency"]
        repack_id = dep_job.task.get("extra", {}).get("repack_id")
        if not repack_id:
            raise Exception("Cannot find repack id!")

        attributes = dep_job.attributes
        build_platform = attributes.get("build_platform")
        if not build_platform:
            raise Exception("Cannot find build platform!")

        # Derive the beetmover label from the dependency's label.  The
        # replacements are chained on `label`; the previous code re-read
        # dep_job.label on the second line, silently discarding the result
        # of the first replacement.
        label = dep_job.label.replace("repackage-signing-l10n", "beetmover-")
        label = label.replace("repackage-signing-", "beetmover-")
        label = label.replace("repackage-", "beetmover-")
        label = label.replace("chunking-dummy-", "beetmover-")
        description = (
            "Beetmover submission for repack_id '{repack_id}' for build '"
            "{build_platform}/{build_type}'".format(
                repack_id=repack_id,
                build_platform=build_platform,
                build_type=attributes.get("build_type"),
            )
        )

        # Upstream task labels: the partner (or EME-free) repack build, plus
        # repackage/repackage-signing on mac and windows.
        base_label = "release-partner-repack"
        if "eme" in config.kind:
            base_label = "release-eme-free-repack"
        dependencies = {"build": f"{base_label}-{build_platform}"}
        if "macosx" in build_platform or "win" in build_platform:
            suffix = repack_id.replace("/", "-")
            dependencies["repackage"] = (
                f"{base_label}-repackage-{build_platform}-{suffix}"
            )
            dependencies["repackage-signing"] = (
                f"{base_label}-repackage-signing-{build_platform}-{suffix}"
            )

        attributes = copy_attributes_from_dependent_job(dep_job)

        task = {
            "label": label,
            "description": description,
            "dependencies": dependencies,
            "attributes": attributes,
            "run-on-projects": dep_job.attributes.get("run_on_projects"),
            "shipping-phase": job["shipping-phase"],
            "shipping-product": job.get("shipping-product"),
            "partner-private-path": job["partner-private-path"],
            "partner-public-path": job["partner-public-path"],
            "partner-bucket-scope": job["partner-bucket-scope"],
            "extra": {
                "repack_id": repack_id,
            },
        }
        # we may have reduced the priority for partner jobs, otherwise task.py will set it
        if job.get("priority"):
            task["priority"] = job["priority"]

        yield task
+
+
def populate_scopes_and_worker_type(config, job, bucket_scope, partner_public=False):
    """Return a deep copy of *job* wired to the beetmover worker with the
    push-to-partner action scope plus *bucket_scope*; public variants get a
    "-public" label suffix and a partner_public marker."""
    copied = deepcopy(job)
    copied["scopes"] = [
        bucket_scope,
        add_scope_prefix(config, "beetmover:action:push-to-partner"),
    ]
    copied["worker-type"] = "beetmover"
    copied["partner_public"] = partner_public
    if partner_public:
        copied["label"] = "{}-public".format(copied["label"])
    return copied
+
+
@transforms.add
def split_public_and_private(config, jobs):
    """Route each partner job to either the public candidates bucket or the
    private partner bucket, based on the sub-partner's upload_to_candidates
    configuration."""
    public_bucket_scope = get_beetmover_bucket_scope(config)
    partner_config = get_partner_config_by_kind(config, config.kind)

    for job in jobs:
        partner_bucket_scope = add_scope_prefix(config, job["partner-bucket-scope"])
        partner, subpartner, _ = job["extra"]["repack_id"].split("/")

        is_public = bool(
            partner_config[partner][subpartner].get("upload_to_candidates")
        )
        scope = public_bucket_scope if is_public else partner_bucket_scope
        yield populate_scopes_and_worker_type(
            config, job, scope, partner_public=is_public
        )
+
+
def generate_upstream_artifacts(
    job,
    build_task_ref,
    repackage_task_ref,
    repackage_signing_task_ref,
    platform,
    repack_id,
    partner_path,
    repack_stub_installer=False,
):
    """Build the beetmover upstream-artifact list for a partner repack.

    Per platform family:
      * linux:   tarball from the build task, signature from repackage-signing
      * macosx:  dmg from the repackage task, signature from repackage-signing
      * windows: installer (and, for win32 when requested, the stub
                 installer) plus signatures, all from repackage-signing

    The partner path is passed through the "locale" field.  Raises when the
    platform matches none of the known families.
    """
    prefix = get_artifact_prefix(job)

    def _artifact(task_ref, task_type, filename):
        # One upstream-artifact entry under the repack_id subdirectory.
        return {
            "taskId": {"task-reference": task_ref},
            "taskType": task_type,
            "paths": [f"{prefix}/{repack_id}/{filename}"],
            "locale": partner_path,
        }

    upstream_artifacts = []
    if "linux" in platform:
        upstream_artifacts.append(
            _artifact(build_task_ref, "build", "target.tar.bz2")
        )
        upstream_artifacts.append(
            _artifact(repackage_signing_task_ref, "repackage", "target.tar.bz2.asc")
        )
    elif "macosx" in platform:
        upstream_artifacts.append(
            _artifact(repackage_task_ref, "repackage", "target.dmg")
        )
        upstream_artifacts.append(
            _artifact(repackage_signing_task_ref, "repackage", "target.dmg.asc")
        )
    elif "win" in platform:
        for filename in ("target.installer.exe", "target.installer.exe.asc"):
            upstream_artifacts.append(
                _artifact(repackage_signing_task_ref, "repackage", filename)
            )
        if platform.startswith("win32") and repack_stub_installer:
            for filename in (
                "target.stub-installer.exe",
                "target.stub-installer.exe.asc",
            ):
                upstream_artifacts.append(
                    _artifact(repackage_signing_task_ref, "repackage", filename)
                )

    if not upstream_artifacts:
        raise Exception("Couldn't find any upstream artifacts.")

    return upstream_artifacts
+
+
@transforms.add
def make_task_worker(config, jobs):
    """Populate the beetmover worker definition for partner repack jobs.

    Jobs whose destination path (public or private, depending on
    ``partner_public``) is configured as None are dropped entirely.
    """
    for job in jobs:
        platform = job["attributes"]["build_platform"]
        repack_id = job["extra"]["repack_id"]
        partner, subpartner, locale = repack_id.split("/")
        partner_config = get_partner_config_by_kind(config, config.kind)
        repack_stub_installer = partner_config[partner][subpartner].get(
            "repack_stub_installer"
        )

        # Map dependency labels onto the three task roles; a missing role
        # stays None and yields a "<None>" reference, as before.
        build_task = repackage_task = repackage_signing_task = None
        for dependency in job["dependencies"]:
            if "repackage-signing" in dependency:
                repackage_signing_task = dependency
            elif "repackage" in dependency:
                repackage_task = dependency
            else:
                build_task = "build"

        build_task_ref = f"<{build_task}>"
        repackage_task_ref = f"<{repackage_task}>"
        repackage_signing_task_ref = f"<{repackage_signing_task}>"

        # generate the partner path; we'll send this to beetmover as the "locale"
        repl_dict = {
            "build_number": config.params["build_number"],
            "locale": locale,
            "partner": partner,
            "platform": get_ftp_platform(platform),
            "release_partner_build_number": config.params[
                "release_partner_build_number"
            ],
            "subpartner": subpartner,
            "version": config.params["version"],
        }
        partner_public = job["partner_public"]
        path_key = (
            "partner-public-path" if partner_public else "partner-private-path"
        )
        # Kinds can set these to None
        if not job[path_key]:
            continue
        partner_path = job[path_key].format(**repl_dict)
        for consumed_key in (
            "partner_public",
            "partner-private-path",
            "partner-public-path",
            "partner-bucket-scope",
        ):
            del job[consumed_key]

        job["worker"] = {
            "implementation": "beetmover",
            "release-properties": craft_release_properties(config, job),
            "upstream-artifacts": generate_upstream_artifacts(
                job,
                build_task_ref,
                repackage_task_ref,
                repackage_signing_task_ref,
                platform,
                repack_id,
                partner_path,
                repack_stub_installer,
            ),
            "partner-public": partner_public,
        }

        yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/beetmover_snap.py b/taskcluster/gecko_taskgraph/transforms/beetmover_snap.py
new file mode 100644
index 0000000000..40f5132cc1
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/beetmover_snap.py
@@ -0,0 +1,42 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the snap beetmover kind into an actual task description.
+"""
+
+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
+@transforms.add
def leave_snap_repackage_dependencies_only(config, jobs):
    """Strip every dependency and upstream artifact except those coming from
    release-snap-repackage.

    XXX: We delete the build dependency because, unlike the other beetmover
    tasks, source doesn't depend on any build task at all. This hack should
    go away when we rewrite beetmover transforms to allow more flexibility in deps.
    """
    for job in jobs:
        deps = job["dependencies"]
        if "release-snap-repackage" in deps:
            job["dependencies"] = {
                "release-snap-repackage": deps["release-snap-repackage"]
            }
        else:
            job["dependencies"] = {}

        kept_artifacts = []
        for upstream_artifact in job["worker"]["upstream-artifacts"]:
            if (
                upstream_artifact["taskId"]["task-reference"]
                == "<release-snap-repackage>"
            ):
                kept_artifacts.append(upstream_artifact)
        job["worker"]["upstream-artifacts"] = kept_artifacts

        yield job
+
+
+@transforms.add
def set_custom_treeherder_job_name(config, jobs):
    """Force the treeherder symbol to "Snap(BM)" for snap beetmover jobs.

    The previous implementation assigned into the throwaway dict returned
    by ``job.get("treeherder", {})`` when the key was absent, silently
    dropping the symbol; ``setdefault`` makes the assignment stick in that
    case too.
    """
    for job in jobs:
        job.setdefault("treeherder", {})["symbol"] = "Snap(BM)"

        yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/beetmover_source.py b/taskcluster/gecko_taskgraph/transforms/beetmover_source.py
new file mode 100644
index 0000000000..573f684a98
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/beetmover_source.py
@@ -0,0 +1,35 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the beetmover-source task to remove the `build` dependency
+"""
+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
+@transforms.add
def remove_build_dependency_in_beetmover_source(config, jobs):
    """Drop the (non-signing) build dependency and its upstream artifacts.

    XXX: We delete the build dependency because, unlike the other beetmover
    tasks, source doesn't depend on any build task at all. This hack should
    go away when we rewrite beetmover transforms to allow more flexibility
    in deps.  Essentially, we should use multi_dep for beetmover.
    """
    for job in jobs:
        build_dep = next(
            (name for name in job["dependencies"] if "signing" not in name), None
        )
        if build_dep is None:
            raise Exception("Can't find build dep in beetmover source!")
        del job["dependencies"][build_dep]

        build_ref = f"<{build_dep}>"
        job["worker"]["upstream-artifacts"] = [
            upstream_artifact
            for upstream_artifact in job["worker"]["upstream-artifacts"]
            if upstream_artifact["taskId"]["task-reference"] != build_ref
        ]

        yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/beetmover_source_checksums.py b/taskcluster/gecko_taskgraph/transforms/beetmover_source_checksums.py
new file mode 100644
index 0000000000..bcaa889903
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/beetmover_source_checksums.py
@@ -0,0 +1,137 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform release-beetmover-source-checksums into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from voluptuous import Optional
+
+from gecko_taskgraph.loader.single_dep import schema
+from gecko_taskgraph.transforms.beetmover import craft_release_properties
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
+from gecko_taskgraph.util.scriptworker import (
+ generate_beetmover_artifact_map,
+ generate_beetmover_upstream_artifacts,
+ get_beetmover_action_scope,
+ get_beetmover_bucket_scope,
+)
+
# Schema for beetmover-source-checksums tasks; extends the single-dep loader
# schema with optional task-description fields.
beetmover_checksums_description_schema = schema.extend(
    {
        # Label for the generated task.
        Optional("label"): str,
        Optional("treeherder"): task_description_schema["treeherder"],
        # Locale of the dependent signing task, if any.
        Optional("locale"): str,
        Optional("shipping-phase"): task_description_schema["shipping-phase"],
        Optional("shipping-product"): task_description_schema["shipping-product"],
        Optional("attributes"): task_description_schema["attributes"],
    }
)

transforms = TransformSequence()
# Validate incoming jobs against the schema before any transform runs.
transforms.add_validate(beetmover_checksums_description_schema)
+
+
@transforms.add
def make_beetmover_checksums_description(config, jobs):
    """Turn a checksums-signing dependency into a beetmover task description."""
    for job in jobs:
        dep_job = job["primary-dependency"]

        # Treeherder defaults; the platform is taken from the dependency's
        # treeherder machine platform.
        treeherder = job.get("treeherder", {})
        treeherder.setdefault("symbol", "BMcss(N)")
        dep_th_platform = (
            dep_job.task.get("extra", {})
            .get("treeherder", {})
            .get("machine", {})
            .get("platform", "")
        )
        treeherder.setdefault("platform", f"{dep_th_platform}/opt")
        treeherder.setdefault("tier", 1)
        treeherder.setdefault("kind", "build")

        build_platform = dep_job.attributes.get("build_platform")
        product = "devedition" if "devedition" in build_platform else "firefox"

        # Depend on the signing task itself plus any of its beetmover deps.
        dependencies = {dep_job.kind: dep_job.label}
        dependencies.update(
            (k, v)
            for k, v in dep_job.dependencies.items()
            if k.startswith("beetmover")
        )

        attributes = copy_attributes_from_dependent_job(dep_job)
        attributes.update(job.get("attributes", {}))

        task = {
            "label": job["label"],
            "description": "Beetmover submission of checksums for source file",
            "worker-type": "beetmover",
            "scopes": [
                get_beetmover_bucket_scope(config),
                get_beetmover_action_scope(config),
            ],
            "dependencies": dependencies,
            "attributes": attributes,
            "run-on-projects": dep_job.attributes.get("run_on_projects"),
            "treeherder": treeherder,
            "extra": {"product": product},
        }

        # Shipping metadata is optional and only copied when present.
        for optional_key in ("shipping-phase", "shipping-product"):
            if optional_key in job:
                task[optional_key] = job[optional_key]

        yield task
+
+
@transforms.add
def make_beetmover_checksums_worker(config, jobs):
    """Attach the beetmover worker payload to checksums tasks.

    Each job must have exactly two dependencies: one beetmover task and one
    signing task; anything else raises NotImplementedError.
    """
    for job in jobs:
        if len(job["dependencies"]) != 2:
            raise NotImplementedError("Beetmover checksums must have two dependencies.")

        locale = job["attributes"].get("locale")
        platform = job["attributes"]["build_platform"]

        # Classify the two dependencies; both roles must be filled.
        refs = {"beetmover": None, "signing": None}
        for dependency in job["dependencies"]:
            role = "beetmover" if dependency.startswith("beetmover") else "signing"
            refs[role] = f"<{dependency}>"
        if None in refs.values():
            raise NotImplementedError(
                "Beetmover checksums must have a beetmover and signing dependency!"
            )

        worker = {
            "implementation": "beetmover",
            "release-properties": craft_release_properties(config, job),
            "upstream-artifacts": generate_beetmover_upstream_artifacts(
                config, job, platform, locale
            ),
            "artifact-map": generate_beetmover_artifact_map(
                config, job, platform=platform
            ),
        }

        if locale:
            worker["locale"] = locale
        job["worker"] = worker

        yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/bootstrap.py b/taskcluster/gecko_taskgraph/transforms/bootstrap.py
new file mode 100644
index 0000000000..e4537cab01
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/bootstrap.py
@@ -0,0 +1,132 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import Schema
+from voluptuous import Any, Optional, Required
+
+transforms = TransformSequence()
+
# Schema for bootstrap task definitions (validated below before the
# bootstrap_tasks transform runs).
bootstrap_schema = Schema(
    {
        # Name of the bootstrap task.
        Required("name"): str,
        # Name of the docker image. Ideally, we'd also have tasks for mac and windows,
        # but we unfortunately don't have workers barebones enough for such testing
        # to be satisfactory.
        Required("image"): Any(str, {"in-tree": str}),
        # Initialization commands.
        Required("pre-commands"): [str],
        # relative path (from config.path) to the file task was defined in
        Optional("job-from"): str,
    }
)


transforms.add_validate(bootstrap_schema)
+
+
@transforms.add
def bootstrap_tasks(config, tasks):
    """Expand each bootstrap definition into one docker task per application
    (browser and mobile_android) that fetches bootstrap.py from the head
    revision, runs it non-interactively, then configures and builds."""
    for task in tasks:
        name = task.pop("name")
        image = task.pop("image")
        pre_commands = task.pop("pre-commands")

        head_repo = config.params["head_repository"]
        head_rev = config.params["head_rev"]

        # Get all the non macos/windows local toolchains (the only ones bootstrap can use),
        # and use them as dependencies for the tasks we create, so that they don't start
        # before any potential toolchain task that would be triggered on the same push
        # (which would lead to bootstrap failing).
        dependencies = {
            name: name
            for name, task in config.kind_dependencies_tasks.items()
            if task.attributes.get("local-toolchain")
            and not name.startswith(("toolchain-macos", "toolchain-win"))
        }
        # We don't test the artifacts variants, or js, because they are essentially subsets.
        # Mobile and browser are different enough to warrant testing them separately.
        for app in ("browser", "mobile_android"):
            commands = pre_commands + [
                # MOZ_AUTOMATION changes the behavior, and we want something closer to user
                # machines.
                "unset MOZ_AUTOMATION",
                f"curl -O {head_repo}/raw-file/{head_rev}/python/mozboot/bin/bootstrap.py",
                f"python3 bootstrap.py --no-interactive --application-choice {app}",
                "cd mozilla-unified",
                # After bootstrap, configure should go through without its own auto-bootstrap.
                "./mach configure --disable-bootstrap",
                # Then a build should go through too.
                "./mach build",
            ]

            # Per-OS mozboot module(s) whose changes should also trigger this
            # task (fed into skip-unless-changed below).
            os_specific = []
            if app == "mobile_android":
                os_specific += ["android*"]
            for os, filename in (
                ("debian", "debian.py"),
                ("ubuntu", "debian.py"),
                ("fedora", "centosfedora.py"),
                ("rockylinux", "centosfedora.py"),
                ("opensuse", "opensuse.py"),
                ("gentoo", "gentoo.py"),
                ("archlinux", "archlinux.py"),
                ("voidlinux", "void.py"),
            ):
                if name.startswith(os):
                    os_specific.append(filename)
                    break
            else:
                # Unknown distro prefix: fail loudly rather than build a task
                # that can never be optimized correctly.
                raise Exception(f"Missing OS specific bootstrap file for {name}")

            taskdesc = {
                "label": f"{config.kind}-{name}-{app}",
                "description": f"Bootstrap {app} build on {name}",
                "always-target": True,
                "scopes": [],
                "treeherder": {
                    "symbol": f"Boot({name})",
                    "platform": {
                        "browser": "linux64/opt",
                        "mobile_android": "android-5-0-armv7/opt",
                    }[app],
                    "kind": "other",
                    "tier": 2,
                },
                "run-on-projects": ["trunk"],
                "worker-type": "b-linux-gcp",
                "worker": {
                    "implementation": "docker-worker",
                    "docker-image": image,
                    "os": "linux",
                    "env": {
                        "GECKO_HEAD_REPOSITORY": head_repo,
                        "GECKO_HEAD_REV": head_rev,
                        "MACH_NO_TERMINAL_FOOTER": "1",
                        "MOZ_SCM_LEVEL": config.params["level"],
                    },
                    # Run all steps in one shell so a failure stops the chain.
                    "command": ["sh", "-c", "-x", "-e", " && ".join(commands)],
                    "max-run-time": 7200,
                },
                "dependencies": dependencies,
                "optimization": {
                    "skip-unless-changed": [
                        "python/mozboot/bin/bootstrap.py",
                        "python/mozboot/mozboot/base.py",
                        "python/mozboot/mozboot/bootstrap.py",
                        "python/mozboot/mozboot/linux_common.py",
                        "python/mozboot/mozboot/mach_commands.py",
                        "python/mozboot/mozboot/mozconfig.py",
                        "python/mozboot/mozboot/rust.py",
                        "python/mozboot/mozboot/sccache.py",
                        "python/mozboot/mozboot/util.py",
                    ]
                    + [f"python/mozboot/mozboot/{f}" for f in os_specific]
                },
            }

            yield taskdesc
diff --git a/taskcluster/gecko_taskgraph/transforms/bouncer_aliases.py b/taskcluster/gecko_taskgraph/transforms/bouncer_aliases.py
new file mode 100644
index 0000000000..38f1aa136a
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/bouncer_aliases.py
@@ -0,0 +1,108 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Add from parameters.yml into bouncer submission tasks.
+"""
+
+
+import logging
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+
+from gecko_taskgraph.transforms.bouncer_submission import craft_bouncer_product_name
+from gecko_taskgraph.transforms.bouncer_submission_partners import (
+ craft_partner_bouncer_product_name,
+)
+from gecko_taskgraph.util.attributes import release_level
+from gecko_taskgraph.util.partners import get_partners_to_be_published
+from gecko_taskgraph.util.scriptworker import get_release_config
+
+logger = logging.getLogger(__name__)
+
+transforms = TransformSequence()
+
+
@transforms.add
def make_task_worker(config, jobs):
    """Resolve keyed-by fields and attach bouncer alias entries.

    Jobs that end up with no bouncer entries are dropped (with a warning)
    rather than submitted empty.
    """
    for job in jobs:
        resolve_keyed_by(
            job,
            "worker-type",
            item_name=job["name"],
            **{"release-level": release_level(config.params["project"])},
        )
        resolve_keyed_by(
            job,
            "scopes",
            item_name=job["name"],
            **{"release-level": release_level(config.params["project"])},
        )
        resolve_keyed_by(
            job,
            "bouncer-products-per-alias",
            item_name=job["name"],
            **{"release-type": config.params["release_type"]},
        )
        if "partner-bouncer-products-per-alias" in job:
            resolve_keyed_by(
                job,
                "partner-bouncer-products-per-alias",
                item_name=job["name"],
                **{"release-type": config.params["release_type"]},
            )

        job["worker"]["entries"] = craft_bouncer_entries(config, job)

        # The per-alias config has been folded into worker entries; drop it.
        del job["bouncer-products-per-alias"]
        job.pop("partner-bouncer-products-per-alias", None)

        if job["worker"]["entries"]:
            yield job
        else:
            # Logger.warn is a deprecated alias of Logger.warning.
            logger.warning(
                'No bouncer entries defined in bouncer submission task for "{}". \
Job deleted.'.format(
                    job["name"]
                )
            )
+
+
def craft_bouncer_entries(config, job):
    """Build the alias -> bouncer-product-name mapping for a job.

    Regular aliases come from "bouncer-products-per-alias"; partner aliases
    (when configured) expand the PARTNER placeholder once per published
    partner/sub-config pair.
    """
    release_config = get_release_config(config)
    product = job["shipping-product"]
    current_version = release_config["version"]

    entries = {}
    for bouncer_alias, bouncer_product in job["bouncer-products-per-alias"].items():
        entries[bouncer_alias] = craft_bouncer_product_name(
            product,
            bouncer_product,
            current_version,
        )

    partner_products_per_alias = job.get("partner-bouncer-products-per-alias")
    if partner_products_per_alias:
        for partner, sub_config_name, _ in get_partners_to_be_published(config):
            for bouncer_alias, bouncer_product in partner_products_per_alias.items():
                alias = bouncer_alias.replace(
                    "PARTNER", f"{partner}-{sub_config_name}"
                )
                entries[alias] = craft_partner_bouncer_product_name(
                    product,
                    bouncer_product,
                    current_version,
                    partner,
                    sub_config_name,
                )

    return entries
diff --git a/taskcluster/gecko_taskgraph/transforms/bouncer_check.py b/taskcluster/gecko_taskgraph/transforms/bouncer_check.py
new file mode 100644
index 0000000000..9261ef9463
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/bouncer_check.py
@@ -0,0 +1,111 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import json
+import logging
+from pipes import quote as shell_quote
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+
+from gecko_taskgraph.util.attributes import release_level
+from gecko_taskgraph.util.scriptworker import get_release_config
+
+logger = logging.getLogger(__name__)
+
+transforms = TransformSequence()
+
+
+@transforms.add
def add_command(config, jobs):
    """Point each job's run section at the bouncer_check mozharness script."""
    for job in jobs:
        job["run"]["using"] = "mach"
        job["run"]["mach"] = [
            "python",
            "testing/mozharness/scripts/release/bouncer_check.py",
        ]
        yield job
+
+
@transforms.add
def add_previous_versions(config, jobs):
    """Append a --previous-version flag for every configured partial."""
    release_config = get_release_config(config)
    partials = release_config.get("partial_versions")
    if not partials:
        yield from jobs
        return

    # "110.0build1, 111.0build2" -> ["--previous-version=110.0", ...]
    previous_version_args = [
        "--previous-version={}".format(spec.split("build")[0].strip())
        for spec in partials.split(",")
    ]

    for job in jobs:
        job["run"]["mach"].extend(previous_version_args)
        yield job
+
+
@transforms.add
def handle_keyed_by(config, jobs):
    """Resolve fields that can be keyed by project, etc."""
    keyed_fields = (
        "run.config",
        "run.product-field",
        "run.extra-config",
    )

    version = get_release_config(config)["version"]

    for job in jobs:
        context = {
            "project": config.params["project"],
            "release-level": release_level(config.params["project"]),
            "release-type": config.params["release_type"],
        }
        for field in keyed_fields:
            resolve_keyed_by(
                item=job, field=field, item_name=job["name"], **context
            )

        run = job["run"]
        for cfg in run["config"]:
            run["mach"].extend(["--config", cfg])

        if config.kind == "cron-bouncer-check":
            run["mach"].extend(
                [
                    "--product-field={}".format(run["product-field"]),
                    "--products-url={}".format(run["products-url"]),
                ]
            )
            del run["product-field"]
            del run["products-url"]
        elif config.kind == "release-bouncer-check":
            run["mach"].append(f"--version={version}")

        del run["config"]

        # Pass any extra mozharness config through the environment.
        if "extra-config" in run:
            env = job["worker"].setdefault("env", {})
            env["EXTRA_MOZHARNESS_CONFIG"] = json.dumps(
                run["extra-config"], sort_keys=True
            )
            del run["extra-config"]

        yield job
+
+
+@transforms.add
def command_to_string(config, jobs):
    """Convert command to string to make it work properly with run-task."""
    # shlex.quote is the supported spelling; pipes.quote was a deprecated
    # alias of the same function and the pipes module was removed in
    # Python 3.13 (PEP 594).
    import shlex

    for job in jobs:
        job["run"]["mach"] = " ".join(
            shlex.quote(part) for part in job["run"]["mach"]
        )
        yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/bouncer_locations.py b/taskcluster/gecko_taskgraph/transforms/bouncer_locations.py
new file mode 100644
index 0000000000..e755b73c27
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/bouncer_locations.py
@@ -0,0 +1,35 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import logging
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+
+logger = logging.getLogger(__name__)
+
+
+transforms = TransformSequence()
+
+
@transforms.add
def make_task_worker(config, jobs):
    """Resolve project-keyed fields and move bouncer-products onto the worker."""
    for job in jobs:
        for field in ("worker-type", "scopes", "bouncer-products"):
            resolve_keyed_by(
                job,
                field,
                item_name=job["name"],
                project=config.params["project"],
            )

        # The resolved product list belongs on the worker payload.
        job["worker"]["bouncer-products"] = job.pop("bouncer-products")

        yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/bouncer_submission.py b/taskcluster/gecko_taskgraph/transforms/bouncer_submission.py
new file mode 100644
index 0000000000..d6320a9312
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/bouncer_submission.py
@@ -0,0 +1,335 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Add from parameters.yml into bouncer submission tasks.
+"""
+
+
+import copy
+import logging
+
+import attr
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+
+from gecko_taskgraph.transforms.l10n import parse_locales_file
+from gecko_taskgraph.util.attributes import release_level
+from gecko_taskgraph.util.scriptworker import get_release_config
+
+logger = logging.getLogger(__name__)
+
+
# Maps bouncer platform aliases to the platform directory names used under
# the FTP-style layout on the archive server.
FTP_PLATFORMS_PER_BOUNCER_PLATFORM = {
    "linux": "linux-i686",
    "linux64": "linux-x86_64",
    "osx": "mac",
    "win": "win32",
    "win64": "win64",
    "win64-aarch64": "win64-aarch64",
}

# :lang is interpolated by bouncer at runtime
CANDIDATES_PATH_TEMPLATE = "/{ftp_product}/candidates/{version}-candidates/build{build_number}/\
{update_folder}{ftp_platform}/:lang/{file}"
RELEASES_PATH_TEMPLATE = "/{ftp_product}/releases/{version}/\
{update_folder}{ftp_platform}/:lang/{file}"


# Per-bouncer-product configuration: an optional product-name postfix, the
# path template to format, and the file name per bouncer platform (the
# "default" key applies to any platform without an explicit entry; products
# with no "default" are skipped on unlisted platforms).
CONFIG_PER_BOUNCER_PRODUCT = {
    "complete-mar": {
        "name_postfix": "-Complete",
        "path_template": RELEASES_PATH_TEMPLATE,
        "file_names": {
            "default": "{product}-{version}.complete.mar",
        },
    },
    "complete-mar-candidates": {
        "name_postfix": "build{build_number}-Complete",
        "path_template": CANDIDATES_PATH_TEMPLATE,
        "file_names": {
            "default": "{product}-{version}.complete.mar",
        },
    },
    "installer": {
        "path_template": RELEASES_PATH_TEMPLATE,
        "file_names": {
            "linux": "{product}-{version}.tar.bz2",
            "linux64": "{product}-{version}.tar.bz2",
            "osx": "{pretty_product}%20{version}.dmg",
            "win": "{pretty_product}%20Setup%20{version}.exe",
            "win64": "{pretty_product}%20Setup%20{version}.exe",
            "win64-aarch64": "{pretty_product}%20Setup%20{version}.exe",
        },
    },
    "partial-mar": {
        "name_postfix": "-Partial-{previous_version}",
        "path_template": RELEASES_PATH_TEMPLATE,
        "file_names": {
            "default": "{product}-{previous_version}-{version}.partial.mar",
        },
    },
    "partial-mar-candidates": {
        "name_postfix": "build{build_number}-Partial-{previous_version}build{previous_build}",
        "path_template": CANDIDATES_PATH_TEMPLATE,
        "file_names": {
            "default": "{product}-{previous_version}-{version}.partial.mar",
        },
    },
    "stub-installer": {
        "name_postfix": "-stub",
        # We currently have a sole win32 stub installer that is to be used
        # in all windows platforms to toggle between full installers
        "path_template": RELEASES_PATH_TEMPLATE.replace("{ftp_platform}", "win32"),
        "file_names": {
            "win": "{pretty_product}%20Installer.exe",
            "win64": "{pretty_product}%20Installer.exe",
            "win64-aarch64": "{pretty_product}%20Installer.exe",
        },
    },
    "msi": {
        "name_postfix": "-msi-SSL",
        "path_template": RELEASES_PATH_TEMPLATE,
        "file_names": {
            "win": "{pretty_product}%20Setup%20{version}.msi",
            "win64": "{pretty_product}%20Setup%20{version}.msi",
        },
    },
    "msix": {
        "name_postfix": "-msix-SSL",
        # msix is multi-locale, so the :lang placeholder is fixed to "multi".
        "path_template": RELEASES_PATH_TEMPLATE.replace(":lang", "multi"),
        "file_names": {
            "win": "{pretty_product}%20Setup%20{version}.msix",
            "win64": "{pretty_product}%20Setup%20{version}.msix",
        },
    },
    "pkg": {
        "name_postfix": "-pkg-SSL",
        "path_template": RELEASES_PATH_TEMPLATE,
        "file_names": {
            "osx": "{pretty_product}%20{version}.pkg",
        },
    },
    "langpack": {
        "name_postfix": "-langpack-SSL",
        # Langpacks live under an "xpi" directory instead of per-locale dirs;
        # the file itself is named after the locale.
        "path_template": RELEASES_PATH_TEMPLATE.replace(":lang", "xpi"),
        "file_names": {"default": ":lang.xpi"},
    },
}
# installer-ssl is the plain installer entry, forced to be served over SSL.
CONFIG_PER_BOUNCER_PRODUCT["installer-ssl"] = copy.deepcopy(
    CONFIG_PER_BOUNCER_PRODUCT["installer"]
)
CONFIG_PER_BOUNCER_PRODUCT["installer-ssl"]["name_postfix"] = "-SSL"
+
+transforms = TransformSequence()
+
+
@transforms.add
def make_task_worker(config, jobs):
    """Resolve keyed-by fields and build the bouncer submission payload.

    Resolves `worker-type` and `scopes` by release level and
    `bouncer-products` by release type, computes the locale list from the
    job's locales file, and crafts one bouncer entry per product. Jobs that
    end up with no entries are dropped with a warning.
    """
    for job in jobs:
        resolve_keyed_by(
            job,
            "worker-type",
            item_name=job["name"],
            **{"release-level": release_level(config.params["project"])}
        )
        resolve_keyed_by(
            job,
            "scopes",
            item_name=job["name"],
            **{"release-level": release_level(config.params["project"])}
        )
        resolve_keyed_by(
            job,
            "bouncer-products",
            item_name=job["name"],
            **{"release-type": config.params["release_type"]}
        )

        # No need to filter out ja-JP-mac, we need to upload both; but we do
        # need to filter out the platforms they come with
        all_locales = sorted(
            locale
            for locale in parse_locales_file(job["locales-file"]).keys()
            if locale not in ("linux", "win32", "osx")
        )

        job["worker"]["locales"] = all_locales
        job["worker"]["entries"] = craft_bouncer_entries(config, job)

        del job["locales-file"]
        del job["bouncer-platforms"]
        del job["bouncer-products"]

        if job["worker"]["entries"]:
            yield job
        else:
            # Logger.warn is a deprecated alias of Logger.warning.
            logger.warning(
                'No bouncer entries defined in bouncer submission task for "{}". \
Job deleted.'.format(
                    job["name"]
                )
            )
+
+
def craft_bouncer_entries(config, job):
    """Build the bouncer entry dict for every product/previous-version pair.

    When the release defines no partials, partial-mar products are dropped
    (with a warning) and a single `None` previous version keeps the
    remaining products iterating.
    """
    release_config = get_release_config(config)

    product = job["shipping-product"]
    bouncer_platforms = job["bouncer-platforms"]

    current_version = release_config["version"]
    current_build_number = release_config["build_number"]

    bouncer_products = job["bouncer-products"]
    previous_versions_string = release_config.get("partial_versions", None)
    if previous_versions_string:
        previous_versions = previous_versions_string.split(", ")
    else:
        # Logger.warn is a deprecated alias of Logger.warning.
        logger.warning(
            'No partials defined! Bouncer submission task won\'t send any \
partial-related entry for "{}"'.format(
                job["name"]
            )
        )
        bouncer_products = [
            bouncer_product
            for bouncer_product in bouncer_products
            if "partial" not in bouncer_product
        ]
        previous_versions = [None]

    project = config.params["project"]

    return {
        craft_bouncer_product_name(
            product,
            bouncer_product,
            current_version,
            current_build_number,
            previous_version,
        ): {
            "options": {
                # msix bundles all locales into one package, so bouncer must
                # not append per-locale variants.
                "add_locales": "msix" not in bouncer_product,
                "ssl_only": craft_ssl_only(bouncer_product, project),
            },
            "paths_per_bouncer_platform": craft_paths_per_bouncer_platform(
                product,
                bouncer_product,
                bouncer_platforms,
                current_version,
                current_build_number,
                previous_version,
            ),
        }
        for bouncer_product in bouncer_products
        for previous_version in previous_versions
    }
+
+
def craft_paths_per_bouncer_platform(
    product,
    bouncer_product,
    bouncer_platforms,
    current_version,
    current_build_number,
    previous_version=None,
):
    """Return {bouncer_platform: archive-relative path} for one product."""
    product_config = CONFIG_PER_BOUNCER_PRODUCT[bouncer_product]
    file_names_per_platform = product_config["file_names"]
    path_template = product_config["path_template"]
    file_name_product = _craft_filename_product(product)

    paths = {}
    for bouncer_platform in bouncer_platforms:
        file_name_template = file_names_per_platform.get(
            bouncer_platform, file_names_per_platform.get("default", None)
        )
        if not file_name_template:
            # Some bouncer product like stub-installer are only meant to be
            # on Windows. Thus no default value is defined there.
            continue

        file_name = file_name_template.format(
            product=file_name_product,
            pretty_product=file_name_product.capitalize(),
            version=current_version,
            previous_version=split_build_data(previous_version)[0],
        )
        paths[bouncer_platform] = path_template.format(
            ftp_product=_craft_ftp_product(product),
            version=current_version,
            build_number=current_build_number,
            update_folder="update/" if "-mar" in bouncer_product else "",
            ftp_platform=FTP_PLATFORMS_PER_BOUNCER_PLATFORM[bouncer_platform],
            file=file_name,
        )

    return paths
+
+
+def _craft_ftp_product(product):
+ return product.lower()
+
+
+def _craft_filename_product(product):
+ return "firefox" if product == "devedition" else product
+
+
@attr.s
class InvalidSubstitution:
    """Placeholder that raises as soon as it is rendered into a string.

    Used where a template substitution must never actually happen (e.g. a
    partial postfix when no previous version exists); formatting it calls
    __str__, which raises instead of producing output.
    """

    # Human-readable reason this placeholder must never be rendered.
    # NOTE(review): __str__ raises a hard-coded message and ignores `error`
    # — confirm whether it should raise Exception(self.error) instead.
    error = attr.ib(type=str)

    def __str__(self):
        raise Exception("Partial is being processed, but no previous version defined.")
+
+
def craft_bouncer_product_name(
    product,
    bouncer_product,
    current_version,
    current_build_number=None,
    previous_version=None,
):
    """Build a bouncer product name such as "Firefox-99.0-Partial-98.0"."""
    if previous_version is None:
        # Guard value: formatting it into a postfix raises, catching partial
        # products crafted without a previous version.
        previous_version = previous_build = InvalidSubstitution(
            "Partial is being processed, but no previous version defined."
        )
    else:
        previous_version, previous_build = split_build_data(previous_version)

    postfix_template = CONFIG_PER_BOUNCER_PRODUCT[bouncer_product].get(
        "name_postfix", ""
    )
    postfix = postfix_template.format(
        build_number=current_build_number,
        previous_version=previous_version,
        previous_build=previous_build,
    )

    return "{product}-{version}{postfix}".format(
        product=product.capitalize(), version=current_version, postfix=postfix
    )
+
+
def craft_ssl_only(bouncer_product, project):
    """Return True when the bouncer entry must be served over SSL only."""
    # XXX ESR is the only channel where we force serve the installer over SSL
    if bouncer_product == "installer" and "-esr" in project:
        return True

    non_ssl_products = {
        "complete-mar",
        "complete-mar-candidates",
        "installer",
        "partial-mar",
        "partial-mar-candidates",
    }
    return bouncer_product not in non_ssl_products
+
+
def split_build_data(version):
    """Split "99.0build2" into its version and build-number parts.

    When no build number is embedded (or version is falsy), the build part
    is an InvalidSubstitution that raises if ever rendered.
    """
    if not version or "build" not in version:
        return version, InvalidSubstitution("k")
    return version.split("build")
diff --git a/taskcluster/gecko_taskgraph/transforms/bouncer_submission_partners.py b/taskcluster/gecko_taskgraph/transforms/bouncer_submission_partners.py
new file mode 100644
index 0000000000..0f298cb120
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/bouncer_submission_partners.py
@@ -0,0 +1,193 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Add from parameters.yml into bouncer submission tasks.
+"""
+
+
+import logging
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+
+from gecko_taskgraph.transforms.bouncer_submission import (
+ CONFIG_PER_BOUNCER_PRODUCT as CONFIG_PER_BOUNCER_PRODUCT_VANILLA,
+)
+from gecko_taskgraph.transforms.bouncer_submission import (
+ FTP_PLATFORMS_PER_BOUNCER_PLATFORM,
+ _craft_filename_product,
+ _craft_ftp_product,
+)
+from gecko_taskgraph.util.attributes import release_level
+from gecko_taskgraph.util.partners import (
+ check_if_partners_enabled,
+ get_partners_to_be_published,
+)
+from gecko_taskgraph.util.scriptworker import get_release_config
+
+logger = logging.getLogger(__name__)
+
+
# Maps partner-repack build platforms to bouncer platform aliases.
PARTNER_PLATFORMS_TO_BOUNCER = {
    "linux-shippable": "linux",
    "linux64-shippable": "linux64",
    "macosx64-shippable": "osx",
    "win32-shippable": "win",
    "win64-shippable": "win64",
    "win64-aarch64-shippable": "win64-aarch64",
}

# :lang is interpolated by bouncer at runtime
RELEASES_PARTNERS_PATH_TEMPLATE = "/{ftp_product}/releases/partners/{partner}/{sub_config}/\
{version}/{ftp_platform}/:lang/{file}"

# Partner variants of the vanilla bouncer products; file names are shared
# with the non-partner bouncer_submission configuration.
CONFIG_PER_BOUNCER_PRODUCT = {
    "installer": {
        "name_postfix": "-{partner}-{sub_config}",
        "path_template": RELEASES_PARTNERS_PATH_TEMPLATE,
        "file_names": CONFIG_PER_BOUNCER_PRODUCT_VANILLA["installer"]["file_names"],
    },
    "stub-installer": {
        "name_postfix": "-{partner}-{sub_config}-stub",
        # We currently have a sole win32 stub installer that is to be used
        # in all windows platforms to toggle between full installers
        "path_template": RELEASES_PARTNERS_PATH_TEMPLATE.replace(
            "{ftp_platform}", "win32"
        ),
        "file_names": CONFIG_PER_BOUNCER_PRODUCT_VANILLA["stub-installer"][
            "file_names"
        ],
    },
}

transforms = TransformSequence()
# Drop all jobs in this kind unless partner repacks are enabled.
transforms.add(check_if_partners_enabled)
+
+
@transforms.add
def make_task_worker(config, jobs):
    """Resolve keyed-by fields and build the partner bouncer payload.

    Jobs that produce no bouncer entries are silently dropped.
    """
    for job in jobs:
        level = release_level(config.params["project"])
        resolve_keyed_by(
            job,
            "worker-type",
            item_name=job["name"],
            **{"release-level": level}
        )
        resolve_keyed_by(
            job,
            "scopes",
            item_name=job["name"],
            **{"release-level": level}
        )
        resolve_keyed_by(
            job,
            "bouncer-products",
            item_name=job["name"],
            **{"release-type": config.params["release_type"]}
        )

        # the schema requires at least one locale but this will not be used
        job["worker"]["locales"] = ["fake"]
        job["worker"]["entries"] = craft_bouncer_entries(config, job)

        for consumed_key in ("locales-file", "bouncer-platforms", "bouncer-products"):
            del job[consumed_key]

        if job["worker"]["entries"]:
            yield job
+
+
def craft_bouncer_entries(config, job):
    """Build one bouncer entry per (partner, sub-config, bouncer product)."""
    release_config = get_release_config(config)

    product = job["shipping-product"]
    current_version = release_config["version"]
    bouncer_products = job["bouncer-products"]

    entries = {}
    for partner, sub_config_name, platforms in get_partners_to_be_published(config):
        bouncer_platforms = [PARTNER_PLATFORMS_TO_BOUNCER[p] for p in platforms]
        for bouncer_product in bouncer_products:
            entry_name = craft_partner_bouncer_product_name(
                product, bouncer_product, current_version, partner, sub_config_name
            )
            entries[entry_name] = {
                "options": {
                    # partners may use different sets of locales
                    "add_locales": False,
                    "ssl_only": craft_ssl_only(bouncer_product),
                },
                "paths_per_bouncer_platform": craft_paths_per_bouncer_platform(
                    product,
                    bouncer_product,
                    bouncer_platforms,
                    current_version,
                    partner,
                    sub_config_name,
                ),
            }
    return entries
+
+
def craft_paths_per_bouncer_platform(
    product, bouncer_product, bouncer_platforms, current_version, partner, sub_config
):
    """Return {bouncer_platform: archive-relative path} for one partner product."""
    product_config = CONFIG_PER_BOUNCER_PRODUCT[bouncer_product]
    file_names_per_platform = product_config["file_names"]
    path_template = product_config["path_template"]
    file_name_product = _craft_filename_product(product)

    paths = {}
    for bouncer_platform in bouncer_platforms:
        file_name_template = file_names_per_platform.get(
            bouncer_platform, file_names_per_platform.get("default", None)
        )
        if not file_name_template:
            # Some bouncer product like stub-installer are only meant to be
            # on Windows. Thus no default value is defined there.
            continue

        file_name = file_name_template.format(
            product=file_name_product,
            pretty_product=file_name_product.capitalize(),
            version=current_version,
        )
        paths[bouncer_platform] = path_template.format(
            ftp_product=_craft_ftp_product(product),
            version=current_version,
            ftp_platform=FTP_PLATFORMS_PER_BOUNCER_PLATFORM[bouncer_platform],
            partner=partner,
            sub_config=sub_config,
            file=file_name,
        )

    return paths
+
+
def craft_partner_bouncer_product_name(
    product, bouncer_product, current_version, partner, sub_config
):
    """Build a partner bouncer product name such as "Firefox-99.0-acme-foo"."""
    postfix_template = CONFIG_PER_BOUNCER_PRODUCT[bouncer_product].get(
        "name_postfix", ""
    )
    postfix = postfix_template.format(partner=partner, sub_config=sub_config)
    return "{product}-{version}{postfix}".format(
        product=product.capitalize(), version=current_version, postfix=postfix
    )
+
+
def craft_ssl_only(bouncer_product):
    """Only the stub installer is forced over SSL for partner products."""
    return bouncer_product == "stub-installer"
diff --git a/taskcluster/gecko_taskgraph/transforms/build.py b/taskcluster/gecko_taskgraph/transforms/build.py
new file mode 100644
index 0000000000..94a4d71b0b
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/build.py
@@ -0,0 +1,238 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Apply some defaults and minor modifications to the jobs defined in the build
+kind.
+"""
+import logging
+
+from mozbuild.artifact_builds import JOB_CHOICES as ARTIFACT_JOBS
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+from taskgraph.util.treeherder import add_suffix
+
+from gecko_taskgraph.util.attributes import RELEASE_PROJECTS, is_try, release_level
+from gecko_taskgraph.util.workertypes import worker_type_implementation
+
+logger = logging.getLogger(__name__)
+
+transforms = TransformSequence()
+
+
@transforms.add
def set_defaults(config, jobs):
    """Set defaults, including those that differ per worker implementation"""
    for job in jobs:
        treeherder = job["treeherder"]
        treeherder.setdefault("kind", "build")
        treeherder.setdefault("tier", 1)

        worker = job.setdefault("worker", {})
        worker.setdefault("env", {})
        # Build artifacts are part of the release chain of trust.
        worker["chain-of-trust"] = True

        _, worker_os = worker_type_implementation(
            config.graph_config, config.params, job["worker-type"]
        )
        if worker_os == "linux":
            worker.setdefault("docker-image", {"in-tree": "debian11-amd64-build"})

        yield job
+
+
@transforms.add
def stub_installer(config, jobs):
    """Resolve the keyed-by `stub-installer` flag onto attributes and env.

    When the resolved flag is truthy it is recorded as a task attribute and
    exported to the build via USE_STUB_INSTALLER; the transform-level key is
    always removed from the job.
    """
    for job in jobs:
        resolve_keyed_by(
            job,
            "stub-installer",
            item_name=job["name"],
            project=config.params["project"],
            **{
                "release-type": config.params["release_type"],
            },
        )
        job.setdefault("attributes", {})
        # pop() replaces the `if key in job: del job[key]` dance and also
        # tolerates the key being absent after resolution.
        stub = job.pop("stub-installer", None)
        if stub:
            job["attributes"]["stub-installer"] = stub
            job["worker"]["env"].update({"USE_STUB_INSTALLER": "1"})
        yield job
+
+
@transforms.add
def resolve_shipping_product(config, jobs):
    """Resolve `shipping-product` keyed by the current release type."""
    for job in jobs:
        resolve_keyed_by(
            job,
            "shipping-product",
            item_name=job["name"],
            **{"release-type": config.params["release_type"]},
        )
        yield job
+
+
@transforms.add
def update_channel(config, jobs):
    """Resolve update/MAR channel settings and propagate them to the task.

    Each resolved value is mirrored into task attributes; the MAR channel
    ids additionally go into the build environment, and `update-channel`
    is forwarded to mozharness via run.extra-config.
    """
    keys = [
        "run.update-channel",
        "run.mar-channel-id",
        "run.accepted-mar-channel-ids",
    ]
    for job in jobs:
        job["worker"].setdefault("env", {})
        # All three keys may be keyed by project and/or release type.
        for key in keys:
            resolve_keyed_by(
                job,
                key,
                item_name=job["name"],
                **{
                    "project": config.params["project"],
                    "release-type": config.params["release_type"],
                },
            )
        update_channel = job["run"].pop("update-channel", None)
        if update_channel:
            job["run"].setdefault("extra-config", {})["update_channel"] = update_channel
            job["attributes"]["update-channel"] = update_channel
        mar_channel_id = job["run"].pop("mar-channel-id", None)
        if mar_channel_id:
            job["attributes"]["mar-channel-id"] = mar_channel_id
            job["worker"]["env"]["MAR_CHANNEL_ID"] = mar_channel_id
        accepted_mar_channel_ids = job["run"].pop("accepted-mar-channel-ids", None)
        if accepted_mar_channel_ids:
            job["attributes"]["accepted-mar-channel-ids"] = accepted_mar_channel_ids
            job["worker"]["env"]["ACCEPTED_MAR_CHANNEL_IDS"] = accepted_mar_channel_ids

        yield job
+
+
@transforms.add
def mozconfig(config, jobs):
    """Resolve `run.mozconfig-variant` and forward it to mozharness."""
    for job in jobs:
        resolve_keyed_by(
            job,
            "run.mozconfig-variant",
            item_name=job["name"],
            **{"release-type": config.params["release_type"]},
        )
        variant = job["run"].pop("mozconfig-variant", None)
        if variant:
            extra_config = job["run"].setdefault("extra-config", {})
            extra_config["mozconfig_variant"] = variant
        yield job
+
+
@transforms.add
def use_artifact(config, jobs):
    """Switch eligible build jobs over to artifact builds on try.

    Applies only when `use-artifact-builds` is set in the try task config,
    the job is a build whose index job-name supports artifact builds, and
    tests are packaged.
    """
    use_artifact = is_try(config.params) and config.params["try_task_config"].get(
        "use-artifact-builds", False
    )
    for job in jobs:
        eligible = (
            config.kind == "build"
            and use_artifact
            and job.get("index", {}).get("job-name") in ARTIFACT_JOBS
            # If tests aren't packaged, then we are not able to rebuild all
            # the packages
            and job["worker"]["env"].get("MOZ_AUTOMATION_PACKAGE_TESTS") == "1"
        )
        if eligible:
            job["treeherder"]["symbol"] = add_suffix(job["treeherder"]["symbol"], "a")
            job["worker"]["env"]["USE_ARTIFACT"] = "1"
            job["attributes"]["artifact-build"] = True
        yield job
+
+
@transforms.add
def use_profile_data(config, jobs):
    """Wire PGO builds up to their generate-profile task.

    Jobs opting in via `use-pgo` gain a dependency and fetch on the matching
    generate-profile task plus the env needed to apply the profile. PGO is
    skipped when disabled from try config or for artifact builds.
    """
    for job in jobs:
        use_pgo = job.pop("use-pgo", False)
        disable_pgo = config.params["try_task_config"].get("disable-pgo", False)
        artifact_build = job["attributes"].get("artifact-build")
        if not use_pgo or disable_pgo or artifact_build:
            yield job
            continue

        # If use_pgo is True, the task uses the generate-profile task of the
        # same name. Otherwise a task can specify a specific generate-profile
        # task to use in the use_pgo field.
        if use_pgo is True:
            name = job["name"]
        else:
            name = use_pgo
        dependencies = f"generate-profile-{name}"
        job.setdefault("dependencies", {})["generate-profile"] = dependencies
        job.setdefault("fetches", {})["generate-profile"] = ["profdata.tar.xz"]
        job["worker"]["env"].update({"TASKCLUSTER_PGO_PROFILE_USE": "1"})

        _, worker_os = worker_type_implementation(
            config.graph_config, config.params, job["worker-type"]
        )
        if worker_os == "linux":
            # LTO linkage needs more open files than the default from run-task.
            job["worker"]["env"].update({"MOZ_LIMIT_NOFILE": "8192"})

        # sccache would cache away the profile-use compilations, defeating PGO.
        if job.get("use-sccache"):
            raise Exception(
                "use-sccache is incompatible with use-pgo in {}".format(job["name"])
            )

        yield job
+
+
@transforms.add
def resolve_keys(config, jobs):
    """Resolve `use-sccache` keyed by the release level of this project."""
    for job in jobs:
        resolve_keyed_by(
            job,
            "use-sccache",
            item_name=job["name"],
            **{"release-level": release_level(config.params["project"])},
        )
        yield job
+
+
@transforms.add
def enable_full_crashsymbols(config, jobs):
    """Enable full crashsymbols on jobs with
    'enable-full-crashsymbols' set to True and on release branches, or
    on try"""
    full_symbol_branches = RELEASE_PROJECTS | {"toolchains", "try"}
    on_full_symbol_branch = config.params["project"] in full_symbol_branches
    for job in jobs:
        if job["attributes"].get("enable-full-crashsymbols") and on_full_symbol_branch:
            logger.debug("Enabling full symbol generation for %s", job["name"])
            job["worker"]["env"]["MOZ_ENABLE_FULL_SYMBOLS"] = "1"
        else:
            logger.debug("Disabling full symbol generation for %s", job["name"])
            job["attributes"].pop("enable-full-crashsymbols", None)
        yield job
+
+
+@transforms.add
+def set_expiry(config, jobs):
+ for job in jobs:
+ attributes = job["attributes"]
+ if (
+ "shippable" in attributes
+ and attributes["shippable"]
+ and config.kind
+ in {
+ "build",
+ }
+ ):
+ expiration_policy = "long"
+ else:
+ expiration_policy = "medium"
+
+ job["expiration-policy"] = expiration_policy
+ yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/build_attrs.py b/taskcluster/gecko_taskgraph/transforms/build_attrs.py
new file mode 100644
index 0000000000..9cda71718a
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/build_attrs.py
@@ -0,0 +1,50 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from taskgraph.transforms.base import TransformSequence
+
+from gecko_taskgraph.util.platforms import platform_family
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def set_build_attributes(config, jobs):
+ """
+ Set the build_platform and build_type attributes based on the job name.
+ Although not all jobs using this transform are actual "builds", the try
+ option syntax treats them as such, and this arranges the attributes
+ appropriately for that purpose.
+ """
+ for job in jobs:
+ build_platform, build_type = job["name"].split("/")
+
+ # pgo builds are represented as a different platform, type opt
+ if build_type == "pgo":
+ build_platform = build_platform + "-pgo"
+ build_type = "opt"
+
+ attributes = job.setdefault("attributes", {})
+ attributes.update(
+ {
+ "build_platform": build_platform,
+ "build_type": build_type,
+ }
+ )
+
+ yield job
+
+
@transforms.add
def set_schedules_optimization(config, jobs):
    """Set the `skip-unless-affected` optimization based on the build platform."""
    for job in jobs:
        if "when" in job:
            # don't add skip-unless-schedules if there's already a when defined
            yield job
            continue

        family = platform_family(job["attributes"]["build_platform"])
        job.setdefault("optimization", {"build": [family]})
        yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/build_fat_aar.py b/taskcluster/gecko_taskgraph/transforms/build_fat_aar.py
new file mode 100644
index 0000000000..61df2111d2
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/build_fat_aar.py
@@ -0,0 +1,78 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import copy
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.taskcluster import get_artifact_prefix
+
+from gecko_taskgraph.util.declarative_artifacts import get_geckoview_upstream_artifacts
+
+transforms = TransformSequence()
+
+
# Maps each Android build platform to the environment variable that the
# fat-aar build reads to locate that architecture's input AAR file name.
MOZ_ANDROID_FAT_AAR_ENV_MAP = {
    "android-arm-shippable": "MOZ_ANDROID_FAT_AAR_ARMEABI_V7A",
    "android-arm-shippable-lite": "MOZ_ANDROID_FAT_AAR_ARMEABI_V7A",
    "android-aarch64-shippable": "MOZ_ANDROID_FAT_AAR_ARM64_V8A",
    "android-aarch64-shippable-lite": "MOZ_ANDROID_FAT_AAR_ARM64_V8A",
    "android-x86-shippable": "MOZ_ANDROID_FAT_AAR_X86",
    "android-x86-shippable-lite": "MOZ_ANDROID_FAT_AAR_X86",
    "android-x86_64-shippable": "MOZ_ANDROID_FAT_AAR_X86_64",
    "android-x86_64-shippable-lite": "MOZ_ANDROID_FAT_AAR_X86_64",
    "android-arm-opt": "MOZ_ANDROID_FAT_AAR_ARMEABI_V7A",
    "android-aarch64-opt": "MOZ_ANDROID_FAT_AAR_ARM64_V8A",
    "android-x86-opt": "MOZ_ANDROID_FAT_AAR_X86",
    "android-x86_64-opt": "MOZ_ANDROID_FAT_AAR_X86_64",
}
+
+
@transforms.add
def set_fetches_and_locations(config, jobs):
    """Fetch each platform's AAR artifact and expose its file name via env.

    For every (platform -> build label) dependency, register a non-extracted
    fetch of that platform's .aar artifact and record the file name in the
    MOZ_ANDROID_FAT_AAR_* environment variable for that platform.
    """
    for job in jobs:
        dependencies = copy.deepcopy(job["dependencies"])

        for platform, label in dependencies.items():
            # Temporarily narrow the dependencies to a single "build" edge so
            # the artifact helpers resolve against this platform's build task;
            # the full map is restored after the loop.
            job["dependencies"] = {"build": label}

            aar_location = _get_aar_location(config, job, platform)
            prefix = get_artifact_prefix(job)
            if not prefix.endswith("/"):
                prefix = prefix + "/"
            # Fetch paths are expressed relative to the artifact prefix.
            if aar_location.startswith(prefix):
                aar_location = aar_location[len(prefix) :]

            job.setdefault("fetches", {}).setdefault(platform, []).append(
                {
                    "artifact": aar_location,
                    "extract": False,
                }
            )

            aar_file_name = aar_location.split("/")[-1]
            env_var = MOZ_ANDROID_FAT_AAR_ENV_MAP[platform]
            job["worker"]["env"][env_var] = aar_file_name

        # Restore the original dependency map.
        job["dependencies"] = dependencies

        yield job
+
+
def _get_aar_location(config, job, platform):
    """Return the unique .aar artifact path for `platform`.

    Raises ValueError unless exactly one AAR is found among the geckoview
    upstream artifacts of the job's maven packages.
    """
    artifacts_locations = []
    for package in job["attributes"]["maven_packages"]:
        artifacts_locations.extend(
            get_geckoview_upstream_artifacts(config, job, package, platform=platform)
        )

    aar_paths = [
        path for path in artifacts_locations[0]["paths"] if path.endswith(".aar")
    ]
    if len(aar_paths) != 1:
        raise ValueError(f"Only a single AAR must be given. Got: {aar_paths}")
    return aar_paths[0]
diff --git a/taskcluster/gecko_taskgraph/transforms/build_lints.py b/taskcluster/gecko_taskgraph/transforms/build_lints.py
new file mode 100644
index 0000000000..d1bd276059
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/build_lints.py
@@ -0,0 +1,59 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Apply some defaults and minor modifications to the jobs defined in the build
+kind.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def check_mozharness_perfherder_options(config, jobs):
+ """Verify that multiple jobs don't use the same perfherder bucket.
+
+ Build jobs record perfherder metrics by default. Perfherder metrics go
+ to a bucket derived by the platform by default. The name can further be
+ customized by the presence of "extra options" either defined in
+ mozharness sub-configs or in an environment variable.
+
+ This linter tries to verify that no 2 jobs will send Perfherder metrics
+ to the same bucket by looking for jobs not defining extra options when
+ their platform or mozharness config are otherwise similar.
+ """
+
+ SEEN_CONFIGS = {}
+
+ for job in jobs:
+ if job["run"]["using"] != "mozharness":
+ yield job
+ continue
+
+ worker = job.get("worker", {})
+
+ platform = job["treeherder"]["platform"]
+ primary_config = job["run"]["config"][0]
+ options = worker.get("env", {}).get("PERFHERDER_EXTRA_OPTIONS")
+ shippable = job.get("attributes", {}).get("shippable", False)
+
+ # This isn't strictly necessary. But the Perfherder code looking at the
+ # values we care about is only active on builds. So it doesn't make
+ # sense to run this linter elsewhere.
+ assert primary_config.startswith("builds/")
+
+ key = (platform, primary_config, shippable, options)
+
+ if key in SEEN_CONFIGS:
+ raise Exception(
+ "Non-unique Perfherder data collection for jobs %s-%s and %s: "
+ "set PERFHERDER_EXTRA_OPTIONS in worker environment variables "
+ "or use different mozconfigs"
+ % (config.kind, job["name"], SEEN_CONFIGS[key])
+ )
+
+ SEEN_CONFIGS[key] = "{}-{}".format(config.kind, job["name"])
+
+ yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/build_signing.py b/taskcluster/gecko_taskgraph/transforms/build_signing.py
new file mode 100644
index 0000000000..36f39f0b56
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/build_signing.py
@@ -0,0 +1,71 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the signing task into an actual task description.
+"""
+
+
+from taskgraph.transforms.base import TransformSequence
+
+from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
+from gecko_taskgraph.util.signed_artifacts import (
+ generate_specifications_of_artifacts_to_sign,
+)
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def add_signed_routes(config, jobs):
+ """Add routes corresponding to the routes of the build task
+ this corresponds to, with .signed inserted, for all gecko.v2 routes"""
+
+ for job in jobs:
+ dep_job = job["primary-dependency"]
+ enable_signing_routes = job.pop("enable-signing-routes", True)
+
+ job["routes"] = []
+ if dep_job.attributes.get("shippable") and enable_signing_routes:
+ for dep_route in dep_job.task.get("routes", []):
+ if not dep_route.startswith("index.gecko.v2"):
+ continue
+ branch = dep_route.split(".")[3]
+ rest = ".".join(dep_route.split(".")[4:])
+ job["routes"].append(f"index.gecko.v2.{branch}.signed.{rest}")
+
+ yield job
+
+
@transforms.add
def define_upstream_artifacts(config, jobs):
    """Declare which upstream artifacts the signing task must sign."""
    for job in jobs:
        dep_job = job["primary-dependency"]
        upstream_task = job.pop("upstream-artifact-task", dep_job)

        job["attributes"] = copy_attributes_from_dependent_job(dep_job)

        specifications = generate_specifications_of_artifacts_to_sign(
            config,
            job,
            keep_locale_template=False,
            kind=config.kind,
            dep_kind=upstream_task.kind,
        )

        # Notarization tasks run on scriptworkers; everything else is
        # treated as a build for chain-of-trust purposes.
        task_type = "scriptworker" if "notarization" in upstream_task.kind else "build"

        job["upstream-artifacts"] = [
            {
                "taskId": {"task-reference": f"<{upstream_task.kind}>"},
                "taskType": task_type,
                "paths": spec["artifacts"],
                "formats": spec["formats"],
            }
            for spec in specifications
        ]

        yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/cached_tasks.py b/taskcluster/gecko_taskgraph/transforms/cached_tasks.py
new file mode 100644
index 0000000000..bb7e6e6778
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/cached_tasks.py
@@ -0,0 +1,101 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+from collections import deque
+
+import taskgraph
+from taskgraph.transforms.base import TransformSequence
+
+from gecko_taskgraph.util.cached_tasks import add_optimization
+
+transforms = TransformSequence()
+
+
def order_tasks(config, tasks):
    """Iterate image tasks in an order where parent tasks come first."""
    prefix = config.kind + "-"

    queue = deque(tasks)
    known_labels = {item["label"] for item in queue}
    done = set()
    while queue:
        candidate = queue.popleft()
        # Dependencies within this kind must be emitted before the candidate.
        needed = {
            dep
            for dep in candidate.get("dependencies", {}).values()
            if dep.startswith(prefix)
        }
        if needed and not done.issuperset(needed & known_labels):
            # A same-kind parent hasn't been emitted yet; retry later.
            queue.append(candidate)
            continue
        done.add(candidate["label"])
        yield candidate
+
+
def format_task_digest(cached_task):
    """Render a cached-task attribute dict as "type/name/digest"."""
    return f'{cached_task["type"]}/{cached_task["name"]}/{cached_task["digest"]}'
+
+
@transforms.add
def cache_task(config, tasks):
    # Caching is skipped entirely in fast mode.
    if taskgraph.fast:
        for task in tasks:
            yield task
        return

    # Map of task label -> "type/name/digest" for every already-cached
    # dependency from other kinds.
    digests = {}
    for task in config.kind_dependencies_tasks.values():
        if (
            "cached_task" in task.attributes
            and task.attributes["cached_task"] is not False
        ):
            digests[task.label] = format_task_digest(task.attributes["cached_task"])

    # Process parents before children so a child can fold its parents'
    # digests into its own.
    for task in order_tasks(config, tasks):
        cache = task.pop("cache", None)
        if cache is None:
            yield task
            continue

        dependency_digests = []
        for p in task.get("dependencies", {}).values():
            if p in digests:
                dependency_digests.append(digests[p])
            elif config.params["project"] == "toolchains":
                # The toolchains repository uses non-cached toolchain artifacts. Allow
                # tasks to use them.
                cache = None
                break
            else:
                raise Exception(
                    "Cached task {} has uncached parent task: {}".format(
                        task["label"], p
                    )
                )

        if cache is None:
            yield task
            continue

        # Sorted so the digest is independent of dependency iteration order.
        digest_data = cache["digest-data"] + sorted(dependency_digests)
        add_optimization(
            config,
            task,
            cache_type=cache["type"],
            cache_name=cache["name"],
            digest_data=digest_data,
        )
        # add_optimization records the digest under attributes["cached_task"];
        # register it so later (child) tasks of this kind can reference it.
        digests[task["label"]] = format_task_digest(task["attributes"]["cached_task"])

        yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/chunk_partners.py b/taskcluster/gecko_taskgraph/transforms/chunk_partners.py
new file mode 100644
index 0000000000..ed74cc6232
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/chunk_partners.py
@@ -0,0 +1,75 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Chunk the partner repack tasks by subpartner and locale
+"""
+
+
+import copy
+
+from mozbuild.chunkify import chunkify
+from taskgraph.transforms.base import TransformSequence
+
+from gecko_taskgraph.util.partners import (
+ apply_partner_priority,
+ get_repack_ids_by_platform,
+)
+
transforms = TransformSequence()
# Partner tasks get their priority adjusted before any chunking happens.
transforms.add(apply_partner_priority)
+
+
@transforms.add
def chunk_partners(config, jobs):
    # Fan partner-repack jobs out by sub-partner and locale.  A job arrives in
    # one of three states: pre-fanout (no repack ids on the dep yet), chunked
    # mac signing (repack_ids), or fully fanned out (repack_id).
    for job in jobs:
        dep_job = job["primary-dependency"]
        build_platform = dep_job.attributes["build_platform"]
        repack_id = dep_job.task.get("extra", {}).get("repack_id")
        repack_ids = dep_job.task.get("extra", {}).get("repack_ids")
        copy_repack_ids = job.pop("copy-repack-ids", False)

        if copy_repack_ids:
            # Pass the dependency's chunk through unchanged.
            assert repack_ids, "dep_job {} doesn't have repack_ids!".format(
                dep_job.label
            )
            job.setdefault("extra", {})["repack_ids"] = repack_ids
            yield job
        # first downstream of the repack task, no chunking or fanout has been done yet
        elif not any([repack_id, repack_ids]):
            platform_repack_ids = get_repack_ids_by_platform(config, build_platform)
            # we chunk mac signing
            if config.kind in (
                "release-partner-repack-signing",
                "release-eme-free-repack-signing",
                "release-eme-free-repack-mac-signing",
                "release-partner-repack-mac-signing",
            ):
                repacks_per_chunk = job.get("repacks-per-chunk")
                # Round up so any remainder gets its own chunk.
                chunks, remainder = divmod(len(platform_repack_ids), repacks_per_chunk)
                if remainder:
                    chunks = int(chunks + 1)
                for this_chunk in range(1, chunks + 1):
                    chunk = chunkify(platform_repack_ids, this_chunk, chunks)
                    partner_job = copy.deepcopy(job)
                    partner_job.setdefault("extra", {}).setdefault("repack_ids", chunk)
                    partner_job["extra"]["repack_suffix"] = str(this_chunk)
                    yield partner_job
            # linux and windows we fan out immediately to one task per partner-sub_partner-locale
            else:
                for repack_id in platform_repack_ids:
                    partner_job = copy.deepcopy(job)  # don't overwrite dict values here
                    partner_job.setdefault("extra", {})
                    partner_job["extra"]["repack_id"] = repack_id
                    yield partner_job
        # fan out chunked mac signing for repackage
        elif repack_ids:
            for repack_id in repack_ids:
                partner_job = copy.deepcopy(job)
                partner_job.setdefault("extra", {}).setdefault("repack_id", repack_id)
                yield partner_job
        # otherwise we've fully fanned out already, continue by passing repack_id on
        else:
            partner_job = copy.deepcopy(job)
            partner_job.setdefault("extra", {}).setdefault("repack_id", repack_id)
            yield partner_job
diff --git a/taskcluster/gecko_taskgraph/transforms/code_review.py b/taskcluster/gecko_taskgraph/transforms/code_review.py
new file mode 100644
index 0000000000..d644e17d0e
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/code_review.py
@@ -0,0 +1,33 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Add soft dependencies and configuration to code-review tasks.
+"""
+
+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
+@transforms.add
def add_dependencies(config, jobs):
    """Soft-depend each code-review job on every task flagged for review."""
    for job in jobs:
        soft = job.setdefault("soft-dependencies", [])
        for dep_task in config.kind_dependencies_tasks.values():
            # Only tasks explicitly opted in via the code-review attribute.
            if dep_task.attributes.get("code-review") is True:
                soft.append(dep_task.label)
        yield job
+
+
+@transforms.add
def add_phabricator_config(config, jobs):
    """Record the phabricator diff (when present) in task extra data."""
    for job in jobs:
        phab_diff = config.params.get("phabricator_diff")
        if phab_diff is not None:
            extra = job.setdefault("extra", {})
            review = extra.setdefault("code-review", {})
            review["phabricator-diff"] = phab_diff
        yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/condprof.py b/taskcluster/gecko_taskgraph/transforms/condprof.py
new file mode 100644
index 0000000000..34cf7e7dd3
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/condprof.py
@@ -0,0 +1,85 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+This transform constructs tasks generate conditioned profiles from
+the condprof/kind.yml file
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import Schema
+from voluptuous import Optional
+
+from gecko_taskgraph.transforms.job import job_description_schema
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.copy_task import copy_task
+
# Schema for condprof task definitions; most fields delegate validation to the
# shared job/task description schemas.
# NOTE(review): the name "diff_description_schema" looks copy-pasted from
# diffoscope.py — renaming would be clearer, but touches add_validate below.
diff_description_schema = Schema(
    {
        # default is settled, but add 'full' to get both
        Optional("scenarios"): [str],
        Optional("description"): task_description_schema["description"],
        Optional("dependencies"): task_description_schema["dependencies"],
        Optional("fetches"): job_description_schema["fetches"],
        Optional("index"): task_description_schema["index"],
        Optional("job-from"): str,
        Optional("name"): str,
        Optional("run"): job_description_schema["run"],
        Optional("run-on-projects"): task_description_schema["run-on-projects"],
        Optional("scopes"): task_description_schema["scopes"],
        Optional("treeherder"): task_description_schema["treeherder"],
        Optional("worker"): job_description_schema["worker"],
        Optional("worker-type"): task_description_schema["worker-type"],
    }
)

transforms = TransformSequence()
transforms.add_validate(diff_description_schema)
+
+
@transforms.add
def generate_scenarios(config, tasks):
    # Expand each condprof task into one task per requested scenario,
    # substituting the ${SCENARIO}/${EXTRA_ARGS} placeholders in the command.
    for task in tasks:
        cmds = task["run"]["command"]
        # Treeherder symbol without its trailing ")" so a suffix can be added.
        symbol = task["treeherder"]["symbol"].split(")")[0]
        index = task["index"]
        jobname = index["job-name"]
        label = task["name"]
        run_as_root = task["run"].get("run-as-root", False)

        # NOTE(review): iterating a set makes emission order nondeterministic
        # across runs; harmless only if downstream ordering doesn't matter.
        for scenario in set(task["scenarios"]):
            extra_args = ""
            if scenario == "settled":
                extra_args = " --force-new "

            tcmd = cmds.replace("${EXTRA_ARGS}", extra_args)
            tcmd = tcmd.replace("${SCENARIO}", scenario)

            # `index` is shared across iterations, but job-name is rebuilt
            # from the original jobname each time and copy_task() below
            # snapshots the dict, so there is no cross-iteration leakage.
            index["job-name"] = "%s-%s" % (jobname, scenario)

            taskdesc = {
                "name": "%s-%s" % (label, scenario),
                "description": task["description"],
                "treeherder": {
                    "symbol": "%s-%s)" % (symbol, scenario),
                    "platform": task["treeherder"]["platform"],
                    "kind": task["treeherder"]["kind"],
                    "tier": task["treeherder"]["tier"],
                },
                "worker-type": copy_task(task["worker-type"]),
                "worker": copy_task(task["worker"]),
                "index": copy_task(index),
                "run": {
                    "using": "run-task",
                    "cwd": task["run"]["cwd"],
                    "checkout": task["run"]["checkout"],
                    "tooltool-downloads": copy_task(task["run"]["tooltool-downloads"]),
                    "command": tcmd,
                    "run-as-root": run_as_root,
                },
                "run-on-projects": copy_task(task["run-on-projects"]),
                "scopes": copy_task(task["scopes"]),
                "dependencies": copy_task(task["dependencies"]),
                "fetches": copy_task(task["fetches"]),
            }
            yield taskdesc
diff --git a/taskcluster/gecko_taskgraph/transforms/copy_attributes_from_dependent_task.py b/taskcluster/gecko_taskgraph/transforms/copy_attributes_from_dependent_task.py
new file mode 100644
index 0000000000..281dd5938f
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/copy_attributes_from_dependent_task.py
@@ -0,0 +1,23 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the repackage task into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+
+from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
+
+transforms = TransformSequence()
+
+
+@transforms.add
def copy_attributes(config, jobs):
    """Merge the primary dependency's inheritable attributes into each job."""
    for job in jobs:
        attributes = job.setdefault("attributes", {})
        attributes.update(
            copy_attributes_from_dependent_job(job["primary-dependency"])
        )
        yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/cross_channel.py b/taskcluster/gecko_taskgraph/transforms/cross_channel.py
new file mode 100644
index 0000000000..bf6d3a3a4f
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/cross_channel.py
@@ -0,0 +1,44 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Build a command to run `mach l10n-cross-channel`.
+"""
+
+
+from pipes import quote as shell_quote
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+
+transforms = TransformSequence()
+
+
+@transforms.add
def resolve_keys(config, jobs):
    """Resolve by-level keyed values for ssh-key-secret and run.actions."""
    level = str(config.params["level"])
    for job in jobs:
        for field in ("ssh-key-secret", "run.actions"):
            resolve_keyed_by(job, field, field, level=level)
        yield job
+
+
+@transforms.add
def build_command(config, jobs):
    """Assemble the `mach l10n-cross-channel` invocation for each job.

    Pops `ssh-key-secret` (adding the matching secrets scope when set) and
    any remaining `run.actions`, then rewrites `run` to a mach invocation.
    """
    for job in jobs:
        command = [
            "l10n-cross-channel",
            "-o",
            "/builds/worker/artifacts/outgoing.diff",
            "--attempts",
            "5",
        ]
        ssh_key_secret = job.pop("ssh-key-secret")
        if ssh_key_secret:
            command.extend(["--ssh-secret", ssh_key_secret])
            job.setdefault("scopes", []).append(f"secrets:get:{ssh_key_secret}")

        # Fix: the original read job["run"] (via .pop) before ensuring the
        # key existed, raising KeyError for jobs without a "run" section even
        # though the later setdefault("run", {}) implied it was optional.
        run = job.setdefault("run", {})
        command.extend(run.pop("actions", []))
        run.update({"using": "mach", "mach": " ".join(map(shell_quote, command))})
        yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/diffoscope.py b/taskcluster/gecko_taskgraph/transforms/diffoscope.py
new file mode 100644
index 0000000000..b74dc5bb8f
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/diffoscope.py
@@ -0,0 +1,172 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+This transform construct tasks to perform diffs between builds, as
+defined in kind.yml
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import Schema
+from taskgraph.util.taskcluster import get_artifact_path
+from voluptuous import Any, Optional, Required
+
+from gecko_taskgraph.transforms.task import task_description_schema
+
# Either a task label, or an index-search stanza resolving to an indexed task.
index_or_string = Any(
    str,
    {Required("index-search"): str},
)

diff_description_schema = Schema(
    {
        # Name of the diff task.
        Required("name"): str,
        # Treeherder tier.
        Required("tier"): int,
        # Treeherder symbol.
        Required("symbol"): str,
        # relative path (from config.path) to the file the task was defined in.
        Optional("job-from"): str,
        # Original and new builds to compare.
        Required("original"): index_or_string,
        Required("new"): index_or_string,
        # Arguments to pass to diffoscope, used for job-defaults in
        # taskcluster/ci/diffoscope/kind.yml
        Optional("args"): str,
        # Extra arguments to pass to diffoscope, that can be set per job.
        Optional("extra-args"): str,
        # Fail the task when differences are detected.
        Optional("fail-on-diff"): bool,
        # What artifact to check the differences of. Defaults to target.tar.bz2
        # for Linux, target.dmg for Mac, target.zip for Windows, target.apk for
        # Android.
        Optional("artifact"): str,
        # Whether to unpack first. Diffoscope can normally work without unpacking,
        # but when one needs to --exclude some contents, that doesn't work out well
        # if said content is packed (e.g. in omni.ja).
        Optional("unpack"): bool,
        # Commands to run before performing the diff.
        Optional("pre-diff-commands"): [str],
        # Only run the task on a set of projects/branches.
        Optional("run-on-projects"): task_description_schema["run-on-projects"],
        Optional("optimization"): task_description_schema["optimization"],
    }
)

transforms = TransformSequence()
transforms.add_validate(diff_description_schema)
+
+
@transforms.add
def fill_template(config, tasks):
    # Dummy always-optimized tasks created for index-search references; keyed
    # by index so each one is only emitted once even if referenced twice.
    dummy_tasks = {}

    for task in tasks:
        name = task["name"]

        deps = {}
        urls = {}
        previous_artifact = None
        artifact = task.get("artifact")
        # Resolve each side of the diff to a dependency and an artifact URL.
        for k in ("original", "new"):
            value = task[k]
            if isinstance(value, str):
                # Plain task label.
                deps[k] = value
                dep_name = k
                os_hint = value
            else:
                index = value["index-search"]
                if index not in dummy_tasks:
                    dummy_tasks[index] = {
                        "label": "index-search-" + index,
                        "description": index,
                        "worker-type": "invalid/always-optimized",
                        "run": {
                            "using": "always-optimized",
                        },
                        "optimization": {
                            "index-search": [index],
                        },
                    }
                    yield dummy_tasks[index]
                deps[index] = "index-search-" + index
                dep_name = index
                os_hint = index.split(".")[-1]
            # Infer the artifact name from the OS unless explicitly given.
            if artifact:
                pass
            elif "linux" in os_hint:
                artifact = "target.tar.bz2"
            elif "macosx" in os_hint:
                artifact = "target.dmg"
            elif "android" in os_hint:
                artifact = "target.apk"
            elif "win" in os_hint:
                artifact = "target.zip"
            else:
                raise Exception(f"Cannot figure out the OS for {value!r}")
            # Both sides must resolve to the same artifact name.
            if previous_artifact is not None and previous_artifact != artifact:
                raise Exception("Cannot compare builds from different OSes")
            urls[k] = {
                "artifact-reference": "<{}/{}>".format(
                    dep_name, get_artifact_path(task, artifact)
                ),
            }
            previous_artifact = artifact

        taskdesc = {
            "label": "diff-" + name,
            "description": name,
            "treeherder": {
                "symbol": task["symbol"],
                "platform": "diff/opt",
                "kind": "other",
                "tier": task["tier"],
            },
            "worker-type": "b-linux-gcp",
            "worker": {
                "docker-image": {"in-tree": "diffoscope"},
                "artifacts": [
                    {
                        "type": "file",
                        "path": f"/builds/worker/{f}",
                        "name": f"public/{f}",
                    }
                    for f in (
                        "diff.html",
                        "diff.txt",
                    )
                ],
                "env": {
                    "ORIG_URL": urls["original"],
                    "NEW_URL": urls["new"],
                    "DIFFOSCOPE_ARGS": " ".join(
                        task[k] for k in ("args", "extra-args") if k in task
                    ),
                    "PRE_DIFF": "; ".join(task.get("pre-diff-commands", [])),
                },
                "max-run-time": 1800,
            },
            "run": {
                "using": "run-task",
                # A checkout is only needed when unpacking.
                "checkout": task.get("unpack", False),
                "command": "/builds/worker/bin/get_and_diffoscope{}{}".format(
                    " --unpack" if task.get("unpack") else "",
                    " --fail" if task.get("fail-on-diff") else "",
                ),
            },
            "dependencies": deps,
            "optimization": task.get("optimization"),
        }
        if "run-on-projects" in task:
            taskdesc["run-on-projects"] = task["run-on-projects"]

        # dmg extraction needs the Mac cross tooling toolchains.
        if artifact.endswith(".dmg"):
            taskdesc.setdefault("fetches", {}).setdefault("toolchain", []).extend(
                [
                    "linux64-cctools-port",
                    "linux64-libdmg",
                ]
            )

        yield taskdesc
diff --git a/taskcluster/gecko_taskgraph/transforms/docker_image.py b/taskcluster/gecko_taskgraph/transforms/docker_image.py
new file mode 100644
index 0000000000..0ccf83bf38
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/docker_image.py
@@ -0,0 +1,209 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import json
+import logging
+import os
+import re
+
+import mozpack.path as mozpath
+import taskgraph
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import Schema
+from voluptuous import Optional, Required
+
+from gecko_taskgraph.util.docker import (
+ create_context_tar,
+ generate_context_hash,
+ image_path,
+)
+
+from .. import GECKO
+from .task import task_description_schema
+
logger = logging.getLogger(__name__)

# Directory where docker build contexts are written when config.write_artifacts
# is set (see fill_template).
CONTEXTS_DIR = "docker-contexts"

# A full sha256 hex digest.  NOTE(review): not referenced in this module —
# presumably used by importers; confirm before removing.
DIGEST_RE = re.compile("^[0-9a-f]{64}$")

# Bootstrap image used to build all other images, pinned by digest.
IMAGE_BUILDER_IMAGE = (
    "mozillareleases/image_builder:5.0.0"
    "@sha256:"
    "e510a9a9b80385f71c112d61b2f2053da625aff2b6d430411ac42e424c58953f"
)

transforms = TransformSequence()

docker_image_schema = Schema(
    {
        # Name of the docker image.
        Required("name"): str,
        # Name of the parent docker image.
        Optional("parent"): str,
        # Treeherder symbol.
        Required("symbol"): str,
        # relative path (from config.path) to the file the docker image was defined
        # in.
        Optional("job-from"): str,
        # Arguments to use for the Dockerfile.
        Optional("args"): {str: str},
        # Name of the docker image definition under taskcluster/docker, when
        # different from the docker image name.
        Optional("definition"): str,
        # List of package tasks this docker image depends on.
        Optional("packages"): [str],
        Optional(
            "index",
            description="information for indexing this build so its artifacts can be discovered",
        ): task_description_schema["index"],
        Optional(
            "cache",
            description="Whether this image should be cached based on inputs.",
        ): bool,
    }
)


transforms.add_validate(docker_image_schema)
+
+
@transforms.add
def fill_template(config, tasks):
    # When writing artifacts, docker contexts are materialized on disk first.
    if not taskgraph.fast and config.write_artifacts:
        if not os.path.isdir(CONTEXTS_DIR):
            os.makedirs(CONTEXTS_DIR)

    for task in tasks:
        image_name = task.pop("name")
        job_symbol = task.pop("symbol")
        args = task.pop("args", {})
        packages = task.pop("packages", [])
        parent = task.pop("parent", None)

        # Every referenced package must have a corresponding packages-* task.
        for p in packages:
            if f"packages-{p}" not in config.kind_dependencies_tasks:
                raise Exception(
                    "Missing package job for {}-{}: {}".format(
                        config.kind, image_name, p
                    )
                )

        if not taskgraph.fast:
            context_path = mozpath.relpath(image_path(image_name), GECKO)
            if config.write_artifacts:
                context_file = os.path.join(CONTEXTS_DIR, f"{image_name}.tar.gz")
                logger.info(f"Writing {context_file} for docker image {image_name}")
                context_hash = create_context_tar(
                    GECKO, context_path, context_file, image_name, args
                )
            else:
                context_hash = generate_context_hash(
                    GECKO, context_path, image_name, args
                )
        else:
            if config.write_artifacts:
                raise Exception("Can't write artifacts if `taskgraph.fast` is set.")
            # Placeholder hash in fast mode; caching is skipped below anyway.
            context_hash = "0" * 40
        digest_data = [context_hash]
        digest_data += [json.dumps(args, sort_keys=True)]

        description = "Build the docker image {} for use by dependent tasks".format(
            image_name
        )

        args["DOCKER_IMAGE_PACKAGES"] = " ".join(f"<{p}>" for p in packages)

        # Adjust the zstandard compression level based on the execution level.
        # We use faster compression for level 1 because we care more about
        # end-to-end times. We use slower/better compression for other levels
        # because images are read more often and it is worth the trade-off to
        # burn more CPU once to reduce image size.
        zstd_level = "3" if int(config.params["level"]) == 1 else "10"

        # include some information that is useful in reconstructing this task
        # from JSON
        taskdesc = {
            "label": f"{config.kind}-{image_name}",
            "description": description,
            "attributes": {
                "image_name": image_name,
                "artifact_prefix": "public",
            },
            "expiration-policy": "long",
            "scopes": [],
            "treeherder": {
                "symbol": job_symbol,
                "platform": "taskcluster-images/opt",
                "kind": "other",
                "tier": 1,
            },
            "run-on-projects": [],
            "worker-type": "images-gcp",
            "worker": {
                "implementation": "docker-worker",
                "os": "linux",
                "artifacts": [
                    {
                        "type": "file",
                        "path": "/workspace/image.tar.zst",
                        "name": "public/image.tar.zst",
                    }
                ],
                "env": {
                    # The context tar is published by the decision task.
                    "CONTEXT_TASK_ID": {"task-reference": "<decision>"},
                    "CONTEXT_PATH": "public/docker-contexts/{}.tar.gz".format(
                        image_name
                    ),
                    "HASH": context_hash,
                    "PROJECT": config.params["project"],
                    "IMAGE_NAME": image_name,
                    "DOCKER_IMAGE_ZSTD_LEVEL": zstd_level,
                    "DOCKER_BUILD_ARGS": {"task-reference": json.dumps(args)},
                    "GECKO_BASE_REPOSITORY": config.params["base_repository"],
                    "GECKO_HEAD_REPOSITORY": config.params["head_repository"],
                    "GECKO_HEAD_REV": config.params["head_rev"],
                },
                "chain-of-trust": True,
                "max-run-time": 7200,
                # FIXME: We aren't currently propagating the exit code
            },
        }
        # Retry for 'funsize-update-generator' if exit status code is -1
        if image_name in ["funsize-update-generator"]:
            taskdesc["worker"]["retry-exit-status"] = [-1]

        worker = taskdesc["worker"]

        if image_name == "image_builder":
            # The bootstrap image builds itself from a pinned upstream image.
            worker["docker-image"] = IMAGE_BUILDER_IMAGE
            digest_data.append(f"image-builder-image:{IMAGE_BUILDER_IMAGE}")
        else:
            worker["docker-image"] = {"in-tree": "image_builder"}
            deps = taskdesc.setdefault("dependencies", {})
            deps["docker-image"] = f"{config.kind}-image_builder"

        if packages:
            deps = taskdesc.setdefault("dependencies", {})
            for p in sorted(packages):
                deps[p] = f"packages-{p}"

        if parent:
            deps = taskdesc.setdefault("dependencies", {})
            deps["parent"] = f"{config.kind}-{parent}"
            worker["env"]["PARENT_TASK_ID"] = {
                "task-reference": "<parent>",
            }
        if "index" in task:
            taskdesc["index"] = task["index"]

        if task.get("cache", True) and not taskgraph.fast:
            taskdesc["cache"] = {
                "type": "docker-images.v2",
                "name": image_name,
                "digest-data": digest_data,
            }

        yield taskdesc
diff --git a/taskcluster/gecko_taskgraph/transforms/fetch.py b/taskcluster/gecko_taskgraph/transforms/fetch.py
new file mode 100644
index 0000000000..b51362a905
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/fetch.py
@@ -0,0 +1,387 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# Support for running tasks that download remote content and re-export
+# it as task artifacts.
+
+
+import os
+import re
+
+import attr
+import taskgraph
+from mozbuild.shellutil import quote as shell_quote
+from mozpack import path as mozpath
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import Schema, validate_schema
+from taskgraph.util.treeherder import join_symbol
+from voluptuous import Any, Extra, Optional, Required
+
+import gecko_taskgraph
+
+from ..util.cached_tasks import add_optimization
+
+CACHE_TYPE = "content.v1"
+
+FETCH_SCHEMA = Schema(
+ {
+ # Name of the task.
+ Required("name"): str,
+ # Relative path (from config.path) to the file the task was defined
+ # in.
+ Optional("job-from"): str,
+ # Description of the task.
+ Required("description"): str,
+ Optional(
+ "fetch-alias",
+ description="An alias that can be used instead of the real fetch job name in "
+ "fetch stanzas for jobs.",
+ ): str,
+ Optional(
+ "artifact-prefix",
+ description="The prefix of the taskcluster artifact being uploaded. "
+ "Defaults to `public/`; if it starts with something other than "
+ "`public/` the artifact will require scopes to access.",
+ ): str,
+ Optional("attributes"): {str: object},
+ Required("fetch"): {
+ Required("type"): str,
+ Extra: object,
+ },
+ }
+)
+
+
# define a collection of payload builders, depending on the worker implementation
fetch_builders = {}


@attr.s(frozen=True)
class FetchBuilder:
    # Voluptuous schema the fetch stanza must satisfy.
    schema = attr.ib(type=Schema)
    # Callable (config, name, fetch) -> job payload dict.
    builder = attr.ib()


def fetch_builder(name, schema):
    """Decorator: register a fetch builder under `name`, extending its schema
    with the required `type` field."""
    schema = Schema({Required("type"): name}).extend(schema)

    def wrap(func):
        fetch_builders[name] = FetchBuilder(schema, func)
        return func

    return wrap


transforms = TransformSequence()
transforms.add_validate(FETCH_SCHEMA)
+
+
@transforms.add
def process_fetch_job(config, jobs):
    """Expand each job's `fetch` stanza into the generic job schema.

    Looks up the registered builder for the fetch type and merges the
    payload it produces (command, artifact name, digest data, ...) into
    the job.  Raises for unregistered fetch types.
    """
    # Converts fetch-url entries to the job schema.
    for job in jobs:
        typ = job["fetch"]["type"]
        name = job["name"]
        fetch = job.pop("fetch")

        # Keep this membership check for its specific error message;
        # configure_fetch re-checks with a different one.
        if typ not in fetch_builders:
            raise Exception(f"Unknown fetch type {typ} in fetch {name}")
        # The previous validate_schema call here was redundant:
        # configure_fetch performs the identical validation itself.
        job.update(configure_fetch(config, typ, name, fetch))

        yield job
+
+
def configure_fetch(config, typ, name, fetch):
    """Validate `fetch` against its builder's schema and build the payload.

    Returns the builder's payload dict; raises if `typ` is unregistered or
    the stanza fails schema validation.
    """
    if typ not in fetch_builders:
        raise Exception(f"No fetch type {typ} in fetch {name}")
    validate_schema(fetch_builders[typ].schema, fetch, f"In task.fetch {name!r}:")

    return fetch_builders[typ].builder(config, name, fetch)
+
+
@transforms.add
def make_task(config, jobs):
    # Fetch tasks are idempotent and immutable. Have them live for
    # essentially forever.
    if config.params["level"] == "3":
        expires = "1000 years"
    else:
        expires = "28 days"

    for job in jobs:
        name = job["name"]
        artifact_prefix = job.get("artifact-prefix", "public")
        env = job.get("env", {})
        env.update({"UPLOAD_DIR": "/builds/worker/artifacts"})
        attributes = job.get("attributes", {})
        attributes["fetch-artifact"] = mozpath.join(
            artifact_prefix, job["artifact_name"]
        )
        alias = job.get("fetch-alias")
        if alias:
            attributes["fetch-alias"] = alias

        # Explicitly uncached tasks get short expirations (artifacts shorter
        # still) instead of the near-forever default.
        task_expires = "28 days" if attributes.get("cached_task") is False else expires
        artifact_expires = (
            "2 days" if attributes.get("cached_task") is False else expires
        )

        task = {
            "attributes": attributes,
            "name": name,
            "description": job["description"],
            "expires-after": task_expires,
            "label": "fetch-%s" % name,
            "run-on-projects": [],
            "treeherder": {
                "symbol": join_symbol("Fetch", name),
                "kind": "build",
                "platform": "fetch/opt",
                "tier": 1,
            },
            "run": {
                "using": "run-task",
                "checkout": False,
                "command": job["command"],
            },
            "worker-type": "b-linux-gcp",
            "worker": {
                "chain-of-trust": True,
                "docker-image": {"in-tree": "fetch"},
                "env": env,
                "max-run-time": 900,
                "artifacts": [
                    {
                        "type": "directory",
                        "name": artifact_prefix,
                        "path": "/builds/worker/artifacts",
                        "expires-after": artifact_expires,
                    }
                ],
            },
        }

        # Secret-using fetches (e.g. a git ssh key) need the proxy and scope.
        if job.get("secret", None):
            task["scopes"] = ["secrets:get:" + job.get("secret")]
            task["worker"]["taskcluster-proxy"] = True

        if not taskgraph.fast:
            cache_name = task["label"].replace(f"{config.kind}-", "", 1)

            # This adds the level to the index path automatically.
            add_optimization(
                config,
                task,
                cache_type=CACHE_TYPE,
                cache_name=cache_name,
                digest_data=job["digest_data"],
            )
        yield task
+
+
+@fetch_builder(
+ "static-url",
+ schema={
+ # The URL to download.
+ Required("url"): str,
+ # The SHA-256 of the downloaded content.
+ Required("sha256"): str,
+ # Size of the downloaded entity, in bytes.
+ Required("size"): int,
+ # GPG signature verification.
+ Optional("gpg-signature"): {
+ # URL where GPG signature document can be obtained. Can contain the
+ # value ``{url}``, which will be substituted with the value from
+ # ``url``.
+ Required("sig-url"): str,
+ # Path to file containing GPG public key(s) used to validate
+ # download.
+ Required("key-path"): str,
+ },
+ # The name to give to the generated artifact. Defaults to the file
+ # portion of the URL. Using a different extension converts the
+ # archive to the given type. Only conversion to .tar.zst is
+ # supported.
+ Optional("artifact-name"): str,
+ # Strip the given number of path components at the beginning of
+ # each file entry in the archive.
+ # Requires an artifact-name ending with .tar.zst.
+ Optional("strip-components"): int,
+ # Add the given prefix to each file entry in the archive.
+ # Requires an artifact-name ending with .tar.zst.
+ Optional("add-prefix"): str,
+ # IMPORTANT: when adding anything that changes the behavior of the task,
+ # it is important to update the digest data used to compute cache hits.
+ },
+)
+def create_fetch_url_task(config, name, fetch):
+ artifact_name = fetch.get("artifact-name")
+ if not artifact_name:
+ artifact_name = fetch["url"].split("/")[-1]
+
+ command = [
+ "/builds/worker/bin/fetch-content",
+ "static-url",
+ ]
+
+ # Arguments that matter to the cache digest
+ args = [
+ "--sha256",
+ fetch["sha256"],
+ "--size",
+ "%d" % fetch["size"],
+ ]
+
+ if fetch.get("strip-components"):
+ args.extend(["--strip-components", "%d" % fetch["strip-components"]])
+
+ if fetch.get("add-prefix"):
+ args.extend(["--add-prefix", fetch["add-prefix"]])
+
+ command.extend(args)
+
+ env = {}
+
+ if "gpg-signature" in fetch:
+ sig_url = fetch["gpg-signature"]["sig-url"].format(url=fetch["url"])
+ key_path = os.path.join(
+ gecko_taskgraph.GECKO, fetch["gpg-signature"]["key-path"]
+ )
+
+ with open(key_path, "r") as fh:
+ gpg_key = fh.read()
+
+ env["FETCH_GPG_KEY"] = gpg_key
+ command.extend(
+ [
+ "--gpg-sig-url",
+ sig_url,
+ "--gpg-key-env",
+ "FETCH_GPG_KEY",
+ ]
+ )
+
+ command.extend(
+ [
+ fetch["url"],
+ "/builds/worker/artifacts/%s" % artifact_name,
+ ]
+ )
+
+ return {
+ "command": command,
+ "artifact_name": artifact_name,
+ "env": env,
+ # We don't include the GPG signature in the digest because it isn't
+ # materially important for caching: GPG signatures are supplemental
+ # trust checking beyond what the shasum already provides.
+ "digest_data": args + [artifact_name],
+ }
+
+
+@fetch_builder(
+ "git",
+ schema={
+ Required("repo"): str,
+ Required(Any("revision", "branch")): str,
+ Optional("include-dot-git"): bool,
+ Optional("artifact-name"): str,
+ Optional("path-prefix"): str,
+ # ssh-key is a taskcluster secret path (e.g. project/civet/github-deploy-key)
+ # In the secret dictionary, the key should be specified as
+ # "ssh_privkey": "-----BEGIN OPENSSH PRIVATE KEY-----\nkfksnb3jc..."
+ # n.b. The OpenSSH private key file format requires a newline at the end of the file.
+ Optional("ssh-key"): str,
+ },
+)
def create_git_fetch_task(config, name, fetch):
    """Build the payload for a git fetch task.

    Resolves either `revision` (a 40-character sha1) or `branch`, derives
    the path prefix and artifact name from the repo URL when not given, and
    returns the command, artifact name, digest data and optional ssh secret.
    """
    path_prefix = fetch.get("path-prefix")
    if not path_prefix:
        # Default to the final path component of the repo URL.
        path_prefix = fetch["repo"].rstrip("/").rsplit("/", 1)[-1]
    artifact_name = fetch.get("artifact-name")
    if not artifact_name:
        artifact_name = f"{path_prefix}.tar.zst"

    if "revision" in fetch and "branch" in fetch:
        raise Exception("revision and branch cannot be used in the same context")

    revision_or_branch = None

    if "revision" in fetch:
        revision_or_branch = fetch["revision"]
        # Fix: use fullmatch — re.match only anchors at the start, so a
        # 40-hex prefix followed by trailing garbage was previously accepted
        # as a valid sha1.
        if not re.fullmatch(r"[0-9a-fA-F]{40}", fetch["revision"]):
            raise Exception(f'Revision is not a sha1 in fetch task "{name}"')
    else:
        # we are sure we are dealing with a branch
        revision_or_branch = fetch["branch"]

    args = [
        "/builds/worker/bin/fetch-content",
        "git-checkout-archive",
        "--path-prefix",
        path_prefix,
        fetch["repo"],
        revision_or_branch,
        "/builds/worker/artifacts/%s" % artifact_name,
    ]

    ssh_key = fetch.get("ssh-key")
    if ssh_key:
        args.append("--ssh-key-secret")
        args.append(ssh_key)

    digest_data = [revision_or_branch, path_prefix, artifact_name]
    if fetch.get("include-dot-git", False):
        args.append("--include-dot-git")
        digest_data.append(".git")

    return {
        "command": args,
        "artifact_name": artifact_name,
        "digest_data": digest_data,
        # Present (and turned into a secrets scope) only when a deploy key
        # is needed; None otherwise.
        "secret": ssh_key,
    }
+
+
+@fetch_builder(
+ "chromium-fetch",
+ schema={
+ Required("script"): str,
+ # Platform type for chromium build
+ Required("platform"): str,
+ # Chromium revision to obtain
+ Optional("revision"): str,
+ # The name to give to the generated artifact.
+ Required("artifact-name"): str,
+ },
+)
def create_chromium_fetch_task(config, name, fetch):
    """Build the payload for a chromium-fetch task running `script`."""
    artifact_name = fetch.get("artifact-name")
    platform = fetch.get("platform")
    revision = fetch.get("revision")

    # Script arguments; revision is optional.
    script_args = "--platform " + shell_quote(platform)
    if revision:
        script_args += " --revision " + shell_quote(revision)

    workdir = "/builds/worker"
    command = [
        "bash",
        "-c",
        f"cd {workdir} && /usr/bin/python3 {fetch['script']} {script_args}",
    ]

    return {
        "command": command,
        "artifact_name": artifact_name,
        "digest_data": [
            f"revision={revision}",
            f"platform={platform}",
            f"artifact_name={artifact_name}",
        ],
    }
diff --git a/taskcluster/gecko_taskgraph/transforms/final_verify.py b/taskcluster/gecko_taskgraph/transforms/final_verify.py
new file mode 100644
index 0000000000..aa8be35a0d
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/final_verify.py
@@ -0,0 +1,35 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the final-verify task into an actual task description.
+"""
+
+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
@transforms.add
def add_command(config, tasks):
    """Attach the final-verification command to each task, pointing it at
    every update-verify config artifact among the task's dependencies."""
    for task in tasks:
        # Make sure the worker has an env dict to extend later.
        if not task["worker"].get("env"):
            task["worker"]["env"] = {}

        configs = [
            f"<{dep}/public/build/update-verify.cfg>"
            for dep in sorted(task.get("dependencies", {}))
            if "update-verify-config" in dep
        ]
        task["run"] = {
            "using": "run-task",
            "cwd": "{checkout}",
            "command": {
                "artifact-reference": (
                    "tools/update-verify/release/final-verification.sh "
                    + " ".join(configs)
                )
            },
            "sparse-profile": "update-verify",
        }
        yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/fxrecord.py b/taskcluster/gecko_taskgraph/transforms/fxrecord.py
new file mode 100644
index 0000000000..0d8a23bfb4
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/fxrecord.py
@@ -0,0 +1,22 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
@transforms.add
def fxrecord(config, jobs):
    """Wire each fxrecord job to its primary dependency (when present):
    depend on it, inherit its treeherder platform, and expose its task id
    through the FXRECORD_TASK_ID environment variable."""
    for job in jobs:
        upstream = job.pop("primary-dependency", None)

        if upstream is not None:
            job["dependencies"] = {upstream.label: upstream.label}
            job["treeherder"]["platform"] = upstream.task["extra"][
                "treeherder-platform"
            ]
            env = job["worker"].setdefault("env", {})
            env["FXRECORD_TASK_ID"] = {"task-reference": f"<{upstream.label}>"}

        yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/geckodriver_mac_notarization.py b/taskcluster/gecko_taskgraph/transforms/geckodriver_mac_notarization.py
new file mode 100644
index 0000000000..1a996c1fc4
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/geckodriver_mac_notarization.py
@@ -0,0 +1,68 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the geckodriver mac-notarization task into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from voluptuous import Optional
+
+from gecko_taskgraph.loader.single_dep import schema
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
+from gecko_taskgraph.util.scriptworker import add_scope_prefix
+
+# Schema for geckodriver mac-notarization jobs: the single-dep loader schema
+# extended with the task-description fields these jobs may set directly.
+repackage_signing_description_schema = schema.extend(
+    {
+        Optional("label"): str,
+        Optional("treeherder"): task_description_schema["treeherder"],
+        Optional("shipping-phase"): task_description_schema["shipping-phase"],
+        Optional("worker"): task_description_schema["worker"],
+        Optional("worker-type"): task_description_schema["worker-type"],
+    }
+)
+
+transforms = TransformSequence()
+# Validate incoming job definitions before any transform runs.
+transforms.add_validate(repackage_signing_description_schema)
+
+
@transforms.add
def geckodriver_mac_notarization(config, jobs):
    """Turn each geckodriver mac-notarization job into a task description,
    inheriting attributes and treeherder data from the primary dependency
    and requesting the Apple notarization signing scope."""
    for job in jobs:
        dep_job = job["primary-dependency"]
        dep_extra = dep_job.task.get("extra", {})
        dep_treeherder = dep_extra.get("treeherder", {})

        attributes = copy_attributes_from_dependent_job(dep_job)

        treeherder = job.get("treeherder", {})
        treeherder.setdefault("platform", dep_extra.get("treeherder-platform"))
        treeherder.setdefault("tier", dep_treeherder.get("tier", 1))
        treeherder.setdefault("kind", "build")

        build_platform = dep_job.attributes.get("build_platform")
        # Index by platform with the trailing build-type suffix removed.
        platform = build_platform.rsplit("-", 1)[0]

        yield {
            "label": job["label"],
            "description": "Mac notarization - Geckodriver for build '{}'".format(
                attributes.get("build_platform"),
            ),
            "worker-type": job["worker-type"],
            "worker": job["worker"],
            "scopes": [
                add_scope_prefix(config, "signing:cert:release-apple-notarization")
            ],
            "dependencies": {dep_job.kind: dep_job.label},
            "attributes": attributes,
            "treeherder": treeherder,
            "run-on-projects": ["mozilla-central"],
            "index": {"product": "geckodriver", "job-name": f"{platform}-notarized"},
        }
diff --git a/taskcluster/gecko_taskgraph/transforms/geckodriver_signing.py b/taskcluster/gecko_taskgraph/transforms/geckodriver_signing.py
new file mode 100644
index 0000000000..05bc1319de
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/geckodriver_signing.py
@@ -0,0 +1,124 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the geckodriver signing task into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from voluptuous import Optional
+
+from gecko_taskgraph.loader.single_dep import schema
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
+from gecko_taskgraph.util.scriptworker import get_signing_cert_scope_per_platform
+
+# Schema for geckodriver signing jobs: the single-dep loader schema extended
+# with the optional task-description fields a kind may provide.
+repackage_signing_description_schema = schema.extend(
+    {
+        Optional("label"): str,
+        Optional("treeherder"): task_description_schema["treeherder"],
+        Optional("shipping-phase"): task_description_schema["shipping-phase"],
+    }
+)
+
+transforms = TransformSequence()
+# Validate incoming job definitions before any transform runs.
+transforms.add_validate(repackage_signing_description_schema)
+
+
@transforms.add
def make_signing_description(config, jobs):
    """Convert geckodriver signing jobs into full task descriptions.

    Each job signs the geckodriver toolchain artifact produced by its
    primary dependency, using a platform-appropriate signing format and
    the signing-cert scope for the platform/shippability combination.
    macOS builds are rerouted to the mac signing worker pool.
    """
    for job in jobs:
        dep_job = job["primary-dependency"]

        attributes = copy_attributes_from_dependent_job(dep_job)
        attributes["repackage_type"] = "repackage-signing"

        treeherder = job.get("treeherder", {})
        dep_treeherder = dep_job.task.get("extra", {}).get("treeherder", {})
        treeherder.setdefault(
            "symbol", "{}(gd-s)".format(dep_treeherder["groupSymbol"])
        )
        treeherder.setdefault(
            "platform", dep_job.task.get("extra", {}).get("treeherder-platform")
        )
        treeherder.setdefault("tier", dep_treeherder.get("tier", 1))
        treeherder.setdefault("kind", "build")

        # Depend on the build task plus that task's own dependencies,
        # except for its docker image, which signing does not need.
        dependencies = {dep_job.kind: dep_job.label}
        dependencies.update(
            {k: v for k, v in dep_job.dependencies.items() if k != "docker-image"}
        )

        description = "Signing Geckodriver for build '{}'".format(
            attributes.get("build_platform"),
        )

        build_platform = dep_job.attributes.get("build_platform")
        is_shippable = dep_job.attributes.get("shippable")
        signing_cert_scope = get_signing_cert_scope_per_platform(
            build_platform, is_shippable, config
        )

        upstream_artifacts = _craft_upstream_artifacts(
            dep_job, dep_job.kind, build_platform
        )

        # Index by platform with the trailing build-type suffix removed.
        platform = build_platform.rsplit("-", 1)[0]

        task = {
            "label": job["label"],
            "description": description,
            "worker-type": "linux-signing",
            "worker": {
                "implementation": "scriptworker-signing",
                "upstream-artifacts": upstream_artifacts,
            },
            "scopes": [signing_cert_scope],
            "dependencies": dependencies,
            "attributes": attributes,
            "treeherder": treeherder,
            "run-on-projects": ["mozilla-central"],
            "index": {"product": "geckodriver", "job-name": platform},
        }

        if build_platform.startswith("macosx"):
            worker_type = task["worker-type"]
            worker_type_alias_map = {
                "linux-depsigning": "mac-depsigning",
                "linux-signing": "mac-signing",
            }

            assert worker_type in worker_type_alias_map, (
                "Make sure to adjust the below worker_type_alias logic for "
                "mac if you change the signing workerType aliases!"
                " ({} not found in mapping)".format(worker_type)
            )
            # Previously the alias lookup was performed twice and the first
            # result discarded; reuse the single lookup here.
            task["worker-type"] = worker_type_alias_map[worker_type]
            task["worker"]["mac-behavior"] = "mac_geckodriver"

        yield task
+
+
+def _craft_upstream_artifacts(dep_job, dependency_kind, build_platform):
+ if build_platform.startswith("win"):
+ signing_format = "autograph_authenticode_sha2"
+ elif build_platform.startswith("linux"):
+ signing_format = "autograph_gpg"
+ elif build_platform.startswith("macosx"):
+ signing_format = "mac_geckodriver"
+ else:
+ raise ValueError(f'Unsupported build platform "{build_platform}"')
+
+ return [
+ {
+ "taskId": {"task-reference": f"<{dependency_kind}>"},
+ "taskType": "build",
+ "paths": [dep_job.attributes["toolchain-artifact"]],
+ "formats": [signing_format],
+ }
+ ]
diff --git a/taskcluster/gecko_taskgraph/transforms/github_sync.py b/taskcluster/gecko_taskgraph/transforms/github_sync.py
new file mode 100644
index 0000000000..6f48f794ce
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/github_sync.py
@@ -0,0 +1,23 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
@transforms.add
def sync_github(config, tasks):
    """Do transforms specific to github-sync tasks."""
    for task in tasks:
        secret = task.pop("secret")
        # Only mozilla-central may hold this secret scope; requesting it on
        # any other tree would make the decision task fail.
        if config.params["project"] == "mozilla-central":
            task.setdefault("scopes", []).append("secrets:get:" + secret)
            task["worker"].setdefault("env", {})["GITHUB_SECRET"] = secret
        yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/job/__init__.py b/taskcluster/gecko_taskgraph/transforms/job/__init__.py
new file mode 100644
index 0000000000..9b6924b605
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/job/__init__.py
@@ -0,0 +1,504 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Convert a job description into a task description.
+
+Jobs descriptions are similar to task descriptions, but they specify how to run
+the job at a higher level, using a "run" field that can be interpreted by
+run-using handlers in `taskcluster/gecko_taskgraph/transforms/job`.
+"""
+
+
+import json
+import logging
+
+import mozpack.path as mozpath
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.python_path import import_sibling_modules
+from taskgraph.util.schema import Schema, validate_schema
+from taskgraph.util.taskcluster import get_artifact_prefix
+from voluptuous import Any, Exclusive, Extra, Optional, Required
+
+from gecko_taskgraph.transforms.cached_tasks import order_tasks
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.copy_task import copy_task
+from gecko_taskgraph.util.workertypes import worker_type_implementation
+
+logger = logging.getLogger(__name__)
+
# Schema for a build (job) description.  Most fields pass straight through
# to the task description schema; "run", "when" and "fetches" are consumed
# by the transforms in this module.
job_description_schema = Schema(
    {
        # The name of the job and the job's label. At least one must be specified,
        # and the label will be generated from the name if necessary, by prepending
        # the kind.
        Optional("name"): str,
        Optional("label"): str,
        # the following fields are passed directly through to the task description,
        # possibly modified by the run implementation. See
        # taskcluster/gecko_taskgraph/transforms/task.py for the schema details.
        Required("description"): task_description_schema["description"],
        Optional("attributes"): task_description_schema["attributes"],
        Optional("job-from"): task_description_schema["job-from"],
        Optional("dependencies"): task_description_schema["dependencies"],
        # Note: this key was previously listed twice; the duplicate entry
        # has been removed.
        Optional("if-dependencies"): task_description_schema["if-dependencies"],
        Optional("soft-dependencies"): task_description_schema["soft-dependencies"],
        Optional("requires"): task_description_schema["requires"],
        Optional("expires-after"): task_description_schema["expires-after"],
        Optional("expiration-policy"): task_description_schema["expiration-policy"],
        Optional("routes"): task_description_schema["routes"],
        Optional("scopes"): task_description_schema["scopes"],
        Optional("tags"): task_description_schema["tags"],
        Optional("extra"): task_description_schema["extra"],
        Optional("treeherder"): task_description_schema["treeherder"],
        Optional("index"): task_description_schema["index"],
        Optional("run-on-projects"): task_description_schema["run-on-projects"],
        Optional("shipping-phase"): task_description_schema["shipping-phase"],
        Optional("shipping-product"): task_description_schema["shipping-product"],
        Optional("always-target"): task_description_schema["always-target"],
        Exclusive("optimization", "optimization"): task_description_schema[
            "optimization"
        ],
        Optional("use-sccache"): task_description_schema["use-sccache"],
        Optional("use-system-python"): bool,
        Optional("priority"): task_description_schema["priority"],
        # The "when" section contains descriptions of the circumstances under which
        # this task should be included in the task graph. This will be converted
        # into an optimization, so it cannot be specified in a job description that
        # also gives 'optimization'.
        Exclusive("when", "optimization"): Any(
            None,
            {
                # This task only needs to be run if a file matching one of the given
                # patterns has changed in the push. The patterns use the mozpack
                # match function (python/mozbuild/mozpack/path.py).
                Optional("files-changed"): [str],
            },
        ),
        # A list of artifacts to install from 'fetch' tasks.
        Optional("fetches"): {
            str: [
                str,
                {
                    Required("artifact"): str,
                    Optional("dest"): str,
                    Optional("extract"): bool,
                    Optional("verify-hash"): bool,
                },
            ],
        },
        # A description of how to run this job.
        "run": {
            # The key to a job implementation in a peer module to this one
            "using": str,
            # Base work directory used to set up the task.
            Optional("workdir"): str,
            # Any remaining content is verified against that job implementation's
            # own schema.
            Extra: object,
        },
        Required("worker-type"): task_description_schema["worker-type"],
        # This object will be passed through to the task description, with additions
        # provided by the job's run-using function
        Optional("worker"): dict,
    }
)

transforms = TransformSequence()
transforms.add_validate(job_description_schema)
+
+
@transforms.add
def rewrite_when_to_optimization(config, jobs):
    """Translate a job's "when" clause into an equivalent optimization:
    "run only when these files changed" becomes skip-unless-changed, and
    the kind's own config directory is always watched too."""
    for job in jobs:
        when = job.pop("when", {})
        if not when:
            yield job
            continue

        patterns = when.get("files-changed")
        # Changes to the kind configuration itself must also trigger the job.
        patterns.append(f"{config.path}/**")

        job["optimization"] = {"skip-unless-changed": patterns}

        assert "when" not in job
        yield job
+
+
@transforms.add
def set_implementation(config, jobs):
    """Record each job's worker implementation and OS, both as tags and on
    the worker definition (which must not already carry one)."""
    for job in jobs:
        impl, os = worker_type_implementation(
            config.graph_config, config.params, job["worker-type"]
        )
        for tag_name, tag_value in (("os", os), ("worker-implementation", impl)):
            if tag_value:
                job.setdefault("tags", {})[tag_name] = tag_value

        worker = job.setdefault("worker", {})
        assert "implementation" not in worker
        worker["implementation"] = impl
        if os:
            worker["os"] = os
        yield job
+
+
@transforms.add
def set_label(config, jobs):
    """Derive "label" from "name" (prefixed with the kind) when absent,
    then drop "name"; a job must supply at least one of the two."""
    for job in jobs:
        if "label" not in job:
            if "name" not in job:
                raise Exception("job has neither a name nor a label")
            job["label"] = f"{config.kind}-{job['name']}"
        if job.get("name"):
            del job["name"]
        yield job
+
+
+@transforms.add
+def add_resource_monitor(config, jobs):
+    """For jobs opting in via the `resource-monitor` attribute, fetch the
+    platform-appropriate resource-monitor toolchain, publish its JSON output
+    as an artifact, and point the tool at that file via
+    RESOURCE_MONITOR_OUTPUT.
+    """
+    for job in jobs:
+        if job.get("attributes", {}).get("resource-monitor"):
+            worker_implementation, worker_os = worker_type_implementation(
+                config.graph_config, config.params, job["worker-type"]
+            )
+            # Normalise worker os so that linux-bitbar and similar use linux tools.
+            worker_os = worker_os.split("-")[0]
+            # We don't currently support an Arm worker, due to gopsutil's indirect
+            # dependencies (go-ole)
+            if "aarch64" in job["worker-type"]:
+                yield job
+                continue
+            elif "win7" in job["worker-type"]:
+                arch = "32"
+            else:
+                arch = "64"
+            job.setdefault("fetches", {})
+            job["fetches"].setdefault("toolchain", [])
+            job["fetches"]["toolchain"].append(f"{worker_os}{arch}-resource-monitor")
+
+            # docker-worker needs the absolute path; other workers use a
+            # path relative to the task directory.
+            if worker_implementation == "docker-worker":
+                artifact_source = "/builds/worker/monitoring/resource-monitor.json"
+            else:
+                artifact_source = "monitoring/resource-monitor.json"
+            job["worker"].setdefault("artifacts", [])
+            job["worker"]["artifacts"].append(
+                {
+                    "name": "public/monitoring/resource-monitor.json",
+                    "type": "file",
+                    "path": artifact_source,
+                }
+            )
+            # Set env for output file
+            job["worker"].setdefault("env", {})
+            job["worker"]["env"]["RESOURCE_MONITOR_OUTPUT"] = artifact_source
+
+        yield job
+
+
@transforms.add
def make_task_description(config, jobs):
    """Given a build description, create a task description"""
    # Load the run-using handler modules up front, before iterating, so the
    # registry is complete when the first job is processed.
    import_sibling_modules(exceptions=("common.py",))

    for job in jobs:
        # docker-worker is the only implementation with a fixed absolute
        # path for directories.
        if job["worker"]["implementation"] == "docker-worker":
            job["run"].setdefault("workdir", "/builds/worker")

        taskdesc = copy_task(job)

        # Pre-populate the fields run implementations expect to exist.
        for field, empty in (
            ("attributes", {}),
            ("dependencies", {}),
            ("if-dependencies", []),
            ("soft-dependencies", []),
            ("routes", []),
            ("scopes", []),
            ("extra", {}),
        ):
            taskdesc.setdefault(field, empty)

        # Let the run-using handler for this worker implementation fill in
        # the payload, then drop the consumed "run" section.
        configure_taskdesc_for_run(
            config, job, taskdesc, job["worker"]["implementation"]
        )
        del taskdesc["run"]

        # yield only the task description, discarding the job description
        yield taskdesc
+
+
def get_attribute(target, key, attributes, attribute_name):
    """Copy `attribute_name` from `attributes` into `target` under `key`.

    Only truthy values are copied; a missing or falsy attribute leaves
    `target` untouched.

    Note: the first parameter was previously named ``dict``, shadowing the
    builtin; it is renamed here (in-file callers pass it positionally).
    """
    value = attributes.get(attribute_name)
    if value:
        target[key] = value
+
+
@transforms.add
def use_system_python(config, jobs):
    """For jobs with `use-system-python: false`, fetch a toolchain Python
    for the worker's platform and point MOZ_PYTHON_HOME at it; all other
    jobs pass through unchanged."""
    for job in jobs:
        if job.pop("use-system-python", True):
            yield job
        else:
            fetches = job.setdefault("fetches", {})
            toolchain = fetches.setdefault("toolchain", [])

            moz_python_home = mozpath.join("fetches", "python")
            worker_os = job["worker"]["os"]
            if "win" in worker_os:
                platform = "win64"
            elif "linux" in worker_os:
                platform = "linux64"
            elif "macosx" in worker_os:
                platform = "macosx64"
            else:
                # Bug fix: this branch previously formatted the message with
                # the unbound name `platform`, raising NameError instead of
                # this ValueError.
                raise ValueError(f"unexpected worker.os value {worker_os}")

            toolchain.append(f"{platform}-python")

            worker = job.setdefault("worker", {})
            env = worker.setdefault("env", {})
            env["MOZ_PYTHON_HOME"] = moz_python_home

            yield job
+
+
+@transforms.add
+def use_fetches(config, jobs):
+    """Resolve each job's "fetches" section into the MOZ_FETCHES env var.
+
+    fetch/toolchain artifacts are looked up by task label (honouring
+    aliases); artifacts from other kinds are resolved against the
+    dependency's artifact prefix.  Scopes and taskcluster-proxy are added
+    for any non-public artifact paths, and duplicate artifact basenames
+    across dependencies are rejected.
+    """
+    artifact_names = {}
+    extra_env = {}
+    aliases = {}
+    tasks = []
+
+    # Within the toolchain/fetch kinds themselves, artifacts may come from
+    # sibling jobs in the same kind, so include those too.
+    if config.kind in ("toolchain", "fetch"):
+        jobs = list(jobs)
+        tasks.extend((config.kind, j) for j in jobs)
+
+    tasks.extend(
+        (task.kind, task.__dict__)
+        for task in config.kind_dependencies_tasks.values()
+        if task.kind in ("fetch", "toolchain")
+    )
+    for (kind, task) in tasks:
+        get_attribute(
+            artifact_names, task["label"], task["attributes"], f"{kind}-artifact"
+        )
+        get_attribute(extra_env, task["label"], task["attributes"], f"{kind}-env")
+        # An alias may be a single string or a list of strings.
+        value = task["attributes"].get(f"{kind}-alias")
+        if not value:
+            value = []
+        elif isinstance(value, str):
+            value = [value]
+        for alias in value:
+            fully_qualified = f"{kind}-{alias}"
+            label = task["label"]
+            if fully_qualified == label:
+                raise Exception(f"The alias {alias} of task {label} points to itself!")
+            aliases[fully_qualified] = label
+
+    artifact_prefixes = {}
+    # order_tasks ensures dependencies are seen before their dependents, so
+    # artifact_prefixes is populated when a later job looks up an earlier one.
+    for job in order_tasks(config, jobs):
+        artifact_prefixes[job["label"]] = get_artifact_prefix(job)
+
+        fetches = job.pop("fetches", None)
+        if not fetches:
+            yield job
+            continue
+
+        job_fetches = []
+        name = job.get("name") or job.get("label").replace(f"{config.kind}-", "")
+        dependencies = job.setdefault("dependencies", {})
+        worker = job.setdefault("worker", {})
+        env = worker.setdefault("env", {})
+        prefix = get_artifact_prefix(job)
+        has_sccache = False
+        for kind, artifacts in fetches.items():
+            if kind in ("fetch", "toolchain"):
+                # fetch/toolchain artifacts are addressed by label and add an
+                # implicit dependency on the producing task.
+                for fetch_name in artifacts:
+                    label = f"{kind}-{fetch_name}"
+                    label = aliases.get(label, label)
+                    if label not in artifact_names:
+                        raise Exception(
+                            "Missing fetch job for {kind}-{name}: {fetch}".format(
+                                kind=config.kind, name=name, fetch=fetch_name
+                            )
+                        )
+                    if label in extra_env:
+                        env.update(extra_env[label])
+
+                    path = artifact_names[label]
+
+                    dependencies[label] = label
+                    job_fetches.append(
+                        {
+                            "artifact": path,
+                            "task": f"<{label}>",
+                            "extract": True,
+                        }
+                    )
+
+                    if kind == "toolchain" and fetch_name.endswith("-sccache"):
+                        has_sccache = True
+            else:
+                # Any other kind must already be a declared dependency; its
+                # artifact paths are resolved against that task's prefix.
+                if kind not in dependencies:
+                    raise Exception(
+                        "{name} can't fetch {kind} artifacts because "
+                        "it has no {kind} dependencies!".format(name=name, kind=kind)
+                    )
+                dep_label = dependencies[kind]
+                if dep_label in artifact_prefixes:
+                    prefix = artifact_prefixes[dep_label]
+                else:
+                    if dep_label not in config.kind_dependencies_tasks:
+                        raise Exception(
+                            "{name} can't fetch {kind} artifacts because "
+                            "there are no tasks with label {label} in kind dependencies!".format(
+                                name=name,
+                                kind=kind,
+                                label=dependencies[kind],
+                            )
+                        )
+
+                    prefix = get_artifact_prefix(
+                        config.kind_dependencies_tasks[dep_label]
+                    )
+
+                for artifact in artifacts:
+                    # Artifacts are either a bare path string or a dict with
+                    # optional dest/extract/verify-hash settings.
+                    if isinstance(artifact, str):
+                        path = artifact
+                        dest = None
+                        extract = True
+                        verify_hash = False
+                    else:
+                        path = artifact["artifact"]
+                        dest = artifact.get("dest")
+                        extract = artifact.get("extract", True)
+                        verify_hash = artifact.get("verify-hash", False)
+
+                    fetch = {
+                        # A leading "/" marks the path as already absolute
+                        # (relative to the task), so skip the prefix.
+                        "artifact": f"{prefix}/{path}"
+                        if not path.startswith("/")
+                        else path[1:],
+                        "task": f"<{kind}>",
+                        "extract": extract,
+                    }
+                    if dest is not None:
+                        fetch["dest"] = dest
+                    if verify_hash:
+                        fetch["verify-hash"] = verify_hash
+                    job_fetches.append(fetch)
+
+        if job.get("use-sccache") and not has_sccache:
+            raise Exception("Must provide an sccache toolchain if using sccache.")
+
+        job_artifact_prefixes = {
+            mozpath.dirname(fetch["artifact"])
+            for fetch in job_fetches
+            if not fetch["artifact"].startswith("public/")
+        }
+        if job_artifact_prefixes:
+            # Use taskcluster-proxy and request appropriate scope. For example, add
+            # 'scopes: [queue:get-artifact:path/to/*]' for 'path/to/artifact.tar.xz'.
+            worker["taskcluster-proxy"] = True
+            for prefix in sorted(job_artifact_prefixes):
+                scope = f"queue:get-artifact:{prefix}/*"
+                if scope not in job.setdefault("scopes", []):
+                    job["scopes"].append(scope)
+
+        # Reject two fetches that would extract to the same basename.
+        artifacts = {}
+        for f in job_fetches:
+            _, __, artifact = f["artifact"].rpartition("/")
+            if "dest" in f:
+                artifact = f"{f['dest']}/{artifact}"
+            task = f["task"][1:-1]
+            if artifact in artifacts:
+                raise Exception(
+                    f"Task {name} depends on {artifacts[artifact]} and {task} "
+                    f"that both provide {artifact}"
+                )
+            artifacts[artifact] = task
+
+        # Serialize deterministically so equivalent fetch sets hash equally.
+        env["MOZ_FETCHES"] = {
+            "task-reference": json.dumps(
+                sorted(job_fetches, key=lambda x: sorted(x.items())), sort_keys=True
+            )
+        }
+        # The path is normalized to an absolute path in run-task
+        env.setdefault("MOZ_FETCHES_DIR", "fetches")
+
+        yield job
+
+
# A registry of all functions decorated with run_job_using:
# registry[run_using][worker_implementation] = (func, schema, defaults)
registry = {}


def run_job_using(worker_implementation, run_using, schema=None, defaults=None):
    """Register the decorated function as able to set up a task description for
    jobs with the given worker implementation and `run.using` property. If
    `schema` is given, the job's run field will be verified to match it.

    The decorated function should have the signature `using_foo(config, job, taskdesc)`
    and should modify the task description in-place. The skeleton of
    the task description is already set up, but without a payload."""
    # Avoid a shared mutable default argument (was `defaults={}`).
    defaults = {} if defaults is None else defaults

    def wrap(func):
        for_run_using = registry.setdefault(run_using, {})
        if worker_implementation in for_run_using:
            # Bug fix: this dict is keyed by worker implementation, but the
            # message previously indexed it with `run_using`, raising
            # KeyError instead of reporting the conflicting registration.
            raise Exception(
                "run_job_using({!r}, {!r}) already exists: {!r}".format(
                    run_using,
                    worker_implementation,
                    for_run_using[worker_implementation],
                )
            )
        for_run_using[worker_implementation] = (func, schema, defaults)
        return func

    return wrap
+
+
+@run_job_using(
+    "always-optimized", "always-optimized", Schema({"using": "always-optimized"})
+)
+def always_optimized(config, job, taskdesc):
+    # Tasks that are always optimized away never run, so no payload is needed.
+    pass
+
+
def configure_taskdesc_for_run(config, job, taskdesc, worker_implementation):
    """
    Run the appropriate function for this job against the given task
    description.

    This will raise an appropriate error if no function exists, or if the job's
    run is not valid according to the schema.
    """
    run_using = job["run"]["using"]
    handlers = registry.get(run_using)
    if handlers is None:
        raise Exception(f"no functions for run.using {run_using!r}")
    if worker_implementation not in handlers:
        raise Exception(
            "no functions for run.using {!r} on {!r}".format(
                run_using, worker_implementation
            )
        )

    func, schema, defaults = handlers[worker_implementation]
    for key, default in defaults.items():
        job["run"].setdefault(key, default)

    if schema:
        validate_schema(
            schema,
            job["run"],
            "In job.run using {!r}/{!r} for job {!r}:".format(
                job["run"]["using"], worker_implementation, job["label"]
            ),
        )
    func(config, job, taskdesc)
diff --git a/taskcluster/gecko_taskgraph/transforms/job/common.py b/taskcluster/gecko_taskgraph/transforms/job/common.py
new file mode 100644
index 0000000000..0c6289a6db
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/job/common.py
@@ -0,0 +1,269 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Common support for various job types. These functions are all named after the
+worker implementation they operate on, and take the same three parameters, for
+consistency.
+"""
+
+
+from taskgraph.util.keyed_by import evaluate_keyed_by
+from taskgraph.util.taskcluster import get_artifact_prefix
+
+# Template for the taskcluster secret scope granted to jobs that request
+# secrets (see setup_secrets).
+SECRET_SCOPE = "secrets:get:project/releng/{trust_domain}/{kind}/level-{level}/{secret}"
+
+
def add_cache(job, taskdesc, name, mount_point, skip_untrusted=False):
    """Adds a cache based on the worker's implementation.

    Args:
        job (dict): Task's job description.
        taskdesc (dict): Target task description to modify.
        name (str): Name of the cache.
        mount_point (path): Path on the host to mount the cache.
        skip_untrusted (bool): Whether cache is used in untrusted environments
            (default: False). Only applies to docker-worker.
    """
    # Jobs may opt out of caching entirely.
    if not job["run"].get("use-caches", True):
        return

    implementation = job["worker"]["implementation"]

    if implementation == "docker-worker":
        entry = {
            "type": "persistent",
            "name": name,
            "mount-point": mount_point,
            "skip-untrusted": skip_untrusted,
        }
        taskdesc["worker"].setdefault("caches", []).append(entry)
    elif implementation == "generic-worker":
        entry = {
            "cache-name": name,
            "directory": mount_point,
        }
        taskdesc["worker"].setdefault("mounts", []).append(entry)
    # Other implementations do not support caches.
+
+
def add_artifacts(config, job, taskdesc, path):
    """Declare *path* as the task's artifact directory, published under the
    task's artifact prefix."""
    artifact = {
        "name": get_artifact_prefix(taskdesc),
        "path": path,
        "type": "directory",
    }
    taskdesc["worker"].setdefault("artifacts", []).append(artifact)
+
+
def docker_worker_add_artifacts(config, job, taskdesc):
    """Adds an artifact directory to the task"""
    # docker-worker uploads from an absolute path under the run workdir.
    upload_dir = "{workdir}/artifacts/".format(**job["run"])
    taskdesc["worker"].setdefault("env", {})["UPLOAD_DIR"] = upload_dir
    add_artifacts(config, job, taskdesc, upload_dir)
+
+
def generic_worker_add_artifacts(config, job, taskdesc):
    """Adds an artifact directory to the task"""
    # On generic-worker the on-disk path mirrors the artifact prefix; public
    # vs. private is decided by the name set in add_artifacts.
    upload_dir = get_artifact_prefix(taskdesc)
    taskdesc["worker"].setdefault("env", {})["UPLOAD_DIR"] = upload_dir
    add_artifacts(config, job, taskdesc, path=upload_dir)
+
+
def support_vcs_checkout(config, job, taskdesc, sparse=False):
    """Update a job/task with parameters to enable a VCS checkout.

    This can only be used with ``run-task`` tasks, as the cache name is
    reserved for ``run-task`` tasks.
    """
    worker = job["worker"]
    is_mac = worker["os"] == "macosx"
    is_win = worker["os"] == "windows"
    # Bug fix: this was `worker["os"] == "linux" or "linux-bitbar"`, which is
    # always truthy and made the assertion below pass for any OS.
    is_linux = worker["os"] in ("linux", "linux-bitbar")
    is_docker = worker["implementation"] == "docker-worker"
    assert is_mac or is_win or is_linux

    if is_win:
        checkoutdir = "./build"
        geckodir = f"{checkoutdir}/src"
        hgstore = "y:/hg-shared"
    elif is_docker:
        checkoutdir = "{workdir}/checkouts".format(**job["run"])
        geckodir = f"{checkoutdir}/gecko"
        hgstore = f"{checkoutdir}/hg-store"
    else:
        checkoutdir = "./checkouts"
        geckodir = f"{checkoutdir}/gecko"
        hgstore = f"{checkoutdir}/hg-shared"

    cache_name = "checkouts"

    # Sparse checkouts need their own cache because they can interfere
    # with clients that aren't sparse aware.
    if sparse:
        cache_name += "-sparse"

    # Workers using Mercurial >= 5.8 will enable revlog-compression-zstd, which
    # workers using older versions can't understand, so they can't share cache.
    # At the moment, only docker workers use the newer version.
    if is_docker:
        cache_name += "-hg58"

    add_cache(job, taskdesc, cache_name, checkoutdir)

    taskdesc["worker"].setdefault("env", {}).update(
        {
            "GECKO_BASE_REPOSITORY": config.params["base_repository"],
            "GECKO_HEAD_REPOSITORY": config.params["head_repository"],
            "GECKO_HEAD_REV": config.params["head_rev"],
            "HG_STORE_PATH": hgstore,
        }
    )
    taskdesc["worker"]["env"].setdefault("GECKO_PATH", geckodir)

    if "comm_base_repository" in config.params:
        taskdesc["worker"]["env"].update(
            {
                "COMM_BASE_REPOSITORY": config.params["comm_base_repository"],
                "COMM_HEAD_REPOSITORY": config.params["comm_head_repository"],
                "COMM_HEAD_REV": config.params["comm_head_rev"],
            }
        )
    elif job["run"].get("comm-checkout", False):
        raise Exception(
            "Can't checkout from comm-* repository if not given a repository."
        )

    # Give task access to hgfingerprint secret so it can pin the certificate
    # for hg.mozilla.org.
    taskdesc["scopes"].append("secrets:get:project/taskcluster/gecko/hgfingerprint")
    taskdesc["scopes"].append("secrets:get:project/taskcluster/gecko/hgmointernal")

    # only some worker platforms have taskcluster-proxy enabled
    if job["worker"]["implementation"] in ("docker-worker",):
        taskdesc["worker"]["taskcluster-proxy"] = True
+
+
def generic_worker_hg_commands(
    base_repo, head_repo, head_rev, path, sparse_profile=None
):
    """Obtain commands needed to obtain a Mercurial checkout on generic-worker.

    Returns two command strings. One performs the checkout. Another logs.
    """
    args = [
        r'"c:\Program Files\Mercurial\hg.exe"',
        "robustcheckout",
        "--sharebase",
        r"y:\hg-shared",
        "--purge",
        "--upstream",
        base_repo,
        "--revision",
        head_rev,
    ]

    if sparse_profile:
        args.extend(["--config", "extensions.sparse="])
        args.extend(["--sparseprofile", sparse_profile])

    args.extend(
        [
            head_repo,
            path,
        ]
    )

    # Bug fix: these were bytes literals, but bytes objects have no .format()
    # method in Python 3, so building the logging command raised
    # AttributeError.  Use str literals instead.
    logging_args = [
        ":: TinderboxPrint:<a href={source_repo}/rev/{revision} "
        "title='Built from {repo_name} revision {revision}'>{revision}</a>"
        "\n".format(
            revision=head_rev, source_repo=head_repo, repo_name=head_repo.split("/")[-1]
        ),
    ]

    return [" ".join(args), " ".join(logging_args)]
+
+
def setup_secrets(config, job, taskdesc):
    """Set up access to secrets via taskcluster-proxy. The value of
    run['secrets'] should be a boolean or a list of secret names that
    can be accessed."""
    secrets = job["run"].get("secrets")
    if not secrets:
        return

    taskdesc["worker"]["taskcluster-proxy"] = True
    # `secrets: true` grants the wildcard secret for this kind/level.
    secret_names = ["*"] if secrets is True else secrets
    taskdesc["scopes"].extend(
        SECRET_SCOPE.format(
            trust_domain=config.graph_config["trust-domain"],
            kind=job["treeherder"]["kind"],
            level=config.params["level"],
            secret=secret,
        )
        for secret in secret_names
    )
+
+
def add_tooltool(config, job, taskdesc, internal=False):
    """Give the task access to tooltool.

    Enables the tooltool cache. Adds releng proxy. Configures scopes.

    By default, only public tooltool access will be granted. Access to internal
    tooltool can be enabled via ``internal=True``.

    This can only be used with ``run-task`` tasks, as the cache name is
    reserved for use with ``run-task``.
    """
    if job["worker"]["implementation"] in ("docker-worker",):
        cache_dir = "{workdir}/tooltool-cache".format(**job["run"])
        add_cache(job, taskdesc, "tooltool-cache", cache_dir)
        taskdesc["worker"].setdefault("env", {})["TOOLTOOL_CACHE"] = cache_dir
    elif not internal:
        # Non-docker workers only get tooltool access when internal access
        # is explicitly requested.
        return

    taskdesc["worker"]["taskcluster-proxy"] = True
    taskdesc["scopes"].append(
        "project:releng:services/tooltool/api/download/public"
    )

    if internal:
        taskdesc["scopes"].append(
            "project:releng:services/tooltool/api/download/internal"
        )
+
+
def get_expiration(config, policy="default"):
    """Look up the artifact expiration for *policy* from the graph config's
    expiration-policy table, keyed by project."""
    policies = evaluate_keyed_by(
        config.graph_config["expiration-policy"],
        "artifact expiration",
        {"project": config.params["project"]},
    )
    return policies[policy]
diff --git a/taskcluster/gecko_taskgraph/transforms/job/distro_package.py b/taskcluster/gecko_taskgraph/transforms/job/distro_package.py
new file mode 100644
index 0000000000..44236b1abc
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/job/distro_package.py
@@ -0,0 +1,238 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Support for building Debian and Ubuntu packages via dedicated scripts
+"""
+
+
+import os
+import re
+
+import taskgraph
+from taskgraph.util.schema import Schema
+from taskgraph.util.taskcluster import get_root_url
+from voluptuous import Any, Optional, Required
+
+from gecko_taskgraph import GECKO
+from gecko_taskgraph.transforms.job import run_job_using
+from gecko_taskgraph.transforms.job.common import add_artifacts
+from gecko_taskgraph.util.hash import hash_path
+
# A .dsc file is named "<package>_<version>.dsc"; the package name is
# everything up to the first underscore.
DSC_PACKAGE_RE = re.compile(".*(?=_)")
# A source tarball is named "<package>[-_]<version>..."; the package name is
# everything before the first "-" or "_" that is followed by a digit.
SOURCE_PACKAGE_RE = re.compile(r".*(?=[-_]\d)")

# Where to fetch the package source, and the checksum used to validate it.
source_definition = {
    Required("url"): str,
    Required("sha256"): str,
}

# Keys shared by the debian-package and ubuntu-package run schemas.
common_schema = Schema(
    {
        # URL/SHA256 of a source file to build, which can either be a source
        # control (.dsc), or a tarball.
        Required(Any("dsc", "tarball")): source_definition,
        # Package name. Normally derived from the source control or tarball file
        # name. Use in case the name doesn't match DSC_PACKAGE_RE or
        # SOURCE_PACKAGE_RE.
        Optional("name"): str,
        # Patch to apply to the extracted source.
        Optional("patch"): str,
        # Command to run before dpkg-buildpackage.
        Optional("pre-build-command"): str,
        # Architecture to build the package for.
        Optional("arch"): str,
        # List of package tasks to get build dependencies from.
        Optional("packages"): [str],
        # What resolver to use to install build dependencies. The default
        # (apt-get) is good in most cases, but in subtle cases involving
        # a *-backports archive, its solver might not be able to find a
        # solution that satisfies the build dependencies.
        Optional("resolver"): Any("apt-get", "aptitude"),
        # Base work directory used to set up the task.
        Required("workdir"): str,
    }
)

debian_schema = common_schema.extend(
    {
        Required("using"): "debian-package",
        # Debian distribution
        Required("dist"): str,
    }
)

ubuntu_schema = common_schema.extend(
    {
        Required("using"): "ubuntu-package",
        # Ubuntu distribution
        Required("dist"): str,
    }
)
+
+
def common_package(config, job, taskdesc, distro, version):
    """Fill in *taskdesc* for a task that builds a Debian-family package.

    Shared implementation behind the debian-package and ubuntu-package run
    handlers. *distro*/*version* select the in-tree
    ``<distro><version>[-<arch>]-packages`` docker image the build runs in.
    The whole build is expressed as one shell command that downloads,
    verifies, unpacks, and builds the source, then publishes the results as
    an APT-repository-shaped artifacts directory.
    """
    run = job["run"]

    # Task label minus the kind prefix; reused below as the cache name and
    # as the debchange version suffix prefix.
    name = taskdesc["label"].replace(f"{config.kind}-", "", 1)

    arch = run.get("arch", "amd64")

    worker = taskdesc["worker"]
    worker.setdefault("artifacts", [])

    image = "%s%d" % (distro, version)
    if arch != "amd64":
        image += "-" + arch
    image += "-packages"
    worker["docker-image"] = {"in-tree": image}

    add_artifacts(config, job, taskdesc, path="/tmp/artifacts")

    env = worker.setdefault("env", {})
    env["DEBFULLNAME"] = "Mozilla build team"
    env["DEBEMAIL"] = "dev-builds@lists.mozilla.org"

    # Choose the unpack command and the regexp that derives the package name
    # from the source file name; the schema guarantees exactly one of
    # dsc/tarball is present.
    if "dsc" in run:
        src = run["dsc"]
        unpack = "dpkg-source -x {src_file} {package}"
        package_re = DSC_PACKAGE_RE
    elif "tarball" in run:
        src = run["tarball"]
        unpack = (
            "mkdir {package} && "
            "tar -C {package} -axf {src_file} --strip-components=1"
        )
        package_re = SOURCE_PACKAGE_RE
    else:
        raise RuntimeError("Unreachable")
    src_url = src["url"]
    src_file = os.path.basename(src_url)
    src_sha256 = src["sha256"]
    package = run.get("name")
    if not package:
        package = package_re.match(src_file).group(0)
    unpack = unpack.format(src_file=src_file, package=package)

    # Expand the resolver shorthand into the command mk-build-deps will use
    # to install build dependencies.
    resolver = run.get("resolver", "apt-get")
    if resolver == "apt-get":
        resolver = "apt-get -yyq --no-install-recommends"
    elif resolver == "aptitude":
        resolver = (
            "aptitude -y --without-recommends -o "
            "Aptitude::ProblemResolver::Hints::KeepBuildDeps="
            '"reject {}-build-deps :UNINST"'
        ).format(package)
    else:
        raise RuntimeError("Unreachable")

    # Shell fragment run inside the extracted source before building;
    # each appended piece ends with "&& " so it chains cleanly.
    adjust = ""
    if "patch" in run:
        # We don't use robustcheckout or run-task to get a checkout. So for
        # this one file we'd need from a checkout, download it.
        env["PATCH_URL"] = config.params.file_url(
            "build/debian-packages/{patch}".format(patch=run["patch"]),
        )
        adjust += "curl -sL $PATCH_URL | patch -p1 && "
    if "pre-build-command" in run:
        adjust += run["pre-build-command"] + " && "
    if "tarball" in run:
        # dpkg-buildpackage expects the pristine tarball next to the source
        # tree, named <package>_<upstream-version>.orig.tar.gz.
        adjust += "mv ../{src_file} ../{package}_{ver}.orig.tar.gz && ".format(
            src_file=src_file,
            package=package,
            ver="$(dpkg-parsechangelog | awk '$1==\"Version:\"{print $2}' | cut -f 1 -d -)",
        )
    if "patch" not in run and "pre-build-command" not in run:
        # Unmodified source: add a ".<prefix>moz" changelog entry so the
        # rebuilt package version is distinguishable from the distro's.
        adjust += (
            'debchange -l ".{prefix}moz" --distribution "{dist}"'
            ' "Mozilla backport for {dist}." < /dev/null && '
        ).format(
            prefix=name.split("-", 1)[0],
            dist=run["dist"],
        )

    worker["command"] = [
        "sh",
        "-x",
        "-c",
        # Add sources for packages coming from other package tasks.
        "/usr/local/sbin/setup_packages.sh {root_url} $PACKAGES && "
        "apt-get update && "
        # Upgrade packages that might have new versions in package tasks.
        "apt-get dist-upgrade && " "cd /tmp && "
        # Get, validate and extract the package source.
        "(dget -d -u {src_url} || exit 100) && "
        'echo "{src_sha256} {src_file}" | sha256sum -c && '
        "{unpack} && "
        "cd {package} && "
        # Optionally apply patch and/or pre-build command.
        "{adjust}"
        # Install the necessary build dependencies.
        "(cd ..; mk-build-deps -i -r {package}/debian/control -t '{resolver}' || exit 100) && "
        # Build the package
        'DEB_BUILD_OPTIONS="parallel=$(nproc) nocheck" dpkg-buildpackage -sa && '
        # Copy the artifacts
        "mkdir -p {artifacts}/apt && "
        "dcmd cp ../{package}_*.changes {artifacts}/apt/ && "
        "cd {artifacts} && "
        # Make the artifacts directory usable as an APT repository.
        "apt-ftparchive sources apt | gzip -c9 > apt/Sources.gz && "
        "apt-ftparchive packages apt | gzip -c9 > apt/Packages.gz".format(
            root_url=get_root_url(False),
            package=package,
            src_url=src_url,
            src_file=src_file,
            src_sha256=src_sha256,
            unpack=unpack,
            adjust=adjust,
            artifacts="/tmp/artifacts",
            resolver=resolver,
        ),
    ]

    # Wire up dependencies on other package tasks; setup_packages.sh reads
    # the (task-reference-resolved) task ids from $PACKAGES.
    if run.get("packages"):
        env = worker.setdefault("env", {})
        env["PACKAGES"] = {
            "task-reference": " ".join(f"<{p}>" for p in run["packages"])
        }
        deps = taskdesc.setdefault("dependencies", {})
        for p in run["packages"]:
            deps[p] = f"packages-{p}"

    # Use the command generated above as the base for the index hash.
    # We rely on it not varying depending on the head_repository or head_rev.
    digest_data = list(worker["command"])
    if "patch" in run:
        digest_data.append(
            hash_path(os.path.join(GECKO, "build", "debian-packages", run["patch"]))
        )

    # Skip the task-cache optimization when taskgraph fast mode is enabled.
    if not taskgraph.fast:
        taskdesc["cache"] = {
            "type": "packages.v1",
            "name": name,
            "digest-data": digest_data,
        }
+
+
@run_job_using("docker-worker", "debian-package", schema=debian_schema)
def docker_worker_debian_package(config, job, taskdesc):
    """Build a Debian package for the distribution named in run.dist."""
    dist_to_version = {
        "wheezy": 7,
        "jessie": 8,
        "stretch": 9,
        "buster": 10,
        "bullseye": 11,
    }
    dist = job["run"]["dist"]
    common_package(config, job, taskdesc, "debian", dist_to_version[dist])
+
+
@run_job_using("docker-worker", "ubuntu-package", schema=ubuntu_schema)
def docker_worker_ubuntu_package(config, job, taskdesc):
    """Build an Ubuntu package for the distribution named in run.dist."""
    dist_to_version = {
        "bionic": 1804,
        "focal": 2004,
    }
    dist = job["run"]["dist"]
    common_package(config, job, taskdesc, "ubuntu", dist_to_version[dist])
diff --git a/taskcluster/gecko_taskgraph/transforms/job/hazard.py b/taskcluster/gecko_taskgraph/transforms/job/hazard.py
new file mode 100644
index 0000000000..af0e8616e0
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/job/hazard.py
@@ -0,0 +1,66 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Support for running hazard jobs via dedicated scripts
+"""
+
+
+from taskgraph.util.schema import Schema
+from voluptuous import Any, Optional, Required
+
+from gecko_taskgraph.transforms.job import configure_taskdesc_for_run, run_job_using
+from gecko_taskgraph.transforms.job.common import (
+ add_tooltool,
+ docker_worker_add_artifacts,
+ setup_secrets,
+)
+
# Schema for `run.using: hazard`; validated by run_job_using before
# docker_worker_hazard below runs.
haz_run_schema = Schema(
    {
        Required("using"): "hazard",
        # The command to run within the task image (passed through to the worker)
        Required("command"): str,
        # The mozconfig to use; default in the script is used if omitted
        Optional("mozconfig"): str,
        # The set of secret names to which the task has access; these are prefixed
        # with `project/releng/gecko/{treeherder.kind}/level-{level}/`. Setting
        # this will enable any worker features required and set the task's scopes
        # appropriately. `true` here means ['*'], all secrets. Not supported on
        # Windows
        Optional("secrets"): Any(bool, [str]),
        # Base work directory used to set up the task.
        Optional("workdir"): str,
    }
)
+
+
@run_job_using("docker-worker", "hazard", schema=haz_run_schema)
def docker_worker_hazard(config, job, taskdesc):
    """Configure a hazard-analysis task, then delegate to run-task.

    Sets up artifacts, the workspace volume, tooltool access, secrets, and
    the environment the analysis script reads; the command itself comes from
    the job description.
    """
    run = job["run"]

    worker = taskdesc["worker"] = job["worker"]
    worker.setdefault("artifacts", [])

    docker_worker_add_artifacts(config, job, taskdesc)
    # The analysis writes into the workspace, so it must be a mounted volume.
    worker.setdefault("required-volumes", []).append(
        "{workdir}/workspace".format(**run)
    )
    add_tooltool(config, job, taskdesc)
    setup_secrets(config, job, taskdesc)

    env = worker["env"]
    env.update(
        {
            "MOZ_BUILD_DATE": config.params["moz_build_date"],
            "MOZ_SCM_LEVEL": config.params["level"],
        }
    )

    # script parameters
    if run.get("mozconfig"):
        env["MOZCONFIG"] = run.pop("mozconfig")

    # Hand the job over to the run-task implementation.
    run["using"] = "run-task"
    run["cwd"] = run["workdir"]
    configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
diff --git a/taskcluster/gecko_taskgraph/transforms/job/mach.py b/taskcluster/gecko_taskgraph/transforms/job/mach.py
new file mode 100644
index 0000000000..a418b44794
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/job/mach.py
@@ -0,0 +1,83 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Support for running mach tasks (via run-task)
+"""
+
+from taskgraph.util.schema import Schema, taskref_or_string
+from voluptuous import Any, Optional, Required
+
+from gecko_taskgraph.transforms.job import configure_taskdesc_for_run, run_job_using
+
# Schema for `run.using: mach`; validated by run_job_using before
# configure_mach below runs.
mach_schema = Schema(
    {
        Required("using"): "mach",
        # The mach command (omitting `./mach`) to run
        Required("mach"): taskref_or_string,
        # The version of Python to run with. Either an absolute path to the binary
        # on the worker, a version identifier (e.g python2.7 or 3.8). There is no
        # validation performed to ensure the specified binaries actually exist.
        Optional("python-version"): Any(str, int, float),
        # The sparse checkout profile to use. Value is the filename relative to the
        # directory where sparse profiles are defined (build/sparse-profiles/).
        Optional("sparse-profile"): Any(str, None),
        # if true, perform a checkout of a comm-central based branch inside the
        # gecko checkout
        Required("comm-checkout"): bool,
        # Base work directory used to set up the task.
        Optional("workdir"): str,
        # Context to substitute into the command using format string
        # substitution (e.g {value}). This is useful if certain aspects of the
        # command need to be generated in transforms.
        Optional("command-context"): dict,
    }
)


# Defaults merged into the run description before schema validation.
defaults = {
    "comm-checkout": False,
}
+
+
@run_job_using("docker-worker", "mach", schema=mach_schema, defaults=defaults)
@run_job_using("generic-worker", "mach", schema=mach_schema, defaults=defaults)
def configure_mach(config, job, taskdesc):
    """Rewrite a `mach` run description into an equivalent `run-task` one."""
    run = job["run"]
    worker = job["worker"]
    on_macosx = worker["os"] == "macosx"

    # Pieces prepended before `./mach`: locale exports on macOS, plus an
    # explicit python interpreter when one was requested.
    prefix_parts = ["LC_ALL=en_US.UTF-8", "LANG=en_US.UTF-8"] if on_macosx else []

    requested = run.get("python-version")
    if requested:
        del run["python-version"]

        if on_macosx and requested == 3:
            requested = "/usr/local/bin/python3"

        interpreter = str(requested)
        try:
            # A bare version number ("2.7", "3.8", 3) becomes "pythonX[.Y]";
            # anything non-numeric is assumed to already name a binary.
            float(interpreter)
            interpreter = "python" + interpreter
        except ValueError:
            pass

        prefix_parts.append(interpreter)

    command_prefix = " ".join(prefix_parts + ["./mach "])

    mach = run["mach"]
    if isinstance(mach, dict):
        # task-reference/artifact-reference wrapper: keep the wrapper key,
        # prefix the inner command string.
        key, inner = next(iter(mach.items()))
        command = {key: command_prefix + inner}
    else:
        command = command_prefix + mach

    # defer to the run_task implementation
    run["command"] = command
    run["cwd"] = "{checkout}"
    run["using"] = "run-task"
    del run["mach"]
    configure_taskdesc_for_run(config, job, taskdesc, job["worker"]["implementation"])
diff --git a/taskcluster/gecko_taskgraph/transforms/job/mozharness.py b/taskcluster/gecko_taskgraph/transforms/job/mozharness.py
new file mode 100644
index 0000000000..3dbcc6e015
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/job/mozharness.py
@@ -0,0 +1,366 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+
+Support for running jobs via mozharness. Ideally, most stuff gets run this
+way, and certainly anything using mozharness should use this approach.
+
+"""
+
+import json
+from textwrap import dedent
+
+from mozpack import path as mozpath
+from taskgraph.util.schema import Schema
+from voluptuous import Any, Optional, Required
+from voluptuous.validators import Match
+
+from gecko_taskgraph.transforms.job import configure_taskdesc_for_run, run_job_using
+from gecko_taskgraph.transforms.job.common import (
+ docker_worker_add_artifacts,
+ generic_worker_add_artifacts,
+ get_expiration,
+ setup_secrets,
+)
+from gecko_taskgraph.transforms.task import get_branch_repo, get_branch_rev
+from gecko_taskgraph.util.attributes import is_try
+
# Schema for `run.using: mozharness`; validated by run_job_using before the
# docker-worker/generic-worker handlers below run.
mozharness_run_schema = Schema(
    {
        Required("using"): "mozharness",
        # the mozharness script used to run this task, relative to the testing/
        # directory and using forward slashes even on Windows
        Required("script"): str,
        # Additional paths to look for mozharness configs in. These should be
        # relative to the base of the source checkout
        Optional("config-paths"): [str],
        # the config files required for the task, relative to
        # testing/mozharness/configs or one of the paths specified in
        # `config-paths` and using forward slashes even on Windows
        Required("config"): [str],
        # any additional actions to pass to the mozharness command
        Optional("actions"): [
            Match("^[a-z0-9-]+$", "actions must be `-` separated alphanumeric strings")
        ],
        # any additional options (without leading --) to be passed to mozharness
        Optional("options"): [
            Match(
                "^[a-z0-9-]+(=[^ ]+)?$",
                "options must be `-` separated alphanumeric strings (with optional argument)",
            )
        ],
        # --custom-build-variant-cfg value
        Optional("custom-build-variant-cfg"): str,
        # Extra configuration options to pass to mozharness.
        Optional("extra-config"): dict,
        # If not false, tooltool downloads will be enabled via relengAPIProxy
        # for either just public files, or all files. Not supported on Windows
        Required("tooltool-downloads"): Any(
            False,
            "public",
            "internal",
        ),
        # The set of secret names to which the task has access; these are prefixed
        # with `project/releng/gecko/{treeherder.kind}/level-{level}/`. Setting
        # this will enable any worker features required and set the task's scopes
        # appropriately. `true` here means ['*'], all secrets. Not supported on
        # Windows
        Required("secrets"): Any(bool, [str]),
        # If true, taskcluster proxy will be enabled; note that it may also be enabled
        # automatically e.g., for secrets support. Not supported on Windows.
        Required("taskcluster-proxy"): bool,
        # If false, indicate that builds should skip producing artifacts. Not
        # supported on Windows.
        Required("keep-artifacts"): bool,
        # If specified, use the in-tree job script specified.
        Optional("job-script"): str,
        Required("requires-signed-builds"): bool,
        # Whether or not to use caches.
        Optional("use-caches"): bool,
        # If false, don't set MOZ_SIMPLE_PACKAGE_NAME
        # Only disableable on windows
        Required("use-simple-package"): bool,
        # If false don't pass --branch mozharness script
        # Only disableable on windows
        Required("use-magic-mh-args"): bool,
        # if true, perform a checkout of a comm-central based branch inside the
        # gecko checkout
        Required("comm-checkout"): bool,
        # Base work directory used to set up the task.
        Optional("workdir"): str,
        Optional("run-as-root"): bool,
    }
)


# Defaults merged into the run description before schema validation.
mozharness_defaults = {
    "tooltool-downloads": False,
    "secrets": False,
    "taskcluster-proxy": False,
    "keep-artifacts": True,
    "requires-signed-builds": False,
    "use-simple-package": True,
    "use-magic-mh-args": True,
    "comm-checkout": False,
    "run-as-root": False,
}
+
+
@run_job_using(
    "docker-worker",
    "mozharness",
    schema=mozharness_run_schema,
    defaults=mozharness_defaults,
)
def mozharness_on_docker_worker_setup(config, job, taskdesc):
    """Configure a mozharness build on docker-worker, then delegate to run-task.

    Sets up the docker image, log/build artifacts and the environment
    variables the in-tree build script reads, and rejects run options that
    are only meaningful on generic-worker.
    """
    run = job["run"]

    worker = taskdesc["worker"] = job["worker"]

    # These two options exist only for Windows/generic-worker; on
    # docker-worker they must stay at their default (true) values.
    if not run.pop("use-simple-package", None):
        raise NotImplementedError(
            "Simple packaging cannot be disabled via "
            "'use-simple-package' on docker-workers"
        )
    if not run.pop("use-magic-mh-args", None):
        raise NotImplementedError(
            "Cannot disable mh magic arg passing via "
            "'use-magic-mh-args' on docker-workers"
        )

    # Running via mozharness assumes an image that contains build.sh:
    # by default, debian11-amd64-build, but it could be another image (like
    # android-build).
    worker.setdefault("docker-image", {"in-tree": "debian11-amd64-build"})

    worker.setdefault("artifacts", []).append(
        {
            "name": "public/logs",
            "path": "{workdir}/logs/".format(**run),
            "type": "directory",
            "expires-after": get_expiration(config, "medium"),
        }
    )
    worker["taskcluster-proxy"] = run.pop("taskcluster-proxy", None)
    docker_worker_add_artifacts(config, job, taskdesc)

    # Environment consumed by build.sh / the mozharness script.
    env = worker.setdefault("env", {})
    env.update(
        {
            "WORKSPACE": "{workdir}/workspace".format(**run),
            "MOZHARNESS_CONFIG": " ".join(run.pop("config")),
            "MOZHARNESS_SCRIPT": run.pop("script"),
            "MH_BRANCH": config.params["project"],
            "MOZ_SOURCE_CHANGESET": get_branch_rev(config),
            "MOZ_SOURCE_REPO": get_branch_repo(config),
            "MH_BUILD_POOL": "taskcluster",
            "MOZ_BUILD_DATE": config.params["moz_build_date"],
            "MOZ_SCM_LEVEL": config.params["level"],
            "PYTHONUNBUFFERED": "1",
        }
    )

    worker.setdefault("required-volumes", []).append(env["WORKSPACE"])

    if "actions" in run:
        env["MOZHARNESS_ACTIONS"] = " ".join(run.pop("actions"))

    if "options" in run:
        env["MOZHARNESS_OPTIONS"] = " ".join(run.pop("options"))

    if "config-paths" in run:
        env["MOZHARNESS_CONFIG_PATHS"] = " ".join(run.pop("config-paths"))

    if "custom-build-variant-cfg" in run:
        env["MH_CUSTOM_BUILD_VARIANT_CFG"] = run.pop("custom-build-variant-cfg")

    extra_config = run.pop("extra-config", {})
    extra_config["objdir"] = "obj-build"
    env["EXTRA_MOZHARNESS_CONFIG"] = json.dumps(extra_config, sort_keys=True)

    if "job-script" in run:
        env["JOB_SCRIPT"] = run["job-script"]

    if is_try(config.params):
        env["TRY_COMMIT_MSG"] = config.params["message"]

    # if we're not keeping artifacts, set some env variables to empty values
    # that will cause the build process to skip copying the results to the
    # artifacts directory. This will have no effect for operations that are
    # not builds.
    if not run.pop("keep-artifacts"):
        env["DIST_TARGET_UPLOADS"] = ""
        env["DIST_UPLOADS"] = ""

    # Retry if mozharness returns TBPL_RETRY
    worker["retry-exit-status"] = [4]

    setup_secrets(config, job, taskdesc)

    # Hand off to run-task, which invokes the in-tree build script.
    run["using"] = "run-task"
    run["command"] = mozpath.join(
        "${GECKO_PATH}",
        run.pop("job-script", "taskcluster/scripts/builder/build-linux.sh"),
    )
    # Already consumed above / by setup_secrets; run-task doesn't know them.
    run.pop("secrets")
    run.pop("requires-signed-builds")

    configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
+
+
@run_job_using(
    "generic-worker",
    "mozharness",
    schema=mozharness_run_schema,
    defaults=mozharness_defaults,
)
def mozharness_on_generic_worker(config, job, taskdesc):
    """Configure a mozharness build on generic-worker (Windows/macOS), then
    delegate to run-task.

    Builds the full mozharness command line from the run description; on
    Windows it may additionally prepend sccache path-normalization setup
    commands after run-task configuration.
    """
    assert job["worker"]["os"] in (
        "windows",
        "macosx",
    ), "only supports windows and macOS right now: {}".format(job["label"])

    run = job["run"]

    # fail if invalid run options are included
    invalid = []
    if not run.pop("keep-artifacts", True):
        invalid.append("keep-artifacts")
    if invalid:
        raise Exception(
            "Jobs run using mozharness on Windows do not support properties "
            + ", ".join(invalid)
        )

    worker = taskdesc["worker"] = job["worker"]

    worker["taskcluster-proxy"] = run.pop("taskcluster-proxy", None)

    setup_secrets(config, job, taskdesc)

    taskdesc["worker"].setdefault("artifacts", []).append(
        {
            "name": "public/logs",
            "path": "logs",
            "type": "directory",
            "expires-after": get_expiration(config, "medium"),
        }
    )

    if not worker.get("skip-artifacts", False):
        generic_worker_add_artifacts(config, job, taskdesc)

    # Environment consumed by the mozharness script.
    env = worker.setdefault("env", {})
    env.update(
        {
            "MOZ_BUILD_DATE": config.params["moz_build_date"],
            "MOZ_SCM_LEVEL": config.params["level"],
            "MH_BRANCH": config.params["project"],
            "MOZ_SOURCE_CHANGESET": get_branch_rev(config),
            "MOZ_SOURCE_REPO": get_branch_repo(config),
        }
    )
    if run.pop("use-simple-package"):
        env.update({"MOZ_SIMPLE_PACKAGE_NAME": "target"})

    extra_config = run.pop("extra-config", {})
    extra_config["objdir"] = "obj-build"
    env["EXTRA_MOZHARNESS_CONFIG"] = json.dumps(extra_config, sort_keys=True)

    # The windows generic worker uses batch files to pass environment variables
    # to commands. Setting a variable to empty in a batch file unsets, so if
    # there is no `TRY_COMMIT_MESSAGE`, pass a space instead, so that
    # mozharness doesn't try to find the commit message on its own.
    if is_try(config.params):
        env["TRY_COMMIT_MSG"] = config.params["message"] or "no commit message"

    if not job["attributes"]["build_platform"].startswith(("win", "macosx")):
        raise Exception(
            "Task generation for mozharness build jobs currently only supported on "
            "Windows and macOS"
        )

    # Platform-specific pieces of the command line: Windows uses batch-style
    # variable expansion (%VAR%), macOS uses shell-style ($VAR).
    mh_command = []
    if job["worker"]["os"] == "windows":
        system_python_dir = "c:/mozilla-build/python3/"
        gecko_path = "%GECKO_PATH%"
    else:
        system_python_dir = ""
        gecko_path = "$GECKO_PATH"

    # NOTE(review): "use-system-python" is not declared in
    # mozharness_run_schema; presumably it is injected/stripped by an earlier
    # transform -- confirm before relying on it here.
    if run.get("use-system-python", True):
        python_bindir = system_python_dir
    else:
        # $MOZ_PYTHON_HOME is going to be substituted in run-task, when we
        # know the actual MOZ_PYTHON_HOME value.
        is_windows = job["worker"]["os"] == "windows"
        if is_windows:
            python_bindir = "%MOZ_PYTHON_HOME%/"
        else:
            python_bindir = "${MOZ_PYTHON_HOME}/bin/"

    mh_command = ["{}python3".format(python_bindir)]

    mh_command += [
        f"{gecko_path}/mach",
        "python",
        "{}/testing/{}".format(gecko_path, run.pop("script")),
    ]

    for path in run.pop("config-paths", []):
        mh_command.append(f"--extra-config-path {gecko_path}/{path}")

    for cfg in run.pop("config"):
        mh_command.extend(("--config", cfg))
    if run.pop("use-magic-mh-args"):
        mh_command.extend(("--branch", config.params["project"]))
    if job["worker"]["os"] == "windows":
        # Remap the Z:-drive task dir to z:\workspace (see sccache note below).
        mh_command.extend(("--work-dir", r"%cd:Z:=z:%\workspace"))
    for action in run.pop("actions", []):
        mh_command.append("--" + action)

    for option in run.pop("options", []):
        mh_command.append("--" + option)
    if run.get("custom-build-variant-cfg"):
        mh_command.append("--custom-build-variant")
        mh_command.append(run.pop("custom-build-variant-cfg"))

    if job["worker"]["os"] == "macosx":
        # Ideally, we'd use shellutil.quote, but that would single-quote
        # $GECKO_PATH, which would defeat having the variable in the command
        # in the first place, as it wouldn't be expanded.
        # In practice, arguments are expected not to contain characters that
        # would require quoting.
        mh_command = " ".join(mh_command)

    run["using"] = "run-task"
    run["command"] = mh_command
    # Already consumed above / by setup_secrets; run-task doesn't know them.
    run.pop("secrets")
    run.pop("requires-signed-builds")
    run.pop("job-script", None)
    configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])

    # Everything past this point is Windows-specific.
    if job["worker"]["os"] == "macosx":
        return

    if taskdesc.get("use-sccache"):
        worker["command"] = (
            [
                # Make the comment part of the first command, as it will help users to
                # understand what is going on, and why these steps are implemented.
                dedent(
                    """\
                    :: sccache currently uses the full compiler commandline as input to the
                    :: cache hash key, so create a symlink to the task dir and build from
                    :: the symlink dir to get consistent paths.
                    if exist z:\\build rmdir z:\\build"""
                ),
            r"mklink /d z:\build %cd%",
            # Grant delete permission on the link to everyone.
            r"icacls z:\build /grant *S-1-1-0:D /L",
            r"cd /d z:\build",
        ]
        + worker["command"]
    )
diff --git a/taskcluster/gecko_taskgraph/transforms/job/mozharness_test.py b/taskcluster/gecko_taskgraph/transforms/job/mozharness_test.py
new file mode 100644
index 0000000000..eb4aea609f
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/job/mozharness_test.py
@@ -0,0 +1,477 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import json
+import os
+import re
+
+from taskgraph.util.schema import Schema
+from taskgraph.util.taskcluster import get_artifact_path, get_artifact_url
+from voluptuous import Extra, Optional, Required
+
+from gecko_taskgraph.transforms.job import configure_taskdesc_for_run, run_job_using
+from gecko_taskgraph.transforms.job.common import get_expiration, support_vcs_checkout
+from gecko_taskgraph.transforms.test import normpath, test_description_schema
+from gecko_taskgraph.util.attributes import is_try
+
# Build "variants" that can be embedded in a test platform string (e.g.
# "linux1804-64-shippable/opt"). Order matters: the first match wins, so
# "shippable" is tried before "shippable-qr" etc.
VARIANTS = [
    "shippable",
    "shippable-qr",
    "shippable-lite",
    "shippable-lite-qr",
    "devedition",
    "pgo",
    "asan",
    "stylo",
    "qr",
    "ccov",
]


def get_variant(test_platform):
    """Return the first variant found in *test_platform*, or "" if none."""
    return next((v for v in VARIANTS if f"-{v}/" in test_platform), "")
+
+
# Schema for `run.using: mozharness-test`. Only the `test` keys read directly
# by the handlers below are spelled out; everything else is accepted via
# `Extra` and validated by the test transforms.
mozharness_test_run_schema = Schema(
    {
        Required("using"): "mozharness-test",
        Required("test"): {
            Required("test-platform"): str,
            Required("mozharness"): test_description_schema["mozharness"],
            Required("docker-image"): test_description_schema["docker-image"],
            Required("loopback-video"): test_description_schema["loopback-video"],
            Required("loopback-audio"): test_description_schema["loopback-audio"],
            Required("max-run-time"): test_description_schema["max-run-time"],
            Optional("retry-exit-status"): test_description_schema["retry-exit-status"],
            Extra: object,
        },
        # Base work directory used to set up the task.
        Optional("workdir"): str,
    }
)
+
+
def test_packages_url(taskdesc):
    """Account for different platforms that name their test packages differently"""
    url = get_artifact_url(
        "<build>", get_artifact_path(taskdesc, "target.test_packages.json")
    )
    test = taskdesc["run"]["test"]
    platform = test["test-platform"]
    shippable_variants = (
        "shippable",
        "shippable-qr",
        "shippable-lite",
        "shippable-lite-qr",
    )
    # Android shippable builds publish their test packages under an extra
    # 'en-US' path component.
    if "android" in platform and get_variant(platform) in shippable_variants:
        directory, filename = os.path.split(url)
        url = os.path.join(directory, "en-US", filename)
    return url
+
+
def installer_url(taskdesc):
    """Return the (task-reference) artifact URL of the build under test.

    An explicit ``installer-url`` in the mozharness description wins;
    otherwise the URL is derived from the upstream build task, using the
    signed build when the harness requires it.
    """
    mozharness = taskdesc["run"]["test"]["mozharness"]

    if "installer-url" in mozharness:
        return mozharness["installer-url"]

    if mozharness["requires-signed-builds"]:
        upstream_task = "<build-signing>"
    else:
        upstream_task = "<build>"
    return get_artifact_url(upstream_task, mozharness["build-artifact-name"])
+
+
@run_job_using("docker-worker", "mozharness-test", schema=mozharness_test_run_schema)
def mozharness_test_on_docker(config, job, taskdesc):
    """Configure a mozharness-based test task on docker-worker and hand it
    off to run-task.

    Maps the test description onto worker settings, artifacts, and the
    environment variables read by test-linux.sh / mozharness.
    """
    run = job["run"]
    test = taskdesc["run"]["test"]
    mozharness = test["mozharness"]
    worker = taskdesc["worker"] = job["worker"]

    # apply some defaults
    worker["docker-image"] = test["docker-image"]
    worker["allow-ptrace"] = True  # required for all tests, for crashreporter
    worker["loopback-video"] = test["loopback-video"]
    worker["loopback-audio"] = test["loopback-audio"]
    worker["max-run-time"] = test["max-run-time"]
    worker["retry-exit-status"] = test["retry-exit-status"]
    if "android-em-7.0-x86" in test["test-platform"]:
        worker["privileged"] = True

    artifacts = [
        # (artifact name prefix, in-image path)
        ("public/logs", "{workdir}/workspace/logs/".format(**run)),
        ("public/test", "{workdir}/artifacts/".format(**run)),
        (
            "public/test_info",
            "{workdir}/workspace/build/blobber_upload_dir/".format(**run),
        ),
    ]

    installer = installer_url(taskdesc)

    mozharness_url = get_artifact_url(
        "<build>", get_artifact_path(taskdesc, "mozharness.zip")
    )

    # NOTE(review): the paths above look absolute (when workdir is absolute),
    # which would make os.path.join ignore its first argument -- confirm the
    # "{workdir}/workspace" prefix here is intentional.
    worker.setdefault("artifacts", [])
    worker["artifacts"].extend(
        [
            {
                "name": prefix,
                "path": os.path.join("{workdir}/workspace".format(**run), path),
                "type": "directory",
                "expires-after": get_expiration(config, "default"),
            }
            for (prefix, path) in artifacts
        ]
    )

    # Environment consumed by test-linux.sh and mozharness.
    env = worker.setdefault("env", {})
    env.update(
        {
            "MOZHARNESS_CONFIG": " ".join(mozharness["config"]),
            "MOZHARNESS_SCRIPT": mozharness["script"],
            "MOZILLA_BUILD_URL": {"task-reference": installer},
            "NEED_PULSEAUDIO": "true",
            "NEED_WINDOW_MANAGER": "true",
            "ENABLE_E10S": str(bool(test.get("e10s"))).lower(),
            "WORKING_DIR": "/builds/worker",
        }
    )

    env["PYTHON"] = "python3"

    # Legacy linux64 tests rely on compiz.
    if test.get("docker-image", {}).get("in-tree") == "desktop1604-test":
        env.update({"NEED_COMPIZ": "true"})

    # Bug 1602701/1601828 - use compiz on ubuntu1804 due to GTK asynchiness
    # when manipulating windows.
    if test.get("docker-image", {}).get("in-tree") == "ubuntu1804-test":
        if "wdspec" in job["run"]["test"]["suite"] or (
            "marionette" in job["run"]["test"]["suite"]
            and "headless" not in job["label"]
        ):
            env.update({"NEED_COMPIZ": "true"})

    # Set MOZ_ENABLE_WAYLAND env variables to enable Wayland backend.
    if "wayland" in job["label"]:
        env["MOZ_ENABLE_WAYLAND"] = "1"

    if mozharness.get("mochitest-flavor"):
        env["MOCHITEST_FLAVOR"] = mozharness["mochitest-flavor"]

    if mozharness["set-moz-node-path"]:
        env["MOZ_NODE_PATH"] = "/usr/local/bin/node"

    if "actions" in mozharness:
        env["MOZHARNESS_ACTIONS"] = " ".join(mozharness["actions"])

    if is_try(config.params):
        env["TRY_COMMIT_MSG"] = config.params["message"]

    # handle some of the mozharness-specific options
    # NOTE(review): the message below mentions generic-worker but this is the
    # docker-worker handler -- likely copy-pasted; confirm intended wording.
    if test["reboot"]:
        raise Exception(
            "reboot: {} not supported on generic-worker".format(test["reboot"])
        )

    # Support vcs checkouts regardless of whether the task runs from
    # source or not in case it is needed on an interactive loaner.
    support_vcs_checkout(config, job, taskdesc)

    # If we have a source checkout, run mozharness from it instead of
    # downloading a zip file with the same content.
    if test["checkout"]:
        env["MOZHARNESS_PATH"] = "{workdir}/checkouts/gecko/testing/mozharness".format(
            **run
        )
    else:
        env["MOZHARNESS_URL"] = {"task-reference": mozharness_url}

    extra_config = {
        "installer_url": installer,
        "test_packages_url": test_packages_url(taskdesc),
    }
    env["EXTRA_MOZHARNESS_CONFIG"] = {
        "task-reference": json.dumps(extra_config, sort_keys=True)
    }

    # Bug 1634554 - pass in decision task artifact URL to mozharness for WPT.
    # Bug 1645974 - test-verify-wpt and test-coverage-wpt need artifact URL.
    if "web-platform-tests" in test["suite"] or re.match(
        "test-(coverage|verify)-wpt", test["suite"]
    ):
        env["TESTS_BY_MANIFEST_URL"] = {
            "artifact-reference": "<decision/public/tests-by-manifest.json.gz>"
        }

    command = [
        "{workdir}/bin/test-linux.sh".format(**run),
    ]
    command.extend(mozharness.get("extra-options", []))

    if test.get("test-manifests"):
        env["MOZHARNESS_TEST_PATHS"] = json.dumps(
            {test["suite"]: test["test-manifests"]}, sort_keys=True
        )

    # TODO: remove the need for run['chunked']
    elif mozharness.get("chunked") or test["chunks"] > 1:
        command.append("--total-chunk={}".format(test["chunks"]))
        command.append("--this-chunk={}".format(test["this-chunk"]))

    if "download-symbols" in mozharness:
        download_symbols = mozharness["download-symbols"]
        download_symbols = {True: "true", False: "false"}.get(
            download_symbols, download_symbols
        )
        command.append("--download-symbols=" + download_symbols)

    # Replace the run description wholesale and defer to run-task.
    job["run"] = {
        "workdir": run["workdir"],
        "tooltool-downloads": mozharness["tooltool-downloads"],
        "checkout": test["checkout"],
        "command": command,
        "using": "run-task",
    }
    configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
+
+
+@run_job_using("generic-worker", "mozharness-test", schema=mozharness_test_run_schema)
+def mozharness_test_on_generic_worker(config, job, taskdesc):
+    """Configure a mozharness-based test task to run on generic-worker.
+
+    Mutates ``taskdesc``/``job`` in place (artifacts, env, mounts and the
+    mozharness command line) and then hands off to
+    ``configure_taskdesc_for_run``.  Supports Windows, macOS, Linux and
+    linux-bitbar (Android device) workers.
+    """
+    test = taskdesc["run"]["test"]
+    mozharness = test["mozharness"]
+    worker = taskdesc["worker"] = job["worker"]
+
+    bitbar_script = "test-linux.sh"
+
+    is_macosx = worker["os"] == "macosx"
+    is_windows = worker["os"] == "windows"
+    is_linux = worker["os"] == "linux" or worker["os"] == "linux-bitbar"
+    is_bitbar = worker["os"] == "linux-bitbar"
+    assert is_macosx or is_windows or is_linux
+
+    artifacts = [
+        {
+            "name": "public/logs",
+            "path": "logs",
+            "type": "directory",
+            "expires-after": get_expiration(config, "default"),
+        }
+    ]
+
+    # jittest doesn't have blob_upload_dir
+    if test["test-name"] != "jittest":
+        artifacts.append(
+            {
+                "name": "public/test_info",
+                "path": "build/blobber_upload_dir",
+                "type": "directory",
+                "expires-after": get_expiration(config, "default"),
+            }
+        )
+
+    # bitbar workers lay artifacts out differently, so the default list is
+    # replaced (not extended) here.
+    if is_bitbar:
+        artifacts = [
+            {
+                "name": "public/test/",
+                "path": "artifacts/public",
+                "type": "directory",
+                "expires-after": get_expiration(config, "default"),
+            },
+            {
+                "name": "public/logs/",
+                "path": "workspace/logs",
+                "type": "directory",
+                "expires-after": get_expiration(config, "default"),
+            },
+            {
+                "name": "public/test_info/",
+                "path": "workspace/build/blobber_upload_dir",
+                "type": "directory",
+                "expires-after": get_expiration(config, "default"),
+            },
+        ]
+
+    installer = installer_url(taskdesc)
+
+    worker["os-groups"] = test["os-groups"]
+
+    # run-as-administrator is a feature for workers with UAC enabled and as such should not be
+    # included in tasks on workers that have UAC disabled. Currently UAC is only enabled on
+    # gecko Windows 10 workers, however this may be subject to change. Worker type
+    # environment definitions can be found in https://github.com/mozilla-releng/OpenCloudConfig
+    # See https://docs.microsoft.com/en-us/windows/desktop/secauthz/user-account-control
+    # for more information about UAC.
+    if test.get("run-as-administrator", False):
+        if job["worker-type"].startswith("win10-64") or job["worker-type"].startswith(
+            "win11-64"
+        ):
+            worker["run-as-administrator"] = True
+        else:
+            raise Exception(
+                "run-as-administrator not supported on {}".format(job["worker-type"])
+            )
+
+    if test["reboot"]:
+        raise Exception(
+            "reboot: {} not supported on generic-worker".format(test["reboot"])
+        )
+
+    worker["max-run-time"] = test["max-run-time"]
+    worker["retry-exit-status"] = test["retry-exit-status"]
+    worker.setdefault("artifacts", [])
+    worker["artifacts"].extend(artifacts)
+
+    env = worker.setdefault("env", {})
+    env["GECKO_HEAD_REPOSITORY"] = config.params["head_repository"]
+    env["GECKO_HEAD_REV"] = config.params["head_rev"]
+
+    # this list will get cleaned up / reduced / removed in bug 1354088
+    if is_macosx:
+        env.update(
+            {
+                "LC_ALL": "en_US.UTF-8",
+                "LANG": "en_US.UTF-8",
+                "MOZ_NODE_PATH": "/usr/local/bin/node",
+                "PATH": "/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin",
+                "SHELL": "/bin/bash",
+            }
+        )
+    elif is_bitbar:
+        env.update(
+            {
+                "LANG": "en_US.UTF-8",
+                "MOZHARNESS_CONFIG": " ".join(mozharness["config"]),
+                "MOZHARNESS_SCRIPT": mozharness["script"],
+                "MOZHARNESS_URL": {
+                    "artifact-reference": "<build/public/build/mozharness.zip>"
+                },
+                "MOZILLA_BUILD_URL": {"task-reference": installer},
+                "MOZ_NO_REMOTE": "1",
+                "NEED_XVFB": "false",
+                "XPCOM_DEBUG_BREAK": "warn",
+                "NO_FAIL_ON_TEST_ERRORS": "1",
+                "MOZ_HIDE_RESULTS_TABLE": "1",
+                "MOZ_NODE_PATH": "/usr/local/bin/node",
+                "TASKCLUSTER_WORKER_TYPE": job["worker-type"],
+            }
+        )
+
+    extra_config = {
+        "installer_url": installer,
+        "test_packages_url": test_packages_url(taskdesc),
+    }
+    env["EXTRA_MOZHARNESS_CONFIG"] = {
+        "task-reference": json.dumps(extra_config, sort_keys=True)
+    }
+
+    # Bug 1634554 - pass in decision task artifact URL to mozharness for WPT.
+    # Bug 1645974 - test-verify-wpt and test-coverage-wpt need artifact URL.
+    if "web-platform-tests" in test["suite"] or re.match(
+        "test-(coverage|verify)-wpt", test["suite"]
+    ):
+        env["TESTS_BY_MANIFEST_URL"] = {
+            "artifact-reference": "<decision/public/tests-by-manifest.json.gz>"
+        }
+
+    # Pick the python interpreter and mozharness entry point per worker OS.
+    if is_windows:
+        py_binary = "c:\\mozilla-build\\{python}\\{python}.exe".format(python="python3")
+        mh_command = [
+            py_binary,
+            "-u",
+            "mozharness\\scripts\\" + normpath(mozharness["script"]),
+        ]
+    elif is_bitbar:
+        py_binary = "python3"
+        mh_command = ["bash", f"./{bitbar_script}"]
+    elif is_macosx:
+        py_binary = "/usr/local/bin/{}".format("python3")
+        mh_command = [
+            py_binary,
+            "-u",
+            "mozharness/scripts/" + mozharness["script"],
+        ]
+    else:
+        # is_linux
+        py_binary = "/usr/bin/{}".format("python3")
+        mh_command = [
+            # Use an absolute interpreter path; relying on PATH lookup has
+            # been broken on some workers in the past. See bug #1547903.
+            py_binary,
+            "-u",
+            "mozharness/scripts/" + mozharness["script"],
+        ]
+
+    env["PYTHON"] = py_binary
+
+    for mh_config in mozharness["config"]:
+        cfg_path = "mozharness/configs/" + mh_config
+        if is_windows:
+            cfg_path = normpath(cfg_path)
+        mh_command.extend(["--cfg", cfg_path])
+    mh_command.extend(mozharness.get("extra-options", []))
+    if mozharness.get("download-symbols"):
+        if isinstance(mozharness["download-symbols"], str):
+            mh_command.extend(["--download-symbols", mozharness["download-symbols"]])
+        else:
+            mh_command.extend(["--download-symbols", "true"])
+    if mozharness.get("include-blob-upload-branch"):
+        mh_command.append("--blob-upload-branch=" + config.params["project"])
+
+    if test.get("test-manifests"):
+        env["MOZHARNESS_TEST_PATHS"] = json.dumps(
+            {test["suite"]: test["test-manifests"]}, sort_keys=True
+        )
+
+    # TODO: remove the need for run['chunked']
+    elif mozharness.get("chunked") or test["chunks"] > 1:
+        mh_command.append("--total-chunk={}".format(test["chunks"]))
+        mh_command.append("--this-chunk={}".format(test["this-chunk"]))
+
+    if is_try(config.params):
+        env["TRY_COMMIT_MSG"] = config.params["message"]
+
+    worker["mounts"] = [
+        {
+            "directory": "mozharness",
+            "content": {
+                "artifact": get_artifact_path(taskdesc, "mozharness.zip"),
+                "task-id": {"task-reference": "<build>"},
+            },
+            "format": "zip",
+        }
+    ]
+    # bitbar replaces the mozharness.zip mount entirely: there, mozharness is
+    # downloaded at runtime via the MOZHARNESS_URL env var set above.
+    if is_bitbar:
+        a_url = config.params.file_url(
+            f"taskcluster/scripts/tester/{bitbar_script}",
+        )
+        worker["mounts"] = [
+            {
+                "file": bitbar_script,
+                "content": {
+                    "url": a_url,
+                },
+            }
+        ]
+
+    job["run"] = {
+        "tooltool-downloads": mozharness["tooltool-downloads"],
+        "checkout": test["checkout"],
+        "command": mh_command,
+        "using": "run-task",
+    }
+    if is_bitbar:
+        job["run"]["run-as-root"] = True
+    configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
diff --git a/taskcluster/gecko_taskgraph/transforms/job/python_test.py b/taskcluster/gecko_taskgraph/transforms/job/python_test.py
new file mode 100644
index 0000000000..b572061217
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/job/python_test.py
@@ -0,0 +1,47 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Support for running mach python-test tasks (via run-task)
+"""
+
+
+from taskgraph.util.schema import Schema
+from voluptuous import Optional, Required
+
+from gecko_taskgraph.transforms.job import configure_taskdesc_for_run, run_job_using
+
+# Schema for the `python-test` run-using stanza.
+python_test_schema = Schema(
+    {
+        Required("using"): "python-test",
+        # Python version to use
+        Required("python-version"): int,
+        # The subsuite to run
+        Required("subsuite"): str,
+        # Base work directory used to set up the task.
+        Optional("workdir"): str,
+    }
+)
+
+
+# Default values merged into the run stanza before schema validation.
+defaults = {
+    "python-version": 3,
+    "subsuite": "default",
+}
+
+
+@run_job_using(
+ "docker-worker", "python-test", schema=python_test_schema, defaults=defaults
+)
+@run_job_using(
+ "generic-worker", "python-test", schema=python_test_schema, defaults=defaults
+)
+def configure_python_test(config, job, taskdesc):
+ run = job["run"]
+ worker = job["worker"]
+
+ # defer to the mach implementation
+ run["mach"] = ("python-test --subsuite {subsuite} --run-slow").format(**run)
+ run["using"] = "mach"
+ del run["subsuite"]
+ configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
diff --git a/taskcluster/gecko_taskgraph/transforms/job/run_task.py b/taskcluster/gecko_taskgraph/transforms/job/run_task.py
new file mode 100644
index 0000000000..201c0b825a
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/job/run_task.py
@@ -0,0 +1,308 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Support for running jobs that are invoked via the `run-task` script.
+"""
+
+
+import os
+
+from mozbuild.util import memoize
+from mozpack import path
+from taskgraph.util.schema import Schema
+from taskgraph.util.yaml import load_yaml
+from voluptuous import Any, Extra, Optional, Required
+
+from gecko_taskgraph import GECKO
+from gecko_taskgraph.transforms.job import run_job_using
+from gecko_taskgraph.transforms.job.common import add_tooltool, support_vcs_checkout
+from gecko_taskgraph.transforms.task import taskref_or_string
+
+# Schema for the `run-task` run-using stanza, shared by the docker-worker and
+# generic-worker implementations below.
+run_task_schema = Schema(
+    {
+        Required("using"): "run-task",
+        # if true, add a cache at ~worker/.cache, which is where things like pip
+        # tend to hide their caches. This cache is never added for level-1 jobs.
+        # TODO Once bug 1526028 is fixed, this and 'use-caches' should be merged.
+        Required("cache-dotcache"): bool,
+        # Whether or not to use caches.
+        Optional("use-caches"): bool,
+        # if true (the default), perform a checkout of gecko on the worker
+        Required("checkout"): bool,
+        Optional(
+            "cwd",
+            description="Path to run command in. If a checkout is present, the path "
+            "to the checkout will be interpolated with the key `checkout`",
+        ): str,
+        # The sparse checkout profile to use. Value is the filename relative to
+        # "sparse-profile-prefix" which defaults to "build/sparse-profiles/".
+        Required("sparse-profile"): Any(str, None),
+        # The relative path to the sparse profile.
+        Optional("sparse-profile-prefix"): str,
+        # if true, perform a checkout of a comm-central based branch inside the
+        # gecko checkout
+        Required("comm-checkout"): bool,
+        # The command arguments to pass to the `run-task` script, after the
+        # checkout arguments. If a list, it will be passed directly; otherwise
+        # it will be included in a single argument to `bash -cx`.
+        Required("command"): Any([taskref_or_string], taskref_or_string),
+        # Context to substitute into the command using format string
+        # substitution (e.g {value}). This is useful if certain aspects of the
+        # command need to be generated in transforms.
+        Optional("command-context"): {
+            # If present, loads a set of context variables from an unnested yaml
+            # file. If a value is present in both the provided file and directly
+            # in command-context, the latter will take priority.
+            Optional("from-file"): str,
+            Extra: object,
+        },
+        # Base work directory used to set up the task.
+        Optional("workdir"): str,
+        # If not false, tooltool downloads will be enabled via relengAPIProxy
+        # for either just public files, or all files. Only supported on
+        # docker-worker.
+        Required("tooltool-downloads"): Any(
+            False,
+            "public",
+            "internal",
+        ),
+        # Whether to run as root. (defaults to False)
+        Optional("run-as-root"): bool,
+    }
+)
+
+
+def common_setup(config, job, taskdesc, command):
+ run = job["run"]
+ if run["checkout"]:
+ support_vcs_checkout(config, job, taskdesc, sparse=bool(run["sparse-profile"]))
+ command.append(
+ "--gecko-checkout={}".format(taskdesc["worker"]["env"]["GECKO_PATH"])
+ )
+
+ if run["sparse-profile"]:
+ sparse_profile_prefix = run.pop(
+ "sparse-profile-prefix", "build/sparse-profiles"
+ )
+ sparse_profile_path = path.join(sparse_profile_prefix, run["sparse-profile"])
+ command.append(f"--gecko-sparse-profile={sparse_profile_path}")
+
+ taskdesc["worker"].setdefault("env", {})["MOZ_SCM_LEVEL"] = config.params["level"]
+
+
+# Defaults merged into the run stanza for both worker implementations.
+worker_defaults = {
+    "cache-dotcache": False,
+    "checkout": True,
+    "comm-checkout": False,
+    "sparse-profile": None,
+    "tooltool-downloads": False,
+    "run-as-root": False,
+}
+
+
+# Memoize so repeated reads of the same yaml file are parsed only once.
+load_yaml = memoize(load_yaml)
+
+
+def script_url(config, script):
+ if "MOZ_AUTOMATION" in os.environ and "TASK_ID" not in os.environ:
+ raise Exception("TASK_ID must be defined to use run-task on generic-worker")
+ task_id = os.environ.get("TASK_ID", "<TASK_ID>")
+ tc_url = "http://firefox-ci-tc.services.mozilla.com"
+ return f"{tc_url}/api/queue/v1/task/{task_id}/artifacts/public/{script}"
+
+
+def substitute_command_context(command_context, command):
+ from_file = command_context.pop("from-file", None)
+ full_context = {}
+ if from_file:
+ full_context = load_yaml(os.path.join(GECKO, from_file))
+ else:
+ full_context = {}
+
+ full_context.update(command_context)
+
+ if isinstance(command, list):
+ for i in range(len(command)):
+ command[i] = command[i].format(**full_context)
+ else:
+ command = command.format(**full_context)
+
+ return command
+
+
+@run_job_using(
+    "docker-worker", "run-task", schema=run_task_schema, defaults=worker_defaults
+)
+def docker_worker_run_task(config, job, taskdesc):
+    """Build the docker-worker command line for a run-task based job.
+
+    Mutates ``taskdesc["worker"]`` in place and finishes by setting
+    ``worker["command"]`` to the full run-task invocation.
+    """
+    run = job["run"]
+    worker = taskdesc["worker"] = job["worker"]
+    command = ["/builds/worker/bin/run-task"]
+    common_setup(config, job, taskdesc, command)
+
+    if run["tooltool-downloads"]:
+        internal = run["tooltool-downloads"] == "internal"
+        add_tooltool(config, job, taskdesc, internal=internal)
+
+    if run.get("cache-dotcache"):
+        # NOTE(review): assumes worker["caches"] was already initialized
+        # upstream (there is no setdefault here) -- confirm.
+        worker["caches"].append(
+            {
+                "type": "persistent",
+                "name": "{project}-dotcache".format(**config.params),
+                "mount-point": "{workdir}/.cache".format(**run),
+                "skip-untrusted": True,
+            }
+        )
+
+    if run.get("command-context"):
+        run_command = substitute_command_context(
+            run.get("command-context"), run["command"]
+        )
+    else:
+        run_command = run["command"]
+
+    run_cwd = run.get("cwd")
+    if run_cwd and run["checkout"]:
+        run_cwd = path.normpath(
+            run_cwd.format(checkout=taskdesc["worker"]["env"]["GECKO_PATH"])
+        )
+    elif run_cwd and "{checkout}" in run_cwd:
+        raise Exception(
+            "Found `{{checkout}}` interpolation in `cwd` for task {name} "
+            "but the task doesn't have a checkout: {cwd}".format(
+                cwd=run_cwd, name=job.get("name", job.get("label"))
+            )
+        )
+
+    # dict is for the case of `{'task-reference': text_type}`.
+    if isinstance(run_command, (str, dict)):
+        run_command = ["bash", "-cx", run_command]
+    if run["comm-checkout"]:
+        command.append(
+            "--comm-checkout={}/comm".format(taskdesc["worker"]["env"]["GECKO_PATH"])
+        )
+    if run["run-as-root"]:
+        command.extend(("--user", "root", "--group", "root"))
+    if run_cwd:
+        command.extend(("--task-cwd", run_cwd))
+    # Everything after `--` is the payload command run-task executes.
+    command.append("--")
+    command.extend(run_command)
+    worker["command"] = command
+
+
+@run_job_using(
+    "generic-worker", "run-task", schema=run_task_schema, defaults=worker_defaults
+)
+def generic_worker_run_task(config, job, taskdesc):
+    """Build the generic-worker command line for a run-task based job.
+
+    Unlike docker-worker, run-task (and fetch-content when needed) are
+    mounted from the decision task's artifacts, and the final command is
+    joined into a single string per generic-worker conventions.
+    """
+    run = job["run"]
+    worker = taskdesc["worker"] = job["worker"]
+    is_win = worker["os"] == "windows"
+    is_mac = worker["os"] == "macosx"
+    is_bitbar = worker["os"] == "linux-bitbar"
+
+    if run["tooltool-downloads"]:
+        internal = run["tooltool-downloads"] == "internal"
+        add_tooltool(config, job, taskdesc, internal=internal)
+
+    # Pick an interpreter for the mounted run-task script per worker OS.
+    if is_win:
+        command = ["C:/mozilla-build/python3/python3.exe", "run-task"]
+    elif is_mac:
+        command = ["/usr/local/bin/python3", "run-task"]
+    else:
+        command = ["./run-task"]
+
+    common_setup(config, job, taskdesc, command)
+
+    worker.setdefault("mounts", [])
+    if run.get("cache-dotcache"):
+        worker["mounts"].append(
+            {
+                "cache-name": "{project}-dotcache".format(**config.params),
+                "directory": "{workdir}/.cache".format(**run),
+            }
+        )
+    worker["mounts"].append(
+        {
+            "content": {
+                "url": script_url(config, "run-task"),
+            },
+            "file": "./run-task",
+        }
+    )
+    if job.get("fetches", {}):
+        worker["mounts"].append(
+            {
+                "content": {
+                    "url": script_url(config, "fetch-content"),
+                },
+                "file": "./fetch-content",
+            }
+        )
+
+    run_command = run["command"]
+    run_cwd = run.get("cwd")
+    if run_cwd and run["checkout"]:
+        run_cwd = path.normpath(
+            run_cwd.format(checkout=taskdesc["worker"]["env"]["GECKO_PATH"])
+        )
+    elif run_cwd and "{checkout}" in run_cwd:
+        raise Exception(
+            "Found `{{checkout}}` interpolation in `cwd` for task {name} "
+            "but the task doesn't have a checkout: {cwd}".format(
+                cwd=run_cwd, name=job.get("name", job.get("label"))
+            )
+        )
+
+    # dict is for the case of `{'task-reference': text_type}`.
+    if isinstance(run_command, (str, dict)):
+        if is_win:
+            # Quote the payload so it survives the final " ".join below when
+            # handed to `bash -cx` as a single argument.
+            if isinstance(run_command, dict):
+                for k in run_command.keys():
+                    run_command[k] = f'"{run_command[k]}"'
+            else:
+                run_command = f'"{run_command}"'
+        run_command = ["bash", "-cx", run_command]
+
+    if run.get("command-context"):
+        run_command = substitute_command_context(
+            run.get("command-context"), run_command
+        )
+
+    if run["comm-checkout"]:
+        command.append(
+            "--comm-checkout={}/comm".format(taskdesc["worker"]["env"]["GECKO_PATH"])
+        )
+
+    if run["run-as-root"]:
+        command.extend(("--user", "root", "--group", "root"))
+    if run_cwd:
+        command.extend(("--task-cwd", run_cwd))
+    command.append("--")
+    if is_bitbar:
+        # Use the bitbar wrapper script which sets up the device and adb
+        # environment variables
+        command.append("/builds/taskcluster/script.py")
+    command.extend(run_command)
+
+    if is_win:
+        # Windows generic-worker takes one command string; if any element is
+        # a task-reference dict, flatten it into a single artifact-reference
+        # string so the reference is still resolved at task-definition time.
+        taskref = False
+        for c in command:
+            if isinstance(c, dict):
+                taskref = True
+
+        if taskref:
+            cmd = []
+            for c in command:
+                if isinstance(c, dict):
+                    for v in c.values():
+                        cmd.append(v)
+                else:
+                    cmd.append(c)
+            worker["command"] = [{"artifact-reference": " ".join(cmd)}]
+        else:
+            worker["command"] = [" ".join(command)]
+    else:
+        # On POSIX workers the mounted script is not executable by default.
+        worker["command"] = [
+            ["chmod", "+x", "run-task"],
+            command,
+        ]
diff --git a/taskcluster/gecko_taskgraph/transforms/job/spidermonkey.py b/taskcluster/gecko_taskgraph/transforms/job/spidermonkey.py
new file mode 100644
index 0000000000..91c7e93bd6
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/job/spidermonkey.py
@@ -0,0 +1,109 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Support for running spidermonkey jobs via dedicated scripts
+"""
+
+
+from taskgraph.util.schema import Schema
+from voluptuous import Any, Optional, Required
+
+from gecko_taskgraph.transforms.job import configure_taskdesc_for_run, run_job_using
+from gecko_taskgraph.transforms.job.common import (
+ docker_worker_add_artifacts,
+ generic_worker_add_artifacts,
+)
+
+# Schema shared by the `spidermonkey` and `spidermonkey-package` run-usings.
+sm_run_schema = Schema(
+    {
+        Required("using"): Any(
+            "spidermonkey",
+            "spidermonkey-package",
+        ),
+        # SPIDERMONKEY_VARIANT and SPIDERMONKEY_PLATFORM
+        Required("spidermonkey-variant"): str,
+        Optional("spidermonkey-platform"): str,
+        # Base work directory used to set up the task.
+        Optional("workdir"): str,
+        # If not false, tooltool downloads will be enabled via relengAPIProxy
+        # for either just public files, or all files.
+        Required("tooltool-downloads"): Any(
+            False,
+            "public",
+            "internal",
+        ),
+    }
+)
+
+
+@run_job_using("docker-worker", "spidermonkey", schema=sm_run_schema)
+@run_job_using("docker-worker", "spidermonkey-package", schema=sm_run_schema)
+def docker_worker_spidermonkey(config, job, taskdesc):
+ run = job["run"]
+
+ worker = taskdesc["worker"] = job["worker"]
+ worker.setdefault("artifacts", [])
+
+ docker_worker_add_artifacts(config, job, taskdesc)
+
+ env = worker.setdefault("env", {})
+ env.update(
+ {
+ "MOZHARNESS_DISABLE": "true",
+ "SPIDERMONKEY_VARIANT": run.pop("spidermonkey-variant"),
+ "MOZ_BUILD_DATE": config.params["moz_build_date"],
+ "MOZ_SCM_LEVEL": config.params["level"],
+ }
+ )
+ if "spidermonkey-platform" in run:
+ env["SPIDERMONKEY_PLATFORM"] = run.pop("spidermonkey-platform")
+
+ script = "build-sm.sh"
+ if run["using"] == "spidermonkey-package":
+ script = "build-sm-package.sh"
+
+ run["using"] = "run-task"
+ run["cwd"] = run["workdir"]
+ run["command"] = [f"./checkouts/gecko/taskcluster/scripts/builder/{script}"]
+
+ configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
+
+
+@run_job_using("generic-worker", "spidermonkey", schema=sm_run_schema)
+def generic_worker_spidermonkey(config, job, taskdesc):
+    """Translate a spidermonkey run stanza into a run-task job (generic-worker).
+
+    Only Windows workers are supported by this implementation.
+    """
+    assert job["worker"]["os"] == "windows", "only supports windows right now"
+
+    run = job["run"]
+
+    worker = taskdesc["worker"] = job["worker"]
+
+    generic_worker_add_artifacts(config, job, taskdesc)
+
+    env = worker.setdefault("env", {})
+    env.update(
+        {
+            "MOZHARNESS_DISABLE": "true",
+            "SPIDERMONKEY_VARIANT": run.pop("spidermonkey-variant"),
+            "MOZ_BUILD_DATE": config.params["moz_build_date"],
+            "MOZ_SCM_LEVEL": config.params["level"],
+            "SCCACHE_DISABLE": "1",
+            "WORK": ".",  # Override the defaults in build scripts
+            "GECKO_PATH": "./src",  # with values suitable for windows generic worker
+            "UPLOAD_DIR": "./public/build",
+        }
+    )
+    if "spidermonkey-platform" in run:
+        env["SPIDERMONKEY_PLATFORM"] = run.pop("spidermonkey-platform")
+
+    script = "build-sm.sh"
+    if run["using"] == "spidermonkey-package":
+        script = "build-sm-package.sh"
+        # Don't allow untested configurations yet
+        raise Exception("spidermonkey-package is not a supported configuration")
+
+    run["using"] = "run-task"
+    run["command"] = [
+        "c:\\mozilla-build\\msys2\\usr\\bin\\bash.exe "  # string concat
+        '"./src/taskcluster/scripts/builder/%s"' % script
+    ]
+
+    configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
diff --git a/taskcluster/gecko_taskgraph/transforms/job/toolchain.py b/taskcluster/gecko_taskgraph/transforms/job/toolchain.py
new file mode 100644
index 0000000000..fb030019fc
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/job/toolchain.py
@@ -0,0 +1,257 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Support for running toolchain-building jobs via dedicated scripts
+"""
+
+
+import os
+
+import taskgraph
+from mozbuild.shellutil import quote as shell_quote
+from taskgraph.util.schema import Schema, optionally_keyed_by, resolve_keyed_by
+from voluptuous import Any, Optional, Required
+
+from gecko_taskgraph import GECKO
+from gecko_taskgraph.transforms.job import configure_taskdesc_for_run, run_job_using
+from gecko_taskgraph.transforms.job.common import (
+ docker_worker_add_artifacts,
+ generic_worker_add_artifacts,
+)
+from gecko_taskgraph.util.attributes import RELEASE_PROJECTS
+from gecko_taskgraph.util.hash import hash_paths
+
+CACHE_TYPE = "toolchains.v3"
+
+# Schema for the `toolchain-script` run-using stanza.
+toolchain_run_schema = Schema(
+    {
+        Required("using"): "toolchain-script",
+        # The script (in taskcluster/scripts/misc) to run.
+        # Python scripts are invoked with `mach python` so vendored libraries
+        # are available.
+        Required("script"): str,
+        # Arguments to pass to the script.
+        Optional("arguments"): [str],
+        # If not false, tooltool downloads will be enabled via relengAPIProxy
+        # for either just public files, or all files. Not supported on Windows
+        Required("tooltool-downloads"): Any(
+            False,
+            "public",
+            "internal",
+        ),
+        # Sparse profile to give to checkout using `run-task`. If given,
+        # Defaults to "toolchain-build". The value is relative to
+        # "sparse-profile-prefix", optionally defined below is the path,
+        # defaulting to "build/sparse-profiles".
+        # i.e. `build/sparse-profiles/toolchain-build`.
+        # If `None`, instructs `run-task` to not use a sparse profile at all.
+        Required("sparse-profile"): Any(str, None),
+        # The relative path to the sparse profile.
+        Optional("sparse-profile-prefix"): str,
+        # Paths/patterns pointing to files that influence the outcome of a
+        # toolchain build.
+        Optional("resources"): [str],
+        # Path to the artifact produced by the toolchain job
+        Required("toolchain-artifact"): str,
+        Optional(
+            "toolchain-alias",
+            description="An alias that can be used instead of the real toolchain job name in "
+            "fetch stanzas for jobs.",
+        ): optionally_keyed_by("project", Any(None, str, [str])),
+        Optional(
+            "toolchain-env",
+            description="Additional env variables to add to the worker when using this toolchain",
+        ): {str: object},
+        # Base work directory used to set up the task.
+        Optional("workdir"): str,
+    }
+)
+
+
+def get_digest_data(config, run, taskdesc):
+ files = list(run.pop("resources", []))
+ # The script
+ files.append("taskcluster/scripts/misc/{}".format(run["script"]))
+ # Tooltool manifest if any is defined:
+ tooltool_manifest = taskdesc["worker"]["env"].get("TOOLTOOL_MANIFEST")
+ if tooltool_manifest:
+ files.append(tooltool_manifest)
+
+ # Accumulate dependency hashes for index generation.
+ data = [hash_paths(GECKO, files)]
+
+ data.append(taskdesc["attributes"]["toolchain-artifact"])
+
+ # If the task uses an in-tree docker image, we want it to influence
+ # the index path as well. Ideally, the content of the docker image itself
+ # should have an influence, but at the moment, we can't get that
+ # information here. So use the docker image name as a proxy. Not a lot of
+ # changes to docker images actually have an impact on the resulting
+ # toolchain artifact, so we'll just rely on such important changes to be
+ # accompanied with a docker image name change.
+ image = taskdesc["worker"].get("docker-image", {}).get("in-tree")
+ if image:
+ data.append(image)
+
+ # Likewise script arguments should influence the index.
+ args = run.get("arguments")
+ if args:
+ data.extend(args)
+
+ if taskdesc["attributes"].get("rebuild-on-release"):
+ # Add whether this is a release branch or not
+ data.append(str(config.params["project"] in RELEASE_PROJECTS))
+ return data
+
+
+def common_toolchain(config, job, taskdesc, is_docker):
+    """Shared implementation for toolchain-script jobs on both worker types.
+
+    Sets worker env/artifacts, records toolchain attributes, computes the
+    cache digest, and rewrites the run stanza into a run-task invocation.
+    """
+    run = job["run"]
+
+    worker = taskdesc["worker"] = job["worker"]
+    worker["chain-of-trust"] = True
+
+    if is_docker:
+        # If the task doesn't have a docker-image, set a default
+        worker.setdefault("docker-image", {"in-tree": "deb11-toolchain-build"})
+
+    if job["worker"]["os"] == "windows":
+        # There were no caches on generic-worker before bug 1519472, and they cause
+        # all sorts of problems with Windows toolchain tasks, disable them until
+        # tasks are ready.
+        run["use-caches"] = False
+
+    env = worker.setdefault("env", {})
+    env.update(
+        {
+            "MOZ_BUILD_DATE": config.params["moz_build_date"],
+            "MOZ_SCM_LEVEL": config.params["level"],
+            "TOOLCHAIN_ARTIFACT": run["toolchain-artifact"],
+        }
+    )
+
+    if is_docker:
+        # Toolchain checkouts don't live under {workdir}/checkouts
+        workspace = "{workdir}/workspace/build".format(**run)
+        env["GECKO_PATH"] = f"{workspace}/src"
+
+    attributes = taskdesc.setdefault("attributes", {})
+    attributes["toolchain-artifact"] = run.pop("toolchain-artifact")
+    toolchain_artifact = attributes["toolchain-artifact"]
+    # Non-default artifact locations need an artifact_prefix attribute so
+    # downstream consumers can find them; it must not be set already.
+    if not toolchain_artifact.startswith("public/build/"):
+        if "artifact_prefix" in attributes:
+            raise Exception(
+                "Toolchain {} has an artifact_prefix attribute. That is not"
+                " allowed on toolchain tasks.".format(taskdesc["label"])
+            )
+        attributes["artifact_prefix"] = os.path.dirname(toolchain_artifact)
+
+    resolve_keyed_by(
+        run,
+        "toolchain-alias",
+        item_name=taskdesc["label"],
+        project=config.params["project"],
+    )
+    alias = run.pop("toolchain-alias", None)
+    if alias:
+        attributes["toolchain-alias"] = alias
+    if "toolchain-env" in run:
+        attributes["toolchain-env"] = run.pop("toolchain-env")
+
+    # Allow the job to specify where artifacts come from, but add
+    # public/build if it's not there already.
+    artifacts = worker.setdefault("artifacts", [])
+    if not artifacts:
+        if is_docker:
+            docker_worker_add_artifacts(config, job, taskdesc)
+        else:
+            generic_worker_add_artifacts(config, job, taskdesc)
+
+    digest_data = get_digest_data(config, run, taskdesc)
+
+    # Register the task in the cache index unless caching is explicitly
+    # disabled or we're in `taskgraph.fast` mode.
+    if job.get("attributes", {}).get("cached_task") is not False and not taskgraph.fast:
+        name = taskdesc["label"].replace(f"{config.kind}-", "", 1)
+        taskdesc["cache"] = {
+            "type": CACHE_TYPE,
+            "name": name,
+            "digest-data": digest_data,
+        }
+
+    # Toolchains that are used for local development need to be built on a
+    # level-3 branch to be installable via `mach bootstrap`.
+    local_toolchain = taskdesc["attributes"].get("local-toolchain")
+    if local_toolchain:
+        if taskdesc.get("run-on-projects"):
+            raise Exception(
+                "Toolchain {} used for local developement must not have"
+                " run-on-projects set".format(taskdesc["label"])
+            )
+        taskdesc["run-on-projects"] = ["integration", "release"]
+
+    script = run.pop("script")
+    arguments = run.pop("arguments", [])
+    if local_toolchain and not attributes["toolchain-artifact"].startswith("public/"):
+        # Local toolchains with private artifacts are expected to have a script that
+        # fill a directory given as a final command line argument. That script, and the
+        # arguments provided, are used by the build system bootstrap code, and for the
+        # corresponding CI tasks, the command is wrapped with a script that creates an
+        # artifact based on that filled directory.
+        # We prefer automatic wrapping rather than manual wrapping in the yaml because
+        # it makes the index independent of the wrapper script, which is irrelevant.
+        # Also, an attribute is added for the bootstrap code to be able to easily parse
+        # the command.
+        attributes["toolchain-command"] = {
+            "script": script,
+            "arguments": list(arguments),
+        }
+        arguments.insert(0, script)
+        script = "private_local_toolchain.sh"
+
+    run["using"] = "run-task"
+    if is_docker:
+        gecko_path = "workspace/build/src"
+    elif job["worker"]["os"] == "windows":
+        gecko_path = "%GECKO_PATH%"
+    else:
+        gecko_path = "$GECKO_PATH"
+
+    if is_docker:
+        run["cwd"] = run["workdir"]
+    run["command"] = [
+        "{}/taskcluster/scripts/misc/{}".format(gecko_path, script)
+    ] + arguments
+    if not is_docker:
+        # Don't quote the first item in the command because it purposely contains
+        # an environment variable that is not meant to be quoted.
+        if len(run["command"]) > 1:
+            run["command"] = run["command"][0] + " " + shell_quote(*run["command"][1:])
+        else:
+            run["command"] = run["command"][0]
+
+    configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
+
+
+# Defaults shared by both worker implementations below.
+toolchain_defaults = {
+    "tooltool-downloads": False,
+    "sparse-profile": "toolchain-build",
+}
+
+
+@run_job_using(
+    "docker-worker",
+    "toolchain-script",
+    schema=toolchain_run_schema,
+    defaults=toolchain_defaults,
+)
+def docker_worker_toolchain(config, job, taskdesc):
+    """Run a toolchain-script job on docker-worker."""
+    common_toolchain(config, job, taskdesc, is_docker=True)
+
+
+@run_job_using(
+    "generic-worker",
+    "toolchain-script",
+    schema=toolchain_run_schema,
+    defaults=toolchain_defaults,
+)
+def generic_worker_toolchain(config, job, taskdesc):
+    """Run a toolchain-script job on generic-worker."""
+    common_toolchain(config, job, taskdesc, is_docker=False)
diff --git a/taskcluster/gecko_taskgraph/transforms/l10n.py b/taskcluster/gecko_taskgraph/transforms/l10n.py
new file mode 100644
index 0000000000..e36c70246b
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/l10n.py
@@ -0,0 +1,416 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Do transforms specific to l10n kind
+"""
+
+
+import json
+
+from mozbuild.chunkify import chunkify
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import (
+ optionally_keyed_by,
+ resolve_keyed_by,
+ taskref_or_string,
+)
+from taskgraph.util.taskcluster import get_artifact_prefix
+from taskgraph.util.treeherder import add_suffix
+from voluptuous import Any, Optional, Required
+
+from gecko_taskgraph.loader.multi_dep import schema
+from gecko_taskgraph.transforms.job import job_description_schema
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import (
+ copy_attributes_from_dependent_job,
+ task_name,
+)
+from gecko_taskgraph.util.copy_task import copy_task
+
+
+def _by_platform(arg):
+    """Wrap a schema fragment so it may optionally be keyed by 'build-platform'."""
+    return optionally_keyed_by("build-platform", arg)
+
+# Schema for entries of the l10n kind.  Most fields may be keyed by
+# 'build-platform' (see _by_platform); they are flattened to concrete values
+# in the handle_keyed_by transform below.
+l10n_description_schema = schema.extend(
+    {
+        # Name for this job, inferred from the dependent job before validation
+        Required("name"): str,
+        # build-platform, inferred from dependent job before validation
+        Required("build-platform"): str,
+        # max run time of the task
+        Required("run-time"): _by_platform(int),
+        # Locales not to repack for
+        Required("ignore-locales"): _by_platform([str]),
+        # All l10n jobs use mozharness
+        Required("mozharness"): {
+            # Script to invoke for mozharness
+            Required("script"): _by_platform(str),
+            # Config files passed to the mozharness script
+            Required("config"): _by_platform([str]),
+            # Additional paths to look for mozharness configs in. These should be
+            # relative to the base of the source checkout
+            Optional("config-paths"): [str],
+            # Options to pass to the mozharness script
+            Optional("options"): _by_platform([str]),
+            # Action commands to provide to mozharness script
+            Required("actions"): _by_platform([str]),
+            # if true, perform a checkout of a comm-central based branch inside the
+            # gecko checkout
+            Optional("comm-checkout"): bool,
+        },
+        # Items for the taskcluster index
+        Optional("index"): {
+            # Product to identify as in the taskcluster index
+            Required("product"): _by_platform(str),
+            # Job name to identify as in the taskcluster index
+            Required("job-name"): _by_platform(str),
+            # Type of index
+            Optional("type"): _by_platform(str),
+        },
+        # Description of the localized task
+        Required("description"): _by_platform(str),
+        Optional("run-on-projects"): job_description_schema["run-on-projects"],
+        # worker-type to utilize
+        Required("worker-type"): _by_platform(str),
+        # File which contains the used locales
+        Required("locales-file"): _by_platform(str),
+        # Tooltool visibility required for task.
+        Required("tooltool"): _by_platform(Any("internal", "public")),
+        # Docker image required for task. We accept only in-tree images
+        # -- generally desktop-build or android-build -- for now.
+        Optional("docker-image"): _by_platform(
+            # an in-tree generated docker image (from `taskcluster/docker/<name>`)
+            {"in-tree": str},
+        ),
+        Optional("fetches"): {
+            str: _by_platform([str]),
+        },
+        # The set of secret names to which the task has access; these are prefixed
+        # with `project/releng/gecko/{treeherder.kind}/level-{level}/`. Setting
+        # this will enable any worker features required and set the task's scopes
+        # appropriately. `true` here means ['*'], all secrets. Not supported on
+        # Windows
+        Optional("secrets"): _by_platform(Any(bool, [str])),
+        # Information for treeherder
+        Required("treeherder"): {
+            # Platform to display the task on in treeherder
+            Required("platform"): _by_platform(str),
+            # Symbol to use
+            Required("symbol"): str,
+            # Tier this task is
+            Required("tier"): _by_platform(int),
+        },
+        # Extra environment values to pass to the worker
+        Optional("env"): _by_platform({str: taskref_or_string}),
+        # Max number locales per chunk
+        Optional("locales-per-chunk"): _by_platform(int),
+        # Task deps to chain this task with, added in transforms from primary-dependency
+        # if this is a shippable-style build
+        Optional("dependencies"): {str: str},
+        # Run the task when the listed files change (if present).
+        Optional("when"): {"files-changed": [str]},
+        # passed through directly to the job description
+        Optional("attributes"): job_description_schema["attributes"],
+        Optional("extra"): job_description_schema["extra"],
+        # Shipping product and phase
+        Optional("shipping-product"): task_description_schema["shipping-product"],
+        Optional("shipping-phase"): task_description_schema["shipping-phase"],
+    }
+)
+
+transforms = TransformSequence()
+
+
+def parse_locales_file(locales_file, platform=None):
+    """Parse the passed locales file for a list of locales.
+
+    Returns a dict mapping locale -> revision.  For JSON files the revision
+    comes from the file and entries may be filtered by *platform*; for plain
+    text files every whitespace-separated locale maps to "default".
+    """
+    locales = []
+
+    with open(locales_file, mode="r") as f:
+        if locales_file.endswith("json"):
+            all_locales = json.load(f)
+            # XXX Only single locales are fetched
+            locales = {
+                locale: data["revision"]
+                for locale, data in all_locales.items()
+                if platform is None or platform in data["platforms"]
+            }
+        else:
+            all_locales = f.read().split()
+            # 'default' is the hg revision at the top of hg repo, in this context
+            locales = {locale: "default" for locale in all_locales}
+    return locales
+
+
+def _remove_locales(locales, to_remove=None):
+ # ja-JP-mac is a mac-only locale, but there are no mac builds being repacked,
+ # so just omit it unconditionally
+ return {
+ locale: revision
+ for locale, revision in locales.items()
+ if locale not in to_remove
+ }
+
+
+@transforms.add
+def setup_name(config, jobs):
+    """Inherit the job name from the primary dependency if not already set."""
+    for job in jobs:
+        dep = job["primary-dependency"]
+        # Set the name to the same as the dep task, without kind name.
+        # Label will get set automatically with this kinds name.
+        job["name"] = job.get("name", task_name(dep))
+        yield job
+
+
+@transforms.add
+def copy_in_useful_magic(config, jobs):
+    """Copy attributes from the dependent build so later transforms can key
+    off them; job-level attributes take precedence over inherited ones."""
+    for job in jobs:
+        dep = job["primary-dependency"]
+        attributes = copy_attributes_from_dependent_job(dep)
+        attributes.update(job.get("attributes", {}))
+        # build-platform is needed on `job` for by-build-platform
+        job["build-platform"] = attributes.get("build_platform")
+        job["attributes"] = attributes
+        yield job
+
+
+transforms.add_validate(l10n_description_schema)
+
+
+@transforms.add
+def setup_shippable_dependency(config, jobs):
+    """Sets up a task dependency to the signing job this relates to"""
+    for job in jobs:
+        job["dependencies"] = {"build": job["dependent-tasks"]["build"].label}
+        # Windows/Linux additionally depend on the signed build; mac depends
+        # on the repackage task instead.
+        if job["attributes"]["build_platform"].startswith("win") or job["attributes"][
+            "build_platform"
+        ].startswith("linux"):
+            job["dependencies"].update(
+                {
+                    "build-signing": job["dependent-tasks"]["build-signing"].label,
+                }
+            )
+        if job["attributes"]["build_platform"].startswith("macosx"):
+            job["dependencies"].update(
+                {"repackage": job["dependent-tasks"]["repackage"].label}
+            )
+        yield job
+
+
+@transforms.add
+def handle_keyed_by(config, jobs):
+    """Resolve fields that can be keyed by platform, etc."""
+    # These fields may be written as {"by-build-platform": {...}} in the kind
+    # definition (see _by_platform); resolve them to concrete values here.
+    fields = [
+        "locales-file",
+        "locales-per-chunk",
+        "worker-type",
+        "description",
+        "run-time",
+        "docker-image",
+        "secrets",
+        "fetches.toolchain",
+        "fetches.fetch",
+        "tooltool",
+        "env",
+        "ignore-locales",
+        "mozharness.config",
+        "mozharness.options",
+        "mozharness.actions",
+        "mozharness.script",
+        "treeherder.tier",
+        "treeherder.platform",
+        "index.type",
+        "index.product",
+        "index.job-name",
+        "when.files-changed",
+    ]
+    for job in jobs:
+        job = copy_task(job)  # don't overwrite dict values here
+        for field in fields:
+            resolve_keyed_by(item=job, field=field, item_name=job["name"])
+        yield job
+
+
+@transforms.add
+def handle_artifact_prefix(config, jobs):
+    """Resolve ``artifact_prefix`` in env vars"""
+    for job in jobs:
+        artifact_prefix = get_artifact_prefix(job)
+        for k1, v1 in job.get("env", {}).items():
+            if isinstance(v1, str):
+                job["env"][k1] = v1.format(artifact_prefix=artifact_prefix)
+            elif isinstance(v1, dict):
+                # Nested mapping (e.g. a task-reference value per the schema);
+                # format each inner string in place.
+                for k2, v2 in v1.items():
+                    job["env"][k1][k2] = v2.format(artifact_prefix=artifact_prefix)
+        yield job
+
+
+@transforms.add
+def all_locales_attribute(config, jobs):
+    """Compute the locale list / locale->changeset map for this job and
+    expose both as attributes for downstream kinds."""
+    for job in jobs:
+        # The locales files are keyed on the bare platform name, without the
+        # -shippable / -pgo suffixes.
+        locales_platform = job["attributes"]["build_platform"].replace("-shippable", "")
+        locales_platform = locales_platform.replace("-pgo", "")
+        locales_with_changesets = parse_locales_file(
+            job["locales-file"], platform=locales_platform
+        )
+        locales_with_changesets = _remove_locales(
+            locales_with_changesets, to_remove=job["ignore-locales"]
+        )
+
+        locales = sorted(locales_with_changesets.keys())
+        attributes = job.setdefault("attributes", {})
+        attributes["all_locales"] = locales
+        attributes["all_locales_with_changesets"] = locales_with_changesets
+        if job.get("shipping-product"):
+            attributes["shipping_product"] = job["shipping-product"]
+        yield job
+
+
+@transforms.add
+def chunk_locales(config, jobs):
+    """Utilizes chunking for l10n stuff"""
+    # When locales-per-chunk is set, fan the job out into N chunked copies,
+    # each repacking a slice of the locales; otherwise pass all locales to
+    # the single job via mozharness options.
+    for job in jobs:
+        locales_per_chunk = job.get("locales-per-chunk")
+        locales_with_changesets = job["attributes"]["all_locales_with_changesets"]
+        if locales_per_chunk:
+            # Round up so a partial final chunk is still emitted.
+            chunks, remainder = divmod(len(locales_with_changesets), locales_per_chunk)
+            if remainder:
+                chunks = int(chunks + 1)
+            for this_chunk in range(1, chunks + 1):
+                chunked = copy_task(job)
+                # Insert the chunk number ahead of the first "/" in the name.
+                chunked["name"] = chunked["name"].replace("/", f"-{this_chunk}/", 1)
+                chunked["mozharness"]["options"] = chunked["mozharness"].get(
+                    "options", []
+                )
+                # chunkify doesn't work with dicts
+                locales_with_changesets_as_list = sorted(
+                    locales_with_changesets.items()
+                )
+                chunked_locales = chunkify(
+                    locales_with_changesets_as_list, this_chunk, chunks
+                )
+                chunked["mozharness"]["options"].extend(
+                    [
+                        f"locale={locale}:{changeset}"
+                        for locale, changeset in chunked_locales
+                    ]
+                )
+                chunked["attributes"]["l10n_chunk"] = str(this_chunk)
+                # strip revision
+                chunked["attributes"]["chunk_locales"] = [
+                    locale for locale, _ in chunked_locales
+                ]
+
+                # add the chunk number to the TH symbol
+                chunked["treeherder"]["symbol"] = add_suffix(
+                    chunked["treeherder"]["symbol"], this_chunk
+                )
+                yield chunked
+        else:
+            job["mozharness"]["options"] = job["mozharness"].get("options", [])
+            job["mozharness"]["options"].extend(
+                [
+                    f"locale={locale}:{changeset}"
+                    for locale, changeset in sorted(locales_with_changesets.items())
+                ]
+            )
+            yield job
+
+
+transforms.add_validate(l10n_description_schema)
+
+
+@transforms.add
+def stub_installer(config, jobs):
+    """Export USE_STUB_INSTALLER in the task environment when the dependent
+    build's attributes request a stub installer."""
+    for job in jobs:
+        job.setdefault("attributes", {})
+        job.setdefault("env", {})
+        if job["attributes"].get("stub-installer"):
+            job["env"].update({"USE_STUB_INSTALLER": "1"})
+        yield job
+
+
+@transforms.add
+def set_extra_config(config, jobs):
+    """Pass the branch (and update channel, when present) through to the
+    mozharness extra-config."""
+    for job in jobs:
+        job["mozharness"].setdefault("extra-config", {})["branch"] = config.params[
+            "project"
+        ]
+        if "update-channel" in job["attributes"]:
+            job["mozharness"]["extra-config"]["update_channel"] = job["attributes"][
+                "update-channel"
+            ]
+        yield job
+
+
+@transforms.add
+def make_job_description(config, jobs):
+    """Convert the validated l10n description into a generic job description
+    understood by the job/task transforms."""
+    for job in jobs:
+        job["mozharness"].update(
+            {
+                "using": "mozharness",
+                "job-script": "taskcluster/scripts/builder/build-l10n.sh",
+                "secrets": job.get("secrets", False),
+            }
+        )
+        job_description = {
+            "name": job["name"],
+            "worker-type": job["worker-type"],
+            "description": job["description"],
+            "run": job["mozharness"],
+            "attributes": job["attributes"],
+            "treeherder": {
+                "kind": "build",
+                "tier": job["treeherder"]["tier"],
+                "symbol": job["treeherder"]["symbol"],
+                "platform": job["treeherder"]["platform"],
+            },
+            "run-on-projects": job.get("run-on-projects")
+            if job.get("run-on-projects")
+            else [],
+        }
+        if job.get("extra"):
+            job_description["extra"] = job["extra"]
+
+        job_description["run"]["tooltool-downloads"] = job["tooltool"]
+
+        job_description["worker"] = {
+            "max-run-time": job["run-time"],
+            "chain-of-trust": True,
+        }
+        # NOTE(review): windows-specific run tweaks are keyed on this single
+        # worker-type string — confirm no other windows worker-types are used.
+        if job["worker-type"] == "b-win2012":
+            job_description["worker"]["os"] = "windows"
+            job_description["run"]["use-simple-package"] = False
+            job_description["run"]["use-magic-mh-args"] = False
+
+        if job.get("docker-image"):
+            job_description["worker"]["docker-image"] = job["docker-image"]
+
+        if job.get("fetches"):
+            job_description["fetches"] = job["fetches"]
+
+        if job.get("index"):
+            job_description["index"] = {
+                "product": job["index"]["product"],
+                "job-name": job["index"]["job-name"],
+                "type": job["index"].get("type", "generic"),
+            }
+
+        if job.get("dependencies"):
+            job_description["dependencies"] = job["dependencies"]
+        if job.get("env"):
+            job_description["worker"]["env"] = job["env"]
+        if job.get("when", {}).get("files-changed"):
+            # The locales file itself always counts as a trigger.
+            job_description.setdefault("when", {})
+            job_description["when"]["files-changed"] = [job["locales-file"]] + job[
+                "when"
+            ]["files-changed"]
+
+        if "shipping-phase" in job:
+            job_description["shipping-phase"] = job["shipping-phase"]
+
+        if "shipping-product" in job:
+            job_description["shipping-product"] = job["shipping-product"]
+
+        yield job_description
diff --git a/taskcluster/gecko_taskgraph/transforms/mac_dummy.py b/taskcluster/gecko_taskgraph/transforms/mac_dummy.py
new file mode 100644
index 0000000000..f134ee2765
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/mac_dummy.py
@@ -0,0 +1,40 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Add dependencies to dummy macosx64 tasks.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def add_dependencies(config, jobs):
+    """Make each macosx64 dummy task depend on the matching plain, aarch64
+    and x64 tasks, inheriting run-on-projects from the matching build."""
+    for job in jobs:
+        dependencies = {}
+
+        platform = job.get("attributes", {}).get("build_platform")
+        if not platform:
+            # Without a platform we cannot match dependencies; drop the job.
+            continue
+        arm = platform.replace("macosx64", "macosx64-aarch64")
+        intel = platform.replace("macosx64", "macosx64-x64")
+        for dep_task in config.kind_dependencies_tasks.values():
+            # Weed out unwanted tasks.
+            if dep_task.attributes.get("build_platform"):
+                if dep_task.attributes["build_platform"] not in (platform, arm, intel):
+                    continue
+            # Add matching tasks to deps
+            dependencies[dep_task.label] = dep_task.label
+            # Pick one task to copy run-on-projects from
+            if (
+                dep_task.kind == "build"
+                and dep_task.attributes["build_platform"] == platform
+            ):
+                job["run-on-projects"] = dep_task.attributes.get("run_on_projects")
+
+        job.setdefault("dependencies", {}).update(dependencies)
+        # NOTE(review): presumably makes the task run only when at least one
+        # of these dependencies runs — confirm against taskgraph semantics.
+        job["if-dependencies"] = list(dependencies)
+
+        yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/mac_notarization.py b/taskcluster/gecko_taskgraph/transforms/mac_notarization.py
new file mode 100644
index 0000000000..5591022e1b
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/mac_notarization.py
@@ -0,0 +1,19 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform mac notarization tasks
+"""
+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def only_level_3_notarization(config, jobs):
+    """Filter out any notarization jobs that are not level 3"""
+    for job in jobs:
+        # Notarization kinds are restricted to level-3 graphs; drop the job
+        # on lower-level (e.g. try) graphs.
+        if "notarization" in config.kind and int(config.params["level"]) != 3:
+            continue
+        yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/mar_signing.py b/taskcluster/gecko_taskgraph/transforms/mar_signing.py
new file mode 100644
index 0000000000..d923721f45
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/mar_signing.py
@@ -0,0 +1,140 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the {partials,mar}-signing task into an actual task description.
+"""
+
+import logging
+import os
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.taskcluster import get_artifact_prefix
+from taskgraph.util.treeherder import inherit_treeherder_from_dep, join_symbol
+
+from gecko_taskgraph.util.attributes import (
+ copy_attributes_from_dependent_job,
+ sorted_unique_list,
+)
+from gecko_taskgraph.util.partials import get_partials_artifacts_from_params
+from gecko_taskgraph.util.scriptworker import get_signing_cert_scope_per_platform
+
+logger = logging.getLogger(__name__)
+
+# Mapping of kind -> {mar basename -> signing formats}; kinds not listed fall
+# back to "default" (see generate_complete_artifacts).
+SIGNING_FORMATS = {
+    "mar-signing-autograph-stage": {
+        "target.complete.mar": ["autograph_stage_mar384"],
+    },
+    "default": {
+        "target.complete.mar": ["autograph_hash_only_mar384"],
+    },
+}
+
+transforms = TransformSequence()
+
+
+def generate_partials_artifacts(job, release_history, platform, locale=None):
+    """Build the upstream-artifact definitions for the partial MARs that the
+    <partials> task produced for this platform/locale."""
+    artifact_prefix = get_artifact_prefix(job)
+    if locale:
+        artifact_prefix = f"{artifact_prefix}/{locale}"
+    else:
+        locale = "en-US"
+
+    artifacts = get_partials_artifacts_from_params(release_history, platform, locale)
+
+    upstream_artifacts = [
+        {
+            "taskId": {"task-reference": "<partials>"},
+            "taskType": "partials",
+            # Only the path half of each (path, version) pair is needed here.
+            "paths": [f"{artifact_prefix}/{path}" for path, version in artifacts],
+            # NOTE(review): format is hard-coded rather than looked up in
+            # SIGNING_FORMATS — confirm staging kinds never sign partials.
+            "formats": ["autograph_hash_only_mar384"],
+        }
+    ]
+
+    return upstream_artifacts
+
+
+def generate_complete_artifacts(job, kind):
+    """Build upstream-artifact definitions for the complete MARs listed in
+    the dependency's release_artifacts, using the signing formats for *kind*
+    (falling back to "default" for unknown kinds)."""
+    upstream_artifacts = []
+    if kind not in SIGNING_FORMATS:
+        kind = "default"
+    for artifact in job.attributes["release_artifacts"]:
+        basename = os.path.basename(artifact)
+        if basename in SIGNING_FORMATS[kind]:
+            upstream_artifacts.append(
+                {
+                    "taskId": {"task-reference": f"<{job.kind}>"},
+                    "taskType": "build",
+                    "paths": [artifact],
+                    "formats": SIGNING_FORMATS[kind][basename],
+                }
+            )
+
+    return upstream_artifacts
+
+
+@transforms.add
+def make_task_description(config, jobs):
+    """Turn a {partials,mar}-signing stub into a full task description that
+    signs either the partial MARs or the complete MAR of the dependency."""
+    for job in jobs:
+        dep_job = job["primary-dependency"]
+        locale = dep_job.attributes.get("locale")
+
+        treeherder = inherit_treeherder_from_dep(job, dep_job)
+        # "ms" group; "N" marks the locale-less (en-US) job.
+        treeherder.setdefault(
+            "symbol", join_symbol(job.get("treeherder-group", "ms"), locale or "N")
+        )
+
+        label = job.get("label", f"{config.kind}-{dep_job.label}")
+
+        dependencies = {dep_job.kind: dep_job.label}
+        signing_dependencies = dep_job.dependencies
+        # This is so we get the build task etc in our dependencies to
+        # have better beetmover support.
+        dependencies.update(signing_dependencies)
+
+        attributes = copy_attributes_from_dependent_job(dep_job)
+        # NOTE(review): pop() without a default — every kind definition is
+        # expected to provide required_signoffs; confirm.
+        attributes["required_signoffs"] = sorted_unique_list(
+            attributes.get("required_signoffs", []), job.pop("required_signoffs")
+        )
+        attributes["shipping_phase"] = job["shipping-phase"]
+        if locale:
+            attributes["locale"] = locale
+
+        build_platform = attributes.get("build_platform")
+        if config.kind == "partials-signing":
+            upstream_artifacts = generate_partials_artifacts(
+                dep_job, config.params["release_history"], build_platform, locale
+            )
+        else:
+            upstream_artifacts = generate_complete_artifacts(dep_job, config.kind)
+
+        is_shippable = job.get(
+            "shippable", dep_job.attributes.get("shippable")  # First check current job
+        )  # Then dep job for 'shippable'
+        signing_cert_scope = get_signing_cert_scope_per_platform(
+            build_platform, is_shippable, config
+        )
+
+        scopes = [signing_cert_scope]
+
+        task = {
+            "label": label,
+            "description": "{} {}".format(
+                dep_job.description, job["description-suffix"]
+            ),
+            "worker-type": job.get("worker-type", "linux-signing"),
+            "worker": {
+                "implementation": "scriptworker-signing",
+                "upstream-artifacts": upstream_artifacts,
+                "max-run-time": 3600,
+            },
+            "dependencies": dependencies,
+            "attributes": attributes,
+            "scopes": scopes,
+            "run-on-projects": job.get(
+                "run-on-projects", dep_job.attributes.get("run_on_projects")
+            ),
+            "treeherder": treeherder,
+        }
+
+        yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/maybe_release.py b/taskcluster/gecko_taskgraph/transforms/maybe_release.py
new file mode 100644
index 0000000000..08a066001a
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/maybe_release.py
@@ -0,0 +1,23 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+
+from gecko_taskgraph.util.attributes import release_level
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def make_task_description(config, jobs):
+    """Resolve worker-type and scopes, which are keyed by the project's
+    release level."""
+    for job in jobs:
+        for key in ["worker-type", "scopes"]:
+            resolve_keyed_by(
+                job,
+                key,
+                item_name=job["name"],
+                **{"release-level": release_level(config.params["project"])}
+            )
+        yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/merge_automation.py b/taskcluster/gecko_taskgraph/transforms/merge_automation.py
new file mode 100644
index 0000000000..ca5f3b6bde
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/merge_automation.py
@@ -0,0 +1,81 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the update generation task into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def handle_keyed_by(config, tasks):
+    """Resolve fields that can be keyed by platform, etc."""
+    # Without merge_config (e.g. a graph not triggered by merge automation)
+    # there is nothing to resolve against; return early, emitting no tasks.
+    if "merge_config" not in config.params:
+        return
+    merge_config = config.params["merge_config"]
+    fields = [
+        "worker.push",
+        "worker-type",
+        "worker.l10n-bump-info",
+        "worker.source-repo",
+    ]
+    for task in tasks:
+        for field in fields:
+            resolve_keyed_by(
+                task,
+                field,
+                item_name=task["name"],
+                **{
+                    "project": config.params["project"],
+                    "release-type": config.params["release_type"],
+                    "behavior": merge_config["behavior"],
+                }
+            )
+        yield task
+
+
+@transforms.add
+def update_labels(config, tasks):
+    """Name the task and its treeherder symbol after the merge behavior."""
+    for task in tasks:
+        merge_config = config.params["merge_config"]
+        task["label"] = "merge-{}".format(merge_config["behavior"])
+        treeherder = task.get("treeherder", {})
+        treeherder["symbol"] = "Rel({})".format(merge_config["behavior"])
+        task["treeherder"] = treeherder
+        yield task
+
+
+@transforms.add
+def add_payload_config(config, tasks):
+    """Fill in the worker's merge-info payload from graph config, applying
+    per-run overrides from the merge_config parameters."""
+    for task in tasks:
+        # NOTE(review): `break` here vs. `return` in handle_keyed_by — both
+        # end the generator; consider making them consistent.
+        if "merge_config" not in config.params:
+            break
+        merge_config = config.params["merge_config"]
+        worker = task["worker"]
+        worker["merge-info"] = config.graph_config["merge-automation"]["behaviors"][
+            merge_config["behavior"]
+        ]
+
+        # A keyed-by resolution may have left an explicit null; drop it.
+        if "l10n-bump-info" in worker and worker["l10n-bump-info"] is None:
+            del worker["l10n-bump-info"]
+
+        # Override defaults, useful for testing.
+        for field in [
+            "from-repo",
+            "from-branch",
+            "to-repo",
+            "to-branch",
+            "fetch-version-from",
+        ]:
+            if merge_config.get(field):
+                worker["merge-info"][field] = merge_config[field]
+
+        worker["force-dry-run"] = merge_config["force-dry-run"]
+        worker["ssh-user"] = merge_config.get("ssh-user-alias", "merge_user")
+        if merge_config.get("push"):
+            worker["push"] = merge_config["push"]
+        yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/name_sanity.py b/taskcluster/gecko_taskgraph/transforms/name_sanity.py
new file mode 100644
index 0000000000..856f4f82be
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/name_sanity.py
@@ -0,0 +1,45 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Generate labels for tasks without names, consistently.
+Uses attributes from `primary-dependency`.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def make_label(config, jobs):
+    """Generate a sane label for a new task constructed from a dependency
+    Using attributes from the dependent job and the current task kind"""
+    for job in jobs:
+        dep_job = job["primary-dependency"]
+        attr = dep_job.attributes.get
+
+        if attr("locale", job.get("locale")):
+            # Single-locale repack.
+            template = "{kind}-{locale}-{build_platform}/{build_type}"
+        elif attr("l10n_chunk"):
+            # Chunked l10n job.
+            template = "{kind}-{build_platform}-{l10n_chunk}/{build_type}"
+        elif config.kind.startswith("release-eme-free") or config.kind.startswith(
+            "release-partner-repack"
+        ):
+            # Partner/EME-free repacks carry a repack suffix or id instead of
+            # a build type; "/" in the suffix would break the label.
+            suffix = job.get("extra", {}).get("repack_suffix", None) or job.get(
+                "extra", {}
+            ).get("repack_id", None)
+            template = "{kind}-{build_platform}"
+            if suffix:
+                template += "-{}".format(suffix.replace("/", "-"))
+        else:
+            template = "{kind}-{build_platform}/{build_type}"
+        job["label"] = template.format(
+            kind=config.kind,
+            build_platform=attr("build_platform"),
+            build_type=attr("build_type"),
+            locale=attr("locale", job.get("locale", "")),  # Locale can be absent
+            l10n_chunk=attr("l10n_chunk", ""),  # Can be empty
+        )
+
+        yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/openh264.py b/taskcluster/gecko_taskgraph/transforms/openh264.py
new file mode 100644
index 0000000000..f41215d20b
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/openh264.py
@@ -0,0 +1,26 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+This transform is used to help populate mozharness options for openh264 jobs
+"""
+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def set_mh_options(config, jobs):
+    """
+    Set the 'openh264_rev' attribute and pass the repo/revision through to
+    the mozharness script via run.options.
+    """
+    for job in jobs:
+        repo = job.pop("repo")
+        rev = job.pop("revision")
+        attributes = job.setdefault("attributes", {})
+        attributes["openh264_rev"] = rev
+        run = job.setdefault("run", {})
+        options = run.setdefault("options", [])
+        options.extend([f"repo={repo}", f"rev={rev}"])
+        yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/openh264_signing.py b/taskcluster/gecko_taskgraph/transforms/openh264_signing.py
new file mode 100644
index 0000000000..dfffc7cc17
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/openh264_signing.py
@@ -0,0 +1,109 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the repackage signing task into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.treeherder import inherit_treeherder_from_dep
+from voluptuous import Optional
+
+from gecko_taskgraph.loader.single_dep import schema
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
+from gecko_taskgraph.util.scriptworker import get_signing_cert_scope_per_platform
+
+transforms = TransformSequence()
+
+# Schema additions over the single-dep base; the real work happens in
+# make_signing_description below.
+signing_description_schema = schema.extend(
+    {
+        Optional("label"): str,
+        Optional("extra"): object,
+        Optional("shipping-product"): task_description_schema["shipping-product"],
+        Optional("shipping-phase"): task_description_schema["shipping-phase"],
+    }
+)
+
+transforms.add_validate(signing_description_schema)
+
+
+@transforms.add
+def make_signing_description(config, jobs):
+    """Build a signing task description for the openh264 zip produced by the
+    dependent build, choosing format/worker per platform."""
+    for job in jobs:
+        dep_job = job["primary-dependency"]
+        attributes = dep_job.attributes
+        build_platform = dep_job.attributes.get("build_platform")
+        is_nightly = True  # cert_scope_per_platform uses this to choose the right cert
+
+        description = (
+            "Signing of OpenH264 Binaries for '"
+            "{build_platform}/{build_type}'".format(
+                build_platform=attributes.get("build_platform"),
+                build_type=attributes.get("build_type"),
+            )
+        )
+
+        # we have a genuine repackage job as our parent
+        dependencies = {"openh264": dep_job.label}
+
+        my_attributes = copy_attributes_from_dependent_job(dep_job)
+
+        signing_cert_scope = get_signing_cert_scope_per_platform(
+            build_platform, is_nightly, config
+        )
+
+        scopes = [signing_cert_scope]
+        worker_type = "linux-signing"
+        worker = {
+            "implementation": "scriptworker-signing",
+            "max-run-time": 3600,
+        }
+        rev = attributes["openh264_rev"]
+        upstream_artifact = {
+            "taskId": {"task-reference": "<openh264>"},
+            "taskType": "build",
+        }
+
+        # Windows gets authenticode; mac gets single-file notarization on the
+        # mac signing pool; everything else gets a detached GPG signature.
+        if "win" in build_platform:
+            # job['primary-dependency'].task['payload']['command']
+            upstream_artifact["formats"] = ["autograph_authenticode_sha2"]
+        elif "mac" in build_platform:
+            upstream_artifact["formats"] = ["mac_single_file"]
+            upstream_artifact["singleFileGlobs"] = ["libgmpopenh264.dylib"]
+            worker_type = "mac-signing"
+            worker["mac-behavior"] = "mac_notarize_single_file"
+        else:
+            upstream_artifact["formats"] = ["autograph_gpg"]
+
+        upstream_artifact["paths"] = [
+            f"private/openh264/openh264-{build_platform}-{rev}.zip",
+        ]
+        worker["upstream-artifacts"] = [upstream_artifact]
+
+        treeherder = inherit_treeherder_from_dep(job, dep_job)
+        treeherder.setdefault(
+            "symbol",
+            _generate_treeherder_symbol(
+                dep_job.task.get("extra", {}).get("treeherder", {}).get("symbol")
+            ),
+        )
+
+        task = {
+            "label": job["label"],
+            "description": description,
+            "worker-type": worker_type,
+            "worker": worker,
+            "scopes": scopes,
+            "dependencies": dependencies,
+            "attributes": my_attributes,
+            "run-on-projects": dep_job.attributes.get("run_on_projects"),
+            "treeherder": treeherder,
+        }
+
+        yield task
+
+
+def _generate_treeherder_symbol(build_symbol):
+ symbol = build_symbol + "s"
+ return symbol
diff --git a/taskcluster/gecko_taskgraph/transforms/partials.py b/taskcluster/gecko_taskgraph/transforms/partials.py
new file mode 100644
index 0000000000..267739cfc3
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/partials.py
@@ -0,0 +1,172 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the partials task into an actual task description.
+"""
+
+import logging
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.taskcluster import get_artifact_prefix
+from taskgraph.util.treeherder import inherit_treeherder_from_dep
+
+from gecko_taskgraph.util.attributes import (
+ copy_attributes_from_dependent_job,
+ release_level,
+)
+from gecko_taskgraph.util.partials import get_builds
+from gecko_taskgraph.util.platforms import architecture
+
+logger = logging.getLogger(__name__)
+
+transforms = TransformSequence()
+
+
def _generate_task_output_files(job, filenames, locale=None):
    """Build the docker-worker artifact definitions for a partials task.

    Args:
        job: the dependent task, passed through to get_artifact_prefix().
        filenames: iterable of partial-MAR filenames the task will produce.
        locale (str): optional locale; artifacts are nested under "{locale}/"
            when set.

    Returns:
        list: one artifact dict per filename, plus one for the manifest.json
        that the funsize update generator writes alongside the partials.
    """
    locale_output_path = f"{locale}/" if locale else ""
    artifact_prefix = get_artifact_prefix(job)

    data = []
    for filename in filenames:
        data.append(
            {
                "type": "file",
                # BUG FIX: the loop variable was never interpolated into the
                # paths, so every filename produced the same bogus entry.
                "path": f"/home/worker/artifacts/{filename}",
                "name": f"{artifact_prefix}/{locale_output_path}{filename}",
            }
        )
    data.append(
        {
            "type": "file",
            "path": "/home/worker/artifacts/manifest.json",
            "name": f"{artifact_prefix}/{locale_output_path}manifest.json",
        }
    )
    return data
+
+
def identify_desired_signing_keys(project, product):
    """Map a repository (and product) to the MAR signing key alias to use.

    Trunk branches sign with the "nightly" key; beta and release branches use
    the "release" key, except devedition (built from mozilla-beta), which
    keeps the "nightly" key. Anything else falls back to the "dep1"
    depend-signing key.
    """
    nightly_projects = ("mozilla-central", "comm-central", "oak")
    if project in nightly_projects:
        return "nightly"
    if project == "mozilla-beta":
        return "nightly" if product == "devedition" else "release"
    is_release_branch = project in ("mozilla-release", "comm-beta") or project.startswith(
        ("mozilla-esr", "comm-esr")
    )
    return "release" if is_release_branch else "dep1"
+
+
+@transforms.add
def make_task_description(config, jobs):
    """Build a funsize partial-update generation task per eligible dependency.

    Consumes the `release_history` parameter (produced by balrog) to decide
    which previous releases to diff against; emits nothing at all when there
    is no history.
    """
    # If no balrog release history, then don't generate partials
    if not config.params.get("release_history"):
        return
    for job in jobs:
        dep_job = job["primary-dependency"]

        treeherder = inherit_treeherder_from_dep(job, dep_job)
        treeherder.setdefault("symbol", "p(N)")

        label = job.get("label", f"partials-{dep_job.label}")

        dependencies = {dep_job.kind: dep_job.label}

        attributes = copy_attributes_from_dependent_job(dep_job)
        locale = dep_job.attributes.get("locale")
        if locale:
            attributes["locale"] = locale
            # Localized repacks get a per-locale symbol instead of p(N).
            treeherder["symbol"] = f"p({locale})"
        attributes["shipping_phase"] = job["shipping-phase"]

        build_locale = locale or "en-US"

        build_platform = attributes["build_platform"]
        builds = get_builds(
            config.params["release_history"], build_platform, build_locale
        )

        # If the list is empty there's no available history for this platform
        # and locale combination, so we can't build any partials.
        if not builds:
            continue

        extra = {"funsize": {"partials": list()}}
        update_number = 1

        locale_suffix = ""
        if locale:
            locale_suffix = f"{locale}/"
        # artifact-reference to the complete MAR produced by the dependency;
        # resolved to a URL by the task-reference machinery.
        artifact_path = "<{}/{}/{}target.complete.mar>".format(
            dep_job.kind,
            get_artifact_prefix(dep_job),
            locale_suffix,
        )
        # One partial per historical build, numbered in sorted order.
        for build in sorted(builds):
            partial_info = {
                "locale": build_locale,
                "from_mar": builds[build]["mar_url"],
                "to_mar": {"artifact-reference": artifact_path},
                "branch": config.params["project"],
                "update_number": update_number,
                "dest_mar": build,
            }
            if "product" in builds[build]:
                partial_info["product"] = builds[build]["product"]
            if "previousVersion" in builds[build]:
                partial_info["previousVersion"] = builds[build]["previousVersion"]
            if "previousBuildNumber" in builds[build]:
                partial_info["previousBuildNumber"] = builds[build][
                    "previousBuildNumber"
                ]
            extra["funsize"]["partials"].append(partial_info)
            update_number += 1

        level = config.params["level"]

        worker = {
            "artifacts": _generate_task_output_files(dep_job, builds.keys(), locale),
            "implementation": "docker-worker",
            "docker-image": {"in-tree": "funsize-update-generator"},
            "os": "linux",
            # asan builds are slower to diff, so they get a longer deadline.
            "max-run-time": 3600 if "asan" in dep_job.label else 1800,
            "chain-of-trust": True,
            "taskcluster-proxy": True,
            "env": {
                "SIGNING_CERT": identify_desired_signing_keys(
                    config.params["project"], config.params["release_product"]
                ),
                "EXTRA_PARAMS": f"--arch={architecture(build_platform)}",
                "MAR_CHANNEL_ID": attributes["mar-channel-id"],
            },
        }
        if release_level(config.params["project"]) == "staging":
            worker["env"]["FUNSIZE_ALLOW_STAGING_PREFIXES"] = "true"

        task = {
            "label": label,
            "description": f"{dep_job.description} Partials",
            "worker-type": "b-linux-gcp",
            "dependencies": dependencies,
            "scopes": [],
            "attributes": attributes,
            "run-on-projects": dep_job.attributes.get("run_on_projects"),
            "treeherder": treeherder,
            "extra": extra,
            "worker": worker,
        }

        # We only want caching on linux/windows due to bug 1436977
        if int(level) == 3 and any(
            [build_platform.startswith(prefix) for prefix in ["linux", "win"]]
        ):
            task["scopes"].append(
                "auth:aws-s3:read-write:tc-gp-private-1d-us-east-1/releng/mbsdiff-cache/"
            )

        yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/partner_attribution.py b/taskcluster/gecko_taskgraph/transforms/partner_attribution.py
new file mode 100644
index 0000000000..0bd5e0d141
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/partner_attribution.py
@@ -0,0 +1,129 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the partner attribution task into an actual task description.
+"""
+
+
+import json
+import logging
+from collections import defaultdict
+
+from taskgraph.transforms.base import TransformSequence
+
+from gecko_taskgraph.util.partners import (
+ apply_partner_priority,
+ check_if_partners_enabled,
+ generate_attribution_code,
+ get_partner_config_by_kind,
+)
+
+log = logging.getLogger(__name__)
+
+transforms = TransformSequence()
+transforms.add(check_if_partners_enabled)
+transforms.add(apply_partner_priority)
+
+
+@transforms.add
def add_command_arguments(config, tasks):
    """Configure the attribution task: dependencies, fetches, and the
    ATTRIBUTION_CONFIG payload describing each installer to attribute.

    The per-partner work is computed once, up front; the task loop then
    stamps the same configuration onto every incoming task. If no partner
    config yields attributions, no tasks are emitted at all.
    """
    enabled_partners = config.params.get("release_partners")
    dependencies = {}
    fetches = defaultdict(set)
    attributions = []
    release_artifacts = []
    attribution_config = get_partner_config_by_kind(config, config.kind)

    for partner_config in attribution_config.get("configs", []):
        # we might only be interested in a subset of all partners, eg for a respin
        if enabled_partners and partner_config["campaign"] not in enabled_partners:
            continue
        attribution_code = generate_attribution_code(
            attribution_config["defaults"], partner_config
        )
        for platform in partner_config["platforms"]:
            stage_platform = platform.replace("-shippable", "")
            for locale in partner_config["locales"]:
                # find the upstream, throw away locales we don't have, somehow. Skip ?
                if locale == "en-US":
                    upstream_label = "repackage-signing-{platform}/opt".format(
                        platform=platform
                    )
                    upstream_artifact = "target.installer.exe"
                else:
                    upstream_label = (
                        "repackage-signing-l10n-{locale}-{platform}/opt".format(
                            locale=locale, platform=platform
                        )
                    )
                    upstream_artifact = "{locale}/target.installer.exe".format(
                        locale=locale
                    )
                if upstream_label not in config.kind_dependencies_tasks:
                    raise Exception(f"Can't find upstream task for {platform} {locale}")
                upstream = config.kind_dependencies_tasks[upstream_label]

                # set the dependencies to just what we need rather than all of l10n
                dependencies.update({upstream.label: upstream.label})

                fetches[upstream_label].add((upstream_artifact, stage_platform, locale))

                artifact_part = "{platform}/{locale}/target.installer.exe".format(
                    platform=stage_platform, locale=locale
                )
                artifact = (
                    "releng/partner/{partner}/{sub_partner}/{artifact_part}".format(
                        partner=partner_config["campaign"],
                        sub_partner=partner_config["content"],
                        artifact_part=artifact_part,
                    )
                )
                # config for script
                # TODO - generalise input & output ??
                # add releng/partner prefix via get_artifact_prefix..()
                attributions.append(
                    {
                        "input": f"/builds/worker/fetches/{artifact_part}",
                        "output": f"/builds/worker/artifacts/{artifact}",
                        "attribution": attribution_code,
                    }
                )
                release_artifacts.append(artifact)

    # bail-out early if we don't have any attributions to do
    if not attributions:
        return

    for task in tasks:
        # NOTE(review): if a task arrives without a "worker" key, this local
        # dict is mutated but never attached back to the task — presumably
        # kind.yml always supplies one; confirm.
        worker = task.get("worker", {})
        worker["chain-of-trust"] = True

        task.setdefault("dependencies", {}).update(dependencies)
        task.setdefault("fetches", {})
        for upstream_label, upstream_artifacts in fetches.items():
            task["fetches"][upstream_label] = [
                {
                    "artifact": upstream_artifact,
                    "dest": "{platform}/{locale}".format(
                        platform=platform, locale=locale
                    ),
                    "extract": False,
                    "verify-hash": True,
                }
                for upstream_artifact, platform, locale in upstream_artifacts
            ]
        worker.setdefault("env", {})["ATTRIBUTION_CONFIG"] = json.dumps(
            attributions, sort_keys=True
        )
        worker["artifacts"] = [
            {
                "name": "releng/partner",
                "path": "/builds/worker/artifacts/releng/partner",
                "type": "directory",
            }
        ]
        task.setdefault("attributes", {})["release_artifacts"] = release_artifacts
        task["label"] = config.kind

        yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/partner_attribution_beetmover.py b/taskcluster/gecko_taskgraph/transforms/partner_attribution_beetmover.py
new file mode 100644
index 0000000000..b2c435dd81
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/partner_attribution_beetmover.py
@@ -0,0 +1,202 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the beetmover task into an actual task description.
+"""
+
+from collections import defaultdict
+from copy import deepcopy
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import optionally_keyed_by, resolve_keyed_by
+from taskgraph.util.taskcluster import get_artifact_prefix
+from voluptuous import Any, Optional, Required
+
+from gecko_taskgraph.loader.single_dep import schema
+from gecko_taskgraph.transforms.beetmover import craft_release_properties
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import (
+ copy_attributes_from_dependent_job,
+ release_level,
+)
+from gecko_taskgraph.util.partners import (
+ apply_partner_priority,
+ get_partner_config_by_kind,
+)
+from gecko_taskgraph.util.scriptworker import (
+ add_scope_prefix,
+ get_beetmover_bucket_scope,
+)
+
# Schema for partner-attribution beetmover jobs, extending the generic
# single-dep schema with the partner destination configuration.
beetmover_description_schema = schema.extend(
    {
        # depname is used in taskref's to identify the taskID of the unsigned things
        Required("depname", default="build"): str,
        # unique label to describe this beetmover task, defaults to {dep.label}-beetmover
        Optional("label"): str,
        # Bucket scope for the private partner destination, keyed by release level.
        Required("partner-bucket-scope"): optionally_keyed_by("release-level", str),
        # Destination path templates; None when that visibility is unused.
        Required("partner-public-path"): Any(None, str),
        Required("partner-private-path"): Any(None, str),
        Optional("extra"): object,
        Required("shipping-phase"): task_description_schema["shipping-phase"],
        Optional("shipping-product"): task_description_schema["shipping-product"],
        Optional("priority"): task_description_schema["priority"],
    }
)

transforms = TransformSequence()
transforms.add_validate(beetmover_description_schema)
# May lower task priority for partner jobs, per partner configuration.
transforms.add(apply_partner_priority)
+
+
+@transforms.add
def resolve_keys(config, jobs):
    """Resolve the by-release-level keying of `partner-bucket-scope`.

    The kwarg is spelled via ** because the key contains a hyphen.
    """
    for job in jobs:
        resolve_keyed_by(
            job,
            "partner-bucket-scope",
            item_name=job["label"],
            **{"release-level": release_level(config.params["project"])},
        )
        yield job
+
+
+@transforms.add
def split_public_and_private(config, jobs):
    # we need to separate private vs public destinations because beetmover supports one
    # in a single task. Only use a single task for each type though.
    partner_config = get_partner_config_by_kind(config, config.kind)
    for job in jobs:
        # Artifact paths published by the upstream attribution task.
        upstream_artifacts = job["primary-dependency"].attributes.get(
            "release_artifacts"
        )
        attribution_task_ref = "<{}>".format(job["primary-dependency"].label)
        prefix = get_artifact_prefix(job["primary-dependency"])
        artifacts = defaultdict(list)
        for artifact in upstream_artifacts:
            # Paths look like {prefix}/partner/subpartner/platform/locale/...
            partner, sub_partner, platform, locale, _ = artifact.replace(
                prefix + "/", ""
            ).split("/", 4)
            destination = "private"
            this_config = [
                p
                for p in partner_config["configs"]
                if (p["campaign"] == partner and p["content"] == sub_partner)
            ]
            # NOTE(review): raises IndexError if the artifact's partner has no
            # matching config entry — presumably can't happen since the
            # artifacts were generated from the same config; confirm.
            if this_config[0].get("upload_to_candidates"):
                destination = "public"
            artifacts[destination].append(
                (artifact, partner, sub_partner, platform, locale)
            )

        action_scope = add_scope_prefix(config, "beetmover:action:push-to-partner")
        public_bucket_scope = get_beetmover_bucket_scope(config)
        partner_bucket_scope = add_scope_prefix(config, job["partner-bucket-scope"])
        repl_dict = {
            "build_number": config.params["build_number"],
            "release_partner_build_number": config.params[
                "release_partner_build_number"
            ],
            "version": config.params["version"],
            "partner": "{partner}",  # we'll replace these later, per artifact
            "subpartner": "{subpartner}",
            "platform": "{platform}",
            "locale": "{locale}",
        }
        # At most two jobs are emitted: one public, one private.
        for destination, destination_artifacts in artifacts.items():
            this_job = deepcopy(job)

            if destination == "public":
                this_job["scopes"] = [public_bucket_scope, action_scope]
                this_job["partner_public"] = True
            else:
                this_job["scopes"] = [partner_bucket_scope, action_scope]
                this_job["partner_public"] = False

            partner_path_key = f"partner-{destination}-path"
            partner_path = this_job[partner_path_key].format(**repl_dict)
            this_job.setdefault("worker", {})[
                "upstream-artifacts"
            ] = generate_upstream_artifacts(
                attribution_task_ref, destination_artifacts, partner_path
            )

            yield this_job
+
+
+@transforms.add
def make_task_description(config, jobs):
    """Turn each public/private beetmover job into a full task description."""
    for job in jobs:
        dep_job = job["primary-dependency"]

        # Fail fast if the dependency doesn't carry a build platform.
        if not dep_job.attributes.get("build_platform"):
            raise Exception("Cannot find build platform!")

        visibility = "public" if job["partner_public"] else "private"
        task = {
            "label": f"{config.kind}-{visibility}",
            "description": f"Beetmover for partner attribution {visibility}",
            "dependencies": {dep_job.kind: dep_job.label},
            "attributes": copy_attributes_from_dependent_job(dep_job),
            "run-on-projects": dep_job.attributes.get("run_on_projects"),
            "shipping-phase": job["shipping-phase"],
            "shipping-product": job.get("shipping-product"),
            "partner_public": job["partner_public"],
            "worker": job["worker"],
            "scopes": job["scopes"],
        }
        # we may have reduced the priority for partner jobs, otherwise task.py will set it
        if job.get("priority"):
            task["priority"] = job["priority"]

        yield task
+
+
def generate_upstream_artifacts(attribution_task, artifacts, partner_path):
    """Build beetmover upstream-artifact entries for attributed installers.

    Each entry references one artifact on the attribution task; the formatted
    partner path is carried in the "locale" field, which beetmover uses as
    the destination sub-path.

    Raises:
        Exception: when `artifacts` is empty.
    """
    upstream_artifacts = [
        {
            "taskId": {"task-reference": attribution_task},
            "taskType": "repackage",
            "paths": [artifact],
            "locale": partner_path.format(
                partner=partner,
                subpartner=subpartner,
                platform=platform,
                locale=locale,
            ),
        }
        for artifact, partner, subpartner, platform, locale in artifacts
    ]

    if not upstream_artifacts:
        raise Exception("Couldn't find any upstream artifacts.")

    return upstream_artifacts
+
+
+@transforms.add
def make_task_worker(config, jobs):
    """Fill in the beetmover worker definition and drop the helper key."""
    for job in jobs:
        job["worker-type"] = "beetmover"
        job["worker"].update(
            {
                "implementation": "beetmover",
                "release-properties": craft_release_properties(config, job),
                # Move the transform-internal flag into the worker payload.
                "partner-public": job.pop("partner_public"),
            }
        )
        yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/partner_repack.py b/taskcluster/gecko_taskgraph/transforms/partner_repack.py
new file mode 100644
index 0000000000..d164c10a59
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/partner_repack.py
@@ -0,0 +1,136 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the partner repack task into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+
+from gecko_taskgraph.util.attributes import release_level
+from gecko_taskgraph.util.partners import (
+ apply_partner_priority,
+ check_if_partners_enabled,
+ get_partner_config_by_kind,
+ get_partner_url_config,
+ get_repack_ids_by_platform,
+)
+from gecko_taskgraph.util.scriptworker import get_release_config
+
+transforms = TransformSequence()
+transforms.add(check_if_partners_enabled)
+transforms.add(apply_partner_priority)
+
+
+@transforms.add
def skip_unnecessary_platforms(config, tasks):
    """Drop release-partner-repack tasks whose platform has no repacks configured."""
    for task in tasks:
        if config.kind == "release-partner-repack":
            build_platform = task["attributes"]["build_platform"]
            if not get_repack_ids_by_platform(config, build_platform):
                continue
        yield task
+
+
+@transforms.add
def remove_mac_dependency(config, tasks):
    """Remove mac dependency depending on current level
    to accomodate for mac notarization not running on level 1
    """
    level = int(config.params.get("level", 0))
    # At level 3 notarization supersedes plain signing; at lower levels
    # notarization doesn't run, so keep the signing dependency instead.
    skipped_kind = "mac-signing" if level == 3 else "mac-notarization"
    for task in tasks:
        if "macosx" not in task["attributes"]["build_platform"]:
            yield task
            continue
        doomed = [
            dep_label
            for dep_label in task["dependencies"]
            if skipped_kind in dep_label
        ]
        for dep_label in doomed:
            del task["dependencies"][dep_label]
        yield task
+
+
+@transforms.add
def populate_repack_manifests_url(config, tasks):
    """Point each task at the partner repack-manifests repository.

    Matches the kind name against the configured partner URL prefixes, and
    adds an ssh-key secret scope when the repository is accessed over git+ssh.
    """
    for task in tasks:
        partner_url_config = get_partner_url_config(config.params, config.graph_config)

        for k in partner_url_config:
            if config.kind.startswith(k):
                task["worker"].setdefault("env", {})[
                    "REPACK_MANIFESTS_URL"
                ] = partner_url_config[k]
                break
        else:
            # for/else: no configured prefix matched this kind.
            raise Exception("Can't find partner REPACK_MANIFESTS_URL")

        for property in ("limit-locales",):
            property = f"extra.{property}"
            resolve_keyed_by(
                task,
                property,
                property,
                **{"release-level": release_level(config.params["project"])},
            )

        # ssh URLs need the partner github ssh key from the secrets service.
        if task["worker"]["env"]["REPACK_MANIFESTS_URL"].startswith("git@"):
            task.setdefault("scopes", []).append(
                "secrets:get:project/releng/gecko/build/level-{level}/partner-github-ssh".format(
                    **config.params
                )
            )

        yield task
+
+
+@transforms.add
def make_label(config, tasks):
    """Derive each task's label from the kind name and the task name."""
    for task in tasks:
        task["label"] = f"{config.kind}-{task['name']}"
        yield task
+
+
+@transforms.add
def add_command_arguments(config, tasks):
    """Assemble the mozharness options and env vars for partner repacks."""
    release_config = get_release_config(config)

    # staging releases - pass reduced set of locales to the repacking script
    all_locales = set()
    partner_config = get_partner_config_by_kind(config, config.kind)
    for partner in partner_config.values():
        for sub_partner in partner.values():
            all_locales.update(sub_partner.get("locales", []))

    for task in tasks:
        # add the MOZHARNESS_OPTIONS, eg version=61.0, build-number=1, platform=win64
        if not task["attributes"]["build_platform"].endswith("-shippable"):
            raise Exception(
                "Unexpected partner repack platform: {}".format(
                    task["attributes"]["build_platform"],
                ),
            )
        # Strip the -shippable suffix to get the plain platform name.
        platform = task["attributes"]["build_platform"].partition("-shippable")[0]
        task["run"]["options"] = [
            "version={}".format(release_config["version"]),
            "build-number={}".format(release_config["build_number"]),
            f"platform={platform}",
        ]
        if task["extra"]["limit-locales"]:
            for locale in all_locales:
                task["run"]["options"].append(f"limit-locale={locale}")
        if "partner" in config.kind and config.params["release_partners"]:
            for partner in config.params["release_partners"]:
                task["run"]["options"].append(f"partner={partner}")

        # The upstream taskIds are stored a special environment variable, because we want to use
        # task-reference's to resolve dependencies, but the string handling of MOZHARNESS_OPTIONS
        # blocks that. It's space-separated string of ids in the end.
        task["worker"]["env"]["UPSTREAM_TASKIDS"] = {
            "task-reference": " ".join([f"<{dep}>" for dep in task["dependencies"]])
        }

        # Forward the release type for bouncer product construction
        task["worker"]["env"]["RELEASE_TYPE"] = config.params["release_type"]

        yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/partner_signing.py b/taskcluster/gecko_taskgraph/transforms/partner_signing.py
new file mode 100644
index 0000000000..c9b6b7a8cc
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/partner_signing.py
@@ -0,0 +1,66 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the signing task into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+
+from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
+from gecko_taskgraph.util.partners import get_partner_config_by_kind
+from gecko_taskgraph.util.signed_artifacts import (
+ generate_specifications_of_artifacts_to_sign,
+)
+
+transforms = TransformSequence()
+
+
+@transforms.add
def set_mac_label(config, jobs):
    """Give mac-notarization jobs a label distinct from their signing parent.

    Asserts that, one way or another, the job ends up labelled differently
    from its primary dependency.
    """
    for job in jobs:
        dep_job = job["primary-dependency"]
        if "mac-notarization" in config.kind:
            job.setdefault(
                "label", dep_job.label.replace("mac-signing", "mac-notarization")
            )
        assert job["label"] != dep_job.label, "Unable to determine label for {}".format(
            config.kind
        )
        yield job
+
+
+@transforms.add
def define_upstream_artifacts(config, jobs):
    """Describe which partner artifacts the signing scriptworker must fetch.

    Emits nothing when no partner config exists for this kind.
    """
    partner_configs = get_partner_config_by_kind(config, config.kind)
    if not partner_configs:
        return

    for job in jobs:
        dep_job = job["primary-dependency"]
        job["depname"] = dep_job.label
        job["attributes"] = copy_attributes_from_dependent_job(dep_job)

        repack_ids = job["extra"]["repack_ids"]
        artifacts_specifications = generate_specifications_of_artifacts_to_sign(
            config,
            job,
            keep_locale_template=True,
            kind=config.kind,
        )
        # Upstreams that are themselves scriptworker tasks (mac signing or
        # notarization) are declared as such; everything else is a build.
        task_type = "build"
        if "notarization" in job["depname"] or "mac-signing" in job["depname"]:
            task_type = "scriptworker"
        job["upstream-artifacts"] = [
            {
                "taskId": {"task-reference": f"<{dep_job.kind}>"},
                "taskType": task_type,
                # Expand each artifact path template per repack id; the
                # {locale} slot carries the repack id for partner repacks.
                "paths": [
                    path_template.format(locale=repack_id)
                    for path_template in spec["artifacts"]
                    for repack_id in repack_ids
                ],
                "formats": spec["formats"],
            }
            for spec in artifacts_specifications
        ]
        yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/per_platform_dummy.py b/taskcluster/gecko_taskgraph/transforms/per_platform_dummy.py
new file mode 100644
index 0000000000..5237f4f281
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/per_platform_dummy.py
@@ -0,0 +1,33 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the repackage task into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+
+from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
+
+transforms = TransformSequence()
+
+
+@transforms.add
def one_task_per_product_and_platform(config, jobs):
    """Collapse incoming jobs to one dummy task per
    (shipping_product, build_platform) pair."""
    seen = set()
    for job in jobs:
        dep_task = job.pop("primary-dependency")
        product = dep_task.attributes.get("shipping_product")
        platform = dep_task.attributes.get("build_platform")
        if (product, platform) in seen:
            continue
        seen.add((product, platform))
        # Per-locale attributes would differ between the collapsed jobs,
        # so they are excluded from the copy.
        attr_denylist = ("l10n_chunk", "locale", "artifact_map", "artifact_prefix")
        attributes = copy_attributes_from_dependent_job(
            dep_task, denylist=attr_denylist
        )
        attributes.update(job.get("attributes", {}))
        job["attributes"] = attributes
        job["name"] = f"{product}-{platform}"
        yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/perftest.py b/taskcluster/gecko_taskgraph/transforms/perftest.py
new file mode 100644
index 0000000000..5c579b48b5
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/perftest.py
@@ -0,0 +1,351 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+This transform passes options from `mach perftest` to the corresponding task.
+"""
+
+
+import json
+from copy import deepcopy
+from datetime import date, timedelta
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import Schema, optionally_keyed_by, resolve_keyed_by
+from taskgraph.util.treeherder import join_symbol, split_symbol
+from voluptuous import Any, Extra, Optional
+
+transforms = TransformSequence()
+
+
# Schema for tasks generated by `mach perftest`; unknown keys are allowed
# (Extra) and are validated by later schemas in the pipeline.
perftest_description_schema = Schema(
    {
        # The test names and the symbols to use for them: [test-symbol, test-path]
        Optional("perftest"): [[str]],
        # Metrics to gather for the test. These will be merged
        # with options specified through perftest-perfherder-global
        Optional("perftest-metrics"): optionally_keyed_by(
            "perftest",
            Any(
                [str],
                {str: Any(None, {str: Any(None, str, [str])})},
            ),
        ),
        # Perfherder data options that will be applied to
        # all metrics gathered.
        Optional("perftest-perfherder-global"): optionally_keyed_by(
            "perftest", {str: Any(None, str, [str])}
        ),
        # Extra options to add to the test's command
        Optional("perftest-extra-options"): optionally_keyed_by("perftest", [str]),
        # Variants of the test to make based on extra browsertime
        # arguments. Expecting:
        # [variant-suffix, options-to-use]
        # If variant-suffix is `null` then the options will be added
        # to the existing task. Otherwise, a new variant is created
        # with the given suffix and with its options replaced.
        Optional("perftest-btime-variants"): optionally_keyed_by(
            "perftest", [[Any(None, str)]]
        ),
        # These options will be parsed in the next schemas
        Extra: object,
    }
)


transforms.add_validate(perftest_description_schema)
+
+
+@transforms.add
def split_tests(config, jobs):
    """Fan each job out into one task per (symbol, test-path) pair in `perftest`."""
    for job in jobs:
        if job.get("perftest") is None:
            yield job
            continue

        for symbol, test_path in job.pop("perftest"):
            split_job = deepcopy(job)

            split_job["perftest"] = symbol
            split_job["name"] += f"-{symbol}"
            # The treeherder symbol and run command carry placeholders that
            # are filled per test.
            split_job["treeherder"]["symbol"] = job["treeherder"]["symbol"].format(
                symbol=symbol
            )
            split_job["run"]["command"] = job["run"]["command"].replace(
                "{perftest_testname}", test_path
            )

            yield split_job
+
+
+@transforms.add
def handle_keyed_by_perftest(config, jobs):
    """Resolve by-perftest keyed fields, then drop the helper `perftest` key."""
    keyed_fields = (
        "perftest-metrics",
        "perftest-extra-options",
        "perftest-btime-variants",
    )
    for job in jobs:
        if job.get("perftest") is None:
            yield job
            continue

        for keyed_field in keyed_fields:
            resolve_keyed_by(job, keyed_field, item_name=job["name"])

        job.pop("perftest")
        yield job
+
+
+@transforms.add
def parse_perftest_metrics(config, jobs):
    """Normalize `perftest-metrics` into a list of {"name": ..., ...} dicts.

    Doing this early lets later transforms tweak individual entries (e.g.
    extraOptions); the metrics are only serialized at the end of the
    pipeline.
    """
    for job in jobs:
        if job.get("perftest-metrics") is None:
            yield job
            continue
        raw_metrics = job.pop("perftest-metrics")

        if isinstance(raw_metrics, list):
            # Bare list of metric names, no per-metric options.
            parsed_metrics = [{"name": metric} for metric in raw_metrics]
        else:
            parsed_metrics = []
            for metric, options in raw_metrics.items():
                info = {"name": metric}
                info.update(options)
                parsed_metrics.append(info)

        job["perftest-metrics"] = parsed_metrics
        yield job
+
+
+@transforms.add
def split_perftest_variants(config, jobs):
    """Emit one extra task per entry in `variants`, plus the base task."""
    for job in jobs:
        if job.get("variants") is None:
            yield job
            continue

        for variant in job.pop("variants"):
            variant_job = deepcopy(job)

            # The variant name is appended to the treeherder group and the
            # task name, recorded as an extraOption, and set as a flag.
            group, symbol = split_symbol(variant_job["treeherder"]["symbol"])
            variant_job["treeherder"]["symbol"] = join_symbol(
                f"{group}-{variant}", symbol
            )
            variant_job["name"] += f"-{variant}"
            variant_job.setdefault("perftest-perfherder-global", {}).setdefault(
                "extraOptions", []
            ).append(variant)
            variant_job[variant] = True

            yield variant_job

        yield job
+
+
+@transforms.add
def split_btime_variants(config, jobs):
    """Split jobs on `perftest-btime-variants` entries.

    Entries with a suffix spawn a new task carrying only that entry's
    options; a `None` suffix folds its options into the original task, which
    is then re-emitted at the end.
    """
    for job in jobs:
        if job.get("perftest-btime-variants") is None:
            yield job
            continue

        variants = job.pop("perftest-btime-variants")
        if not variants:
            yield job
            continue

        yield_existing = False
        for suffix, options in variants:
            if suffix is None:
                # Append options to the existing job
                job.setdefault("perftest-btime-variants", []).append(options)
                yield_existing = True
            else:
                job_new = deepcopy(job)
                group, symbol = split_symbol(job_new["treeherder"]["symbol"])
                symbol += "-" + suffix
                job_new["treeherder"]["symbol"] = join_symbol(group, symbol)
                job_new["name"] += "-" + suffix
                job_new.setdefault("perftest-perfherder-global", {}).setdefault(
                    "extraOptions", []
                ).append(suffix)
                # Replace the existing options with the new ones
                job_new["perftest-btime-variants"] = [options]
                yield job_new

        # The existing job has been modified so we should also return it
        if yield_existing:
            yield job
+
+
+@transforms.add
def setup_http3_tests(config, jobs):
    """Enable HTTP/3 via a browsertime preference on jobs flagged with `http3`."""
    for job in jobs:
        http3_enabled = job.get("http3") is not None and job.pop("http3")
        if http3_enabled:
            variants = job.setdefault("perftest-btime-variants", [])
            variants.append("firefox.preference=network.http.http3.enable:true")
        yield job
+
+
+@transforms.add
def setup_perftest_metrics(config, jobs):
    """Serialize the parsed metrics into the {perftest_metrics} placeholder.

    Global perfherder options are first merged into each metric (list options
    extend existing lists, scalars overwrite, empty-list globals are
    ignored), then the metrics are rendered as a space-separated list of
    comma-separated option:value pairs.
    """
    for job in jobs:
        if job.get("perftest-metrics") is None:
            yield job
            continue
        perftest_metrics = job.pop("perftest-metrics")

        # Options to apply to each metric
        global_options = job.pop("perftest-perfherder-global", {})
        for metric_info in perftest_metrics:
            for opt, val in global_options.items():
                if isinstance(val, list) and opt in metric_info:
                    metric_info[opt].extend(val)
                elif not (isinstance(val, list) and len(val) == 0):
                    metric_info[opt] = val

        quote_escape = '\\"'
        if "win" in job.get("platform", ""):
            # Escaping is a bit different on windows platforms
            quote_escape = '\\\\\\"'

        # Values are stringified with spaces stripped and quotes escaped so
        # the result survives the shell command line.
        job["run"]["command"] = job["run"]["command"].replace(
            "{perftest_metrics}",
            " ".join(
                [
                    ",".join(
                        [
                            ":".join(
                                [
                                    option,
                                    str(value)
                                    .replace(" ", "")
                                    .replace("'", quote_escape),
                                ]
                            )
                            for option, value in metric_info.items()
                        ]
                    )
                    for metric_info in perftest_metrics
                ]
            ),
        )

        yield job
+
+
+@transforms.add
def setup_perftest_browsertime_variants(config, jobs):
    """Append accumulated browsertime options to the job's run command."""
    for job in jobs:
        if job.get("perftest-btime-variants") is None:
            yield job
            continue

        options = [opt.strip() for opt in job.pop("perftest-btime-variants")]
        job["run"]["command"] += f" --browsertime-extra-options {','.join(options)}"

        yield job
+
+
+@transforms.add
def setup_perftest_extra_options(config, jobs):
    """Tack any `perftest-extra-options` onto the job's run command."""
    for job in jobs:
        if job.get("perftest-extra-options") is None:
            yield job
            continue
        extras = job.pop("perftest-extra-options")
        job["run"]["command"] += " " + " ".join(extras)
        yield job
+
+
+@transforms.add
def pass_perftest_options(config, jobs):
    """Expose `mach perftest` options to the task via PERFTEST_OPTIONS."""
    # The options come from the try-task config and are identical for all jobs.
    serialized = json.dumps(config.params["try_task_config"].get("perftest-options"))
    for job in jobs:
        worker_env = job.setdefault("worker", {}).setdefault("env", {})
        worker_env["PERFTEST_OPTIONS"] = serialized
        yield job
+
+
+@transforms.add
def setup_perftest_test_date(config, jobs):
    """Default batched jobs to fetching yesterday's data via --test-date."""
    for job in jobs:
        is_batch = job.get("attributes", {}).get("batch", False)
        already_dated = "--test-date" in job["run"]["command"] if is_batch else True
        if is_batch and not already_dated:
            yesterday = (date.today() - timedelta(1)).strftime("%Y.%m.%d")
            job["run"]["command"] += f" --test-date {yesterday}"
        yield job
+
+
+@transforms.add
def setup_regression_detector(config, jobs):
    """Wire the change-detector task to the perf tasks selected on try.

    Collects the browsertime/talos/awsy/perftest tasks from the try config,
    makes the detector soft-depend on them, and fills the command template
    with the base/new revision information.
    """
    for job in jobs:
        if "change-detector" in job.get("name"):

            tasks_to_analyze = []
            for task in config.params["try_task_config"].get("tasks", []):
                # Explicitly skip these tasks since they're
                # part of the mozperftest tasks
                if "side-by-side" in task:
                    continue
                if "change-detector" in task:
                    continue

                # Select these tasks
                if "browsertime" in task:
                    tasks_to_analyze.append(task)
                elif "talos" in task:
                    tasks_to_analyze.append(task)
                elif "awsy" in task:
                    tasks_to_analyze.append(task)
                elif "perftest" in task:
                    tasks_to_analyze.append(task)

            if len(tasks_to_analyze) == 0:
                yield job
                continue

            # Make the change detector task depend on the tasks to analyze.
            # This prevents the task from running until all data is available
            # within the current push.
            job["soft-dependencies"] = tasks_to_analyze
            job["requires"] = "all-completed"

            new_project = config.params["project"]
            if (
                "try" in config.params["project"]
                or config.params["try_mode"] == "try_select"
            ):
                new_project = "try"

            base_project = None
            if (
                config.params.get("try_task_config", {})
                .get("env", {})
                .get("PERF_BASE_REVISION", None)
                is not None
            ):
                task_names = " --task-name ".join(tasks_to_analyze)
                base_revision = config.params["try_task_config"]["env"][
                    "PERF_BASE_REVISION"
                ]
                base_project = new_project

            # NOTE(review): task_names and base_revision are only bound when
            # PERF_BASE_REVISION is set, yet the .format below runs
            # unconditionally — it would raise NameError otherwise; confirm
            # that change-detector pushes always set PERF_BASE_REVISION.
            # Add all the required information to the task
            job["run"]["command"] = job["run"]["command"].format(
                task_name=task_names,
                base_revision=base_revision,
                base_branch=base_project,
                new_branch=new_project,
                new_revision=config.params["head_rev"],
            )

        yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/python_update.py b/taskcluster/gecko_taskgraph/transforms/python_update.py
new file mode 100644
index 0000000000..f4f135b585
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/python_update.py
@@ -0,0 +1,25 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the repo-update task into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+
+transforms = TransformSequence()
+
+
+@transforms.add
def resolve_keys(config, tasks):
    """Set BRANCH, resolve keyed-by env vars, and drop the empty ones."""
    for task in tasks:
        env = task["worker"].setdefault("env", {})
        env["BRANCH"] = config.params["project"]

        # Resolve every env var in place against the graph parameters.
        for name in list(env):
            resolve_keyed_by(env, name, name, **config.params)

        # Remove any env var whose resolved value is falsy.
        for name in [n for n in env if not env[n]]:
            del env[name]

        yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/release.py b/taskcluster/gecko_taskgraph/transforms/release.py
new file mode 100644
index 0000000000..1158252fe7
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/release.py
@@ -0,0 +1,20 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+Transforms for release tasks
+"""
+
+
def run_on_releases(config, jobs):
    """Filter jobs by their `run-on-releases` list.

    A job without `run-on-releases` always passes through; otherwise it is
    kept only when the current `release_type` parameter is listed.
    """
    for job in jobs:
        allowed_release_types = job.pop("run-on-releases", None)
        if allowed_release_types is None:
            yield job
        elif config.params["release_type"] in allowed_release_types:
            yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/release_beetmover_signed_addons.py b/taskcluster/gecko_taskgraph/transforms/release_beetmover_signed_addons.py
new file mode 100644
index 0000000000..fb8eb91f44
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/release_beetmover_signed_addons.py
@@ -0,0 +1,243 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the beetmover task into an actual task description.
+"""
+
+import copy
+import logging
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import optionally_keyed_by, resolve_keyed_by
+from taskgraph.util.treeherder import inherit_treeherder_from_dep
+from voluptuous import Optional, Required
+
+from gecko_taskgraph.loader.single_dep import schema
+from gecko_taskgraph.transforms.beetmover import craft_release_properties
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import (
+ copy_attributes_from_dependent_job,
+ release_level,
+)
+from gecko_taskgraph.util.scriptworker import (
+ generate_beetmover_artifact_map,
+ generate_beetmover_upstream_artifacts,
+ get_beetmover_action_scope,
+ get_beetmover_bucket_scope,
+)
+
+logger = logging.getLogger(__name__)
+
+
+transforms = TransformSequence()
+
+
# Schema for langpack beetmover task descriptions; extends the single-dep
# loader schema and is validated immediately below.
beetmover_description_schema = schema.extend(
    {
        # attributes is used for enabling artifact-map by declarative artifacts
        Required("attributes"): {str: object},
        # unique label to describe this beetmover task, defaults to {dep.label}-beetmover
        Optional("label"): str,
        # treeherder is allowed here to override any defaults we use for beetmover. See
        # taskcluster/gecko_taskgraph/transforms/task.py for the schema details, and the
        # below transforms for defaults of various values.
        Optional("treeherder"): task_description_schema["treeherder"],
        Required("description"): str,
        # keyed by release level; resolved by the resolve_keys transform below
        Required("worker-type"): optionally_keyed_by("release-level", str),
        Required("run-on-projects"): [],
        # locale is passed only for l10n beetmoving
        Optional("locale"): str,
        Optional("shipping-phase"): task_description_schema["shipping-phase"],
        Optional("shipping-product"): task_description_schema["shipping-product"],
    }
)


transforms.add_validate(beetmover_description_schema)
+
+
+@transforms.add
def resolve_keys(config, jobs):
    """Resolve worker-type and artifact_map fields keyed by release data."""
    for job in jobs:
        keyed_by_context = {
            "release-level": release_level(config.params["project"]),
            "release-type": config.params["release_type"],
            "project": config.params["project"],
        }
        for field in ("worker-type", "attributes.artifact_map"):
            resolve_keyed_by(job, field, item_name=job["label"], **keyed_by_context)
        yield job
+
+
+@transforms.add
def make_task_description(config, jobs):
    """Flesh out each langpack beetmover job from its primary dependency:
    treeherder info, attributes, description, scopes and dependencies."""
    for job in jobs:
        dep_job = job["primary-dependency"]
        attributes = dep_job.attributes

        treeherder = inherit_treeherder_from_dep(job, dep_job)
        treeherder.setdefault(
            "symbol", "langpack(BM{})".format(attributes.get("l10n_chunk", ""))
        )

        # Carry over the dependency's attributes; default the locale list to
        # en-US when the dependency has no chunk_locales.
        job["attributes"].update(copy_attributes_from_dependent_job(dep_job))
        job["attributes"]["chunk_locales"] = dep_job.attributes.get(
            "chunk_locales", ["en-US"]
        )

        # The description template uses {locales} and {platform} placeholders.
        job["description"] = job["description"].format(
            locales="/".join(job["attributes"]["chunk_locales"]),
            platform=job["attributes"]["build_platform"],
        )

        job["scopes"] = [
            get_beetmover_bucket_scope(config),
            get_beetmover_action_scope(config),
        ]

        job["dependencies"] = {"langpack-copy": dep_job.label}

        # NOTE(review): this reads "run_on_projects" (underscores) from the
        # job, while the schema field is "run-on-projects" — the job-side key
        # likely never exists, so the dependency's value always wins; confirm
        # this is intended.
        job["run-on-projects"] = job.get(
            "run_on_projects", dep_job.attributes["run_on_projects"]
        )
        job["treeherder"] = treeherder
        job["shipping-phase"] = job.get(
            "shipping-phase", dep_job.attributes["shipping_phase"]
        )
        job["shipping-product"] = dep_job.attributes["shipping_product"]

        yield job
+
+
+@transforms.add
def make_task_worker(config, jobs):
    """Attach the beetmover worker payload (artifacts and artifact-map)."""
    for job in jobs:
        attrs = job["attributes"]
        platform = attrs["build_platform"]
        locale = attrs["chunk_locales"]

        release_properties = craft_release_properties(config, job)
        upstream_artifacts = generate_beetmover_upstream_artifacts(
            config,
            job,
            platform,
            locale,
        )
        artifact_map = generate_beetmover_artifact_map(
            config, job, platform=platform, locale=locale
        )

        job["worker"] = {
            "implementation": "beetmover",
            "release-properties": release_properties,
            "upstream-artifacts": upstream_artifacts,
            "artifact-map": artifact_map,
        }

        yield job
+
+
+@transforms.add
def strip_unused_data(config, jobs):
    """Drop the primary-dependency entry once it is no longer needed."""
    for job in jobs:
        job.pop("primary-dependency")
        yield job
+
+
+@transforms.add
def yield_all_platform_jobs(config, jobs):
    """Fan each langpack beetmover job out to every desktop platform.

    Even though langpacks are now platform independent, we keep beetmoving
    them at the old platform-specific locations; that is why this transform
    exists. The linux64 and mac-specific ja-JP-mac langpacks are beetmoved
    along with the signing beetmover, so while the dependent jobs are linux
    here, we only yield jobs for the other platforms.
    """
    base_platforms = ("linux", "macosx64", "win32", "win64")
    for job in jobs:
        if "devedition" in job["attributes"]["build_platform"]:
            platforms = [f"{plat}-devedition" for plat in base_platforms]
        else:
            platforms = base_platforms

        for platform in platforms:
            platform_job = copy.deepcopy(job)
            on_mac = platform in ("macosx64", "macosx64-devedition")
            if on_mac and "ja" in platform_job["attributes"]["chunk_locales"]:
                platform_job = _strip_ja_data_from_linux_job(platform_job)

            yield _change_platform_data(config, platform_job, platform)
+
+
+def _strip_ja_data_from_linux_job(platform_job):
+ # Let's take "ja" out the description. This locale is in a substring like "aa/bb/cc/dd", where
+ # "ja" could be any of "aa", "bb", "cc", "dd"
+ platform_job["description"] = platform_job["description"].replace("ja/", "")
+ platform_job["description"] = platform_job["description"].replace("/ja", "")
+
+ platform_job["worker"]["upstream-artifacts"] = [
+ artifact
+ for artifact in platform_job["worker"]["upstream-artifacts"]
+ if artifact["locale"] != "ja"
+ ]
+
+ return platform_job
+
+
+def _change_platform_in_artifact_map_paths(paths, orig_platform, new_platform):
+ amended_paths = {}
+ for artifact, artifact_info in paths.items():
+ amended_artifact_info = {
+ "checksums_path": artifact_info["checksums_path"].replace(
+ orig_platform, new_platform
+ ),
+ "destinations": [
+ d.replace(orig_platform, new_platform)
+ for d in artifact_info["destinations"]
+ ],
+ }
+ amended_paths[artifact] = amended_artifact_info
+
+ return amended_paths
+
+
def _change_platform_data(config, platform_job, platform):
    """Rewrite all platform-specific fields of a cloned linux job to `platform`."""
    source_platform = (
        "linux64-devedition" if "devedition" in platform else "linux64"
    )

    platform_job["attributes"]["build_platform"] = platform
    for field in ("label", "description"):
        platform_job[field] = platform_job[field].replace(source_platform, platform)
    treeherder = platform_job["treeherder"]
    treeherder["platform"] = treeherder["platform"].replace(
        source_platform, platform
    )
    platform_job["worker"]["release-properties"]["platform"] = platform

    # amend artifactMap entries as well; these use the FTP-style platform
    # names rather than the build platform names
    ftp_platform_names = {
        "linux64": "linux-x86_64",
        "linux": "linux-i686",
        "macosx64": "mac",
        "win32": "win32",
        "win64": "win64",
        "linux64-devedition": "linux-x86_64",
        "linux-devedition": "linux-i686",
        "macosx64-devedition": "mac",
        "win32-devedition": "win32",
        "win64-devedition": "win64",
    }
    mapped_source = ftp_platform_names.get(source_platform, source_platform)
    mapped_target = ftp_platform_names.get(platform, platform)
    platform_job["worker"]["artifact-map"] = [
        {
            "locale": entry["locale"],
            "taskId": entry["taskId"],
            "paths": _change_platform_in_artifact_map_paths(
                entry["paths"], mapped_source, mapped_target
            ),
        }
        for entry in platform_job["worker"]["artifact-map"]
    ]

    return platform_job
diff --git a/taskcluster/gecko_taskgraph/transforms/release_deps.py b/taskcluster/gecko_taskgraph/transforms/release_deps.py
new file mode 100644
index 0000000000..e44af576eb
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/release_deps.py
@@ -0,0 +1,61 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Add dependencies to release tasks.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+
+PHASES = ["build", "promote", "push", "ship"]
+
+transforms = TransformSequence()
+
+
+@transforms.add
def add_dependencies(config, jobs):
    """Make each release job depend on the kind-dependency tasks that ship
    the same product, and aggregate their required signoffs."""
    for job in jobs:
        dependencies = {}
        # Add any kind_dependencies_tasks with matching product as dependencies
        product = job.get("shipping-product")
        phase = job.get("shipping-phase")
        if product is None:
            # Jobs without a shipping product are dropped entirely, not yielded.
            continue

        required_signoffs = set(
            job.setdefault("attributes", {}).get("required_signoffs", [])
        )
        for dep_task in config.kind_dependencies_tasks.values():
            # Weed out unwanted tasks.
            # XXX we have run-on-projects which specifies the on-push behavior;
            # we need another attribute that specifies release promotion,
            # possibly which action(s) each task belongs in.

            # We can only depend on tasks in the current or previous phases
            # NOTE(review): assumes `phase` is a valid PHASES entry whenever a
            # dep carries shipping_phase; a product job with no shipping-phase
            # would raise ValueError here — confirm schemas guarantee it.
            dep_phase = dep_task.attributes.get("shipping_phase")
            if dep_phase and PHASES.index(dep_phase) > PHASES.index(phase):
                continue

            # When both sides declare a build platform, they must match.
            if dep_task.attributes.get("build_platform") and job.get(
                "attributes", {}
            ).get("build_platform"):
                if (
                    dep_task.attributes["build_platform"]
                    != job["attributes"]["build_platform"]
                ):
                    continue
            # Add matching product tasks to deps (label maps to itself, as
            # expected by the task-description dependencies format).
            if (
                dep_task.task.get("shipping-product") == product
                or dep_task.attributes.get("shipping_product") == product
            ):
                dependencies[dep_task.label] = dep_task.label
                required_signoffs.update(
                    dep_task.attributes.get("required_signoffs", [])
                )

        job.setdefault("dependencies", {}).update(dependencies)
        if required_signoffs:
            job["attributes"]["required_signoffs"] = sorted(required_signoffs)

        yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/release_flatpak_push.py b/taskcluster/gecko_taskgraph/transforms/release_flatpak_push.py
new file mode 100644
index 0000000000..8a336502e6
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/release_flatpak_push.py
@@ -0,0 +1,79 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the release-flatpak-push kind into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import Schema, optionally_keyed_by, resolve_keyed_by
+from voluptuous import Optional, Required
+
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import release_level
+from gecko_taskgraph.util.scriptworker import add_scope_prefix
+
# Schema for release-flatpak-push task descriptions, validated below.
push_flatpak_description_schema = Schema(
    {
        Required("name"): str,
        Required("job-from"): task_description_schema["job-from"],
        Required("dependencies"): task_description_schema["dependencies"],
        Required("description"): task_description_schema["description"],
        Required("treeherder"): task_description_schema["treeherder"],
        Required("run-on-projects"): task_description_schema["run-on-projects"],
        # keyed by release level; resolved in make_task_description below
        Required("worker-type"): optionally_keyed_by("release-level", str),
        Required("worker"): object,
        Optional("scopes"): [str],
        Required("shipping-phase"): task_description_schema["shipping-phase"],
        Required("shipping-product"): task_description_schema["shipping-product"],
        Optional("extra"): task_description_schema["extra"],
        Optional("attributes"): task_description_schema["attributes"],
    }
)

transforms = TransformSequence()
transforms.add_validate(push_flatpak_description_schema)
+
+
+@transforms.add
def make_task_description(config, jobs):
    """Finish the flatpak push task: upstream artifacts, keyed-by fields,
    and the flathub scope on production pushes."""
    for job in jobs:
        if len(job["dependencies"]) != 1:
            raise Exception("Exactly 1 dependency is required")

        job["worker"]["upstream-artifacts"] = generate_upstream_artifacts(
            job["dependencies"]
        )

        resolve_keyed_by(
            job,
            "worker.channel",
            item_name=job["name"],
            **{"release-type": config.params["release_type"]},
        )
        level = release_level(config.params["project"])
        resolve_keyed_by(
            job,
            "worker-type",
            item_name=job["name"],
            **{"release-level": level},
        )
        if level == "production":
            channel = job["worker"]["channel"]
            job.setdefault("scopes", []).append(
                add_scope_prefix(config, f"flathub:firefox:{channel}")
            )

        yield job
+
+
def generate_upstream_artifacts(dependencies):
    """Build one upstream-artifact entry (the flatpak tarball) per dependency."""
    artifacts = []
    for task_kind in dependencies:
        artifacts.append(
            {
                "taskId": {"task-reference": f"<{task_kind}>"},
                "taskType": "build",
                "paths": ["public/build/target.flatpak.tar.xz"],
            }
        )
    return artifacts
diff --git a/taskcluster/gecko_taskgraph/transforms/release_flatpak_repackage.py b/taskcluster/gecko_taskgraph/transforms/release_flatpak_repackage.py
new file mode 100644
index 0000000000..7af1134c3a
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/release_flatpak_repackage.py
@@ -0,0 +1,42 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+
+from gecko_taskgraph.util.attributes import release_level
+from gecko_taskgraph.util.scriptworker import get_release_config
+
+transforms = TransformSequence()
+
+
+@transforms.add
def format(config, tasks):
    """Apply format substitution to worker.env and worker.command.

    NOTE: shadows the builtin `format`, but the name is kept because the
    transform is registered under it.
    """
    substitutions = {
        "release_config": get_release_config(config),
        "config_params": config.params,
    }

    for task in tasks:
        substitutions["task"] = task

        worker = task.get("worker", {})
        task["worker"]["command"] = [
            part.format(**substitutions) for part in worker.get("command", [])
        ]

        env = worker.get("env", {})
        keyed_by_context = {
            "release-level": release_level(config.params["project"]),
            "project": config.params["project"],
        }
        for key in env:
            resolve_keyed_by(env, key, "flatpak envs", **keyed_by_context)
            task["worker"]["env"][key] = env[key].format(**substitutions)

        yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/release_generate_checksums.py b/taskcluster/gecko_taskgraph/transforms/release_generate_checksums.py
new file mode 100644
index 0000000000..0024b88726
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/release_generate_checksums.py
@@ -0,0 +1,53 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the checksums task into an actual task description.
+"""
+
+import copy
+import logging
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+
+from gecko_taskgraph.util.attributes import release_level
+from gecko_taskgraph.util.scriptworker import get_release_config
+
+logger = logging.getLogger(__name__)
+
+transforms = TransformSequence()
+
+
+@transforms.add
def handle_keyed_by(config, jobs):
    """Resolve fields that can be keyed by release level."""
    keyed_fields = ("run.config", "run.extra-config")
    level = release_level(config.params["project"])
    for job in jobs:
        # Work on a copy so the incoming job dict is left untouched.
        job = copy.deepcopy(job)
        for field in keyed_fields:
            resolve_keyed_by(
                item=job,
                field=field,
                item_name=job["name"],
                **{"release-level": level},
            )
        yield job
+
+
+@transforms.add
def interpolate(config, jobs):
    """Substitute the release version and build number into run.options."""
    release_config = get_release_config(config)
    version = release_config["version"]
    build_number = release_config["build_number"]
    for job in jobs:
        job["run"]["options"] = [
            option.format(version=version, build_number=build_number)
            for option in job["run"]["options"]
        ]
        yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/release_generate_checksums_beetmover.py b/taskcluster/gecko_taskgraph/transforms/release_generate_checksums_beetmover.py
new file mode 100644
index 0000000000..c91e807b27
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/release_generate_checksums_beetmover.py
@@ -0,0 +1,118 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the `release-generate-checksums-beetmover` task to also append `build` as dependency
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from voluptuous import Optional
+
+from gecko_taskgraph.loader.single_dep import schema
+from gecko_taskgraph.transforms.beetmover import craft_release_properties
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
+from gecko_taskgraph.util.scriptworker import (
+ generate_beetmover_artifact_map,
+ generate_beetmover_upstream_artifacts,
+ get_beetmover_action_scope,
+ get_beetmover_bucket_scope,
+)
+
transforms = TransformSequence()


# Schema for release-generate-checksums-beetmover task descriptions; extends
# the single-dep loader schema and is validated immediately below.
# (A redundant second `transforms = TransformSequence()` assignment that
# followed this schema has been removed.)
release_generate_checksums_beetmover_schema = schema.extend(
    {
        # unique label to describe this beetmover task, defaults to {dep.label}-beetmover
        Optional("label"): str,
        # treeherder is allowed here to override any defaults we use for beetmover. See
        # taskcluster/gecko_taskgraph/transforms/task.py for the schema details, and the
        # below transforms for defaults of various values.
        Optional("treeherder"): task_description_schema["treeherder"],
        Optional("shipping-phase"): task_description_schema["shipping-phase"],
        Optional("shipping-product"): task_description_schema["shipping-product"],
        Optional("attributes"): task_description_schema["attributes"],
    }
)

transforms.add_validate(release_generate_checksums_beetmover_schema)
+
+
+@transforms.add
def make_task_description(config, jobs):
    """Turn the checksums-signing dependency into a beetmover task description.

    The beetmover task transfers the signed *SUMS and *SUMMARY checksum
    files to S3, depends on the signing task plus its single upstream, and
    runs in the promote phase.
    """
    for job in jobs:
        dep_job = job["primary-dependency"]
        attributes = copy_attributes_from_dependent_job(dep_job)
        attributes.update(job.get("attributes", {}))

        treeherder = job.get("treeherder", {})
        treeherder.setdefault("symbol", "BM-SGenChcks")
        dep_th_platform = (
            dep_job.task.get("extra", {})
            .get("treeherder", {})
            .get("machine", {})
            .get("platform", "")
        )
        treeherder.setdefault("platform", f"{dep_th_platform}/opt")
        treeherder.setdefault("tier", 1)
        treeherder.setdefault("kind", "build")

        # Derive the beetmover label directly from the signing task's label.
        # (The pointless `f"{dep_job.label}"` indirection was removed.)
        label = dep_job.label.replace("signing", "beetmover")

        description = "Transfer *SUMS and *SUMMARY checksums file to S3."

        # first dependency is the signing task for the *SUMS files
        dependencies = {dep_job.kind: dep_job.label}

        if len(dep_job.dependencies) > 1:
            raise NotImplementedError(
                "Can't beetmove a signing task with multiple dependencies"
            )
        # update the dependencies with the dependencies of the signing task
        dependencies.update(dep_job.dependencies)

        bucket_scope = get_beetmover_bucket_scope(config)
        action_scope = get_beetmover_action_scope(config)

        task = {
            "label": label,
            "description": description,
            "worker-type": "beetmover",
            "scopes": [bucket_scope, action_scope],
            "dependencies": dependencies,
            "attributes": attributes,
            "run-on-projects": dep_job.attributes.get("run_on_projects"),
            "treeherder": treeherder,
            "shipping-phase": "promote",
        }

        yield task
+
+
+@transforms.add
def make_task_worker(config, jobs):
    """Attach the beetmover worker payload.

    Sanity-checks that the task has exactly two dependencies, one of them
    being the signing task (see make_task_description).
    """
    for job in jobs:
        # Idiom fix: any() over a generator instead of a materialized list.
        valid_beetmover_job = len(job["dependencies"]) == 2 and any(
            "signing" in j for j in job["dependencies"]
        )
        if not valid_beetmover_job:
            raise NotImplementedError("Beetmover must have two dependencies.")

        platform = job["attributes"]["build_platform"]
        worker = {
            "implementation": "beetmover",
            "release-properties": craft_release_properties(config, job),
            # No platform/locale: the checksum files are platform independent.
            "upstream-artifacts": generate_beetmover_upstream_artifacts(
                config, job, platform=None, locale=None
            ),
            "artifact-map": generate_beetmover_artifact_map(
                config, job, platform=platform
            ),
        }

        job["worker"] = worker

        yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/release_generate_checksums_signing.py b/taskcluster/gecko_taskgraph/transforms/release_generate_checksums_signing.py
new file mode 100644
index 0000000000..6dfee1d33b
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/release_generate_checksums_signing.py
@@ -0,0 +1,86 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the release-generate-checksums-signing task into task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.taskcluster import get_artifact_path
+from voluptuous import Optional
+
+from gecko_taskgraph.loader.single_dep import schema
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
+from gecko_taskgraph.util.scriptworker import get_signing_cert_scope
+
# Schema for release-generate-checksums-signing task descriptions; extends
# the single-dep loader schema and is validated immediately below.
release_generate_checksums_signing_schema = schema.extend(
    {
        # unique label for the signing task; defaults to "{dep.label}-signing"
        Optional("label"): str,
        Optional("treeherder"): task_description_schema["treeherder"],
        Optional("shipping-product"): task_description_schema["shipping-product"],
        Optional("shipping-phase"): task_description_schema["shipping-phase"],
    }
)
+
+
+@transforms.add
def make_release_generate_checksums_signing_description(config, jobs):
    """Build the task description that GPG-signs the release checksums.

    Signs the SHA256SUMS/SHA512SUMS artifacts of the dependent
    release-generate-checksums task with autograph_gpg.
    """
    for job in jobs:
        dep_job = job["primary-dependency"]
        attributes = copy_attributes_from_dependent_job(dep_job)

        treeherder = job.get("treeherder", {})
        treeherder.setdefault("symbol", "SGenChcks")
        dep_th_platform = (
            dep_job.task.get("extra", {})
            .get("treeherder", {})
            .get("machine", {})
            .get("platform", "")
        )
        treeherder.setdefault("platform", f"{dep_th_platform}/opt")
        treeherder.setdefault("tier", 1)
        treeherder.setdefault("kind", "build")

        # Default label: "<dep label>-signing" (f-string instead of the old
        # "{}-{}".format(label, "signing") call with a constant argument).
        label = job.get("label", f"{dep_job.label}-signing")
        description = "Signing of the overall release-related checksums"

        dependencies = {dep_job.kind: dep_job.label}

        upstream_artifacts = [
            {
                # f-string formatting already stringifies; the redundant
                # str() wrapper was dropped.
                "taskId": {"task-reference": f"<{dep_job.kind}>"},
                "taskType": "build",
                "paths": [
                    get_artifact_path(dep_job, "SHA256SUMS"),
                    get_artifact_path(dep_job, "SHA512SUMS"),
                ],
                "formats": ["autograph_gpg"],
            }
        ]

        signing_cert_scope = get_signing_cert_scope(config)

        task = {
            "label": label,
            "description": description,
            "worker-type": "linux-signing",
            "worker": {
                "implementation": "scriptworker-signing",
                "upstream-artifacts": upstream_artifacts,
                "max-run-time": 3600,
            },
            "scopes": [
                signing_cert_scope,
            ],
            "dependencies": dependencies,
            "attributes": attributes,
            "run-on-projects": dep_job.attributes.get("run_on_projects"),
            "treeherder": treeherder,
        }

        yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/release_mark_as_shipped.py b/taskcluster/gecko_taskgraph/transforms/release_mark_as_shipped.py
new file mode 100644
index 0000000000..f2ce148320
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/release_mark_as_shipped.py
@@ -0,0 +1,39 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+
+from gecko_taskgraph.util.attributes import release_level
+from gecko_taskgraph.util.scriptworker import get_release_config
+
+transforms = TransformSequence()
+
+
+@transforms.add
def make_task_description(config, jobs):
    """Resolve worker-type/scopes by release level and set the release name."""
    release_config = get_release_config(config)
    for job in jobs:
        level = release_level(config.params["project"])
        for field in ("worker-type", "scopes"):
            resolve_keyed_by(
                job,
                field,
                item_name=job["name"],
                **{"release-level": level},
            )

        # e.g. "Firefox-123.0-build2"
        release_name = "{product}-{version}-build{build_number}".format(
            product=job["shipping-product"].capitalize(),
            version=release_config["version"],
            build_number=release_config["build_number"],
        )
        job["worker"]["release-name"] = release_name

        yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/release_msix_push.py b/taskcluster/gecko_taskgraph/transforms/release_msix_push.py
new file mode 100644
index 0000000000..817341f92c
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/release_msix_push.py
@@ -0,0 +1,88 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the release-msix-push kind into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import Schema, optionally_keyed_by, resolve_keyed_by
+from voluptuous import Optional, Required
+
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import release_level
+from gecko_taskgraph.util.scriptworker import add_scope_prefix
+
# Schema for release-msix-push task descriptions, validated below.
push_msix_description_schema = Schema(
    {
        Required("name"): str,
        Required("job-from"): task_description_schema["job-from"],
        Required("dependencies"): task_description_schema["dependencies"],
        Required("description"): task_description_schema["description"],
        Required("treeherder"): task_description_schema["treeherder"],
        Required("run-on-projects"): task_description_schema["run-on-projects"],
        # keyed by release level; resolved in make_task_description below
        Required("worker-type"): optionally_keyed_by("release-level", str),
        Required("worker"): object,
        Optional("scopes"): [str],
        Required("shipping-phase"): task_description_schema["shipping-phase"],
        Required("shipping-product"): task_description_schema["shipping-product"],
        Optional("extra"): task_description_schema["extra"],
        Optional("attributes"): task_description_schema["attributes"],
    }
)

transforms = TransformSequence()
transforms.add_validate(push_msix_description_schema)
+
+
+@transforms.add
def make_task_description(config, jobs):
    """Finish the msix push task: upstream artifacts, keyed-by fields, the
    Microsoft Store scope, and the shipping phase for manual publishing."""
    for job in jobs:
        job["worker"]["upstream-artifacts"] = generate_upstream_artifacts(
            job["dependencies"]
        )

        release_type = config.params["release_type"]
        for field in ("worker.channel", "worker.publish-mode"):
            resolve_keyed_by(
                job,
                field,
                item_name=job["name"],
                **{"release-type": release_type},
            )
        level = release_level(config.params["project"])
        resolve_keyed_by(
            job,
            "worker-type",
            item_name=job["name"],
            **{"release-level": level},
        )
        if level == "production":
            job.setdefault("scopes", []).append(
                add_scope_prefix(
                    config,
                    "microsoftstore:{}".format(job["worker"]["channel"]),
                )
            )

        # Override shipping-phase for release: push to the Store early to
        # allow time for certification.
        if job["worker"]["publish-mode"] == "Manual":
            job["shipping-phase"] = "promote"

        yield job
+
+
def generate_upstream_artifacts(dependencies):
    """Build one upstream-artifact entry (the store msix) per dependency."""
    artifacts = []
    for task_kind in dependencies:
        artifacts.append(
            {
                "taskId": {"task-reference": f"<{task_kind}>"},
                "taskType": "build",
                "paths": ["public/build/target.store.msix"],
            }
        )
    return artifacts
diff --git a/taskcluster/gecko_taskgraph/transforms/release_notifications.py b/taskcluster/gecko_taskgraph/transforms/release_notifications.py
new file mode 100644
index 0000000000..86109ec5ed
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/release_notifications.py
@@ -0,0 +1,73 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Add notifications via taskcluster-notify for release tasks
+"""
+from string import Formatter
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+
+from gecko_taskgraph.util.scriptworker import get_release_config
+
+transforms = TransformSequence()
+
+
class TitleCaseFormatter(Formatter):
    """string.Formatter with an extra ``!t`` conversion that title-cases.

    Any other conversion (``!s``, ``!r``, ``!a``, or none) is delegated to
    the base class. Bug fix: the original computed the base-class conversion
    but discarded its result and returned the raw value, so ``!s``/``!r``/
    ``!a`` were silently ignored.
    """

    def convert_field(self, value, conversion):
        if conversion == "t":
            return str(value).title()
        return super().convert_field(value, conversion)
+
+
+titleformatter = TitleCaseFormatter()
+
+
+@transforms.add
def add_notifications(config, jobs):
    """Attach notify.email routes and email subject/body to release jobs.

    Jobs opt in via a `notifications` entry containing keyed-by-able
    `emails` plus `subject`/`message` templates, which are formatted with
    the task, the config and the release config (templates may use the
    custom ``!t`` conversion from TitleCaseFormatter).
    """
    release_config = get_release_config(config)

    for job in jobs:
        label = "{}-{}".format(config.kind, job["name"])

        notifications = job.pop("notifications", None)
        if notifications:
            resolve_keyed_by(
                notifications, "emails", label, project=config.params["project"]
            )
            emails = notifications["emails"]
            # NOTE(review): config.__dict__ exposes all config attributes to
            # the templates — presumably intentional; verify nothing
            # sensitive is reachable from templates.
            format_kwargs = dict(
                task=job,
                config=config.__dict__,
                release_config=release_config,
            )
            subject = titleformatter.format(notifications["subject"], **format_kwargs)
            message = titleformatter.format(notifications["message"], **format_kwargs)
            emails = [email.format(**format_kwargs) for email in emails]

            # By default, we only send mail on success to avoid messages like 'blah is in the
            # candidates dir' when cancelling graphs, dummy job failure, etc
            status_types = notifications.get("status-types", ["on-completed"])
            for s in status_types:
                job.setdefault("routes", []).extend(
                    [f"notify.email.{email}.{s}" for email in emails]
                )

            # Customize the email subject to include release name and build number
            job.setdefault("extra", {}).update(
                {
                    "notify": {
                        "email": {
                            "subject": subject,
                        }
                    }
                }
            )
            if message:
                job["extra"]["notify"]["email"]["content"] = message

        yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/release_sign_and_push_langpacks.py b/taskcluster/gecko_taskgraph/transforms/release_sign_and_push_langpacks.py
new file mode 100644
index 0000000000..17e4d37fb3
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/release_sign_and_push_langpacks.py
@@ -0,0 +1,180 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the release-sign-and-push task into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import optionally_keyed_by, resolve_keyed_by
+from taskgraph.util.treeherder import inherit_treeherder_from_dep
+from voluptuous import Any, Required
+
+from gecko_taskgraph.loader.single_dep import schema
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import (
+ copy_attributes_from_dependent_job,
+ release_level,
+)
+
+transforms = TransformSequence()
+
# Schema accepted by this kind's jobs; extends the common single-dep schema.
langpack_sign_push_description_schema = schema.extend(
    {
        Required("label"): str,
        Required("description"): str,
        Required("worker-type"): optionally_keyed_by("release-level", str),
        Required("worker"): {
            # AMO listing channel for the langpack.
            Required("channel"): optionally_keyed_by(
                "project", "platform", Any("listed", "unlisted")
            ),
            Required("upstream-artifacts"): None,  # Processed here below
        },
        Required("run-on-projects"): [],
        Required("scopes"): optionally_keyed_by("release-level", [str]),
        Required("shipping-phase"): task_description_schema["shipping-phase"],
        Required("shipping-product"): task_description_schema["shipping-product"],
    }
)
+
+
+@transforms.add
def set_label(config, jobs):
    """Derive each job's label from the label of its primary dependency."""
    for job in jobs:
        upstream = job["primary-dependency"]
        job["label"] = f"push-langpacks-{upstream.label}"
        yield job
+
+
+transforms.add_validate(langpack_sign_push_description_schema)
+
+
+@transforms.add
def resolve_keys(config, jobs):
    """Resolve keyed-by fields: worker-type, scopes, and worker.channel."""
    for job in jobs:
        # worker-type and scopes are both keyed by release level.
        for field in ("worker-type", "scopes"):
            resolve_keyed_by(
                job,
                field,
                item_name=job["label"],
                **{"release-level": release_level(config.params["project"])},
            )
        # The AMO channel depends on project and build platform.
        resolve_keyed_by(
            job,
            "worker.channel",
            item_name=job["label"],
            project=config.params["project"],
            platform=job["primary-dependency"].attributes["build_platform"],
        )
        yield job
+
+
+@transforms.add
def copy_attributes(config, jobs):
    """Inherit attributes from the dependent job, defaulting chunk_locales."""
    for job in jobs:
        upstream = job["primary-dependency"]
        attributes = copy_attributes_from_dependent_job(upstream)
        # Single-locale (en-US) builds have no chunk_locales attribute.
        attributes["chunk_locales"] = upstream.attributes.get(
            "chunk_locales", ["en-US"]
        )
        job["attributes"] = attributes
        yield job
+
+
+@transforms.add
def filter_out_macos_jobs_but_mac_only_locales(config, jobs):
    """Keep linux jobs; keep mac jobs only for the mac-only ja-JP-mac locale.

    Langpacks are platform-independent, so linux covers everything except
    ja-JP-mac, which only exists in macOS chunks.
    """
    linux_platforms = ("linux64-devedition", "linux64-shippable")
    mac_platforms = ("macosx64-devedition", "macosx64-shippable")

    for job in jobs:
        platform = job["primary-dependency"].attributes.get("build_platform")

        if platform in linux_platforms:
            yield job
            continue

        if (
            platform in mac_platforms
            and "ja-JP-mac" in job["attributes"]["chunk_locales"]
        ):
            # Other locales of the same job shouldn't be processed
            job["attributes"]["chunk_locales"] = ["ja-JP-mac"]
            # Guard against a chunk 10 or chunk 1 (latter on try) weird munging
            chunk_marker = "-{}/".format(job["attributes"]["l10n_chunk"])
            job["label"] = job["label"].replace(chunk_marker, "-ja-JP-mac/")
            yield job
+
+
+@transforms.add
def make_task_description(config, jobs):
    """Fill in treeherder info, the description, and the dependency map."""
    for job in jobs:
        upstream = job["primary-dependency"]

        treeherder = inherit_treeherder_from_dep(job, upstream)
        chunk = job["attributes"].get("l10n_chunk", "")
        treeherder.setdefault("symbol", f"langpack(SnP{chunk})")

        # The description template contains a `{locales}` placeholder.
        job["description"] = job["description"].format(
            locales="/".join(job["attributes"]["chunk_locales"])
        )

        job["dependencies"] = {upstream.kind: upstream.label}
        job["treeherder"] = treeherder

        yield job
+
+
def generate_upstream_artifacts(upstream_task_ref, locales):
    """Build the scriptworker upstream-artifacts entry for langpack XPIs.

    en-US langpacks live directly under public/build; every other locale has
    its own subdirectory.
    """
    paths = []
    for locale in locales:
        subdir = "" if locale == "en-US" else f"/{locale}"
        paths.append(f"public/build{subdir}/target.langpack.xpi")

    return [
        {
            "taskId": {"task-reference": upstream_task_ref},
            "taskType": "build",
            "paths": paths,
        }
    ]
+
+
+@transforms.add
def make_task_worker(config, jobs):
    """Point the push-addons worker at the langpack artifacts to upload."""
    for job in jobs:
        upstream_task_ref = get_upstream_task_ref(
            job, expected_kinds=("build", "shippable-l10n")
        )

        worker = job["worker"]
        worker["implementation"] = "push-addons"
        worker["upstream-artifacts"] = generate_upstream_artifacts(
            upstream_task_ref, job["attributes"]["chunk_locales"]
        )

        yield job
+
+
def get_upstream_task_ref(job, expected_kinds):
    """Return a ``<kind>`` task-reference to the job's single upstream task.

    Args:
        job: job description whose ``dependencies`` maps kind -> label.
        expected_kinds: kinds that qualify as the upstream task.

    Raises:
        Exception: if the job does not have exactly one dependency of an
            expected kind. The previous code only guarded against multiple
            matches and crashed with an opaque IndexError when none matched.
    """
    upstream_tasks = [
        job_kind for job_kind in job["dependencies"] if job_kind in expected_kinds
    ]

    if len(upstream_tasks) != 1:
        raise Exception(
            "Expected exactly one upstream task of kind {}, found: {}".format(
                expected_kinds, upstream_tasks
            )
        )

    return f"<{upstream_tasks[0]}>"
+
+
+@transforms.add
def strip_unused_data(config, jobs):
    """Drop primary-dependency, which the task description schema rejects."""
    for job in jobs:
        # pop() without a default raises KeyError just like `del` did.
        job.pop("primary-dependency")
        yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/release_snap_repackage.py b/taskcluster/gecko_taskgraph/transforms/release_snap_repackage.py
new file mode 100644
index 0000000000..659a203971
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/release_snap_repackage.py
@@ -0,0 +1,39 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+
+from gecko_taskgraph.util.attributes import release_level
+from gecko_taskgraph.util.scriptworker import get_release_config
+
+transforms = TransformSequence()
+
+
+@transforms.add
def format(config, tasks):
    """Apply format substitution to worker.env and worker.command."""
    release_config = get_release_config(config)

    for task in tasks:
        format_params = {
            "release_config": release_config,
            "config_params": config.params,
            "task": task,
        }

        command = task.get("worker", {}).get("command", [])
        task["worker"]["command"] = [
            part.format(**format_params) for part in command
        ]

        env = task.get("worker", {}).get("env", {})
        for key in env:
            # Env values may themselves be keyed by release level.
            resolve_keyed_by(
                env,
                key,
                "snap envs",
                **{"release-level": release_level(config.params["project"])},
            )
            task["worker"]["env"][key] = env[key].format(**format_params)

        yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/release_started.py b/taskcluster/gecko_taskgraph/transforms/release_started.py
new file mode 100644
index 0000000000..0f54c8e098
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/release_started.py
@@ -0,0 +1,52 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Add notifications via taskcluster-notify for release tasks
+"""
+from pipes import quote as shell_quote
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+
+transforms = TransformSequence()
+
+
+@transforms.add
def add_notifications(config, jobs):
    """Wire up the buglist email: notify scopes plus the mach command to run."""
    for job in jobs:
        label = f"{config.kind}-{job['name']}"

        resolve_keyed_by(job, "emails", label, project=config.params["project"])
        emails = [
            address.format(config=config.__dict__) for address in job.pop("emails")
        ]

        command = [
            "release",
            "send-buglist-email",
            "--version",
            config.params["version"],
            "--product",
            job["shipping-product"],
            "--revision",
            config.params["head_rev"],
            "--build-number",
            str(config.params["build_number"]),
            "--repo",
            config.params["head_repository"],
        ]
        for address in emails:
            command.extend(["--address", address])
        # We wrap this in `{'task-reference': ...}` below
        command.extend(["--task-group-id", "<decision>"])

        job["scopes"] = [f"notify:email:{address}" for address in emails]
        job["run"] = {
            "using": "mach",
            "sparse-profile": "mach",
            "mach": {"task-reference": " ".join(map(shell_quote, command))},
        }

        yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/release_version_bump.py b/taskcluster/gecko_taskgraph/transforms/release_version_bump.py
new file mode 100644
index 0000000000..a0f3f69d05
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/release_version_bump.py
@@ -0,0 +1,42 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the update generation task into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+
+transforms = TransformSequence()
+
+
+@transforms.add
def handle_keyed_by(config, tasks):
    """Resolve fields that can be keyed by platform, etc."""
    # Fields only resolved when the task's worker section defines them.
    optional_worker_fields = (
        "l10n-bump-info",
        "source-repo",
        "dontbuild",
        "ignore-closed-tree",
    )
    for task in tasks:
        fields = ["worker.push", "worker.bump-files", "worker-type"]
        fields.extend(
            f"worker.{name}"
            for name in optional_worker_fields
            if name in task["worker"]
        )
        for field in fields:
            resolve_keyed_by(
                task,
                field,
                item_name=task["name"],
                **{
                    "project": config.params["project"],
                    "release-type": config.params["release_type"],
                },
            )
        yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/repackage.py b/taskcluster/gecko_taskgraph/transforms/repackage.py
new file mode 100644
index 0000000000..2fe849c32d
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/repackage.py
@@ -0,0 +1,684 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the repackage task into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import optionally_keyed_by, resolve_keyed_by
+from taskgraph.util.taskcluster import get_artifact_prefix
+from voluptuous import Extra, Optional, Required
+
+from gecko_taskgraph.loader.single_dep import schema
+from gecko_taskgraph.transforms.job import job_description_schema
+from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
+from gecko_taskgraph.util.copy_task import copy_task
+from gecko_taskgraph.util.platforms import architecture, archive_format
+from gecko_taskgraph.util.workertypes import worker_type_implementation
+
# Schema for repackage jobs; extends the common single-dep schema.
packaging_description_schema = schema.extend(
    {
        # unique label to describe this repackaging task
        Optional("label"): str,
        Optional("worker-type"): str,
        Optional("worker"): object,
        # treeherder is allowed here to override any defaults we use for repackaging. See
        # taskcluster/gecko_taskgraph/transforms/task.py for the schema details, and the
        # below transforms for defaults of various values.
        Optional("treeherder"): job_description_schema["treeherder"],
        # If a l10n task, the corresponding locale
        Optional("locale"): str,
        # Routes specific to this task, if defined
        Optional("routes"): [str],
        # passed through directly to the job description
        Optional("extra"): job_description_schema["extra"],
        # passed through to job description
        Optional("fetches"): job_description_schema["fetches"],
        Optional("run-on-projects"): job_description_schema["run-on-projects"],
        # Shipping product and phase
        Optional("shipping-product"): job_description_schema["shipping-product"],
        Optional("shipping-phase"): job_description_schema["shipping-phase"],
        # Names of PACKAGE_FORMATS entries to produce, possibly keyed by
        # platform and release type.
        Required("package-formats"): optionally_keyed_by(
            "build-platform", "release-type", [str]
        ),
        # MSIX-specific metadata; each value may additionally be keyed by
        # package-format (resolved per-format in make_job_description).
        Optional("msix"): {
            Optional("channel"): optionally_keyed_by(
                "package-format",
                "level",
                "build-platform",
                "release-type",
                "shipping-product",
                str,
            ),
            Optional("identity-name"): optionally_keyed_by(
                "package-format",
                "level",
                "build-platform",
                "release-type",
                "shipping-product",
                str,
            ),
            Optional("publisher"): optionally_keyed_by(
                "package-format",
                "level",
                "build-platform",
                "release-type",
                "shipping-product",
                str,
            ),
            Optional("publisher-display-name"): optionally_keyed_by(
                "package-format",
                "level",
                "build-platform",
                "release-type",
                "shipping-product",
                str,
            ),
            Optional("vendor"): str,
        },
        # All l10n jobs use mozharness
        Required("mozharness"): {
            Extra: object,
            # Config files passed to the mozharness script
            Required("config"): optionally_keyed_by("build-platform", [str]),
            # Additional paths to look for mozharness configs in. These should be
            # relative to the base of the source checkout
            Optional("config-paths"): [str],
            # if true, perform a checkout of a comm-central based branch inside the
            # gecko checkout
            Optional("comm-checkout"): bool,
            Optional("run-as-root"): bool,
            Optional("use-caches"): bool,
        },
    }
)
+
# The configuration passed to the mozharness repackage script. This defines the
# arguments passed to `mach repackage`
# - `args` is interpolated by mozharness (`{package-name}`, `{installer-tag}`,
#   `{stub-installer-tag}`, `{sfx-stub}`, `{wsx-stub}`, `{fetch-dir}`), with values
#   from mozharness.
# - `inputs` are passed as long-options, with the filename prefixed by
#   `MOZ_FETCH_DIR`. The filename is interpolated by taskgraph
#   (`{archive_format}`).
# - `output` is passed to `--output`, with the filename prefixed by the output
#   directory.
PACKAGE_FORMATS = {
    # Complete update archive.
    "mar": {
        "args": [
            "mar",
            "--arch",
            "{architecture}",
            "--mar-channel-id",
            "{mar-channel-id}",
        ],
        "inputs": {
            "input": "target{archive_format}",
            "mar": "mar-tools/mar",
        },
        "output": "target.complete.mar",
    },
    # Windows MSI wrapper around the exe installer.
    "msi": {
        "args": [
            "msi",
            "--wsx",
            "{wsx-stub}",
            "--version",
            "{version_display}",
            "--locale",
            "{_locale}",
            "--arch",
            "{architecture}",
            "--candle",
            "{fetch-dir}/candle.exe",
            "--light",
            "{fetch-dir}/light.exe",
        ],
        "inputs": {
            "setupexe": "target.installer.exe",
        },
        "output": "target.installer.msi",
    },
    "msix": {
        "args": [
            "msix",
            "--channel",
            "{msix-channel}",
            "--publisher",
            "{msix-publisher}",
            "--publisher-display-name",
            "{msix-publisher-display-name}",
            "--identity-name",
            "{msix-identity-name}",
            "--vendor",
            "{msix-vendor}",
            "--arch",
            "{architecture}",
            # For langpacks. Ignored if directory does not exist.
            "--distribution-dir",
            "{fetch-dir}/distribution",
            "--verbose",
            "--makeappx",
            "{fetch-dir}/msix-packaging/makemsix",
        ],
        "inputs": {
            "input": "target{archive_format}",
        },
        "output": "target.installer.msix",
    },
    # Same arguments as "msix" but a distinct output name for the Microsoft
    # Store variant (the msix.* substitutions are keyed by package-format).
    "msix-store": {
        "args": [
            "msix",
            "--channel",
            "{msix-channel}",
            "--publisher",
            "{msix-publisher}",
            "--publisher-display-name",
            "{msix-publisher-display-name}",
            "--identity-name",
            "{msix-identity-name}",
            "--vendor",
            "{msix-vendor}",
            "--arch",
            "{architecture}",
            # For langpacks. Ignored if directory does not exist.
            "--distribution-dir",
            "{fetch-dir}/distribution",
            "--verbose",
            "--makeappx",
            "{fetch-dir}/msix-packaging/makemsix",
        ],
        "inputs": {
            "input": "target{archive_format}",
        },
        "output": "target.store.msix",
    },
    "dmg": {
        "args": ["dmg"],
        "inputs": {
            "input": "target{archive_format}",
        },
        "output": "target.dmg",
    },
    "pkg": {
        "args": ["pkg"],
        "inputs": {
            "input": "target{archive_format}",
        },
        "output": "target.pkg",
    },
    "installer": {
        "args": [
            "installer",
            "--package-name",
            "{package-name}",
            "--tag",
            "{installer-tag}",
            "--sfx-stub",
            "{sfx-stub}",
        ],
        "inputs": {
            "package": "target{archive_format}",
            "setupexe": "setup.exe",
        },
        "output": "target.installer.exe",
    },
    "installer-stub": {
        "args": [
            "installer",
            "--tag",
            "{stub-installer-tag}",
            "--sfx-stub",
            "{sfx-stub}",
        ],
        "inputs": {
            "setupexe": "setup-stub.exe",
        },
        "output": "target.stub-installer.exe",
    },
    "deb": {
        "args": [
            "deb",
            "--arch",
            "{architecture}",
            "--templates",
            "browser/installer/linux/app/debian",
            "--version",
            "{version_display}",
            "--build-number",
            "{build_number}",
            "--release-product",
            "{release_product}",
            "--release-type",
            "{release_type}",
        ],
        "inputs": {
            "input": "target{archive_format}",
        },
        "output": "target.deb",
    },
    "deb-l10n": {
        "args": [
            "deb-l10n",
            "--version",
            "{version_display}",
            "--build-number",
            "{build_number}",
            "--templates",
            "browser/installer/linux/langpack/debian",
        ],
        "inputs": {
            "input-xpi-file": "target.langpack.xpi",
            "input-tar-file": "target{archive_format}",
        },
        "output": "target.langpack.deb",
    },
}
# Names expanded later by mozharness; substituted with themselves ("{name}")
# so str.format leaves them intact for the next interpolation pass.
MOZHARNESS_EXPANSIONS = [
    "package-name",
    "installer-tag",
    "fetch-dir",
    "stub-installer-tag",
    "sfx-stub",
    "wsx-stub",
]
+
transforms = TransformSequence()
# Validate incoming job descriptions against the schema before any transform runs.
transforms.add_validate(packaging_description_schema)
+
+
+@transforms.add
def copy_in_useful_magic(config, jobs):
    """Copy attributes from upstream task to be used for keyed configuration."""
    for job in jobs:
        upstream_attributes = job["primary-dependency"].attributes
        job["build-platform"] = upstream_attributes.get("build_platform")
        job["shipping-product"] = upstream_attributes.get("shipping_product")
        yield job
+
+
+@transforms.add
def handle_keyed_by(config, jobs):
    """Resolve fields that can be keyed by platform, etc, but not `msix.*` fields
    that can be keyed by `package-format`. Such fields are handled specially below.
    """
    keyed_fields = (
        "mozharness.config",
        "package-formats",
        "worker.max-run-time",
    )
    context = {
        "release-type": config.params["release_type"],
        "level": config.params["level"],
    }
    for job in jobs:
        job = copy_task(job)  # don't overwrite dict values here
        for field in keyed_fields:
            resolve_keyed_by(item=job, field=field, item_name="?", **context)
        yield job
+
+
+@transforms.add
def make_repackage_description(config, jobs):
    """Default the label from the signing dependency: signing-* -> repackage-*."""
    for job in jobs:
        # Computed unconditionally (dict.get evaluates its default eagerly),
        # matching the original behavior; only used when no label is set.
        fallback = job["primary-dependency"].label.replace("signing-", "repackage-")
        job["label"] = job.get("label", fallback)
        yield job
+
+
+@transforms.add
def make_job_description(config, jobs):
    """Turn a validated repackage job into a full job description.

    Builds the description, treeherder info, per-format `mach repackage`
    configs, mozharness run section, worker artifacts and download fetches,
    with kind-specific handling for msi/msix/deb variants.
    """
    for job in jobs:
        dep_job = job["primary-dependency"]
        dependencies = {dep_job.kind: dep_job.label}

        attributes = copy_attributes_from_dependent_job(dep_job)
        attributes["repackage_type"] = "repackage"

        # Locale may come from the upstream's attributes or this job itself.
        locale = attributes.get("locale", job.get("locale"))
        if locale:
            attributes["locale"] = locale

        description = (
            "Repackaging for locale '{locale}' for build '"
            "{build_platform}/{build_type}'".format(
                locale=attributes.get("locale", "en-US"),
                build_platform=attributes.get("build_platform"),
                build_type=attributes.get("build_type"),
            )
        )

        # Defaults; any value given in the job's own treeherder section wins.
        treeherder = job.get("treeherder", {})
        treeherder.setdefault("symbol", "Rpk")
        dep_th_platform = dep_job.task.get("extra", {}).get("treeherder-platform")
        treeherder.setdefault("platform", dep_th_platform)
        treeherder.setdefault("tier", 1)
        treeherder.setdefault("kind", "build")

        # Search dependencies before adding langpack dependencies.
        signing_task = None
        repackage_signing_task = None
        for dependency in dependencies.keys():
            if "repackage-signing" in dependency:
                repackage_signing_task = dependency
            elif "signing" in dependency or "notarization" in dependency:
                signing_task = dependency

        if config.kind == "repackage-msi":
            treeherder["symbol"] = "MSI({})".format(locale or "N")

        elif config.kind == "repackage-msix":
            assert not locale

            # Like "MSIXs(Bs)".
            treeherder["symbol"] = "MSIX({})".format(
                dep_job.task.get("extra", {}).get("treeherder", {}).get("symbol", "B")
            )

        elif config.kind == "repackage-shippable-l10n-msix":
            assert not locale

            if attributes.get("l10n_chunk") or attributes.get("chunk_locales"):
                # We don't want to produce MSIXes for single-locale repack builds.
                continue

            description = (
                "Repackaging with multiple locales for build '"
                "{build_platform}/{build_type}'".format(
                    build_platform=attributes.get("build_platform"),
                    build_type=attributes.get("build_type"),
                )
            )

            # Like "MSIXs(Bs-multi)".
            treeherder["symbol"] = "MSIX({}-multi)".format(
                dep_job.task.get("extra", {}).get("treeherder", {}).get("symbol", "B")
            )

            fetches = job.setdefault("fetches", {})

            # The keys are unique, like `shippable-l10n-signing-linux64-shippable-1/opt`, so we
            # can't ask for the tasks directly, we must filter for them.
            for t in config.kind_dependencies_tasks.values():
                if t.kind != "shippable-l10n-signing":
                    continue
                if t.attributes["build_platform"] != "linux64-shippable":
                    continue
                if t.attributes["build_type"] != "opt":
                    continue

                dependencies.update({t.label: t.label})

                fetches.update(
                    {
                        t.label: [
                            {
                                "artifact": f"{loc}/target.langpack.xpi",
                                "extract": False,
                                # Otherwise we can't disambiguate locales!
                                "dest": f"distribution/extensions/{loc}",
                            }
                            for loc in t.attributes["chunk_locales"]
                        ]
                    }
                )

        elif config.kind == "repackage-deb":
            attributes["repackage_type"] = "repackage-deb"
            description = (
                "Repackaging the '{build_platform}/{build_type}' "
                "{version} build into a '.deb' package"
            ).format(
                build_platform=attributes.get("build_platform"),
                build_type=attributes.get("build_type"),
                version=config.params["version"],
            )

        # Locale used when substituting artifact paths below.
        _fetch_subst_locale = "en-US"
        if locale:
            _fetch_subst_locale = locale

        worker_type = job["worker-type"]
        build_platform = attributes["build_platform"]

        use_stub = attributes.get("stub-installer")

        repackage_config = []
        package_formats = job.get("package-formats")
        if use_stub and not repackage_signing_task and "msix" not in package_formats:
            # if repackage_signing_task doesn't exists, generate the stub installer
            package_formats += ["installer-stub"]
        for format in package_formats:
            command = copy_task(PACKAGE_FORMATS[format])
            substs = {
                "archive_format": archive_format(build_platform),
                "_locale": _fetch_subst_locale,
                "architecture": architecture(build_platform),
                "version_display": config.params["version"],
                "mar-channel-id": attributes["mar-channel-id"],
                "build_number": config.params["build_number"],
                "release_product": config.params["release_product"],
                "release_type": config.params["release_type"],
            }
            # Allow us to replace `args` as well, but specifying things expanded in mozharness
            # without breaking .format and without allowing unknown through.
            substs.update({name: f"{{{name}}}" for name in MOZHARNESS_EXPANSIONS})

            # We need to resolve `msix.*` values keyed by `package-format` for each format, not
            # just once, so we update a temporary copy just for extracting these values.
            temp_job = copy_task(job)
            for msix_key in (
                "channel",
                "identity-name",
                "publisher",
                "publisher-display-name",
                "vendor",
            ):
                resolve_keyed_by(
                    item=temp_job,
                    field=f"msix.{msix_key}",
                    item_name="?",
                    **{
                        "package-format": format,
                        "release-type": config.params["release_type"],
                        "level": config.params["level"],
                    },
                )

                # Turn `msix.channel` into `msix-channel`, etc.
                value = temp_job.get("msix", {}).get(msix_key)
                if value:
                    substs.update(
                        {f"msix-{msix_key}": value},
                    )

            command["inputs"] = {
                name: filename.format(**substs)
                for name, filename in command["inputs"].items()
            }
            command["args"] = [arg.format(**substs) for arg in command["args"]]
            if "installer" in format and "aarch64" not in build_platform:
                command["args"].append("--use-upx")

            repackage_config.append(command)

        run = job.get("mozharness", {})
        run.update(
            {
                "using": "mozharness",
                "script": "mozharness/scripts/repackage.py",
                "job-script": "taskcluster/scripts/builder/repackage.sh",
                "actions": ["setup", "repackage"],
                "extra-config": {
                    "repackage_config": repackage_config,
                },
                "run-as-root": run.get("run-as-root", False),
                "use-caches": run.get("use-caches", True),
            }
        )

        worker = job.get("worker", {})
        worker.update(
            {
                "chain-of-trust": True,
                # Don't add generic artifact directory.
                "skip-artifacts": True,
            }
        )
        worker.setdefault("max-run-time", 3600)

        if locale:
            # Make sure we specify the locale-specific upload dir
            worker.setdefault("env", {})["LOCALE"] = locale

        worker["artifacts"] = _generate_task_output_files(
            dep_job,
            worker_type_implementation(config.graph_config, config.params, worker_type),
            repackage_config=repackage_config,
            locale=locale,
        )
        attributes["release_artifacts"] = [
            artifact["name"] for artifact in worker["artifacts"]
        ]

        task = {
            "label": job["label"],
            "description": description,
            "worker-type": worker_type,
            "dependencies": dependencies,
            "if-dependencies": [dep_job.kind],
            "attributes": attributes,
            "run-on-projects": job.get(
                "run-on-projects", dep_job.attributes.get("run_on_projects")
            ),
            "optimization": dep_job.optimization,
            "treeherder": treeherder,
            "routes": job.get("routes", []),
            "extra": job.get("extra", {}),
            "worker": worker,
            "run": run,
            "fetches": _generate_download_config(
                config,
                dep_job,
                build_platform,
                signing_task,
                repackage_signing_task,
                locale=locale,
                existing_fetch=job.get("fetches"),
            ),
        }

        if build_platform.startswith("macosx"):
            # DMG/PKG tooling needed to repackage mac builds on linux workers.
            task.setdefault("fetches", {}).setdefault("toolchain", []).extend(
                [
                    "linux64-libdmg",
                    "linux64-hfsplus",
                    "linux64-node",
                    "linux64-xar",
                    "linux64-mkbom",
                ]
            )

        if "shipping-phase" in job:
            task["shipping-phase"] = job["shipping-phase"]

        yield task
+
+
def _generate_download_config(
    config,
    task,
    build_platform,
    signing_task,
    repackage_signing_task,
    locale=None,
    existing_fetch=None,
):
    """Build the `fetches` mapping (upstream task label -> artifacts to download).

    Raises NotImplementedError for a platform no branch handles — but only
    when *existing_fetch* didn't already contribute entries.
    """
    locale_path = f"{locale}/" if locale else ""
    fetch = {}
    if existing_fetch:
        fetch.update(existing_fetch)

    if repackage_signing_task and build_platform.startswith("win"):
        # Signed installer already exists; repackage from it (e.g. MSI).
        fetch.update(
            {
                repackage_signing_task: [f"{locale_path}target.installer.exe"],
            }
        )
    elif build_platform.startswith("linux") or build_platform.startswith("macosx"):
        signing_fetch = [
            {
                "artifact": "{}target{}".format(
                    locale_path, archive_format(build_platform)
                ),
                "extract": False,
            },
        ]
        if config.kind == "repackage-deb-l10n":
            # The langpack XPI is an extra input for the .deb language package.
            signing_fetch.append(
                {
                    "artifact": f"{locale_path}target.langpack.xpi",
                    "extract": False,
                }
            )
        fetch.update({signing_task: signing_fetch})
    elif build_platform.startswith("win"):
        fetch.update(
            {
                signing_task: [
                    {
                        "artifact": f"{locale_path}target.zip",
                        "extract": False,
                    },
                    f"{locale_path}setup.exe",
                ],
            }
        )

        use_stub = task.attributes.get("stub-installer")
        if use_stub:
            fetch[signing_task].append(f"{locale_path}setup-stub.exe")

    if fetch:
        return fetch

    raise NotImplementedError(f'Unsupported build_platform: "{build_platform}"')
+
+
def _generate_task_output_files(
    task, worker_implementation, repackage_config, locale=None
):
    """Describe the artifact files each configured repackage step will emit."""
    locale_output_path = f"{locale}/" if locale else ""
    artifact_prefix = get_artifact_prefix(task)

    if worker_implementation == ("docker-worker", "linux"):
        local_prefix = "/builds/worker/workspace/"
    elif worker_implementation == ("generic-worker", "windows"):
        local_prefix = "workspace/"
    else:
        raise NotImplementedError(
            f'Unsupported worker implementation: "{worker_implementation}"'
        )

    return [
        {
            "type": "file",
            "path": f"{local_prefix}outputs/{locale_output_path}{entry['output']}",
            "name": f"{artifact_prefix}/{locale_output_path}{entry['output']}",
        }
        for entry in repackage_config
    ]
diff --git a/taskcluster/gecko_taskgraph/transforms/repackage_l10n.py b/taskcluster/gecko_taskgraph/transforms/repackage_l10n.py
new file mode 100644
index 0000000000..e0f46e6fdc
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/repackage_l10n.py
@@ -0,0 +1,26 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the repackage task into an actual task description.
+"""
+
+
+from taskgraph.transforms.base import TransformSequence
+
+from gecko_taskgraph.util.copy_task import copy_task
+
+transforms = TransformSequence()
+
+
+@transforms.add
def split_locales(config, jobs):
    """Fan each job out into one copy per upstream chunk locale."""
    for job in jobs:
        locales = job["primary-dependency"].attributes.get("chunk_locales", [])
        for locale in locales:
            per_locale = copy_task(job)  # don't overwrite dict values here
            treeherder = per_locale.setdefault("treeherder", {})
            group = per_locale.pop("treeherder-group")
            treeherder["symbol"] = f"{group}({locale})"
            per_locale["locale"] = locale
            yield per_locale
diff --git a/taskcluster/gecko_taskgraph/transforms/repackage_partner.py b/taskcluster/gecko_taskgraph/transforms/repackage_partner.py
new file mode 100644
index 0000000000..582a86dfad
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/repackage_partner.py
@@ -0,0 +1,302 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the repackage task into an actual task description.
+"""
+
+
+import copy
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import optionally_keyed_by, resolve_keyed_by
+from taskgraph.util.taskcluster import get_artifact_prefix
+from voluptuous import Optional, Required
+
+from gecko_taskgraph.loader.single_dep import schema
+from gecko_taskgraph.transforms.repackage import (
+ PACKAGE_FORMATS as PACKAGE_FORMATS_VANILLA,
+)
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
+from gecko_taskgraph.util.partners import get_partner_config_by_kind
+from gecko_taskgraph.util.platforms import archive_format, executable_extension
+from gecko_taskgraph.util.workertypes import worker_type_implementation
+
+
def _by_platform(arg):
    """Allow a schema value *arg* to optionally be keyed by build-platform."""
    return optionally_keyed_by("build-platform", arg)
+
+
# When repacking the stub installer we need to pass a zip file and package name to the
# repackage task. This is not needed for vanilla stub but analogous to the full installer.
PACKAGE_FORMATS = copy.deepcopy(PACKAGE_FORMATS_VANILLA)
# The stub repack consumes the stub archive as its package input ...
PACKAGE_FORMATS["installer-stub"]["inputs"]["package"] = "target-stub{archive_format}"
# ... and needs the package name passed explicitly on the command line.
PACKAGE_FORMATS["installer-stub"]["args"].extend(["--package-name", "{package-name}"])
+
packaging_description_schema = schema.extend(
    {
        # unique label to describe this repackaging task
        Optional("label"): str,
        # Routes specific to this task, if defined
        Optional("routes"): [str],
        # passed through directly to the job description
        Optional("extra"): task_description_schema["extra"],
        # Shipping product and phase
        Optional("shipping-product"): task_description_schema["shipping-product"],
        Optional("shipping-phase"): task_description_schema["shipping-phase"],
        # Package formats to produce; may be keyed by build-platform
        Required("package-formats"): _by_platform([str]),
        # All l10n jobs use mozharness
        Required("mozharness"): {
            # Config files passed to the mozharness script
            Required("config"): _by_platform([str]),
            # Additional paths to look for mozharness configs in. These should be
            # relative to the base of the source checkout
            Optional("config-paths"): [str],
            # if true, perform a checkout of a comm-central based branch inside the
            # gecko checkout
            Optional("comm-checkout"): bool,
        },
        # Override the default priority for the project
        Optional("priority"): task_description_schema["priority"],
    }
)

transforms = TransformSequence()
transforms.add_validate(packaging_description_schema)
+
+
+@transforms.add
def copy_in_useful_magic(config, jobs):
    """Mirror the upstream task's build platform onto the job.

    Later transforms resolve keyed-by configuration against the
    ``build-platform`` field, so it must exist on the job itself.
    """
    for job in jobs:
        upstream = job["primary-dependency"]
        job["build-platform"] = upstream.attributes.get("build_platform")
        yield job
+
+
+@transforms.add
def handle_keyed_by(config, jobs):
    """Resolve fields that can be keyed by platform, etc."""
    keyed_fields = (
        "mozharness.config",
        "package-formats",
    )
    for job in jobs:
        # Resolve into a deep copy so upstream dict values are not mutated.
        resolved = copy.deepcopy(job)
        for keyed_field in keyed_fields:
            resolve_keyed_by(item=resolved, field=keyed_field, item_name="?")
        yield resolved
+
+
+@transforms.add
def make_repackage_description(config, jobs):
    """Default the job label from the upstream task.

    A job without an explicit ``label`` gets the upstream (signing)
    task's label with ``signing-`` swapped for ``repackage-``.
    """
    for job in jobs:
        upstream = job["primary-dependency"]
        derived = upstream.label.replace("signing-", "repackage-")
        job["label"] = job.get("label", derived)
        yield job
+
+
@transforms.add
def make_job_description(config, jobs):
    """Expand each partner repackage job into a full task description.

    Builds the mozharness repackage run, the explicit list of output
    artifacts, and the download (``fetches``) configuration, all keyed
    off the upstream signing/repack task for the platform.
    """
    for job in jobs:
        dep_job = job["primary-dependency"]
        attributes = copy_attributes_from_dependent_job(dep_job)
        build_platform = attributes["build_platform"]

        # Skip jobs whose upstream is the wrong kind for the platform
        # (see the signing_task selection below: windows consumes the
        # repack task, mac consumes the signing task).
        if job["build-platform"].startswith("win"):
            if dep_job.kind.endswith("signing"):
                continue
        if job["build-platform"].startswith("macosx"):
            if dep_job.kind.endswith("repack"):
                continue
        dependencies = {dep_job.attributes.get("kind"): dep_job.label}
        dependencies.update(dep_job.dependencies)

        # Find the dependency whose artifacts we repackage: signing on
        # mac, partner repack on windows.
        signing_task = None
        for dependency in dependencies.keys():
            if build_platform.startswith("macosx") and dependency.endswith("signing"):
                signing_task = dependency
            elif build_platform.startswith("win") and dependency.endswith("repack"):
                signing_task = dependency

        attributes["repackage_type"] = "repackage"

        repack_id = job["extra"]["repack_id"]

        # The per-subpartner config decides whether a stub installer
        # repack is wanted (win32 only).
        partner_config = get_partner_config_by_kind(config, config.kind)
        partner, subpartner, _ = repack_id.split("/")
        repack_stub_installer = partner_config[partner][subpartner].get(
            "repack_stub_installer"
        )
        if build_platform.startswith("win32") and repack_stub_installer:
            job["package-formats"].append("installer-stub")

        # Materialize the per-format repackage commands, substituting the
        # platform's archive format and executable extension into inputs.
        repackage_config = []
        for format in job.get("package-formats"):
            command = copy.deepcopy(PACKAGE_FORMATS[format])
            substs = {
                "archive_format": archive_format(build_platform),
                "executable_extension": executable_extension(build_platform),
            }
            command["inputs"] = {
                name: filename.format(**substs)
                for name, filename in command["inputs"].items()
            }
            repackage_config.append(command)

        run = job.get("mozharness", {})
        run.update(
            {
                "using": "mozharness",
                "script": "mozharness/scripts/repackage.py",
                "job-script": "taskcluster/scripts/builder/repackage.sh",
                "actions": ["setup", "repackage"],
                "extra-config": {
                    "repackage_config": repackage_config,
                },
            }
        )

        worker = {
            "chain-of-trust": True,
            "max-run-time": 3600,
            # NOTE(review): proxy is enabled whenever the dep has an
            # artifact prefix — presumably to fetch private partner
            # artifacts; confirm.
            "taskcluster-proxy": True if get_artifact_prefix(dep_job) else False,
            "env": {
                "REPACK_ID": repack_id,
            },
            # Don't add generic artifact directory.
            "skip-artifacts": True,
        }

        # Repackaging always runs on a Linux docker worker, even for
        # mac/windows payloads.
        worker_type = "b-linux-gcp"
        worker["docker-image"] = {"in-tree": "debian11-amd64-build"}

        worker["artifacts"] = _generate_task_output_files(
            dep_job,
            worker_type_implementation(config.graph_config, config.params, worker_type),
            repackage_config,
            partner=repack_id,
        )

        description = (
            "Repackaging for repack_id '{repack_id}' for build '"
            "{build_platform}/{build_type}'".format(
                repack_id=job["extra"]["repack_id"],
                build_platform=attributes.get("build_platform"),
                build_type=attributes.get("build_type"),
            )
        )

        task = {
            "label": job["label"],
            "description": description,
            "worker-type": worker_type,
            "dependencies": dependencies,
            "attributes": attributes,
            "scopes": ["queue:get-artifact:releng/partner/*"],
            "run-on-projects": dep_job.attributes.get("run_on_projects"),
            "routes": job.get("routes", []),
            "extra": job.get("extra", {}),
            "worker": worker,
            "run": run,
            "fetches": _generate_download_config(
                dep_job,
                build_platform,
                signing_task,
                partner=repack_id,
                project=config.params["project"],
                repack_stub_installer=repack_stub_installer,
            ),
        }

        # we may have reduced the priority for partner jobs, otherwise task.py will set it
        if job.get("priority"):
            task["priority"] = job["priority"]
        if build_platform.startswith("macosx"):
            # dmg/hfsplus tooling (and node) are needed to repack mac disk
            # images on the Linux worker.
            task.setdefault("fetches", {}).setdefault("toolchain", []).extend(
                [
                    "linux64-libdmg",
                    "linux64-hfsplus",
                    "linux64-node",
                ]
            )
        yield task
+
+
+def _generate_download_config(
+ task,
+ build_platform,
+ signing_task,
+ partner=None,
+ project=None,
+ repack_stub_installer=False,
+):
+ locale_path = f"{partner}/" if partner else ""
+
+ if build_platform.startswith("macosx"):
+ return {
+ signing_task: [
+ {
+ "artifact": f"{locale_path}target.tar.gz",
+ "extract": False,
+ },
+ ],
+ }
+ if build_platform.startswith("win"):
+ download_config = [
+ {
+ "artifact": f"{locale_path}target.zip",
+ "extract": False,
+ },
+ f"{locale_path}setup.exe",
+ ]
+ if build_platform.startswith("win32") and repack_stub_installer:
+ download_config.extend(
+ [
+ {
+ "artifact": f"{locale_path}target-stub.zip",
+ "extract": False,
+ },
+ f"{locale_path}setup-stub.exe",
+ ]
+ )
+ return {signing_task: download_config}
+
+ raise NotImplementedError(f'Unsupported build_platform: "{build_platform}"')
+
+
def _generate_task_output_files(task, worker_implementation, repackage_config, partner):
    """Build the explicit artifact list for the repackage task.

    Note there's an artifacts directory too, courtesy of
    generic_worker_add_artifacts() (windows) or
    docker_worker_add_artifacts(), so errors here are likely masked by that.
    """
    partner_dir = f"{partner}/"
    artifact_prefix = get_artifact_prefix(task)

    # Workspace location differs between the two supported workers.
    local_prefixes = {
        ("docker-worker", "linux"): "/builds/worker/workspace/",
        ("generic-worker", "windows"): "workspace/",
    }
    local_prefix = local_prefixes.get(worker_implementation)
    if local_prefix is None:
        raise NotImplementedError(
            f'Unsupported worker implementation: "{worker_implementation}"'
        )

    return [
        {
            "type": "file",
            "path": f"{local_prefix}outputs/{partner_dir}{entry['output']}",
            "name": f"{artifact_prefix}/{partner_dir}{entry['output']}",
        }
        for entry in repackage_config
    ]
diff --git a/taskcluster/gecko_taskgraph/transforms/repackage_routes.py b/taskcluster/gecko_taskgraph/transforms/repackage_routes.py
new file mode 100644
index 0000000000..2973ee35bd
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/repackage_routes.py
@@ -0,0 +1,34 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Add indexes to repackage kinds
+"""
+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
+@transforms.add
def add_indexes(config, jobs):
    """Attach index routes to repackage tasks.

    Non-debug jobs carrying a ``repackage_type`` attribute get an index
    entry whose type reflects shippability and locale:

    * shippable + locale -> ``shippable-l10n``
    * shippable only     -> ``shippable``
    * locale only        -> ``l10n``
    * neither            -> ``generic``
    """
    for job in jobs:
        repackage_type = job["attributes"].get("repackage_type")
        if repackage_type and job["attributes"]["build_type"] != "debug":
            build_platform = job["attributes"]["build_platform"]
            job_name = f"{build_platform}-{repackage_type}"
            product = job.get("index", {}).get("product", "firefox")
            shippable = job["attributes"].get("shippable")
            locale = job["attributes"].get("locale")
            # Use an elif chain so the combined shippable+locale case is
            # not clobbered by the narrower checks (a chain of independent
            # ifs here would make "shippable-l10n" unreachable).
            if shippable and locale:
                index_type = "shippable-l10n"
            elif shippable:
                index_type = "shippable"
            elif locale:
                index_type = "l10n"
            else:
                index_type = "generic"
            job["index"] = {
                "job-name": job_name,
                "product": product,
                "type": index_type,
            }

        yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/repackage_set_upstream_mac_kind.py b/taskcluster/gecko_taskgraph/transforms/repackage_set_upstream_mac_kind.py
new file mode 100644
index 0000000000..14c865eea5
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/repackage_set_upstream_mac_kind.py
@@ -0,0 +1,39 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform mac notarization tasks
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+
+transforms = TransformSequence()
+
+
+@transforms.add
def repackage_set_upstream_mac_kind(config, tasks):
    """
    Notarization only runs on level 3
    If level < 3 then repackage the mac-signing task artifact
    Exception for debug builds, which will use signed build on level 3
    """
    for task in tasks:
        upstream = task["primary-dependency"]
        if "macosx64" not in upstream.attributes["build_platform"]:
            # Not mac: the keyed-by field is irrelevant; drop it and pass through.
            task.pop("upstream-mac-kind")
            yield task
            continue
        resolve_keyed_by(
            task,
            "upstream-mac-kind",
            item_name=config.kind,
            **{
                "build-type": upstream.attributes["build_type"],
                "project": config.params.get("project"),
            }
        )
        # Keep only the task whose upstream matches the resolved kind.
        if upstream.kind == task.pop("upstream-mac-kind"):
            yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/repackage_signing.py b/taskcluster/gecko_taskgraph/transforms/repackage_signing.py
new file mode 100644
index 0000000000..f98f8f0814
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/repackage_signing.py
@@ -0,0 +1,137 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the repackage signing task into an actual task description.
+"""
+
+import os
+
+from taskgraph.transforms.base import TransformSequence
+from voluptuous import Optional
+
+from gecko_taskgraph.loader.single_dep import schema
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
+from gecko_taskgraph.util.scriptworker import get_signing_cert_scope_per_platform
+
repackage_signing_description_schema = schema.extend(
    {
        # Unique label for the signing task; passed through when provided.
        Optional("label"): str,
        Optional("treeherder"): task_description_schema["treeherder"],
        Optional("shipping-product"): task_description_schema["shipping-product"],
        Optional("shipping-phase"): task_description_schema["shipping-phase"],
    }
)

# Map of installer artifact basename -> autograph signing format(s).
# Only artifacts whose basename appears here get signed; everything
# else in the upstream release_artifacts is skipped.
SIGNING_FORMATS = {
    "target.installer.exe": ["autograph_authenticode_sha2_stub"],
    "target.stub-installer.exe": ["autograph_authenticode_sha2_stub"],
    "target.installer.msi": ["autograph_authenticode_sha2"],
    "target.installer.msix": ["autograph_authenticode_sha2"],
}

transforms = TransformSequence()
transforms.add_validate(repackage_signing_description_schema)
+
+
@transforms.add
def make_repackage_signing_description(config, jobs):
    """Turn a repackage job into a signing task description.

    Signs the installer artifacts (exe/msi/msix) produced by the
    upstream repackage task, picking signing formats from
    SIGNING_FORMATS by artifact basename.
    """
    for job in jobs:
        dep_job = job["primary-dependency"]
        attributes = copy_attributes_from_dependent_job(dep_job)
        locale = attributes.get("locale", dep_job.attributes.get("locale"))
        attributes["repackage_type"] = "repackage-signing"

        # Inherit treeherder info from the upstream task, with defaults.
        treeherder = job.get("treeherder", {})
        treeherder.setdefault("symbol", "rs(B)")
        dep_th_platform = dep_job.task.get("extra", {}).get("treeherder-platform")
        treeherder.setdefault("platform", dep_th_platform)
        treeherder.setdefault(
            "tier", dep_job.task.get("extra", {}).get("treeherder", {}).get("tier", 1)
        )
        treeherder.setdefault("kind", "build")

        if locale:
            treeherder["symbol"] = f"rs({locale})"

        if config.kind == "repackage-signing-msi":
            treeherder["symbol"] = "MSIs({})".format(locale or "N")

        elif config.kind in (
            "repackage-signing-msix",
            "repackage-signing-shippable-l10n-msix",
        ):
            # Like "MSIXs(Bs-multi)".
            treeherder["symbol"] = "MSIXs({})".format(
                dep_job.task.get("extra", {}).get("treeherder", {}).get("symbol", "B")
            )

        label = job["label"]

        # l10n repackage kinds are referenced simply as "repackage".
        dep_kind = dep_job.kind
        if "l10n" in dep_kind:
            dep_kind = "repackage"

        dependencies = {dep_kind: dep_job.label}

        signing_dependencies = dep_job.dependencies
        # This is so we get the build task etc in our dependencies to have better beetmover
        # support. But for multi-locale MSIX packages, we don't want the signing task to directly
        # depend on the langpack tasks.
        dependencies.update(
            {
                k: v
                for k, v in signing_dependencies.items()
                if k != "docker-image"
                and not k.startswith("shippable-l10n-signing-linux64")
            }
        )

        description = (
            "Signing of repackaged artifacts for locale '{locale}' for build '"
            "{build_platform}/{build_type}'".format(
                locale=attributes.get("locale", "en-US"),
                build_platform=attributes.get("build_platform"),
                build_type=attributes.get("build_type"),
            )
        )

        build_platform = dep_job.attributes.get("build_platform")
        is_shippable = dep_job.attributes.get("shippable")
        signing_cert_scope = get_signing_cert_scope_per_platform(
            build_platform, is_shippable, config
        )
        scopes = [signing_cert_scope]

        # Only artifacts with a known signing format are forwarded.
        # NOTE(review): assumes the upstream task always sets the
        # "release_artifacts" attribute — sorted(None) would raise
        # TypeError; confirm against the repackage kinds.
        upstream_artifacts = []
        for artifact in sorted(dep_job.attributes.get("release_artifacts")):
            basename = os.path.basename(artifact)
            if basename in SIGNING_FORMATS:
                upstream_artifacts.append(
                    {
                        "taskId": {"task-reference": f"<{dep_kind}>"},
                        "taskType": "repackage",
                        "paths": [artifact],
                        "formats": SIGNING_FORMATS[os.path.basename(artifact)],
                    }
                )

        task = {
            "label": label,
            "description": description,
            # Non-shippable builds go to the dep-signing pool.
            "worker-type": "linux-signing" if is_shippable else "linux-depsigning",
            "worker": {
                "implementation": "scriptworker-signing",
                "upstream-artifacts": upstream_artifacts,
                "max-run-time": 3600,
            },
            "scopes": scopes,
            "dependencies": dependencies,
            "attributes": attributes,
            "run-on-projects": dep_job.attributes.get("run_on_projects"),
            "optimization": dep_job.optimization,
            "treeherder": treeherder,
        }

        yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/repackage_signing_partner.py b/taskcluster/gecko_taskgraph/transforms/repackage_signing_partner.py
new file mode 100644
index 0000000000..eaf71f92a2
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/repackage_signing_partner.py
@@ -0,0 +1,145 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the repackage signing task into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.taskcluster import get_artifact_path
+from voluptuous import Optional
+
+from gecko_taskgraph.loader.single_dep import schema
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
+from gecko_taskgraph.util.partners import get_partner_config_by_kind
+from gecko_taskgraph.util.scriptworker import get_signing_cert_scope_per_platform
+
transforms = TransformSequence()

repackage_signing_description_schema = schema.extend(
    {
        # Unique label for the signing task; derived from the dep when absent.
        Optional("label"): str,
        # Passed through to the task; partner repacks stash repack_id here.
        Optional("extra"): object,
        Optional("shipping-product"): task_description_schema["shipping-product"],
        Optional("shipping-phase"): task_description_schema["shipping-phase"],
        # Override the default priority for the project
        Optional("priority"): task_description_schema["priority"],
    }
)

transforms.add_validate(repackage_signing_description_schema)
+
+
@transforms.add
def make_repackage_signing_description(config, jobs):
    """Turn a partner repackage job into a signing task description.

    Signs the per-partner installer (windows), dmg (mac) or tarball
    (linux) with the platform-appropriate autograph formats.
    """
    for job in jobs:
        dep_job = job["primary-dependency"]
        repack_id = dep_job.task["extra"]["repack_id"]
        attributes = dep_job.attributes
        build_platform = dep_job.attributes.get("build_platform")
        is_shippable = dep_job.attributes.get("shippable")

        # Mac & windows
        label = dep_job.label.replace("repackage-", "repackage-signing-")
        # Linux
        label = label.replace("chunking-dummy-", "repackage-signing-")
        description = "Signing of repackaged artifacts for partner repack id '{repack_id}' for build '" "{build_platform}/{build_type}'".format(  # NOQA: E501
            repack_id=repack_id,
            build_platform=attributes.get("build_platform"),
            build_type=attributes.get("build_type"),
        )

        if "linux" in build_platform:
            # we want the repack job, via the dependencies for the chunking-dummy dep_job
            # NOTE(review): if no dependency label starts with
            # "release-partner-repack", `dependencies` stays unbound and the
            # task construction below raises NameError — confirm this cannot
            # happen in practice.
            for dep in dep_job.dependencies.values():
                if dep.startswith("release-partner-repack"):
                    dependencies = {"repack": dep}
                    break
        else:
            # we have a genuine repackage job as our parent
            dependencies = {"repackage": dep_job.label}

        attributes = copy_attributes_from_dependent_job(dep_job)
        attributes["repackage_type"] = "repackage-signing"

        signing_cert_scope = get_signing_cert_scope_per_platform(
            build_platform, is_shippable, config
        )
        scopes = [signing_cert_scope]

        if "win" in build_platform:
            upstream_artifacts = [
                {
                    "taskId": {"task-reference": "<repackage>"},
                    "taskType": "repackage",
                    "paths": [
                        get_artifact_path(dep_job, f"{repack_id}/target.installer.exe"),
                    ],
                    "formats": ["autograph_authenticode_sha2", "autograph_gpg"],
                }
            ]

            # Also sign the stub installer when this subpartner opted in
            # (win32 only).
            partner_config = get_partner_config_by_kind(config, config.kind)
            partner, subpartner, _ = repack_id.split("/")
            repack_stub_installer = partner_config[partner][subpartner].get(
                "repack_stub_installer"
            )
            if build_platform.startswith("win32") and repack_stub_installer:
                upstream_artifacts.append(
                    {
                        "taskId": {"task-reference": "<repackage>"},
                        "taskType": "repackage",
                        "paths": [
                            get_artifact_path(
                                dep_job,
                                f"{repack_id}/target.stub-installer.exe",
                            ),
                        ],
                        "formats": ["autograph_authenticode_sha2", "autograph_gpg"],
                    }
                )
        elif "mac" in build_platform:
            upstream_artifacts = [
                {
                    "taskId": {"task-reference": "<repackage>"},
                    "taskType": "repackage",
                    "paths": [
                        get_artifact_path(dep_job, f"{repack_id}/target.dmg"),
                    ],
                    "formats": ["autograph_gpg"],
                }
            ]
        elif "linux" in build_platform:
            upstream_artifacts = [
                {
                    "taskId": {"task-reference": "<repack>"},
                    "taskType": "repackage",
                    "paths": [
                        get_artifact_path(dep_job, f"{repack_id}/target.tar.bz2"),
                    ],
                    "formats": ["autograph_gpg"],
                }
            ]

        task = {
            "label": label,
            "description": description,
            "worker-type": "linux-signing",
            "worker": {
                "implementation": "scriptworker-signing",
                "upstream-artifacts": upstream_artifacts,
                "max-run-time": 3600,
            },
            "scopes": scopes,
            "dependencies": dependencies,
            "attributes": attributes,
            "run-on-projects": dep_job.attributes.get("run_on_projects"),
            "extra": {
                "repack_id": repack_id,
            },
        }
        # we may have reduced the priority for partner jobs, otherwise task.py will set it
        if job.get("priority"):
            task["priority"] = job["priority"]

        yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/repo_update.py b/taskcluster/gecko_taskgraph/transforms/repo_update.py
new file mode 100644
index 0000000000..f4f135b585
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/repo_update.py
@@ -0,0 +1,25 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the repo-update task into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+
+transforms = TransformSequence()
+
+
+@transforms.add
def resolve_keys(config, tasks):
    """Resolve keyed-by worker environment variables and drop empty ones.

    Injects ``BRANCH`` from the current project before resolution.
    """
    for task in tasks:
        env = task["worker"].setdefault("env", {})
        env["BRANCH"] = config.params["project"]
        for name in env:
            resolve_keyed_by(env, name, name, **config.params)

        # Prune entries that resolved to an empty/falsy value.
        for name in [key for key, value in env.items() if not value]:
            del env[name]
        yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/reprocess_symbols.py b/taskcluster/gecko_taskgraph/transforms/reprocess_symbols.py
new file mode 100644
index 0000000000..5ad359bd5a
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/reprocess_symbols.py
@@ -0,0 +1,67 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the reprocess-symbols task description template,
+taskcluster/ci/reprocess-symbols/job-template.yml into an actual task description.
+"""
+
+
+import logging
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.treeherder import inherit_treeherder_from_dep, join_symbol
+
+from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
+
+logger = logging.getLogger(__name__)
+
+transforms = TransformSequence()
+
+
@transforms.add
def fill_template(config, tasks):
    """Flesh out a reprocess-symbols task from its build and upload deps."""
    for task in tasks:
        # Expect exactly two dependent tasks (the build and the symbol upload).
        assert len(task["dependent-tasks"]) == 2

        build_dep = task["primary-dependency"]
        upload_dep = None
        for dep_idx in task["dependent-tasks"]:
            dep = task["dependent-tasks"][dep_idx]
            # NOTE(review): dep_idx is a dict key while build_dep is a task
            # object, so this comparison is always true and upload_dep ends
            # up being the last dependency iterated — confirm this reliably
            # selects the upload task rather than the build.
            if dep_idx != build_dep:
                upload_dep = dep

        task.pop("dependent-tasks", None)

        # Fill out the dynamic fields in the task description
        task["label"] = build_dep.label + "-reprocess-symbols"
        task["dependencies"] = {"build": build_dep.label, "upload": upload_dep.label}
        task["worker"]["env"]["GECKO_HEAD_REPOSITORY"] = config.params[
            "head_repository"
        ]
        task["worker"]["env"]["GECKO_HEAD_REV"] = config.params["head_rev"]
        # The crash-stats secret name is level-specific.
        task["worker"]["env"]["CRASHSTATS_SECRET"] = task["worker"]["env"][
            "CRASHSTATS_SECRET"
        ].format(level=config.params["level"])

        attributes = copy_attributes_from_dependent_job(build_dep)
        attributes.update(task.get("attributes", {}))
        task["attributes"] = attributes

        treeherder = inherit_treeherder_from_dep(task, build_dep)
        th = build_dep.task.get("extra")["treeherder"]
        th_symbol = th.get("symbol")
        th_groupsymbol = th.get("groupSymbol", "?")

        # Disambiguate the treeherder symbol: "B..." becomes "Rep...".
        sym = "Rep" + (th_symbol[1:] if th_symbol.startswith("B") else th_symbol)
        treeherder.setdefault("symbol", join_symbol(th_groupsymbol, sym))
        task["treeherder"] = treeherder

        task["run-on-projects"] = build_dep.attributes.get("run_on_projects")
        # Custom optimization strategy; "if-dependencies" ties scheduling
        # of this task to its build dependency.
        task["optimization"] = {"reprocess-symbols": None}
        task["if-dependencies"] = ["build"]

        del task["primary-dependency"]

        yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/reverse_chunk_deps.py b/taskcluster/gecko_taskgraph/transforms/reverse_chunk_deps.py
new file mode 100644
index 0000000000..ac2f282799
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/reverse_chunk_deps.py
@@ -0,0 +1,45 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Adjust dependencies to not exceed MAX_DEPENDENCIES
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.treeherder import add_suffix
+
+import gecko_taskgraph.transforms.release_deps as release_deps
+from gecko_taskgraph import MAX_DEPENDENCIES
+from gecko_taskgraph.util.copy_task import copy_task
+
+transforms = TransformSequence()
+
+
def yield_job(orig_job, deps, count):
    """Return a copy of *orig_job* limited to the *deps* chunk.

    The job name — and the treeherder symbol, when present — gets a
    ``-<count>`` suffix so each chunk is distinct.
    """
    chunk = copy_task(orig_job)
    chunk["dependencies"] = deps
    chunk["name"] = f"{orig_job['name']}-{count}"
    if "treeherder" in chunk:
        symbol = chunk["treeherder"]["symbol"]
        chunk["treeherder"]["symbol"] = add_suffix(symbol, f"-{count}")

    return chunk
+
+
+@transforms.add
def add_dependencies(config, jobs):
    """Split jobs whose dependency count exceeds MAX_DEPENDENCIES.

    Each oversized job is emitted as a numbered series of chunk jobs,
    every chunk carrying at most MAX_DEPENDENCIES dependencies.
    """
    for job in release_deps.add_dependencies(config, jobs):
        chunk_index = 1
        batch = {}

        # Sorting keeps chunk assignment deterministic across runs.
        for dep_label in sorted(job["dependencies"]):
            batch[dep_label] = dep_label
            if len(batch) == MAX_DEPENDENCIES:
                yield yield_job(job, batch, chunk_index)
                batch = {}
                chunk_index += 1
        if batch:
            yield yield_job(job, batch, chunk_index)
diff --git a/taskcluster/gecko_taskgraph/transforms/run_pgo_profile.py b/taskcluster/gecko_taskgraph/transforms/run_pgo_profile.py
new file mode 100644
index 0000000000..2585bdd712
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/run_pgo_profile.py
@@ -0,0 +1,34 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Apply some defaults and minor modifications to the pgo jobs.
+"""
+
+import logging
+
+from taskgraph.transforms.base import TransformSequence
+
+logger = logging.getLogger(__name__)
+
+transforms = TransformSequence()
+
+
+@transforms.add
def run_profile_data(config, jobs):
    """Wire each pgo job to fetch its instrumented build's artifacts.

    Picks the platform-appropriate package artifact plus the crashreporter
    symbols from the matching ``instrumented-build-<name>`` task.
    """
    for job in jobs:
        platform = job["attributes"].get("build_platform")
        if "android" in platform:
            package = "geckoview-test_runner.apk"
        elif "macosx64" in platform:
            package = "target.dmg"
        elif "win" in platform:
            package = "target.zip"
        else:
            package = "target.tar.bz2"
        fetches = job.setdefault("fetches", {})
        fetches["instrumented-build-{}".format(job["name"])] = [
            package,
            "target.crashreporter-symbols.zip",
        ]
        yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/scriptworker.py b/taskcluster/gecko_taskgraph/transforms/scriptworker.py
new file mode 100644
index 0000000000..5d382702af
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/scriptworker.py
@@ -0,0 +1,18 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+Transforms for adding appropriate scopes to scriptworker tasks.
+"""
+
+
+from gecko_taskgraph.util.scriptworker import get_balrog_server_scope
+
+
def add_balrog_scopes(config, jobs):
    """Give each job exactly the balrog server scope for this config."""
    for job in jobs:
        job["scopes"] = [get_balrog_server_scope(config)]
        yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/scriptworker_canary.py b/taskcluster/gecko_taskgraph/transforms/scriptworker_canary.py
new file mode 100644
index 0000000000..43735f3dce
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/scriptworker_canary.py
@@ -0,0 +1,46 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Build a command to run `mach release push-scriptworker-canaries`.
+"""
+
+
+from pipes import quote as shell_quote
+
+from mozrelease.scriptworker_canary import TASK_TYPES
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
+@transforms.add
def build_command(config, jobs):
    """Attach a `mach release push-scriptworker-canary` command to each job.

    Reads the requested canary workers from try_task_config, keeps only
    those that have a configured canary task type, and builds the mach
    command (plus any ssh-key secret scope) for each job. Yields nothing
    when no usable workers were requested.
    """
    # The module-level `pipes.quote` alias relies on a module that was
    # deprecated in Python 3.11 and removed in 3.13; bind the equivalent
    # shlex.quote locally so this transform keeps working.
    from shlex import quote as shell_quote

    scriptworkers = config.params["try_task_config"].get(
        "scriptworker-canary-workers", []
    )
    # Filter the list of workers to those we have configured a set of canary
    # tasks for.
    scriptworkers = [
        scriptworker for scriptworker in scriptworkers if scriptworker in TASK_TYPES
    ]

    if not scriptworkers:
        return

    for job in jobs:
        command = ["release", "push-scriptworker-canary"]
        for scriptworker in scriptworkers:
            command.extend(["--scriptworker", scriptworker])
        for address in job.pop("addresses"):
            command.extend(["--address", address])
        if "ssh-key-secret" in job:
            ssh_key_secret = job.pop("ssh-key-secret")
            command.extend(["--ssh-key-secret", ssh_key_secret])
            # Reading the secret at runtime requires the matching scope.
            job.setdefault("scopes", []).append(f"secrets:get:{ssh_key_secret}")

        job.setdefault("run", {}).update(
            {"using": "mach", "mach": " ".join(map(shell_quote, command))}
        )
        yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/sentry.py b/taskcluster/gecko_taskgraph/transforms/sentry.py
new file mode 100644
index 0000000000..2e43a15518
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/sentry.py
@@ -0,0 +1,30 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
+@transforms.add
def sentry(config, tasks):
    """Do transforms specific to github-sync tasks."""
    # These tasks only run on mozilla-central and try.
    if config.params["project"] not in ("mozilla-central", "try"):
        return
    for task in tasks:
        params = config.params
        task["scopes"] = [
            scope.format(level=params["level"]) for scope in task["scopes"]
        ]
        # Substitute level / repository / revision into every env value.
        task["worker"]["env"] = {
            name: template.format(
                level=params["level"],
                head_repository=params["head_repository"],
                head_rev=params["head_rev"],
            )
            for name, template in task["worker"]["env"].items()
        }
        yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/shippable_l10n_signing.py b/taskcluster/gecko_taskgraph/transforms/shippable_l10n_signing.py
new file mode 100644
index 0000000000..324efa92dd
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/shippable_l10n_signing.py
@@ -0,0 +1,86 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the signing task into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.treeherder import join_symbol
+
+from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
+from gecko_taskgraph.util.signed_artifacts import (
+ generate_specifications_of_artifacts_to_sign,
+)
+
+transforms = TransformSequence()
+
+
+@transforms.add
def make_signing_description(config, jobs):
    """Set depname and a chunk-suffixed L10n treeherder symbol on each job."""
    for job in jobs:
        dep_job = job["primary-dependency"]
        job["depname"] = dep_job.label

        # Append the l10n chunk number so each chunk gets a distinct symbol
        # (e.g. "Bs1", "Bs2", ...), then group them all under "L10n".
        base_symbol = job.get("treeherder", {}).get("symbol", "Bs")
        chunk = dep_job.attributes.get("l10n_chunk")
        job["treeherder"] = {
            "symbol": join_symbol("L10n", f"{base_symbol}{chunk}"),
        }

        yield job
+
+
@transforms.add
def define_upstream_artifacts(config, jobs):
    """Build the ``upstream-artifacts`` list for each signing job.

    Artifacts are pulled from ``upstream-artifact-task`` when given,
    otherwise from the primary dependency. One artifact entry is emitted per
    signing spec, with paths expanded for every locale in the upstream
    task's ``chunk_locales`` attribute.
    """
    for job in jobs:
        dep_job = job["primary-dependency"]
        # The task holding the artifacts may differ from the primary
        # dependency (e.g. a mac signing/notarization task).
        upstream_artifact_task = job.pop("upstream-artifact-task", dep_job)

        job["attributes"] = copy_attributes_from_dependent_job(dep_job)
        if dep_job.attributes.get("chunk_locales"):
            # Used for l10n attribute passthrough
            job["attributes"]["chunk_locales"] = dep_job.attributes.get("chunk_locales")

        locale_specifications = generate_specifications_of_artifacts_to_sign(
            config,
            job,
            keep_locale_template=True,
            dep_kind=upstream_artifact_task.kind,
        )

        upstream_artifacts = []
        for spec in locale_specifications:
            upstream_task_type = "l10n"
            if upstream_artifact_task.kind.endswith(
                ("-mac-notarization", "-mac-signing")
            ):
                # Upstream is mac signing or notarization
                upstream_task_type = "scriptworker"
            upstream_artifacts.append(
                {
                    # <kind> is resolved to the upstream taskId later on.
                    "taskId": {"task-reference": f"<{upstream_artifact_task.kind}>"},
                    "taskType": upstream_task_type,
                    # Set paths based on artifacts in the specs (above) one per
                    # locale present in the chunk this is signing stuff for.
                    # Pass paths through set and sorted() so we get a list back
                    # and we remove any duplicates (e.g. hardcoded ja-JP-mac langpack)
                    "paths": sorted(
                        {
                            path_template.format(locale=locale)
                            for locale in upstream_artifact_task.attributes.get(
                                "chunk_locales", []
                            )
                            for path_template in spec["artifacts"]
                        }
                    ),
                    "formats": spec["formats"],
                }
            )

        job["upstream-artifacts"] = upstream_artifacts

        yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/signing.py b/taskcluster/gecko_taskgraph/transforms/signing.py
new file mode 100644
index 0000000000..9a5873fc81
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/signing.py
@@ -0,0 +1,266 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the signing task into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.keyed_by import evaluate_keyed_by
+from taskgraph.util.schema import taskref_or_string
+from voluptuous import Optional, Required
+
+from gecko_taskgraph.loader.single_dep import schema
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import (
+ copy_attributes_from_dependent_job,
+ release_level,
+)
+from gecko_taskgraph.util.scriptworker import (
+ add_scope_prefix,
+ get_signing_cert_scope_per_platform,
+)
+
+transforms = TransformSequence()
+
# Schema for signing jobs, extending the single_dep loader schema.
signing_description_schema = schema.extend(
    {
        # Artifacts from dep task to sign - Sync with taskgraph/transforms/task.py
        # because this is passed directly into the signingscript worker
        Required("upstream-artifacts"): [
            {
                # taskId of the task with the artifact
                Required("taskId"): taskref_or_string,
                # type of signing task (for CoT)
                Required("taskType"): str,
                # Paths to the artifacts to sign
                Required("paths"): [str],
                # Signing formats to use on each of the paths
                Required("formats"): [str],
            }
        ],
        # depname is used in taskref's to identify the taskID of the unsigned things
        Required("depname"): str,
        # attributes for this task
        Optional("attributes"): {str: object},
        # unique label to describe this signing task, defaults to {dep.label}-signing
        Optional("label"): str,
        # treeherder is allowed here to override any defaults we use for signing. See
        # taskcluster/gecko_taskgraph/transforms/task.py for the schema details, and the
        # below transforms for defaults of various values.
        Optional("treeherder"): task_description_schema["treeherder"],
        # Routes specific to this task, if defined
        Optional("routes"): [str],
        Optional("shipping-phase"): task_description_schema["shipping-phase"],
        Optional("shipping-product"): task_description_schema["shipping-product"],
        # Dependencies keyed by name; used by _generate_dependencies below in
        # preference to the primary dependency.
        Optional("dependent-tasks"): {str: object},
        # Optional control for how long a task may run (aka maxRunTime)
        Optional("max-run-time"): int,
        Optional("extra"): {str: object},
        # Max number of partner repacks per chunk
        Optional("repacks-per-chunk"): int,
        # Override the default priority for the project
        Optional("priority"): task_description_schema["priority"],
    }
)
+
+
+@transforms.add
def set_defaults(config, jobs):
    """Default ``depname`` to the primary dependency's kind when unset."""
    for job in jobs:
        if job.get("depname"):
            yield job
            continue
        primary = job["primary-dependency"]
        job["depname"] = primary.kind
        yield job
+
+
+transforms.add_validate(signing_description_schema)
+
+
+@transforms.add
def add_entitlements_link(config, jobs):
    """Attach a mac entitlements file URL, keyed by platform and release level."""
    for job in jobs:
        build_platform = job["primary-dependency"].attributes.get("build_platform")
        entitlements_path = evaluate_keyed_by(
            config.graph_config["mac-notarization"]["mac-entitlements"],
            "mac entitlements",
            {
                "platform": build_platform,
                "release-level": release_level(config.params["project"]),
            },
        )
        # Only set the URL when the keyed-by lookup yielded a path.
        if entitlements_path:
            job["entitlements-url"] = config.params.file_url(entitlements_path)
        yield job
+
+
+@transforms.add
def add_requirements_link(config, jobs):
    """Attach a mac requirements plist URL, keyed by build platform."""
    for job in jobs:
        build_platform = job["primary-dependency"].attributes.get("build_platform")
        requirements_path = evaluate_keyed_by(
            config.graph_config["mac-notarization"]["mac-requirements"],
            "mac requirements",
            {"platform": build_platform},
        )
        # Only set the URL when the keyed-by lookup yielded a path.
        if requirements_path:
            job["requirements-plist-url"] = config.params.file_url(requirements_path)
        yield job
+
+
@transforms.add
def make_task_description(config, jobs):
    """Turn a validated signing job into a full task description.

    Picks the signing worker type (linux vs mac, dep- vs release-signing),
    derives treeherder metadata from the dependent build, and fills in
    scopes, dependencies, attributes and worker configuration.
    """
    for job in jobs:
        dep_job = job["primary-dependency"]
        attributes = dep_job.attributes

        # NOTE(review): signing_format_scopes stays empty and `formats` is
        # collected but never used below — both look like leftovers from
        # per-format scope handling; confirm before relying on them.
        signing_format_scopes = []
        formats = set()
        for artifacts in job["upstream-artifacts"]:
            for f in artifacts["formats"]:
                formats.add(f)  # Add each format only once

        is_shippable = dep_job.attributes.get("shippable", False)
        build_platform = dep_job.attributes.get("build_platform")
        treeherder = None
        # Partner and EME-free repacks are not reported to treeherder.
        if "partner" not in config.kind and "eme-free" not in config.kind:
            treeherder = job.get("treeherder", {})

            dep_th_platform = (
                dep_job.task.get("extra", {})
                .get("treeherder", {})
                .get("machine", {})
                .get("platform", "")
            )
            build_type = dep_job.attributes.get("build_type")
            treeherder.setdefault(
                "platform",
                _generate_treeherder_platform(
                    dep_th_platform, build_platform, build_type
                ),
            )

            # ccov builds are tier 2, so they cannot have tier 1 tasks
            # depending on them.
            treeherder.setdefault(
                "tier",
                dep_job.task.get("extra", {}).get("treeherder", {}).get("tier", 1),
            )
            treeherder.setdefault(
                "symbol",
                _generate_treeherder_symbol(
                    dep_job.task.get("extra", {}).get("treeherder", {}).get("symbol")
                ),
            )
            treeherder.setdefault("kind", "build")

        label = job["label"]
        description = (
            "Initial Signing for locale '{locale}' for build '"
            "{build_platform}/{build_type}'".format(
                locale=attributes.get("locale", "en-US"),
                build_platform=build_platform,
                build_type=attributes.get("build_type"),
            )
        )

        # Prefer attributes set by an earlier transform; otherwise inherit
        # from the dependent build.
        attributes = (
            job["attributes"]
            if job.get("attributes")
            else copy_attributes_from_dependent_job(dep_job)
        )
        attributes["signed"] = True

        if "linux" in build_platform:
            attributes["release_artifacts"] = ["public/build/KEY"]

        if dep_job.attributes.get("chunk_locales"):
            # Used for l10n attribute passthrough
            attributes["chunk_locales"] = dep_job.attributes.get("chunk_locales")

        signing_cert_scope = get_signing_cert_scope_per_platform(
            build_platform, is_shippable, config
        )
        # Shippable builds use the release signing pool; others dep-signing.
        worker_type_alias = "linux-signing" if is_shippable else "linux-depsigning"
        task = {
            "label": label,
            "description": description,
            "worker": {
                "implementation": "scriptworker-signing",
                "upstream-artifacts": job["upstream-artifacts"],
                "max-run-time": job.get("max-run-time", 3600),
            },
            "scopes": [signing_cert_scope] + signing_format_scopes,
            "dependencies": _generate_dependencies(job),
            "attributes": attributes,
            "run-on-projects": dep_job.attributes.get("run_on_projects"),
            "optimization": dep_job.optimization,
            "routes": job.get("routes", []),
            "shipping-product": job.get("shipping-product"),
            "shipping-phase": job.get("shipping-phase"),
        }
        if dep_job.kind in task["dependencies"]:
            task["if-dependencies"] = [dep_job.kind]

        # build-mac-{signing,notarization} uses signingscript instead of iscript
        if "macosx" in build_platform and config.kind.endswith("-mac-notarization"):
            task["worker"]["mac-behavior"] = "apple_notarization"
            task["scopes"] = [
                add_scope_prefix(config, "signing:cert:release-apple-notarization")
            ]
        elif "macosx" in build_platform:
            # iscript overrides
            task["worker"]["mac-behavior"] = "mac_sign_and_pkg"

            # Remap the linux alias chosen above to its mac counterpart.
            worker_type_alias_map = {
                "linux-depsigning": "mac-depsigning",
                "linux-signing": "mac-signing",
            }
            assert worker_type_alias in worker_type_alias_map, (
                "Make sure to adjust the below worker_type_alias logic for "
                "mac if you change the signing workerType aliases!"
                " ({} not found in mapping)".format(worker_type_alias)
            )
            worker_type_alias = worker_type_alias_map[worker_type_alias]
        for attr in ("entitlements-url", "requirements-plist-url"):
            if job.get(attr):
                task["worker"][attr] = job[attr]

        task["worker-type"] = worker_type_alias
        if treeherder:
            task["treeherder"] = treeherder
        if job.get("extra"):
            task["extra"] = job["extra"]
        # we may have reduced the priority for partner jobs, otherwise task.py will set it
        if job.get("priority"):
            task["priority"] = job["priority"]

        yield task
+
+
+def _generate_dependencies(job):
+ if isinstance(job.get("dependent-tasks"), dict):
+ deps = {}
+ for k, v in job["dependent-tasks"].items():
+ deps[k] = v.label
+ return deps
+ return {job["depname"]: job["primary-dependency"].label}
+
+
+def _generate_treeherder_platform(dep_th_platform, build_platform, build_type):
+ if "-pgo" in build_platform:
+ actual_build_type = "pgo"
+ elif "-ccov" in build_platform:
+ actual_build_type = "ccov"
+ else:
+ actual_build_type = build_type
+ return f"{dep_th_platform}/{actual_build_type}"
+
+
+def _generate_treeherder_symbol(build_symbol):
+ symbol = build_symbol + "s"
+ return symbol
diff --git a/taskcluster/gecko_taskgraph/transforms/source_checksums_signing.py b/taskcluster/gecko_taskgraph/transforms/source_checksums_signing.py
new file mode 100644
index 0000000000..60c5f30ed3
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/source_checksums_signing.py
@@ -0,0 +1,83 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the checksums signing task into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from voluptuous import Optional
+
+from gecko_taskgraph.loader.single_dep import schema
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
+from gecko_taskgraph.util.scriptworker import get_signing_cert_scope
+
# Schema for checksums-signing jobs, extending the single_dep loader schema.
checksums_signing_description_schema = schema.extend(
    {
        # Unique label for this task.
        Optional("label"): str,
        # Overrides for the treeherder defaults set in the transform below.
        Optional("treeherder"): task_description_schema["treeherder"],
        Optional("shipping-product"): task_description_schema["shipping-product"],
        Optional("shipping-phase"): task_description_schema["shipping-phase"],
    }
)

transforms = TransformSequence()
transforms.add_validate(checksums_signing_description_schema)
+
+
@transforms.add
def make_checksums_signing_description(config, jobs):
    """Turn a checksums-signing job into a full task description.

    GPG-signs (autograph_gpg) the release-source checksums file produced by
    the upstream beetmover task.
    """
    for job in jobs:
        dep_job = job["primary-dependency"]
        # NOTE(review): this assignment is dead — `attributes` is rebuilt
        # unconditionally from copy_attributes_from_dependent_job() below
        # before first use.
        attributes = dep_job.attributes

        treeherder = job.get("treeherder", {})
        treeherder.setdefault("symbol", "css(N)")
        dep_th_platform = (
            dep_job.task.get("extra", {})
            .get("treeherder", {})
            .get("machine", {})
            .get("platform", "")
        )
        treeherder.setdefault("platform", f"{dep_th_platform}/opt")
        treeherder.setdefault("tier", 1)
        treeherder.setdefault("kind", "build")

        label = job["label"]
        description = "Signing of release-source checksums file"
        # The upstream beetmover task holds the checksums artifact.
        dependencies = {"beetmover": dep_job.label}

        attributes = copy_attributes_from_dependent_job(dep_job)

        upstream_artifacts = [
            {
                "taskId": {"task-reference": "<beetmover>"},
                "taskType": "beetmover",
                "paths": [
                    "public/target-source.checksums",
                ],
                "formats": ["autograph_gpg"],
            }
        ]

        signing_cert_scope = get_signing_cert_scope(config)

        task = {
            "label": label,
            "description": description,
            "worker-type": "linux-signing",
            "worker": {
                "implementation": "scriptworker-signing",
                "upstream-artifacts": upstream_artifacts,
                "max-run-time": 3600,
            },
            "scopes": [
                signing_cert_scope,
            ],
            "dependencies": dependencies,
            "attributes": attributes,
            "run-on-projects": dep_job.attributes.get("run_on_projects"),
            "treeherder": treeherder,
        }

        yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/source_test.py b/taskcluster/gecko_taskgraph/transforms/source_test.py
new file mode 100644
index 0000000000..b480fcad02
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/source_test.py
@@ -0,0 +1,270 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Source-test jobs can run on multiple platforms. These transforms allow jobs
+with either `platform` or a list of `platforms`, and set the appropriate
+treeherder configuration and attributes for that platform.
+"""
+
+
+import copy
+import os
+
+import taskgraph
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.attributes import keymatch
+from taskgraph.util.schema import Schema, optionally_keyed_by, resolve_keyed_by
+from taskgraph.util.treeherder import join_symbol, split_symbol
+from voluptuous import Any, Extra, Optional, Required
+
+from gecko_taskgraph.transforms.job import job_description_schema
+from gecko_taskgraph.util.hg import get_json_automationrelevance
+
source_test_description_schema = Schema(
    {
        # most fields are passed directly through as job fields, and are not
        # repeated here
        Extra: object,
        # The platform on which this task runs. This will be used to set up attributes
        # (for try selection) and treeherder metadata (for display). If given as a list,
        # the job will be "split" into multiple tasks, one with each platform.
        Required("platform"): Any(str, [str]),
        # Build labels required for the task. If this key is provided it must
        # contain a build label for the task platform.
        # The task will then depend on a build task, and the installer url will be
        # saved to the GECKO_INSTALLER_URL environment variable.
        Optional("require-build"): optionally_keyed_by("project", {str: str}),
        # These fields can be keyed by "platform", and are otherwise identical to
        # job descriptions.
        Required("worker-type"): optionally_keyed_by(
            "platform", job_description_schema["worker-type"]
        ),
        Required("worker"): optionally_keyed_by(
            "platform", job_description_schema["worker"]
        ),
        # Python versions to split the job across (see split_python below).
        Optional("python-version"): [int],
        Optional("dependencies"): {
            k: optionally_keyed_by("platform", v)
            for k, v in job_description_schema["dependencies"].items()
        },
        # A list of artifacts to install from 'fetch' tasks.
        Optional("fetches"): {
            str: optionally_keyed_by(
                "platform", job_description_schema["fetches"][str]
            ),
        },
    }
)

transforms = TransformSequence()

transforms.add_validate(source_test_description_schema)
+
+
+@transforms.add
def set_job_name(config, jobs):
    """Prefix each job's name with the stem of the file it was defined in."""
    for job in jobs:
        # Jobs defined directly in kind.yml keep their plain name.
        if "job-from" in job and job["job-from"] != "kind.yml":
            stem, _ = os.path.splitext(job["job-from"])
            job["name"] = f"{stem}-{job['name']}"
        yield job
+
+
+@transforms.add
def expand_platforms(config, jobs):
    """Split jobs with a list of platforms into one job per platform."""
    for job in jobs:
        platforms = job["platform"]
        if isinstance(platforms, str):
            # Already a single platform; pass straight through.
            yield job
            continue

        for platform in platforms:
            split_job = copy.deepcopy(job)
            split_job["platform"] = platform

            # Tag whichever naming field the job uses with the platform.
            key = "name" if "name" in split_job else "label"
            split_job[key] = f"{split_job[key]}-{platform}"
            yield split_job
+
+
@transforms.add
def split_python(config, jobs):
    """Split jobs with a ``python-version`` list into one job per version."""
    for job in jobs:
        key = "python-version"
        versions = job.pop(key, [])
        if not versions:
            # Not keyed by python version; pass through unchanged.
            yield job
            continue
        for version in versions:
            group = f"py{version}"
            pyjob = copy.deepcopy(job)
            if "name" in pyjob:
                pyjob["name"] += f"-{group}"
            else:
                pyjob["label"] += f"-{group}"
            # Regroup the treeherder symbol under e.g. py3(...).
            symbol = split_symbol(pyjob["treeherder"]["symbol"])[1]
            pyjob["treeherder"]["symbol"] = join_symbol(group, symbol)
            # The run layer picks the interpreter from this field.
            pyjob["run"][key] = version
            yield pyjob
+
+
@transforms.add
def split_jsshell(config, jobs):
    """Split jsshell benchmark jobs into one job per JS shell (sm / v8)."""
    all_shells = {"sm": "Spidermonkey", "v8": "Google V8"}

    for job in jobs:
        if not job["name"].startswith("jsshell"):
            yield job
            continue

        test = job.pop("test")
        # Default to running against every known shell.
        for shell in job.get("shell", all_shells.keys()):
            assert shell in all_shells

            new_job = copy.deepcopy(job)
            new_job["name"] = "{}-{}".format(new_job["name"], shell)
            new_job["description"] = "{} on {}".format(
                new_job["description"], all_shells[shell]
            )
            new_job["shell"] = shell

            # Group treeherder symbols per shell, e.g. js-bench-sm(...).
            group = f"js-bench-{shell}"
            symbol = split_symbol(new_job["treeherder"]["symbol"])[1]
            new_job["treeherder"]["symbol"] = join_symbol(group, symbol)

            # Substitute the shell name (and test) into the mach command.
            run = new_job["run"]
            run["mach"] = run["mach"].format(
                shell=shell, SHELL=shell.upper(), test=test
            )
            yield new_job
+
+
def add_build_dependency(config, job):
    """
    Add build dependency to the job and installer_url to env.
    """
    platform = job["platform"]
    candidate_labels = job.pop("require-build", {})
    matches = keymatch(candidate_labels, platform)

    if not matches:
        raise Exception(
            "No build platform found. "
            f"Define 'require-build' for {platform} in the task config."
        )
    if len(matches) > 1:
        raise Exception(f"More than one build platform found for '{platform}'.")

    # Exactly one matching build label; record it as the 'build' dependency.
    job.setdefault("dependencies", {})["build"] = matches[0]
+
+
@transforms.add
def handle_platform(config, jobs):
    """
    Handle the 'platform' property, setting up treeherder context as well as
    try-related attributes.
    """
    # Fields that may be keyed by platform and/or project.
    fields = [
        "always-target",
        "fetches.toolchain",
        "require-build",
        "worker-type",
        "worker",
    ]

    for job in jobs:
        platform = job["platform"]

        for field in fields:
            resolve_keyed_by(
                job, field, item_name=job["name"], project=config.params["project"]
            )
        # Dependencies may also be keyed by platform.
        for field in job.get("dependencies", {}):
            resolve_keyed_by(
                job,
                f"dependencies.{field}",
                item_name=job["name"],
                project=config.params["project"],
            )

        if "treeherder" in job:
            job["treeherder"].setdefault("platform", platform)

        if "require-build" in job:
            add_build_dependency(config, job)

        # 'platform' is specific to this transform; drop it before the job
        # description schema sees it.
        del job["platform"]
        yield job
+
+
+@transforms.add
def handle_shell(config, jobs):
    """
    Handle the 'shell' property.
    """
    # Fields that may be keyed by shell.
    keyed_fields = (
        "run-on-projects",
        "worker.env",
    )

    for job in jobs:
        if not job.get("shell"):
            yield job
            continue

        for field in keyed_fields:
            resolve_keyed_by(job, field, item_name=job["name"])

        # 'shell' is only used for keyed-by resolution; drop it.
        del job["shell"]
        yield job
+
+
+@transforms.add
def set_code_review_env(config, jobs):
    """
    Add a CODE_REVIEW environment variable when running in code-review bot mode
    """
    code_review_mode = config.params["target_tasks_method"] == "codereview"

    for job in jobs:
        attributes = job.get("attributes", {})
        if code_review_mode and attributes.get("code-review") is True:
            job["worker"].setdefault("env", {})["CODE_REVIEW"] = "1"

        yield job
+
+
@transforms.add
def set_base_revision_in_tgdiff(config, jobs):
    """Give the taskgraph-diff job the base revision to diff against."""
    # Don't attempt to download 'json-automation' locally as the revision may
    # not exist in the repository.
    if not os.environ.get("MOZ_AUTOMATION") or taskgraph.fast:
        yield from jobs
        return

    data = get_json_automationrelevance(
        config.params["head_repository"], config.params["head_rev"]
    )
    for job in jobs:
        if job["name"] != "taskgraph-diff":
            yield job
            continue

        # The first parent of the push's first changeset is the base revision.
        job["run"]["command-context"] = {
            "base_rev": data["changesets"][0]["parents"][0]
        }
        yield job
+
+
+@transforms.add
def set_worker_exit_code(config, jobs):
    """Ensure every worker retries on exit status 137 (128+9, i.e. SIGKILL)."""
    for job in jobs:
        retry_codes = job["worker"].setdefault("retry-exit-status", [])
        if 137 not in retry_codes:
            retry_codes.append(137)
        yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/spidermonkey.py b/taskcluster/gecko_taskgraph/transforms/spidermonkey.py
new file mode 100644
index 0000000000..8e652f1668
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/spidermonkey.py
@@ -0,0 +1,21 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import copy
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+
+transforms = TransformSequence()
+
+
+@transforms.add
def handle_keyed_by(config, jobs):
    """Resolve fields that can be keyed by platform, etc."""
    keyed_fields = ("fetches.toolchain",)
    for job in jobs:
        # Deep-copy so keyed-by resolution never mutates dicts shared with
        # other jobs.
        resolved = copy.deepcopy(job)
        for field in keyed_fields:
            resolve_keyed_by(item=resolved, field=field, item_name=resolved["name"])

        yield resolved
diff --git a/taskcluster/gecko_taskgraph/transforms/split_by_locale.py b/taskcluster/gecko_taskgraph/transforms/split_by_locale.py
new file mode 100644
index 0000000000..ae68ab5051
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/split_by_locale.py
@@ -0,0 +1,79 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+This transform splits the jobs it receives into per-locale tasks. Locales are
+provided by the `locales-file`.
+"""
+
+from copy import deepcopy
+from pprint import pprint
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import Schema
+from voluptuous import Extra, Optional, Required
+
+from gecko_taskgraph.transforms.l10n import parse_locales_file
+
+transforms = TransformSequence()
+
split_by_locale_schema = Schema(
    {
        # The file to pull locale information from. This should be a json file
        # such as browser/locales/l10n-changesets.json.
        Required("locales-file"): str,
        # The platform name in the form used by the locales files. Defaults to
        # attributes.build_platform if not provided.
        Optional("locale-file-platform"): str,
        # A list of properties elsewhere in the job that need to have the locale
        # name substituted into them. The referenced properties may be strings
        # or lists. In the case of the latter, all list values will have
        # substitutions performed.
        Optional("properties-with-locale"): [str],
        # All other job fields pass through untouched.
        Extra: object,
    }
)


transforms.add_validate(split_by_locale_schema)
+
+
+@transforms.add
def add_command(config, jobs):
    """Split each job into one job per locale from `locales-file`.

    For every locale, a deep copy of the job is yielded with
    ``attributes.locale`` set and ``{locale}`` substituted into every
    property named in ``properties-with-locale`` (dotted paths supported;
    values may be strings or lists of strings).

    Raises:
        Exception: if a named property cannot be found, or its value is
            neither a string nor a list.
    """
    # pformat (not pprint, which prints and returns None) so exception
    # messages actually contain the job contents.
    from pprint import pformat

    for job in jobs:
        locales_file = job.pop("locales-file")
        # `properties-with-locale` is Optional in the schema, so default to
        # no substitutions instead of raising KeyError.
        properties_with_locale = job.pop("properties-with-locale", [])
        build_platform = job.pop(
            "locale-file-platform", job["attributes"]["build_platform"]
        )

        for locale in parse_locales_file(locales_file, build_platform):
            locale_job = deepcopy(job)
            locale_job["attributes"]["locale"] = locale
            for prop in properties_with_locale:
                # Walk dotted paths (e.g. "worker.env.LOCALE") down to the
                # containing dict and the final key.
                container, subfield = locale_job, prop
                while "." in subfield:
                    f, subfield = subfield.split(".", 1)
                    if f not in container:
                        raise Exception(
                            f"Unable to find property {prop} to perform locale substitution on. Job is:\n{pformat(job)}"
                        )
                    container = container[f]
                    if not isinstance(container, dict):
                        raise Exception(
                            f"{container} is not a dict, cannot perform locale substitution. Job is:\n{pformat(job)}"
                        )

                if isinstance(container[subfield], str):
                    container[subfield] = container[subfield].format(locale=locale)
                elif isinstance(container[subfield], list):
                    for i in range(len(container[subfield])):
                        container[subfield][i] = container[subfield][i].format(
                            locale=locale
                        )
                else:
                    raise Exception(
                        f"Don't know how to substitute locale for value of type: {type(container[subfield])}; value is: {container[subfield]}"
                    )

            yield locale_job
diff --git a/taskcluster/gecko_taskgraph/transforms/startup_test.py b/taskcluster/gecko_taskgraph/transforms/startup_test.py
new file mode 100644
index 0000000000..2660ef6e93
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/startup_test.py
@@ -0,0 +1,40 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
+@transforms.add
def add_command(config, jobs):
    """Build the does_it_crash.py invocation for each startup-test job."""
    for job in jobs:
        extra_config = job.pop("extra-config")
        upstream_kind = extra_config["upstream_kind"]
        upstream_artifact = extra_config["upstream_artifact"]
        binary = extra_config["binary"]
        # <task-kind>/path artifact reference, resolved downstream.
        package_to_test = f"<{upstream_kind}/public/build/{upstream_artifact}>"

        crash_check = (
            "python testing/mozharness/scripts/does_it_crash.py "
            f"--run-for 30 --thing-url {package_to_test} --thing-to-run {binary}"
        )

        if job["attributes"]["build_platform"].startswith("linux"):
            # Linux needs a display: start xvfb first and invoke through
            # mach's python3.
            job["run"]["command"] = {
                "artifact-reference": ". $HOME/scripts/xvfb.sh && start_xvfb '1600x1200x24' 0 && "
                "python3 ./mach " + crash_check
            }
        else:
            job["run"]["mach"] = {"artifact-reference": crash_check}

        yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/strip_dependent_task.py b/taskcluster/gecko_taskgraph/transforms/strip_dependent_task.py
new file mode 100644
index 0000000000..4e1ec8783a
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/strip_dependent_task.py
@@ -0,0 +1,17 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+FIXME
+"""
+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
+@transforms.add
def strip_dependent_task(config, jobs):
    """Drop the 'primary-dependency' key before the job leaves this kind."""
    for job in jobs:
        # pop (without default) matches `del`: KeyError if the key is absent.
        job.pop("primary-dependency")
        yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/task.py b/taskcluster/gecko_taskgraph/transforms/task.py
new file mode 100644
index 0000000000..e823e96b21
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/task.py
@@ -0,0 +1,2266 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+These transformations take a task description and turn it into a TaskCluster
+task definition (along with attributes, label, etc.). The input to these
+transformations is generic to any kind of task, but abstracts away some of the
+complexities of worker implementations, scopes, and treeherder annotations.
+"""
+
+
+import datetime
+import hashlib
+import os
+import re
+import time
+
+import attr
+from mozbuild.util import memoize
+from taskcluster.utils import fromNow
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.keyed_by import evaluate_keyed_by
+from taskgraph.util.schema import (
+ Schema,
+ optionally_keyed_by,
+ resolve_keyed_by,
+ taskref_or_string,
+ validate_schema,
+)
+from taskgraph.util.treeherder import split_symbol
+from voluptuous import All, Any, Extra, Match, NotIn, Optional, Required
+
+from gecko_taskgraph import GECKO, MAX_DEPENDENCIES
+from gecko_taskgraph.optimize.schema import OptimizationSchema
+from gecko_taskgraph.transforms.job.common import get_expiration
+from gecko_taskgraph.util import docker as dockerutil
+from gecko_taskgraph.util.attributes import TRUNK_PROJECTS, is_try, release_level
+from gecko_taskgraph.util.copy_task import copy_task
+from gecko_taskgraph.util.hash import hash_path
+from gecko_taskgraph.util.partners import get_partners_to_be_published
+from gecko_taskgraph.util.scriptworker import BALROG_ACTIONS, get_release_config
+from gecko_taskgraph.util.signed_artifacts import get_signed_artifacts
+from gecko_taskgraph.util.workertypes import get_worker_type, worker_type_implementation
+
# Absolute path to the in-tree run-task wrapper script; its content hash is
# used to version run-task-controlled cache names (see _run_task_suffix).
RUN_TASK = os.path.join(GECKO, "taskcluster", "scripts", "run-task")

# GCS project handed to sccache via the SCCACHE_GCS_PROJECT env variable.
SCCACHE_GCS_PROJECT = "sccache-3"
+
+
@memoize
def _run_task_suffix():
    """String to append to cache names under control of run-task.

    The suffix is the first 20 hex characters of the hash of the run-task
    script itself, so all such caches are invalidated whenever run-task
    changes.
    """
    return hash_path(RUN_TASK)[0:20]
+
+
+def _compute_geckoview_version(app_version, moz_build_date):
+ """Geckoview version string that matches geckoview gradle configuration"""
+ # Must be synchronized with /mobile/android/geckoview/build.gradle computeVersionCode(...)
+ version_without_milestone = re.sub(r"a[0-9]", "", app_version, 1)
+ parts = version_without_milestone.split(".")
+ return f"{parts[0]}.{parts[1]}.{moz_build_date}"
+
+
# A task description is a general description of a TaskCluster task
task_description_schema = Schema(
    {
        # the label for this task
        Required("label"): str,
        # description of the task (for metadata)
        Required("description"): str,
        # attributes for this task
        Optional("attributes"): {str: object},
        # relative path (from config.path) to the file task was defined in
        Optional("job-from"): str,
        # dependencies of this task, keyed by name; these are passed through
        # verbatim and subject to the interpretation of the Task's get_dependencies
        # method.
        Optional("dependencies"): {
            All(
                str,
                NotIn(
                    ["self", "decision"],
                    # Fixed typos: "'self`" -> "'self'", "depdency" -> "dependency"
                    "Can't use 'self' or 'decision' as dependency names.",
                ),
            ): object,
        },
        # Soft dependencies of this task, as a list of tasks labels
        Optional("soft-dependencies"): [str],
        # Dependencies that must be scheduled in order for this task to run.
        Optional("if-dependencies"): [str],
        Optional("requires"): Any("all-completed", "all-resolved"),
        # expiration and deadline times, relative to task creation, with units
        # (e.g., "14 days"). Defaults are set based on the project.
        Optional("expires-after"): str,
        Optional("deadline-after"): str,
        Optional("expiration-policy"): str,
        # custom routes for this task; the default treeherder routes will be added
        # automatically
        Optional("routes"): [str],
        # custom scopes for this task; any scopes required for the worker will be
        # added automatically. The following parameters will be substituted in each
        # scope:
        #  {level} -- the scm level of this push
        #  {project} -- the project of this push
        Optional("scopes"): [str],
        # Tags
        Optional("tags"): {str: str},
        # custom "task.extra" content
        Optional("extra"): {str: object},
        # treeherder-related information; see
        # https://firefox-ci-tc.services.mozilla.com/schemas/taskcluster-treeherder/v1/task-treeherder-config.json
        # If not specified, no treeherder extra information or routes will be
        # added to the task
        Optional("treeherder"): {
            # either a bare symbol, or "grp(sym)".
            "symbol": str,
            # the job kind
            "kind": Any("build", "test", "other"),
            # tier for this task
            "tier": int,
            # task platform, in the form platform/collection, used to set
            # treeherder.machine.platform and treeherder.collection or
            # treeherder.labels
            "platform": Match("^[A-Za-z0-9_-]{1,50}/[A-Za-z0-9_-]{1,50}$"),
        },
        # information for indexing this build so its artifacts can be discovered;
        # if omitted, the build will not be indexed.
        Optional("index"): {
            # the name of the product this build produces
            "product": str,
            # the names to use for this job in the TaskCluster index
            "job-name": str,
            # Type of gecko v2 index to use
            "type": Any(
                "generic",
                "l10n",
                "shippable",
                "shippable-l10n",
                "android-shippable",
                "android-shippable-with-multi-l10n",
                "shippable-with-multi-l10n",
            ),
            # The rank that the task will receive in the TaskCluster
            # index.  A newly completed task supersedes the currently
            # indexed task iff it has a higher rank.  If unspecified,
            # 'by-tier' behavior will be used.
            "rank": Any(
                # Rank is equal the timestamp of the build_date for tier-1
                # tasks, and zero for non-tier-1.  This sorts tier-{2,3}
                # builds below tier-1 in the index.
                "by-tier",
                # Rank is given as an integer constant (e.g. zero to make
                # sure a task is last in the index).
                int,
                # Rank is equal to the timestamp of the build_date.  This
                # option can be used to override the 'by-tier' behavior
                # for non-tier-1 tasks.
                "build_date",
            ),
        },
        # The `run_on_projects` attribute, defaulting to "all".  This dictates the
        # projects on which this task should be included in the target task set.
        # See the attributes documentation for details.
        Optional("run-on-projects"): optionally_keyed_by("build-platform", [str]),
        # Like `run_on_projects`, `run-on-hg-branches` defaults to "all".
        Optional("run-on-hg-branches"): optionally_keyed_by("project", [str]),
        # The `shipping_phase` attribute, defaulting to None. This specifies the
        # release promotion phase that this task belongs to.
        Required("shipping-phase"): Any(
            None,
            "build",
            "promote",
            "push",
            "ship",
        ),
        # The `shipping_product` attribute, defaulting to None. This specifies the
        # release promotion product that this task belongs to.
        Required("shipping-product"): Any(None, str),
        # The `always-target` attribute will cause the task to be included in the
        # target_task_graph regardless of filtering. Tasks included in this manner
        # will be candidates for optimization even when `optimize_target_tasks` is
        # False, unless the task was also explicitly chosen by the target_tasks
        # method.
        Required("always-target"): bool,
        # Optimization to perform on this task during the optimization phase.
        # Optimizations are defined in taskcluster/gecko_taskgraph/optimize.py.
        Required("optimization"): OptimizationSchema,
        # the provisioner-id/worker-type for the task.  The following parameters will
        # be substituted in this string:
        #  {level} -- the scm level of this push
        "worker-type": str,
        # Whether the job should use sccache compiler caching.
        Required("use-sccache"): bool,
        # information specific to the worker implementation that will run this task
        Optional("worker"): {
            Required("implementation"): str,
            Extra: object,
        },
        # Override the default priority for the project
        Optional("priority"): str,
    }
)
+
# Upstream schema describing the task.extra.treeherder configuration.
TC_TREEHERDER_SCHEMA_URL = (
    "https://github.com/taskcluster/taskcluster-treeherder/"
    "blob/master/schemas/task-treeherder-config.yml"
)


# Error raised when a treeherder group symbol has no configured name.
UNKNOWN_GROUP_NAME = (
    "Treeherder group {} (from {}) has no name; add it to taskcluster/ci/config.yml"
)

# Gecko-v2 index route templates for ordinary (non-shippable) tasks.
V2_ROUTE_TEMPLATES = [
    "index.{trust-domain}.v2.{project}.latest.{product}.{job-name}",
    "index.{trust-domain}.v2.{project}.pushdate.{build_date_long}.{product}.{job-name}",
    "index.{trust-domain}.v2.{project}.pushdate.{build_date}.latest.{product}.{job-name}",
    "index.{trust-domain}.v2.{project}.pushlog-id.{pushlog_id}.{product}.{job-name}",
    "index.{trust-domain}.v2.{project}.revision.{branch_rev}.{product}.{job-name}",
]

# {central, inbound, autoland} write to a "trunk" index prefix. This facilitates
# walking of tasks with similar configurations.
V2_TRUNK_ROUTE_TEMPLATES = [
    "index.{trust-domain}.v2.trunk.revision.{branch_rev}.{product}.{job-name}",
]

# Index routes for shippable builds.
V2_SHIPPABLE_TEMPLATES = [
    "index.{trust-domain}.v2.{project}.shippable.latest.{product}.{job-name}",
    "index.{trust-domain}.v2.{project}.shippable.{build_date}.revision.{branch_rev}.{product}.{job-name}",  # noqa - too long
    "index.{trust-domain}.v2.{project}.shippable.{build_date}.latest.{product}.{job-name}",
    "index.{trust-domain}.v2.{project}.shippable.revision.{branch_rev}.{product}.{job-name}",
]

# Index routes for shippable l10n repacks (per-locale).
V2_SHIPPABLE_L10N_TEMPLATES = [
    "index.{trust-domain}.v2.{project}.shippable.latest.{product}-l10n.{job-name}.{locale}",
    "index.{trust-domain}.v2.{project}.shippable.{build_date}.revision.{branch_rev}.{product}-l10n.{job-name}.{locale}",  # noqa - too long
    "index.{trust-domain}.v2.{project}.shippable.{build_date}.latest.{product}-l10n.{job-name}.{locale}",  # noqa - too long
    "index.{trust-domain}.v2.{project}.shippable.revision.{branch_rev}.{product}-l10n.{job-name}.{locale}",  # noqa - too long
]

# Index routes for non-shippable l10n repacks (per-locale).
V2_L10N_TEMPLATES = [
    "index.{trust-domain}.v2.{project}.revision.{branch_rev}.{product}-l10n.{job-name}.{locale}",
    "index.{trust-domain}.v2.{project}.pushdate.{build_date_long}.{product}-l10n.{job-name}.{locale}",  # noqa - too long
    "index.{trust-domain}.v2.{project}.pushlog-id.{pushlog_id}.{product}-l10n.{job-name}.{locale}",
    "index.{trust-domain}.v2.{project}.latest.{product}-l10n.{job-name}.{locale}",
]

# This index is specifically for builds that include geckoview releases,
# so we can hard-code the project to "geckoview"
V2_GECKOVIEW_RELEASE = "index.{trust-domain}.v2.{project}.geckoview-version.{geckoview-version}.{product}.{job-name}"  # noqa - too long

# the root of the treeherder routes
TREEHERDER_ROUTE_ROOT = "tc-treeherder"
+
+
def get_branch_rev(config):
    """Return the head revision parameter for the project repository."""
    prefix = config.graph_config["project-repo-param-prefix"]
    return config.params[f"{prefix}head_rev"]
+
+
def get_branch_repo(config):
    """Return the head repository parameter for the project repository."""
    prefix = config.graph_config["project-repo-param-prefix"]
    return config.params[f"{prefix}head_repository"]
+
+
@memoize
def get_default_priority(graph_config, project):
    """Return the default task priority for *project*, resolved from the
    keyed-by `task-priority` setting in the graph config."""
    return evaluate_keyed_by(
        graph_config["task-priority"], "Graph Config", {"project": project}
    )
+
+
# define a collection of payload builders, depending on the worker implementation
payload_builders = {}


@attr.s(frozen=True)
class PayloadBuilder:
    # `schema` validates the task's `worker` section for this implementation;
    # `builder` is the function that fills in task_def["payload"].
    schema = attr.ib(type=Schema)
    builder = attr.ib()
+
+
def payload_builder(name, schema):
    """Decorator factory registering a payload builder for implementation *name*.

    The given schema is extended with the common `implementation`/`os` keys
    before being stored, alongside the builder function, in
    `payload_builders`.
    """
    full_schema = Schema(
        {Required("implementation"): name, Optional("os"): str}
    ).extend(schema)

    def register(func):
        payload_builders[name] = PayloadBuilder(full_schema, func)
        return func

    return register
+
+
# Registry of index builders, keyed by index type implementation.
index_builders = {}


def index_builder(name):
    """Decorator: register the wrapped function as the builder for index
    type *name*."""

    def register(func):
        index_builders[name] = func
        return func

    return register
+
+
UNSUPPORTED_INDEX_PRODUCT_ERROR = """\
The gecko-v2 product {product} is not in the list of configured products in
`taskcluster/ci/config.yml'.
"""


def verify_index(config, index):
    """Raise unless the index's `product` is declared in the graph config."""
    product = index["product"]
    configured = config.graph_config["index"]["products"]
    if product in configured:
        return
    raise Exception(UNSUPPORTED_INDEX_PRODUCT_ERROR.format(product=product))
+
+
+@payload_builder(
+ "docker-worker",
+ schema={
+ Required("os"): "linux",
+ # For tasks that will run in docker-worker, this is the
+ # name of the docker image or in-tree docker image to run the task in. If
+ # in-tree, then a dependency will be created automatically. This is
+ # generally `desktop-test`, or an image that acts an awful lot like it.
+ Required("docker-image"): Any(
+ # a raw Docker image path (repo/image:tag)
+ str,
+ # an in-tree generated docker image (from `taskcluster/docker/<name>`)
+ {"in-tree": str},
+ # an indexed docker image
+ {"indexed": str},
+ ),
+ # worker features that should be enabled
+ Required("chain-of-trust"): bool,
+ Required("taskcluster-proxy"): bool,
+ Required("allow-ptrace"): bool,
+ Required("loopback-video"): bool,
+ Required("loopback-audio"): bool,
+ Required("docker-in-docker"): bool, # (aka 'dind')
+ Required("privileged"): bool,
+ # Paths to Docker volumes.
+ #
+ # For in-tree Docker images, volumes can be parsed from Dockerfile.
+ # This only works for the Dockerfile itself: if a volume is defined in
+ # a base image, it will need to be declared here. Out-of-tree Docker
+ # images will also require explicit volume annotation.
+ #
+ # Caches are often mounted to the same path as Docker volumes. In this
+ # case, they take precedence over a Docker volume. But a volume still
+ # needs to be declared for the path.
+ Optional("volumes"): [str],
+ Optional(
+ "required-volumes",
+ description=(
+ "Paths that are required to be volumes for performance reasons. "
+ "For in-tree images, these paths will be checked to verify that they "
+ "are defined as volumes."
+ ),
+ ): [str],
+ # caches to set up for the task
+ Optional("caches"): [
+ {
+ # only one type is supported by any of the workers right now
+ "type": "persistent",
+ # name of the cache, allowing re-use by subsequent tasks naming the
+ # same cache
+ "name": str,
+ # location in the task image where the cache will be mounted
+ "mount-point": str,
+ # Whether the cache is not used in untrusted environments
+ # (like the Try repo).
+ Optional("skip-untrusted"): bool,
+ }
+ ],
+ # artifacts to extract from the task image after completion
+ Optional("artifacts"): [
+ {
+ # type of artifact -- simple file, or recursive directory
+ "type": Any("file", "directory"),
+ # task image path from which to read artifact
+ "path": str,
+ # name of the produced artifact (root of the names for
+ # type=directory)
+ "name": str,
+ "expires-after": str,
+ }
+ ],
+ # environment variables
+ Required("env"): {str: taskref_or_string},
+ # the command to run; if not given, docker-worker will default to the
+ # command in the docker image
+ Optional("command"): [taskref_or_string],
+ # the maximum time to run, in seconds
+ Required("max-run-time"): int,
+ # the exit status code(s) that indicates the task should be retried
+ Optional("retry-exit-status"): [int],
+ # the exit status code(s) that indicates the caches used by the task
+ # should be purged
+ Optional("purge-caches-exit-status"): [int],
+ # Whether any artifacts are assigned to this worker
+ Optional("skip-artifacts"): bool,
+ },
+)
def build_docker_worker_payload(config, task, task_def):
    """Fill in task_def["payload"] (and required scopes) for a docker-worker
    task: resolve the docker image, worker features/capabilities, sccache
    env, artifacts, and run-task-versioned caches."""
    worker = task["worker"]
    level = int(config.params["level"])

    # Resolve the docker image to the form docker-worker expects; an in-tree
    # image also adds a dependency on the image-building task.
    image = worker["docker-image"]
    if isinstance(image, dict):
        if "in-tree" in image:
            name = image["in-tree"]
            docker_image_task = "docker-image-" + image["in-tree"]
            task.setdefault("dependencies", {})["docker-image"] = docker_image_task

            image = {
                "path": "public/image.tar.zst",
                "taskId": {"task-reference": "<docker-image>"},
                "type": "task-image",
            }

            # Find VOLUME in Dockerfile.
            volumes = dockerutil.parse_volumes(name)
            for v in sorted(volumes):
                if v in worker["volumes"]:
                    raise Exception(
                        "volume %s already defined; "
                        "if it is defined in a Dockerfile, "
                        "it does not need to be specified in the "
                        "worker definition" % v
                    )

                worker["volumes"].append(v)

        elif "indexed" in image:
            image = {
                "path": "public/image.tar.zst",
                "namespace": image["indexed"],
                "type": "indexed-image",
            }
        else:
            raise Exception("unknown docker image type")

    features = {}

    if worker.get("taskcluster-proxy"):
        features["taskclusterProxy"] = True

    if worker.get("allow-ptrace"):
        features["allowPtrace"] = True
        task_def["scopes"].append("docker-worker:feature:allowPtrace")

    if worker.get("chain-of-trust"):
        features["chainOfTrust"] = True

    if worker.get("docker-in-docker"):
        features["dind"] = True

    # Never enable sccache on the toolchains repo, as there is no benefit from it
    # because each push uses a different compiler.
    if task.get("use-sccache") and config.params["project"] != "toolchains":
        features["taskclusterProxy"] = True
        task_def["scopes"].append(
            "assume:project:taskcluster:{trust_domain}:level-{level}-sccache-buckets".format(
                trust_domain=config.graph_config["trust-domain"],
                level=config.params["level"],
            )
        )
        worker["env"]["USE_SCCACHE"] = "1"
        worker["env"]["SCCACHE_GCS_PROJECT"] = SCCACHE_GCS_PROJECT
        # Disable sccache idle shutdown.
        worker["env"]["SCCACHE_IDLE_TIMEOUT"] = "0"
    else:
        worker["env"]["SCCACHE_DISABLE"] = "1"

    capabilities = {}

    # Loopback audio/video devices each need a capability plus a scope.
    for lo in "audio", "video":
        if worker.get("loopback-" + lo):
            capitalized = "loopback" + lo.capitalize()
            devices = capabilities.setdefault("devices", {})
            devices[capitalized] = True
            task_def["scopes"].append("docker-worker:capability:device:" + capitalized)

    if worker.get("privileged"):
        capabilities["privileged"] = True
        task_def["scopes"].append("docker-worker:capability:privileged")

    task_def["payload"] = payload = {
        "image": image,
        "env": worker["env"],
    }
    if "command" in worker:
        payload["command"] = worker["command"]

    if "max-run-time" in worker:
        payload["maxRunTime"] = worker["max-run-time"]

    # NOTE(review): assumes the first command element is a plain string when
    # present (a task-reference dict would have no .endswith).
    run_task = payload.get("command", [""])[0].endswith("run-task")

    # run-task exits EXIT_PURGE_CACHES if there is a problem with caches.
    # Automatically retry the tasks and purge caches if we see this exit
    # code.
    # TODO move this closer to code adding run-task once bug 1469697 is
    # addressed.
    if run_task:
        worker.setdefault("retry-exit-status", []).append(72)
        worker.setdefault("purge-caches-exit-status", []).append(72)

    payload["onExitStatus"] = {}
    if "retry-exit-status" in worker:
        payload["onExitStatus"]["retry"] = worker["retry-exit-status"]
    if "purge-caches-exit-status" in worker:
        payload["onExitStatus"]["purgeCaches"] = worker["purge-caches-exit-status"]

    if "artifacts" in worker:
        artifacts = {}
        expires_policy = get_expiration(
            config, task.get("expiration-policy", "default")
        )
        now = datetime.datetime.utcnow()
        task_exp = task_def["expires"]["relative-datestamp"]
        task_exp_from_now = fromNow(task_exp)
        # Clamp each artifact's expiry so it never outlives the task itself.
        for artifact in worker["artifacts"]:
            art_exp = artifact.get("expires-after", expires_policy)
            expires = art_exp if fromNow(art_exp, now) < task_exp_from_now else task_exp
            artifacts[artifact["name"]] = {
                "path": artifact["path"],
                "type": artifact["type"],
                "expires": {"relative-datestamp": expires},
            }
        payload["artifacts"] = artifacts

    if isinstance(worker.get("docker-image"), str):
        out_of_tree_image = worker["docker-image"]
    else:
        out_of_tree_image = None
        image = worker.get("docker-image", {}).get("in-tree")

    if "caches" in worker:
        caches = {}

        # run-task knows how to validate caches.
        #
        # To help ensure new run-task features and bug fixes don't interfere
        # with existing caches, we seed the hash of run-task into cache names.
        # So, any time run-task changes, we should get a fresh set of caches.
        # This means run-task can make changes to cache interaction at any time
        # without regards for backwards or future compatibility.
        #
        # But this mechanism only works for in-tree Docker images that are built
        # with the current run-task! For out-of-tree Docker images, we have no
        # way of knowing their content of run-task. So, in addition to varying
        # cache names by the contents of run-task, we also take the Docker image
        # name into consideration. This means that different Docker images will
        # never share the same cache. This is a bit unfortunate. But it is the
        # safest thing to do. Fortunately, most images are defined in-tree.
        #
        # For out-of-tree Docker images, we don't strictly need to incorporate
        # the run-task content into the cache name. However, doing so preserves
        # the mechanism whereby changing run-task results in new caches
        # everywhere.

        # As an additional mechanism to force the use of different caches, the
        # string literal in the variable below can be changed. This is
        # preferred to changing run-task because it doesn't require images
        # to be rebuilt.
        cache_version = "v3"

        if run_task:
            suffix = f"{cache_version}-{_run_task_suffix()}"

            if out_of_tree_image:
                name_hash = hashlib.sha256(
                    out_of_tree_image.encode("utf-8")
                ).hexdigest()
                suffix += name_hash[0:12]

        else:
            suffix = cache_version

        skip_untrusted = is_try(config.params) or level == 1

        for cache in worker["caches"]:
            # Some caches aren't enabled in environments where we can't
            # guarantee certain behavior. Filter those out.
            if cache.get("skip-untrusted") and skip_untrusted:
                continue

            name = "{trust_domain}-level-{level}-{name}-{suffix}".format(
                trust_domain=config.graph_config["trust-domain"],
                level=config.params["level"],
                name=cache["name"],
                suffix=suffix,
            )

            caches[name] = cache["mount-point"]
            task_def["scopes"].append("docker-worker:cache:%s" % name)

        # Assertion: only run-task is interested in this.
        if run_task:
            payload["env"]["TASKCLUSTER_CACHES"] = ";".join(sorted(caches.values()))

        payload["cache"] = caches

    # And send down volumes information to run-task as well.
    if run_task and worker.get("volumes"):
        payload["env"]["TASKCLUSTER_VOLUMES"] = ";".join(sorted(worker["volumes"]))

    if payload.get("cache") and skip_untrusted:
        payload["env"]["TASKCLUSTER_UNTRUSTED_CACHES"] = "1"

    if features:
        payload["features"] = features
    if capabilities:
        payload["capabilities"] = capabilities

    check_caches_are_volumes(task)
    check_required_volumes(task)
+
+@payload_builder(
+ "generic-worker",
+ schema={
+ Required("os"): Any("windows", "macosx", "linux", "linux-bitbar"),
+ # see http://schemas.taskcluster.net/generic-worker/v1/payload.json
+ # and https://docs.taskcluster.net/reference/workers/generic-worker/payload
+ # command is a list of commands to run, sequentially
+ # on Windows, each command is a string, on OS X and Linux, each command is
+ # a string array
+ Required("command"): Any(
+ [taskref_or_string], [[taskref_or_string]] # Windows # Linux / OS X
+ ),
+ # artifacts to extract from the task image after completion; note that artifacts
+ # for the generic worker cannot have names
+ Optional("artifacts"): [
+ {
+ # type of artifact -- simple file, or recursive directory
+ "type": Any("file", "directory"),
+ # filesystem path from which to read artifact
+ "path": str,
+ # if not specified, path is used for artifact name
+ Optional("name"): str,
+ "expires-after": str,
+ }
+ ],
+ # Directories and/or files to be mounted.
+ # The actual allowed combinations are stricter than the model below,
+ # but this provides a simple starting point.
+ # See https://docs.taskcluster.net/reference/workers/generic-worker/payload
+ Optional("mounts"): [
+ {
+ # A unique name for the cache volume, implies writable cache directory
+ # (otherwise mount is a read-only file or directory).
+ Optional("cache-name"): str,
+ # Optional content for pre-loading cache, or mandatory content for
+ # read-only file or directory. Pre-loaded content can come from either
+ # a task artifact or from a URL.
+ Optional("content"): {
+ # *** Either (artifact and task-id) or url must be specified. ***
+ # Artifact name that contains the content.
+ Optional("artifact"): str,
+ # Task ID that has the artifact that contains the content.
+ Optional("task-id"): taskref_or_string,
+ # URL that supplies the content in response to an unauthenticated
+ # GET request.
+ Optional("url"): str,
+ },
+ # *** Either file or directory must be specified. ***
+ # If mounting a cache or read-only directory, the filesystem location of
+ # the directory should be specified as a relative path to the task
+ # directory here.
+ Optional("directory"): str,
+ # If mounting a file, specify the relative path within the task
+ # directory to mount the file (the file will be read only).
+ Optional("file"): str,
+ # Required if and only if `content` is specified and mounting a
+ # directory (not a file). This should be the archive format of the
+ # content (either pre-loaded cache or read-only directory).
+ Optional("format"): Any("rar", "tar.bz2", "tar.gz", "zip"),
+ }
+ ],
+ # environment variables
+ Required("env"): {str: taskref_or_string},
+ # the maximum time to run, in seconds
+ Required("max-run-time"): int,
+ # os user groups for test task workers
+ Optional("os-groups"): [str],
+ # feature for test task to run as administarotr
+ Optional("run-as-administrator"): bool,
+ # optional features
+ Required("chain-of-trust"): bool,
+ Optional("taskcluster-proxy"): bool,
+ # the exit status code(s) that indicates the task should be retried
+ Optional("retry-exit-status"): [int],
+ # Wether any artifacts are assigned to this worker
+ Optional("skip-artifacts"): bool,
+ },
+)
def build_generic_worker_payload(config, task, task_def):
    """Fill in task_def["payload"] (and required scopes) for a generic-worker
    task: command, retry exit codes, sccache env, artifacts, mounts, OS
    groups, and worker features."""
    worker = task["worker"]
    features = {}

    task_def["payload"] = {
        "command": worker["command"],
        "maxRunTime": worker["max-run-time"],
    }

    if worker["os"] == "windows":
        task_def["payload"]["onExitStatus"] = {
            "retry": [
                # These codes (on windows) indicate a process interruption,
                # rather than a task run failure. See bug 1544403.
                1073807364,  # process force-killed due to system shutdown
                3221225786,  # sigint (any interrupt)
            ]
        }
    if "retry-exit-status" in worker:
        task_def["payload"].setdefault("onExitStatus", {}).setdefault(
            "retry", []
        ).extend(worker["retry-exit-status"])
    if worker["os"] == "linux-bitbar":
        task_def["payload"].setdefault("onExitStatus", {}).setdefault("retry", [])
        # exit code 4 is used to indicate an intermittent android device error
        if 4 not in task_def["payload"]["onExitStatus"]["retry"]:
            task_def["payload"]["onExitStatus"]["retry"].extend([4])

    # `env` is required by the schema, so this aliases worker["env"].
    env = worker.get("env", {})

    # Never enable sccache on the toolchains repo, as there is no benefit from it
    # because each push uses a different compiler.
    if task.get("use-sccache") and config.params["project"] != "toolchains":
        features["taskclusterProxy"] = True
        task_def["scopes"].append(
            "assume:project:taskcluster:{trust_domain}:level-{level}-sccache-buckets".format(
                trust_domain=config.graph_config["trust-domain"],
                level=config.params["level"],
            )
        )
        env["USE_SCCACHE"] = "1"
        # (Consistency fix: was `worker["env"][...]`, which is the same dict;
        # use the local alias like the surrounding assignments.)
        env["SCCACHE_GCS_PROJECT"] = SCCACHE_GCS_PROJECT
        # Disable sccache idle shutdown.
        env["SCCACHE_IDLE_TIMEOUT"] = "0"
    else:
        env["SCCACHE_DISABLE"] = "1"

    if env:
        task_def["payload"]["env"] = env

    artifacts = []

    # Clamp each artifact's expiry so it never outlives the task itself.
    expires_policy = get_expiration(config, task.get("expiration-policy", "default"))
    now = datetime.datetime.utcnow()
    task_exp = task_def["expires"]["relative-datestamp"]
    task_exp_from_now = fromNow(task_exp)
    for artifact in worker.get("artifacts", []):
        art_exp = artifact.get("expires-after", expires_policy)
        # (Removed a redundant re-read of task_def["expires"] here; task_exp
        # is loop-invariant and already computed above.)
        expires = art_exp if fromNow(art_exp, now) < task_exp_from_now else task_exp
        a = {
            "path": artifact["path"],
            "type": artifact["type"],
            "expires": {"relative-datestamp": expires},
        }
        if "name" in artifact:
            a["name"] = artifact["name"]
        artifacts.append(a)

    if artifacts:
        task_def["payload"]["artifacts"] = artifacts

    # Need to copy over mounts, but rename keys to respect naming convention
    #   * 'cache-name' -> 'cacheName'
    #   * 'task-id' -> 'taskId'
    # All other key names are already suitable, and don't need renaming.
    mounts = copy_task(worker.get("mounts", []))
    for mount in mounts:
        if "cache-name" in mount:
            mount["cacheName"] = "{trust_domain}-level-{level}-{name}".format(
                trust_domain=config.graph_config["trust-domain"],
                level=config.params["level"],
                name=mount.pop("cache-name"),
            )
            task_def["scopes"].append(
                "generic-worker:cache:{}".format(mount["cacheName"])
            )
        if "content" in mount:
            if "task-id" in mount["content"]:
                mount["content"]["taskId"] = mount["content"].pop("task-id")
            if "artifact" in mount["content"]:
                # Non-public artifacts need an explicit get-artifact scope.
                if not mount["content"]["artifact"].startswith("public/"):
                    task_def["scopes"].append(
                        "queue:get-artifact:{}".format(mount["content"]["artifact"])
                    )

    if mounts:
        task_def["payload"]["mounts"] = mounts

    if worker.get("os-groups"):
        task_def["payload"]["osGroups"] = worker["os-groups"]
        task_def["scopes"].extend(
            [
                "generic-worker:os-group:{}/{}".format(task["worker-type"], group)
                for group in worker["os-groups"]
            ]
        )

    if worker.get("chain-of-trust"):
        features["chainOfTrust"] = True

    if worker.get("taskcluster-proxy"):
        features["taskclusterProxy"] = True

    if worker.get("run-as-administrator", False):
        features["runAsAdministrator"] = True
        task_def["scopes"].append(
            "generic-worker:run-as-administrator:{}".format(task["worker-type"]),
        )

    if features:
        task_def["payload"]["features"] = features
+
+@payload_builder(
+ "scriptworker-signing",
+ schema={
+ # the maximum time to run, in seconds
+ Required("max-run-time"): int,
+ # list of artifact URLs for the artifacts that should be signed
+ Required("upstream-artifacts"): [
+ {
+ # taskId of the task with the artifact
+ Required("taskId"): taskref_or_string,
+ # type of signing task (for CoT)
+ Required("taskType"): str,
+ # Paths to the artifacts to sign
+ Required("paths"): [str],
+ # Signing formats to use on each of the paths
+ Required("formats"): [str],
+ Optional("singleFileGlobs"): [str],
+ }
+ ],
+ # behavior for mac iscript
+ Optional("mac-behavior"): Any(
+ "apple_notarization",
+ "mac_sign_and_pkg",
+ "mac_geckodriver",
+ "mac_notarize_geckodriver",
+ "mac_single_file",
+ "mac_notarize_single_file",
+ ),
+ Optional("entitlements-url"): str,
+ Optional("requirements-plist-url"): str,
+ },
+)
def build_scriptworker_signing_payload(config, task, task_def):
    """Fill in the payload for a scriptworker signing task, and record the
    artifacts the signing run will produce in the task's
    `release_artifacts` attribute."""
    worker = task["worker"]

    task_def["payload"] = {
        "maxRunTime": worker["max-run-time"],
        "upstreamArtifacts": worker["upstream-artifacts"],
    }
    if worker.get("mac-behavior"):
        task_def["payload"]["behavior"] = worker["mac-behavior"]
    for attribute in ("entitlements-url", "requirements-plist-url"):
        if worker.get(attribute):
            task_def["payload"][attribute] = worker[attribute]

    # Accumulate the signed-output names for every upstream path into the
    # (sorted, de-duplicated) release_artifacts attribute.
    artifacts = set(task.setdefault("attributes", {}).get("release_artifacts", []))
    for upstream_artifact in worker["upstream-artifacts"]:
        for path in upstream_artifact["paths"]:
            artifacts.update(
                get_signed_artifacts(
                    input=path,
                    formats=upstream_artifact["formats"],
                    behavior=worker.get("mac-behavior"),
                )
            )
    task["attributes"]["release_artifacts"] = sorted(list(artifacts))
+
@payload_builder(
    "beetmover",
    schema={
        # the maximum time to run, in seconds
        Required("max-run-time"): int,
        # locale key, if this is a locale beetmover job
        Optional("locale"): str,
        Optional("partner-public"): bool,
        Required("release-properties"): {
            "app-name": str,
            "app-version": str,
            "branch": str,
            "build-id": str,
            "hash-type": str,
            "platform": str,
        },
        # list of artifact URLs for the artifacts that should be beetmoved
        Required("upstream-artifacts"): [
            {
                # taskId of the task with the artifact
                Required("taskId"): taskref_or_string,
                # type of signing task (for CoT)
                Required("taskType"): str,
                # Paths to the artifacts to sign
                Required("paths"): [str],
                # locale is used to map upload path and allow for duplicate simple names
                Required("locale"): str,
            }
        ],
        Optional("artifact-map"): object,
    },
)
def build_beetmover_payload(config, task, task_def):
    """Build the beetmoverscript payload.

    Translates the kebab-case worker config into the camelCase/snake_case
    keys beetmoverscript expects, and merges in the release config
    (version, build_number, ...) when one is available.
    """
    worker = task["worker"]
    release_config = get_release_config(config)
    release_properties = worker["release-properties"]

    task_def["payload"] = {
        "maxRunTime": worker["max-run-time"],
        "releaseProperties": {
            "appName": release_properties["app-name"],
            "appVersion": release_properties["app-version"],
            "branch": release_properties["branch"],
            "buildid": release_properties["build-id"],
            "hashType": release_properties["hash-type"],
            "platform": release_properties["platform"],
        },
        "upload_date": config.params["build_date"],
        "upstreamArtifacts": worker["upstream-artifacts"],
    }
    # Optional knobs are only added when present, keeping the payload minimal.
    if worker.get("locale"):
        task_def["payload"]["locale"] = worker["locale"]
    if worker.get("artifact-map"):
        task_def["payload"]["artifactMap"] = worker["artifact-map"]
    if worker.get("partner-public"):
        task_def["payload"]["is_partner_repack_public"] = worker["partner-public"]
    if release_config:
        task_def["payload"].update(release_config)
+
+
@payload_builder(
    "beetmover-push-to-release",
    schema={
        # the maximum time to run, in seconds
        Required("max-run-time"): int,
        Required("product"): str,
    },
)
def build_beetmover_push_to_release_payload(config, task, task_def):
    """Build the payload that pushes release candidates to the release
    directory, including any partner repacks to be published.
    """
    worker = task["worker"]
    release_config = get_release_config(config)
    # Partners are identified as "<partner>/<subpartner>" strings.
    partners = [f"{p}/{s}" for p, s, _ in get_partners_to_be_published(config)]

    task_def["payload"] = {
        "maxRunTime": worker["max-run-time"],
        "product": worker["product"],
        "version": release_config["version"],
        "build_number": release_config["build_number"],
        "partners": partners,
    }
+
+
+@payload_builder(
+ "beetmover-import-from-gcs-to-artifact-registry",
+ schema={
+ Required("max-run-time"): int,
+ Required("gcs-sources"): [str],
+ Required("product"): str,
+ },
+)
def build_import_from_gcs_to_artifact_registry_payload(config, task, task_def):
    """Build the payload for importing GCS artifacts into Artifact Registry.

    NOTE(review): the schema requires ``max-run-time`` but the payload does
    not forward it — presumably the script ignores it; confirm upstream.
    """
    worker = task["worker"]
    task_def["payload"] = {
        "product": worker["product"],
        "gcs_sources": worker["gcs-sources"],
    }
+
+
@payload_builder(
    "beetmover-maven",
    schema={
        Required("max-run-time"): int,
        Required("release-properties"): {
            "app-name": str,
            "app-version": str,
            "branch": str,
            "build-id": str,
            "artifact-id": str,
            "hash-type": str,
            "platform": str,
        },
        Required("upstream-artifacts"): [
            {
                Required("taskId"): taskref_or_string,
                Required("taskType"): str,
                Required("paths"): [str],
                Optional("zipExtract"): bool,
            }
        ],
        Optional("artifact-map"): object,
    },
)
def build_beetmover_maven_payload(config, task, task_def):
    """Build the maven beetmover payload.

    Starts from the regular beetmover payload, then adds the maven
    ``artifact_id`` and the computed geckoview version, and removes the
    release properties maven uploads do not use.
    """
    build_beetmover_payload(config, task, task_def)

    task_def["payload"]["artifact_id"] = task["worker"]["release-properties"][
        "artifact-id"
    ]
    if task["worker"].get("artifact-map"):
        task_def["payload"]["artifactMap"] = task["worker"]["artifact-map"]

    # Maven version strings embed the build id via the geckoview scheme.
    task_def["payload"]["version"] = _compute_geckoview_version(
        task["worker"]["release-properties"]["app-version"],
        task["worker"]["release-properties"]["build-id"],
    )

    # hashType/platform are meaningless for maven artifacts; drop them.
    del task_def["payload"]["releaseProperties"]["hashType"]
    del task_def["payload"]["releaseProperties"]["platform"]
+
+
@payload_builder(
    "balrog",
    schema={
        Required("balrog-action"): Any(*BALROG_ACTIONS),
        Optional("product"): str,
        Optional("platforms"): [str],
        Optional("release-eta"): str,
        Optional("channel-names"): optionally_keyed_by("release-type", [str]),
        Optional("require-mirrors"): bool,
        Optional("publish-rules"): optionally_keyed_by(
            "release-type", "release-level", [int]
        ),
        Optional("rules-to-update"): optionally_keyed_by(
            "release-type", "release-level", [str]
        ),
        Optional("archive-domain"): optionally_keyed_by("release-level", str),
        Optional("download-domain"): optionally_keyed_by("release-level", str),
        Optional("blob-suffix"): str,
        Optional("complete-mar-filename-pattern"): str,
        Optional("complete-mar-bouncer-product-pattern"): str,
        Optional("update-line"): object,
        Optional("suffixes"): [str],
        Optional("background-rate"): optionally_keyed_by(
            "release-type", "beta-number", Any(int, None)
        ),
        Optional("force-fallback-mapping-update"): optionally_keyed_by(
            "release-type", "beta-number", bool
        ),
        Optional("pin-channels"): optionally_keyed_by(
            "release-type", "release-level", [str]
        ),
        # list of artifact URLs for the artifacts that should be beetmoved
        Optional("upstream-artifacts"): [
            {
                # taskId of the task with the artifact
                Required("taskId"): taskref_or_string,
                # type of signing task (for CoT)
                Required("taskType"): str,
                # Paths to the artifacts to sign
                Required("paths"): [str],
            }
        ],
    },
)
def build_balrog_payload(config, task, task_def):
    """Build the balrogscript payload for any balrog action.

    ``submit-locale`` actions only need the upstream artifacts and suffixes;
    all other actions first resolve the keyed-by worker options, then add
    release-wide metadata, branching between toplevel submission and
    schedule/ship.
    """
    worker = task["worker"]
    release_config = get_release_config(config)
    # e.g. version "76.0b3" yields beta_number "3"; None for non-betas.
    beta_number = None
    if "b" in release_config["version"]:
        beta_number = release_config["version"].split("b")[-1]

    task_def["payload"] = {
        "behavior": worker["balrog-action"],
    }

    if (
        worker["balrog-action"] == "submit-locale"
        or worker["balrog-action"] == "v2-submit-locale"
    ):
        task_def["payload"].update(
            {
                "upstreamArtifacts": worker["upstream-artifacts"],
                "suffixes": worker["suffixes"],
            }
        )
    else:
        # Resolve any keyed-by values before they are copied into the payload.
        for prop in (
            "archive-domain",
            "channel-names",
            "download-domain",
            "publish-rules",
            "rules-to-update",
            "background-rate",
            "force-fallback-mapping-update",
            "pin-channels",
        ):
            if prop in worker:
                resolve_keyed_by(
                    worker,
                    prop,
                    task["description"],
                    **{
                        "release-type": config.params["release_type"],
                        "release-level": release_level(config.params["project"]),
                        "beta-number": beta_number,
                    },
                )
        task_def["payload"].update(
            {
                "build_number": release_config["build_number"],
                "product": worker["product"],
                "version": release_config["version"],
            }
        )
        # Straight kebab-case -> snake_case copies, when configured.
        for prop in (
            "blob-suffix",
            "complete-mar-filename-pattern",
            "complete-mar-bouncer-product-pattern",
            "pin-channels",
        ):
            if prop in worker:
                task_def["payload"][prop.replace("-", "_")] = worker[prop]
        if (
            worker["balrog-action"] == "submit-toplevel"
            or worker["balrog-action"] == "v2-submit-toplevel"
        ):
            task_def["payload"].update(
                {
                    "app_version": release_config["appVersion"],
                    "archive_domain": worker["archive-domain"],
                    "channel_names": worker["channel-names"],
                    "download_domain": worker["download-domain"],
                    "partial_versions": release_config.get("partial_versions", ""),
                    "platforms": worker["platforms"],
                    "rules_to_update": worker["rules-to-update"],
                    "require_mirrors": worker["require-mirrors"],
                    "update_line": worker["update-line"],
                }
            )
        else:  # schedule / ship
            task_def["payload"].update(
                {
                    "publish_rules": worker["publish-rules"],
                    "release_eta": worker.get(
                        "release-eta", config.params.get("release_eta")
                    )
                    or "",
                }
            )
            if worker.get("force-fallback-mapping-update"):
                task_def["payload"]["force_fallback_mapping_update"] = worker[
                    "force-fallback-mapping-update"
                ]
            if worker.get("background-rate"):
                task_def["payload"]["background_rate"] = worker["background-rate"]
+
+
+@payload_builder(
+ "bouncer-aliases",
+ schema={
+ Required("entries"): object,
+ },
+)
def build_bouncer_aliases_payload(config, task, task_def):
    """Map the worker's alias entries onto the bouncer aliases payload."""
    task_def["payload"] = {"aliases_entries": task["worker"]["entries"]}
+
+
@payload_builder(
    "bouncer-locations",
    schema={
        Required("implementation"): "bouncer-locations",
        Required("bouncer-products"): [str],
    },
)
def build_bouncer_locations_payload(config, task, task_def):
    """Build the payload used to update bouncer product locations."""
    worker = task["worker"]
    release_config = get_release_config(config)

    task_def["payload"] = {
        "bouncer_products": worker["bouncer-products"],
        "version": release_config["version"],
        "product": task["shipping-product"],
    }
+
+
+@payload_builder(
+ "bouncer-submission",
+ schema={
+ Required("locales"): [str],
+ Required("entries"): object,
+ },
+)
def build_bouncer_submission_payload(config, task, task_def):
    """Build the bouncer submission payload from the worker config."""
    payload = {
        "locales": task["worker"]["locales"],
        "submission_entries": task["worker"]["entries"],
    }
    task_def["payload"] = payload
+
+
+@payload_builder(
+ "push-flatpak",
+ schema={
+ Required("channel"): str,
+ Required("upstream-artifacts"): [
+ {
+ Required("taskId"): taskref_or_string,
+ Required("taskType"): str,
+ Required("paths"): [str],
+ }
+ ],
+ },
+)
def build_push_flatpak_payload(config, task, task_def):
    """Build the payload for publishing flatpaks to a channel."""
    worker = task["worker"]
    payload = {"channel": worker["channel"]}
    payload["upstreamArtifacts"] = worker["upstream-artifacts"]
    task_def["payload"] = payload
+
+
+@payload_builder(
+ "push-msix",
+ schema={
+ Required("channel"): str,
+ Optional("publish-mode"): str,
+ Required("upstream-artifacts"): [
+ {
+ Required("taskId"): taskref_or_string,
+ Required("taskType"): str,
+ Required("paths"): [str],
+ }
+ ],
+ },
+)
def build_push_msix_payload(config, task, task_def):
    """Build the payload for pushing MSIX packages to the Microsoft Store."""
    worker = task["worker"]
    payload = {
        "channel": worker["channel"],
        "upstreamArtifacts": worker["upstream-artifacts"],
    }
    publish_mode = worker.get("publish-mode")
    if publish_mode:
        # Only present when a non-default publish mode was requested.
        payload["publishMode"] = publish_mode
    task_def["payload"] = payload
+
+
+@payload_builder(
+ "shipit-shipped",
+ schema={
+ Required("release-name"): str,
+ },
+)
def build_ship_it_shipped_payload(config, task, task_def):
    """Tell Ship It that the named release has shipped."""
    task_def["payload"] = {"release_name": task["worker"]["release-name"]}
+
+
+@payload_builder(
+ "shipit-maybe-release",
+ schema={
+ Required("phase"): str,
+ },
+)
def build_ship_it_maybe_release_payload(config, task, task_def):
    """Build the Ship It maybe-release payload.

    The branch is the repository path relative to hg.mozilla.org;
    ``version`` includes any beta number (e.g. "71.0b13"), unlike
    app_version.
    """
    prefix = "https://hg.mozilla.org/"
    branch = config.params["head_repository"][len(prefix) :]

    task_def["payload"] = {
        "product": task["shipping-product"],
        "branch": branch,
        "phase": task["worker"]["phase"],
        "version": config.params["version"],
        "cron_revision": config.params["head_rev"],
    }
+
+
+@payload_builder(
+ "push-addons",
+ schema={
+ Required("channel"): Any("listed", "unlisted"),
+ Required("upstream-artifacts"): [
+ {
+ Required("taskId"): taskref_or_string,
+ Required("taskType"): str,
+ Required("paths"): [str],
+ }
+ ],
+ },
+)
def build_push_addons_payload(config, task, task_def):
    """Build the payload for pushing addons/langpacks to AMO."""
    worker = task["worker"]
    task_def["payload"] = dict(
        channel=worker["channel"],
        upstreamArtifacts=worker["upstream-artifacts"],
    )
+
+
@payload_builder(
    "treescript",
    schema={
        Required("tags"): [Any("buildN", "release", None)],
        Required("bump"): bool,
        Optional("bump-files"): [str],
        Optional("repo-param-prefix"): str,
        Optional("dontbuild"): bool,
        Optional("ignore-closed-tree"): bool,
        Optional("force-dry-run"): bool,
        Optional("push"): bool,
        Optional("source-repo"): str,
        Optional("ssh-user"): str,
        Optional("l10n-bump-info"): {
            Required("name"): str,
            Required("path"): str,
            Required("version-path"): str,
            Optional("l10n-repo-url"): str,
            Optional("ignore-config"): object,
            Required("platform-configs"): [
                {
                    Required("platforms"): [str],
                    Required("path"): str,
                    Optional("format"): str,
                }
            ],
        },
        Optional("merge-info"): object,
    },
)
def build_treescript_payload(config, task, task_def):
    """Build the treescript payload (tagging, version bumps, l10n bumps,
    merge automation and pushing).

    Each requested operation appends an action name to ``payload.actions``
    and attaches the matching ``*_info`` section.
    """
    worker = task["worker"]
    release_config = get_release_config(config)

    task_def["payload"] = {"actions": []}
    actions = task_def["payload"]["actions"]
    if worker["tags"]:
        tag_names = []
        product = task["shipping-product"].upper()
        version = release_config["version"].replace(".", "_")
        buildnum = release_config["build_number"]
        if "buildN" in worker["tags"]:
            tag_names.append(f"{product}_{version}_BUILD{buildnum}")
        if "release" in worker["tags"]:
            tag_names.append(f"{product}_{version}_RELEASE")
        tag_info = {
            "tags": tag_names,
            "revision": config.params[
                "{}head_rev".format(worker.get("repo-param-prefix", ""))
            ],
        }
        task_def["payload"]["tag_info"] = tag_info
        actions.append("tag")

    if worker["bump"]:
        # "bump-files" is Optional in the schema: use .get() so a missing
        # key raises the intended error below instead of a KeyError.
        if not worker.get("bump-files"):
            raise Exception("Version Bump requested without bump-files")

        bump_info = {}
        bump_info["next_version"] = release_config["next_version"]
        bump_info["files"] = worker["bump-files"]
        task_def["payload"]["version_bump_info"] = bump_info
        actions.append("version_bump")

    if worker.get("l10n-bump-info"):
        # Translate kebab-case keys to the snake_case treescript expects.
        l10n_bump_info = {
            k.replace("-", "_"): v for k, v in worker["l10n-bump-info"].items()
        }
        task_def["payload"]["l10n_bump_info"] = [l10n_bump_info]
        actions.append("l10n_bump")

    if worker.get("merge-info"):
        merge_info = {
            merge_param_name.replace("-", "_"): merge_param_value
            for merge_param_name, merge_param_value in worker["merge-info"].items()
            if merge_param_name != "version-files"
        }
        merge_info["version_files"] = [
            {
                file_param_name.replace("-", "_"): file_param_value
                for file_param_name, file_param_value in file_entry.items()
            }
            for file_entry in worker["merge-info"]["version-files"]
        ]
        task_def["payload"]["merge_info"] = merge_info
        actions.append("merge_day")

    # "push" is Optional in the schema: guard with .get() so tasks that
    # omit it don't raise a KeyError.
    if worker.get("push"):
        actions.append("push")

    if worker.get("force-dry-run"):
        task_def["payload"]["dry_run"] = True

    if worker.get("dontbuild"):
        task_def["payload"]["dontbuild"] = True

    if worker.get("ignore-closed-tree") is not None:
        task_def["payload"]["ignore_closed_tree"] = worker["ignore-closed-tree"]

    if worker.get("source-repo"):
        task_def["payload"]["source_repo"] = worker["source-repo"]

    if worker.get("ssh-user"):
        task_def["payload"]["ssh_user"] = worker["ssh-user"]
+
+
+@payload_builder(
+ "invalid",
+ schema={
+ # an invalid task is one which should never actually be created; this is used in
+ # release automation on branches where the task just doesn't make sense
+ Extra: object,
+ },
+)
def build_invalid_payload(config, task, task_def):
    """Mark the task as one that must never actually be created."""
    task_def["payload"] = "invalid task - should never be created"
+
+
+@payload_builder(
+ "always-optimized",
+ schema={
+ Extra: object,
+ },
+)
+@payload_builder("succeed", schema={})
def build_dummy_payload(config, task, task_def):
    """Give always-optimized/succeed tasks an empty payload."""
    task_def["payload"] = {}
+
+
+transforms = TransformSequence()
+
+
@transforms.add
def set_implementation(config, tasks):
    """
    Set the worker implementation based on the worker-type alias.
    """
    for task in tasks:
        # Respect an explicitly-configured implementation.
        if "implementation" in task["worker"]:
            yield task
            continue

        impl, os = worker_type_implementation(
            config.graph_config, config.params, task["worker-type"]
        )

        # Mirror the implementation/os into tags for searchability.
        tags = task.setdefault("tags", {})
        tags["worker-implementation"] = impl
        if os:
            tags["os"] = os

        worker = task.setdefault("worker", {})
        worker["implementation"] = impl
        if os:
            worker["os"] = os

        yield task
+
+
@transforms.add
def set_defaults(config, tasks):
    """Fill in default values for the task description and for the
    per-implementation worker configuration."""
    for task in tasks:
        task.setdefault("shipping-phase", None)
        task.setdefault("shipping-product", None)
        task.setdefault("always-target", False)
        task.setdefault("optimization", None)
        task.setdefault("use-sccache", False)

        worker = task["worker"]
        if worker["implementation"] in ("docker-worker",):
            worker.setdefault("chain-of-trust", False)
            worker.setdefault("taskcluster-proxy", False)
            worker.setdefault("allow-ptrace", True)
            worker.setdefault("loopback-video", False)
            worker.setdefault("loopback-audio", False)
            worker.setdefault("docker-in-docker", False)
            worker.setdefault("privileged", False)
            worker.setdefault("volumes", [])
            worker.setdefault("env", {})
            if "caches" in worker:
                for c in worker["caches"]:
                    c.setdefault("skip-untrusted", False)
        elif worker["implementation"] == "generic-worker":
            worker.setdefault("env", {})
            worker.setdefault("os-groups", [])
            # os-groups is a Windows-only generic-worker feature.
            if worker["os-groups"] and worker["os"] != "windows":
                raise Exception(
                    "os-groups feature of generic-worker is only supported on "
                    "Windows, not on {}".format(worker["os"])
                )
            worker.setdefault("chain-of-trust", False)
        elif worker["implementation"] in (
            "scriptworker-signing",
            "beetmover",
            "beetmover-push-to-release",
            "beetmover-maven",
            "beetmover-import-from-gcs-to-artifact-registry",
        ):
            worker.setdefault("max-run-time", 600)
        elif worker["implementation"] == "push-apk":
            worker.setdefault("commit", False)

        yield task
+
+
@transforms.add
def setup_raptor(config, tasks):
    """Add options that are specific to raptor jobs (identified by suite=raptor).

    This variant uses a separate set of transforms for manipulating the tests at the
    task-level. Currently only used for setting the taskcluster proxy setting and
    the scopes required for perftest secrets.
    """
    # Imported here rather than at module level — presumably to avoid an
    # import cycle with the test transforms; confirm before moving it.
    from gecko_taskgraph.transforms.test.raptor import (
        task_transforms as raptor_transforms,
    )

    for task in tasks:
        if task.get("extra", {}).get("suite", "") != "raptor":
            yield task
            continue

        yield from raptor_transforms(config, [task])
+
+
+@transforms.add
def task_name_from_label(config, tasks):
    """Derive a ``label`` ("<kind>-<name>") for tasks that only have a name."""
    for task in tasks:
        name = task.pop("name", None)
        if "label" in task:
            yield task
            continue
        if name is None:
            raise Exception("task has neither a name nor a label")
        task["label"] = "{}-{}".format(config.kind, name)
        yield task
+
+
# Error template for products missing from release-promotion configuration.
UNSUPPORTED_SHIPPING_PRODUCT_ERROR = """\
The shipping product {product} is not in the list of configured products in
`taskcluster/ci/config.yml`.
"""


def validate_shipping_product(config, product):
    """Raise if *product* is not declared under release-promotion products.

    Fixes the previously mismatched quoting (backtick opened, apostrophe
    closed) in the user-facing error message.
    """
    if product not in config.graph_config["release-promotion"]["products"]:
        raise Exception(UNSUPPORTED_SHIPPING_PRODUCT_ERROR.format(product=product))
+
+
@transforms.add
def validate(config, tasks):
    """Validate each task against the task-description schema and its
    worker implementation's payload schema, and check the shipping
    product is a configured one."""
    for task in tasks:
        validate_schema(
            task_description_schema,
            task,
            "In task {!r}:".format(task.get("label", "?no-label?")),
        )
        validate_schema(
            payload_builders[task["worker"]["implementation"]].schema,
            task["worker"],
            "In task.run {!r}:".format(task.get("label", "?no-label?")),
        )
        if task["shipping-product"] is not None:
            validate_shipping_product(config, task["shipping-product"])
        yield task
+
+
@index_builder("generic")
def add_generic_index_routes(config, task):
    """Add the standard v2 index routes (and trunk aliases) to *task*."""
    index = task.get("index")
    routes = task.setdefault("routes", [])

    verify_index(config, index)

    # Substitution values for the route templates.
    subs = config.params.copy()
    subs["job-name"] = index["job-name"]
    subs["build_date_long"] = time.strftime(
        "%Y.%m.%d.%Y%m%d%H%M%S", time.gmtime(config.params["build_date"])
    )
    subs["build_date"] = time.strftime(
        "%Y.%m.%d", time.gmtime(config.params["build_date"])
    )
    subs["product"] = index["product"]
    subs["trust-domain"] = config.graph_config["trust-domain"]
    subs["branch_rev"] = get_branch_rev(config)

    project = config.params.get("project")

    for tpl in V2_ROUTE_TEMPLATES:
        routes.append(tpl.format(**subs))

    # Additionally alias all tasks for "trunk" repos into a common
    # namespace.
    if project and project in TRUNK_PROJECTS:
        for tpl in V2_TRUNK_ROUTE_TEMPLATES:
            routes.append(tpl.format(**subs))

    return task
+
+
@index_builder("shippable")
def add_shippable_index_routes(config, task):
    """Add shippable index routes, plus the en-US shippable-l10n routes."""
    index = task.get("index")
    routes = task.setdefault("routes", [])

    verify_index(config, index)

    # Substitution values for the route templates.
    subs = config.params.copy()
    subs["job-name"] = index["job-name"]
    subs["build_date_long"] = time.strftime(
        "%Y.%m.%d.%Y%m%d%H%M%S", time.gmtime(config.params["build_date"])
    )
    subs["build_date"] = time.strftime(
        "%Y.%m.%d", time.gmtime(config.params["build_date"])
    )
    subs["product"] = index["product"]
    subs["trust-domain"] = config.graph_config["trust-domain"]
    subs["branch_rev"] = get_branch_rev(config)

    for tpl in V2_SHIPPABLE_TEMPLATES:
        routes.append(tpl.format(**subs))

    # Also add routes for en-US
    task = add_shippable_l10n_index_routes(config, task, force_locale="en-US")

    return task
+
+
@index_builder("shippable-with-multi-l10n")
def add_shippable_multi_index_routes(config, task):
    """Combine shippable routes with multi-locale l10n routes."""
    task = add_shippable_index_routes(config, task)
    task = add_l10n_index_routes(config, task, force_locale="multi")
    return task
+
+
@index_builder("l10n")
def add_l10n_index_routes(config, task, force_locale=None):
    """Add one l10n index route per locale; *force_locale* overrides the
    locales derived from the task's attributes."""
    index = task.get("index")
    routes = task.setdefault("routes", [])

    verify_index(config, index)

    subs = config.params.copy()
    subs["job-name"] = index["job-name"]
    subs["build_date_long"] = time.strftime(
        "%Y.%m.%d.%Y%m%d%H%M%S", time.gmtime(config.params["build_date"])
    )
    subs["product"] = index["product"]
    subs["trust-domain"] = config.graph_config["trust-domain"]
    subs["branch_rev"] = get_branch_rev(config)

    locales = task["attributes"].get(
        "chunk_locales", task["attributes"].get("all_locales")
    )
    # Some tasks have only one locale set
    if task["attributes"].get("locale"):
        locales = [task["attributes"]["locale"]]

    if force_locale:
        # Used for en-US and multi-locale
        locales = [force_locale]

    if not locales:
        raise Exception("Error: Unable to use l10n index for tasks without locales")

    # If there are too many locales, we can't write a route for all of them
    # See Bug 1323792
    if len(locales) > 18:  # 18 * 3 = 54, max routes = 64
        return task

    for locale in locales:
        for tpl in V2_L10N_TEMPLATES:
            routes.append(tpl.format(locale=locale, **subs))

    return task
+
+
@index_builder("shippable-l10n")
def add_shippable_l10n_index_routes(config, task, force_locale=None):
    """Add one shippable-l10n index route per locale; *force_locale*
    overrides the locales derived from the task's attributes."""
    index = task.get("index")
    routes = task.setdefault("routes", [])

    verify_index(config, index)

    subs = config.params.copy()
    subs["job-name"] = index["job-name"]
    subs["build_date_long"] = time.strftime(
        "%Y.%m.%d.%Y%m%d%H%M%S", time.gmtime(config.params["build_date"])
    )
    subs["product"] = index["product"]
    subs["trust-domain"] = config.graph_config["trust-domain"]
    subs["branch_rev"] = get_branch_rev(config)

    locales = task["attributes"].get(
        "chunk_locales", task["attributes"].get("all_locales")
    )
    # Some tasks have only one locale set
    if task["attributes"].get("locale"):
        locales = [task["attributes"]["locale"]]

    if force_locale:
        # Used for en-US and multi-locale
        locales = [force_locale]

    if not locales:
        raise Exception("Error: Unable to use l10n index for tasks without locales")

    # If there are too many locales, we can't write a route for all of them
    # See Bug 1323792
    if len(locales) > 18:  # 18 * 3 = 54, max routes = 64
        return task

    for locale in locales:
        for tpl in V2_SHIPPABLE_L10N_TEMPLATES:
            routes.append(tpl.format(locale=locale, **subs))

    return task
+
+
def add_geckoview_index_routes(config, task):
    """Add the geckoview release index route, versioned by the computed
    geckoview version (app version + build date)."""
    index = task.get("index")
    routes = task.setdefault("routes", [])
    geckoview_version = _compute_geckoview_version(
        config.params["app_version"], config.params["moz_build_date"]
    )

    subs = {
        "geckoview-version": geckoview_version,
        "job-name": index["job-name"],
        "product": index["product"],
        "project": config.params["project"],
        "trust-domain": config.graph_config["trust-domain"],
    }
    routes.append(V2_GECKOVIEW_RELEASE.format(**subs))

    return task
+
+
@index_builder("android-shippable")
def add_android_shippable_index_routes(config, task):
    """Combine shippable routes with the geckoview release route."""
    task = add_shippable_index_routes(config, task)
    task = add_geckoview_index_routes(config, task)

    return task
+
+
@index_builder("android-shippable-with-multi-l10n")
def add_android_shippable_multi_index_routes(config, task):
    """Combine shippable+multi-l10n routes with the geckoview route."""
    task = add_shippable_multi_index_routes(config, task)
    task = add_geckoview_index_routes(config, task)

    return task
+
+
@transforms.add
def add_index_routes(config, tasks):
    """Set the index rank and dispatch to the configured index builder."""
    for task in tasks:
        index = task.get("index", {})

        # The default behavior is to rank tasks according to their tier
        extra_index = task.setdefault("extra", {}).setdefault("index", {})
        rank = index.get("rank", "by-tier")

        if rank == "by-tier":
            # rank is zero for non-tier-1 tasks and based on pushid for others;
            # this sorts tier-{2,3} builds below tier-1 in the index
            tier = task.get("treeherder", {}).get("tier", 3)
            extra_index["rank"] = 0 if tier > 1 else int(config.params["build_date"])
        elif rank == "build_date":
            extra_index["rank"] = int(config.params["build_date"])
        else:
            extra_index["rank"] = rank

        if not index:
            yield task
            continue

        index_type = index.get("type", "generic")
        task = index_builders[index_type](config, task)

        # "index" is consumed here; downstream transforms expect it gone.
        del task["index"]
        yield task
+
+
@transforms.add
def try_task_config_env(config, tasks):
    """Set environment variables in the task."""
    env = config.params["try_task_config"].get("env")
    if not env:
        yield from tasks
        return

    # Find all implementations that have an 'env' key.
    implementations = {
        name
        for name, builder in payload_builders.items()
        if "env" in builder.schema.schema
    }
    for task in tasks:
        # Only implementations with an env section can receive the overrides.
        if task["worker"]["implementation"] in implementations:
            task["worker"]["env"].update(env)
        yield task
+
+
+@transforms.add
def try_task_config_chemspill_prio(config, tasks):
    """Increase the priority from lowest and very-low -> low, but leave others unchanged."""
    if not config.params["try_task_config"].get("chemspill-prio"):
        yield from tasks
        return

    for task in tasks:
        if task["priority"] in ("lowest", "very-low"):
            task["priority"] = "low"
        yield task
+
+
+@transforms.add
def try_task_config_routes(config, tasks):
    """Set routes in the task."""
    extra_routes = config.params["try_task_config"].get("routes")
    for task in tasks:
        if extra_routes:
            task.setdefault("routes", []).extend(extra_routes)
        yield task
+
+
@transforms.add
def set_task_and_artifact_expiry(config, jobs):
    """Set the default expiry for tasks and their artifacts.

    These values are read from ci/config.yml
    """
    # NOTE(review): utcnow() yields a naive datetime; this relies on fromNow
    # producing comparable naive datetimes — confirm against its helper.
    now = datetime.datetime.utcnow()
    for job in jobs:
        expires = get_expiration(config, job.get("expiration-policy", "default"))
        job_expiry = job.setdefault("expires-after", expires)
        job_expiry_from_now = fromNow(job_expiry)

        for artifact in job["worker"].get("artifacts", ()):
            artifact_expiry = artifact.setdefault("expires-after", expires)

            # By using > instead of >=, there's a chance of mismatch
            # where the artifact expires sooner than the task.
            # There is no chance, however, of mismatch where artifacts
            # expire _after_ the task.
            # Currently this leads to some build tasks having logs
            # that expire in 1 year while the task expires in 3 years.
            if fromNow(artifact_expiry, now) > job_expiry_from_now:
                artifact["expires-after"] = job_expiry

        yield job
+
+
@transforms.add
def build_task(config, tasks):
    """Convert each task description into an actual Taskcluster task
    definition.

    Resolves the worker type (honoring try_task_config worker-overrides),
    builds treeherder metadata and routes, invokes the per-implementation
    payload builder, resolves run-on-projects and shipping attributes, and
    yields the final {label, task, dependencies, attributes, ...} dict.
    """
    for task in tasks:
        level = str(config.params["level"])

        task_worker_type = task["worker-type"]
        # try pushes may remap worker pools wholesale via worker-overrides.
        worker_overrides = config.params["try_task_config"].get("worker-overrides", {})
        if task_worker_type in worker_overrides:
            worker_pool = worker_overrides[task_worker_type]
            provisioner_id, worker_type = worker_pool.split("/", 1)
        else:
            provisioner_id, worker_type = get_worker_type(
                config.graph_config,
                config.params,
                task_worker_type,
            )
        task["worker-type"] = "/".join([provisioner_id, worker_type])
        project = config.params["project"]

        routes = task.get("routes", [])
        scopes = [
            s.format(level=level, project=project) for s in task.get("scopes", [])
        ]

        # set up extra
        extra = task.get("extra", {})
        extra["parent"] = {"task-reference": "<decision>"}
        task_th = task.get("treeherder")
        if task_th:
            extra.setdefault("treeherder-platform", task_th["platform"])
            treeherder = extra.setdefault("treeherder", {})

            machine_platform, collection = task_th["platform"].split("/", 1)
            treeherder["machine"] = {"platform": machine_platform}
            treeherder["collection"] = {collection: True}

            group_names = config.graph_config["treeherder"]["group-names"]
            groupSymbol, symbol = split_symbol(task_th["symbol"])
            if groupSymbol != "?":
                treeherder["groupSymbol"] = groupSymbol
                if groupSymbol not in group_names:
                    path = os.path.join(config.path, task.get("job-from", ""))
                    raise Exception(UNKNOWN_GROUP_NAME.format(groupSymbol, path))
                treeherder["groupName"] = group_names[groupSymbol]
            treeherder["symbol"] = symbol
            if len(symbol) > 25 or len(groupSymbol) > 25:
                raise RuntimeError(
                    "Treeherder group and symbol names must not be longer than "
                    "25 characters: {} (see {})".format(
                        task_th["symbol"],
                        TC_TREEHERDER_SCHEMA_URL,
                    )
                )
            treeherder["jobKind"] = task_th["kind"]
            treeherder["tier"] = task_th["tier"]

            branch_rev = get_branch_rev(config)

            routes.append(
                "{}.v2.{}.{}".format(
                    TREEHERDER_ROUTE_ROOT,
                    config.params["project"],
                    branch_rev,
                )
            )

        if "deadline-after" not in task:
            task["deadline-after"] = "1 day"

        if "priority" not in task:
            task["priority"] = get_default_priority(
                config.graph_config, config.params["project"]
            )

        tags = task.get("tags", {})
        attributes = task.get("attributes", {})

        tags.update(
            {
                "createdForUser": config.params["owner"],
                "kind": config.kind,
                "label": task["label"],
                "retrigger": "true" if attributes.get("retrigger", False) else "false",
            }
        )

        task_def = {
            "provisionerId": provisioner_id,
            "workerType": worker_type,
            "routes": routes,
            "created": {"relative-datestamp": "0 seconds"},
            "deadline": {"relative-datestamp": task["deadline-after"]},
            "expires": {"relative-datestamp": task["expires-after"]},
            "scopes": scopes,
            "metadata": {
                "description": task["description"],
                "name": task["label"],
                "owner": config.params["owner"],
                "source": config.params.file_url(config.path, pretty=True),
            },
            "extra": extra,
            "tags": tags,
            "priority": task["priority"],
        }

        if task.get("requires", None):
            task_def["requires"] = task["requires"]

        if task_th:
            # link back to treeherder in description
            th_job_link = (
                "https://treeherder.mozilla.org/#/jobs?repo={}&revision={}&selectedTaskRun=<self>"
            ).format(config.params["project"], branch_rev)
            task_def["metadata"]["description"] = {
                "task-reference": "{description} ([Treeherder job]({th_job_link}))".format(
                    description=task_def["metadata"]["description"],
                    th_job_link=th_job_link,
                )
            }

        # add the payload and adjust anything else as required (e.g., scopes)
        payload_builders[task["worker"]["implementation"]].builder(
            config, task, task_def
        )

        # Resolve run-on-projects
        build_platform = attributes.get("build_platform")
        resolve_keyed_by(
            task,
            "run-on-projects",
            item_name=task["label"],
            **{"build-platform": build_platform},
        )
        attributes["run_on_projects"] = task.get("run-on-projects", ["all"])
        attributes["always_target"] = task["always-target"]
        # This logic is here since downstream tasks don't always match their
        # upstream dependency's shipping_phase.
        # A text_type task['shipping-phase'] takes precedence, then
        # an existing attributes['shipping_phase'], then fall back to None.
        if task.get("shipping-phase") is not None:
            attributes["shipping_phase"] = task["shipping-phase"]
        else:
            attributes.setdefault("shipping_phase", None)
        # shipping_product will always match the upstream task's
        # shipping_product, so a pre-set existing attributes['shipping_product']
        # takes precedence over task['shipping-product']. However, make sure
        # we don't have conflicting values.
        if task.get("shipping-product") and attributes.get("shipping_product") not in (
            None,
            task["shipping-product"],
        ):
            raise Exception(
                "{} shipping_product {} doesn't match task shipping-product {}!".format(
                    task["label"],
                    attributes["shipping_product"],
                    task["shipping-product"],
                )
            )
        attributes.setdefault("shipping_product", task["shipping-product"])

        # Set MOZ_AUTOMATION on all jobs.
        if task["worker"]["implementation"] in (
            "generic-worker",
            "docker-worker",
        ):
            payload = task_def.get("payload")
            if payload:
                env = payload.setdefault("env", {})
                env["MOZ_AUTOMATION"] = "1"

        dependencies = task.get("dependencies", {})
        if_dependencies = task.get("if-dependencies", [])
        if if_dependencies:
            # Replace dependency labels with their resolved entries.
            for i, dep in enumerate(if_dependencies):
                if dep in dependencies:
                    if_dependencies[i] = dependencies[dep]
                    continue

                raise Exception(
                    "{label} specifies '{dep}' in if-dependencies, "
                    "but {dep} is not a dependency!".format(
                        label=task["label"], dep=dep
                    )
                )

        yield {
            "label": task["label"],
            "description": task["description"],
            "task": task_def,
            "dependencies": dependencies,
            "if-dependencies": if_dependencies,
            "soft-dependencies": task.get("soft-dependencies", []),
            "attributes": attributes,
            "optimization": task.get("optimization", None),
        }
+
+
@transforms.add
def chain_of_trust(config, tasks):
    """Record the docker-image dependency as a chain-of-trust input.

    For any task that enables the ``chainOfTrust`` payload feature and is
    built from an in-tree docker image, note that image under
    ``extra.chainOfTrust.inputs`` so provenance can be verified.
    """
    for task in tasks:
        task_def = task["task"]
        features = task_def.get("payload", {}).get("features", {})
        image_dep = task.get("dependencies", {}).get("docker-image")
        if features.get("chainOfTrust") and image_dep:
            cot = task_def.setdefault("extra", {}).setdefault("chainOfTrust", {})
            cot.setdefault("inputs", {})["docker-image"] = {
                "task-reference": "<docker-image>"
            }
        yield task
+
+
@transforms.add
def check_task_identifiers(config, tasks):
    """Ensures that all tasks have well defined identifiers:
    ``^[a-zA-Z0-9_-]{1,38}$``
    """
    valid_identifier = re.compile(r"^[a-zA-Z0-9_-]{1,38}$")
    for task in tasks:
        for attrib in ("workerType", "provisionerId"):
            value = task["task"][attrib]
            if valid_identifier.match(value):
                continue
            raise Exception(
                "task {}.{} is not a valid identifier: {}".format(
                    task["label"], attrib, value
                )
            )
        yield task
+
+
@transforms.add
def check_task_dependencies(config, tasks):
    """Ensures that tasks don't have more than MAX_DEPENDENCIES dependencies.

    Raises an Exception naming the offending kind/label when the module-level
    ``MAX_DEPENDENCIES`` limit is exceeded.
    """
    for task in tasks:
        # presumably mirrors the Taskcluster queue's per-task dependency
        # limit -- TODO confirm against the MAX_DEPENDENCIES definition
        if len(task["dependencies"]) > MAX_DEPENDENCIES:
            raise Exception(
                "task {}/{} has too many dependencies ({} > {})".format(
                    config.kind,
                    task["label"],
                    len(task["dependencies"]),
                    MAX_DEPENDENCIES,
                )
            )
        yield task
+
+
def check_caches_are_volumes(task):
    """Ensures that all cache paths are defined as volumes.

    Caches and volumes are the only filesystem locations whose content
    isn't defined by the Docker image itself. Some caches are optional
    depending on the job environment. We want paths that are potentially
    caches to have as similar behavior regardless of whether a cache is
    used. To help enforce this, we require that all paths used as caches
    to be declared as Docker volumes. This check won't catch all offenders.
    But it is better than nothing.

    Raises an Exception naming the undeclared mount points; returns None
    when every cache mount point is also a declared volume.
    """
    # Build the set directly; the previous `{s for s in ...}` comprehension
    # was a redundant copy of set().
    volumes = set(task["worker"]["volumes"])
    paths = {c["mount-point"] for c in task["worker"].get("caches", [])}
    missing = paths - volumes

    if not missing:
        return

    raise Exception(
        "task %s (image %s) has caches that are not declared as "
        "Docker volumes: %s "
        "(have you added them as VOLUMEs in the Dockerfile?)"
        % (task["label"], task["worker"]["docker-image"], ", ".join(sorted(missing)))
    )
+
+
def check_required_volumes(task):
    """
    Ensures that all paths that are required to be volumes are defined as volumes.

    Performance of writing to files is poor in directories not marked as
    volumes, in docker. Ensure that paths that are often written to are marked
    as volumes.

    Raises an Exception naming the undeclared paths; returns None when every
    required path is also a declared volume.
    """
    volumes = set(task["worker"]["volumes"])
    paths = set(task["worker"].get("required-volumes", []))
    missing = paths - volumes

    if not missing:
        return

    # Fixed typo in the error message ("peformance" -> "performance").
    raise Exception(
        "task %s (image %s) has paths that should be volumes for performance "
        "that are not declared as Docker volumes: %s "
        "(have you added them as VOLUMEs in the Dockerfile?)"
        % (task["label"], task["worker"]["docker-image"], ", ".join(sorted(missing)))
    )
+
+
@transforms.add
def check_run_task_caches(config, tasks):
    """Audit for caches requiring run-task.

    run-task manages caches in certain ways. If a cache managed by run-task
    is used by a non run-task task, it could cause problems. So we audit for
    that and make sure certain cache names are exclusive to run-task.

    IF YOU ARE TEMPTED TO MAKE EXCLUSIONS TO THIS POLICY, YOU ARE LIKELY
    CONTRIBUTING TECHNICAL DEBT AND WILL HAVE TO SOLVE MANY OF THE PROBLEMS
    THAT RUN-TASK ALREADY SOLVES. THINK LONG AND HARD BEFORE DOING THAT.
    """
    # Cache names (after the trust-domain/level prefix is stripped) that only
    # run-task tasks may use.
    re_reserved_caches = re.compile(
        """^
        (checkouts|tooltool-cache)
        """,
        re.VERBOSE,
    )

    re_sparse_checkout_cache = re.compile("^checkouts-sparse")

    # Every cache must be namespaced by trust domain and level.
    cache_prefix = "{trust_domain}-level-{level}-".format(
        trust_domain=config.graph_config["trust-domain"],
        level=config.params["level"],
    )

    # Suffix reserved cache names must carry; presumably derived from the
    # contents of run-task itself so caches invalidate when run-task changes
    # -- see _run_task_suffix.
    suffix = _run_task_suffix()

    for task in tasks:
        payload = task["task"].get("payload", {})
        command = payload.get("command") or [""]

        main_command = command[0] if isinstance(command[0], str) else ""
        run_task = main_command.endswith("run-task")

        require_sparse_cache = False
        have_sparse_cache = False

        if run_task:
            for arg in command[1:]:
                if not isinstance(arg, str):
                    continue

                # "--" terminates run-task's own arguments.
                if arg == "--":
                    break

                if arg.startswith("--gecko-sparse-profile"):
                    if "=" not in arg:
                        raise Exception(
                            "{} is specifying `--gecko-sparse-profile` to run-task "
                            "as two arguments. Unable to determine if the sparse "
                            "profile exists.".format(task["label"])
                        )
                    _, sparse_profile = arg.split("=", 1)
                    if not os.path.exists(os.path.join(GECKO, sparse_profile)):
                        # Fixed typo in the error message ("non-existant").
                        raise Exception(
                            "{} is using non-existent sparse profile {}.".format(
                                task["label"], sparse_profile
                            )
                        )
                    require_sparse_cache = True
                    break

        for cache in payload.get("cache", {}):
            if not cache.startswith(cache_prefix):
                raise Exception(
                    "{} is using a cache ({}) which is not appropriate "
                    "for its trust-domain and level. It should start with {}.".format(
                        task["label"], cache, cache_prefix
                    )
                )

            # Strip the namespace prefix before checking reserved names.
            cache = cache[len(cache_prefix) :]

            if re_sparse_checkout_cache.match(cache):
                have_sparse_cache = True

            if not re_reserved_caches.match(cache):
                continue

            if not run_task:
                raise Exception(
                    "%s is using a cache (%s) reserved for run-task "
                    "change the task to use run-task or use a different "
                    "cache name" % (task["label"], cache)
                )

            if not cache.endswith(suffix):
                raise Exception(
                    "%s is using a cache (%s) reserved for run-task "
                    "but the cache name is not dependent on the contents "
                    "of run-task; change the cache name to conform to the "
                    "naming requirements" % (task["label"], cache)
                )

        if require_sparse_cache and not have_sparse_cache:
            raise Exception(
                "%s is using a sparse checkout but not using "
                "a sparse checkout cache; change the checkout "
                "cache name so it is sparse aware" % task["label"]
            )

        yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/test/__init__.py b/taskcluster/gecko_taskgraph/transforms/test/__init__.py
new file mode 100644
index 0000000000..ac17554baa
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/test/__init__.py
@@ -0,0 +1,538 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+These transforms construct a task description to run the given test, based on a
+test description. The implementation here is shared among all test kinds, but
+contains specific support for how we run tests in Gecko (via mozharness,
+invoked in particular ways).
+
+This is a good place to translate a test-description option such as
+`single-core: true` to the implementation of that option in a task description
+(worker options, mozharness commandline, environment variables, etc.)
+
+The test description should be fully formed by the time it reaches these
+transforms, and these transforms should not embody any specific knowledge about
+what should run where. this is the wrong place for special-casing platforms,
+for example - use `all_tests.py` instead.
+"""
+
+
+import logging
+from importlib import import_module
+
+from mozbuild.schedules import INCLUSIVE_COMPONENTS
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import Schema, optionally_keyed_by, resolve_keyed_by
+from voluptuous import Any, Exclusive, Optional, Required
+
+from gecko_taskgraph.optimize.schema import OptimizationSchema
+from gecko_taskgraph.transforms.test.other import get_mobile_project
+from gecko_taskgraph.util.chunking import manifest_loaders
+
+logger = logging.getLogger(__name__)
+transforms = TransformSequence()
+
+
+# Schema for a test description
+#
+# *****WARNING*****
+#
+# This is a great place for baffling cruft to accumulate, and that makes
+# everyone move more slowly. Be considerate of your fellow hackers!
+# See the warnings in taskcluster/docs/how-tos.rst
+#
+# *****WARNING*****
test_description_schema = Schema(
    {
        # description of the suite, for the task metadata
        Required("description"): str,
        # test suite category and name
        Optional("suite"): Any(
            optionally_keyed_by("variant", str),
            {
                Optional("category"): str,
                Optional("name"): optionally_keyed_by("variant", str),
            },
        ),
        # base work directory used to set up the task.
        Optional("workdir"): optionally_keyed_by("test-platform", Any(str, "default")),
        # the name by which this test suite is addressed in try syntax; defaults to
        # the test-name. This will translate to the `unittest_try_name` or
        # `talos_try_name` attribute.
        Optional("try-name"): str,
        # additional tags to mark up this type of test
        Optional("tags"): {str: object},
        # the symbol, or group(symbol), under which this task should appear in
        # treeherder.
        Required("treeherder-symbol"): str,
        # the value to place in task.extra.treeherder.machine.platform; ideally
        # this is the same as build-platform, and that is the default, but in
        # practice it's not always a match.
        Optional("treeherder-machine-platform"): str,
        # attributes to appear in the resulting task (later transforms will add the
        # common attributes)
        Optional("attributes"): {str: object},
        # relative path (from config.path) to the file task was defined in
        Optional("job-from"): str,
        # The `run_on_projects` attribute, defaulting to "all". This dictates the
        # projects on which this task should be included in the target task set.
        # See the attributes documentation for details.
        #
        # Note that the special case 'built-projects', the default, uses the parent
        # build task's run-on-projects, meaning that tests run only on platforms
        # that are built.
        Optional("run-on-projects"): optionally_keyed_by(
            "app",
            "subtest",
            "test-platform",
            "test-name",
            "variant",
            Any([str], "built-projects"),
        ),
        # When set only run on projects where the build would already be running.
        # This ensures tasks where this is True won't be the cause of the build
        # running on a project it otherwise wouldn't have.
        Optional("built-projects-only"): bool,
        # the sheriffing tier for this task (default: set based on test platform)
        Optional("tier"): optionally_keyed_by(
            "test-platform", "variant", "app", "subtest", Any(int, "default")
        ),
        # number of chunks to create for this task. This can be keyed by test
        # platform by passing a dictionary in the `by-test-platform` key. If the
        # test platform is not found, the key 'default' will be tried.
        Required("chunks"): optionally_keyed_by(
            "test-platform", "variant", Any(int, "dynamic")
        ),
        # Custom 'test_manifest_loader' to use, overriding the one configured in the
        # parameters. When 'null', no test chunking will be performed. Can also
        # be used to disable "manifest scheduling".
        Optional("test-manifest-loader"): Any(None, *list(manifest_loaders)),
        # the time (with unit) after which this task is deleted; default depends on
        # the branch (see below)
        Optional("expires-after"): str,
        # The different configurations that should be run against this task, defined
        # in the TEST_VARIANTS object in the variant.py transforms.
        Optional("variants"): [str],
        # Whether to run this task without any variants applied.
        Required("run-without-variant"): optionally_keyed_by("test-platform", bool),
        # The EC2 instance size to run these tests on.
        Required("instance-size"): optionally_keyed_by(
            "test-platform", Any("default", "large", "xlarge")
        ),
        # type of virtualization or hardware required by test.
        Required("virtualization"): optionally_keyed_by(
            "test-platform", Any("virtual", "virtual-with-gpu", "hardware")
        ),
        # Whether the task requires loopback audio or video (whatever that may mean
        # on the platform)
        Required("loopback-audio"): bool,
        Required("loopback-video"): bool,
        # Whether the test can run using a software GL implementation on Linux
        # using the GL compositor. May not be used with "legacy" sized instances
        # due to poor LLVMPipe performance (bug 1296086). Defaults to true for
        # unit tests on linux platforms and false otherwise
        Optional("allow-software-gl-layers"): bool,
        # For tasks that will run in docker-worker, this is the
        # name of the docker image or in-tree docker image to run the task in. If
        # in-tree, then a dependency will be created automatically. This is
        # generally `desktop-test`, or an image that acts an awful lot like it.
        Required("docker-image"): optionally_keyed_by(
            "test-platform",
            Any(
                # a raw Docker image path (repo/image:tag)
                str,
                # an in-tree generated docker image (from `taskcluster/docker/<name>`)
                {"in-tree": str},
                # an indexed docker image
                {"indexed": str},
            ),
        ),
        # seconds of runtime after which the task will be killed. Like 'chunks',
        # this can be keyed by test platform, but also variant.
        Required("max-run-time"): optionally_keyed_by(
            "test-platform", "subtest", "variant", "app", int
        ),
        # the exit status code that indicates the task should be retried
        Optional("retry-exit-status"): [int],
        # Whether to perform a gecko checkout.
        Required("checkout"): bool,
        # Whether to perform a machine reboot after test is done
        Optional("reboot"): Any(False, "always", "on-exception", "on-failure"),
        # What to run
        Required("mozharness"): {
            # the mozharness script used to run this task
            Required("script"): optionally_keyed_by("test-platform", str),
            # the config files required for the task
            Required("config"): optionally_keyed_by("test-platform", [str]),
            # mochitest flavor for mochitest runs
            Optional("mochitest-flavor"): str,
            # any additional actions to pass to the mozharness command
            Optional("actions"): [str],
            # additional command-line options for mozharness, beyond those
            # automatically added
            Required("extra-options"): optionally_keyed_by("test-platform", [str]),
            # the artifact name (including path) to test on the build task; this is
            # generally set in a per-kind transformation
            Optional("build-artifact-name"): str,
            Optional("installer-url"): str,
            # If not false, tooltool downloads will be enabled via relengAPIProxy
            # for either just public files, or all files. Not supported on Windows
            Required("tooltool-downloads"): Any(
                False,
                "public",
                "internal",
            ),
            # Add --blob-upload-branch=<project> mozharness parameter
            Optional("include-blob-upload-branch"): bool,
            # The setting for --download-symbols (if omitted, the option will not
            # be passed to mozharness)
            Optional("download-symbols"): Any(True, "ondemand"),
            # If set, then MOZ_NODE_PATH=/usr/local/bin/node is included in the
            # environment. This is more than just a helpful path setting -- it
            # causes xpcshell tests to start additional servers, and runs
            # additional tests.
            Required("set-moz-node-path"): bool,
            # If true, include chunking information in the command even if the number
            # of chunks is 1
            Required("chunked"): optionally_keyed_by("test-platform", bool),
            Required("requires-signed-builds"): optionally_keyed_by(
                "test-platform", "variant", bool
            ),
        },
        # The set of test manifests to run.
        Optional("test-manifests"): Any(
            [str],
            {"active": [str], "skipped": [str]},
        ),
        # The current chunk (if chunking is enabled).
        Optional("this-chunk"): int,
        # os user groups for test task workers; required scopes, will be
        # added automatically
        Optional("os-groups"): optionally_keyed_by("test-platform", [str]),
        Optional("run-as-administrator"): optionally_keyed_by("test-platform", bool),
        # -- values supplied by the task-generation infrastructure
        # the platform of the build this task is testing
        Required("build-platform"): str,
        # the label of the build task generating the materials to test
        Required("build-label"): str,
        # the label of the signing task generating the materials to test.
        # Signed builds are used in xpcshell tests on Windows, for instance.
        Optional("build-signing-label"): optionally_keyed_by("variant", str),
        # the build's attributes
        Required("build-attributes"): {str: object},
        # the platform on which the tests will run
        Required("test-platform"): str,
        # limit the test-platforms (as defined in test-platforms.yml)
        # that the test will run on
        Optional("limit-platforms"): optionally_keyed_by("app", "subtest", [str]),
        # the name of the test (the key in tests.yml)
        Required("test-name"): str,
        # the product name, defaults to firefox
        Optional("product"): str,
        # conditional files to determine when these tests should be run
        Exclusive("when", "optimization"): {
            Optional("files-changed"): [str],
        },
        # Optimization to perform on this task during the optimization phase.
        # Optimizations are defined in taskcluster/gecko_taskgraph/optimize.py.
        Exclusive("optimization", "optimization"): OptimizationSchema,
        # The SCHEDULES component for this task; this defaults to the suite
        # (not including the flavor) but can be overridden here.
        Exclusive("schedules-component", "optimization"): Any(
            str,
            [str],
        ),
        Optional("worker-type"): optionally_keyed_by(
            "test-platform",
            Any(str, None),
        ),
        Optional(
            "require-signed-extensions",
            description="Whether the build being tested requires extensions be signed.",
        ): optionally_keyed_by("release-type", "test-platform", bool),
        # The target name, specifying the build artifact to be tested.
        # If None or not specified, a transform sets the target based on OS:
        # target.dmg (Mac), target.apk (Android), target.tar.bz2 (Linux),
        # or target.zip (Windows).
        Optional("target"): optionally_keyed_by(
            "app",
            "test-platform",
            "variant",
            Any(
                str,
                None,
                {Required("index"): str, Required("name"): str},
            ),
        ),
        # A list of artifacts to install from 'fetch' tasks. Validation deferred
        # to 'job' transforms.
        Optional("fetches"): object,
        # Raptor / browsertime specific keys, defer validation to 'raptor.py'
        # transform.
        Optional("raptor"): object,
        # Raptor / browsertime specific keys that need to be here since 'raptor' schema
        # is evaluated *before* test_description_schema
        Optional("app"): str,
        Optional("subtest"): str,
        # Define if a given task supports artifact builds or not, see bug 1695325.
        Optional("supports-artifact-builds"): bool,
    }
)
+
+
@transforms.add
def handle_keyed_by_mozharness(config, tasks):
    """Resolve a mozharness field if it is keyed by something"""
    keyed_fields = (
        "mozharness",
        "mozharness.chunked",
        "mozharness.config",
        "mozharness.extra-options",
        "mozharness.script",
    )
    for task in tasks:
        for keyed_field in keyed_fields:
            resolve_keyed_by(
                task,
                keyed_field,
                item_name=task["test-name"],
                enforce_single_match=False,
            )
        yield task
+
+
@transforms.add
def set_defaults(config, tasks):
    """Fill in default values for optional fields of the test description."""
    for task in tasks:
        if task["build-platform"].startswith("android"):
            # all Android test tasks download internal objects from tooltool
            task["mozharness"]["tooltool-downloads"] = "internal"
            task["mozharness"]["actions"] = ["get-secrets"]

            # loopback-video is always true for Android, but false for other
            # platform phyla
            task["loopback-video"] = True
            task["mozharness"]["set-moz-node-path"] = True

        # software-gl-layers is only meaningful on linux unittests, where it
        # defaults to True
        linux_unittest = task["test-platform"].startswith("linux") and task[
            "suite"
        ] not in ("talos", "raptor")
        if linux_unittest:
            task.setdefault("allow-software-gl-layers", True)
        else:
            task["allow-software-gl-layers"] = False

        # Defaults are built fresh for every task so mutable values (lists,
        # dicts) are never shared between tasks.
        task_defaults = {
            "try-name": task["test-name"],
            "os-groups": [],
            "run-as-administrator": False,
            "chunks": 1,
            "run-on-projects": "built-projects",
            "built-projects-only": False,
            "instance-size": "default",
            "max-run-time": 3600,
            "reboot": False,
            "virtualization": "virtual",
            "loopback-audio": False,
            "loopback-video": False,
            "limit-platforms": [],
            "docker-image": {"in-tree": "ubuntu1804-test"},
            "checkout": False,
            "require-signed-extensions": False,
            "run-without-variant": True,
            "variants": [],
            "supports-artifact-builds": True,
        }
        for key, value in task_defaults.items():
            task.setdefault(key, value)

        mozharness_defaults = {
            "extra-options": [],
            "requires-signed-builds": False,
            "tooltool-downloads": "public",
            "set-moz-node-path": False,
            "chunked": False,
        }
        for key, value in mozharness_defaults.items():
            task["mozharness"].setdefault(key, value)

        yield task
+
+
+transforms.add_validate(test_description_schema)
+
+
@transforms.add
def run_variant_transforms(config, tasks):
    """Run the variant transforms as early as possible so that later
    transforms can key by variant."""
    variant_mod = import_module("gecko_taskgraph.transforms.test.variant")
    for task in tasks:
        sequence = TransformSequence()
        sequence.add(variant_mod.transforms)
        yield from sequence(config, [task])
+
+
@transforms.add
def resolve_keys(config, tasks):
    """Resolve fields that may be keyed by release-type or variant."""
    keyed_fields = (
        "require-signed-extensions",
        "run-without-variant",
        "suite",
        "suite.name",
    )
    for task in tasks:
        context = {
            "release-type": config.params["release_type"],
            "variant": task["attributes"].get("unittest_variant"),
        }
        for field in keyed_fields:
            resolve_keyed_by(
                task,
                field,
                item_name=task["test-name"],
                enforce_single_match=False,
                **context,
            )
        yield task
+
+
@transforms.add
def run_remaining_transforms(config, tasks):
    """Runs other transform files next to this module."""
    # Modules to load transforms from, in order. The optional predicate
    # decides whether a module applies to a given task.
    transform_modules = (
        ("raptor", lambda t: t["suite"] == "raptor"),
        ("other", None),
        ("worker", None),
        # These transforms should always run last as there is never any
        # difference in configuration from one chunk to another (other than
        # chunk number).
        ("chunk", None),
    )

    for task in tasks:
        sequence = TransformSequence()
        for name, applies in transform_modules:
            if applies is not None and not applies(task):
                continue

            module = import_module(f"gecko_taskgraph.transforms.test.{name}")
            sequence.add(module.transforms)

        yield from sequence(config, [task])
+
+
@transforms.add
def make_job_description(config, tasks):
    """Convert *test* descriptions to *job* descriptions (input to
    gecko_taskgraph.transforms.job).

    Builds the job label, try-name, attributes, dependencies, treeherder
    metadata and optimization, then embeds the remaining test description
    under ``run.test`` for the mozharness-test run-using.
    """

    for task in tasks:
        attributes = task.get("attributes", {})

        # Mobile tasks embed the app name in the label so the same test-name
        # can target multiple apps.
        mobile = get_mobile_project(task)
        if mobile and (mobile not in task["test-name"]):
            label = "{}-{}-{}-{}".format(
                config.kind, task["test-platform"], mobile, task["test-name"]
            )
        else:
            label = "{}-{}-{}".format(
                config.kind, task["test-platform"], task["test-name"]
            )

        try_name = task["try-name"]
        if attributes.get("unittest_variant"):
            suffix = task.pop("variant-suffix")
            label += suffix
            try_name += suffix

        if task["chunks"] > 1:
            label += "-{}".format(task["this-chunk"])

        build_label = task["build-label"]

        # Perf suites use a dedicated try-name attribute; everything else is
        # addressed via unittest_try_name.
        if task["suite"] == "talos":
            attr_try_name = "talos_try_name"
        elif task["suite"] == "raptor":
            attr_try_name = "raptor_try_name"
        else:
            attr_try_name = "unittest_try_name"

        attr_build_platform, attr_build_type = task["build-platform"].split("/", 1)
        attributes.update(
            {
                "build_platform": attr_build_platform,
                "build_type": attr_build_type,
                "test_platform": task["test-platform"],
                "test_chunk": str(task["this-chunk"]),
                "supports-artifact-builds": task["supports-artifact-builds"],
                attr_try_name: try_name,
            }
        )

        if "test-manifests" in task:
            attributes["test_manifests"] = task["test-manifests"]

        jobdesc = {}
        name = "{}-{}".format(task["test-platform"], task["test-name"])
        jobdesc["name"] = name
        jobdesc["label"] = label
        jobdesc["description"] = task["description"]
        jobdesc["attributes"] = attributes
        jobdesc["dependencies"] = {"build": build_label}
        jobdesc["job-from"] = task["job-from"]

        # NOTE(review): 'fetches' is copied here and handled again near the
        # bottom of this function, where it is also popped out of the test
        # description embedded under run.test -- confirm the duplication is
        # intentional.
        if task.get("fetches"):
            jobdesc["fetches"] = task["fetches"]

        if task["mozharness"]["requires-signed-builds"] is True:
            jobdesc["dependencies"]["build-signing"] = task["build-signing-label"]

        if "expires-after" in task:
            jobdesc["expires-after"] = task["expires-after"]

        jobdesc["routes"] = []
        jobdesc["run-on-projects"] = sorted(task["run-on-projects"])
        jobdesc["scopes"] = []
        jobdesc["tags"] = task.get("tags", {})
        jobdesc["extra"] = {
            "chunks": {
                "current": task["this-chunk"],
                "total": task["chunks"],
            },
            "suite": attributes["unittest_suite"],
            "test-setting": task.pop("test-setting"),
        }
        jobdesc["treeherder"] = {
            "symbol": task["treeherder-symbol"],
            "kind": "test",
            "tier": task["tier"],
            "platform": task.get("treeherder-machine-platform", task["build-platform"]),
        }

        schedules = task.get("schedules-component", [])
        if task.get("when"):
            # This may still be used by comm-central.
            jobdesc["when"] = task["when"]
        elif "optimization" in task:
            jobdesc["optimization"] = task["optimization"]
        elif set(schedules) & set(INCLUSIVE_COMPONENTS):
            jobdesc["optimization"] = {"test-inclusive": schedules}
        else:
            jobdesc["optimization"] = {"test": schedules}

        # Everything left in the test description travels with the job under
        # run.test for the mozharness-test run-using.
        run = jobdesc["run"] = {}
        run["using"] = "mozharness-test"
        run["test"] = task

        if "workdir" in task:
            run["workdir"] = task.pop("workdir")

        jobdesc["worker-type"] = task.pop("worker-type")

        if "worker" in task:
            jobdesc["worker"] = task.pop("worker")

        if task.get("fetches"):
            jobdesc["fetches"] = task.pop("fetches")

        yield jobdesc
+
+
def normpath(path):
    """Return *path* with every forward slash replaced by a backslash."""
    return "\\".join(path.split("/"))
+
+
def get_firefox_version():
    """Return the first line of browser/config/version.txt, stripped.

    The path is relative to the current working directory; presumably the
    gecko checkout root -- callers must ensure the cwd accordingly.
    """
    # Specify the encoding explicitly rather than relying on the locale.
    with open("browser/config/version.txt", encoding="utf-8") as f:
        return f.readline().strip()
diff --git a/taskcluster/gecko_taskgraph/transforms/test/chunk.py b/taskcluster/gecko_taskgraph/transforms/test/chunk.py
new file mode 100644
index 0000000000..f6442e3755
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/test/chunk.py
@@ -0,0 +1,262 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import json
+
+import taskgraph
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.attributes import keymatch
+from taskgraph.util.treeherder import join_symbol, split_symbol
+
+from gecko_taskgraph.util.attributes import is_try
+from gecko_taskgraph.util.chunking import (
+ DefaultLoader,
+ chunk_manifests,
+ get_manifest_loader,
+ get_runtimes,
+ guess_mozinfo_from_task,
+)
+from gecko_taskgraph.util.copy_task import copy_task
+from gecko_taskgraph.util.perfile import perfile_number_of_chunks
+
+DYNAMIC_CHUNK_DURATION = 20 * 60 # seconds
+"""The approximate time each test chunk should take to run."""
+
+
+DYNAMIC_CHUNK_MULTIPLIER = {
+ # Desktop xpcshell tests run in parallel. Reduce the total runtime to
+ # compensate.
+ "^(?!android).*-xpcshell.*": 0.2,
+}
+"""A multiplication factor to tweak the total duration per platform / suite."""
+
+
+transforms = TransformSequence()
+
+
@transforms.add
def set_test_verify_chunks(config, tasks):
    """Set the number of chunks we use for test-verify."""
    for task in tasks:
        if task["suite"].startswith(("test-verify", "test-coverage")):
            try_config = config.params.get("try_task_config", {}) or {}
            env = try_config.get("templates", {}).get("env", {})
            task["chunks"] = perfile_number_of_chunks(
                is_try(config.params),
                env.get("MOZHARNESS_TEST_PATHS", ""),
                config.params.get("head_repository", ""),
                config.params.get("head_rev", ""),
                task["test-name"],
            )

            # limit the number of chunks we run for test-verify mode because
            # test-verify is comprehensive and takes a lot of time, if we have
            # >30 tests changed, this is probably an import of external tests,
            # or a patch renaming/moving files in bulk
            maximum_number_verify_chunks = 3
            task["chunks"] = min(task["chunks"], maximum_number_verify_chunks)

        yield task
+
+
@transforms.add
def set_test_manifests(config, tasks):
    """Determine the set of test manifests that should run in this task."""

    for task in tasks:
        # When a task explicitly requests no 'test_manifest_loader', test
        # resolving will happen at test runtime rather than in the taskgraph.
        if "test-manifest-loader" in task and task["test-manifest-loader"] is None:
            yield task
            continue

        # Set 'tests_grouped' to "1", so we can differentiate between suites that are
        # chunked at the test runtime and those that are chunked in the taskgraph.
        task.setdefault("tags", {})["tests_grouped"] = "1"

        if taskgraph.fast:
            # We want to avoid evaluating manifests when taskgraph.fast is set. But
            # manifests are required for dynamic chunking. Just set the number of
            # chunks to one in this case.
            if task["chunks"] == "dynamic":
                task["chunks"] = 1
            yield task
            continue

        # Manifests supplied explicitly in the test description win; normalize
        # a bare list into the active/skipped dict shape.
        manifests = task.get("test-manifests")
        if manifests:
            if isinstance(manifests, list):
                task["test-manifests"] = {"active": manifests, "skipped": []}
            yield task
            continue

        mozinfo = guess_mozinfo_from_task(
            task, config.params.get("head_repository", "")
        )

        loader_name = task.pop(
            "test-manifest-loader", config.params["test_manifest_loader"]
        )
        loader = get_manifest_loader(loader_name, config.params)

        task["test-manifests"] = loader.get_manifests(
            task["suite"],
            frozenset(mozinfo.items()),
        )

        # When scheduling with test paths, we often find manifests scheduled but all tests
        # are skipped on a given config. This will remove the task from the task set if
        # no manifests have active tests for the given task/config
        mh_test_paths = {}
        if "MOZHARNESS_TEST_PATHS" in config.params.get("try_task_config", {}).get(
            "env", {}
        ):
            mh_test_paths = json.loads(
                config.params["try_task_config"]["env"]["MOZHARNESS_TEST_PATHS"]
            )

        if task["attributes"]["unittest_suite"] in mh_test_paths.keys():
            input_paths = mh_test_paths[task["attributes"]["unittest_suite"]]
            remaining_manifests = []

            # if we have web-platform tests incoming, just yield task
            for m in input_paths:
                if m.startswith("testing/web-platform/tests/"):
                    if not isinstance(loader, DefaultLoader):
                        task["chunks"] = "dynamic"
                    # NOTE(review): execution continues below after this break,
                    # so a task with both wpt and matching non-wpt paths may be
                    # yielded a second time -- confirm that is intentional.
                    yield task
                    break

            # input paths can exist in other directories (i.e. [../../dir/test.js])
            # we need to look for all [active] manifests that include tests in the path
            for m in input_paths:
                if [tm for tm in task["test-manifests"]["active"] if tm.startswith(m)]:
                    remaining_manifests.append(m)

            # look in the 'other' manifests
            # NOTE(review): assumes the loader's result includes an
            # 'other_dirs' mapping of manifest -> directories -- confirm
            # against the manifest loader implementation.
            for m in input_paths:
                man = m
                for tm in task["test-manifests"]["other_dirs"]:
                    matched_dirs = [
                        dp
                        for dp in task["test-manifests"]["other_dirs"].get(tm)
                        if dp.startswith(man)
                    ]
                    if matched_dirs:
                        if tm not in task["test-manifests"]["active"]:
                            continue
                        if m not in remaining_manifests:
                            remaining_manifests.append(m)

            # Nothing under the requested paths is active for this config;
            # drop the task entirely.
            if remaining_manifests == []:
                continue

        # The default loader loads all manifests. If we use a non-default
        # loader, we'll only run some subset of manifests and the hardcoded
        # chunk numbers will no longer be valid. Dynamic chunking should yield
        # better results.
        if not isinstance(loader, DefaultLoader):
            task["chunks"] = "dynamic"

        yield task
+
+
@transforms.add
def resolve_dynamic_chunks(config, tasks):
    """Determine how many chunks are needed to handle the given set of manifests."""

    for task in tasks:
        if task["chunks"] != "dynamic":
            yield task
            continue

        if not task.get("test-manifests"):
            raise Exception(
                "{} must define 'test-manifests' to use dynamic chunking!".format(
                    task["test-name"]
                )
            )

        active = task["test-manifests"]["active"]
        runtimes = {
            manifest: runtime
            for manifest, runtime in get_runtimes(
                task["test-platform"], task["suite"]
            ).items()
            if manifest in active
        }

        # Truncate runtimes that are above the desired chunk duration. They
        # will be assigned to a chunk on their own and the excess duration
        # shouldn't cause additional chunks to be needed.
        capped = [min(DYNAMIC_CHUNK_DURATION, r) for r in runtimes.values()]
        average = round(sum(capped) / len(capped), 2) if capped else 0
        total = sum(capped)

        # If there are manifests missing from the runtimes data, fill them in
        # with the average of all present manifests.
        unknown = [manifest for manifest in active if manifest not in runtimes]
        total += average * len(unknown)

        # Apply any chunk multipliers if found.
        key = "{}-{}".format(task["test-platform"], task["test-name"])
        multipliers = keymatch(DYNAMIC_CHUNK_MULTIPLIER, key)
        if len(multipliers) > 1:
            raise Exception(
                "Multiple matching values for {} found while "
                "determining dynamic chunk multiplier!".format(key)
            )
        if multipliers:
            total = total * multipliers[0]

        chunk_count = int(round(total / DYNAMIC_CHUNK_DURATION))

        # Make sure we never exceed the number of manifests, nor have a chunk
        # length of 0.
        task["chunks"] = min(chunk_count, len(active)) or 1
        yield task
+
+
@transforms.add
def split_chunks(config, tasks):
    """Based on the 'chunks' key, split tests up into chunks by duplicating
    them and assigning 'this-chunk' appropriately and updating the treeherder
    symbol.
    """

    for task in tasks:
        # If test-manifests are set, chunk them ahead of time to avoid running
        # the algorithm more than once.
        chunked_manifests = None
        if "test-manifests" in task:
            manifests = task["test-manifests"]
            chunked_manifests = chunk_manifests(
                task["suite"],
                task["test-platform"],
                task["chunks"],
                manifests["active"],
            )

            # Add all skipped manifests to the first chunk of backstop pushes
            # so they still show up in the logs. They won't impact runtime much
            # and this way tools like ActiveData are still aware that they
            # exist.
            if config.params["backstop"] and manifests["active"]:
                chunked_manifests[0].extend(manifests["skipped"])

        total_chunks = task["chunks"]
        for index in range(total_chunks):
            chunk_number = index + 1

            # copy the test and update with the chunk number
            clone = copy_task(task)
            clone["this-chunk"] = chunk_number

            if chunked_manifests is not None:
                clone["test-manifests"] = sorted(chunked_manifests[index])

            group, symbol = split_symbol(clone["treeherder-symbol"])
            if total_chunks > 1 or not symbol:
                # add the chunk number to the TH symbol
                symbol += str(chunk_number)
            clone["treeherder-symbol"] = join_symbol(group, symbol)

            yield clone
diff --git a/taskcluster/gecko_taskgraph/transforms/test/other.py b/taskcluster/gecko_taskgraph/transforms/test/other.py
new file mode 100644
index 0000000000..1c08d290e4
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/test/other.py
@@ -0,0 +1,1081 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import hashlib
+import json
+import re
+
+from mozbuild.schedules import INCLUSIVE_COMPONENTS
+from mozbuild.util import ReadOnlyDict
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.attributes import keymatch
+from taskgraph.util.keyed_by import evaluate_keyed_by
+from taskgraph.util.schema import Schema, resolve_keyed_by
+from taskgraph.util.taskcluster import get_artifact_path, get_index_url
+from voluptuous import Any, Optional, Required
+
+from gecko_taskgraph.transforms.test.variant import TEST_VARIANTS
+from gecko_taskgraph.util.platforms import platform_family
+
+transforms = TransformSequence()
+
+
+@transforms.add
def limit_platforms(config, tasks):
    """Drop tasks whose 'limit-platforms' patterns do not match the current
    test platform; tasks without limits always pass through."""
    for task in tasks:
        limits = task["limit-platforms"]
        if not limits:
            yield task
        elif keymatch({pattern: pattern for pattern in limits}, task["test-platform"]):
            yield task
+
+
+@transforms.add
def handle_suite_category(config, tasks):
    """Normalize 'suite' down to a plain name, record suite/category in the
    task attributes, and add the mozharness suite-selection flag appropriate
    for the harness script."""
    for task in tasks:
        task.setdefault("suite", {})

        if isinstance(task["suite"], str):
            task["suite"] = {"name": task["suite"]}

        suite = task["suite"].setdefault("name", task["test-name"])
        category = task["suite"].setdefault("category", suite)

        attributes = task.setdefault("attributes", {})
        attributes["unittest_suite"] = suite
        attributes["unittest_category"] = category

        # Work out which command-line flag selects the suite for this script;
        # test-verify/test-coverage suites manage this themselves.
        script = task["mozharness"]["script"]
        flag = None
        if suite.startswith(("test-verify", "test-coverage")):
            flag = None
        elif script in ("android_emulator_unittest.py", "android_hardware_unittest.py"):
            flag = "--test-suite"
        elif script == "desktop_unittest.py":
            flag = f"--{category}-suite"

        if flag:
            options = task["mozharness"].setdefault("extra-options", [])
            if not any(opt.startswith(flag) for opt in options):
                options.append(f"{flag}={suite}")

        # Downstream transforms only care about the bare suite name.
        task["suite"] = suite
        yield task
+
+
+@transforms.add
def setup_talos(config, tasks):
    """Add options that are specific to talos jobs (identified by suite=talos)"""
    for task in tasks:
        if task["suite"] != "talos":
            yield task
            continue

        opts = task.setdefault("mozharness", {}).setdefault("extra-options", [])
        opts.append("--use-talos-json")

        # win7 needs to test skip
        if task["build-platform"].startswith("win32"):
            opts.extend(["--add-option", "--setpref,gfx.direct2d.disabled=true"])

        project = config.params.get("project", None)
        if project:
            opts.append(f"--project={project}")

        yield task
+
+
+@transforms.add
def setup_browsertime_flag(config, tasks):
    """Optionally add `--browsertime` flag to Raptor pageload tests."""

    enabled = config.params["try_task_config"].get("browsertime", False)

    for task in tasks:
        if not enabled or task["suite"] != "raptor":
            yield task
            continue

        symbol = task["treeherder-symbol"]
        if symbol.startswith("Rap"):
            # The Rap group is subdivided as Rap{-fenix,-refbrow(...),
            # so `taskgraph.util.treeherder.replace_group` isn't appropriate.
            task["treeherder-symbol"] = symbol.replace("Rap", "Btime", 1)

        task.setdefault("mozharness", {}).setdefault("extra-options", []).append(
            "--browsertime"
        )

        yield task
+
+
+@transforms.add
def handle_artifact_prefix(config, tasks):
    """Copy the build's `artifact_prefix` attribute onto the test task,
    without clobbering a value the task already carries."""
    for task in tasks:
        prefix = task["build-attributes"].get("artifact_prefix")
        if prefix:
            task.setdefault("attributes", {}).setdefault("artifact_prefix", prefix)
        yield task
+
+
+@transforms.add
def set_treeherder_machine_platform(config, tasks):
    """Set the appropriate task.extra.treeherder.machine.platform"""
    # Build platforms whose treeherder machine platform is spelled differently
    # (e.g. asan builds). For most desktop platforms this table is not used
    # for "regular" builds, so we'll always pick the test platform below.
    # On macOS though, the regular builds are in the table. This causes a
    # conflict in `verify_task_graph_symbol` once you add a new test platform
    # based on regular macOS builds, such as for QR. Since it's unclear if the
    # regular macOS builds can be removed from the table, workaround the
    # issue for QR.
    translation = {
        "macosx1100-64/opt": "osx-1100/opt",
        "macosx1100-64-shippable/opt": "osx-1100-shippable/opt",
        "win64-asan/opt": "windows10-64/asan",
        "win64-aarch64/opt": "windows10-aarch64/opt",
    }
    # android-em x86_64 QR flavours whose dots must become dashes; checked
    # before the generic "-qr" passthrough below.
    dotted_android_qr = (
        "android-em-7.0-x86_64-qr",
        "android-em-7.0-x86_64-shippable-qr",
        "android-em-7.0-x86_64-lite-qr",
        "android-em-7.0-x86_64-shippable-lite-qr",
    )
    for task in tasks:
        platform = task["test-platform"]
        if "android" in platform and "pgo/opt" in platform:
            result = platform.replace("-pgo/opt", "/pgo")
        elif any(flavour in platform for flavour in dotted_android_qr):
            result = platform.replace(".", "-")
        elif "-qr" in platform or "android-hw" in platform:
            # Other QR and android hardware platforms pass through unchanged.
            result = platform
        elif "android-em-7.0-x86_64" in platform or "android-em-7.0-x86" in platform:
            result = platform.replace(".", "-")
        # Bug 1602863 - must separately define linux64/asan and
        # linux1804-64/asan otherwise causes an exception during taskgraph
        # generation about duplicate treeherder platform/symbol.
        elif "linux64-asan/opt" in platform:
            result = "linux64/asan"
        elif "linux1804-asan/opt" in platform:
            result = "linux1804-64/asan"
        else:
            result = translation.get(task["build-platform"], platform)
        task["treeherder-machine-platform"] = result
        yield task
+
+
+@transforms.add
def set_download_symbols(config, tasks):
    """Download symbols eagerly for debug builds and only on demand for
    everything else. ASAN/TSAN builds don't produce symbol zips, so any
    download-symbols setting is dropped there entirely (see bug 1283879)."""
    for task in tasks:
        build = task["build-platform"]
        if task["test-platform"].split("/")[-1] == "debug":
            task["mozharness"]["download-symbols"] = True
        elif "asan" in build or "tsan" in build:
            task["mozharness"].pop("download-symbols", None)
        else:
            task["mozharness"]["download-symbols"] = "ondemand"
        yield task
+
+
+@transforms.add
def handle_keyed_by(config, tasks):
    """Resolve fields that can be keyed by platform, etc."""
    # Fields that may still be expressed with `by-*` keyed-by syntax at this
    # point in the transform pipeline.
    fields = (
        "instance-size",
        "docker-image",
        "max-run-time",
        "chunks",
        "suite",
        "run-on-projects",
        "os-groups",
        "run-as-administrator",
        "workdir",
        "worker-type",
        "virtualization",
        "fetches.fetch",
        "fetches.toolchain",
        "target",
        "webrender-run-on-projects",
        "mozharness.requires-signed-builds",
        "build-signing-label",
    )
    for task in tasks:
        for field in fields:
            resolve_keyed_by(
                task,
                field,
                item_name=task["test-name"],
                enforce_single_match=False,
                project=config.params["project"],
                variant=task["attributes"].get("unittest_variant"),
            )
        yield task
+
+
+@transforms.add
def set_target(config, tasks):
    """Determine the installer artifact for each test, defaulting by build
    platform, then record either a mozharness installer URL (for index-based
    dict targets) or a build artifact name (for plain filenames)."""
    for task in tasks:
        target = task.get("target")
        if not target:
            # Fall back to the conventional installer name per platform.
            build_platform = task["build-platform"]
            if build_platform.startswith("macosx"):
                target = "target.dmg"
            elif build_platform.startswith("android"):
                target = "target.apk"
            elif build_platform.startswith("win"):
                target = "target.zip"
            else:
                target = "target.tar.bz2"

        if isinstance(target, dict):
            # TODO Remove hardcoded mobile artifact prefix
            index_url = get_index_url(target["index"])
            task["mozharness"]["installer-url"] = "{}/artifacts/public/{}".format(
                index_url, target["name"]
            )
        else:
            task["mozharness"]["build-artifact-name"] = get_artifact_path(task, target)

        yield task
+
+
+@transforms.add
def setup_browsertime(config, tasks):
    """Configure browsertime dependencies for Raptor pageload tests that have
    `--browsertime` extra option."""

    for task in tasks:
        # We need to make non-trivial changes to various fetches, and our
        # `by-test-platform` may not be "compatible" with existing
        # `by-test-platform` filters. Therefore we do everything after
        # `handle_keyed_by` so that existing fields have been resolved down to
        # simple lists. But we use the `by-test-platform` machinery to express
        # filters so that when the time comes to move browsertime into YAML
        # files, the transition is straight-forward.
        extra_options = task.get("mozharness", {}).get("extra-options", [])

        # Only raptor tasks (excluding webext runs) need browsertime wiring.
        if task["suite"] != "raptor" or "--webext" in extra_options:
            yield task
            continue

        # Toolchain artifacts (browsertime itself, geckodriver, node) keyed by
        # test platform.
        ts = {
            "by-test-platform": {
                "android.*": ["browsertime", "linux64-geckodriver", "linux64-node-16"],
                "linux.*": ["browsertime", "linux64-geckodriver", "linux64-node-16"],
                "macosx.*": ["browsertime", "macosx64-geckodriver", "macosx64-node-16"],
                "windows.*aarch64.*": [
                    "browsertime",
                    "win32-geckodriver",
                    "win32-node-16",
                ],
                "windows.*-32.*": ["browsertime", "win32-geckodriver", "win32-node-16"],
                "windows.*-64.*": ["browsertime", "win64-geckodriver", "win64-node-16"],
            },
        }

        task.setdefault("fetches", {}).setdefault("toolchain", []).extend(
            evaluate_keyed_by(ts, "fetches.toolchain", task)
        )

        # Plain fetch artifacts (ffmpeg, used for video capture) keyed by test
        # platform; extended below with browser-specific drivers as needed.
        fs = {
            "by-test-platform": {
                "android.*": ["linux64-ffmpeg-4.4.1"],
                "linux.*": ["linux64-ffmpeg-4.4.1"],
                "macosx.*": ["mac64-ffmpeg-4.4.1"],
                "windows.*aarch64.*": ["win64-ffmpeg-4.4.1"],
                "windows.*-32.*": ["win64-ffmpeg-4.4.1"],
                "windows.*-64.*": ["win64-ffmpeg-4.4.1"],
            },
        }

        # Chromedriver fetches added only when testing Google Chrome; several
        # versions are shipped so the harness can pick a compatible one.
        cd_fetches = {
            "android.*": [
                "linux64-chromedriver-109",
                "linux64-chromedriver-110",
                "linux64-chromedriver-111",
                "linux64-chromedriver-112",
                "linux64-chromedriver-113",
                "linux64-chromedriver-114",
            ],
            "linux.*": [
                "linux64-chromedriver-112",
                "linux64-chromedriver-113",
                "linux64-chromedriver-114",
            ],
            "macosx.*": [
                "mac64-chromedriver-109",
                "mac64-chromedriver-110",
                "mac64-chromedriver-111",
                "mac64-chromedriver-112",
                "mac64-chromedriver-113",
                "mac64-chromedriver-114",
            ],
            "windows.*aarch64.*": [
                "win32-chromedriver-112",
                "win32-chromedriver-113",
                "win32-chromedriver-114",
            ],
            "windows.*-32.*": [
                "win32-chromedriver-112",
                "win32-chromedriver-113",
                "win32-chromedriver-114",
            ],
            "windows.*-64.*": [
                "win32-chromedriver-112",
                "win32-chromedriver-113",
                "win32-chromedriver-114",
            ],
        }

        # Chromium archives (these bundle a matching chromedriver, see below).
        chromium_fetches = {
            "linux.*": ["linux64-chromium"],
            "macosx.*": ["mac-chromium"],
            "windows.*aarch64.*": ["win32-chromium"],
            "windows.*-32.*": ["win32-chromium"],
            "windows.*-64.*": ["win64-chromium"],
        }

        # Path of the chromedriver binary inside the fetched archive, by OS
        # family; the "{}" placeholder is filled in downstream.
        cd_extracted_name = {
            "windows": "{}chromedriver.exe",
            "mac": "{}chromedriver",
            "default": "{}chromedriver",
        }

        if "--app=chrome" in extra_options or "--app=chrome-m" in extra_options:
            # Only add the chromedriver fetches when chrome is running
            for platform in cd_fetches:
                fs["by-test-platform"][platform].extend(cd_fetches[platform])
        if "--app=chromium" in extra_options or "--app=custom-car" in extra_options:
            for platform in chromium_fetches:
                fs["by-test-platform"][platform].extend(chromium_fetches[platform])

            # The chromedrivers for chromium are repackaged into the archives
            # that we get the chromium binary from so we always have a compatible
            # version.
            cd_extracted_name = {
                "windows": "chrome-win/chromedriver.exe",
                "mac": "chrome-mac/chromedriver",
                "default": "chrome-linux/chromedriver",
            }

        # Disable the Raptor install step
        if "--app=chrome-m" in extra_options:
            extra_options.append("--noinstall")

        task.setdefault("fetches", {}).setdefault("fetch", []).extend(
            evaluate_keyed_by(fs, "fetches.fetch", task)
        )

        extra_options.extend(
            (
                "--browsertime-browsertimejs",
                "$MOZ_FETCHES_DIR/browsertime/node_modules/browsertime/bin/browsertime.js",
            )
        ) # noqa: E501

        # Per-platform paths to the browsertime support binaries, passed
        # through to mozharness as extra options.
        eos = {
            "by-test-platform": {
                "windows.*": [
                    "--browsertime-node",
                    "$MOZ_FETCHES_DIR/node/node.exe",
                    "--browsertime-geckodriver",
                    "$MOZ_FETCHES_DIR/geckodriver.exe",
                    "--browsertime-chromedriver",
                    "$MOZ_FETCHES_DIR/" + cd_extracted_name["windows"],
                    "--browsertime-ffmpeg",
                    "$MOZ_FETCHES_DIR/ffmpeg-4.4.1-full_build/bin/ffmpeg.exe",
                ],
                "macosx.*": [
                    "--browsertime-node",
                    "$MOZ_FETCHES_DIR/node/bin/node",
                    "--browsertime-geckodriver",
                    "$MOZ_FETCHES_DIR/geckodriver",
                    "--browsertime-chromedriver",
                    "$MOZ_FETCHES_DIR/" + cd_extracted_name["mac"],
                    "--browsertime-ffmpeg",
                    "$MOZ_FETCHES_DIR/ffmpeg-macos/ffmpeg",
                ],
                "default": [
                    "--browsertime-node",
                    "$MOZ_FETCHES_DIR/node/bin/node",
                    "--browsertime-geckodriver",
                    "$MOZ_FETCHES_DIR/geckodriver",
                    "--browsertime-chromedriver",
                    "$MOZ_FETCHES_DIR/" + cd_extracted_name["default"],
                    "--browsertime-ffmpeg",
                    "$MOZ_FETCHES_DIR/ffmpeg-4.4.1-i686-static/ffmpeg",
                ],
            }
        }

        extra_options.extend(evaluate_keyed_by(eos, "mozharness.extra-options", task))

        yield task
+
+
def get_mobile_project(task):
    """Returns the mobile project of the specified task or None."""

    if not task["build-platform"].startswith("android"):
        return None

    mobile_projects = ("fenix", "geckoview", "refbrow", "chrome-m")

    # First try to find the project in the test name itself.
    for name in mobile_projects:
        if name in task["test-name"]:
            return name

    # Otherwise look at the (keyed-by-resolved) target artifact name.
    if "target" in task:
        resolve_keyed_by(
            task, "target", item_name=task["test-name"], enforce_single_match=False
        )
        target = task["target"]
        if target:
            if isinstance(target, dict):
                target = target["name"]

            for name in mobile_projects:
                if name in target:
                    return name

    return None
+
+
+@transforms.add
def disable_wpt_timeouts_on_autoland(config, tasks):
    """do not run web-platform-tests that are expected TIMEOUT on autoland"""
    on_autoland = config.params["project"] == "autoland"
    for task in tasks:
        if on_autoland and "web-platform-tests" in task["test-name"]:
            task["mozharness"].setdefault("extra-options", []).append("--skip-timeout")
        yield task
+
+
+@transforms.add
def enable_code_coverage(config, tasks):
    """Enable code coverage for the ccov build-platforms"""
    for task in tasks:
        if "ccov" in task["build-platform"]:
            # Do not run tests on fuzzing builds
            # NOTE(review): this `continue` skips the yield, dropping the task
            # from the graph entirely, which makes the run-on-projects
            # assignment above it dead — confirm that is intended.
            if "fuzzing" in task["build-platform"]:
                task["run-on-projects"] = []
                continue

            # Skip this transform for android code coverage builds.
            if "android" in task["build-platform"]:
                task.setdefault("fetches", {}).setdefault("toolchain", []).append(
                    "linux64-grcov"
                )
                task["mozharness"].setdefault("extra-options", []).append(
                    "--java-code-coverage"
                )
                yield task
                continue
            task["mozharness"].setdefault("extra-options", []).append("--code-coverage")
            # Coverage instrumentation slows tests down; use a bigger worker.
            task["instance-size"] = "xlarge"

            # Temporarily disable Mac tests on mozilla-central
            if "mac" in task["build-platform"]:
                task["run-on-projects"] = []

            # Ensure we always run on the projects defined by the build, unless the test
            # is try only or shouldn't run at all.
            if task["run-on-projects"] not in [[]]:
                task["run-on-projects"] = "built-projects"

            # Ensure we don't optimize test suites out.
            # We always want to run all test suites for coverage purposes.
            task.pop("schedules-component", None)
            task.pop("when", None)
            task["optimization"] = None

            # Add a toolchain and a fetch task for the grcov binary.
            if any(p in task["build-platform"] for p in ("linux", "osx", "win")):
                task.setdefault("fetches", {})
                task["fetches"].setdefault("fetch", [])
                task["fetches"].setdefault("toolchain", [])
                task["fetches"].setdefault("build", [])

                # Pick the grcov toolchain matching the build platform.
                if "linux" in task["build-platform"]:
                    task["fetches"]["toolchain"].append("linux64-grcov")
                elif "osx" in task["build-platform"]:
                    task["fetches"]["toolchain"].append("macosx64-grcov")
                elif "win" in task["build-platform"]:
                    task["fetches"]["toolchain"].append("win64-grcov")

                task["fetches"]["build"].append({"artifact": "target.mozinfo.json"})

            # Trim talos runs down to a single cycle so coverage collection
            # stays within the (extended) time budget.
            if "talos" in task["test-name"]:
                task["max-run-time"] = 7200
                if "linux" in task["build-platform"]:
                    task["docker-image"] = {"in-tree": "ubuntu1804-test"}
                task["mozharness"]["extra-options"].append("--add-option")
                task["mozharness"]["extra-options"].append("--cycles,1")
                task["mozharness"]["extra-options"].append("--add-option")
                task["mozharness"]["extra-options"].append("--tppagecycles,1")
                task["mozharness"]["extra-options"].append("--add-option")
                task["mozharness"]["extra-options"].append("--no-upload-results")
                task["mozharness"]["extra-options"].append("--add-option")
                task["mozharness"]["extra-options"].append("--tptimeout,15000")
            if "raptor" in task["test-name"]:
                task["max-run-time"] = 1800
        yield task
+
+
+@transforms.add
def handle_run_on_projects(config, tasks):
    """Handle translating `built-projects` appropriately"""
    for task in tasks:
        # "built-projects" is a placeholder meaning "wherever the build runs".
        if task["run-on-projects"] == "built-projects":
            task["run-on-projects"] = task["build-attributes"].get(
                "run_on_projects", ["all"]
            )

        if task.pop("built-projects-only", False):
            built = set(task["build-attributes"].get("run_on_projects", {"all"}))
            requested = set(task.get("run-on-projects", set()))

            # If 'all' exists in run-on-projects, then the intersection of both
            # is built-projects. Similarly if 'all' exists in built-projects,
            # the intersection is run-on-projects (so do nothing). When neither
            # contains 'all', take the actual set intersection.
            if "all" in requested:
                task["run-on-projects"] = sorted(built)
            elif "all" not in built:
                task["run-on-projects"] = sorted(requested & built)
        yield task
+
+
+@transforms.add
def handle_tier(config, tasks):
    """Set the tier based on policy for all test descriptions that do not
    specify a tier otherwise."""
    # Test platforms whose tasks default to tier 1; anything else defaults
    # to tier 2.
    tier1_platforms = frozenset(
        (
            "linux64/opt",
            "linux64/debug",
            "linux64-shippable/opt",
            "linux64-devedition/opt",
            "linux64-asan/opt",
            "linux64-qr/opt",
            "linux64-qr/debug",
            "linux64-shippable-qr/opt",
            "linux1804-64/opt",
            "linux1804-64/debug",
            "linux1804-64-shippable/opt",
            "linux1804-64-devedition/opt",
            "linux1804-64-qr/opt",
            "linux1804-64-qr/debug",
            "linux1804-64-shippable-qr/opt",
            "linux1804-64-asan-qr/opt",
            "linux1804-64-tsan-qr/opt",
            "windows7-32-qr/debug",
            "windows7-32-qr/opt",
            "windows7-32-devedition-qr/opt",
            "windows7-32-shippable-qr/opt",
            "windows10-32-qr/debug",
            "windows10-32-qr/opt",
            "windows10-32-shippable-qr/opt",
            "windows10-32-2004-qr/debug",
            "windows10-32-2004-qr/opt",
            "windows10-32-2004-shippable-qr/opt",
            "windows10-aarch64-qr/opt",
            "windows10-64/debug",
            "windows10-64/opt",
            "windows10-64-shippable/opt",
            "windows10-64-devedition/opt",
            "windows10-64-qr/opt",
            "windows10-64-qr/debug",
            "windows10-64-shippable-qr/opt",
            "windows10-64-devedition-qr/opt",
            "windows10-64-asan-qr/opt",
            "windows10-64-2004-qr/opt",
            "windows10-64-2004-qr/debug",
            "windows10-64-2004-shippable-qr/opt",
            "windows10-64-2004-devedition-qr/opt",
            "windows10-64-2004-asan-qr/opt",
            "windows11-32-2009-qr/debug",
            "windows11-32-2009-qr/opt",
            "windows11-32-2009-shippable-qr/opt",
            "windows11-64-2009-qr/opt",
            "windows11-64-2009-qr/debug",
            "windows11-64-2009-shippable-qr/opt",
            "windows11-64-2009-devedition-qr/opt",
            "windows11-64-2009-asan-qr/opt",
            "macosx1015-64/opt",
            "macosx1015-64/debug",
            "macosx1015-64-shippable/opt",
            "macosx1015-64-devedition/opt",
            "macosx1015-64-devedition-qr/opt",
            "macosx1015-64-qr/opt",
            "macosx1015-64-shippable-qr/opt",
            "macosx1015-64-qr/debug",
            "macosx1100-64-shippable-qr/opt",
            "macosx1100-64-qr/debug",
            "android-em-7.0-x86_64-shippable/opt",
            "android-em-7.0-x86_64-shippable-lite/opt",
            "android-em-7.0-x86_64/debug",
            "android-em-7.0-x86_64/debug-isolated-process",
            "android-em-7.0-x86_64/opt",
            "android-em-7.0-x86_64-lite/opt",
            "android-em-7.0-x86-shippable/opt",
            "android-em-7.0-x86-shippable-lite/opt",
            "android-em-7.0-x86_64-shippable-qr/opt",
            "android-em-7.0-x86_64-qr/debug",
            "android-em-7.0-x86_64-qr/debug-isolated-process",
            "android-em-7.0-x86_64-qr/opt",
            "android-em-7.0-x86_64-shippable-lite-qr/opt",
            "android-em-7.0-x86_64-lite-qr/debug",
            "android-em-7.0-x86_64-lite-qr/opt",
        )
    )
    for task in tasks:
        if "tier" in task:
            resolve_keyed_by(
                task,
                "tier",
                item_name=task["test-name"],
                variant=task["attributes"].get("unittest_variant"),
                enforce_single_match=False,
            )

        # only override if not set for the test
        if "tier" not in task or task["tier"] == "default":
            task["tier"] = 1 if task["test-platform"] in tier1_platforms else 2

        yield task
+
+
+@transforms.add
def apply_raptor_tier_optimization(config, tasks):
    """Make raptor tasks backstop/expanded-only (except on android hardware),
    and cap variant runs at tier 2. Regression tests are never optimized."""
    for task in tasks:
        if task["suite"] != "raptor" or "regression-tests" in task["test-name"]:
            # Non-raptor tasks pass through; regression tests are
            # deliberately left unoptimized.
            yield task
            continue

        if not task["test-platform"].startswith("android-hw"):
            if task["tier"] > 1:
                task["optimization"] = {"skip-unless-backstop": None}
            else:
                task["optimization"] = {"skip-unless-expanded": None}

        if task["attributes"].get("unittest_variant"):
            task["tier"] = max(task["tier"], 2)
        yield task
+
+
+@transforms.add
def disable_try_only_platforms(config, tasks):
    """Turns off platforms that should only run on try."""
    # Currently empty; add anchored regexes here to restrict a platform
    # to try-only.
    try_only_platforms = ()
    for task in tasks:
        try_only = any(
            re.match(pattern + "$", task["test-platform"])
            for pattern in try_only_platforms
        )
        if try_only:
            task["run-on-projects"] = []
        yield task
+
+
+@transforms.add
def ensure_spi_disabled_on_all_but_spi(config, tasks):
    """Force the socket and network processes off via prefs for every suite
    that supports --setpref, except on socketprocess variants."""
    # These harnesses don't accept --setpref at all.
    no_setpref_suites = ("gtest", "cppunit", "jittest", "junit", "raptor")

    for task in tasks:
        variant = task["attributes"].get("unittest_variant", "")
        supports_setpref = not any(s in task["suite"] for s in no_setpref_suites)

        if supports_setpref and "socketprocess" not in variant:
            task["mozharness"]["extra-options"].extend(
                [
                    "--setpref=media.peerconnection.mtransport_process=false",
                    "--setpref=network.process.enabled=false",
                ]
            )

        yield task
+
+
# Schema for the `test-setting` object that `set_test_setting` (below)
# attaches to every test task. `_hash` is a short digest of the whole
# setting, used for cheap equality comparisons.
test_setting_description_schema = Schema(
    {
        Required("_hash"): str,
        # Hardware / OS the test runs on.
        "platform": {
            Required("arch"): Any("32", "64", "aarch64", "arm7", "x86_64"),
            Required("os"): {
                Required("name"): Any("android", "linux", "macosx", "windows"),
                Required("version"): str,
                Optional("build"): str,
            },
            Optional("device"): str,
            Optional("display"): "wayland",
            Optional("machine"): Any("ref-hw-2017"),
        },
        # Characteristics of the build under test.
        "build": {
            Required("type"): Any("opt", "debug", "debug-isolated-process"),
            Any(
                "asan",
                "ccov",
                "clang-trunk",
                "devedition",
                "domstreams",
                "lite",
                "mingwclang",
                "nightlyasrelease",
                "shippable",
                "tsan",
            ): bool,
        },
        # Runtime variants (prefs/env) enabled for the run.
        "runtime": {Any(*list(TEST_VARIANTS.keys()) + ["1proc"]): bool},
    },
    check=False,
)
"""Schema test settings must conform to. Validated by
:py:func:`~test.test_mozilla_central.test_test_setting`"""
+
+
+@transforms.add
def set_test_setting(config, tasks):
    """A test ``setting`` is the set of configuration that uniquely
    distinguishes a test task from other tasks that run the same suite
    (ignoring chunks).

    There are three different types of information that make up a setting:

    1. Platform - Information describing the underlying platform tests run on,
    e.g, OS, CPU architecture, etc.

    2. Build - Information describing the build being tested, e.g build type,
    ccov, asan/tsan, etc.

    3. Runtime - Information describing which runtime parameters are enabled,
    e.g, prefs, environment variables, etc.

    This transform adds a ``test-setting`` object to the ``extra`` portion of
    all test tasks, of the form:

    .. code-block::

        {
            "platform": { ... },
            "build": { ... },
            "runtime": { ... }
        }

    This information could be derived from the label, but consuming this
    object is less brittle.
    """
    # Some attributes have a dash in them which complicates parsing. Ensure we
    # don't split them up.
    # TODO Rename these so they don't have a dash.
    dash_attrs = [
        "clang-trunk",
        "ref-hw-2017",
    ]
    dash_token = "%D%"
    # Splits a leading alphabetic OS name from its optional version digits,
    # e.g. "macosx1015" -> ("macosx", "1015").
    platform_re = re.compile(r"(\D+)(\d*)")

    for task in tasks:
        setting = {
            "platform": {
                "os": {},
            },
            "build": {},
            "runtime": {},
        }

        # parse platform and build information out of 'test-platform'
        platform, build_type = task["test-platform"].split("/", 1)

        # ensure dashed attributes don't get split up
        for attr in dash_attrs:
            if attr in platform:
                platform = platform.replace(attr, attr.replace("-", dash_token))

        parts = platform.split("-")

        # restore dashes now that split is finished
        for i, part in enumerate(parts):
            if dash_token in part:
                parts[i] = part.replace(dash_token, "-")

        match = platform_re.match(parts.pop(0))
        assert match
        os_name, os_version = match.groups()

        device = machine = os_build = display = None
        if os_name == "android":
            # Android platforms look like android-hw-<device>-... or
            # android-em-<ver>[.<minor>]-[android-]<arch>-...
            device = parts.pop(0)
            if device == "hw":
                device = parts.pop(0)
            else:
                device = "emulator"

            os_version = parts.pop(0)
            if parts[0].isdigit():
                # Minor version component, e.g. "7" + "0" -> "7.0".
                os_version = f"{os_version}.{parts.pop(0)}"

            if parts[0] == "android":
                parts.pop(0)

            arch = parts.pop(0)

        else:
            arch = parts.pop(0)
            if parts[0].isdigit():
                # e.g. the "2004" in windows10-64-2004-...
                os_build = parts.pop(0)

            if parts[0] == "ref-hw-2017":
                machine = parts.pop(0)

            if parts[0] == "wayland":
                display = parts.pop(0)

        # It's not always possible to glean the exact architecture used from
        # the task, so sometimes this will just be set to "32" or "64".
        setting["platform"]["arch"] = arch
        setting["platform"]["os"] = {
            "name": os_name,
            "version": os_version,
        }

        if os_build:
            setting["platform"]["os"]["build"] = os_build

        if device:
            setting["platform"]["device"] = device

        if machine:
            setting["platform"]["machine"] = machine

        if display:
            setting["platform"]["display"] = display

        # parse remaining parts as build attributes
        setting["build"]["type"] = build_type
        while parts:
            attr = parts.pop(0)
            if attr == "qr":
                # all tasks are webrender now, no need to store it
                continue

            setting["build"][attr] = True

        unittest_variant = task["attributes"].get("unittest_variant")
        if unittest_variant:
            for variant in unittest_variant.split("+"):
                setting["runtime"][variant] = True

        # add a hash of the setting object for easy comparisons
        setting["_hash"] = hashlib.sha256(
            json.dumps(setting, sort_keys=True).encode("utf-8")
        ).hexdigest()[:12]

        task["test-setting"] = ReadOnlyDict(**setting)
        yield task
+
+
+@transforms.add
def allow_software_gl_layers(config, tasks):
    """
    Handle the "allow-software-gl-layers" property for platforms where it
    applies.
    """
    for task in tasks:
        if task.get("allow-software-gl-layers"):
            # This should be set always once bug 1296086 is resolved.
            options = task["mozharness"].setdefault("extra-options", [])
            options.append("--allow-software-gl-layers")

        yield task
+
+
+@transforms.add
def enable_webrender(config, tasks):
    """
    Handle the "webrender" property by passing a flag to mozharness if it is
    enabled.
    """
    # Suites without a profile, where --setpref would be meaningless.
    profileless_categories = {
        "cppunittest",
        "geckoview-junit",
        "gtest",
        "jittest",
        "raptor",
    }
    for task in tasks:
        # TODO: this was all conditionally in enable_webrender- do we still need this?
        extra_options = task["mozharness"].setdefault("extra-options", [])
        # We only want to 'setpref' on tests that have a profile
        if task["attributes"]["unittest_category"] not in profileless_categories:
            extra_options.append("--setpref=layers.d3d11.enable-blacklist=false")

        yield task
+
+
+@transforms.add
def set_schedules_for_webrender_android(config, tasks):
    """android-hw has limited resources, we need webrender on phones"""
    gfx_suites = ("crashtest", "reftest")
    for task in tasks:
        on_phone = task["test-platform"].startswith("android-hw")
        if on_phone and task["suite"] in gfx_suites:
            task["schedules-component"] = "android-hw-gfx"
        yield task
+
+
+@transforms.add
def set_retry_exit_status(config, tasks):
    """Retry tasks that exit with TBPL_RETRY (4) — mozharness' transient
    failure code — or 137, which indicates an error with GCP workers."""
    retry_statuses = [4, 137]
    for task in tasks:
        task["retry-exit-status"] = retry_statuses
        yield task
+
+
+@transforms.add
def set_profile(config, tasks):
    """Set profiling mode for tests."""
    try_config = config.params["try_task_config"]
    profiling_enabled = try_config.get("gecko-profile", False)
    settings = (
        "gecko-profile-interval",
        "gecko-profile-entries",
        "gecko-profile-threads",
        "gecko-profile-features",
    )

    for task in tasks:
        if profiling_enabled and task["suite"] in ("talos", "raptor"):
            options = task["mozharness"]["extra-options"]
            options.append("--gecko-profile")
            on_windows = task["test-platform"].startswith("win")
            for name in settings:
                value = try_config.get(name)
                if value is None:
                    continue
                # These values can contain spaces (eg the "DOM Worker"
                # thread) and the command is constructed in different,
                # incompatible ways on different platforms.
                if on_windows:
                    # Double quotes for Windows (single won't work).
                    options.append(f'--{name}="{value}"')
                else:
                    # Other platforms keep things as separate values,
                    # rather than joining with spaces.
                    options.append(f"--{name}={value}")

        yield task
+
+
+@transforms.add
def set_tag(config, tasks):
    """Set test for a specific tag."""
    # Tags only apply to legacy try-option-syntax pushes.
    tag = None
    if config.params["try_mode"] == "try_option_syntax":
        tag = config.params["try_options"]["tag"]
    for task in tasks:
        if tag:
            task["mozharness"]["extra-options"] += ["--tag", tag]
        yield task
+
+
+@transforms.add
def set_test_type(config, tasks):
    """Tag each non-web-platform task with the generic type of test it runs."""
    known_types = ("mochitest", "reftest", "talos", "raptor", "geckoview-junit", "gtest")
    for task in tasks:
        if "web-platform" not in task["suite"]:
            for test_type in known_types:
                if test_type in task["suite"]:
                    task.setdefault("tags", {})["test-type"] = test_type
        yield task
+
+
+@transforms.add
def set_schedules_components(config, tasks):
    """Default each task's SCHEDULES components from its suite category and
    build platform family. Tasks that manage optimization themselves, or
    belong to an inclusive component, are left with their own components."""
    for task in tasks:
        if "optimization" in task or "when" in task:
            yield task
            continue

        category = task["attributes"]["unittest_category"]
        components = task.get("schedules-component", category)
        if isinstance(components, str):
            components = [components]
        components = set(components)

        if not components.isdisjoint(INCLUSIVE_COMPONENTS):
            # if this is an "inclusive" test, then all files which might
            # cause it to run are annotated with SCHEDULES in moz.build,
            # so do not include the platform or any other components here
            task["schedules-component"] = sorted(components)
            yield task
            continue

        components.add(category)
        components.add(platform_family(task["build-platform"]))

        task["schedules-component"] = sorted(components)
        yield task
+
+
+@transforms.add
def enable_parallel_marking_in_tsan_tests(config, tasks):
    """Enable parallel marking in TSAN tests"""
    # These harnesses don't take --setpref.
    exempt_suites = ("cppunittest", "gtest")
    pref = "--setpref=javascript.options.mem.gc_parallel_marking=true"
    for task in tasks:
        if "-tsan-" in task["test-platform"] and task["suite"] not in exempt_suites:
            task["mozharness"].setdefault("extra-options", []).append(pref)

        yield task
+
+
+@transforms.add
def apply_windows7_optimization(config, tasks):
    """Restrict windows7 test tasks to backstop pushes."""
    for task in tasks:
        if task["test-platform"].startswith("windows7"):
            task["optimization"] = {"skip-unless-backstop": None}

        yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/test/raptor.py b/taskcluster/gecko_taskgraph/transforms/test/raptor.py
new file mode 100644
index 0000000000..3eac5dd9ef
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/test/raptor.py
@@ -0,0 +1,317 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import Schema, optionally_keyed_by, resolve_keyed_by
+from taskgraph.util.treeherder import join_symbol, split_symbol
+from voluptuous import Extra, Optional, Required
+
+from gecko_taskgraph.transforms.test import test_description_schema
+from gecko_taskgraph.util.copy_task import copy_task
+
+transforms = TransformSequence()
+task_transforms = TransformSequence()
+
# Schema for the raptor-specific parts of a test description.  Keys not
# listed here fall through (via `Extra`) to validation against the shared
# test_description_schema.
raptor_description_schema = Schema(
    {
        # Raptor specific configs.
        Optional("raptor"): {
            # Android activity to launch; may be keyed by app.
            Optional("activity"): optionally_keyed_by("app", str),
            # Applications to split this test across (see split_apps).
            Optional("apps"): optionally_keyed_by("test-platform", "subtest", [str]),
            Optional("binary-path"): optionally_keyed_by("app", str),
            Optional("run-visual-metrics"): optionally_keyed_by("app", bool),
            # Subtests to split this test into (see split_raptor_subtests).
            Optional("subtests"): optionally_keyed_by("app", "test-platform", list),
            Optional("test"): str,
            Optional("test-url-param"): optionally_keyed_by(
                "subtest", "test-platform", str
            ),
        },
        # Configs defined in the 'test_description_schema'.
        Optional("max-run-time"): optionally_keyed_by(
            "app", "subtest", "test-platform", test_description_schema["max-run-time"]
        ),
        Optional("run-on-projects"): optionally_keyed_by(
            "app",
            "test-name",
            "raptor.test",
            "subtest",
            "variant",
            test_description_schema["run-on-projects"],
        ),
        Optional("variants"): test_description_schema["variants"],
        Optional("target"): optionally_keyed_by(
            "app", test_description_schema["target"]
        ),
        Optional("tier"): optionally_keyed_by(
            "app", "raptor.test", "subtest", "variant", test_description_schema["tier"]
        ),
        Required("test-name"): test_description_schema["test-name"],
        Required("test-platform"): test_description_schema["test-platform"],
        Required("require-signed-extensions"): test_description_schema[
            "require-signed-extensions"
        ],
        Required("treeherder-symbol"): test_description_schema["treeherder-symbol"],
        # Any unrecognized keys will be validated against the test_description_schema.
        Extra: object,
    }
)

# Validate incoming test definitions against the schema above.
transforms.add_validate(raptor_description_schema)
+
+
+@transforms.add
def set_defaults(config, tests):
    """Ensure every test has a 'raptor' section with a run-visual-metrics flag."""
    for test in tests:
        raptor_config = test.setdefault("raptor", {})
        if "run-visual-metrics" not in raptor_config:
            raptor_config["run-visual-metrics"] = False
        yield test
+
+
+@transforms.add
def split_apps(config, tests):
    """Create one copy of each test per application listed in 'raptor.apps'.

    Tests without an 'apps' list pass through unchanged.  Variant-split
    tests are only duplicated for Firefox and mobile applications.
    """
    # Treeherder group suffixes for the alternate applications.
    symbol_by_app = {
        "chrome": "ChR",
        "chrome-m": "ChR",
        "chromium": "Cr",
        "fenix": "fenix",
        "refbrow": "refbrow",
        "safari": "Saf",
        "custom-car": "CaR",
    }
    variant_apps = ("firefox", "geckoview", "fenix", "chrome-m")

    for test in tests:
        apps = test["raptor"].pop("apps", None)
        if not apps:
            yield test
            continue

        for app in apps:
            # Ignore variants for non-Firefox or non-mobile applications.
            if test["attributes"].get("unittest_variant") and app not in variant_apps:
                continue

            per_app = copy_task(test)
            per_app["app"] = app
            per_app["description"] = per_app["description"] + f" on {app.capitalize()}"

            new_name = f"{per_app['test-name']}-{app}"
            per_app["test-name"] = new_name
            per_app["try-name"] = new_name

            if app in symbol_by_app:
                group, symbol = split_symbol(per_app["treeherder-symbol"])
                per_app["treeherder-symbol"] = join_symbol(
                    f"{group}-{symbol_by_app[app]}", symbol
                )

            yield per_app
+
+
+@transforms.add
def handle_keyed_by_prereqs(config, tests):
    """Resolve prerequisite keyed-by fields before the rest.

    'raptor.subtests' is resolved early because the values it yields may
    themselves contain further keyed-by options.
    """
    for test in tests:
        name = test["test-name"]
        resolve_keyed_by(test, "raptor.subtests", item_name=name)
        yield test
+
+
+@transforms.add
def split_raptor_subtests(config, tests):
    """Split tests that define 'subtests' into one chunk per subtest.

    Each subtest (e.g. a page-load URL) becomes its own job; tests without
    'subtests' are passed through unchanged.
    """
    for test in tests:
        subtests = test["raptor"].pop("subtests", None)
        if not subtests:
            yield test
            continue

        for index, subtest in enumerate(subtests, start=1):
            chunk = copy_task(test)
            chunk["chunk-number"] = index
            if isinstance(subtest, list):
                # A [name, treeherder-symbol] pair.
                chunk["subtest"] = subtest[0]
                chunk["subtest-symbol"] = subtest[1]
            else:
                chunk["subtest"] = subtest
                chunk["subtest-symbol"] = subtest
            chunk = resolve_keyed_by(
                chunk, "tier", chunk["subtest"], defer=["variant"]
            )
            yield chunk
+
+
+@transforms.add
def handle_keyed_by(config, tests):
    """Resolve the remaining keyed-by fields on each test.

    Variant-keyed values are deferred so they can be resolved after
    variant splitting.
    """
    keyed_fields = (
        "raptor.test-url-param",
        "raptor.run-visual-metrics",
        "raptor.activity",
        "raptor.binary-path",
        "limit-platforms",
        "fetches.fetch",
        "max-run-time",
        "run-on-projects",
        "target",
        "tier",
    )
    for test in tests:
        name = test["test-name"]
        for field in keyed_fields:
            resolve_keyed_by(test, field, item_name=name, defer=["variant"])
        yield test
+
+
+@transforms.add
def split_page_load_by_url(config, tests):
    """Finish the per-subtest jobs created by split_raptor_subtests.

    Renames the task after its subtest, points raptor at the single test,
    and swaps in the subtest's treeherder symbol.
    """
    for test in tests:
        # These keys only exist when the task defined 'subtests'.
        chunk_number = test.pop("chunk-number", None)
        # Deliberately not popped: variant splitting still needs this value.
        subtest = test.get("subtest")
        subtest_symbol = test.pop("subtest-symbol", None)

        if not (chunk_number and subtest):
            yield test
            continue

        if len(subtest_symbol) > 10 and "ytp" not in subtest_symbol:
            raise Exception(
                "Treeherder symbol %s is larger than 10 char! Please use a different symbol."
                % subtest_symbol
            )

        if test["test-name"].startswith("browsertime-"):
            test["raptor"]["test"] = subtest
            # Remove youtube-playback in the test name to avoid duplication
            test["test-name"] = test["test-name"].replace("youtube-playback-", "")
        else:
            # Use full test name if running on webextension
            test["raptor"]["test"] = "raptor-tp6-" + subtest + "-{}".format(test["app"])

        # Only run the subtest/single URL
        test["test-name"] = "{}-{}".format(test["test-name"], subtest)
        test["try-name"] = "{}-{}".format(test["try-name"], subtest)

        # Set treeherder symbol and description
        group, _ = split_symbol(test["treeherder-symbol"])
        test["treeherder-symbol"] = join_symbol(group, subtest_symbol)
        test["description"] += f" on {subtest}"

        yield test
+
+
@transforms.add
def modify_extra_options(config, tests):
    """Adjust previously-set mozharness extra-options for special tests.

    - first-install tests: drop any conditioned-profile option.
    - widevine tests: switch the settled conditioned profile to the
      youtube-flavoured one.
    - unity-webgl tests: drop the extra-profiler-run option.
    """
    for test in tests:
        test_name = test.get("test-name", None)

        if "first-install" in test_name:
            # First-install tests should never use conditioned profiles
            extra_options = test.setdefault("mozharness", {}).setdefault(
                "extra-options", []
            )

            for i, opt in enumerate(extra_options):
                if "conditioned-profile" in opt:
                    # NOTE(review): the `if i:` guard means an option at index
                    # 0 is never removed — confirm whether that is intentional.
                    if i:
                        extra_options.pop(i)
                    break

        if "-widevine" in test_name:
            extra_options = test.setdefault("mozharness", {}).setdefault(
                "extra-options", []
            )
            for i, opt in enumerate(extra_options):
                if "--conditioned-profile=settled" in opt:
                    # NOTE(review): as above, an option at index 0 is never
                    # modified because of the `if i:` guard.
                    if i:
                        extra_options[i] += "-youtube"
                    break

        if "unity-webgl" in test_name:
            # Disable the extra-profiler-run for unity-webgl tests.
            extra_options = test.setdefault("mozharness", {}).setdefault(
                "extra-options", []
            )
            for i, opt in enumerate(extra_options):
                if "extra-profiler-run" in opt:
                    if i:
                        extra_options.pop(i)
                    break

        yield test
+
+
+@transforms.add
def add_extra_options(config, tests):
    """Translate raptor configuration into mozharness extra-options flags."""
    for test in tests:
        mozharness = test.setdefault("mozharness", {})
        if test.get("app", "") == "chrome-m":
            mozharness["tooltool-downloads"] = "internal"

        options = mozharness.setdefault("extra-options", [])
        raptor_config = test["raptor"]

        # Adding device name if we're on android
        device_by_prefix = {
            "android-hw-g5": "g5",
            "android-hw-a51": "a51",
            "android-hw-p5": "p5_aarch64",
        }
        platform = test["test-platform"]
        for prefix, device in device_by_prefix.items():
            if platform.startswith(prefix):
                options.append(f"--device-name={device}")
                break

        if raptor_config.pop("run-visual-metrics", False):
            options.append("--browsertime-video")
            options.append("--browsertime-visualmetrics")
            test["attributes"]["run-visual-metrics"] = True

        if "app" in test:
            # don't pop as some tasks need this value after splitting variants
            options.append("--app={}".format(test["app"]))

        # These raptor keys translate directly into same-named flags.
        for key in ("activity", "binary-path", "test"):
            if key in raptor_config:
                options.append("--{}={}".format(key, raptor_config.pop(key)))

        if test["require-signed-extensions"]:
            options.append("--is-release-build")

        if "test-url-param" in raptor_config:
            param = raptor_config.pop("test-url-param")
            if param != []:
                options.append("--test-url-params={}".format(param.replace(" ", "")))

        options.append("--project={}".format(config.params.get("project")))

        yield test
+
+
+@task_transforms.add
def add_scopes_and_proxy(config, tasks):
    """Grant access to the perftest login secret via the taskcluster proxy."""
    secret_scope = "secrets:get:project/perftest/gecko/level-{level}/perftest-login"
    for task in tasks:
        worker = task.setdefault("worker", {})
        worker["taskcluster-proxy"] = True
        task.setdefault("scopes", []).append(secret_scope)
        yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/test/variant.py b/taskcluster/gecko_taskgraph/transforms/test/variant.py
new file mode 100644
index 0000000000..e2fd9764e1
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/test/variant.py
@@ -0,0 +1,128 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import datetime
+
+import jsone
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import Schema, validate_schema
+from taskgraph.util.treeherder import join_symbol, split_symbol
+from taskgraph.util.yaml import load_yaml
+from voluptuous import Any, Optional, Required
+
+import gecko_taskgraph
+from gecko_taskgraph.util.copy_task import copy_task
+from gecko_taskgraph.util.templates import merge
+
+transforms = TransformSequence()
+
# Variant definitions are loaded once at import time from variants.yml.
TEST_VARIANTS = load_yaml(
    gecko_taskgraph.GECKO, "taskcluster", "ci", "test", "variants.yml"
)
"""List of available test variants defined."""


variant_description_schema = Schema(
    {
        str: {
            # Human-readable description; formatted with the task definition.
            Required("description"): str,
            # Suffix appended to the treeherder symbol and variant-suffix.
            Required("suffix"): str,
            # Owning component for this variant.
            Required("component"): str,
            # Expiry date "YYYY-MM-DD"; values without dashes (e.g. "never")
            # are treated as non-expiring by find_expired_variants.
            Required("expiration"): str,
            # Optional json-e condition limiting which tasks get the variant.
            Optional("when"): {Any("$eval", "$if"): str},
            # Task configuration to replace or merge into each copy.
            Optional("replace"): {str: object},
            Optional("merge"): {str: object},
        }
    }
)
"""variant description schema"""
+
+
@transforms.add
def split_variants(config, tasks):
    """Splits test definitions into multiple tasks based on the `variants` key.

    If `variants` are defined, the original task will be yielded along with a
    copy of the original task for each variant defined in the list. The copies
    will have the 'unittest_variant' attribute set.
    """
    validate_schema(variant_description_schema, TEST_VARIANTS, "In variants.yml:")

    def find_expired_variants(variants):
        """Return names of variants whose expiration date has passed."""
        expired = []

        # do not expire on esr/beta/release
        if config.params.get("release_type", "") in [
            "release",
            "beta",
        ]:
            return []

        if "esr" in config.params.get("release_type", ""):
            return []

        today = datetime.datetime.today()
        for variant in variants:

            expiration = variants[variant]["expiration"]
            # Values without dashes (e.g. "never") never expire.
            if len(expiration.split("-")) == 1:
                continue
            expires_at = datetime.datetime.strptime(expiration, "%Y-%m-%d")
            if expires_at < today:
                expired.append(variant)
        return expired

    def remove_expired(variants, expired):
        """Drop variant names whose '+'-joined parts have all expired.

        NOTE(review): a composite name is kept in full while at least one
        part is unexpired, so expired parts are still applied later —
        confirm this is intentional.
        """
        remaining_variants = []
        for name in variants:
            parts = [p for p in name.split("+") if p not in expired]
            if len(parts) == 0:
                continue

            remaining_variants.append(name)
        return remaining_variants

    def apply_variant(variant, task):
        """Apply a single variant's settings to a task copy; returns the task."""
        task["description"] = variant["description"].format(**task)

        # Append the variant suffix to the treeherder group, or to the
        # symbol itself for ungrouped ("?") tasks.
        suffix = f"-{variant['suffix']}"
        group, symbol = split_symbol(task["treeherder-symbol"])
        if group != "?":
            group += suffix
        else:
            symbol += suffix
        task["treeherder-symbol"] = join_symbol(group, symbol)

        # This will be used to set the label and try-name in 'make_job_description'.
        task.setdefault("variant-suffix", "")
        task["variant-suffix"] += suffix

        # Replace and/or merge the configuration.
        task.update(variant.get("replace", {}))
        return merge(task, variant.get("merge", {}))

    expired_variants = find_expired_variants(TEST_VARIANTS)
    for task in tasks:
        variants = task.pop("variants", [])
        variants = remove_expired(variants, expired_variants)

        # NOTE(review): popped without a default — every incoming task is
        # expected to define 'run-without-variant'.
        if task.pop("run-without-variant"):
            yield copy_task(task)

        for name in variants:
            # Apply composite variants (joined by '+') in order.
            parts = name.split("+")
            taskv = copy_task(task)
            for part in parts:
                variant = TEST_VARIANTS[part]

                # If any variant in a composite fails this check we skip it.
                if "when" in variant:
                    context = {"task": task}
                    if not jsone.render(variant["when"], context):
                        break

                taskv = apply_variant(variant, taskv)
            else:
                # for/else: only yield when no 'when' check broke the loop.
                taskv["attributes"]["unittest_variant"] = name
                yield taskv
diff --git a/taskcluster/gecko_taskgraph/transforms/test/worker.py b/taskcluster/gecko_taskgraph/transforms/test/worker.py
new file mode 100644
index 0000000000..0d8d72162d
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/test/worker.py
@@ -0,0 +1,201 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from taskgraph.transforms.base import TransformSequence
+
# default worker types keyed by instance-size
LINUX_WORKER_TYPES = {
    "large": "t-linux-large",
    "xlarge": "t-linux-xlarge",
    "default": "t-linux-large",
}

# windows worker types keyed by test-platform and virtualization
# (the *-2009 platforms define no "hardware" entry; set_worker_type routes
# hardware jobs to the windows10-64* entries instead)
WINDOWS_WORKER_TYPES = {
    "windows7-32-qr": {
        "virtual": "t-win7-32",
        "virtual-with-gpu": "t-win7-32-gpu",
        "hardware": "t-win10-64-1803-hw",
    },
    "windows7-32-shippable-qr": {
        "virtual": "t-win7-32",
        "virtual-with-gpu": "t-win7-32-gpu",
        "hardware": "t-win10-64-1803-hw",
    },
    "windows7-32-devedition-qr": {  # build only, tests have no value
        "virtual": "t-win7-32",
        "virtual-with-gpu": "t-win7-32-gpu",
        "hardware": "t-win10-64-1803-hw",
    },
    "windows10-64": {  # source-test
        "virtual": "t-win10-64",
        "virtual-with-gpu": "t-win10-64-gpu-s",
        "hardware": "t-win10-64-1803-hw",
    },
    "windows10-64-shippable-qr": {
        "virtual": "t-win10-64",
        "virtual-with-gpu": "t-win10-64-gpu-s",
        "hardware": "t-win10-64-1803-hw",
    },
    "windows10-64-ref-hw-2017": {
        "virtual": "t-win10-64",
        "virtual-with-gpu": "t-win10-64-gpu-s",
        "hardware": "t-win10-64-ref-hw",
    },
    "windows10-64-2009-qr": {
        "virtual": "win10-64-2009",
        "virtual-with-gpu": "win10-64-2009-gpu",
    },
    "windows10-64-2009-shippable-qr": {
        "virtual": "win10-64-2009",
        "virtual-with-gpu": "win10-64-2009-gpu",
    },
    "windows11-32-2009-mingwclang-qr": {
        "virtual": "win11-64-2009",
        "virtual-with-gpu": "win11-64-2009-gpu",
    },
    "windows11-32-2009-qr": {
        "virtual": "win11-64-2009",
        "virtual-with-gpu": "win11-64-2009-gpu",
    },
    "windows11-32-2009-shippable-qr": {
        "virtual": "win11-64-2009",
        "virtual-with-gpu": "win11-64-2009-gpu",
    },
    "windows11-64-2009": {
        "virtual": "win11-64-2009",
        "virtual-with-gpu": "win11-64-2009-gpu",
    },
    # ccov runs get the ssd pools
    "windows11-64-2009-ccov": {
        "virtual": "win11-64-2009-ssd",
        "virtual-with-gpu": "win11-64-2009-ssd-gpu",
    },
    "windows11-64-2009-ccov-qr": {
        "virtual": "win11-64-2009-ssd",
        "virtual-with-gpu": "win11-64-2009-ssd-gpu",
    },
    "windows11-64-2009-devedition": {
        "virtual": "win11-64-2009",
        "virtual-with-gpu": "win11-64-2009-gpu",
    },
    "windows11-64-2009-shippable": {
        "virtual": "win11-64-2009",
        "virtual-with-gpu": "win11-64-2009-gpu",
    },
    "windows11-64-2009-qr": {
        "virtual": "win11-64-2009",
        "virtual-with-gpu": "win11-64-2009-gpu",
    },
    "windows11-64-2009-shippable-qr": {
        "virtual": "win11-64-2009",
        "virtual-with-gpu": "win11-64-2009-gpu",
    },
    "windows11-64-2009-devedition-qr": {
        "virtual": "win11-64-2009",
        "virtual-with-gpu": "win11-64-2009-gpu",
    },
    "windows11-64-2009-asan-qr": {
        "virtual": "win11-64-2009",
        "virtual-with-gpu": "win11-64-2009-gpu",
    },
    "windows11-64-2009-mingwclang-qr": {
        "virtual": "win11-64-2009",
        "virtual-with-gpu": "win11-64-2009-gpu",
    },
}

# os x worker types keyed by test-platform
MACOSX_WORKER_TYPES = {
    "macosx1015-64-power": "t-osx-1015-power",
    "macosx1015-64": "t-osx-1015-r8",
    "macosx1100-64": "t-osx-1100-m1",
}
+
+transforms = TransformSequence()
+
+
@transforms.add
def set_worker_type(config, tasks):
    """Set the worker type based on the test platform.

    An explicit non-"default" 'worker-type' on the task wins; otherwise the
    type is derived from the platform tables above (and, for android/linux,
    the suite and instance size).  Order of the elif chain matters: e.g.
    android-em must be checked before the generic linux/android branch.
    """
    for task in tasks:
        # during the taskcluster migration, this is a bit tortured, but it
        # will get simpler eventually!
        test_platform = task["test-platform"]
        if task.get("worker-type", "default") != "default":
            # This test already has its worker type defined, so just use that (yields below)
            # Unless the value is set to "default", in that case ignore it.
            pass
        elif test_platform.startswith("macosx1015-64"):
            # power tests get the dedicated power-measurement machines
            if "--power-test" in task["mozharness"]["extra-options"]:
                task["worker-type"] = MACOSX_WORKER_TYPES["macosx1015-64-power"]
            else:
                task["worker-type"] = MACOSX_WORKER_TYPES["macosx1015-64"]
        elif test_platform.startswith("macosx1100-64"):
            task["worker-type"] = MACOSX_WORKER_TYPES["macosx1100-64"]
        elif test_platform.startswith("win"):
            # figure out what platform the job needs to run on
            if task["virtualization"] == "hardware":
                # some jobs like talos and reftest run on real h/w - those are all win10
                if test_platform.startswith("windows10-64-ref-hw-2017"):
                    win_worker_type_platform = WINDOWS_WORKER_TYPES[
                        "windows10-64-ref-hw-2017"
                    ]
                else:
                    win_worker_type_platform = WINDOWS_WORKER_TYPES["windows10-64"]
            else:
                # the other jobs run on a vm which may or may not be a win10 vm
                win_worker_type_platform = WINDOWS_WORKER_TYPES[
                    test_platform.split("/")[0]
                ]
            if task[
                "virtualization"
            ] == "virtual-with-gpu" and test_platform.startswith("windows1"):
                # add in `--requires-gpu` to the mozharness options
                task["mozharness"]["extra-options"].append("--requires-gpu")

            # now we have the right platform set the worker type accordingly
            task["worker-type"] = win_worker_type_platform[task["virtualization"]]
        elif test_platform.startswith("android-hw-g5"):
            # bitbar android hardware: perf pools for raptor, unit otherwise
            if task["suite"] != "raptor":
                task["worker-type"] = "t-bitbar-gw-unit-g5"
            else:
                task["worker-type"] = "t-bitbar-gw-perf-g5"
        elif test_platform.startswith("android-hw-p5"):
            if task["suite"] != "raptor":
                task["worker-type"] = "t-bitbar-gw-unit-p5"
            else:
                task["worker-type"] = "t-bitbar-gw-perf-p5"
        elif test_platform.startswith("android-hw-a51"):
            if task["suite"] != "raptor":
                task["worker-type"] = "t-bitbar-gw-unit-a51"
            else:
                task["worker-type"] = "t-bitbar-gw-perf-a51"
        elif test_platform.startswith("android-em-7.0-x86"):
            task["worker-type"] = "t-linux-kvm"
        elif test_platform.startswith("linux") or test_platform.startswith("android"):
            if "wayland" in test_platform:
                task["worker-type"] = "t-linux-wayland"
            elif task.get("suite", "") in ["talos", "raptor"] and not task[
                "build-platform"
            ].startswith("linux64-ccov"):
                # perf tests run on dedicated bare-metal-style talos workers
                task["worker-type"] = "t-linux-talos-1804"
            else:
                task["worker-type"] = LINUX_WORKER_TYPES[task["instance-size"]]
        else:
            raise Exception(f"unknown test_platform {test_platform}")

        yield task
+
+
+@transforms.add
def set_wayland_env(config, tasks):
    """Point tasks on the Wayland workers at the Wayland display."""
    for task in tasks:
        if task["worker-type"] == "t-linux-wayland":
            env = task.setdefault("worker", {}).setdefault("env", {})
            env.update({"MOZ_ENABLE_WAYLAND": "1", "WAYLAND_DISPLAY": "wayland-0"})
        yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/try_job.py b/taskcluster/gecko_taskgraph/transforms/try_job.py
new file mode 100644
index 0000000000..4b3281f5c5
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/try_job.py
@@ -0,0 +1,18 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
+@transforms.add
def set_job_try_name(config, jobs):
    """
    For a task which is governed by `-j` in try syntax, set the `job_try_name`
    attribute based on the job name.
    """
    for job in jobs:
        attributes = job.setdefault("attributes", {})
        if "job_try_name" not in attributes:
            attributes["job_try_name"] = job["name"]
        yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/update_verify.py b/taskcluster/gecko_taskgraph/transforms/update_verify.py
new file mode 100644
index 0000000000..19c932c746
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/update_verify.py
@@ -0,0 +1,58 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the beetmover task into an actual task description.
+"""
+
+
+from copy import deepcopy
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.treeherder import add_suffix, inherit_treeherder_from_dep
+
+from gecko_taskgraph.util.attributes import task_name
+
+transforms = TransformSequence()
+
+
@transforms.add
def add_command(config, tasks):
    """Expand each update-verify task into per-chunk run-task descriptions.

    Each task is matched (by name) to its update-verify(-next)-config
    dependency, inherits that task's treeherder config and channel, then is
    copied once per chunk with the chunked-verify.sh command filled in.
    """
    # Map task name -> matching config-generation dependency task.
    config_tasks = {}
    for dep in config.kind_dependencies_tasks.values():
        if (
            "update-verify-config" in dep.kind
            or "update-verify-next-config" in dep.kind
        ):
            config_tasks[task_name(dep)] = dep

    for task in tasks:
        config_task = config_tasks[task["name"]]
        total_chunks = task["extra"]["chunks"]
        # The update channel comes from the config task's definition.
        task["worker"].setdefault("env", {})["CHANNEL"] = config_task.task["extra"][
            "channel"
        ]
        # Fetch the generated config file from the config task's artifacts.
        task.setdefault("fetches", {})[config_task.label] = [
            "update-verify.cfg",
        ]
        task["treeherder"] = inherit_treeherder_from_dep(task, config_task)

        for this_chunk in range(1, total_chunks + 1):
            chunked = deepcopy(task)
            # e.g. symbol "UV" becomes "UV1", "UV2", ...
            chunked["treeherder"]["symbol"] = add_suffix(
                chunked["treeherder"]["symbol"], this_chunk
            )
            chunked["label"] = "release-update-verify-{}-{}/{}".format(
                chunked["name"], this_chunk, total_chunks
            )
            if not chunked["worker"].get("env"):
                chunked["worker"]["env"] = {}
            chunked["run"] = {
                "using": "run-task",
                "cwd": "{checkout}",
                "command": "tools/update-verify/scripts/chunked-verify.sh "
                f"--total-chunks={total_chunks} --this-chunk={this_chunk}",
                "sparse-profile": "update-verify",
            }

            yield chunked
diff --git a/taskcluster/gecko_taskgraph/transforms/update_verify_config.py b/taskcluster/gecko_taskgraph/transforms/update_verify_config.py
new file mode 100644
index 0000000000..1de808f82b
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/update_verify_config.py
@@ -0,0 +1,148 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the beetmover task into an actual task description.
+"""
+
+from urllib.parse import urlsplit
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+
+from gecko_taskgraph.transforms.task import get_branch_repo, get_branch_rev
+from gecko_taskgraph.util.attributes import release_level
+from gecko_taskgraph.util.scriptworker import get_release_config
+
+transforms = TransformSequence()
+
+
# The beta regexes do not match point releases.
# In the rare event that we do ship a point
# release to beta, we need to either:
# 1) update these regexes to match that specific version
# 2) pass a second include version that matches that specific version
# All values must be wrapped in single quotes; this is enforced at import
# time by ensure_wrapped_singlequote below.
INCLUDE_VERSION_REGEXES = {
    "beta": r"'^(\d+\.\d+(b\d+)?)$'",
    "nonbeta": r"'^\d+\.\d+(\.\d+)?$'",
    # Same as nonbeta, except for the esr suffix
    "esr": r"'^\d+\.\d+(\.\d+)?esr$'",
    # Previous esr versions, for update testing before we update users to the
    # next esr
    "esr115-next": r"'^(52|60|68|78|91|102)+\.\d+(\.\d+)?esr$'",
}

MAR_CHANNEL_ID_OVERRIDE_REGEXES = {
    # NOTE(review): the '$$' is presumably a '$' escaped for a downstream
    # config parser — confirm before changing.
    "beta": r"'^\d+\.\d+(\.\d+)?$$,firefox-mozilla-beta,firefox-mozilla-release'",
}
+
+
def ensure_wrapped_singlequote(regexes):
    """Check that every regex value in *regexes* is wrapped in single quotes.

    Raises an Exception naming the offending entry otherwise.  Used to
    validate INCLUDE_VERSION_REGEXES and MAR_CHANNEL_ID_OVERRIDE_REGEXES.
    """
    for name, regex in regexes.items():
        if not (regex[0] == "'" and regex[-1] == "'"):
            raise Exception(
                "Regex {} is invalid: not wrapped with single quotes.\n{}".format(
                    name, regex
                )
            )
+
+
+ensure_wrapped_singlequote(INCLUDE_VERSION_REGEXES)
+ensure_wrapped_singlequote(MAR_CHANNEL_ID_OVERRIDE_REGEXES)
+
+
@transforms.add
def add_command(config, tasks):
    """Assemble the update-verify-config-creator command for each task.

    Version/build details come from the release config; the 'extra.*'
    arguments below are resolved through keyed-by (on platform,
    release-type and release-level) before being appended.
    """
    # Arguments whose task['extra'] values may be keyed-by.
    keyed_by_args = [
        "channel",
        "archive-prefix",
        "previous-archive-prefix",
        "aus-server",
        "override-certs",
        "include-version",
        "mar-channel-id-override",
        "last-watershed",
    ]
    # Arguments passed through only when present in task['extra'].
    optional_args = [
        "updater-platform",
    ]

    release_config = get_release_config(config)

    for task in tasks:
        task["description"] = "generate update verify config for {}".format(
            task["attributes"]["build_platform"]
        )

        command = [
            "python",
            "testing/mozharness/scripts/release/update-verify-config-creator.py",
            "--product",
            task["extra"]["product"],
            "--stage-product",
            task["shipping-product"],
            "--app-name",
            task["extra"]["app-name"],
            "--branch-prefix",
            task["extra"]["branch-prefix"],
            "--platform",
            task["extra"]["platform"],
            "--to-version",
            release_config["version"],
            "--to-app-version",
            release_config["appVersion"],
            "--to-build-number",
            str(release_config["build_number"]),
            "--to-buildid",
            config.params["moz_build_date"],
            "--to-revision",
            get_branch_rev(config),
            "--output-file",
            "update-verify.cfg",
        ]

        repo_path = urlsplit(get_branch_repo(config)).path.lstrip("/")
        command.extend(["--repo-path", repo_path])

        if release_config.get("partial_versions"):
            # "112.0build1" -> "112.0"
            for partial in release_config["partial_versions"].split(","):
                command.extend(["--partial-version", partial.split("build")[0]])

        for arg in optional_args:
            if task["extra"].get(arg):
                command.append(f"--{arg}")
                command.append(task["extra"][arg])

        for arg in keyed_by_args:
            thing = f"extra.{arg}"
            resolve_keyed_by(
                task,
                thing,
                item_name=task["name"],
                platform=task["attributes"]["build_platform"],
                **{
                    "release-type": config.params["release_type"],
                    "release-level": release_level(config.params["project"]),
                },
            )
            # ignore things that resolved to null
            if not task["extra"].get(arg):
                continue
            # Symbolic names are mapped through the regex tables above.
            if arg == "include-version":
                task["extra"][arg] = INCLUDE_VERSION_REGEXES[task["extra"][arg]]
            if arg == "mar-channel-id-override":
                task["extra"][arg] = MAR_CHANNEL_ID_OVERRIDE_REGEXES[task["extra"][arg]]

            command.append(f"--{arg}")
            command.append(task["extra"][arg])

        task["run"].update(
            {
                "using": "mach",
                "mach": " ".join(command),
            }
        )

        yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/upload_generated_sources.py b/taskcluster/gecko_taskgraph/transforms/upload_generated_sources.py
new file mode 100644
index 0000000000..b862645eed
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/upload_generated_sources.py
@@ -0,0 +1,40 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the upload-generated-files task description template,
+taskcluster/ci/upload-generated-sources/kind.yml, into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
+@transforms.add
def add_task_info(config, jobs):
    """Fill in the task description from the upstream build task."""
    for job in jobs:
        dep_task = job.pop("primary-dependency")

        # Add a dependency on the build task.
        job["dependencies"] = {"build": dep_task.label}
        # Label the job to match the build task it's uploading from.
        job["label"] = dep_task.label.replace("build-", "upload-generated-sources-")

        # Copy over some bits of metadata from the build task.
        attributes = job.setdefault("attributes", {})
        attributes["build_platform"] = dep_task.attributes.get("build_platform")
        if dep_task.attributes.get("shippable"):
            attributes["shippable"] = True

        dep_th = dep_task.task["extra"]["treeherder"]
        job["treeherder"]["platform"] = "{}/{}".format(
            dep_th["machine"]["platform"], dep_task.attributes.get("build_type")
        )
        job["treeherder"]["tier"] = dep_th["tier"]
        if dep_th["symbol"] != "N":
            job["treeherder"]["symbol"] = f"Ugs{dep_th['symbol']}"

        job["run-on-projects"] = dep_task.attributes.get("run_on_projects")
        job["optimization"] = dep_task.optimization

        yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/upload_symbols.py b/taskcluster/gecko_taskgraph/transforms/upload_symbols.py
new file mode 100644
index 0000000000..f6d40e9a45
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/upload_symbols.py
@@ -0,0 +1,95 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the upload-symbols task description template,
+taskcluster/ci/upload-symbols/job-template.yml into an actual task description.
+"""
+
+
+import logging
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.treeherder import inherit_treeherder_from_dep, join_symbol
+
+from gecko_taskgraph.util.attributes import (
+ RELEASE_PROJECTS,
+ copy_attributes_from_dependent_job,
+)
+
+logger = logging.getLogger(__name__)
+
+transforms = TransformSequence()
+
+
+@transforms.add
def check_nightlies(config, tasks):
    """Fail when a shippable build on a release branch skips symbol upload.

    crash-stats can only symbolicate reports if symbols were uploaded, so
    shippable builds must opt in via `enable-full-crashsymbols` (unless they
    explicitly set `skip-upload-crashsymbols`).  Try may enable full symbols
    but not upload them.  Doing the check in this kind (instead of the build
    transforms) lets it respect the not-for-build-platforms filtering
    applied here.
    """
    for task in tasks:
        dep = task["primary-dependency"]
        if config.params["project"] in RELEASE_PROJECTS:
            attrs = dep.attributes
            if (
                attrs.get("shippable")
                and not attrs.get("enable-full-crashsymbols")
                and not attrs.get("skip-upload-crashsymbols")
            ):
                raise Exception(
                    "Shippable job %s should have enable-full-crashsymbols attribute "
                    "set to true to enable symbol upload to crash-stats" % dep.label
                )
        yield task
+
+
@transforms.add
def fill_template(config, tasks):
    """Turn an upload-symbols stub plus its build task into a full description.

    Tasks whose build lacks `enable-full-crashsymbols` (or sets
    `skip-upload-crashsymbols`) are dropped entirely.
    """
    for task in tasks:
        dep = task["primary-dependency"]
        task.pop("dependent-tasks", None)

        # Fill out the dynamic fields in the task description
        task["label"] = dep.label + "-upload-symbols"

        # Skip tasks where we don't have the full crashsymbols enabled
        if not dep.attributes.get("enable-full-crashsymbols") or dep.attributes.get(
            "skip-upload-crashsymbols"
        ):
            logger.debug("Skipping upload symbols task for %s", task["label"])
            continue

        task["dependencies"] = {"build": dep.label}
        task["worker"]["env"]["GECKO_HEAD_REPOSITORY"] = config.params[
            "head_repository"
        ]
        task["worker"]["env"]["GECKO_HEAD_REV"] = config.params["head_rev"]
        # Substitute the scm level into the secret name.
        task["worker"]["env"]["SYMBOL_SECRET"] = task["worker"]["env"][
            "SYMBOL_SECRET"
        ].format(level=config.params["level"])

        # Task-local attributes win over the build task's.
        attributes = copy_attributes_from_dependent_job(dep)
        attributes.update(task.get("attributes", {}))
        task["attributes"] = attributes

        treeherder = inherit_treeherder_from_dep(task, dep)
        th = dep.task.get("extra")["treeherder"]
        th_symbol = th.get("symbol")
        th_groupsymbol = th.get("groupSymbol", "?")

        # Disambiguate the treeherder symbol.
        # e.g. a build symbol "B" becomes "Sym", "Bo" becomes "Symo".
        sym = "Sym" + (th_symbol[1:] if th_symbol.startswith("B") else th_symbol)
        treeherder.setdefault("symbol", join_symbol(th_groupsymbol, sym))
        task["treeherder"] = treeherder

        # We only want to run these tasks if the build is run.
        # XXX Better to run this on promote phase instead?
        task["run-on-projects"] = dep.attributes.get("run_on_projects")
        task["optimization"] = {"upload-symbols": None}
        task["if-dependencies"] = ["build"]

        # clear out the stuff that's not part of a task description
        del task["primary-dependency"]

        yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/upstream_artifact_task.py b/taskcluster/gecko_taskgraph/transforms/upstream_artifact_task.py
new file mode 100644
index 0000000000..62f94a8238
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/upstream_artifact_task.py
@@ -0,0 +1,29 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Find upstream artifact task.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
+@transforms.add
def find_upstream_artifact_task(config, jobs):
    """Record the unique '-mac-signing' dependency as upstream-artifact-task.

    Asserts if more than one mac-signing dependency is present; jobs with
    none are passed through unchanged.
    """
    for job in jobs:
        dep_job = None
        # `or {}` mirrors the original falsy check on 'dependent-tasks'.
        for label in job.get("dependent-tasks") or {}:
            if not label.endswith("-mac-signing"):
                continue
            assert dep_job is None, (
                "Can't determine whether "
                "{} or {} is dep_job!".format(dep_job.label, label)
            )
            dep_job = job["dependent-tasks"][label]
        if dep_job is not None:
            job["upstream-artifact-task"] = dep_job
        yield job