author    Daniel Baumann <daniel.baumann@progress-linux.org> 2024-04-19 00:47:55 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org> 2024-04-19 00:47:55 +0000
commit    26a029d407be480d791972afb5975cf62c9360a6 (patch)
tree      f435a8308119effd964b339f76abb83a57c29483 /taskcluster/gecko_taskgraph
parent    Initial commit. (diff)
Adding upstream version 124.0.1. (upstream/124.0.1)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'taskcluster/gecko_taskgraph')
-rw-r--r--  taskcluster/gecko_taskgraph/.ruff.toml | 4
-rw-r--r--  taskcluster/gecko_taskgraph/__init__.py | 68
-rw-r--r--  taskcluster/gecko_taskgraph/actions/__init__.py | 16
-rw-r--r--  taskcluster/gecko_taskgraph/actions/add_new_jobs.py | 59
-rw-r--r--  taskcluster/gecko_taskgraph/actions/add_talos.py | 59
-rw-r--r--  taskcluster/gecko_taskgraph/actions/backfill.py | 441
-rw-r--r--  taskcluster/gecko_taskgraph/actions/cancel.py | 36
-rw-r--r--  taskcluster/gecko_taskgraph/actions/cancel_all.py | 60
-rw-r--r--  taskcluster/gecko_taskgraph/actions/confirm_failure.py | 268
-rw-r--r--  taskcluster/gecko_taskgraph/actions/create_interactive.py | 188
-rw-r--r--  taskcluster/gecko_taskgraph/actions/gecko_profile.py | 138
-rw-r--r--  taskcluster/gecko_taskgraph/actions/merge_automation.py | 98
-rw-r--r--  taskcluster/gecko_taskgraph/actions/openh264.py | 33
-rw-r--r--  taskcluster/gecko_taskgraph/actions/purge_caches.py | 34
-rw-r--r--  taskcluster/gecko_taskgraph/actions/raptor_extra_options.py | 77
-rw-r--r--  taskcluster/gecko_taskgraph/actions/rebuild_cached_tasks.py | 37
-rw-r--r--  taskcluster/gecko_taskgraph/actions/registry.py | 371
-rw-r--r--  taskcluster/gecko_taskgraph/actions/release_promotion.py | 427
-rw-r--r--  taskcluster/gecko_taskgraph/actions/retrigger.py | 311
-rw-r--r--  taskcluster/gecko_taskgraph/actions/retrigger_custom.py | 185
-rw-r--r--  taskcluster/gecko_taskgraph/actions/run_missing_tests.py | 62
-rw-r--r--  taskcluster/gecko_taskgraph/actions/scriptworker_canary.py | 45
-rw-r--r--  taskcluster/gecko_taskgraph/actions/side_by_side.py | 189
-rw-r--r--  taskcluster/gecko_taskgraph/actions/util.py | 437
-rw-r--r--  taskcluster/gecko_taskgraph/config.py | 142
-rw-r--r--  taskcluster/gecko_taskgraph/decision.py | 498
-rw-r--r--  taskcluster/gecko_taskgraph/docker.py | 91
-rw-r--r--  taskcluster/gecko_taskgraph/files_changed.py | 95
-rw-r--r--  taskcluster/gecko_taskgraph/loader/__init__.py | 0
-rw-r--r--  taskcluster/gecko_taskgraph/loader/test.py | 142
-rw-r--r--  taskcluster/gecko_taskgraph/loader/transform.py | 59
-rw-r--r--  taskcluster/gecko_taskgraph/main.py | 813
-rw-r--r--  taskcluster/gecko_taskgraph/manifests/fennec_geckoview.yml | 210
-rw-r--r--  taskcluster/gecko_taskgraph/manifests/firefox_candidates.yml | 433
-rw-r--r--  taskcluster/gecko_taskgraph/manifests/firefox_candidates_checksums.yml | 94
-rw-r--r--  taskcluster/gecko_taskgraph/manifests/firefox_nightly.yml | 523
-rw-r--r--  taskcluster/gecko_taskgraph/manifests/firefox_nightly_checksums.yml | 59
-rw-r--r--  taskcluster/gecko_taskgraph/manifests/release_checksums.yml | 70
-rw-r--r--  taskcluster/gecko_taskgraph/manifests/source_checksums.yml | 52
-rw-r--r--  taskcluster/gecko_taskgraph/manifests/source_files.yml | 52
-rw-r--r--  taskcluster/gecko_taskgraph/morph.py | 263
-rw-r--r--  taskcluster/gecko_taskgraph/optimize/__init__.py | 284
-rw-r--r--  taskcluster/gecko_taskgraph/optimize/backstop.py | 47
-rw-r--r--  taskcluster/gecko_taskgraph/optimize/bugbug.py | 321
-rw-r--r--  taskcluster/gecko_taskgraph/optimize/schema.py | 60
-rw-r--r--  taskcluster/gecko_taskgraph/optimize/strategies.py | 77
-rw-r--r--  taskcluster/gecko_taskgraph/parameters.py | 137
-rw-r--r--  taskcluster/gecko_taskgraph/target_tasks.py | 1606
-rw-r--r--  taskcluster/gecko_taskgraph/test/__init__.py | 0
-rw-r--r--  taskcluster/gecko_taskgraph/test/automationrelevance.json | 358
-rw-r--r--  taskcluster/gecko_taskgraph/test/conftest.py | 218
-rw-r--r--  taskcluster/gecko_taskgraph/test/docs/kinds.rst | 12
-rw-r--r--  taskcluster/gecko_taskgraph/test/docs/parameters.rst | 14
-rw-r--r--  taskcluster/gecko_taskgraph/test/python.toml | 42
-rw-r--r--  taskcluster/gecko_taskgraph/test/test_actions_util.py | 179
-rw-r--r--  taskcluster/gecko_taskgraph/test/test_decision.py | 175
-rw-r--r--  taskcluster/gecko_taskgraph/test/test_files_changed.py | 90
-rw-r--r--  taskcluster/gecko_taskgraph/test/test_main.py | 67
-rw-r--r--  taskcluster/gecko_taskgraph/test/test_morph.py | 108
-rw-r--r--  taskcluster/gecko_taskgraph/test/test_optimize_strategies.py | 515
-rw-r--r--  taskcluster/gecko_taskgraph/test/test_target_tasks.py | 428
-rw-r--r--  taskcluster/gecko_taskgraph/test/test_taskcluster_yml.py | 145
-rw-r--r--  taskcluster/gecko_taskgraph/test/test_transforms_job.py | 111
-rw-r--r--  taskcluster/gecko_taskgraph/test/test_transforms_test.py | 330
-rw-r--r--  taskcluster/gecko_taskgraph/test/test_try_option_syntax.py | 430
-rw-r--r--  taskcluster/gecko_taskgraph/test/test_util_attributes.py | 99
-rw-r--r--  taskcluster/gecko_taskgraph/test/test_util_backstop.py | 155
-rw-r--r--  taskcluster/gecko_taskgraph/test/test_util_bugbug.py | 57
-rw-r--r--  taskcluster/gecko_taskgraph/test/test_util_chunking.py | 411
-rw-r--r--  taskcluster/gecko_taskgraph/test/test_util_docker.py | 255
-rw-r--r--  taskcluster/gecko_taskgraph/test/test_util_partials.py | 128
-rw-r--r--  taskcluster/gecko_taskgraph/test/test_util_runnable_jobs.py | 75
-rw-r--r--  taskcluster/gecko_taskgraph/test/test_util_templates.py | 79
-rw-r--r--  taskcluster/gecko_taskgraph/test/test_util_verify.py | 149
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/__init__.py | 0
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/artifact.py | 116
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/artifacts.yml | 20
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/attribution.py | 69
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/balrog_submit.py | 155
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/balrog_toplevel.py | 42
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/beetmover.py | 178
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/beetmover_apt.py | 119
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/beetmover_checksums.py | 145
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/beetmover_emefree_checksums.py | 154
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/beetmover_geckoview.py | 181
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/beetmover_langpack_checksums.py | 143
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/beetmover_push_to_release.py | 93
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/beetmover_repackage.py | 358
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/beetmover_repackage_l10n.py | 44
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/beetmover_repackage_partner.py | 288
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/beetmover_snap.py | 42
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/beetmover_source.py | 35
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/beetmover_source_checksums.py | 152
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/bootstrap.py | 132
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/bouncer_aliases.py | 108
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/bouncer_check.py | 111
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/bouncer_locations.py | 35
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/bouncer_submission.py | 335
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/bouncer_submission_partners.py | 193
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/build.py | 235
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/build_attrs.py | 50
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/build_fat_aar.py | 78
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/build_lints.py | 59
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/build_signing.py | 74
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/cached_tasks.py | 101
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/chunk_partners.py | 78
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/code_review.py | 33
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/condprof.py | 91
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/cross_channel.py | 44
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/diffoscope.py | 172
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/docker_image.py | 210
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/fetch.py | 388
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/final_verify.py | 35
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/fxrecord.py | 30
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/geckodriver_mac_notarization.py | 83
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/geckodriver_signing.py | 139
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/github_sync.py | 23
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/hardened_signing.py | 111
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/job/__init__.py | 507
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/job/common.py | 269
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/job/distro_package.py | 240
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/job/hazard.py | 66
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/job/mach.py | 80
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/job/mozharness.py | 366
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/job/mozharness_test.py | 477
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/job/python_test.py | 47
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/job/run_task.py | 268
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/job/spidermonkey.py | 109
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/job/toolchain.py | 257
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/l10n.py | 423
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/mac_dummy.py | 40
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/mac_notarization.py | 19
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/mar_signing.py | 143
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/maybe_release.py | 23
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/merge_automation.py | 81
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/name_sanity.py | 48
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/openh264.py | 26
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/openh264_signing.py | 123
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/partials.py | 174
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/partner_attribution.py | 129
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/partner_attribution_beetmover.py | 157
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/partner_repack.py | 136
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/partner_signing.py | 68
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/per_platform_dummy.py | 36
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/perftest.py | 358
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/python_update.py | 25
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/release.py | 20
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/release_beetmover_signed_addons.py | 246
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/release_deps.py | 61
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/release_flatpak_push.py | 81
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/release_flatpak_repackage.py | 42
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/release_generate_checksums.py | 53
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/release_generate_checksums_beetmover.py | 133
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/release_generate_checksums_signing.py | 102
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/release_mark_as_shipped.py | 39
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/release_msix_push.py | 87
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/release_notifications.py | 73
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/release_sign_and_push_langpacks.py | 190
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/release_snap_repackage.py | 39
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/release_started.py | 52
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/release_version_bump.py | 42
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/repackage.py | 716
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/repackage_l10n.py | 29
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/repackage_partner.py | 316
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/repackage_routes.py | 34
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/repackage_set_upstream_mac_kind.py | 43
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/repackage_signing.py | 153
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/repackage_signing_partner.py | 163
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/repo_update.py | 25
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/reprocess_symbols.py | 72
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/reverse_chunk_deps.py | 45
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/run_pgo_profile.py | 34
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/scriptworker.py | 18
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/scriptworker_canary.py | 45
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/sentry.py | 30
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/shippable_l10n_signing.py | 88
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/signing.py | 258
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/snap_test.py | 48
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/source_checksums_signing.py | 99
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/source_test.py | 300
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/spidermonkey.py | 21
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/split_by_locale.py | 79
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/startup_test.py | 40
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/task.py | 2296
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/test/__init__.py | 544
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/test/chunk.py | 269
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/test/confirm_failure.py | 46
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/test/other.py | 1107
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/test/raptor.py | 326
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/test/variant.py | 124
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/test/worker.py | 204
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/trigger_comm_central.py | 24
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/try_job.py | 18
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/update_verify.py | 58
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/update_verify_config.py | 148
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/upload_generated_sources.py | 42
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/upload_symbols.py | 94
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/upstream_artifact_task.py | 29
-rw-r--r--  taskcluster/gecko_taskgraph/try_option_syntax.py | 750
-rw-r--r--  taskcluster/gecko_taskgraph/util/__init__.py | 0
-rw-r--r--  taskcluster/gecko_taskgraph/util/attributes.py | 147
-rw-r--r--  taskcluster/gecko_taskgraph/util/backstop.py | 84
-rw-r--r--  taskcluster/gecko_taskgraph/util/bugbug.py | 125
-rw-r--r--  taskcluster/gecko_taskgraph/util/cached_tasks.py | 82
-rw-r--r--  taskcluster/gecko_taskgraph/util/chunking.py | 351
-rw-r--r--  taskcluster/gecko_taskgraph/util/copy_task.py | 40
-rw-r--r--  taskcluster/gecko_taskgraph/util/declarative_artifacts.py | 92
-rw-r--r--  taskcluster/gecko_taskgraph/util/dependencies.py | 156
-rw-r--r--  taskcluster/gecko_taskgraph/util/docker.py | 333
-rw-r--r--  taskcluster/gecko_taskgraph/util/hash.py | 68
-rw-r--r--  taskcluster/gecko_taskgraph/util/hg.py | 139
-rw-r--r--  taskcluster/gecko_taskgraph/util/partials.py | 297
-rw-r--r--  taskcluster/gecko_taskgraph/util/partners.py | 555
-rw-r--r--  taskcluster/gecko_taskgraph/util/perfile.py | 104
-rw-r--r--  taskcluster/gecko_taskgraph/util/platforms.py | 58
-rw-r--r--  taskcluster/gecko_taskgraph/util/scriptworker.py | 865
-rw-r--r--  taskcluster/gecko_taskgraph/util/signed_artifacts.py | 198
-rw-r--r--  taskcluster/gecko_taskgraph/util/taskcluster.py | 128
-rw-r--r--  taskcluster/gecko_taskgraph/util/taskgraph.py | 49
-rw-r--r--  taskcluster/gecko_taskgraph/util/templates.py | 59
-rw-r--r--  taskcluster/gecko_taskgraph/util/verify.py | 454
-rw-r--r--  taskcluster/gecko_taskgraph/util/workertypes.py | 103
222 files changed, 38860 insertions, 0 deletions
diff --git a/taskcluster/gecko_taskgraph/.ruff.toml b/taskcluster/gecko_taskgraph/.ruff.toml
new file mode 100644
index 0000000000..0cd744c1cb
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/.ruff.toml
@@ -0,0 +1,4 @@
+extend = "../../pyproject.toml"
+
+[isort]
+known-first-party = ["gecko_taskgraph"]
diff --git a/taskcluster/gecko_taskgraph/__init__.py b/taskcluster/gecko_taskgraph/__init__.py
new file mode 100644
index 0000000000..f1de1e9120
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/__init__.py
@@ -0,0 +1,68 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+
+from taskgraph import config as taskgraph_config
+from taskgraph import morph as taskgraph_morph
+from taskgraph.util import schema
+from taskgraph.util import taskcluster as tc_util
+
+from gecko_taskgraph.config import graph_config_schema
+
+GECKO = os.path.normpath(os.path.realpath(os.path.join(__file__, "..", "..", "..")))
+
+# Maximum number of dependencies a single task can have
+# https://firefox-ci-tc.services.mozilla.com/docs/reference/platform/queue/task-schema
+# specifies 100, but we also optionally add the decision task id as a dep in
+# taskgraph.create, so let's set this to 99.
+MAX_DEPENDENCIES = 99
+
+# Overwrite Taskgraph's default graph_config_schema with a custom one.
+taskgraph_config.graph_config_schema = graph_config_schema
+
+# Don't use any of the upstream morphs.
+# TODO Investigate merging our morphs with upstream.
+taskgraph_morph.registered_morphs = []
+
+# Default rootUrl to use if none is given in the environment; this should point
+# to the production Taskcluster deployment used for CI.
+tc_util.PRODUCTION_TASKCLUSTER_ROOT_URL = "https://firefox-ci-tc.services.mozilla.com"
+
+# Schemas for YAML files should use dashed identifiers by default. If there are
+# components of the schema for which there is a good reason to use another format,
+# exceptions can be added here.
+schema.EXCEPTED_SCHEMA_IDENTIFIERS.extend(
+    [
+        "test_name",
+        "json_location",
+        "video_location",
+        "profile_name",
+        "target_path",
+        "try_task_config",
+    ]
+)
+
+
+def register(graph_config):
+ """Used to register Gecko specific extensions.
+
+ Args:
+ graph_config: The graph configuration object.
+ """
+ from taskgraph import generator
+
+ from gecko_taskgraph import ( # noqa: trigger target task method registration
+ morph, # noqa: trigger morph registration
+ target_tasks,
+ )
+ from gecko_taskgraph.parameters import register_parameters
+ from gecko_taskgraph.util import dependencies # noqa: trigger group_by registration
+ from gecko_taskgraph.util.verify import verifications
+
+ # Don't use the upstream verifications, and replace them with our own.
+ # TODO Investigate merging our verifications with upstream.
+ generator.verifications = verifications
+
+ register_parameters()
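
The MAX_DEPENDENCIES constant above encodes a queue constraint: the Taskcluster task schema allows 100 dependencies, and one slot is held back so the decision task id can be appended in taskgraph.create. A minimal, hypothetical sketch (not part of the patch above) of chunking a dependency list under that budget:

MAX_DEPENDENCIES = 99  # queue limit is 100; one slot reserved for the decision task

def chunk_dependencies(dependencies, decision_task_id):
    """Yield groups that stay within the queue limit even after the
    decision task id is appended to each group."""
    for i in range(0, len(dependencies), MAX_DEPENDENCIES):
        yield dependencies[i : i + MAX_DEPENDENCIES] + [decision_task_id]

deps = [f"task-{n}" for n in range(250)]
groups = list(chunk_dependencies(deps, "decision-task-id"))
assert all(len(group) <= 100 for group in groups)
print([len(group) for group in groups])  # [100, 100, 53]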
diff --git a/taskcluster/gecko_taskgraph/actions/__init__.py b/taskcluster/gecko_taskgraph/actions/__init__.py
new file mode 100644
index 0000000000..590a957282
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/actions/__init__.py
@@ -0,0 +1,16 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+from .registry import (
+    register_callback_action,
+    render_actions_json,
+    trigger_action_callback,
+)
+
+__all__ = [
+ "register_callback_action",
+ "render_actions_json",
+ "trigger_action_callback",
+]
diff --git a/taskcluster/gecko_taskgraph/actions/add_new_jobs.py b/taskcluster/gecko_taskgraph/actions/add_new_jobs.py
new file mode 100644
index 0000000000..05ca5cb7d4
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/actions/add_new_jobs.py
@@ -0,0 +1,59 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+from .registry import register_callback_action
+from .util import combine_task_graph_files, create_tasks, fetch_graph_and_labels
+
+
+@register_callback_action(
+ name="add-new-jobs",
+ title="Add new jobs",
+ symbol="add-new",
+ description="Add new jobs using task labels.",
+ order=100,
+ context=[],
+ schema={
+ "type": "object",
+ "properties": {
+ "tasks": {
+ "type": "array",
+ "description": "An array of task labels",
+ "items": {"type": "string"},
+ },
+ "times": {
+ "type": "integer",
+ "default": 1,
+ "minimum": 1,
+ "maximum": 100,
+ "title": "Times",
+ "description": "How many times to run each task.",
+ },
+ },
+ },
+)
+def add_new_jobs_action(parameters, graph_config, input, task_group_id, task_id):
+    decision_task_id, full_task_graph, label_to_taskid, _ = fetch_graph_and_labels(
+        parameters, graph_config
+    )
+
+    to_run = []
+    for elem in input["tasks"]:
+        if elem in full_task_graph.tasks:
+            to_run.append(elem)
+        else:
+            raise Exception(f"{elem} was not found in the task-graph")
+
+    times = input.get("times", 1)
+    for i in range(times):
+        create_tasks(
+            graph_config,
+            to_run,
+            full_task_graph,
+            label_to_taskid,
+            parameters,
+            decision_task_id,
+            i,
+        )
+    combine_task_graph_files(list(range(times)))
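
The schema in the decorator above is what Taskcluster validates the action's input against before add_new_jobs_action runs. A sketch of a well-formed payload, checked here with the third-party jsonschema package (an assumption; this file does not depend on it, and the task labels are invented):

import jsonschema  # assumed available; not a dependency of this module

schema = {
    "type": "object",
    "properties": {
        "tasks": {"type": "array", "items": {"type": "string"}},
        "times": {"type": "integer", "minimum": 1, "maximum": 100},
    },
}

# Illustrative labels only, not real labels from this graph.
payload = {"tasks": ["build-linux64/opt", "test-linux1804-64/opt-xpcshell-1"], "times": 2}
jsonschema.validate(payload, schema)  # raises ValidationError if the input is malformed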
diff --git a/taskcluster/gecko_taskgraph/actions/add_talos.py b/taskcluster/gecko_taskgraph/actions/add_talos.py
new file mode 100644
index 0000000000..ed3980713b
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/actions/add_talos.py
@@ -0,0 +1,59 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import logging
+
+from ..target_tasks import standard_filter
+from .registry import register_callback_action
+from .util import create_tasks, fetch_graph_and_labels
+
+logger = logging.getLogger(__name__)
+
+
+@register_callback_action(
+ name="run-all-talos",
+ title="Run All Talos Tests",
+ symbol="raT",
+ description="Add all Talos tasks to a push.",
+ order=150,
+ context=[],
+ schema={
+ "type": "object",
+ "properties": {
+ "times": {
+ "type": "integer",
+ "default": 1,
+ "minimum": 1,
+ "maximum": 6,
+ "title": "Times",
+ "description": "How many times to run each task.",
+ }
+ },
+ "additionalProperties": False,
+ },
+)
+def add_all_talos(parameters, graph_config, input, task_group_id, task_id):
+    decision_task_id, full_task_graph, label_to_taskid, _ = fetch_graph_and_labels(
+        parameters, graph_config
+    )
+
+    times = input.get("times", 1)
+    for i in range(times):
+        to_run = [
+            label
+            for label, entry in full_task_graph.tasks.items()
+            if "talos_try_name" in entry.attributes
+            and standard_filter(entry, parameters)
+        ]
+
+        create_tasks(
+            graph_config,
+            to_run,
+            full_task_graph,
+            label_to_taskid,
+            parameters,
+            decision_task_id,
+        )
+        logger.info(f"Scheduled {len(to_run)} talos tasks (time {i + 1}/{times})")
diff --git a/taskcluster/gecko_taskgraph/actions/backfill.py b/taskcluster/gecko_taskgraph/actions/backfill.py
new file mode 100644
index 0000000000..b5ee66b54c
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/actions/backfill.py
@@ -0,0 +1,441 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import json
+import logging
+import re
+import sys
+from functools import partial
+
+from taskgraph.util.taskcluster import get_task_definition
+
+from .registry import register_callback_action
+from .util import (
+    combine_task_graph_files,
+    create_tasks,
+    fetch_graph_and_labels,
+    get_decision_task_id,
+    get_pushes,
+    get_pushes_from_params_input,
+    trigger_action,
+)
+
+logger = logging.getLogger(__name__)
+SYMBOL_REGEX = re.compile("^(.*)-[a-z0-9]{11}-bk$")
+GROUP_SYMBOL_REGEX = re.compile("^(.*)-bk$")
+
+
+def input_for_support_action(revision, task, times=1, retrigger=True):
+ """Generate input for action to be scheduled.
+
+ Define what label to schedule with 'label'.
+ If it is a test task that uses explicit manifests add that information.
+ """
+ input = {
+ "label": task["metadata"]["name"],
+ "revision": revision,
+ "times": times,
+ # We want the backfilled tasks to share the same symbol as the originating task
+ "symbol": task["extra"]["treeherder"]["symbol"],
+ "retrigger": retrigger,
+ }
+
+ # Support tasks that are using manifest based scheduling
+ if task["payload"].get("env", {}).get("MOZHARNESS_TEST_PATHS"):
+ input["test_manifests"] = json.loads(
+ task["payload"]["env"]["MOZHARNESS_TEST_PATHS"]
+ )
+
+ return input
+
+
+@register_callback_action(
+ title="Backfill",
+ name="backfill",
+ permission="backfill",
+ symbol="Bk",
+ description=("Given a task schedule it on previous pushes in the same project."),
+ order=200,
+ context=[{}], # This will be available for all tasks
+ schema={
+ "type": "object",
+ "properties": {
+ "depth": {
+ "type": "integer",
+ "default": 19,
+ "minimum": 1,
+ "maximum": 25,
+ "title": "Depth",
+ "description": (
+ "The number of previous pushes before the current "
+ "push to attempt to trigger this task on."
+ ),
+ },
+ "inclusive": {
+ "type": "boolean",
+ "default": False,
+ "title": "Inclusive Range",
+ "description": (
+ "If true, the backfill will also retrigger the task "
+ "on the selected push."
+ ),
+ },
+ "times": {
+ "type": "integer",
+ "default": 1,
+ "minimum": 1,
+ "maximum": 10,
+ "title": "Times",
+ "description": (
+ "The number of times to execute each job you are backfilling."
+ ),
+ },
+ "retrigger": {
+ "type": "boolean",
+ "default": True,
+ "title": "Retrigger",
+ "description": (
+ "If False, the task won't retrigger on pushes that have already "
+ "ran it."
+ ),
+ },
+ },
+ "additionalProperties": False,
+ },
+ available=lambda parameters: True,
+)
+def backfill_action(parameters, graph_config, input, task_group_id, task_id):
+ """
+ This action takes a task ID and schedules it on previous pushes (via support action).
+
+ To execute this action locally follow the documentation here:
+ https://firefox-source-docs.mozilla.org/taskcluster/actions.html#testing-the-action-locally
+ """
+ task = get_task_definition(task_id)
+ pushes = get_pushes_from_params_input(parameters, input)
+ failed = False
+ input_for_action = input_for_support_action(
+ revision=parameters["head_rev"],
+ task=task,
+ times=input.get("times", 1),
+ retrigger=input.get("retrigger", True),
+ )
+
+ for push_id in pushes:
+ try:
+ # The Gecko decision task can sometimes fail on a push and we need to handle
+ # the exception that this call will produce
+ push_decision_task_id = get_decision_task_id(parameters["project"], push_id)
+ except Exception:
+ logger.warning(f"Could not find decision task for push {push_id}")
+ # The decision task may have failed, this is common enough that we
+ # don't want to report an error for it.
+ continue
+
+ try:
+ trigger_action(
+ action_name="backfill-task",
+ # This lets the action know on which push we want to add a new task
+ decision_task_id=push_decision_task_id,
+ input=input_for_action,
+ )
+ except Exception:
+ logger.exception(f"Failed to trigger action for {push_id}")
+ failed = True
+
+ if failed:
+ sys.exit(1)
+
+
+def add_backfill_suffix(regex, symbol, suffix):
+    m = regex.match(symbol)
+    if m is None:
+        symbol += suffix
+    return symbol
+
+
+def backfill_modifier(task, input):
+ if task.label != input["label"]:
+ return task
+
+ logger.debug(f"Modifying test_manifests for {task.label}")
+ times = input.get("times", 1)
+
+ # Set task duplicates based on 'times' value.
+ if times > 1:
+ task.attributes["task_duplicates"] = times
+
+ # If the original task has defined test paths
+ test_manifests = input.get("test_manifests")
+ if test_manifests:
+ revision = input.get("revision")
+
+ task.attributes["test_manifests"] = test_manifests
+ task.task["payload"]["env"]["MOZHARNESS_TEST_PATHS"] = json.dumps(
+ test_manifests
+ )
+ # The name/label might have been modify in new_label, thus, change it here as well
+ task.task["metadata"]["name"] = task.label
+ th_info = task.task["extra"]["treeherder"]
+ # Use a job symbol of the originating task as defined in the backfill action
+ th_info["symbol"] = add_backfill_suffix(
+ SYMBOL_REGEX, th_info["symbol"], f"-{revision[0:11]}-bk"
+ )
+ if th_info.get("groupSymbol"):
+ # Group all backfilled tasks together
+ th_info["groupSymbol"] = add_backfill_suffix(
+ GROUP_SYMBOL_REGEX, th_info["groupSymbol"], "-bk"
+ )
+ task.task["tags"]["action"] = "backfill-task"
+ return task
+
+
+def do_not_modify(task):
+    return task
+
+
+def new_label(label, tasks):
+ """This is to handle the case when a previous push does not contain a specific task label
+ and we try to find a label we can reuse.
+
+ For instance, we try to backfill chunk #3, however, a previous push does not contain such
+ chunk, thus, we try to reuse another task/label.
+ """
+ logger.info(f"Extracting new label for {label}")
+
+ if "-" not in label:
+ raise Exception(
+ f"Expected '-' was not found in label {label}, cannot extract new label."
+ )
+
+ begining_label, ending = label.rsplit("-", 1)
+
+ if ending.isdigit():
+ # We assume that the taskgraph has chunk #1 OR unnumbered chunk and we hijack it
+ if begining_label in tasks:
+ return begining_label
+ if begining_label + "-1" in tasks:
+ return begining_label + "-1"
+ raise Exception(f"New label ({label}) was not found in the task-graph")
+ else:
+ raise Exception(f"{label} was not found in the task-graph")
+
+
+@register_callback_action(
+ name="backfill-task",
+ title="Backfill task on a push.",
+ permission="backfill",
+ symbol="backfill-task",
+ description="This action is normally scheduled by the backfill action. "
+ "The intent is to schedule a task on previous pushes.",
+ order=500,
+ context=[],
+ schema={
+ "type": "object",
+ "properties": {
+ "label": {"type": "string", "description": "A task label"},
+ "revision": {
+ "type": "string",
+ "description": "Revision of the original push from where we backfill.",
+ },
+ "symbol": {
+ "type": "string",
+ "description": "Symbol to be used by the scheduled task.",
+ },
+ "test_manifests": {
+ "type": "array",
+ "default": [],
+ "description": "An array of test manifest paths",
+ "items": {"type": "string"},
+ },
+ "times": {
+ "type": "integer",
+ "default": 1,
+ "minimum": 1,
+ "maximum": 10,
+ "title": "Times",
+ "description": (
+ "The number of times to execute each job " "you are backfilling."
+ ),
+ },
+ "retrigger": {
+ "type": "boolean",
+ "default": True,
+ "title": "Retrigger",
+ "description": (
+ "If False, the task won't retrigger on pushes that have already "
+ "ran it."
+ ),
+ },
+ },
+ },
+)
+def add_task_with_original_manifests(
+    parameters, graph_config, input, task_group_id, task_id
+):
+    """
+    This action is normally scheduled by the backfill action. The intent is to schedule a test
+    task with the test manifests from the original task (if available).
+
+    The push in which we want to schedule a new task is defined by the parameters object.
+
+    To execute this action locally follow the documentation here:
+    https://firefox-source-docs.mozilla.org/taskcluster/actions.html#testing-the-action-locally
+    """
+    # This step takes a lot of time when executed locally
+    logger.info("Retrieving the full task graph and labels.")
+    decision_task_id, full_task_graph, label_to_taskid, _ = fetch_graph_and_labels(
+        parameters, graph_config
+    )
+
+    label = input.get("label")
+    if not input.get("retrigger") and label in label_to_taskid:
+        logger.info(
+            f"Skipping push with decision task ID {decision_task_id} as it already has this test."
+        )
+        return
+
+    if label not in full_task_graph.tasks:
+        label = new_label(label, full_task_graph.tasks)
+
+    to_run = [label]
+
+    logger.info("Creating tasks...")
+    create_tasks(
+        graph_config,
+        to_run,
+        full_task_graph,
+        label_to_taskid,
+        parameters,
+        decision_task_id,
+        suffix="0",
+        modifier=partial(backfill_modifier, input=input),
+    )
+
+    # TODO Implement a way to write out artifacts without assuming there's
+    # multiple sets of them so we can stop passing in "suffix".
+    combine_task_graph_files(["0"])
+
+
+@register_callback_action(
+ title="Backfill all browsertime",
+ name="backfill-all-browsertime",
+ permission="backfill",
+ symbol="baB",
+ description=(
+ "Schedule all browsertime tests for the current and previous push in the same project."
+ ),
+ order=800,
+ context=[], # This will be available for all tasks
+ available=lambda parameters: True,
+)
+def backfill_all_browsertime(parameters, graph_config, input, task_group_id, task_id):
+ """
+ This action takes a revision and schedules it on previous pushes (via support action).
+
+ To execute this action locally follow the documentation here:
+ https://firefox-source-docs.mozilla.org/taskcluster/actions.html#testing-the-action-locally
+ """
+ pushes = get_pushes(
+ project=parameters["head_repository"],
+ end_id=int(parameters["pushlog_id"]),
+ depth=2,
+ )
+
+ for push_id in pushes:
+ try:
+ # The Gecko decision task can sometimes fail on a push and we need to handle
+ # the exception that this call will produce
+ push_decision_task_id = get_decision_task_id(parameters["project"], push_id)
+ except Exception:
+ logger.warning(f"Could not find decision task for push {push_id}")
+ # The decision task may have failed, this is common enough that we
+ # don't want to report an error for it.
+ continue
+
+ try:
+ trigger_action(
+ action_name="add-all-browsertime",
+ # This lets the action know on which push we want to add a new task
+ decision_task_id=push_decision_task_id,
+ )
+ except Exception:
+ logger.exception(f"Failed to trigger action for {push_id}")
+ sys.exit(1)
+
+
+def filter_raptor_jobs(full_task_graph, label_to_taskid, project):
+    # Late import to prevent impacting other backfill action tasks
+    from ..util.attributes import match_run_on_projects
+
+    to_run = []
+    for label, entry in full_task_graph.tasks.items():
+        if entry.kind != "test":
+            continue
+        if entry.task.get("extra", {}).get("suite", "") != "raptor":
+            continue
+        if not match_run_on_projects(
+            project, entry.attributes.get("run_on_projects", [])
+        ):
+            continue
+        if "browsertime" not in entry.attributes.get("raptor_try_name", ""):
+            continue
+        if not entry.attributes.get("test_platform", "").endswith("shippable-qr/opt"):
+            continue
+        if "android" in entry.attributes.get("test_platform", ""):
+            # Bug 1786254 - The backfill bot is scheduling too many tests atm
+            continue
+        exceptions = ("live", "profiling", "youtube-playback")
+        if any(e in entry.attributes.get("raptor_try_name", "") for e in exceptions):
+            continue
+        if "firefox" in entry.attributes.get(
+            "raptor_try_name", ""
+        ) and entry.attributes.get("test_platform", "").endswith("64-shippable-qr/opt"):
+            # add the browsertime test
+            if label not in label_to_taskid:
+                to_run.append(label)
+        if "geckoview" in entry.attributes.get("raptor_try_name", ""):
+            # add the pageload test
+            if label not in label_to_taskid:
+                to_run.append(label)
+    return to_run
+
+
+@register_callback_action(
+ name="add-all-browsertime",
+ title="Add All Browsertime Tests.",
+ permission="backfill",
+ symbol="aaB",
+ description="This action is normally scheduled by the backfill-all-browsertime action. "
+ "The intent is to schedule all browsertime tests on a specific pushe.",
+ order=900,
+ context=[],
+)
+def add_all_browsertime(parameters, graph_config, input, task_group_id, task_id):
+ """
+ This action is normally scheduled by the backfill-all-browsertime action. The intent is to
+ trigger all browsertime tasks for the current revision.
+
+ The push in which we want to schedule a new task is defined by the parameters object.
+
+ To execute this action locally follow the documentation here:
+ https://firefox-source-docs.mozilla.org/taskcluster/actions.html#testing-the-action-locally
+ """
+ logger.info("Retreving the full task graph and labels.")
+ decision_task_id, full_task_graph, label_to_taskid, _ = fetch_graph_and_labels(
+ parameters, graph_config
+ )
+
+ to_run = filter_raptor_jobs(full_task_graph, label_to_taskid, parameters["project"])
+
+ create_tasks(
+ graph_config,
+ to_run,
+ full_task_graph,
+ label_to_taskid,
+ parameters,
+ decision_task_id,
+ )
+ logger.info(f"Scheduled {len(to_run)} raptor tasks (time 1)")
diff --git a/taskcluster/gecko_taskgraph/actions/cancel.py b/taskcluster/gecko_taskgraph/actions/cancel.py
new file mode 100644
index 0000000000..d895781395
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/actions/cancel.py
@@ -0,0 +1,36 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import logging
+
+import requests
+from taskgraph.util.taskcluster import cancel_task
+
+from .registry import register_callback_action
+
+logger = logging.getLogger(__name__)
+
+
+@register_callback_action(
+ title="Cancel Task",
+ name="cancel",
+ symbol="cx",
+ description=("Cancel the given task"),
+ order=350,
+ context=[{}],
+)
+def cancel_action(parameters, graph_config, input, task_group_id, task_id):
+    # Note that this is limited by the scopes afforded to generic actions to
+    # only cancel tasks with the level-specific schedulerId.
+    try:
+        cancel_task(task_id, use_proxy=True)
+    except requests.HTTPError as e:
+        if e.response.status_code == 409:
+            # A 409 response indicates that this task is past its deadline. It
+            # cannot be cancelled at this time, but it's also not running
+            # anymore, so we can ignore this error.
+            logger.info(f"Task {task_id} is past its deadline and cannot be cancelled.")
+            return
+        raise
diff --git a/taskcluster/gecko_taskgraph/actions/cancel_all.py b/taskcluster/gecko_taskgraph/actions/cancel_all.py
new file mode 100644
index 0000000000..d74b83b7d8
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/actions/cancel_all.py
@@ -0,0 +1,60 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import concurrent.futures as futures
+import logging
+import os
+
+import requests
+from taskgraph.util.taskcluster import CONCURRENCY, cancel_task
+
+from gecko_taskgraph.util.taskcluster import list_task_group_incomplete_task_ids
+
+from .registry import register_callback_action
+
+logger = logging.getLogger(__name__)
+
+
+@register_callback_action(
+ title="Cancel All",
+ name="cancel-all",
+ symbol="cAll",
+ description=(
+ "Cancel all running and pending tasks created by the decision task "
+ "this action task is associated with."
+ ),
+ order=400,
+ context=[],
+)
+def cancel_all_action(parameters, graph_config, input, task_group_id, task_id):
+    def do_cancel_task(task_id):
+        logger.info(f"Cancelling task {task_id}")
+        try:
+            cancel_task(task_id, use_proxy=True)
+        except requests.HTTPError as e:
+            if e.response.status_code == 409:
+                # A 409 response indicates that this task is past its deadline. It
+                # cannot be cancelled at this time, but it's also not running
+                # anymore, so we can ignore this error.
+                logger.info(
+                    "Task {} is past its deadline and cannot be cancelled.".format(
+                        task_id
+                    )
+                )
+                return
+            raise
+
+    own_task_id = os.environ.get("TASK_ID", "")
+    to_cancel = [
+        t
+        for t in list_task_group_incomplete_task_ids(task_group_id)
+        if t != own_task_id
+    ]
+
+    logger.info(f"Cancelling {len(to_cancel)} tasks")
+    with futures.ThreadPoolExecutor(CONCURRENCY) as e:
+        cancel_futs = [e.submit(do_cancel_task, t) for t in to_cancel]
+        for f in futures.as_completed(cancel_futs):
+            f.result()
diff --git a/taskcluster/gecko_taskgraph/actions/confirm_failure.py b/taskcluster/gecko_taskgraph/actions/confirm_failure.py
new file mode 100644
index 0000000000..84dbda2997
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/actions/confirm_failure.py
@@ -0,0 +1,268 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import json
+import logging
+from functools import partial
+
+from taskgraph.util.taskcluster import get_artifact, get_task_definition, list_artifacts
+
+from .registry import register_callback_action
+from .retrigger import retrigger_action
+from .util import add_args_to_command, create_tasks, fetch_graph_and_labels
+
+logger = logging.getLogger(__name__)
+
+
+def get_failures(task_id, task_definition):
+ """Returns a dict containing properties containing a list of
+ directories containing test failures and a separate list of
+ individual test failures from the errorsummary.log artifact for
+ the task.
+
+ Find test path to pass to the task in
+ MOZHARNESS_TEST_PATHS. If no appropriate test path can be
+ determined, nothing is returned.
+ """
+
+ def fix_wpt_name(test):
+ # TODO: find other cases to handle
+ if ".any." in test:
+ test = "%s.any.js" % test.split(".any.")[0]
+ if ".window.html" in test:
+ test = test.replace(".window.html", ".window.js")
+
+ if test.startswith("/_mozilla"):
+ test = "testing/web-platform/mozilla/tests" + test[len("_mozilla") :]
+ else:
+ test = "testing/web-platform/tests/" + test.strip("/")
+ # some wpt tests have params, those are not supported
+ test = test.split("?")[0]
+
+ return test
+
+ # collect dirs that don't have a specific manifest
+ dirs = []
+ tests = []
+
+ artifacts = list_artifacts(task_id)
+ for artifact in artifacts:
+ if "name" not in artifact or not artifact["name"].endswith("errorsummary.log"):
+ continue
+
+ stream = get_artifact(task_id, artifact["name"])
+ if not stream:
+ continue
+
+ # We handle the stream as raw bytes because it may contain invalid
+ # UTF-8 characters in portions other than those containing the error
+ # messages we're looking for.
+ for line in stream.read().split(b"\n"):
+ if not line.strip():
+ continue
+
+ l = json.loads(line)
+ if "group_results" in l.keys() and l["status"] != "OK":
+ dirs.append(l["group_results"].group())
+
+ elif "test" in l.keys():
+ if not l["test"]:
+ print("Warning: no testname in errorsummary line: %s" % l)
+ continue
+
+ test_path = l["test"].split(" ")[0]
+ found_path = False
+
+ # tests with url params (wpt), will get confused here
+ if "?" not in test_path:
+ test_path = test_path.split(":")[-1]
+
+ # edge case where a crash on shutdown has a "test" name == group name
+ if (
+ test_path.endswith(".toml")
+ or test_path.endswith(".ini")
+ or test_path.endswith(".list")
+ ):
+ # TODO: consider running just the manifest
+ continue
+
+ # edge cases with missing test names
+ if (
+ test_path is None
+ or test_path == "None"
+ or "SimpleTest" in test_path
+ ):
+ continue
+
+ if "signature" in l.keys():
+ # dealing with a crash
+ found_path = True
+ if "web-platform" in task_definition["extra"]["suite"]:
+ test_path = fix_wpt_name(test_path)
+ else:
+ if "status" not in l and "expected" not in l:
+ continue
+
+ if l["status"] != l["expected"]:
+ if l["status"] not in l.get("known_intermittent", []):
+ found_path = True
+ if "web-platform" in task_definition["extra"]["suite"]:
+ test_path = fix_wpt_name(test_path)
+
+ if found_path and test_path:
+ fpath = test_path.replace("\\", "/")
+ tval = {"path": fpath, "group": l["group"]}
+ # only store one failure per test
+ if not [t for t in tests if t["path"] == fpath]:
+ tests.append(tval)
+
+ # only run the failing test not both test + dir
+ if l["group"] in dirs:
+ dirs.remove(l["group"])
+
+ # TODO: 10 is too much; how to get only NEW failures?
+ if len(tests) > 10:
+ break
+
+ dirs = [{"path": "", "group": d} for d in list(set(dirs))]
+ return {"dirs": dirs, "tests": tests}
+
+
+def get_repeat_args(task_definition, failure_group):
+    task_name = task_definition["metadata"]["name"]
+    repeatable_task = False
+    if (
+        "crashtest" in task_name
+        or "mochitest" in task_name
+        or "reftest" in task_name
+        or "xpcshell" in task_name
+        or "web-platform" in task_name
+        and "jsreftest" not in task_name
+    ):
+        repeatable_task = True
+
+    repeat_args = ""
+    if not repeatable_task:
+        return repeat_args
+
+    if failure_group == "dirs":
+        # execute 3 total loops
+        repeat_args = ["--repeat=2"] if repeatable_task else []
+    elif failure_group == "tests":
+        # execute 5 total loops
+        repeat_args = ["--repeat=4"] if repeatable_task else []
+
+    return repeat_args
+
+
+def confirm_modifier(task, input):
+ if task.label != input["label"]:
+ return task
+
+ logger.debug(f"Modifying paths for {task.label}")
+
+ # If the original task has defined test paths
+ suite = input.get("suite")
+ test_path = input.get("test_path")
+ test_group = input.get("test_group")
+ if test_path or test_group:
+ repeat_args = input.get("repeat_args")
+
+ if repeat_args:
+ task.task["payload"]["command"] = add_args_to_command(
+ task.task["payload"]["command"], extra_args=repeat_args
+ )
+
+ # TODO: do we need this attribute?
+ task.attributes["test_path"] = test_path
+
+ task.task["payload"]["env"]["MOZHARNESS_TEST_PATHS"] = json.dumps(
+ {suite: [test_group]}, sort_keys=True
+ )
+ task.task["payload"]["env"]["MOZHARNESS_CONFIRM_PATHS"] = json.dumps(
+ {suite: [test_path]}, sort_keys=True
+ )
+ task.task["payload"]["env"]["MOZLOG_DUMP_ALL_TESTS"] = "1"
+
+ task.task["metadata"]["name"] = task.label
+ task.task["tags"]["action"] = "confirm-failure"
+ return task
+
+
+@register_callback_action(
+ name="confirm-failures",
+ title="Confirm failures in job",
+ symbol="cf",
+ description="Re-run Tests for original manifest, directories or tests for failing tests.",
+ order=150,
+ context=[{"kind": "test"}],
+ schema={
+ "type": "object",
+ "properties": {
+ "label": {"type": "string", "description": "A task label"},
+ "suite": {"type": "string", "description": "Test suite"},
+ "test_path": {"type": "string", "description": "A full path to test"},
+ "test_group": {
+ "type": "string",
+ "description": "A full path to group name",
+ },
+ "repeat_args": {
+ "type": "string",
+ "description": "args to pass to test harness",
+ },
+ },
+ "additionalProperties": False,
+ },
+)
+def confirm_failures(parameters, graph_config, input, task_group_id, task_id):
+    task_definition = get_task_definition(task_id)
+    decision_task_id, full_task_graph, label_to_taskid, _ = fetch_graph_and_labels(
+        parameters, graph_config
+    )
+
+    # create the -cf label; ideally make this a common function
+    cfname = "%s-cf" % task_definition["metadata"]["name"]
+
+    if cfname not in full_task_graph.tasks:
+        raise Exception(f"{cfname} was not found in the task-graph")
+
+    to_run = [cfname]
+
+    suite = task_definition["extra"]["suite"]
+    if "-coverage" in suite:
+        suite = suite[: suite.index("-coverage")]
+    if "-qr" in suite:
+        suite = suite[: suite.index("-qr")]
+    failures = get_failures(task_id, task_definition)
+
+    if failures["dirs"] == [] and failures["tests"] == []:
+        logger.info("need to retrigger task as no specific test failures found")
+        retrigger_action(parameters, graph_config, input, decision_task_id, task_id)
+        return
+
+    # for each unique failure, create a new confirm failure job
+    for failure_group in failures:
+        for failure_path in failures[failure_group]:
+            repeat_args = get_repeat_args(task_definition, failure_group)
+
+            input = {
+                "label": cfname,
+                "suite": suite,
+                "test_path": failure_path["path"],
+                "test_group": failure_path["group"],
+                "repeat_args": repeat_args,
+            }
+
+            logger.info("confirm_failures: %s" % failures)
+            create_tasks(
+                graph_config,
+                to_run,
+                full_task_graph,
+                label_to_taskid,
+                parameters,
+                decision_task_id,
+                modifier=partial(confirm_modifier, input=input),
+            )
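
get_failures above consumes errorsummary.log, which holds one JSON record per line. A sketch of the per-test selection it applies; the sample record is fabricated for illustration:

import json

line = (
    '{"test": "dom/workers/test/test_sharedWorker.html",'
    ' "group": "dom/workers/test/mochitest.toml",'
    ' "status": "FAIL", "expected": "PASS", "known_intermittent": []}'
)
record = json.loads(line)

# Mirrors the selection above: a result that differs from its expectation
# and is not a known intermittent counts as a real failure.
if record["status"] != record["expected"] and record["status"] not in record.get(
    "known_intermittent", []
):
    failure = {"path": record["test"].replace("\\", "/"), "group": record["group"]}
    print(failure)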
diff --git a/taskcluster/gecko_taskgraph/actions/create_interactive.py b/taskcluster/gecko_taskgraph/actions/create_interactive.py
new file mode 100644
index 0000000000..27ec3e78df
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/actions/create_interactive.py
@@ -0,0 +1,188 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import logging
+import os
+import re
+
+import taskcluster_urls
+from taskgraph.util.taskcluster import get_root_url, get_task_definition
+
+from gecko_taskgraph.actions.registry import register_callback_action
+from gecko_taskgraph.actions.util import create_tasks, fetch_graph_and_labels
+
+logger = logging.getLogger(__name__)
+
+EMAIL_SUBJECT = "Your Interactive Task for {label}"
+EMAIL_CONTENT = """\
+As you requested, Firefox CI has created an interactive task to run {label}
+on revision {revision} in {repo}. Click the button below to connect to the
+task. You may need to wait for it to begin running.
+"""
+
+###
+# Security Concerns
+#
+# An "interactive task" is, quite literally, shell access to a worker. That
+# is limited by being in a Docker container, but we assume that Docker has
+# bugs so we do not want to rely on container isolation exclusively.
+#
+# Interactive tasks should never be allowed on hosts that build binaries
+# leading to a release -- level 3 builders.
+#
+# Users must not be allowed to create interactive tasks for tasks above
+# their own level.
+#
+# Interactive tasks must not have any routes that might make them appear
+# in the index to be used by other production tasks.
+#
+# Interactive tasks should not be able to write to any docker-worker caches.
+
+SCOPE_WHITELIST = [
+    # these are not actually secrets, and just about everything needs them
+    re.compile(r"^secrets:get:project/taskcluster/gecko/(hgfingerprint|hgmointernal)$"),
+    # public downloads are OK
+    re.compile(r"^docker-worker:relengapi-proxy:tooltool.download.public$"),
+    re.compile(r"^project:releng:services/tooltool/api/download/public$"),
+    # internal downloads are OK
+    re.compile(r"^docker-worker:relengapi-proxy:tooltool.download.internal$"),
+    re.compile(r"^project:releng:services/tooltool/api/download/internal$"),
+    # private toolchain artifacts from tasks
+    re.compile(r"^queue:get-artifact:project/gecko/.*$"),
+    # level-appropriate secrets are generally necessary to run a task; these
+    # also are "not that secret" - most of them are built into the resulting
+    # binary and could be extracted by someone with `strings`.
+    re.compile(r"^secrets:get:project/releng/gecko/build/level-[0-9]/\*"),
+    # ptracing is generally useful for interactive tasks, too!
+    re.compile(r"^docker-worker:feature:allowPtrace$"),
+    # docker-worker capabilities include loopback devices
+    re.compile(r"^docker-worker:capability:device:.*$"),
+    re.compile(r"^docker-worker:capability:privileged$"),
+    re.compile(r"^docker-worker:cache:gecko-level-1-checkouts.*$"),
+    re.compile(r"^docker-worker:cache:gecko-level-1-tooltool-cache.*$"),
+]
+
+
+def context(params):
+    # available for any docker-worker tasks at levels 1, 2; and for
+    # test tasks on level 3 (level-3 builders are firewalled off)
+    if int(params["level"]) < 3:
+        return [{"worker-implementation": "docker-worker"}]
+    return [{"worker-implementation": "docker-worker", "kind": "test"}]
+    # Windows is not supported by one-click loaners yet. See
+    # https://wiki.mozilla.org/ReleaseEngineering/How_To/Self_Provision_a_TaskCluster_Windows_Instance
+    # for instructions for using them.
+
+
+@register_callback_action(
+ title="Create Interactive Task",
+ name="create-interactive",
+ symbol="create-inter",
+ description=("Create a a copy of the task that you can interact with"),
+ order=50,
+ context=context,
+ schema={
+ "type": "object",
+ "properties": {
+ "notify": {
+ "type": "string",
+ "format": "email",
+ "title": "Who to notify of the pending interactive task",
+ "description": (
+ "Enter your email here to get an email containing a link "
+ "to interact with the task"
+ ),
+ # include a default for ease of users' editing
+ "default": "noreply@noreply.mozilla.org",
+ },
+ },
+ "additionalProperties": False,
+ },
+)
+def create_interactive_action(parameters, graph_config, input, task_group_id, task_id):
+    # fetch the original task definition from the taskgraph, to avoid
+    # creating interactive copies of unexpected tasks. Note that this only applies
+    # to docker-worker tasks, so we can assume the docker-worker payload format.
+    decision_task_id, full_task_graph, label_to_taskid, _ = fetch_graph_and_labels(
+        parameters, graph_config
+    )
+    task = get_task_definition(task_id)
+    label = task["metadata"]["name"]
+
+    def edit(task):
+        if task.label != label:
+            return task
+        task_def = task.task
+
+        # drop task routes (don't index this!)
+        task_def["routes"] = []
+
+        # only try this once
+        task_def["retries"] = 0
+
+        # short expirations, at least 3 hour maxRunTime
+        task_def["deadline"] = {"relative-datestamp": "12 hours"}
+        task_def["created"] = {"relative-datestamp": "0 hours"}
+        task_def["expires"] = {"relative-datestamp": "1 day"}
+
+        # filter scopes with the SCOPE_WHITELIST
+        task.task["scopes"] = [
+            s
+            for s in task.task.get("scopes", [])
+            if any(p.match(s) for p in SCOPE_WHITELIST)
+        ]
+
+        payload = task_def["payload"]
+
+        # make sure the task runs for long enough..
+        payload["maxRunTime"] = max(3600 * 3, payload.get("maxRunTime", 0))
+
+        # no caches or artifacts
+        payload["cache"] = {}
+        payload["artifacts"] = {}
+
+        # enable interactive mode
+        payload.setdefault("features", {})["interactive"] = True
+        payload.setdefault("env", {})["TASKCLUSTER_INTERACTIVE"] = "true"
+
+        for key in task_def["payload"]["env"].keys():
+            payload["env"][key] = task_def["payload"]["env"].get(key, "")
+
+        # add notification
+        email = input.get("notify")
+        # no point sending to a noreply address!
+        if email and email != "noreply@noreply.mozilla.org":
+            info = {
+                "url": taskcluster_urls.ui(
+                    get_root_url(False), "tasks/${status.taskId}/connect"
+                ),
+                "label": label,
+                "revision": parameters["head_rev"],
+                "repo": parameters["head_repository"],
+            }
+            task_def.setdefault("extra", {}).setdefault("notify", {})["email"] = {
+                "subject": EMAIL_SUBJECT.format(**info),
+                "content": EMAIL_CONTENT.format(**info),
+                "link": {"text": "Connect", "href": info["url"]},
+            }
+            task_def["routes"].append(f"notify.email.{email}.on-pending")
+
+        return task
+
+    # Create the task and any of its dependencies. This uses a new taskGroupId to avoid
+    # polluting the existing taskGroup with interactive tasks.
+    action_task_id = os.environ.get("TASK_ID")
+    label_to_taskid = create_tasks(
+        graph_config,
+        [label],
+        full_task_graph,
+        label_to_taskid,
+        parameters,
+        decision_task_id=action_task_id,
+        modifier=edit,
+    )
+
+    taskId = label_to_taskid[label]
+    logger.info(f"Created interactive task {taskId}")
diff --git a/taskcluster/gecko_taskgraph/actions/gecko_profile.py b/taskcluster/gecko_taskgraph/actions/gecko_profile.py
new file mode 100644
index 0000000000..ce4394e77c
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/actions/gecko_profile.py
@@ -0,0 +1,138 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import logging
+
+import requests
+from requests.exceptions import HTTPError
+from taskgraph.taskgraph import TaskGraph
+from taskgraph.util.taskcluster import get_artifact_from_index, get_task_definition
+
+from gecko_taskgraph.util.taskgraph import find_decision_task
+
+from .registry import register_callback_action
+from .util import combine_task_graph_files, create_tasks
+
+PUSHLOG_TMPL = "{}/json-pushes?version=2&startID={}&endID={}"
+INDEX_TMPL = "gecko.v2.{}.pushlog-id.{}.decision"
+
+logger = logging.getLogger(__name__)
+
+
+@register_callback_action(
+ title="GeckoProfile",
+ name="geckoprofile",
+ symbol="Gp",
+ description=(
+ "Take the label of the current task, "
+ "and trigger the task with that label "
+ "on previous pushes in the same project "
+ "while adding the --gecko-profile cmd arg."
+ ),
+ order=200,
+ context=[{"test-type": "talos"}, {"test-type": "raptor"}],
+ schema={},
+ available=lambda parameters: True,
+)
+def geckoprofile_action(parameters, graph_config, input, task_group_id, task_id):
+ task = get_task_definition(task_id)
+ label = task["metadata"]["name"]
+ pushes = []
+ depth = 2
+ end_id = int(parameters["pushlog_id"])
+
+ while True:
+ start_id = max(end_id - depth, 0)
+ pushlog_url = PUSHLOG_TMPL.format(
+ parameters["head_repository"], start_id, end_id
+ )
+ r = requests.get(pushlog_url)
+ r.raise_for_status()
+ pushes = pushes + list(r.json()["pushes"].keys())
+ if len(pushes) >= depth:
+ break
+
+ end_id = start_id - 1
+ start_id -= depth
+ if start_id < 0:
+ break
+
+ pushes = sorted(pushes)[-depth:]
+ backfill_pushes = []
+
+ for push in pushes:
+ try:
+ full_task_graph = get_artifact_from_index(
+ INDEX_TMPL.format(parameters["project"], push),
+ "public/full-task-graph.json",
+ )
+ _, full_task_graph = TaskGraph.from_json(full_task_graph)
+ label_to_taskid = get_artifact_from_index(
+ INDEX_TMPL.format(parameters["project"], push),
+ "public/label-to-taskid.json",
+ )
+ push_params = get_artifact_from_index(
+ INDEX_TMPL.format(parameters["project"], push), "public/parameters.yml"
+ )
+ push_decision_task_id = find_decision_task(push_params, graph_config)
+ except HTTPError as e:
+ logger.info(f"Skipping {push} due to missing index artifacts! Error: {e}")
+ continue
+
+ if label in full_task_graph.tasks.keys():
+
+ def modifier(task):
+ if task.label != label:
+ return task
+
+ cmd = task.task["payload"]["command"]
+ task.task["payload"]["command"] = add_args_to_perf_command(
+ cmd, ["--gecko-profile"]
+ )
+ task.task["extra"]["treeherder"]["symbol"] += "-p"
+ task.task["extra"]["treeherder"]["groupName"] += " (profiling)"
+ return task
+
+ create_tasks(
+ graph_config,
+ [label],
+ full_task_graph,
+ label_to_taskid,
+ push_params,
+ push_decision_task_id,
+ push,
+ modifier=modifier,
+ )
+ backfill_pushes.append(push)
+ else:
+ logging.info(f"Could not find {label} on {push}. Skipping.")
+ combine_task_graph_files(backfill_pushes)
+
+
+def add_args_to_perf_command(payload_commands, extra_args=None):
+ """
+ Add custom command-line args to a given command.
+
+ Args:
+ payload_commands: the raw command as seen by taskcluster
+ extra_args: list of args to inject
+ """
+ extra_args = extra_args or []
+ perf_command_idx = -1 # currently, it's the last (or only) command
+ perf_command = payload_commands[perf_command_idx]
+
+ command_form = "default"
+ if isinstance(perf_command, str):
+ # windows has a single command, in long string form
+ perf_command = perf_command.split(" ")
+ command_form = "string"
+ # osx & linux have an array of subarrays
+
+ perf_command.extend(extra_args)
+
+ if command_form == "string":
+ # pack it back to list
+ perf_command = " ".join(perf_command)
+
+ payload_commands[perf_command_idx] = perf_command
+ return payload_commands
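+
+
+# A minimal sketch (not part of the action) of how add_args_to_perf_command
+# treats the two payload forms; the commands below are hypothetical:
+#
+#     # windows: a single long string
+#     add_args_to_perf_command(["python run.py --test tp6"], ["--gecko-profile"])
+#     # -> ["python run.py --test tp6 --gecko-profile"]
+#
+#     # linux/osx: an array of subarrays
+#     add_args_to_perf_command([["python", "run.py"]], ["--gecko-profile"])
+#     # -> [["python", "run.py", "--gecko-profile"]]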
diff --git a/taskcluster/gecko_taskgraph/actions/merge_automation.py b/taskcluster/gecko_taskgraph/actions/merge_automation.py
new file mode 100644
index 0000000000..264383dd66
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/actions/merge_automation.py
@@ -0,0 +1,98 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from taskgraph.parameters import Parameters
+
+from gecko_taskgraph.actions.registry import register_callback_action
+from gecko_taskgraph.decision import taskgraph_decision
+from gecko_taskgraph.util.attributes import RELEASE_PROMOTION_PROJECTS
+
+
+def is_release_promotion_available(parameters):
+ return parameters["project"] in RELEASE_PROMOTION_PROJECTS
+
+
+@register_callback_action(
+ name="merge-automation",
+ title="Merge Day Automation",
+ symbol="${input.behavior}",
+ description="Merge repository branches.",
+ permission="merge-automation",
+ order=500,
+ context=[],
+ available=is_release_promotion_available,
+ schema=lambda graph_config: {
+ "type": "object",
+ "properties": {
+ "force-dry-run": {
+ "type": "boolean",
+ "description": "Override other options and do not push changes",
+ "default": True,
+ },
+ "push": {
+ "type": "boolean",
+ "description": "Push changes using to_repo and to_branch",
+ "default": False,
+ },
+ "behavior": {
+ "type": "string",
+ "description": "The type of release promotion to perform.",
+ "enum": sorted(graph_config["merge-automation"]["behaviors"].keys()),
+ "default": "central-to-beta",
+ },
+ "from-repo": {
+ "type": "string",
+ "description": "The URI of the source repository",
+ },
+ "to-repo": {
+ "type": "string",
+ "description": "The push URI of the target repository",
+ },
+ "from-branch": {
+ "type": "string",
+ "description": "The fx head of the source, such as central",
+ },
+ "to-branch": {
+ "type": "string",
+ "description": "The fx head of the target, such as beta",
+ },
+ "ssh-user-alias": {
+ "type": "string",
+ "description": "The alias of an ssh account to use when pushing changes.",
+ },
+ "fetch-version-from": {
+ "type": "string",
+ "description": "Path to file used when querying current version.",
+ },
+ },
+ "required": ["behavior"],
+ },
+)
+def merge_automation_action(parameters, graph_config, input, task_group_id, task_id):
+ # make parameters read-write
+ parameters = dict(parameters)
+
+ parameters["target_tasks_method"] = "merge_automation"
+ parameters["merge_config"] = {
+ "force-dry-run": input.get("force-dry-run", False),
+ "behavior": input["behavior"],
+ }
+
+ for field in [
+ "from-repo",
+ "from-branch",
+ "to-repo",
+ "to-branch",
+ "ssh-user-alias",
+ "push",
+ "fetch-version-from",
+ ]:
+ if input.get(field):
+ parameters["merge_config"][field] = input[field]
+ parameters["tasks_for"] = "action"
+
+ # make parameters read-only
+ parameters = Parameters(**parameters)
+
+ taskgraph_decision({"root": graph_config.root_dir}, parameters=parameters)
diff --git a/taskcluster/gecko_taskgraph/actions/openh264.py b/taskcluster/gecko_taskgraph/actions/openh264.py
new file mode 100644
index 0000000000..046d5910d2
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/actions/openh264.py
@@ -0,0 +1,33 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+from .registry import register_callback_action
+from .util import create_tasks, fetch_graph_and_labels
+
+
+@register_callback_action(
+ name="openh264",
+ title="OpenH264 Binaries",
+ symbol="h264",
+ description="Action to prepare openh264 binaries for shipping",
+ context=[],
+)
+def openh264_action(parameters, graph_config, input, task_group_id, task_id):
+ decision_task_id, full_task_graph, label_to_taskid, _ = fetch_graph_and_labels(
+ parameters, graph_config
+ )
+ to_run = [
+ label
+ for label, entry in full_task_graph.tasks.items()
+ if "openh264" in entry.kind
+ ]
+ create_tasks(
+ graph_config,
+ to_run,
+ full_task_graph,
+ label_to_taskid,
+ parameters,
+ decision_task_id,
+ )
diff --git a/taskcluster/gecko_taskgraph/actions/purge_caches.py b/taskcluster/gecko_taskgraph/actions/purge_caches.py
new file mode 100644
index 0000000000..4905526f6c
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/actions/purge_caches.py
@@ -0,0 +1,34 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import logging
+
+from taskgraph.util.taskcluster import get_task_definition, purge_cache
+
+from .registry import register_callback_action
+
+logger = logging.getLogger(__name__)
+
+
+@register_callback_action(
+ title="Purge Worker Caches",
+ name="purge-cache",
+ symbol="purge-cache",
+ description=(
+ "Purge any caches associated with this task "
+ "across all workers of the same workertype as the task."
+ ),
+ order=450,
+ context=[{"worker-implementation": "docker-worker"}],
+)
+def purge_caches_action(parameters, graph_config, input, task_group_id, task_id):
+ task = get_task_definition(task_id)
+ if task["payload"].get("cache"):
+ for cache in task["payload"]["cache"]:
+ purge_cache(
+ task["provisionerId"], task["workerType"], cache, use_proxy=True
+ )
+ else:
+ logger.info("Task has no caches. Will not clear anything!")
diff --git a/taskcluster/gecko_taskgraph/actions/raptor_extra_options.py b/taskcluster/gecko_taskgraph/actions/raptor_extra_options.py
new file mode 100644
index 0000000000..c8a7753319
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/actions/raptor_extra_options.py
@@ -0,0 +1,77 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import logging
+
+from taskgraph.util.taskcluster import get_task_definition
+
+from .registry import register_callback_action
+from .util import create_tasks, fetch_graph_and_labels
+
+logger = logging.getLogger(__name__)
+
+
+@register_callback_action(
+ title="Raptor Extra Options",
+ name="raptor-extra-options",
+ symbol="rxo",
+ description=(
+ "Allows the user to rerun raptor-browsertime tasks with additional arguments."
+ ),
+ order=200,
+ context=[{"test-type": "raptor"}],
+ schema={
+ "type": "object",
+ "properties": {
+ "extra_options": {
+ "type": "string",
+ "default": "",
+ "description": "A space-delimited string of extra options "
+ "to be passed into a raptor-browsertime test."
+ "This also works with options with values, where the values "
+ "should be set as an assignment e.g. browser-cycles=3 "
+ "Passing multiple extra options could look something this: "
+ "`verbose browser-cycles=3` where the test runs with verbose "
+ "mode on and the browser cycles only 3 times.",
+ }
+ },
+ },
+ available=lambda parameters: True,
+)
+def raptor_extra_options_action(
+ parameters, graph_config, input, task_group_id, task_id
+):
+ decision_task_id, full_task_graph, label_to_taskid, _ = fetch_graph_and_labels(
+ parameters, graph_config
+ )
+ task = get_task_definition(task_id)
+ label = task["metadata"]["name"]
+
+ def modifier(task):
+ if task.label != label:
+ return task
+
+ if task.task["payload"]["env"].get("PERF_FLAGS"):
+ task.task["payload"]["env"]["PERF_FLAGS"] += " " + input.get(
+ "extra_options"
+ )
+ else:
+ task.task["payload"]["env"].setdefault(
+ "PERF_FLAGS", input.get("extra_options")
+ )
+
+ task.task["extra"]["treeherder"]["symbol"] += "-rxo"
+ task.task["extra"]["treeherder"]["groupName"] += " (extra options run)"
+ return task
+
+ create_tasks(
+ graph_config,
+ [label],
+ full_task_graph,
+ label_to_taskid,
+ parameters,
+ decision_task_id,
+ modifier=modifier,
+ )
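+
+
+# A hypothetical input for this action:
+#
+#     {"extra_options": "verbose browser-cycles=3"}
+#
+# The string is appended to (or used to seed) the task's PERF_FLAGS
+# environment variable by the modifier above.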
diff --git a/taskcluster/gecko_taskgraph/actions/rebuild_cached_tasks.py b/taskcluster/gecko_taskgraph/actions/rebuild_cached_tasks.py
new file mode 100644
index 0000000000..612da374ad
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/actions/rebuild_cached_tasks.py
@@ -0,0 +1,37 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+from .registry import register_callback_action
+from .util import create_tasks, fetch_graph_and_labels
+
+
+@register_callback_action(
+ name="rebuild-cached-tasks",
+ title="Rebuild Cached Tasks",
+ symbol="rebuild-cached",
+ description="Rebuild cached tasks.",
+ order=1000,
+ context=[],
+)
+def rebuild_cached_tasks_action(
+ parameters, graph_config, input, task_group_id, task_id
+):
+ decision_task_id, full_task_graph, label_to_taskid, _ = fetch_graph_and_labels(
+ parameters, graph_config
+ )
+ cached_tasks = [
+ label
+ for label, task in full_task_graph.tasks.items()
+ if task.attributes.get("cached_task", False)
+ ]
+ if cached_tasks:
+ create_tasks(
+ graph_config,
+ cached_tasks,
+ full_task_graph,
+ label_to_taskid,
+ parameters,
+ decision_task_id,
+ )
diff --git a/taskcluster/gecko_taskgraph/actions/registry.py b/taskcluster/gecko_taskgraph/actions/registry.py
new file mode 100644
index 0000000000..0c99e68d20
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/actions/registry.py
@@ -0,0 +1,371 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import json
+import re
+from collections import namedtuple
+from types import FunctionType
+
+from mozbuild.util import memoize
+from taskgraph import create
+from taskgraph.config import load_graph_config
+from taskgraph.parameters import Parameters
+from taskgraph.util import taskcluster, yaml
+from taskgraph.util.python_path import import_sibling_modules
+
+from gecko_taskgraph.util import hash
+
+actions = []
+callbacks = {}
+
+Action = namedtuple("Action", ["order", "cb_name", "permission", "action_builder"])
+
+
+def is_json(data):
+ """Return ``True``, if ``data`` is a JSON serializable data structure."""
+ try:
+ json.dumps(data)
+ except ValueError:
+ return False
+ return True
+
+
+@memoize
+def read_taskcluster_yml(filename):
+ """Load and parse .taskcluster.yml, memoized to save some time"""
+ return yaml.load_yaml(filename)
+
+
+@memoize
+def hash_taskcluster_yml(filename):
+ """
+ Generate a hash of the given .taskcluster.yml. This is the first 10 digits
+ of the sha256 of the file's content, and is used by administrative scripts
+ to create a hook based on this content.
+ """
+ return hash.hash_path(filename)[:10]
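+
+
+# For example (the digest below is illustrative, not a real hash):
+#
+#     hash_taskcluster_yml(".taskcluster.yml")  # -> "3f785a0b2c"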
+
+
+def register_callback_action(
+ name,
+ title,
+ symbol,
+ description,
+ order=10000,
+ context=[],
+ available=lambda parameters: True,
+ schema=None,
+ permission="generic",
+ cb_name=None,
+):
+ """
+ Register an action callback that can be triggered from supporting
+ user interfaces, such as Treeherder.
+
+ This function is to be used as a decorator for a callback that takes
+ parameters as follows:
+
+ ``parameters``:
+ Decision task parameters, see ``taskgraph.parameters.Parameters``.
+ ``input``:
+ Input matching specified JSON schema, ``None`` if no ``schema``
+ parameter is given to ``register_callback_action``.
+ ``task_group_id``:
+ The id of the task-group this was triggered for.
+ ``task_id`` and ``task``:
+ task identifier and task definition for the task the action was
+ triggered for, ``None`` if no ``context`` parameter was given to
+ ``register_callback_action``.
+
+ Parameters
+ ----------
+ name : str
+ An identifier for this action, used by UIs to find the action.
+ title : str
+ A human readable title for the action to be used as label on a button
+ or text on a link for triggering the action.
+ symbol : str
+ Treeherder symbol for the action callback, this is the symbol that the
+ task calling your callback will be displayed as. This is usually 1-3
+ letters abbreviating the action title.
+ description : str
+ A human readable description of the action in **markdown**.
+ This will be displayed as a tooltip and in the dialog window when the
+ action is triggered. This is a good place to describe how to use the action.
+ order : int
+ Order of the action in menus, this is relative to the ``order`` of
+ other actions declared.
+ context : list of dict
+ List of tag-sets specifying which tasks the action can take as input.
+ If no tag-sets are specified, the action applies to the entire
+ task-group and won't be triggered with a given task.
+
+ Otherwise, with ``context = [{'k': 'b', 'p': 'l'}, {'k': 't'}]`` the
+ action will only be displayed in the context menu for tasks that have
+ ``task.tags.k == 'b' && task.tags.p == 'l'`` or ``task.tags.k == 't'``.
+ Essentially, this allows filtering on ``task.tags``.
+
+ If this is a function, it is given the decision parameters and must return
+ a value of the form described above.
+ available : function
+ An optional function that given decision parameters decides if the
+ action is available. Defaults to a function that always returns ``True``.
+ schema : dict
+ JSON schema specifying input accepted by the action.
+ This is optional and can be left ``null`` if no input is taken.
+ permission : string
+ This defaults to ``generic`` and needs to be set for actions that need
+ additional permissions. It appears in ci-configuration and in various
+ role and hook names.
+ cb_name : string
+ The name under which this function should be registered, defaulting to
+ `name`. Unlike `name`, which can appear multiple times, cb_name must be
+ unique among all registered callbacks.
+
+ Returns
+ -------
+ function
+ To be used as decorator for the callback function.
+ """
+ mem = {"registered": False} # workaround nonlocal missing in 2.x
+
+ assert isinstance(title, str), "title must be a string"
+ assert isinstance(description, str), "description must be a string"
+ title = title.strip()
+ description = description.strip()
+
+ if not cb_name:
+ cb_name = name
+
+ # ensure that context is callable
+ if not callable(context):
+ context_value = context
+
+ # Redefined intentionally: ``context`` shadows the parameter of the same name.
+ # pylint: disable=E0102
+ def context(params):
+ return context_value # noqa
+
+ def register_callback(cb):
+ assert isinstance(name, str), "name must be a string"
+ assert isinstance(order, int), "order must be an integer"
+ assert callable(schema) or is_json(
+ schema
+ ), "schema must be a JSON compatible object"
+ assert isinstance(cb, FunctionType), "callback must be a function"
+ # Allow for json-e > 25 chars in the symbol.
+ if "$" not in symbol:
+ assert 1 <= len(symbol) <= 25, "symbol must be between 1 and 25 characters"
+ assert isinstance(symbol, str), "symbol must be a string"
+
+ assert not mem[
+ "registered"
+ ], "register_callback_action must be used as decorator"
+ assert cb_name not in callbacks, "callback name {} is not unique".format(
+ cb_name
+ )
+
+ def action_builder(parameters, graph_config, decision_task_id):
+ if not available(parameters):
+ return None
+
+ # gather up the common decision-task-supplied data for this action
+ repo_param = "{}head_repository".format(
+ graph_config["project-repo-param-prefix"]
+ )
+ repository = {
+ "url": parameters[repo_param],
+ "project": parameters["project"],
+ "level": parameters["level"],
+ }
+
+ revision = parameters[
+ "{}head_rev".format(graph_config["project-repo-param-prefix"])
+ ]
+ base_revision = parameters[
+ "{}base_rev".format(graph_config["project-repo-param-prefix"])
+ ]
+ push = {
+ "owner": "mozilla-taskcluster-maintenance@mozilla.com",
+ "pushlog_id": parameters["pushlog_id"],
+ "revision": revision,
+ "base_revision": base_revision,
+ }
+
+ match = re.match(
+ r"https://(hg.mozilla.org)/(.*?)/?$", parameters[repo_param]
+ )
+ if not match:
+ raise Exception(f"Unrecognized {repo_param}")
+ action = {
+ "name": name,
+ "title": title,
+ "description": description,
+ # target taskGroupId (the task group this decision task is creating)
+ "taskGroupId": decision_task_id,
+ "cb_name": cb_name,
+ "symbol": symbol,
+ }
+
+ rv = {
+ "name": name,
+ "title": title,
+ "description": description,
+ "context": context(parameters),
+ }
+ if schema:
+ rv["schema"] = (
+ schema(graph_config=graph_config) if callable(schema) else schema
+ )
+
+ trustDomain = graph_config["trust-domain"]
+ level = parameters["level"]
+ tcyml_hash = hash_taskcluster_yml(graph_config.taskcluster_yml)
+
+ # the tcyml_hash is prefixed with `/` in the hookId, so users will be granted
+ # hooks:trigger-hook:project-gecko/in-tree-action-3-myaction/*; if another
+ # action was named `myaction/release`, then the `*` in the scope would also
+ # match that action. To prevent such an accident, we prohibit `/` in hook
+ # names.
+ if "/" in permission:
+ raise Exception("`/` is not allowed in action names; use `-`")
+
+ rv.update(
+ {
+ "kind": "hook",
+ "hookGroupId": f"project-{trustDomain}",
+ "hookId": "in-tree-action-{}-{}/{}".format(
+ level, permission, tcyml_hash
+ ),
+ "hookPayload": {
+ # provide the decision-task parameters as context for triggerHook
+ "decision": {
+ "action": action,
+ "repository": repository,
+ "push": push,
+ },
+ # and pass everything else through from our own context
+ "user": {
+ "input": {"$eval": "input"},
+ "taskId": {"$eval": "taskId"}, # target taskId (or null)
+ "taskGroupId": {
+ "$eval": "taskGroupId"
+ }, # target task group
+ },
+ },
+ "extra": {
+ "actionPerm": permission,
+ },
+ }
+ )
+
+ return rv
+
+ actions.append(Action(order, cb_name, permission, action_builder))
+
+ mem["registered"] = True
+ callbacks[cb_name] = cb
+ return cb
+
+ return register_callback
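+
+
+# A minimal usage sketch (the action below is hypothetical, for illustration):
+#
+#     @register_callback_action(
+#         name="hello",
+#         title="Hello",
+#         symbol="hi",
+#         description="Log a greeting.",
+#         context=[],
+#     )
+#     def hello_action(parameters, graph_config, input, task_group_id, task_id):
+#         print("hello from", parameters["project"])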
+
+
+def render_actions_json(parameters, graph_config, decision_task_id):
+ """
+ Render JSON object for the ``public/actions.json`` artifact.
+
+ Parameters
+ ----------
+ parameters : taskgraph.parameters.Parameters
+ Decision task parameters.
+
+ Returns
+ -------
+ dict
+ JSON object representation of the ``public/actions.json`` artifact.
+ """
+ assert isinstance(parameters, Parameters), "requires instance of Parameters"
+ actions = []
+ for action in sorted(_get_actions(graph_config), key=lambda action: action.order):
+ action = action.action_builder(parameters, graph_config, decision_task_id)
+ if action:
+ assert is_json(action), "action must be a JSON compatible object"
+ actions.append(action)
+ return {
+ "version": 1,
+ "variables": {},
+ "actions": actions,
+ }
+
+
+def sanity_check_task_scope(callback, parameters, graph_config):
+ """
+ If this action is not generic, then verify that this task has the necessary
+ scope to run the action. This serves as a backstop preventing abuse by
+ running non-generic actions using generic hooks. While scopes should
+ prevent serious damage from such abuse, it's never a valid thing to do.
+ """
+ for action in _get_actions(graph_config):
+ if action.cb_name == callback:
+ break
+ else:
+ raise Exception(f"No action with cb_name {callback}")
+
+ repo_param = "{}head_repository".format(graph_config["project-repo-param-prefix"])
+ head_repository = parameters[repo_param]
+ assert head_repository.startswith("https://hg.mozilla.org/")
+ expected_scope = "assume:repo:{}:action:{}".format(
+ head_repository[8:], action.permission
+ )
+
+ # the scope should appear literally; no need for a satisfaction check. The use of
+ # get_current_scopes here calls the auth service through the Taskcluster Proxy, giving
+ # the precise scopes available to this task.
+ if expected_scope not in taskcluster.get_current_scopes():
+ raise Exception(f"Expected task scope {expected_scope} for this action")
+
+
+def trigger_action_callback(
+ task_group_id, task_id, input, callback, parameters, root, test=False
+):
+ """
+ Trigger action callback with the given inputs. If `test` is true, then run
+ the action callback in testing mode, without actually creating tasks.
+ """
+ graph_config = load_graph_config(root)
+ graph_config.register()
+ callbacks = _get_callbacks(graph_config)
+ cb = callbacks.get(callback, None)
+ if not cb:
+ raise Exception(
+ "Unknown callback: {}. Known callbacks: {}".format(
+ callback, ", ".join(callbacks)
+ )
+ )
+
+ if test:
+ create.testing = True
+ taskcluster.testing = True
+
+ if not test:
+ sanity_check_task_scope(callback, parameters, graph_config)
+
+ cb(Parameters(**parameters), graph_config, input, task_group_id, task_id)
+
+
+def _load(graph_config):
+ # Load all modules from this folder, relying on the side-effects of register_
+ # functions to populate the action registry.
+ import_sibling_modules(exceptions=("util.py",))
+ return callbacks, actions
+
+
+def _get_callbacks(graph_config):
+ return _load(graph_config)[0]
+
+
+def _get_actions(graph_config):
+ return _load(graph_config)[1]
diff --git a/taskcluster/gecko_taskgraph/actions/release_promotion.py b/taskcluster/gecko_taskgraph/actions/release_promotion.py
new file mode 100644
index 0000000000..0d3c8f3e04
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/actions/release_promotion.py
@@ -0,0 +1,427 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import json
+import os
+
+import requests
+from taskgraph.parameters import Parameters
+from taskgraph.taskgraph import TaskGraph
+from taskgraph.util.taskcluster import get_artifact, list_task_group_incomplete_tasks
+
+from gecko_taskgraph.actions.registry import register_callback_action
+from gecko_taskgraph.decision import taskgraph_decision
+from gecko_taskgraph.util.attributes import RELEASE_PROMOTION_PROJECTS, release_level
+from gecko_taskgraph.util.partials import populate_release_history
+from gecko_taskgraph.util.partners import (
+ fix_partner_config,
+ get_partner_config_by_url,
+ get_partner_url_config,
+ get_token,
+)
+from gecko_taskgraph.util.taskgraph import (
+ find_decision_task,
+ find_existing_tasks_from_previous_kinds,
+)
+
+RELEASE_PROMOTION_SIGNOFFS = ("mar-signing",)
+
+
+def is_release_promotion_available(parameters):
+ return parameters["project"] in RELEASE_PROMOTION_PROJECTS
+
+
+def get_partner_config(partner_url_config, github_token):
+ partner_config = {}
+ for kind, url in partner_url_config.items():
+ if url:
+ partner_config[kind] = get_partner_config_by_url(url, kind, github_token)
+ return partner_config
+
+
+def get_signoff_properties():
+ props = {}
+ for signoff in RELEASE_PROMOTION_SIGNOFFS:
+ props[signoff] = {
+ "type": "string",
+ }
+ return props
+
+
+def get_required_signoffs(input, parameters):
+ input_signoffs = set(input.get("required_signoffs", []))
+ params_signoffs = set(parameters["required_signoffs"] or [])
+ return sorted(list(input_signoffs | params_signoffs))
+
+
+def get_signoff_urls(input, parameters):
+ signoff_urls = parameters["signoff_urls"]
+ signoff_urls.update(input.get("signoff_urls", {}))
+ return signoff_urls
+
+
+def get_flavors(graph_config, param):
+ """
+ Get all flavors with the given parameter enabled.
+ """
+ promotion_flavors = graph_config["release-promotion"]["flavors"]
+ return sorted(
+ flavor
+ for (flavor, config) in promotion_flavors.items()
+ if config.get(param, False)
+ )
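+
+
+# A sketch of the config shape this helper expects (flavor names and settings
+# are hypothetical):
+#
+#     graph_config["release-promotion"]["flavors"] = {
+#         "promote_firefox": {"product": "firefox", "version-bump": False},
+#         "ship_firefox": {"product": "firefox", "version-bump": True},
+#     }
+#     get_flavors(graph_config, "version-bump")  # -> ["ship_firefox"]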
+
+
+@register_callback_action(
+ name="release-promotion",
+ title="Release Promotion",
+ symbol="${input.release_promotion_flavor}",
+ description="Promote a release.",
+ permission="release-promotion",
+ order=500,
+ context=[],
+ available=is_release_promotion_available,
+ schema=lambda graph_config: {
+ "type": "object",
+ "properties": {
+ "build_number": {
+ "type": "integer",
+ "default": 1,
+ "minimum": 1,
+ "title": "The release build number",
+ "description": (
+ "The release build number. Starts at 1 per "
+ "release version, and increments on rebuild."
+ ),
+ },
+ "do_not_optimize": {
+ "type": "array",
+ "description": (
+ "Optional: a list of labels to avoid optimizing out "
+ "of the graph (to force a rerun of, say, "
+ "funsize docker-image tasks)."
+ ),
+ "items": {
+ "type": "string",
+ },
+ },
+ "revision": {
+ "type": "string",
+ "title": "Optional: revision to promote",
+ "description": (
+ "Optional: the revision to promote. If specified, "
+ "and `previous_graph_kinds is not specified, find the "
+ "push graph to promote based on the revision."
+ ),
+ },
+ "release_promotion_flavor": {
+ "type": "string",
+ "description": "The flavor of release promotion to perform.",
+ "default": "FILL ME OUT",
+ "enum": sorted(graph_config["release-promotion"]["flavors"].keys()),
+ },
+ "rebuild_kinds": {
+ "type": "array",
+ "description": (
+ "Optional: an array of kinds to ignore from the previous "
+ "graph(s)."
+ ),
+ "default": graph_config["release-promotion"].get("rebuild-kinds", []),
+ "items": {
+ "type": "string",
+ },
+ },
+ "previous_graph_ids": {
+ "type": "array",
+ "description": (
+ "Optional: an array of taskIds of decision or action "
+ "tasks from the previous graph(s) to use to populate "
+ "our `previous_graph_kinds`."
+ ),
+ "items": {
+ "type": "string",
+ },
+ },
+ "version": {
+ "type": "string",
+ "description": (
+ "Optional: override the version for release promotion. "
+ "Occasionally we'll land a taskgraph fix in a later "
+ "commit, but want to act on a build from a previous "
+ "commit. If a version bump has landed in the meantime, "
+ "relying on the in-tree version will break things."
+ ),
+ "default": "",
+ },
+ "next_version": {
+ "type": "string",
+ "description": (
+ "Next version. Required in the following flavors: "
+ "{}".format(get_flavors(graph_config, "version-bump"))
+ ),
+ "default": "",
+ },
+ # Example:
+ # 'partial_updates': {
+ # '38.0': {
+ # 'buildNumber': 1,
+ # 'locales': ['de', 'en-GB', 'ru', 'uk', 'zh-TW']
+ # },
+ # '37.0': {
+ # 'buildNumber': 2,
+ # 'locales': ['de', 'en-GB', 'ru', 'uk']
+ # }
+ # }
+ "partial_updates": {
+ "type": "object",
+ "description": (
+ "Partial updates. Required in the following flavors: "
+ "{}".format(get_flavors(graph_config, "partial-updates"))
+ ),
+ "default": {},
+ "additionalProperties": {
+ "type": "object",
+ "properties": {
+ "buildNumber": {
+ "type": "number",
+ },
+ "locales": {
+ "type": "array",
+ "items": {
+ "type": "string",
+ },
+ },
+ },
+ "required": [
+ "buildNumber",
+ "locales",
+ ],
+ "additionalProperties": False,
+ },
+ },
+ "release_eta": {
+ "type": "string",
+ "default": "",
+ },
+ "release_enable_partner_repack": {
+ "type": "boolean",
+ "default": False,
+ "description": "Toggle for creating partner repacks",
+ },
+ "release_enable_partner_attribution": {
+ "type": "boolean",
+ "default": False,
+ "description": "Toggle for creating partner attribution",
+ },
+ "release_partner_build_number": {
+ "type": "integer",
+ "default": 1,
+ "minimum": 1,
+ "description": (
+ "The partner build number. This translates to, e.g. "
+ "`v1` in the path. We generally only have to "
+ "bump this on off-cycle partner rebuilds."
+ ),
+ },
+ "release_partners": {
+ "type": "array",
+ "description": (
+ "A list of partners to repack, or if null or empty then use "
+ "the current full set"
+ ),
+ "items": {
+ "type": "string",
+ },
+ },
+ "release_partner_config": {
+ "type": "object",
+ "description": "Partner configuration to use for partner repacks.",
+ "properties": {},
+ "additionalProperties": True,
+ },
+ "release_enable_emefree": {
+ "type": "boolean",
+ "default": False,
+ "description": "Toggle for creating EME-free repacks",
+ },
+ "required_signoffs": {
+ "type": "array",
+ "description": ("The flavor of release promotion to perform."),
+ "items": {
+ "enum": RELEASE_PROMOTION_SIGNOFFS,
+ },
+ },
+ "signoff_urls": {
+ "type": "object",
+ "default": {},
+ "additionalProperties": False,
+ "properties": get_signoff_properties(),
+ },
+ },
+ "required": ["release_promotion_flavor", "build_number"],
+ },
+)
+def release_promotion_action(parameters, graph_config, input, task_group_id, task_id):
+ release_promotion_flavor = input["release_promotion_flavor"]
+ promotion_config = graph_config["release-promotion"]["flavors"][
+ release_promotion_flavor
+ ]
+ release_history = {}
+ product = promotion_config["product"]
+
+ next_version = str(input.get("next_version") or "")
+ if promotion_config.get("version-bump", False):
+ # We force str() the input, hence the 'None'
+ if next_version in ["", "None"]:
+ raise Exception(
+ "`next_version` property needs to be provided for `{}` "
+ "target.".format(release_promotion_flavor)
+ )
+
+ if promotion_config.get("partial-updates", False):
+ partial_updates = input.get("partial_updates", {})
+ if not partial_updates and release_level(parameters["project"]) == "production":
+ raise Exception(
+ "`partial_updates` property needs to be provided for `{}`"
+ "target.".format(release_promotion_flavor)
+ )
+ balrog_prefix = product.title()
+ os.environ["PARTIAL_UPDATES"] = json.dumps(partial_updates, sort_keys=True)
+ release_history = populate_release_history(
+ balrog_prefix, parameters["project"], partial_updates=partial_updates
+ )
+
+ target_tasks_method = promotion_config["target-tasks-method"].format(
+ project=parameters["project"]
+ )
+ rebuild_kinds = input.get(
+ "rebuild_kinds", promotion_config.get("rebuild-kinds", [])
+ )
+ do_not_optimize = input.get(
+ "do_not_optimize", promotion_config.get("do-not-optimize", [])
+ )
+
+ # Make sure no pending tasks remain from a previous run
+ own_task_id = os.environ.get("TASK_ID", "")
+ try:
+ for t in list_task_group_incomplete_tasks(own_task_id):
+ if t == own_task_id:
+ continue
+ raise Exception(
+ "task group has unexpected pre-existing incomplete tasks (e.g. {})".format(
+ t
+ )
+ )
+ except requests.exceptions.HTTPError as e:
+ # 404 means the task group doesn't exist yet, and we're fine
+ if e.response.status_code != 404:
+ raise
+
+ # Build previous_graph_ids from ``previous_graph_ids``, ``revision``,
+ # or the action parameters.
+ previous_graph_ids = input.get("previous_graph_ids")
+ if not previous_graph_ids:
+ revision = input.get("revision")
+ if revision:
+ head_rev_param = "{}head_rev".format(
+ graph_config["project-repo-param-prefix"]
+ )
+ push_parameters = {
+ head_rev_param: revision,
+ "project": parameters["project"],
+ }
+ else:
+ push_parameters = parameters
+ previous_graph_ids = [find_decision_task(push_parameters, graph_config)]
+
+ # Download parameters from the first decision task
+ parameters = get_artifact(previous_graph_ids[0], "public/parameters.yml")
+ # Download and combine full task graphs from each of the previous_graph_ids.
+ # Sometimes previous relpro action tasks will add tasks, like partials,
+ # that didn't exist in the first full_task_graph, so combining them is
+ # important. The rightmost graph should take precedence in the case of
+ # conflicts.
+ combined_full_task_graph = {}
+ for graph_id in previous_graph_ids:
+ full_task_graph = get_artifact(graph_id, "public/full-task-graph.json")
+ combined_full_task_graph.update(full_task_graph)
+ _, combined_full_task_graph = TaskGraph.from_json(combined_full_task_graph)
+ parameters["existing_tasks"] = find_existing_tasks_from_previous_kinds(
+ combined_full_task_graph, previous_graph_ids, rebuild_kinds
+ )
+ parameters["do_not_optimize"] = do_not_optimize
+ parameters["target_tasks_method"] = target_tasks_method
+ parameters["build_number"] = int(input["build_number"])
+ parameters["next_version"] = next_version
+ parameters["release_history"] = release_history
+ if promotion_config.get("is-rc"):
+ parameters["release_type"] += "-rc"
+ parameters["release_eta"] = input.get("release_eta", "")
+ parameters["release_product"] = product
+ # When doing staging releases on try, we still want to re-use tasks from
+ # previous graphs.
+ parameters["optimize_target_tasks"] = True
+
+ if release_promotion_flavor == "promote_firefox_partner_repack":
+ release_enable_partner_repack = True
+ release_enable_partner_attribution = False
+ release_enable_emefree = False
+ elif release_promotion_flavor == "promote_firefox_partner_attribution":
+ release_enable_partner_repack = False
+ release_enable_partner_attribution = True
+ release_enable_emefree = False
+ else:
+ # for promotion or ship phases, we use the action input to turn the repacks/attribution off
+ release_enable_partner_repack = input["release_enable_partner_repack"]
+ release_enable_partner_attribution = input["release_enable_partner_attribution"]
+ release_enable_emefree = input["release_enable_emefree"]
+
+ partner_url_config = get_partner_url_config(parameters, graph_config)
+ if (
+ release_enable_partner_repack
+ and not partner_url_config["release-partner-repack"]
+ ):
+ raise Exception("Can't enable partner repacks when no config url found")
+ if (
+ release_enable_partner_attribution
+ and not partner_url_config["release-partner-attribution"]
+ ):
+ raise Exception("Can't enable partner attribution when no config url found")
+ if release_enable_emefree and not partner_url_config["release-eme-free-repack"]:
+ raise Exception("Can't enable EMEfree repacks when no config url found")
+ parameters["release_enable_partner_repack"] = release_enable_partner_repack
+ parameters[
+ "release_enable_partner_attribution"
+ ] = release_enable_partner_attribution
+ parameters["release_enable_emefree"] = release_enable_emefree
+
+ partner_config = input.get("release_partner_config")
+ if not partner_config and any(
+ [
+ release_enable_partner_repack,
+ release_enable_partner_attribution,
+ release_enable_emefree,
+ ]
+ ):
+ github_token = get_token(parameters)
+ partner_config = get_partner_config(partner_url_config, github_token)
+ if partner_config:
+ parameters["release_partner_config"] = fix_partner_config(partner_config)
+ parameters["release_partners"] = input.get("release_partners")
+ if input.get("release_partner_build_number"):
+ parameters["release_partner_build_number"] = input[
+ "release_partner_build_number"
+ ]
+
+ if input["version"]:
+ parameters["version"] = input["version"]
+
+ parameters["required_signoffs"] = get_required_signoffs(input, parameters)
+ parameters["signoff_urls"] = get_signoff_urls(input, parameters)
+
+ # make parameters read-only
+ parameters = Parameters(**parameters)
+
+ taskgraph_decision({"root": graph_config.root_dir}, parameters=parameters)
diff --git a/taskcluster/gecko_taskgraph/actions/retrigger.py b/taskcluster/gecko_taskgraph/actions/retrigger.py
new file mode 100644
index 0000000000..bb4dfa8f89
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/actions/retrigger.py
@@ -0,0 +1,311 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import itertools
+import logging
+import sys
+import textwrap
+
+from taskgraph.util.taskcluster import get_task_definition, rerun_task
+
+from gecko_taskgraph.util.taskcluster import state_task
+
+from .registry import register_callback_action
+from .util import (
+ combine_task_graph_files,
+ create_task_from_def,
+ create_tasks,
+ fetch_graph_and_labels,
+ get_tasks_with_downstream,
+ relativize_datestamps,
+)
+
+logger = logging.getLogger(__name__)
+
+RERUN_STATES = ("exception", "failed")
+
+
+def _should_retrigger(task_graph, label):
+ """
+ Return whether a given task in the taskgraph should be retriggered.
+
+ This handles the case where the task isn't there by assuming it should not be.
+ """
+ if label not in task_graph:
+ logger.info(
+ "Task {} not in full taskgraph, assuming task should not be retriggered.".format(
+ label
+ )
+ )
+ return False
+ return task_graph[label].attributes.get("retrigger", False)
+
+
+@register_callback_action(
+ title="Retrigger",
+ name="retrigger",
+ symbol="rt",
+ cb_name="retrigger-decision",
+ description=textwrap.dedent(
+ """\
+ Create a clone of the task (retriggering decision, action, and cron tasks requires
+ special scopes)."""
+ ),
+ order=11,
+ context=[
+ {"kind": "decision-task"},
+ {"kind": "action-callback"},
+ {"kind": "cron-task"},
+ {"action": "backfill-task"},
+ ],
+)
+def retrigger_decision_action(parameters, graph_config, input, task_group_id, task_id):
+ """For a single task, we try to just run exactly the same task once more.
+ It's quite possible that we don't have the scopes to do so (especially for
+ an action), but this is best-effort."""
+
+ # make all of the timestamps relative; they will then be turned back into
+ # absolute timestamps relative to the current time.
+ task = get_task_definition(task_id)
+ task = relativize_datestamps(task)
+ create_task_from_def(
+ task, parameters["level"], action_tag="retrigger-decision-task"
+ )
+
+
+@register_callback_action(
+ title="Retrigger",
+ name="retrigger",
+ symbol="rt",
+ description=("Create a clone of the task."),
+ order=19, # must be greater than other orders in this file, as this is the fallback version
+ context=[{"retrigger": "true"}],
+ schema={
+ "type": "object",
+ "properties": {
+ "downstream": {
+ "type": "boolean",
+ "description": (
+ "If true, downstream tasks from this one will be cloned as well. "
+ "The dependencies will be updated to work with the new task at the root."
+ ),
+ "default": False,
+ },
+ "times": {
+ "type": "integer",
+ "default": 1,
+ "minimum": 1,
+ "maximum": 100,
+ "title": "Times",
+ "description": "How many times to run each task.",
+ },
+ },
+ },
+)
+@register_callback_action(
+ title="Retrigger (disabled)",
+ name="retrigger",
+ cb_name="retrigger-disabled",
+ symbol="rt",
+ description=(
+ "Create a clone of the task.\n\n"
+ "This type of task should typically be re-run instead of re-triggered."
+ ),
+ order=20, # must be greater than other orders in this file, as this is the fallback version
+ context=[{}],
+ schema={
+ "type": "object",
+ "properties": {
+ "downstream": {
+ "type": "boolean",
+ "description": (
+ "If true, downstream tasks from this one will be cloned as well. "
+ "The dependencies will be updated to work with the new task at the root."
+ ),
+ "default": False,
+ },
+ "times": {
+ "type": "integer",
+ "default": 1,
+ "minimum": 1,
+ "maximum": 100,
+ "title": "Times",
+ "description": "How many times to run each task.",
+ },
+ "force": {
+ "type": "boolean",
+ "default": False,
+ "description": (
+ "This task should not be re-triggered. "
+ "This can be overridden by passing `true` here."
+ ),
+ },
+ },
+ },
+)
+def retrigger_action(parameters, graph_config, input, task_group_id, task_id):
+ decision_task_id, full_task_graph, label_to_taskid, _ = fetch_graph_and_labels(
+ parameters, graph_config
+ )
+
+ task = get_task_definition(task_id)
+ label = task["metadata"]["name"]
+
+ with_downstream = " "
+ to_run = [label]
+
+ if not input.get("force", None) and not _should_retrigger(full_task_graph, label):
+ logger.info(
+ "Not retriggering task {}, task should not be retrigged "
+ "and force not specified.".format(label)
+ )
+ sys.exit(1)
+
+ if input.get("downstream"):
+ to_run = get_tasks_with_downstream(to_run, full_task_graph, label_to_taskid)
+ with_downstream = " (with downstream) "
+
+ times = input.get("times", 1)
+ for i in range(times):
+ create_tasks(
+ graph_config,
+ to_run,
+ full_task_graph,
+ label_to_taskid,
+ parameters,
+ decision_task_id,
+ i,
+ action_tag="retrigger-task",
+ )
+
+ logger.info(f"Scheduled {label}{with_downstream}(time {i + 1}/{times})")
+ combine_task_graph_files(list(range(times)))
+
+
+@register_callback_action(
+ title="Rerun",
+ name="rerun",
+ symbol="rr",
+ description=(
+ "Rerun a task.\n\n"
+ "This only works on failed or exception tasks in the original taskgraph,"
+ " and is CoT friendly."
+ ),
+ order=300,
+ context=[{}],
+ schema={"type": "object", "properties": {}},
+)
+def rerun_action(parameters, graph_config, input, task_group_id, task_id):
+ task = get_task_definition(task_id)
+ parameters = dict(parameters)
+ (
+ decision_task_id,
+ full_task_graph,
+ label_to_taskid,
+ label_to_taskids,
+ ) = fetch_graph_and_labels(parameters, graph_config)
+ label = task["metadata"]["name"]
+ if task_id not in itertools.chain(*label_to_taskids.values()):
+ # XXX the error message is wrong, we're also looking at label_to_taskid
+ # from action and cron tasks on that revision
+ logger.error(
+ "Refusing to rerun {}: taskId {} not in decision task {} label_to_taskid!".format(
+ label, task_id, decision_task_id
+ )
+ )
+
+ _rerun_task(task_id, label)
+
+
+def _rerun_task(task_id, label):
+ state = state_task(task_id)
+ if state not in RERUN_STATES:
+ logger.warning(
+ "No need to rerun {}: state '{}' not in {}!".format(
+ label, state, RERUN_STATES
+ )
+ )
+ return
+ rerun_task(task_id)
+ logger.info(f"Reran {label}")
+
+
+@register_callback_action(
+ title="Retrigger",
+ name="retrigger-multiple",
+ symbol="rt",
+ description=("Create a clone of the task."),
+ context=[],
+ schema={
+ "type": "object",
+ "properties": {
+ "requests": {
+ "type": "array",
+ "items": {
+ "tasks": {
+ "type": "array",
+ "description": "An array of task labels",
+ "items": {"type": "string"},
+ },
+ "times": {
+ "type": "integer",
+ "minimum": 1,
+ "maximum": 100,
+ "title": "Times",
+ "description": "How many times to run each task.",
+ },
+ "additionalProperties": False,
+ },
+ },
+ "additionalProperties": False,
+ },
+ },
+)
+def retrigger_multiple(parameters, graph_config, input, task_group_id, task_id):
+ (
+ decision_task_id,
+ full_task_graph,
+ label_to_taskid,
+ label_to_taskids,
+ ) = fetch_graph_and_labels(parameters, graph_config)
+
+ suffixes = []
+ for i, request in enumerate(input.get("requests", [])):
+ times = request.get("times", 1)
+ rerun_tasks = [
+ label
+ for label in request.get("tasks")
+ if not _should_retrigger(full_task_graph, label)
+ ]
+ retrigger_tasks = [
+ label
+ for label in request.get("tasks")
+ if _should_retrigger(full_task_graph, label)
+ ]
+
+ for label in rerun_tasks:
+ # XXX we should not re-run tasks pulled in from other pushes
+ # In practice, this shouldn't matter, as only completed tasks
+ # are pulled in from other pushes and treeherder won't pass
+ # those labels.
+ for rerun_taskid in label_to_taskids[label]:
+ _rerun_task(rerun_taskid, label)
+
+ for j in range(times):
+ suffix = f"{i}-{j}"
+ suffixes.append(suffix)
+ create_tasks(
+ graph_config,
+ retrigger_tasks,
+ full_task_graph,
+ label_to_taskid,
+ parameters,
+ decision_task_id,
+ suffix,
+ action_tag="retrigger-multiple-task",
+ )
+
+ if suffixes:
+ combine_task_graph_files(suffixes)
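+
+
+# A hypothetical input for the retrigger-multiple action (task labels are
+# illustrative): tasks whose `retrigger` attribute is set get cloned, the
+# rest are rerun in place.
+#
+#     {
+#         "requests": [
+#             {"tasks": ["test-linux1804-64/opt-mochitest-1"], "times": 3},
+#             {"tasks": ["build-linux64/opt"]},
+#         ]
+#     }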
diff --git a/taskcluster/gecko_taskgraph/actions/retrigger_custom.py b/taskcluster/gecko_taskgraph/actions/retrigger_custom.py
new file mode 100644
index 0000000000..a217009e82
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/actions/retrigger_custom.py
@@ -0,0 +1,185 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import json
+import logging
+
+from taskgraph.util.parameterization import resolve_task_references
+from taskgraph.util.taskcluster import get_task_definition
+
+from .registry import register_callback_action
+from .util import create_task_from_def, fetch_graph_and_labels
+
+logger = logging.getLogger(__name__)
+
+# Properties available for custom retrigger of any supported test suites
+basic_properties = {
+ "path": {
+ "type": "string",
+ "maxLength": 255,
+ "default": "",
+ "title": "Path name",
+ "description": "Path of test(s) to retrigger",
+ },
+ "logLevel": {
+ "type": "string",
+ "enum": ["debug", "info", "warning", "error", "critical"],
+ "default": "info",
+ "title": "Log level",
+ "description": "Log level for output (INFO is normal, DEBUG gives more detail)",
+ },
+ "environment": {
+ "type": "object",
+ "default": {"MOZ_LOG": ""},
+ "title": "Extra environment variables",
+ "description": "Extra environment variables to use for this run",
+ "additionalProperties": {"type": "string"},
+ },
+}
+
+# Additional properties available for custom retrigger of some additional test suites
+extended_properties = basic_properties.copy()
+extended_properties.update(
+ {
+ "runUntilFail": {
+ "type": "boolean",
+ "default": False,
+ "title": "Run until failure",
+ "description": (
+ "Runs the specified set of tests repeatedly "
+ "until failure (up to REPEAT times)"
+ ),
+ },
+ "repeat": {
+ "type": "integer",
+ "default": 0,
+ "minimum": 0,
+ "title": "Repeat test(s) N times",
+ "description": (
+ "Run test(s) repeatedly (usually used in "
+ "conjunction with runUntilFail)"
+ ),
+ },
+ "preferences": {
+ "type": "object",
+ "default": {"remote.log.level": "Info"},
+ "title": "Extra gecko (about:config) preferences",
+ "description": "Extra gecko (about:config) preferences to use for this run",
+ "additionalProperties": {"type": "string"},
+ },
+ }
+)
+
+
+@register_callback_action(
+ name="retrigger-custom",
+ title="Retrigger task with custom parameters",
+ symbol="rt",
+ description="Retriggers the specified task with custom environment and parameters",
+ context=[
+ {"test-type": "mochitest", "worker-implementation": "docker-worker"},
+ {"test-type": "reftest", "worker-implementation": "docker-worker"},
+ {"test-type": "geckoview-junit", "worker-implementation": "docker-worker"},
+ ],
+ order=10,
+ schema={
+ "type": "object",
+ "properties": extended_properties,
+ "additionalProperties": False,
+ "required": ["path"],
+ },
+)
+def extended_custom_retrigger_action(
+ parameters, graph_config, input, task_group_id, task_id
+):
+ handle_custom_retrigger(parameters, graph_config, input, task_group_id, task_id)
+
+
+@register_callback_action(
+ name="retrigger-custom (gtest)",
+ title="Retrigger gtest task with custom parameters",
+ symbol="rt",
+ description="Retriggers the specified task with custom environment and parameters",
+ context=[{"test-type": "gtest", "worker-implementation": "docker-worker"}],
+ order=10,
+ schema={
+ "type": "object",
+ "properties": basic_properties,
+ "additionalProperties": False,
+ "required": ["path"],
+ },
+)
+def basic_custom_retrigger_action_basic(
+ parameters, graph_config, input, task_group_id, task_id
+):
+ handle_custom_retrigger(parameters, graph_config, input, task_group_id, task_id)
+
+
+def handle_custom_retrigger(parameters, graph_config, input, task_group_id, task_id):
+ task = get_task_definition(task_id)
+ decision_task_id, full_task_graph, label_to_taskid, _ = fetch_graph_and_labels(
+ parameters, graph_config
+ )
+
+ pre_task = full_task_graph.tasks[task["metadata"]["name"]]
+
+ # fix up the task's dependencies, similar to how optimization would
+ # have done in the decision
+ dependencies = {
+ name: label_to_taskid[label] for name, label in pre_task.dependencies.items()
+ }
+ new_task_definition = resolve_task_references(
+ pre_task.label, pre_task.task, task_id, decision_task_id, dependencies
+ )
+ new_task_definition.setdefault("dependencies", []).extend(dependencies.values())
+
+ # don't want to run mozharness tests, want a custom mach command instead
+ new_task_definition["payload"]["command"] += ["--no-run-tests"]
+
+ custom_mach_command = [task["tags"]["test-type"]]
+
+ # mochitests may specify a flavor
+ if new_task_definition["payload"]["env"].get("MOCHITEST_FLAVOR"):
+ custom_mach_command += [
+ "--keep-open=false",
+ "-f",
+ new_task_definition["payload"]["env"]["MOCHITEST_FLAVOR"],
+ ]
+
+ enable_e10s = json.loads(
+ new_task_definition["payload"]["env"].get("ENABLE_E10S", "true")
+ )
+ if not enable_e10s:
+ custom_mach_command += ["--disable-e10s"]
+
+ custom_mach_command += [
+ "--log-tbpl=-",
+ "--log-tbpl-level={}".format(input.get("logLevel", "debug")),
+ ]
+ if input.get("runUntilFail"):
+ custom_mach_command += ["--run-until-failure"]
+ if input.get("repeat"):
+ custom_mach_command += ["--repeat", str(input.get("repeat", 30))]
+
+ # add any custom gecko preferences
+ for key, val in input.get("preferences", {}).items():
+ custom_mach_command += ["--setpref", f"{key}={val}"]
+
+ custom_mach_command += [input["path"]]
+ new_task_definition["payload"]["env"]["CUSTOM_MACH_COMMAND"] = " ".join(
+ custom_mach_command
+ )
+
+ # update environment
+ new_task_definition["payload"]["env"].update(input.get("environment", {}))
+
+ # tweak the treeherder symbol
+ new_task_definition["extra"]["treeherder"]["symbol"] += "-custom"
+
+ logging.info("New task definition: %s", new_task_definition)
+
+ create_task_from_def(
+ new_task_definition, parameters["level"], action_tag="retrigger-custom-task"
+ )
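+
+
+# For a hypothetical mochitest task (flavor "plain") triggered with input
+# {"path": "dom/base/test", "logLevel": "info", "repeat": 2}, the resulting
+# CUSTOM_MACH_COMMAND would look roughly like:
+#
+#     mochitest --keep-open=false -f plain --log-tbpl=- \
+#         --log-tbpl-level=info --repeat 2 dom/base/test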
diff --git a/taskcluster/gecko_taskgraph/actions/run_missing_tests.py b/taskcluster/gecko_taskgraph/actions/run_missing_tests.py
new file mode 100644
index 0000000000..b30bc0370a
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/actions/run_missing_tests.py
@@ -0,0 +1,62 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import logging
+
+from taskgraph.util.taskcluster import get_artifact
+
+from .registry import register_callback_action
+from .util import create_tasks, fetch_graph_and_labels
+
+logger = logging.getLogger(__name__)
+
+
+@register_callback_action(
+ name="run-missing-tests",
+ title="Run Missing Tests",
+ symbol="rmt",
+ description=(
+ "Run tests in the selected push that were optimized away, usually by SETA."
+ "\n"
+ "This action is for use on pushes that will be merged into another branch,"
+ "to check that optimization hasn't hidden any failures."
+ ),
+ order=250,
+ context=[], # Applies to decision task
+)
+def run_missing_tests(parameters, graph_config, input, task_group_id, task_id):
+ decision_task_id, full_task_graph, label_to_taskid, _ = fetch_graph_and_labels(
+ parameters, graph_config
+ )
+ target_tasks = get_artifact(decision_task_id, "public/target-tasks.json")
+
+ # The idea here is to schedule all tasks of the `test` kind that were
+ # targeted but did not appear in the final task-graph -- those were the
+ # optimized tasks.
+ to_run = []
+ already_run = 0
+ for label in target_tasks:
+ task = full_task_graph.tasks[label]
+ if task.kind != "test":
+ continue # not a test
+ if label in label_to_taskid:
+ already_run += 1
+ continue
+ to_run.append(label)
+
+ create_tasks(
+ graph_config,
+ to_run,
+ full_task_graph,
+ label_to_taskid,
+ parameters,
+ decision_task_id,
+ )
+
+ logger.info(
+ "Out of {} test tasks, {} already existed and the action created {}".format(
+ already_run + len(to_run), already_run, len(to_run)
+ )
+ )
diff --git a/taskcluster/gecko_taskgraph/actions/scriptworker_canary.py b/taskcluster/gecko_taskgraph/actions/scriptworker_canary.py
new file mode 100644
index 0000000000..e0057da9a6
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/actions/scriptworker_canary.py
@@ -0,0 +1,45 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from taskgraph.parameters import Parameters
+
+from gecko_taskgraph.actions.registry import register_callback_action
+from gecko_taskgraph.decision import taskgraph_decision
+
+
+@register_callback_action(
+ title="Push scriptworker canaries.",
+ name="scriptworker-canary",
+ symbol="scriptworker-canary",
+ description="Trigger scriptworker-canary pushes for the given scriptworkers.",
+ schema={
+ "type": "object",
+ "properties": {
+ "scriptworkers": {
+ "type": "array",
+ "description": "List of scriptworker types to run canaries for.",
+ "items": {"type": "string"},
+ },
+ },
+ },
+ order=1000,
+ permission="scriptworker-canary",
+ context=[],
+)
+def scriptworker_canary(parameters, graph_config, input, task_group_id, task_id):
+ scriptworkers = input["scriptworkers"]
+
+ # make parameters read-write
+ parameters = dict(parameters)
+
+ parameters["target_tasks_method"] = "scriptworker_canary"
+ parameters["try_task_config"] = {
+ "scriptworker-canary-workers": scriptworkers,
+ }
+ parameters["tasks_for"] = "action"
+
+ # make parameters read-only
+ parameters = Parameters(**parameters)
+
+ taskgraph_decision({"root": graph_config.root_dir}, parameters=parameters)
diff --git a/taskcluster/gecko_taskgraph/actions/side_by_side.py b/taskcluster/gecko_taskgraph/actions/side_by_side.py
new file mode 100644
index 0000000000..0880c37760
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/actions/side_by_side.py
@@ -0,0 +1,189 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import logging
+import os
+import sys
+from functools import partial
+
+from taskgraph.util.taskcluster import get_artifact, get_task_definition
+
+from ..util.taskcluster import list_task_group_complete_tasks
+from .registry import register_callback_action
+from .util import create_tasks, fetch_graph_and_labels, get_decision_task_id, get_pushes
+
+logger = logging.getLogger(__name__)
+
+
+def input_for_support_action(revision, base_revision, base_branch, task):
+ """Generate input for action to be scheduled.
+
+ Define what label to schedule with 'label'.
+ If it is a test task that uses explicit manifests add that information.
+ """
+ platform, test_name = task["metadata"]["name"].split("/opt-")
+ new_branch = os.environ.get("GECKO_HEAD_REPOSITORY", "/try").split("/")[-1]
+ symbol = task["extra"]["treeherder"]["symbol"]
+ input = {
+ "label": "perftest-linux-side-by-side",
+ "symbol": symbol,
+ "new_revision": revision,
+ "base_revision": base_revision,
+ "test_name": test_name,
+ "platform": platform,
+ "base_branch": base_branch,
+ "new_branch": new_branch,
+ }
+
+ return input
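+
+
+# A hypothetical example of the input this helper builds for a task named
+# "test-linux1804-64/opt-browsertime-tp6-firefox-amazon" pushed to try:
+#
+#     {
+#         "label": "perftest-linux-side-by-side",
+#         "symbol": "tp6",
+#         "new_revision": "<head_rev>",
+#         "base_revision": "<base_rev>",
+#         "test_name": "browsertime-tp6-firefox-amazon",
+#         "platform": "test-linux1804-64",
+#         "base_branch": "autoland",
+#         "new_branch": "try",
+#     }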
+
+
+def side_by_side_modifier(task, input):
+ if task.label != input["label"]:
+ return task
+
+ # Make side-by-side job searchable by the platform, test name, and revisions
+ # it was triggered for
+ task.task["metadata"][
+ "name"
+ ] = f"{input['platform']} {input['test_name']} {input['base_revision'][:12]} {input['new_revision'][:12]}"
+ # Use a job symbol to include the symbol of the job the side-by-side
+ # is running for
+ task.task["extra"]["treeherder"]["symbol"] += f"-{input['symbol']}"
+
+ cmd = task.task["payload"]["command"]
+ task.task["payload"]["command"][1][-1] = cmd[1][-1].format(**input)
+
+ return task
+
+
+@register_callback_action(
+ title="Side by side",
+ name="side-by-side",
+ symbol="gen-sxs",
+ description=(
+ "Given a performance test pageload job generate a side-by-side comparison against"
+ "the pageload job from the revision at the input."
+ ),
+ order=200,
+ context=[{"test-type": "raptor"}],
+ schema={
+ "type": "object",
+ "properties": {
+ "revision": {
+ "type": "string",
+ "default": "",
+ "description": "Revision of the push against the comparison is wanted.",
+ },
+ "project": {
+ "type": "string",
+ "default": "autoland",
+ "description": "Revision of the push against the comparison is wanted.",
+ },
+ },
+ "additionalProperties": False,
+ },
+)
+def side_by_side_action(parameters, graph_config, input, task_group_id, task_id):
+ """
+ This action does a side-by-side comparison between current revision and
+ the revision entered manually or the latest revision that ran the
+ pageload job (via support action).
+
+ To execute this action locally follow the documentation here:
+ https://firefox-source-docs.mozilla.org/taskcluster/actions.html#testing-the-action-locally
+ """
+ task = get_task_definition(task_id)
+ decision_task_id, full_task_graph, label_to_taskid, _ = fetch_graph_and_labels(
+ parameters, graph_config
+ )
+ # TODO: find another way to detect side-by-side comparable jobs
+    # (potentially looking at the visual metrics flag)
+ if not (
+ "browsertime-tp6" in task["metadata"]["name"]
+ or "welcome" in task["metadata"]["name"]
+ ):
+        logger.error(
+ f"Task {task['metadata']['name']} is not side-by-side comparable."
+ )
+ return
+
+ failed = False
+ input_for_action = {}
+
+ if input.get("revision"):
+ # If base_revision was introduced manually, use that
+ input_for_action = input_for_support_action(
+ revision=parameters["head_rev"],
+ base_revision=input.get("revision"),
+ base_branch=input.get("project"),
+ task=task,
+ )
+ else:
+ current_push_id = int(parameters["pushlog_id"]) - 1
+        # Walk backwards through pushlog ids, getting the push data, decision
+        # task id, full task graph and everything needed to find which of the
+        # past revisions ran the pageload job to compare against
+ while int(parameters["pushlog_id"]) - current_push_id < 30:
+ pushes = get_pushes(
+ project=parameters["head_repository"],
+ end_id=current_push_id,
+ depth=1,
+ full_response=True,
+ )
+ try:
+ # Get label-to-taskid.json artifact + the tasks triggered
+ # by the action tasks at a later time than the decision task
+ current_decision_task_id = get_decision_task_id(
+ parameters["project"], current_push_id
+ )
+ current_task_group_id = get_task_definition(current_decision_task_id)[
+ "taskGroupId"
+ ]
+ current_label_to_taskid = get_artifact(
+ current_decision_task_id, "public/label-to-taskid.json"
+ )
+ current_full_label_to_taskid = current_label_to_taskid.copy()
+ action_task_triggered = list_task_group_complete_tasks(
+ current_task_group_id
+ )
+ current_full_label_to_taskid.update(action_task_triggered)
+ if task["metadata"]["name"] in current_full_label_to_taskid.keys():
+ input_for_action = input_for_support_action(
+ revision=parameters["head_rev"],
+ base_revision=pushes[str(current_push_id)]["changesets"][-1],
+ base_branch=input.get("project", parameters["project"]),
+ task=task,
+ )
+ break
+ except Exception:
+ logger.warning(
+ f"Could not find decision task for push {current_push_id}"
+ )
+                # The decision task may have failed; this is common enough that
+                # we don't want to report an error for it. Fall through so the
+                # push id is still decremented and the search continues.
+            current_push_id -= 1
+ if not input_for_action:
+ raise Exception(
+ "Could not find a side-by-side comparable task within a depth of 30 revisions."
+ )
+
+ try:
+ create_tasks(
+ graph_config,
+ [input_for_action["label"]],
+ full_task_graph,
+ label_to_taskid,
+ parameters,
+ decision_task_id,
+ modifier=partial(side_by_side_modifier, input=input_for_action),
+ )
+ except Exception as e:
+ logger.exception(f"Failed to trigger action: {e}.")
+ failed = True
+
+ if failed:
+ sys.exit(1)
diff --git a/taskcluster/gecko_taskgraph/actions/util.py b/taskcluster/gecko_taskgraph/actions/util.py
new file mode 100644
index 0000000000..0a18b146cb
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/actions/util.py
@@ -0,0 +1,437 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import concurrent.futures as futures
+import copy
+import logging
+import os
+import re
+from functools import reduce
+
+import jsone
+import requests
+from requests.exceptions import HTTPError
+from slugid import nice as slugid
+from taskgraph import create
+from taskgraph.optimize.base import optimize_task_graph
+from taskgraph.taskgraph import TaskGraph
+from taskgraph.util.taskcluster import (
+ CONCURRENCY,
+ find_task_id,
+ get_artifact,
+ get_session,
+ get_task_definition,
+ list_tasks,
+ parse_time,
+)
+
+from gecko_taskgraph.decision import read_artifact, rename_artifact, write_artifact
+from gecko_taskgraph.util.taskcluster import trigger_hook
+from gecko_taskgraph.util.taskgraph import find_decision_task
+
+logger = logging.getLogger(__name__)
+
+INDEX_TMPL = "gecko.v2.{}.pushlog-id.{}.decision"
+PUSHLOG_TMPL = "{}/json-pushes?version=2&startID={}&endID={}"
+
+
+def _tags_within_context(tags, context=[]):
+ """A context of [] means that it *only* applies to a task group"""
+ return any(
+ all(tag in tags and tags[tag] == tag_set[tag] for tag in tag_set.keys())
+ for tag_set in context
+ )
+
+
+def _extract_applicable_action(actions_json, action_name, task_group_id, task_id):
+ """Extract action that applies to the given task or task group.
+
+ A task (as defined by its tags) is said to match a tag-set if its
+ tags are a super-set of the tag-set. A tag-set is a set of key-value pairs.
+
+ An action (as defined by its context) is said to be relevant for
+ a given task, if the task's tags match one of the tag-sets given
+ in the context property of the action.
+
+ The order of the actions is significant. When multiple actions apply to a
+ task the first one takes precedence.
+
+ For more details visit:
+ https://docs.taskcluster.net/docs/manual/design/conventions/actions/spec
+ """
+ if task_id:
+ tags = get_task_definition(task_id).get("tags")
+
+ for _action in actions_json["actions"]:
+ if action_name != _action["name"]:
+ continue
+
+ context = _action.get("context", [])
+ # Ensure the task is within the context of the action
+ if task_id and tags and _tags_within_context(tags, context):
+ return _action
+ if context == []:
+ return _action
+
+ available_actions = ", ".join(sorted({a["name"] for a in actions_json["actions"]}))
+ raise LookupError(
+ "{} action is not available for this task. Available: {}".format(
+ action_name, available_actions
+ )
+ )
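+
+# Illustrative examples (not part of the original patch) of the tag-set
+# matching performed above:
+#
+#   _tags_within_context({"kind": "test", "os": "linux"}, [{"kind": "test"}])
+#   # -> True: the task's tags are a super-set of the tag-set
+#   _tags_within_context({"kind": "build"}, [{"kind": "test"}])
+#   # -> False: the tag values do not match
+#   _tags_within_context({"kind": "test"}, [])
+#   # -> False: an empty context only matches the task group itself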
+
+
+def trigger_action(action_name, decision_task_id, task_id=None, input={}):
+ if not decision_task_id:
+ raise ValueError("No decision task. We can't find the actions artifact.")
+ actions_json = get_artifact(decision_task_id, "public/actions.json")
+ if actions_json["version"] != 1:
+ raise RuntimeError("Wrong version of actions.json, unable to continue")
+
+ # These values substitute $eval in the template
+ context = {
+ "input": input,
+ "taskId": task_id,
+ "taskGroupId": decision_task_id,
+ }
+ # https://docs.taskcluster.net/docs/manual/design/conventions/actions/spec#variables
+ context.update(actions_json["variables"])
+ action = _extract_applicable_action(
+ actions_json, action_name, decision_task_id, task_id
+ )
+ kind = action["kind"]
+ if create.testing:
+ logger.info(f"Skipped triggering action for {kind} as testing is enabled")
+ elif kind == "hook":
+ hook_payload = jsone.render(action["hookPayload"], context)
+ trigger_hook(action["hookGroupId"], action["hookId"], hook_payload)
+ else:
+ raise NotImplementedError(f"Unable to submit actions with {kind} kind.")
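+
+# Illustrative usage (hypothetical ids and input, not part of the original
+# patch):
+#
+#   trigger_action("retrigger", decision_task_id="abc123", task_id="def456",
+#                  input={"times": 2})
+#
+# looks up the "retrigger" entry in the decision task's actions.json, renders
+# its hookPayload with json-e and fires the corresponding hook.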
+
+
+def get_pushes_from_params_input(parameters, input):
+ inclusive_tweak = 1 if input.get("inclusive") else 0
+ return get_pushes(
+ project=parameters["head_repository"],
+ end_id=int(parameters["pushlog_id"]) - (1 - inclusive_tweak),
+ depth=input.get("depth", 9) + inclusive_tweak,
+ )
+
+
+def get_pushes(project, end_id, depth, full_response=False):
+    pushes = []
+    push_data = {}
+    while True:
+        start_id = max(end_id - depth, 0)
+        pushlog_url = PUSHLOG_TMPL.format(project, start_id, end_id)
+        logger.debug(pushlog_url)
+        r = requests.get(pushlog_url)
+        r.raise_for_status()
+        # Accumulate push data across requests; the last response alone may
+        # not contain every push collected so far.
+        result = r.json()["pushes"]
+        push_data.update(result)
+        pushes = pushes + list(result.keys())
+        if len(pushes) >= depth:
+            break
+
+        end_id = start_id - 1
+        start_id -= depth
+        if start_id < 0:
+            break
+
+    pushes = sorted(pushes)[-depth:]
+    push_dict = {push: push_data[push] for push in pushes}
+ return push_dict if full_response else pushes
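+
+# Illustrative example (hypothetical values, not part of the original patch):
+# fetching the five pushes ending at push id 120 from autoland requests
+# something like
+#   https://hg.mozilla.org/integration/autoland/json-pushes?version=2&startID=115&endID=120
+# and returns ["116", "117", "118", "119", "120"], or the corresponding
+# {push_id: push_data} mapping when full_response=True.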
+
+
+def get_decision_task_id(project, push_id):
+ return find_task_id(INDEX_TMPL.format(project, push_id))
+
+
+def get_parameters(decision_task_id):
+ return get_artifact(decision_task_id, "public/parameters.yml")
+
+
+def get_tasks_with_downstream(labels, full_task_graph, label_to_taskid):
+ # Used to gather tasks when downstream tasks need to run as well
+ return full_task_graph.graph.transitive_closure(
+ set(labels), reverse=True
+ ).nodes & set(label_to_taskid.keys())
+
+
+def fetch_graph_and_labels(parameters, graph_config):
+ decision_task_id = find_decision_task(parameters, graph_config)
+
+ # First grab the graph and labels generated during the initial decision task
+ full_task_graph = get_artifact(decision_task_id, "public/full-task-graph.json")
+ logger.info("Load taskgraph from JSON.")
+ _, full_task_graph = TaskGraph.from_json(full_task_graph)
+ label_to_taskid = get_artifact(decision_task_id, "public/label-to-taskid.json")
+ label_to_taskids = {label: [task_id] for label, task_id in label_to_taskid.items()}
+
+ logger.info("Fetching additional tasks from action and cron tasks.")
+ # fetch everything in parallel; this avoids serializing any delay in downloading
+ # each artifact (such as waiting for the artifact to be mirrored locally)
+ with futures.ThreadPoolExecutor(CONCURRENCY) as e:
+ fetches = []
+
+ # fetch any modifications made by action tasks and add the new tasks
+ def fetch_action(task_id):
+ logger.info(f"fetching label-to-taskid.json for action task {task_id}")
+ try:
+ run_label_to_id = get_artifact(task_id, "public/label-to-taskid.json")
+ label_to_taskid.update(run_label_to_id)
+ for label, task_id in run_label_to_id.items():
+ label_to_taskids.setdefault(label, []).append(task_id)
+ except HTTPError as e:
+ if e.response.status_code != 404:
+ raise
+ logger.debug(f"No label-to-taskid.json found for {task_id}: {e}")
+
+ head_rev_param = "{}head_rev".format(graph_config["project-repo-param-prefix"])
+
+ namespace = "{}.v2.{}.revision.{}.taskgraph.actions".format(
+ graph_config["trust-domain"],
+ parameters["project"],
+ parameters[head_rev_param],
+ )
+ for task_id in list_tasks(namespace):
+ fetches.append(e.submit(fetch_action, task_id))
+
+ # Similarly for cron tasks..
+ def fetch_cron(task_id):
+ logger.info(f"fetching label-to-taskid.json for cron task {task_id}")
+ try:
+ run_label_to_id = get_artifact(task_id, "public/label-to-taskid.json")
+ label_to_taskid.update(run_label_to_id)
+ for label, task_id in run_label_to_id.items():
+ label_to_taskids.setdefault(label, []).append(task_id)
+ except HTTPError as e:
+ if e.response.status_code != 404:
+ raise
+ logger.debug(f"No label-to-taskid.json found for {task_id}: {e}")
+
+ namespace = "{}.v2.{}.revision.{}.cron".format(
+ graph_config["trust-domain"],
+ parameters["project"],
+ parameters[head_rev_param],
+ )
+ for task_id in list_tasks(namespace):
+ fetches.append(e.submit(fetch_cron, task_id))
+
+ # now wait for each fetch to complete, raising an exception if there
+ # were any issues
+ for f in futures.as_completed(fetches):
+ f.result()
+
+ return (decision_task_id, full_task_graph, label_to_taskid, label_to_taskids)
+
+
+def create_task_from_def(task_def, level, action_tag=None):
+ """Create a new task from a definition rather than from a label
+ that is already in the full-task-graph. The task definition will
+    have {'relative-datestamp': '..'} rendered just like in a decision task.
+ Use this for entirely new tasks or ones that change internals of the task.
+ It is useful if you want to "edit" the full_task_graph and then hand
+ it to this function. No dependencies will be scheduled. You must handle
+ this yourself. Seeing how create_tasks handles it might prove helpful."""
+ task_def["schedulerId"] = f"gecko-level-{level}"
+ label = task_def["metadata"]["name"]
+ task_id = slugid()
+ session = get_session()
+ if action_tag:
+ task_def.setdefault("tags", {}).setdefault("action", action_tag)
+ create.create_task(session, task_id, label, task_def)
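+
+# Illustrative sketch (hypothetical task and tweak, not part of the original
+# patch): callers typically copy an existing definition, adjust it and
+# re-submit it at the appropriate level:
+#
+#   task_def = get_task_definition(some_task_id)
+#   task_def = relativize_datestamps(task_def)  # defined below
+#   task_def["payload"]["env"]["EXTRA_FLAG"] = "1"  # hypothetical tweak
+#   create_task_from_def(task_def, parameters["level"])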
+
+
+def update_parent(task, graph):
+ task.task.setdefault("extra", {})["parent"] = os.environ.get("TASK_ID", "")
+ return task
+
+
+def update_action_tag(task, graph, action_tag):
+ task.task.setdefault("tags", {}).setdefault("action", action_tag)
+ return task
+
+
+def update_dependencies(task, graph):
+ if os.environ.get("TASK_ID"):
+ task.task.setdefault("dependencies", []).append(os.environ["TASK_ID"])
+ return task
+
+
+def create_tasks(
+ graph_config,
+ to_run,
+ full_task_graph,
+ label_to_taskid,
+ params,
+ decision_task_id,
+ suffix="",
+ modifier=lambda t: t,
+ action_tag=None,
+):
+ """Create new tasks. The task definition will have {relative-datestamp':
+ '..'} rendered just like in a decision task. Action callbacks should use
+ this function to create new tasks,
+ allowing easy debugging with `mach taskgraph action-callback --test`.
+ This builds up all required tasks to run in order to run the tasks requested.
+
+ Optionally this function takes a `modifier` function that is passed in each
+ task before it is put into a new graph. It should return a valid task. Note
+ that this is passed _all_ tasks in the graph, not just the set in to_run. You
+ may want to skip modifying tasks not in your to_run list.
+
+ If `suffix` is given, then it is used to give unique names to the resulting
+ artifacts. If you call this function multiple times in the same action,
+ pass a different suffix each time to avoid overwriting artifacts.
+
+ If you wish to create the tasks in a new group, leave out decision_task_id.
+
+ Returns an updated label_to_taskid containing the new tasks"""
+ import gecko_taskgraph.optimize # noqa: triggers registration of strategies
+
+ if suffix != "":
+ suffix = f"-{suffix}"
+ to_run = set(to_run)
+
+ # Copy to avoid side-effects later
+ full_task_graph = copy.deepcopy(full_task_graph)
+ label_to_taskid = label_to_taskid.copy()
+
+ target_graph = full_task_graph.graph.transitive_closure(to_run)
+ target_task_graph = TaskGraph(
+ {l: modifier(full_task_graph[l]) for l in target_graph.nodes}, target_graph
+ )
+ target_task_graph.for_each_task(update_parent)
+ if action_tag:
+ target_task_graph.for_each_task(update_action_tag, action_tag)
+ if decision_task_id and decision_task_id != os.environ.get("TASK_ID"):
+ target_task_graph.for_each_task(update_dependencies)
+ optimized_task_graph, label_to_taskid = optimize_task_graph(
+ target_task_graph,
+ to_run,
+ params,
+ to_run,
+ decision_task_id,
+ existing_tasks=label_to_taskid,
+ )
+ write_artifact(f"task-graph{suffix}.json", optimized_task_graph.to_json())
+ write_artifact(f"label-to-taskid{suffix}.json", label_to_taskid)
+ write_artifact(f"to-run{suffix}.json", list(to_run))
+ create.create_tasks(
+ graph_config,
+ optimized_task_graph,
+ label_to_taskid,
+ params,
+ decision_task_id,
+ )
+ return label_to_taskid
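+
+# Illustrative sketch (not part of the original patch): an action callback
+# re-running one label with a modified environment might look like this; the
+# label and "EXTRA_FLAG" are hypothetical.
+#
+#   def modifier(task):
+#       if task.label == "test-linux1804-64/opt-xpcshell-1":
+#           task.task["payload"]["env"]["EXTRA_FLAG"] = "1"
+#       return task
+#
+#   create_tasks(graph_config, ["test-linux1804-64/opt-xpcshell-1"],
+#                full_task_graph, label_to_taskid, parameters,
+#                decision_task_id, modifier=modifier)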
+
+
+def _update_reducer(accumulator, new_value):
+ "similar to set or dict `update` method, but returning the modified object"
+ accumulator.update(new_value)
+ return accumulator
+
+
+def combine_task_graph_files(suffixes):
+ """Combine task-graph-{suffix}.json files into a single task-graph.json file.
+
+ Since Chain of Trust verification requires a task-graph.json file that
+ contains all children tasks, we can combine the various task-graph-0.json
+ type files into a master task-graph.json file at the end.
+
+ Actions also look for various artifacts, so we combine those in a similar
+ fashion.
+
+ In the case where there is only one suffix, we simply rename it to avoid the
+ additional cost of uploading two copies of the same data.
+ """
+
+ if len(suffixes) == 1:
+ for filename in ["task-graph", "label-to-taskid", "to-run"]:
+ rename_artifact(f"{filename}-{suffixes[0]}.json", f"{filename}.json")
+ return
+
+ def combine(file_contents, base):
+ return reduce(_update_reducer, file_contents, base)
+
+ files = [read_artifact(f"task-graph-{suffix}.json") for suffix in suffixes]
+ write_artifact("task-graph.json", combine(files, dict()))
+
+ files = [read_artifact(f"label-to-taskid-{suffix}.json") for suffix in suffixes]
+ write_artifact("label-to-taskid.json", combine(files, dict()))
+
+ files = [read_artifact(f"to-run-{suffix}.json") for suffix in suffixes]
+ write_artifact("to-run.json", list(combine(files, set())))
+
+
+def relativize_datestamps(task_def):
+ """
+ Given a task definition as received from the queue, convert all datestamps
+ to {relative_datestamp: ..} format, with the task creation time as "now".
+ The result is useful for handing to ``create_task``.
+ """
+ base = parse_time(task_def["created"])
+ # borrowed from https://github.com/epoberezkin/ajv/blob/master/lib/compile/formats.js
+ ts_pattern = re.compile(
+ r"^\d\d\d\d-[0-1]\d-[0-3]\d[t\s]"
+ r"(?:[0-2]\d:[0-5]\d:[0-5]\d|23:59:60)(?:\.\d+)?"
+ r"(?:z|[+-]\d\d:\d\d)$",
+ re.I,
+ )
+
+ def recurse(value):
+ if isinstance(value, str):
+ if ts_pattern.match(value):
+ value = parse_time(value)
+ diff = value - base
+ return {"relative-datestamp": f"{int(diff.total_seconds())} seconds"}
+ if isinstance(value, list):
+ return [recurse(e) for e in value]
+ if isinstance(value, dict):
+ return {k: recurse(v) for k, v in value.items()}
+ return value
+
+ return recurse(task_def)
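+
+# Illustrative example (hypothetical values, not part of the original patch):
+# with task_def["created"] == "2024-01-01T00:00:00Z", a field such as
+#   "deadline": "2024-01-01T02:00:00Z"
+# is rewritten to
+#   "deadline": {"relative-datestamp": "7200 seconds"}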
+
+
+def add_args_to_command(cmd_parts, extra_args=[]):
+ """
+ Add custom command line args to a given command.
+ args:
+ cmd_parts: the raw command as seen by taskcluster
+ extra_args: array of args we want to add
+ """
+ # Prevent modification of the caller's copy of cmd_parts
+ cmd_parts = copy.deepcopy(cmd_parts)
+ cmd_type = "default"
+ if len(cmd_parts) == 1 and isinstance(cmd_parts[0], dict):
+ # windows has single cmd part as dict: 'task-reference', with long string
+ cmd_parts = cmd_parts[0]["task-reference"].split(" ")
+ cmd_type = "dict"
+ elif len(cmd_parts) == 1 and isinstance(cmd_parts[0], str):
+ # windows has single cmd part as a long string
+ cmd_parts = cmd_parts[0].split(" ")
+ cmd_type = "unicode"
+ elif len(cmd_parts) == 1 and isinstance(cmd_parts[0], list):
+        # osx has a single-value array with an array inside
+ cmd_parts = cmd_parts[0]
+ cmd_type = "subarray"
+ elif len(cmd_parts) == 2 and isinstance(cmd_parts[1], list):
+        # osx has a double-value array with an array inside each element.
+ # The first element is a pre-requisite command while the second
+ # is the actual test command.
+ cmd_type = "subarray2"
+
+ if cmd_type == "subarray2":
+ cmd_parts[1].extend(extra_args)
+ else:
+ cmd_parts.extend(extra_args)
+
+ if cmd_type == "dict":
+ cmd_parts = [{"task-reference": " ".join(cmd_parts)}]
+ elif cmd_type == "unicode":
+ cmd_parts = [" ".join(cmd_parts)]
+ elif cmd_type == "subarray":
+ cmd_parts = [cmd_parts]
+ return cmd_parts
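+
+# Illustrative examples of the command shapes handled above (hypothetical
+# values, not part of the original patch):
+#
+#   add_args_to_command(["run-tests", "--verify"], ["--headless"])
+#   # -> ["run-tests", "--verify", "--headless"]
+#
+#   add_args_to_command([{"task-reference": "run-tests --verify"}],
+#                       ["--headless"])
+#   # -> [{"task-reference": "run-tests --verify --headless"}]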
diff --git a/taskcluster/gecko_taskgraph/config.py b/taskcluster/gecko_taskgraph/config.py
new file mode 100644
index 0000000000..5045963b46
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/config.py
@@ -0,0 +1,142 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from taskgraph.util.schema import Schema, optionally_keyed_by
+from voluptuous import Any, Optional, Required
+
+graph_config_schema = Schema(
+ {
+ # The trust-domain for this graph.
+ # (See https://firefox-source-docs.mozilla.org/taskcluster/taskcluster/taskgraph.html#taskgraph-trust-domain) # noqa
+ Required("trust-domain"): str,
+        # This specifies the prefix for repo parameters that refer to the project being built.
+        # This selects between `head_rev` and `comm_head_rev` and related parameters.
+ # (See http://firefox-source-docs.mozilla.org/taskcluster/taskcluster/parameters.html#push-information # noqa
+ # and http://firefox-source-docs.mozilla.org/taskcluster/taskcluster/parameters.html#comm-push-information) # noqa
+ Required("project-repo-param-prefix"): str,
+ # This specifies the top level directory of the application being built.
+ # ie. "browser/" for Firefox, "comm/mail/" for Thunderbird.
+ Required("product-dir"): str,
+ Required("treeherder"): {
+ # Mapping of treeherder group symbols to descriptive names
+ Required("group-names"): {str: str}
+ },
+ Required("index"): {Required("products"): [str]},
+ Required("try"): {
+ # We have a few platforms for which we want to do some "extra" builds, or at
+ # least build-ish things. Sort of. Anyway, these other things are implemented
+ # as different "platforms". These do *not* automatically ride along with "-p
+ # all"
+ Required("ridealong-builds"): {str: [str]},
+ },
+ Required("release-promotion"): {
+ Required("products"): [str],
+ Required("flavors"): {
+ str: {
+ Required("product"): str,
+ Required("target-tasks-method"): str,
+ Optional("is-rc"): bool,
+ Optional("rebuild-kinds"): [str],
+ Optional("version-bump"): bool,
+ Optional("partial-updates"): bool,
+ }
+ },
+ Optional("rebuild-kinds"): [str],
+ },
+ Required("merge-automation"): {
+ Required("behaviors"): {
+ str: {
+ Optional("from-branch"): str,
+ Required("to-branch"): str,
+ Optional("from-repo"): str,
+ Required("to-repo"): str,
+ Required("version-files"): [
+ {
+ Required("filename"): str,
+ Optional("new-suffix"): str,
+ Optional("version-bump"): Any("major", "minor"),
+ }
+ ],
+ Required("replacements"): [[str]],
+ Required("merge-old-head"): bool,
+ Optional("base-tag"): str,
+ Optional("end-tag"): str,
+ Optional("fetch-version-from"): str,
+ }
+ },
+ },
+ Required("scriptworker"): {
+ # Prefix to add to scopes controlling scriptworkers
+ Required("scope-prefix"): str,
+ },
+ Required("task-priority"): optionally_keyed_by(
+ "project",
+ Any(
+ "highest",
+ "very-high",
+ "high",
+ "medium",
+ "low",
+ "very-low",
+ "lowest",
+ ),
+ ),
+ Required("partner-urls"): {
+ Required("release-partner-repack"): optionally_keyed_by(
+ "release-product", "release-level", "release-type", Any(str, None)
+ ),
+ Optional("release-partner-attribution"): optionally_keyed_by(
+ "release-product", "release-level", "release-type", Any(str, None)
+ ),
+ Required("release-eme-free-repack"): optionally_keyed_by(
+ "release-product", "release-level", "release-type", Any(str, None)
+ ),
+ },
+ Required("workers"): {
+ Required("aliases"): {
+ str: {
+ Required("provisioner"): optionally_keyed_by("level", str),
+ Required("implementation"): str,
+ Required("os"): str,
+ Required("worker-type"): optionally_keyed_by(
+ "level", "release-level", "project", str
+ ),
+ }
+ },
+ },
+ Required("mac-notarization"): {
+ Required("mac-entitlements"): optionally_keyed_by(
+ "platform", "release-level", str
+ ),
+ Required("mac-requirements"): optionally_keyed_by("platform", str),
+ },
+ Required("mac-signing"): {
+ Required("hardened-sign-config"): optionally_keyed_by(
+ "hardened-signing-type",
+ [
+ {
+ Optional("deep"): bool,
+ Optional("runtime"): bool,
+ Optional("force"): bool,
+ Optional("requirements"): optionally_keyed_by(
+ "release-product", "release-level", str
+ ),
+ Optional("entitlements"): optionally_keyed_by(
+ "build-platform", "project", str
+ ),
+ Required("globs"): [str],
+ }
+ ],
+ )
+ },
+ Required("taskgraph"): {
+ Optional(
+ "register",
+ description="Python function to call to register extensions.",
+ ): str,
+ Optional("decision-parameters"): str,
+ },
+ Required("expiration-policy"): optionally_keyed_by("project", {str: str}),
+ }
+)
diff --git a/taskcluster/gecko_taskgraph/decision.py b/taskcluster/gecko_taskgraph/decision.py
new file mode 100644
index 0000000000..e0bc9e3ca8
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/decision.py
@@ -0,0 +1,498 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import json
+import logging
+import os
+import shutil
+import sys
+import time
+from collections import defaultdict
+
+import yaml
+from redo import retry
+from taskgraph import create
+from taskgraph.create import create_tasks
+
+# TODO: Let standalone taskgraph generate parameters instead of calling internals
+from taskgraph.decision import (
+ _determine_more_accurate_base_ref,
+ _determine_more_accurate_base_rev,
+ _get_env_prefix,
+)
+from taskgraph.generator import TaskGraphGenerator
+from taskgraph.parameters import Parameters
+from taskgraph.taskgraph import TaskGraph
+from taskgraph.util.python_path import find_object
+from taskgraph.util.taskcluster import get_artifact
+from taskgraph.util.vcs import get_repository
+from taskgraph.util.yaml import load_yaml
+
+from . import GECKO
+from .actions import render_actions_json
+from .parameters import get_app_version, get_version
+from .try_option_syntax import parse_message
+from .util.backstop import BACKSTOP_INDEX, is_backstop
+from .util.bugbug import push_schedules
+from .util.chunking import resolver
+from .util.hg import get_hg_commit_message, get_hg_revision_branch
+from .util.partials import populate_release_history
+from .util.taskcluster import insert_index
+from .util.taskgraph import find_decision_task, find_existing_tasks_from_previous_kinds
+
+logger = logging.getLogger(__name__)
+
+ARTIFACTS_DIR = "artifacts"
+
+# For each project, this gives a set of parameters specific to the project.
+# See `taskcluster/docs/parameters.rst` for information on parameters.
+PER_PROJECT_PARAMETERS = {
+ "try": {
+ "enable_always_target": True,
+ "target_tasks_method": "try_tasks",
+ "release_type": "nightly",
+ },
+ "kaios-try": {
+ "target_tasks_method": "try_tasks",
+ },
+ "ash": {
+ "target_tasks_method": "default",
+ },
+ "cedar": {
+ "target_tasks_method": "default",
+ },
+ "holly": {
+ "enable_always_target": True,
+ "target_tasks_method": "holly_tasks",
+ },
+ "oak": {
+ "target_tasks_method": "default",
+ "release_type": "nightly-oak",
+ },
+ "graphics": {
+ "target_tasks_method": "graphics_tasks",
+ },
+ "autoland": {
+ "optimize_strategies": "gecko_taskgraph.optimize:project.autoland",
+ "target_tasks_method": "autoland_tasks",
+ "test_manifest_loader": "bugbug", # Remove this line to disable "manifest scheduling".
+ },
+ "mozilla-central": {
+ "target_tasks_method": "mozilla_central_tasks",
+ "release_type": "nightly",
+ },
+ "mozilla-beta": {
+ "target_tasks_method": "mozilla_beta_tasks",
+ "release_type": "beta",
+ },
+ "mozilla-release": {
+ "target_tasks_method": "mozilla_release_tasks",
+ "release_type": "release",
+ },
+ "mozilla-esr115": {
+ "target_tasks_method": "mozilla_esr115_tasks",
+ "release_type": "esr115",
+ },
+ "pine": {
+ "target_tasks_method": "pine_tasks",
+ "release_type": "nightly-pine",
+ },
+ "larch": {
+ "target_tasks_method": "larch_tasks",
+ "release_type": "nightly-larch",
+ },
+ "kaios": {
+ "target_tasks_method": "kaios_tasks",
+ },
+ "toolchains": {
+ "target_tasks_method": "mozilla_central_tasks",
+ },
+ # the default parameters are used for projects that do not match above.
+ "default": {
+ "target_tasks_method": "default",
+ },
+}
+
+
+def full_task_graph_to_runnable_jobs(full_task_json):
+ runnable_jobs = {}
+ for label, node in full_task_json.items():
+ if not ("extra" in node["task"] and "treeherder" in node["task"]["extra"]):
+ continue
+
+ th = node["task"]["extra"]["treeherder"]
+ runnable_jobs[label] = {"symbol": th["symbol"]}
+
+ for i in ("groupName", "groupSymbol", "collection"):
+ if i in th:
+ runnable_jobs[label][i] = th[i]
+ if th.get("machine", {}).get("platform"):
+ runnable_jobs[label]["platform"] = th["machine"]["platform"]
+ return runnable_jobs
+
+
+def full_task_graph_to_manifests_by_task(full_task_json):
+ manifests_by_task = defaultdict(list)
+ for label, node in full_task_json.items():
+ manifests = node["attributes"].get("test_manifests")
+ if not manifests:
+ continue
+
+ manifests_by_task[label].extend(manifests)
+ return manifests_by_task
+
+
+def try_syntax_from_message(message):
+ """
+ Parse the try syntax out of a commit message, returning '' if none is
+ found.
+ """
+ try_idx = message.find("try:")
+ if try_idx == -1:
+ return ""
+ return message[try_idx:].split("\n", 1)[0]
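+
+# Illustrative example (not part of the original patch):
+#
+#   try_syntax_from_message("Bug 1 - fix foo\ntry: -b do -p linux64 -u all")
+#   # -> "try: -b do -p linux64 -u all"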
+
+
+def taskgraph_decision(options, parameters=None):
+ """
+ Run the decision task. This function implements `mach taskgraph decision`,
+ and is responsible for
+
+ * processing decision task command-line options into parameters
+ * running task-graph generation exactly the same way the other `mach
+ taskgraph` commands do
+ * generating a set of artifacts to memorialize the graph
+ * calling TaskCluster APIs to create the graph
+ """
+
+ parameters = parameters or (
+ lambda graph_config: get_decision_parameters(graph_config, options)
+ )
+
+ decision_task_id = os.environ["TASK_ID"]
+
+ # create a TaskGraphGenerator instance
+ tgg = TaskGraphGenerator(
+ root_dir=options.get("root"),
+ parameters=parameters,
+ decision_task_id=decision_task_id,
+ write_artifacts=True,
+ )
+
+ if not create.testing:
+ # set additional index paths for the decision task
+ set_decision_indexes(decision_task_id, tgg.parameters, tgg.graph_config)
+
+ # write out the parameters used to generate this graph
+ write_artifact("parameters.yml", dict(**tgg.parameters))
+
+ # write out the public/actions.json file
+ write_artifact(
+ "actions.json",
+ render_actions_json(tgg.parameters, tgg.graph_config, decision_task_id),
+ )
+
+ # write out the full graph for reference
+ full_task_json = tgg.full_task_graph.to_json()
+ write_artifact("full-task-graph.json", full_task_json)
+
+ # write out the public/runnable-jobs.json file
+ write_artifact(
+ "runnable-jobs.json", full_task_graph_to_runnable_jobs(full_task_json)
+ )
+
+ # write out the public/manifests-by-task.json file
+ write_artifact(
+ "manifests-by-task.json.gz",
+ full_task_graph_to_manifests_by_task(full_task_json),
+ )
+
+ # write out the public/tests-by-manifest.json file
+ write_artifact("tests-by-manifest.json.gz", resolver.tests_by_manifest)
+
+ # this is just a test to check whether the from_json() function is working
+ _, _ = TaskGraph.from_json(full_task_json)
+
+ # write out the target task set to allow reproducing this as input
+ write_artifact("target-tasks.json", list(tgg.target_task_set.tasks.keys()))
+
+ # write out the optimized task graph to describe what will actually happen,
+ # and the map of labels to taskids
+ write_artifact("task-graph.json", tgg.morphed_task_graph.to_json())
+ write_artifact("label-to-taskid.json", tgg.label_to_taskid)
+
+ # write bugbug scheduling information if it was invoked
+ if len(push_schedules) > 0:
+ write_artifact("bugbug-push-schedules.json", push_schedules.popitem()[1])
+
+ # cache run-task & misc/fetch-content
+ scripts_root_dir = os.path.join(GECKO, "taskcluster/scripts")
+ run_task_file_path = os.path.join(scripts_root_dir, "run-task")
+ fetch_content_file_path = os.path.join(scripts_root_dir, "misc/fetch-content")
+ shutil.copy2(run_task_file_path, ARTIFACTS_DIR)
+ shutil.copy2(fetch_content_file_path, ARTIFACTS_DIR)
+
+ # actually create the graph
+ create_tasks(
+ tgg.graph_config,
+ tgg.morphed_task_graph,
+ tgg.label_to_taskid,
+ tgg.parameters,
+ decision_task_id=decision_task_id,
+ )
+
+
+def get_decision_parameters(graph_config, options):
+ """
+ Load parameters from the command-line options for 'taskgraph decision'.
+ This also applies per-project parameters, based on the given project.
+
+ """
+ product_dir = graph_config["product-dir"]
+
+ parameters = {
+ n: options[n]
+ for n in [
+ "base_repository",
+ "base_ref",
+ "base_rev",
+ "head_repository",
+ "head_rev",
+ "head_ref",
+ "head_tag",
+ "project",
+ "pushlog_id",
+ "pushdate",
+ "owner",
+ "level",
+ "repository_type",
+ "target_tasks_method",
+ "tasks_for",
+ ]
+ if n in options
+ }
+
+ commit_message = get_hg_commit_message(os.path.join(GECKO, product_dir))
+
+ repo_path = os.getcwd()
+ repo = get_repository(repo_path)
+ parameters["base_ref"] = _determine_more_accurate_base_ref(
+ repo,
+ candidate_base_ref=options.get("base_ref"),
+ head_ref=options.get("head_ref"),
+ base_rev=options.get("base_rev"),
+ )
+
+ parameters["base_rev"] = _determine_more_accurate_base_rev(
+ repo,
+ base_ref=parameters["base_ref"],
+ candidate_base_rev=options.get("base_rev"),
+ head_rev=options.get("head_rev"),
+ env_prefix=_get_env_prefix(graph_config),
+ )
+
+ # Define default filter list, as most configurations shouldn't need
+ # custom filters.
+ parameters["filters"] = [
+ "target_tasks_method",
+ ]
+ parameters["enable_always_target"] = ["docker-image"]
+ parameters["existing_tasks"] = {}
+ parameters["do_not_optimize"] = []
+ parameters["build_number"] = 1
+ parameters["version"] = get_version(product_dir)
+ parameters["app_version"] = get_app_version(product_dir)
+ parameters["message"] = try_syntax_from_message(commit_message)
+ parameters["hg_branch"] = get_hg_revision_branch(
+ GECKO, revision=parameters["head_rev"]
+ )
+ parameters["next_version"] = None
+ parameters["optimize_strategies"] = None
+ parameters["optimize_target_tasks"] = True
+ parameters["phabricator_diff"] = None
+ parameters["release_type"] = ""
+ parameters["release_eta"] = ""
+ parameters["release_enable_partner_repack"] = False
+ parameters["release_enable_partner_attribution"] = False
+ parameters["release_partners"] = []
+ parameters["release_partner_config"] = {}
+ parameters["release_partner_build_number"] = 1
+ parameters["release_enable_emefree"] = False
+ parameters["release_product"] = None
+ parameters["required_signoffs"] = []
+ parameters["signoff_urls"] = {}
+ parameters["test_manifest_loader"] = "default"
+ parameters["try_mode"] = None
+ parameters["try_task_config"] = {}
+ parameters["try_options"] = None
+
+ # owner must be an email, but sometimes (e.g., for ffxbld) it is not, in which
+ # case, fake it
+ if "@" not in parameters["owner"]:
+ parameters["owner"] += "@noreply.mozilla.org"
+
+ # use the pushdate as build_date if given, else use current time
+ parameters["build_date"] = parameters["pushdate"] or int(time.time())
+ # moz_build_date is the build identifier based on build_date
+ parameters["moz_build_date"] = time.strftime(
+ "%Y%m%d%H%M%S", time.gmtime(parameters["build_date"])
+ )
+
+ project = parameters["project"]
+ try:
+ parameters.update(PER_PROJECT_PARAMETERS[project])
+ except KeyError:
+ logger.warning(
+ "using default project parameters; add {} to "
+ "PER_PROJECT_PARAMETERS in {} to customize behavior "
+ "for this project".format(project, __file__)
+ )
+ parameters.update(PER_PROJECT_PARAMETERS["default"])
+
+ # `target_tasks_method` has higher precedence than `project` parameters
+ if options.get("target_tasks_method"):
+ parameters["target_tasks_method"] = options["target_tasks_method"]
+
+ # ..but can be overridden by the commit message: if it contains the special
+ # string "DONTBUILD" and this is an on-push decision task, then use the
+ # special 'nothing' target task method.
+ if "DONTBUILD" in commit_message and options["tasks_for"] == "hg-push":
+ parameters["target_tasks_method"] = "nothing"
+
+ if options.get("include_push_tasks"):
+ get_existing_tasks(options.get("rebuild_kinds", []), parameters, graph_config)
+
+ # If the target method is nightly, we should build partials. This means
+ # knowing what has been released previously.
+ # An empty release_history is fine, it just means no partials will be built
+ parameters.setdefault("release_history", dict())
+ if "nightly" in parameters.get("target_tasks_method", ""):
+ parameters["release_history"] = populate_release_history("Firefox", project)
+
+ if options.get("try_task_config_file"):
+ task_config_file = os.path.abspath(options.get("try_task_config_file"))
+ else:
+ # if try_task_config.json is present, load it
+ task_config_file = os.path.join(os.getcwd(), "try_task_config.json")
+
+ # load try settings
+ if "try" in project and options["tasks_for"] == "hg-push":
+ set_try_config(parameters, task_config_file)
+
+ if options.get("optimize_target_tasks") is not None:
+ parameters["optimize_target_tasks"] = options["optimize_target_tasks"]
+
+ # Determine if this should be a backstop push.
+ parameters["backstop"] = is_backstop(parameters)
+
+ if "decision-parameters" in graph_config["taskgraph"]:
+ find_object(graph_config["taskgraph"]["decision-parameters"])(
+ graph_config, parameters
+ )
+
+ result = Parameters(**parameters)
+ result.check()
+ return result
+
+
+def get_existing_tasks(rebuild_kinds, parameters, graph_config):
+ """
+ Find the decision task corresponding to the on-push graph, and return
+    a mapping of labels to task-ids from it. This will skip the kinds specified
+ by `rebuild_kinds`.
+ """
+ try:
+ decision_task = retry(
+ find_decision_task,
+ args=(parameters, graph_config),
+ attempts=4,
+ sleeptime=5 * 60,
+ )
+ except Exception:
+ logger.exception("Didn't find existing push task.")
+ sys.exit(1)
+ _, task_graph = TaskGraph.from_json(
+ get_artifact(decision_task, "public/full-task-graph.json")
+ )
+ parameters["existing_tasks"] = find_existing_tasks_from_previous_kinds(
+ task_graph, [decision_task], rebuild_kinds
+ )
+
+
+def set_try_config(parameters, task_config_file):
+ if os.path.isfile(task_config_file):
+ logger.info(f"using try tasks from {task_config_file}")
+ with open(task_config_file) as fh:
+ task_config = json.load(fh)
+ task_config_version = task_config.pop("version", 1)
+ if task_config_version == 1:
+ parameters["try_mode"] = "try_task_config"
+ parameters["try_task_config"] = task_config
+ elif task_config_version == 2:
+ parameters.update(task_config["parameters"])
+ parameters["try_mode"] = "try_task_config"
+ else:
+ raise Exception(
+ f"Unknown `try_task_config.json` version: {task_config_version}"
+ )
+
+ if "try:" in parameters["message"]:
+ parameters["try_mode"] = "try_option_syntax"
+ parameters.update(parse_message(parameters["message"]))
+ else:
+ parameters["try_options"] = None
+
+
+def set_decision_indexes(decision_task_id, params, graph_config):
+ index_paths = []
+ if params["backstop"]:
+ index_paths.append(BACKSTOP_INDEX)
+
+ subs = params.copy()
+ subs["trust-domain"] = graph_config["trust-domain"]
+
+ index_paths = [i.format(**subs) for i in index_paths]
+ for index_path in index_paths:
+ insert_index(index_path, decision_task_id, use_proxy=True)
+
+
+def write_artifact(filename, data):
+ logger.info(f"writing artifact file `{filename}`")
+ if not os.path.isdir(ARTIFACTS_DIR):
+ os.mkdir(ARTIFACTS_DIR)
+ path = os.path.join(ARTIFACTS_DIR, filename)
+ if filename.endswith(".yml"):
+ with open(path, "w") as f:
+ yaml.safe_dump(data, f, allow_unicode=True, default_flow_style=False)
+ elif filename.endswith(".json"):
+ with open(path, "w") as f:
+ json.dump(data, f, sort_keys=True, indent=2, separators=(",", ": "))
+ elif filename.endswith(".json.gz"):
+ import gzip
+
+ with gzip.open(path, "wb") as f:
+ f.write(json.dumps(data).encode("utf-8"))
+ else:
+ raise TypeError(f"Don't know how to write to {filename}")
+
+
+def read_artifact(filename):
+ path = os.path.join(ARTIFACTS_DIR, filename)
+ if filename.endswith(".yml"):
+ return load_yaml(path, filename)
+ if filename.endswith(".json"):
+ with open(path) as f:
+ return json.load(f)
+ if filename.endswith(".json.gz"):
+ import gzip
+
+ with gzip.open(path, "rb") as f:
+            return json.load(f)
+ else:
+ raise TypeError(f"Don't know how to read {filename}")
+
+
+def rename_artifact(src, dest):
+ os.rename(os.path.join(ARTIFACTS_DIR, src), os.path.join(ARTIFACTS_DIR, dest))
diff --git a/taskcluster/gecko_taskgraph/docker.py b/taskcluster/gecko_taskgraph/docker.py
new file mode 100644
index 0000000000..efff0a158b
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/docker.py
@@ -0,0 +1,91 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import os
+from io import BytesIO
+
+from taskgraph.docker import load_image_by_task_id
+from taskgraph.generator import load_tasks_for_kind
+from taskgraph.optimize.strategies import IndexSearch
+from taskgraph.parameters import Parameters
+
+from gecko_taskgraph.util import docker
+
+from . import GECKO
+
+
+def get_image_digest(image_name):
+ params = Parameters(
+ level=os.environ.get("MOZ_SCM_LEVEL", "3"),
+ strict=False,
+ )
+ tasks = load_tasks_for_kind(params, "docker-image")
+ task = tasks[f"docker-image-{image_name}"]
+ return task.attributes["cached_task"]["digest"]
+
+
+def load_image_by_name(image_name, tag=None):
+ params = {"level": os.environ.get("MOZ_SCM_LEVEL", "3")}
+ tasks = load_tasks_for_kind(params, "docker-image")
+ task = tasks[f"docker-image-{image_name}"]
+ deadline = None
+ task_id = IndexSearch().should_replace_task(
+ task, {}, deadline, task.optimization.get("index-search", [])
+ )
+
+ if task_id in (True, False):
+ print(
+ "Could not find artifacts for a docker image "
+ "named `{image_name}`. Local commits and other changes "
+ "in your checkout may cause this error. Try "
+ "updating to a fresh checkout of mozilla-central "
+ "to download image.".format(image_name=image_name)
+ )
+ return False
+
+ return load_image_by_task_id(task_id, tag)
+
+
+def build_context(name, outputFile, args=None):
+ """Build a context.tar for image with specified name."""
+ if not name:
+ raise ValueError("must provide a Docker image name")
+ if not outputFile:
+ raise ValueError("must provide a outputFile")
+
+ image_dir = docker.image_path(name)
+ if not os.path.isdir(image_dir):
+ raise Exception("image directory does not exist: %s" % image_dir)
+
+ docker.create_context_tar(GECKO, image_dir, outputFile, image_name=name, args=args)
+
+
+def build_image(name, tag, args=None):
+ """Build a Docker image of specified name.
+
+ Output from image building process will be printed to stdout.
+ """
+ if not name:
+ raise ValueError("must provide a Docker image name")
+
+ image_dir = docker.image_path(name)
+ if not os.path.isdir(image_dir):
+ raise Exception("image directory does not exist: %s" % image_dir)
+
+ tag = tag or docker.docker_image(name, by_tag=True)
+
+ buf = BytesIO()
+ docker.stream_context_tar(GECKO, image_dir, buf, name, args)
+ docker.post_to_docker(buf.getvalue(), "/build", nocache=1, t=tag)
+
+ print(f"Successfully built {name} and tagged with {tag}")
+
+ if tag.endswith(":latest"):
+ print("*" * 50)
+ print("WARNING: no VERSION file found in image directory.")
+ print("Image is not suitable for deploying/pushing.")
+ print("Create an image suitable for deploying/pushing by creating")
+ print("a VERSION file in the image directory.")
+ print("*" * 50)
diff --git a/taskcluster/gecko_taskgraph/files_changed.py b/taskcluster/gecko_taskgraph/files_changed.py
new file mode 100644
index 0000000000..c814df0806
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/files_changed.py
@@ -0,0 +1,95 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+Support for optimizing tasks based on the set of files that have changed.
+"""
+
+import logging
+import os
+from subprocess import CalledProcessError
+
+from mozbuild.util import memoize
+from mozpack.path import join as join_path
+from mozpack.path import match as mozpackmatch
+from mozversioncontrol import InvalidRepoPath, get_repository_object
+
+from gecko_taskgraph import GECKO
+from gecko_taskgraph.util.hg import get_json_automationrelevance
+
+logger = logging.getLogger(__name__)
+
+
+@memoize
+def get_changed_files(repository, revision):
+ """
+ Get the set of files changed in the push headed by the given revision.
+ Responses are cached, so multiple calls with the same arguments are OK.
+ """
+ contents = get_json_automationrelevance(repository, revision)
+ try:
+ changesets = contents["changesets"]
+ except KeyError:
+ # We shouldn't hit this error in CI.
+ if os.environ.get("MOZ_AUTOMATION"):
+ raise
+
+ # We're likely on an unpublished commit, grab changed files from
+ # version control.
+ return get_locally_changed_files(GECKO)
+
+ logger.debug("{} commits influencing task scheduling:".format(len(changesets)))
+ changed_files = set()
+ for c in changesets:
+ desc = "" # Support empty desc
+ if c["desc"]:
+ desc = c["desc"].splitlines()[0].encode("ascii", "ignore")
+ logger.debug(" {cset} {desc}".format(cset=c["node"][0:12], desc=desc))
+ changed_files |= set(c["files"])
+
+ return changed_files
+
+
+def check(params, file_patterns):
+ """Determine whether any of the files changed in the indicated push to
+ https://hg.mozilla.org match any of the given file patterns."""
+ repository = params.get("head_repository")
+ revision = params.get("head_rev")
+ if not repository or not revision:
+ logger.warning(
+ "Missing `head_repository` or `head_rev` parameters; "
+ "assuming all files have changed"
+ )
+ return True
+
+ changed_files = get_changed_files(repository, revision)
+
+ if "comm_head_repository" in params:
+ repository = params.get("comm_head_repository")
+ revision = params.get("comm_head_rev")
+ if not revision:
+ logger.warning(
+ "Missing `comm_head_rev` parameters; " "assuming all files have changed"
+ )
+ return True
+
+ changed_files |= {
+ join_path("comm", file) for file in get_changed_files(repository, revision)
+ }
+
+ for pattern in file_patterns:
+ for path in changed_files:
+ if mozpackmatch(path, pattern):
+ return True
+
+ return False
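+
+# Illustrative usage (hypothetical parameters, not part of the original
+# patch): an optimization strategy can skip a task when none of the files
+# changed in the push match its patterns:
+#
+#   params = {"head_repository": "https://hg.mozilla.org/mozilla-central",
+#             "head_rev": "abcdef123456"}
+#   if not check(params, ["browser/**", "toolkit/**"]):
+#       ...  # safe to optimize the task away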
+
+
+@memoize
+def get_locally_changed_files(repo):
+ try:
+ vcs = get_repository_object(repo)
+ return set(vcs.get_outgoing_files("AM"))
+ except (InvalidRepoPath, CalledProcessError):
+ return set()
diff --git a/taskcluster/gecko_taskgraph/loader/__init__.py b/taskcluster/gecko_taskgraph/loader/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/loader/__init__.py
diff --git a/taskcluster/gecko_taskgraph/loader/test.py b/taskcluster/gecko_taskgraph/loader/test.py
new file mode 100644
index 0000000000..c97acecd1a
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/loader/test.py
@@ -0,0 +1,142 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import logging
+
+from taskgraph.util.yaml import load_yaml
+
+from gecko_taskgraph.util.copy_task import copy_task
+
+from .transform import loader as transform_loader
+
+logger = logging.getLogger(__name__)
+
+
+def loader(kind, path, config, params, loaded_tasks):
+ """
+ Generate tasks implementing Gecko tests.
+ """
+
+ builds_by_platform = get_builds_by_platform(
+ dep_kind="build", loaded_tasks=loaded_tasks
+ )
+ signed_builds_by_platform = get_builds_by_platform(
+ dep_kind="build-signing", loaded_tasks=loaded_tasks
+ )
+
+ # get the test platforms for those build tasks
+ test_platforms_cfg = load_yaml(path, "test-platforms.yml")
+ test_platforms = get_test_platforms(
+ test_platforms_cfg, builds_by_platform, signed_builds_by_platform
+ )
+
+ # expand the test sets for each of those platforms
+ test_sets_cfg = load_yaml(path, "test-sets.yml")
+ test_platforms = expand_tests(test_sets_cfg, test_platforms)
+
+ # load the test descriptions
+ tests = transform_loader(kind, path, config, params, loaded_tasks)
+ test_descriptions = {t.pop("name"): t for t in tests}
+
+ # generate all tests for all test platforms
+ for test_platform_name, test_platform in test_platforms.items():
+ for test_name in test_platform["test-names"]:
+ test = copy_task(test_descriptions[test_name])
+ test["build-platform"] = test_platform["build-platform"]
+ test["test-platform"] = test_platform_name
+ test["build-label"] = test_platform["build-label"]
+ if test_platform.get("build-signing-label", None):
+ test["build-signing-label"] = test_platform["build-signing-label"]
+
+ test["build-attributes"] = test_platform["build-attributes"]
+ test["test-name"] = test_name
+ if test_platform.get("shippable"):
+ test.setdefault("attributes", {})["shippable"] = True
+ test["attributes"]["shipping_product"] = test_platform[
+ "shipping_product"
+ ]
+
+ logger.debug(
+ "Generating tasks for test {} on platform {}".format(
+ test_name, test["test-platform"]
+ )
+ )
+ yield test
+
+
+def get_builds_by_platform(dep_kind, loaded_tasks):
+ """Find the build tasks on which tests will depend, keyed by
+ platform/type. Returns a dictionary mapping build platform to task."""
+ builds_by_platform = {}
+ for task in loaded_tasks:
+ if task.kind != dep_kind:
+ continue
+
+ build_platform = task.attributes.get("build_platform")
+ build_type = task.attributes.get("build_type")
+ if not build_platform or not build_type:
+ continue
+ platform = f"{build_platform}/{build_type}"
+ if platform in builds_by_platform:
+ raise Exception("multiple build jobs for " + platform)
+ builds_by_platform[platform] = task
+ return builds_by_platform
+
+
+def get_test_platforms(
+ test_platforms_cfg, builds_by_platform, signed_builds_by_platform={}
+):
+ """Get the test platforms for which test tasks should be generated,
+ based on the available build platforms. Returns a dictionary mapping
+ test platform to {test-set, build-platform, build-label}."""
+ test_platforms = {}
+ for test_platform, cfg in test_platforms_cfg.items():
+ build_platform = cfg["build-platform"]
+ if build_platform not in builds_by_platform:
+ logger.warning(
+ "No build task with platform {}; ignoring test platform {}".format(
+ build_platform, test_platform
+ )
+ )
+ continue
+ test_platforms[test_platform] = {
+ "build-platform": build_platform,
+ "build-label": builds_by_platform[build_platform].label,
+ "build-attributes": builds_by_platform[build_platform].attributes,
+ }
+
+ if builds_by_platform[build_platform].attributes.get("shippable"):
+ test_platforms[test_platform]["shippable"] = builds_by_platform[
+ build_platform
+ ].attributes["shippable"]
+ test_platforms[test_platform]["shipping_product"] = builds_by_platform[
+ build_platform
+ ].attributes["shipping_product"]
+
+ test_platforms[test_platform].update(cfg)
+
+ return test_platforms
+
+
+def expand_tests(test_sets_cfg, test_platforms):
+ """Expand the test sets in `test_platforms` out to sets of test names.
+ Returns a dictionary like `get_test_platforms`, with an additional
+ `test-names` key for each test platform, containing a set of test
+ names."""
+ rv = {}
+ for test_platform, cfg in test_platforms.items():
+ test_sets = cfg["test-sets"]
+ if not set(test_sets) <= set(test_sets_cfg):
+ raise Exception(
+ "Test sets {} for test platform {} are not defined".format(
+ ", ".join(test_sets), test_platform
+ )
+ )
+ test_names = set()
+ for test_set in test_sets:
+ test_names.update(test_sets_cfg[test_set])
+ rv[test_platform] = cfg.copy()
+ rv[test_platform]["test-names"] = test_names
+ return rv
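+
+# Illustrative example (hypothetical data, not part of the original patch):
+# with test_sets_cfg == {"webgl": ["webgl1-core", "webgl2-core"]} and a test
+# platform whose cfg contains "test-sets": ["webgl"], expand_tests returns
+# that platform with "test-names" == {"webgl1-core", "webgl2-core"}.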
diff --git a/taskcluster/gecko_taskgraph/loader/transform.py b/taskcluster/gecko_taskgraph/loader/transform.py
new file mode 100644
index 0000000000..1e513bcb73
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/loader/transform.py
@@ -0,0 +1,59 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import logging
+
+from taskgraph.util.yaml import load_yaml
+
+from ..util.templates import merge
+
+logger = logging.getLogger(__name__)
+
+
+def loader(kind, path, config, params, loaded_tasks):
+ """
+ Get the input elements that will be transformed into tasks in a generic
+ way. The elements themselves are free-form, and become the input to the
+ first transform.
+
+ By default, this reads jobs from the `jobs` key, or from yaml files
+ named by `jobs-from`. The entities are read from mappings, and the
+ keys to those mappings are added in the `name` key of each entity.
+
+ If there is a `job-defaults` config, then every job is merged with it.
+ This provides a simple way to set default values for all jobs of a kind.
+ The `job-defaults` key can also be specified in a yaml file pointed to by
+ `jobs-from`. In this case it will only apply to tasks defined in the same
+ file.
+
+ Other kind implementations can use a different loader function to
+ produce inputs and hand them to `transform_inputs`.
+ """
+
+ def jobs():
+ defaults = config.get("job-defaults")
+ for name, job in config.get("jobs", {}).items():
+ if defaults:
+ job = merge(defaults, job)
+ job["job-from"] = "kind.yml"
+ yield name, job
+
+ for filename in config.get("jobs-from", []):
+ tasks = load_yaml(path, filename)
+
+ file_defaults = tasks.pop("job-defaults", None)
+ if defaults:
+ file_defaults = merge(defaults, file_defaults or {})
+
+ for name, job in tasks.items():
+ if file_defaults:
+ job = merge(file_defaults, job)
+ job["job-from"] = filename
+ yield name, job
+
+ for name, job in jobs():
+ job["name"] = name
+ logger.debug(f"Generating tasks for {kind} {name}")
+ yield job
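+
+# Illustrative kind.yml content this loader understands (hypothetical values,
+# not part of the original patch), shown here as the parsed YAML:
+#
+#   config = {
+#       "job-defaults": {"run-on-projects": ["trunk"]},
+#       "jobs": {"lint": {"description": "run lint"}},
+#   }
+#
+# yields a single job:
+#   {"run-on-projects": ["trunk"], "description": "run lint",
+#    "job-from": "kind.yml", "name": "lint"}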
diff --git a/taskcluster/gecko_taskgraph/main.py b/taskcluster/gecko_taskgraph/main.py
new file mode 100644
index 0000000000..e9a353f246
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/main.py
@@ -0,0 +1,813 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import atexit
+import json
+import logging
+import os
+import re
+import shutil
+import subprocess
+import sys
+import tempfile
+import traceback
+from collections import namedtuple
+from concurrent.futures import ProcessPoolExecutor, as_completed
+from pathlib import Path
+from typing import Any, List
+
+import appdirs
+import yaml
+
+Command = namedtuple("Command", ["func", "args", "kwargs", "defaults"])
+commands = {}
+
+
+def command(*args, **kwargs):
+ defaults = kwargs.pop("defaults", {})
+
+ def decorator(func):
+ commands[args[0]] = Command(func, args, kwargs, defaults)
+ return func
+
+ return decorator
+
+
+def argument(*args, **kwargs):
+ def decorator(func):
+ if not hasattr(func, "args"):
+ func.args = []
+ func.args.append((args, kwargs))
+ return func
+
+ return decorator
+
+
+def format_taskgraph_labels(taskgraph):
+ return "\n".join(
+ sorted(
+ taskgraph.tasks[index].label for index in taskgraph.graph.visit_postorder()
+ )
+ )
+
+
+def format_taskgraph_json(taskgraph):
+ return json.dumps(
+ taskgraph.to_json(), sort_keys=True, indent=2, separators=(",", ": ")
+ )
+
+
+def format_taskgraph_yaml(taskgraph):
+ from mozbuild.util import ReadOnlyDict
+
+ class TGDumper(yaml.SafeDumper):
+ def ignore_aliases(self, data):
+ return True
+
+ def represent_ro_dict(self, data):
+ return self.represent_dict(dict(data))
+
+ TGDumper.add_representer(ReadOnlyDict, TGDumper.represent_ro_dict)
+
+ return yaml.dump(taskgraph.to_json(), Dumper=TGDumper, default_flow_style=False)
+
+
+def get_filtered_taskgraph(taskgraph, tasksregex, exclude_keys):
+ """
+    Filter all the tasks on the basis of a regular expression
+    and return a new TaskGraph object
+ """
+ from taskgraph.graph import Graph
+ from taskgraph.task import Task
+ from taskgraph.taskgraph import TaskGraph
+
+ if tasksregex:
+ named_links_dict = taskgraph.graph.named_links_dict()
+ filteredtasks = {}
+ filterededges = set()
+ regexprogram = re.compile(tasksregex)
+
+ for key in taskgraph.graph.visit_postorder():
+ task = taskgraph.tasks[key]
+ if regexprogram.match(task.label):
+ filteredtasks[key] = task
+ for depname, dep in named_links_dict[key].items():
+ if regexprogram.match(dep):
+ filterededges.add((key, dep, depname))
+
+ taskgraph = TaskGraph(filteredtasks, Graph(set(filteredtasks), filterededges))
+
+ if exclude_keys:
+ for label, task in taskgraph.tasks.items():
+ task_dict = task.to_json()
+ for key in exclude_keys:
+ obj = task_dict
+ attrs = key.split(".")
+ while attrs[0] in obj:
+ if len(attrs) == 1:
+ del obj[attrs[0]]
+ break
+ obj = obj[attrs[0]]
+ attrs = attrs[1:]
+ taskgraph.tasks[label] = Task.from_json(task_dict)
+
+ return taskgraph
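+
+# Illustrative usage (not part of the original patch): keep only build tasks
+# and strip their payloads from the output:
+#
+#   filtered = get_filtered_taskgraph(tg, "build-.*", ["task.payload"])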
+
+
+FORMAT_METHODS = {
+ "labels": format_taskgraph_labels,
+ "json": format_taskgraph_json,
+ "yaml": format_taskgraph_yaml,
+}
+
+
+def get_taskgraph_generator(root, parameters):
+ """Helper function to make testing a little easier."""
+ from taskgraph.generator import TaskGraphGenerator
+
+ return TaskGraphGenerator(root_dir=root, parameters=parameters)
+
+
+def format_taskgraph(options, parameters, logfile=None):
+ import taskgraph
+ from taskgraph.parameters import parameters_loader
+
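+    # When run in a worker process, swap the root logger's handler for a
+    # per-process logfile so parallel generations don't interleave on stderr.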
+ if logfile:
+ handler = logging.FileHandler(logfile, mode="w")
+ if logging.root.handlers:
+ oldhandler = logging.root.handlers[-1]
+ logging.root.removeHandler(oldhandler)
+ handler.setFormatter(oldhandler.formatter)
+ logging.root.addHandler(handler)
+
+ if options["fast"]:
+ taskgraph.fast = True
+
+ if isinstance(parameters, str):
+ parameters = parameters_loader(
+ parameters,
+ overrides={"target-kinds": options.get("target_kinds")},
+ strict=False,
+ )
+
+ tgg = get_taskgraph_generator(options.get("root"), parameters)
+
+ tg = getattr(tgg, options["graph_attr"])
+ tg = get_filtered_taskgraph(tg, options["tasks_regex"], options["exclude_keys"])
+ format_method = FORMAT_METHODS[options["format"] or "labels"]
+ return format_method(tg)
+
+
+def dump_output(out, path=None, params_spec=None):
+ from taskgraph.parameters import Parameters
+
+ params_name = Parameters.format_spec(params_spec)
+ fh = None
+ if path:
+ # Substitute params name into file path if necessary
+ if params_spec and "{params}" not in path:
+ name, ext = os.path.splitext(path)
+ name += "_{params}"
+ path = name + ext
+
+ path = path.format(params=params_name)
+ fh = open(path, "w")
+ else:
+ print(
+ "Dumping result with parameters from {}:".format(params_name),
+ file=sys.stderr,
+ )
+    print(out + "\n", file=fh)
+    if fh:
+        fh.close()
+
+
+def generate_taskgraph(options, parameters, logdir):
+ from taskgraph.parameters import Parameters
+
+ def logfile(spec):
+ """Determine logfile given a parameters specification."""
+ if logdir is None:
+ return None
+ return os.path.join(
+ logdir,
+ "{}_{}.log".format(options["graph_attr"], Parameters.format_spec(spec)),
+ )
+
+    # Don't bother using futures if there's only one parameter. This can make
+    # tracebacks a little more readable and avoid additional process overhead.
+ if len(parameters) == 1:
+ spec = parameters[0]
+ out = format_taskgraph(options, spec, logfile(spec))
+ dump_output(out, options["output_file"])
+ return
+
+ futures = {}
+ with ProcessPoolExecutor(max_workers=options["max_workers"]) as executor:
+ for spec in parameters:
+ f = executor.submit(format_taskgraph, options, spec, logfile(spec))
+ futures[f] = spec
+
+ for future in as_completed(futures):
+ output_file = options["output_file"]
+ spec = futures[future]
+ e = future.exception()
+ if e:
+ out = "".join(traceback.format_exception(type(e), e, e.__traceback__))
+ if options["diff"]:
+ # Dump to console so we don't accidentally diff the tracebacks.
+ output_file = None
+ else:
+ out = future.result()
+
+ dump_output(
+ out,
+ path=output_file,
+ params_spec=spec if len(parameters) > 1 else None,
+ )
+
+
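+# The six inspection subcommands below share a single implementation
+# (show_taskgraph); they differ only in which TaskGraphGenerator attribute
+# (`graph_attr`) is rendered.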
+@command(
+ "tasks",
+ help="Show all tasks in the taskgraph.",
+ defaults={"graph_attr": "full_task_set"},
+)
+@command(
+ "full", help="Show the full taskgraph.", defaults={"graph_attr": "full_task_graph"}
+)
+@command(
+ "target",
+ help="Show the set of target tasks.",
+ defaults={"graph_attr": "target_task_set"},
+)
+@command(
+ "target-graph",
+ help="Show the target graph.",
+ defaults={"graph_attr": "target_task_graph"},
+)
+@command(
+ "optimized",
+ help="Show the optimized graph.",
+ defaults={"graph_attr": "optimized_task_graph"},
+)
+@command(
+ "morphed",
+ help="Show the morphed graph.",
+ defaults={"graph_attr": "morphed_task_graph"},
+)
+@argument("--root", "-r", help="root of the taskgraph definition relative to topsrcdir")
+@argument("--quiet", "-q", action="store_true", help="suppress all logging output")
+@argument(
+ "--verbose", "-v", action="store_true", help="include debug-level logging output"
+)
+@argument(
+ "--json",
+ "-J",
+ action="store_const",
+ dest="format",
+ const="json",
+ help="Output task graph as a JSON object",
+)
+@argument(
+ "--yaml",
+ "-Y",
+ action="store_const",
+ dest="format",
+ const="yaml",
+ help="Output task graph as a YAML object",
+)
+@argument(
+ "--labels",
+ "-L",
+ action="store_const",
+ dest="format",
+ const="labels",
+ help="Output the label for each task in the task graph (default)",
+)
+@argument(
+ "--parameters",
+ "-p",
+ default=None,
+ action="append",
+ help="Parameters to use for the generation. Can be a path to file (.yml or "
+ ".json; see `taskcluster/docs/parameters.rst`), a directory (containing "
+ "parameters files), a url, of the form `project=mozilla-central` to download "
+ "latest parameters file for the specified project from CI, or of the form "
+ "`task-id=<decision task id>` to download parameters from the specified "
+ "decision task. Can be specified multiple times, in which case multiple "
+ "generations will happen from the same invocation (one per parameters "
+ "specified).",
+)
+@argument(
+ "--no-optimize",
+ dest="optimize",
+ action="store_false",
+ default="true",
+ help="do not remove tasks from the graph that are found in the "
+ "index (a.k.a. optimize the graph)",
+)
+@argument(
+ "-o",
+ "--output-file",
+ default=None,
+ help="file path to store generated output.",
+)
+@argument(
+ "--tasks-regex",
+ "--tasks",
+ default=None,
+ help="only return tasks with labels matching this regular " "expression.",
+)
+@argument(
+ "--exclude-key",
+ default=None,
+ dest="exclude_keys",
+ action="append",
+ help="Exclude the specified key (using dot notation) from the final result. "
+ "This is mainly useful with '--diff' to filter out expected differences.",
+)
+@argument(
+ "-k",
+ "--target-kind",
+ dest="target_kinds",
+ action="append",
+ default=[],
+ help="only return tasks that are of the given kind, or their dependencies.",
+)
+@argument(
+ "-F",
+ "--fast",
+ default=False,
+ action="store_true",
+ help="enable fast task generation for local debugging.",
+)
+@argument(
+ "--diff",
+ const="default",
+ nargs="?",
+ default=None,
+ help="Generate and diff the current taskgraph against another revision. "
+ "Without args the base revision will be used. A revision specifier such as "
+ "the hash or `.~1` (hg) or `HEAD~1` (git) can be used as well.",
+)
+@argument(
+ "-j",
+ "--max-workers",
+ dest="max_workers",
+ default=None,
+ type=int,
+ help="The maximum number of workers to use for parallel operations such as"
+ "when multiple parameters files are passed.",
+)
+def show_taskgraph(options):
+ from mozversioncontrol import get_repository_object as get_repository
+ from taskgraph.parameters import Parameters, parameters_loader
+
+ if options.pop("verbose", False):
+ logging.root.setLevel(logging.DEBUG)
+
+ repo = None
+ cur_ref = None
+ diffdir = None
+ output_file = options["output_file"]
+
+ if options["diff"]:
+ # --root argument is taskgraph's config at <repo>/taskcluster/ci
+ repo_root = os.getcwd()
+ if options["root"]:
+ repo_root = f"{options['root']}/../.."
+ repo = get_repository(repo_root)
+
+ if not repo.working_directory_clean():
+ print(
+ "abort: can't diff taskgraph with dirty working directory",
+ file=sys.stderr,
+ )
+ return 1
+
+ # We want to return the working directory to the current state
+ # as best we can after we're done. In all known cases, using
+ # branch or bookmark (which are both available on the VCS object)
+ # as `branch` is preferable to a specific revision.
+ cur_ref = repo.branch or repo.head_ref[:12]
+
+ diffdir = tempfile.mkdtemp()
+ atexit.register(
+ shutil.rmtree, diffdir
+ ) # make sure the directory gets cleaned up
+ options["output_file"] = os.path.join(
+ diffdir, f"{options['graph_attr']}_{cur_ref}"
+ )
+ print(f"Generating {options['graph_attr']} @ {cur_ref}", file=sys.stderr)
+
+    parameters: List[Union[str, Parameters]] = options.pop("parameters")
+ if not parameters:
+ overrides = {
+ "target-kinds": options.get("target_kinds"),
+ }
+ parameters = [
+ parameters_loader(None, strict=False, overrides=overrides)
+ ] # will use default values
+
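+    # A directory spec expands to every .yml/.json parameters file inside it,
+    # producing one generation per file.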
+ for param in parameters[:]:
+ if isinstance(param, str) and os.path.isdir(param):
+ parameters.remove(param)
+ parameters.extend(
+ [
+ p.as_posix()
+ for p in Path(param).iterdir()
+ if p.suffix in (".yml", ".json")
+ ]
+ )
+
+ logdir = None
+ if len(parameters) > 1:
+ # Log to separate files for each process instead of stderr to
+ # avoid interleaving.
+ basename = os.path.basename(os.getcwd())
+ logdir = os.path.join(appdirs.user_log_dir("taskgraph"), basename)
+ if not os.path.isdir(logdir):
+ os.makedirs(logdir)
+ else:
+        # Only set up logging if we have a single parameter spec. Otherwise
+        # logging will go to files. This is also used as a hook for Gecko
+        # to set up its `mach`-based logging.
+ setup_logging()
+
+ generate_taskgraph(options, parameters, logdir)
+
+ if options["diff"]:
+ assert diffdir is not None
+ assert repo is not None
+
+ # Reload taskgraph modules to pick up changes and clear global state.
+ for mod in sys.modules.copy():
+ if mod != __name__ and mod.split(".", 1)[0].endswith(
+ ("taskgraph", "mozbuild")
+ ):
+ del sys.modules[mod]
+
+ # Ensure gecko_taskgraph is ahead of taskcluster_taskgraph in sys.path.
+ # Without this, we may end up validating some things against the wrong
+ # schema.
+ import gecko_taskgraph # noqa
+
+ if options["diff"] == "default":
+ base_ref = repo.base_ref
+ else:
+ base_ref = options["diff"]
+
+ try:
+ repo.update(base_ref)
+ base_ref = repo.head_ref[:12]
+ options["output_file"] = os.path.join(
+ diffdir, f"{options['graph_attr']}_{base_ref}"
+ )
+ print(f"Generating {options['graph_attr']} @ {base_ref}", file=sys.stderr)
+ generate_taskgraph(options, parameters, logdir)
+ finally:
+ repo.update(cur_ref)
+
+ # Generate diff(s)
+ diffcmd = [
+ "diff",
+ "-U20",
+ "--report-identical-files",
+ f"--label={options['graph_attr']}@{base_ref}",
+ f"--label={options['graph_attr']}@{cur_ref}",
+ ]
+
+ non_fatal_failures = []
+ for spec in parameters:
+ base_path = os.path.join(diffdir, f"{options['graph_attr']}_{base_ref}")
+ cur_path = os.path.join(diffdir, f"{options['graph_attr']}_{cur_ref}")
+
+ params_name = None
+ if len(parameters) > 1:
+ params_name = Parameters.format_spec(spec)
+ base_path += f"_{params_name}"
+ cur_path += f"_{params_name}"
+
+ # If the base or cur files are missing it means that generation
+ # failed. If one of them failed but not the other, the failure is
+ # likely due to the patch making changes to taskgraph in modules
+ # that don't get reloaded (safe to ignore). If both generations
+ # failed, there's likely a real issue.
+ base_missing = not os.path.isfile(base_path)
+ cur_missing = not os.path.isfile(cur_path)
+ if base_missing != cur_missing: # != is equivalent to XOR for booleans
+ non_fatal_failures.append(os.path.basename(base_path))
+ continue
+
+ try:
+ # If the output file(s) are missing, this command will raise
+ # CalledProcessError with a returncode > 1.
+ proc = subprocess.run(
+ diffcmd + [base_path, cur_path],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ universal_newlines=True,
+ check=True,
+ )
+ diff_output = proc.stdout
+ returncode = 0
+ except subprocess.CalledProcessError as e:
+ # returncode 1 simply means diffs were found
+ if e.returncode != 1:
+ print(e.stderr, file=sys.stderr)
+ raise
+ diff_output = e.output
+ returncode = e.returncode
+
+ dump_output(
+ diff_output,
+ # Don't bother saving file if no diffs were found. Log to
+ # console in this case instead.
+ path=None if returncode == 0 else output_file,
+ params_spec=spec if len(parameters) > 1 else None,
+ )
+
+ if non_fatal_failures:
+ failstr = "\n ".join(sorted(non_fatal_failures))
+ print(
+ "WARNING: Diff skipped for the following generation{s} "
+ "due to failures:\n {failstr}".format(
+ s="s" if len(non_fatal_failures) > 1 else "", failstr=failstr
+ ),
+ file=sys.stderr,
+ )
+
+ if options["format"] != "json":
+ print(
+ "If you were expecting differences in task bodies "
+ 'you should pass "-J"\n',
+ file=sys.stderr,
+ )
+
+ if len(parameters) > 1:
+ print("See '{}' for logs".format(logdir), file=sys.stderr)
+
+
+@command("build-image", help="Build a Docker image")
+@argument("image_name", help="Name of the image to build")
+@argument(
+ "-t", "--tag", help="tag that the image should be built as.", metavar="name:tag"
+)
+@argument(
+ "--context-only",
+ help="File name the context tarball should be written to."
+ "with this option it will only build the context.tar.",
+ metavar="context.tar",
+)
+def build_image(args):
+ from gecko_taskgraph.docker import build_context, build_image
+
+ if args["context_only"] is None:
+ build_image(args["image_name"], args["tag"], os.environ)
+ else:
+ build_context(args["image_name"], args["context_only"], os.environ)
+
+
+@command(
+ "load-image",
+ help="Load a pre-built Docker image. Note that you need to "
+ "have docker installed and running for this to work.",
+)
+@argument(
+ "--task-id",
+ help="Load the image at public/image.tar.zst in this task, "
+ "rather than searching the index",
+)
+@argument(
+ "-t",
+ "--tag",
+ help="tag that the image should be loaded as. If not "
+ "image will be loaded with tag from the tarball",
+ metavar="name:tag",
+)
+@argument(
+ "image_name",
+ nargs="?",
+ help="Load the image of this name based on the current "
+ "contents of the tree (as built for mozilla-central "
+ "or mozilla-inbound)",
+)
+def load_image(args):
+ from gecko_taskgraph.docker import load_image_by_name, load_image_by_task_id
+
+ if not args.get("image_name") and not args.get("task_id"):
+ print("Specify either IMAGE-NAME or TASK-ID")
+ sys.exit(1)
+ try:
+ if args["task_id"]:
+ ok = load_image_by_task_id(args["task_id"], args.get("tag"))
+ else:
+ ok = load_image_by_name(args["image_name"], args.get("tag"))
+ if not ok:
+ sys.exit(1)
+ except Exception:
+ traceback.print_exc()
+ sys.exit(1)
+
+
+@command("image-digest", help="Print the digest of a docker image.")
+@argument(
+ "image_name",
+ help="Print the digest of the image of this name based on the current "
+ "contents of the tree.",
+)
+def image_digest(args):
+ from gecko_taskgraph.docker import get_image_digest
+
+ try:
+ digest = get_image_digest(args["image_name"])
+ print(digest)
+ except Exception:
+ traceback.print_exc()
+ sys.exit(1)
+
+
+@command("decision", help="Run the decision task")
+@argument("--root", "-r", help="root of the taskgraph definition relative to topsrcdir")
+@argument(
+ "--message",
+ required=False,
+ help=argparse.SUPPRESS,
+)
+@argument(
+ "--project",
+ required=True,
+ help="Project to use for creating task graph. Example: --project=try",
+)
+@argument("--pushlog-id", dest="pushlog_id", required=True, default="0")
+@argument("--pushdate", dest="pushdate", required=True, type=int, default=0)
+@argument("--owner", required=True, help="email address of who owns this graph")
+@argument("--level", required=True, help="SCM level of this repository")
+@argument(
+ "--target-tasks-method", help="method for selecting the target tasks to generate"
+)
+@argument(
+ "--repository-type",
+ required=True,
+ help='Type of repository, either "hg" or "git"',
+)
+@argument("--base-repository", required=True, help='URL for "base" repository to clone')
+@argument(
+ "--base-ref", default="", help='Reference of the revision in the "base" repository'
+)
+@argument(
+ "--base-rev",
+ default="",
+ help="Taskgraph decides what to do based on the revision range between "
+ "`--base-rev` and `--head-rev`. Value is determined automatically if not provided",
+)
+@argument(
+ "--head-repository",
+ required=True,
+ help='URL for "head" repository to fetch revision from',
+)
+@argument(
+ "--head-ref", required=True, help="Reference (this is same as rev usually for hg)"
+)
+@argument(
+ "--head-rev", required=True, help="Commit revision to use from head repository"
+)
+@argument("--head-tag", help="Tag attached to the revision", default="")
+@argument(
+ "--tasks-for", required=True, help="the tasks_for value used to generate this task"
+)
+@argument("--try-task-config-file", help="path to try task configuration file")
+def decision(options):
+ from gecko_taskgraph.decision import taskgraph_decision
+
+ taskgraph_decision(options)
+
+
+@command("action-callback", description="Run action callback used by action tasks")
+@argument(
+ "--root",
+ "-r",
+ default="taskcluster/ci",
+ help="root of the taskgraph definition relative to topsrcdir",
+)
+def action_callback(options):
+ from gecko_taskgraph.actions import trigger_action_callback
+ from gecko_taskgraph.actions.util import get_parameters
+
+ try:
+ # the target task for this action (or null if it's a group action)
+ task_id = json.loads(os.environ.get("ACTION_TASK_ID", "null"))
+ # the target task group for this action
+ task_group_id = os.environ.get("ACTION_TASK_GROUP_ID", None)
+ input = json.loads(os.environ.get("ACTION_INPUT", "null"))
+ callback = os.environ.get("ACTION_CALLBACK", None)
+ root = options["root"]
+
+ parameters = get_parameters(task_group_id)
+
+ return trigger_action_callback(
+ task_group_id=task_group_id,
+ task_id=task_id,
+ input=input,
+ callback=callback,
+ parameters=parameters,
+ root=root,
+ test=False,
+ )
+ except Exception:
+ traceback.print_exc()
+ sys.exit(1)
+
+
+@command("test-action-callback", description="Run an action callback in a testing mode")
+@argument(
+ "--root",
+ "-r",
+ default="taskcluster/ci",
+ help="root of the taskgraph definition relative to topsrcdir",
+)
+@argument(
+ "--parameters",
+ "-p",
+ default="",
+ help="parameters file (.yml or .json; see " "`taskcluster/docs/parameters.rst`)`",
+)
+@argument("--task-id", default=None, help="TaskId to which the action applies")
+@argument(
+ "--task-group-id", default=None, help="TaskGroupId to which the action applies"
+)
+@argument("--input", default=None, help="Action input (.yml or .json)")
+@argument("callback", default=None, help="Action callback name (Python function name)")
+def test_action_callback(options):
+ import taskgraph.parameters
+ from taskgraph.config import load_graph_config
+ from taskgraph.util import yaml
+
+ import gecko_taskgraph.actions
+
+ def load_data(filename):
+ with open(filename) as f:
+ if filename.endswith(".yml"):
+ return yaml.load_stream(f)
+ if filename.endswith(".json"):
+ return json.load(f)
+ raise Exception(f"unknown filename {filename}")
+
+ try:
+ task_id = options["task_id"]
+
+ if options["input"]:
+ input = load_data(options["input"])
+ else:
+ input = None
+
+ root = options["root"]
+ graph_config = load_graph_config(root)
+ trust_domain = graph_config["trust-domain"]
+ graph_config.register()
+
+ parameters = taskgraph.parameters.load_parameters_file(
+ options["parameters"], strict=False, trust_domain=trust_domain
+ )
+ parameters.check()
+
+ return gecko_taskgraph.actions.trigger_action_callback(
+ task_group_id=options["task_group_id"],
+ task_id=task_id,
+ input=input,
+ callback=options["callback"],
+ parameters=parameters,
+ root=root,
+ test=True,
+ )
+ except Exception:
+ traceback.print_exc()
+ sys.exit(1)
+
+
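+# Builds the top-level argparse parser from the registry populated by the
+# @command and @argument decorators above.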
+def create_parser():
+ parser = argparse.ArgumentParser(description="Interact with taskgraph")
+ subparsers = parser.add_subparsers()
+ for _, (func, args, kwargs, defaults) in commands.items():
+ subparser = subparsers.add_parser(*args, **kwargs)
+ for arg in func.args:
+ subparser.add_argument(*arg[0], **arg[1])
+ subparser.set_defaults(command=func, **defaults)
+ return parser
+
+
+def setup_logging():
+ logging.basicConfig(
+ format="%(asctime)s - %(levelname)s - %(message)s", level=logging.INFO
+ )
+
+
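+# Hypothetical direct invocation (in practice this module is typically
+# driven via `mach taskgraph`):
+#   python -m gecko_taskgraph.main tasks -p project=mozilla-central -J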
+def main(args=sys.argv[1:]):
+ setup_logging()
+ parser = create_parser()
+ args = parser.parse_args(args)
+ try:
+ args.command(vars(args))
+ except Exception:
+ traceback.print_exc()
+ sys.exit(1)
diff --git a/taskcluster/gecko_taskgraph/manifests/fennec_geckoview.yml b/taskcluster/gecko_taskgraph/manifests/fennec_geckoview.yml
new file mode 100644
index 0000000000..18974d3c19
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/manifests/fennec_geckoview.yml
@@ -0,0 +1,210 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+---
+s3_bucket_paths:
+ - maven2
+default_locales: # Ignored for geckoview
+ - en-US
+tasktype_map: # Map task reference to task type.
+ build: build
+ build-fat-aar: build
+ build-signing: signing
+
+# A default entry, which the mappings below extend and override.
+# Final 'destinations' will be the product of:
+# s3_bucket_paths + destinations + locale_prefix + pretty_name
+default: &default
+ locale_prefix: ''
+ source_path_modifier: maven/org/mozilla/geckoview/${artifact_id}/${major_version}.${minor_version}.${build_date}
+ description: "TO_BE_OVERRIDDEN"
+ destinations: # locale_prefix is appended
+ - org/mozilla/geckoview/${artifact_id}/${major_version}.${minor_version}.${build_date}
+
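+# Each Maven artifact (.aar, .pom, .module, -javadoc.jar, -sources.jar) is
+# listed below together with its checksum companions (.md5/.sha1/.sha256/
+# .sha512), produced by the build tasks, and its detached GPG signature
+# (.asc), produced by build-signing.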
+mapping:
+ ${artifact_id}-${major_version}.${minor_version}.${build_date}.aar:
+ <<: *default
+ from:
+ - build
+ - build-fat-aar
+ pretty_name: ${artifact_id}-${major_version}.${minor_version}.${build_date}.aar
+ checksums_path: ${artifact_id}-${major_version}.${minor_version}.${build_date}.aar
+ ${artifact_id}-${major_version}.${minor_version}.${build_date}.aar.asc:
+ <<: *default
+ from: ['build-signing']
+ pretty_name: ${artifact_id}-${major_version}.${minor_version}.${build_date}.aar.asc
+ checksums_path: ${artifact_id}-${major_version}.${minor_version}.${build_date}.aar.asc
+ ${artifact_id}-${major_version}.${minor_version}.${build_date}.aar.md5:
+ <<: *default
+ from:
+ - build
+ - build-fat-aar
+ pretty_name: ${artifact_id}-${major_version}.${minor_version}.${build_date}.aar.md5
+ checksums_path: ${artifact_id}-${major_version}.${minor_version}.${build_date}.aar.md5
+ ${artifact_id}-${major_version}.${minor_version}.${build_date}.aar.sha1:
+ <<: *default
+ from:
+ - build
+ - build-fat-aar
+ pretty_name: ${artifact_id}-${major_version}.${minor_version}.${build_date}.aar.sha1
+ checksums_path: ${artifact_id}-${major_version}.${minor_version}.${build_date}.aar.sha1
+ ${artifact_id}-${major_version}.${minor_version}.${build_date}.aar.sha256:
+ <<: *default
+ from:
+ - build
+ - build-fat-aar
+ pretty_name: ${artifact_id}-${major_version}.${minor_version}.${build_date}.aar.sha256
+ checksums_path: ${artifact_id}-${major_version}.${minor_version}.${build_date}.aar.sha256
+ ${artifact_id}-${major_version}.${minor_version}.${build_date}.aar.sha512:
+ <<: *default
+ from:
+ - build
+ - build-fat-aar
+ pretty_name: ${artifact_id}-${major_version}.${minor_version}.${build_date}.aar.sha512
+ checksums_path: ${artifact_id}-${major_version}.${minor_version}.${build_date}.aar.sha512
+ ${artifact_id}-${major_version}.${minor_version}.${build_date}.pom:
+ <<: *default
+ from:
+ - build
+ - build-fat-aar
+ pretty_name: ${artifact_id}-${major_version}.${minor_version}.${build_date}.pom
+ checksums_path: ${artifact_id}-${major_version}.${minor_version}.${build_date}.pom
+ ${artifact_id}-${major_version}.${minor_version}.${build_date}.pom.asc:
+ <<: *default
+ from: ['build-signing']
+ pretty_name: ${artifact_id}-${major_version}.${minor_version}.${build_date}.pom.asc
+ checksums_path: ${artifact_id}-${major_version}.${minor_version}.${build_date}.pom.asc
+ ${artifact_id}-${major_version}.${minor_version}.${build_date}.pom.md5:
+ <<: *default
+ from:
+ - build
+ - build-fat-aar
+ pretty_name: ${artifact_id}-${major_version}.${minor_version}.${build_date}.pom.md5
+ checksums_path: ${artifact_id}-${major_version}.${minor_version}.${build_date}.pom.md5
+ ${artifact_id}-${major_version}.${minor_version}.${build_date}.pom.sha1:
+ <<: *default
+ from:
+ - build
+ - build-fat-aar
+ pretty_name: ${artifact_id}-${major_version}.${minor_version}.${build_date}.pom.sha1
+ checksums_path: ${artifact_id}-${major_version}.${minor_version}.${build_date}.pom.sha1
+ ${artifact_id}-${major_version}.${minor_version}.${build_date}.pom.sha256:
+ <<: *default
+ from:
+ - build
+ - build-fat-aar
+ pretty_name: ${artifact_id}-${major_version}.${minor_version}.${build_date}.pom.sha256
+ checksums_path: ${artifact_id}-${major_version}.${minor_version}.${build_date}.pom.sha256
+ ${artifact_id}-${major_version}.${minor_version}.${build_date}.pom.sha512:
+ <<: *default
+ from:
+ - build
+ - build-fat-aar
+ pretty_name: ${artifact_id}-${major_version}.${minor_version}.${build_date}.pom.sha512
+ checksums_path: ${artifact_id}-${major_version}.${minor_version}.${build_date}.pom.sha512
+ ${artifact_id}-${major_version}.${minor_version}.${build_date}.module:
+ <<: *default
+ from:
+ - build
+ - build-fat-aar
+ pretty_name: ${artifact_id}-${major_version}.${minor_version}.${build_date}.module
+ checksums_path: ${artifact_id}-${major_version}.${minor_version}.${build_date}.module
+ ${artifact_id}-${major_version}.${minor_version}.${build_date}.module.asc:
+ <<: *default
+ from: ['build-signing']
+ pretty_name: ${artifact_id}-${major_version}.${minor_version}.${build_date}.module.asc
+ checksums_path: ${artifact_id}-${major_version}.${minor_version}.${build_date}.module.asc
+ ${artifact_id}-${major_version}.${minor_version}.${build_date}.module.md5:
+ <<: *default
+ from:
+ - build
+ - build-fat-aar
+ pretty_name: ${artifact_id}-${major_version}.${minor_version}.${build_date}.module.md5
+ checksums_path: ${artifact_id}-${major_version}.${minor_version}.${build_date}.module.md5
+ ${artifact_id}-${major_version}.${minor_version}.${build_date}.module.sha1:
+ <<: *default
+ from:
+ - build
+ - build-fat-aar
+ pretty_name: ${artifact_id}-${major_version}.${minor_version}.${build_date}.module.sha1
+ checksums_path: ${artifact_id}-${major_version}.${minor_version}.${build_date}.module.sha1
+ ${artifact_id}-${major_version}.${minor_version}.${build_date}.module.sha256:
+ <<: *default
+ from:
+ - build
+ - build-fat-aar
+ pretty_name: ${artifact_id}-${major_version}.${minor_version}.${build_date}.module.sha256
+ checksums_path: ${artifact_id}-${major_version}.${minor_version}.${build_date}.module.sha256
+ ${artifact_id}-${major_version}.${minor_version}.${build_date}.module.sha512:
+ <<: *default
+ from:
+ - build
+ - build-fat-aar
+ pretty_name: ${artifact_id}-${major_version}.${minor_version}.${build_date}.module.sha512
+ checksums_path: ${artifact_id}-${major_version}.${minor_version}.${build_date}.module.sha512
+ ${artifact_id}-${major_version}.${minor_version}.${build_date}-javadoc.jar:
+ <<: *default
+ from:
+ - build
+ - build-fat-aar
+ pretty_name: ${artifact_id}-${major_version}.${minor_version}.${build_date}-javadoc.jar
+ checksums_path: ${artifact_id}-${major_version}.${minor_version}.${build_date}-javadoc.jar
+ ${artifact_id}-${major_version}.${minor_version}.${build_date}-javadoc.jar.asc:
+ <<: *default
+ from: ['build-signing']
+ pretty_name: ${artifact_id}-${major_version}.${minor_version}.${build_date}-javadoc.jar.asc
+ checksums_path: ${artifact_id}-${major_version}.${minor_version}.${build_date}-javadoc.jar.asc
+ ${artifact_id}-${major_version}.${minor_version}.${build_date}-javadoc.jar.md5:
+ <<: *default
+ from:
+ - build
+ - build-fat-aar
+ pretty_name: ${artifact_id}-${major_version}.${minor_version}.${build_date}-javadoc.jar.md5
+ checksums_path: ${artifact_id}-${major_version}.${minor_version}.${build_date}-javadoc.jar.md5
+ ${artifact_id}-${major_version}.${minor_version}.${build_date}-javadoc.jar.sha1:
+ <<: *default
+ from:
+ - build
+ - build-fat-aar
+ pretty_name: ${artifact_id}-${major_version}.${minor_version}.${build_date}-javadoc.jar.sha1
+ checksums_path: ${artifact_id}-${major_version}.${minor_version}.${build_date}-javadoc.jar.sha1
+ ${artifact_id}-${major_version}.${minor_version}.${build_date}-javadoc.jar.sha256:
+ <<: *default
+ from:
+ - build
+ - build-fat-aar
+ pretty_name: ${artifact_id}-${major_version}.${minor_version}.${build_date}-javadoc.jar.sha256
+ checksums_path: ${artifact_id}-${major_version}.${minor_version}.${build_date}-javadoc.jar.sha256
+ ${artifact_id}-${major_version}.${minor_version}.${build_date}-javadoc.jar.sha512:
+ <<: *default
+ from:
+ - build
+ - build-fat-aar
+ pretty_name: ${artifact_id}-${major_version}.${minor_version}.${build_date}-javadoc.jar.sha512
+ checksums_path: ${artifact_id}-${major_version}.${minor_version}.${build_date}-javadoc.jar.sha512
+ ${artifact_id}-${major_version}.${minor_version}.${build_date}-sources.jar:
+ <<: *default
+ from:
+ - build
+ - build-fat-aar
+ pretty_name: ${artifact_id}-${major_version}.${minor_version}.${build_date}-sources.jar
+ checksums_path: ${artifact_id}-${major_version}.${minor_version}.${build_date}-sources.jar
+ ${artifact_id}-${major_version}.${minor_version}.${build_date}-sources.jar.asc:
+ <<: *default
+ from: ['build-signing']
+ pretty_name: ${artifact_id}-${major_version}.${minor_version}.${build_date}-sources.jar.asc
+ checksums_path: ${artifact_id}-${major_version}.${minor_version}.${build_date}-sources.jar.asc
+ ${artifact_id}-${major_version}.${minor_version}.${build_date}-sources.jar.md5:
+ <<: *default
+ from:
+ - build
+ - build-fat-aar
+ pretty_name: ${artifact_id}-${major_version}.${minor_version}.${build_date}-sources.jar.md5
+ checksums_path: ${artifact_id}-${major_version}.${minor_version}.${build_date}-sources.jar.md5
+ ${artifact_id}-${major_version}.${minor_version}.${build_date}-sources.jar.sha1:
+ <<: *default
+ from:
+ - build
+ - build-fat-aar
+ pretty_name: ${artifact_id}-${major_version}.${minor_version}.${build_date}-sources.jar.sha1
+ checksums_path: ${artifact_id}-${major_version}.${minor_version}.${build_date}-sources.jar.sha1
diff --git a/taskcluster/gecko_taskgraph/manifests/firefox_candidates.yml b/taskcluster/gecko_taskgraph/manifests/firefox_candidates.yml
new file mode 100644
index 0000000000..fb58439509
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/manifests/firefox_candidates.yml
@@ -0,0 +1,433 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+---
+# This file contains exhaustive information about all the release artifacts that
+# are needed within a type of release.
+#
+# Structure
+# --------
+# `s3_bucket_paths` -- prefix to be used per product to correctly access our S3 buckets
+# `default_locales` -- list of locales used when composing upstream artifacts or the list of
+#       destinations; if an artifact carries no locale, these locales are used instead.
+# `tasktype_map` -- mapping between task reference and task type, particularly useful when
+# composing the upstreamArtifacts for scriptworker.
+# `platform_names` -- various platform mappings used in reckoning artifacts or other paths
+# `default` -- a default entry, which the mappings extend and override in such a way that
+# final path full-destinations will be a concatenation of the following:
+# `s3_bucket_paths`, `destinations`, `locale_prefix`, `pretty_name`
+# `from` -- specifies the dependency(ies) from which to expect the particular artifact
+# `all_locales` -- boolean argument to specify whether that particular artifact is to be expected
+# for all locales or just the default one
+# `description` -- brief summary of what that artifact is
+# `locale_prefix` -- prefix to be used in the final destination paths, whether that's for default locale or not
+# `source_path_modifier` -- any parent dir that might be used in between artifact prefix and filename at source location
+# for example `public/build` vs `public/build/ach/`.
+# `destinations` -- final list of directories where to push the artifacts in S3
+# `pretty_name` -- the final name the artifact will have at destination
+# `checksums_path` -- the name to identify one artifact within the checksums file
+# `not_for_platforms` -- filtering option to avoid associating an artifact with a specific platform
+# `only_for_platforms` -- filtering option to exclusively include the association of an artifact for a specific platform
+# `partials_only` -- filtering option to avoid associating an artifact unless this flag is present
+# `update_balrog_manifest`-- flag needed downstream in beetmover jobs to reckon the balrog manifest
+# `from_buildid` -- flag needed downstream in beetmover jobs to reckon the balrog manifest
+
+s3_bucket_paths:
+ by-platform:
+ .*devedition.*:
+ - pub/devedition/candidates
+ default:
+ - pub/firefox/candidates
+default_locales:
+ - en-US
+tasktype_map:
+ build: build
+ signing: signing
+ mar-signing: signing
+ partials-signing: signing
+ repackage: repackage
+ repackage-deb: repackage
+ repackage-deb-l10n: repackage
+ repackage-signing: repackage
+ repackage-signing-msi: repackage
+ repackage-signing-shippable-l10n-msix: signing
+ langpack-copy: scriptworker
+ attribution: build
+ attribution-l10n: build
+platform_names:
+ path_platform:
+ by-platform:
+ linux-shippable: 'linux-i686'
+ linux-devedition: 'linux-i686'
+ linux64-shippable: 'linux-x86_64'
+ linux64-devedition: 'linux-x86_64'
+ linux64-asan-reporter-shippable: 'linux-x86_64-asan-reporter'
+ macosx64-shippable: 'mac'
+ macosx64-devedition: 'mac'
+ win32-shippable: 'win32'
+ win32-devedition: 'win32'
+ win64-shippable: 'win64'
+ win64-devedition: 'win64'
+ win64-aarch64-shippable: 'win64-aarch64'
+ win64-aarch64-devedition: 'win64-aarch64'
+ win64-asan-reporter-shippable: 'win64-asan-reporter'
+ tools_platform:
+ by-platform:
+ linux-shippable: 'linux'
+ linux-devedition: 'linux-devedition'
+ linux64-shippable: 'linux64'
+ linux64-devedition: 'linux64-devedition'
+ linux64-asan-reporter-shippable: 'linux-x86_64-asan-reporter'
+ macosx64-shippable: 'macosx64'
+ macosx64-devedition: 'macosx64-devedition'
+ win32-shippable: 'win32'
+ win32-devedition: 'win32-devedition'
+ win64-shippable: 'win64'
+ win64-devedition: 'win64-devedition'
+ win64-aarch64-shippable: 'win64-aarch64'
+ win64-aarch64-devedition: 'win64-aarch64-devedition'
+ win64-asan-reporter-shippable: 'win64-asan-reporter'
+ filename_platform:
+ by-platform:
+ linux-shippable: 'linux'
+ linux-devedition: 'linux'
+ linux64-shippable: 'linux64'
+ linux64-devedition: 'linux64'
+ linux64-asan-reporter-shippable: 'linux-x86_64-asan-reporter'
+ macosx64-shippable: 'macosx64'
+ macosx64-devedition: 'macosx64'
+ win32-shippable: 'win32'
+ win32-devedition: 'win32'
+ win64-shippable: 'win64'
+ win64-devedition: 'win64'
+ win64-aarch64-shippable: 'win64_aarch64'
+ win64-aarch64-devedition: 'win64_aarch64'
+ win64-asan-reporter-shippable: 'win64-asan-reporter'
+
+default: &default
+ from:
+ - build
+ all_locales: false
+ description: "TO_BE_OVERRIDDEN"
+ locale_prefix: '${locale}/'
+ source_path_modifier:
+ by-locale:
+ default: '${locale}'
+ en-US: ''
+ destinations:
+ - ${version}-candidates/build${build_number}/${path_platform}
+
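+# Example full destination with hypothetical values (per the concatenation
+# rule described above):
+# pub/firefox/candidates/124.0-candidates/build1/linux-x86_64/en-US/firefox-124.0.tar.bz2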
+mapping:
+ buildhub.json:
+ <<: *default
+ all_locales: false
+ description: "Build related information to be consumed by Buildhub service"
+ pretty_name: buildhub.json
+ checksums_path: ${path_platform}/${locale}/buildhub.json
+ target.common.tests.tar.gz:
+ <<: *default
+ description: "Mixture of reftests, mochitests, UI and others, commonly bundled together in a test suite"
+ pretty_name: firefox-${version}.common.tests.tar.gz
+ checksums_path: ${path_platform}/${locale}/firefox-${version}.common.tests.tar.gz
+ target.cppunittest.tests.tar.gz:
+ <<: *default
+ description: "C++ unittests related in-tree test infrastructure"
+ pretty_name: firefox-${version}.cppunittest.tests.tar.gz
+ checksums_path: ${path_platform}/${locale}/firefox-${version}.cppunittest.tests.tar.gz
+ target.crashreporter-symbols.zip:
+ <<: *default
+ description: "Crashreporter symbols to be consumed by Socorro"
+ pretty_name: firefox-${version}.crashreporter-symbols.zip
+ checksums_path: ${path_platform}/${locale}/firefox-${version}.crashreporter-symbols.zip
+ target.json:
+ <<: *default
+ description: "Various compile and moz_app flags baked together in a json file"
+ pretty_name: firefox-${version}.json
+ checksums_path: ${path_platform}/${locale}/firefox-${version}.json
+ target.mochitest.tests.tar.gz:
+ <<: *default
+ description: "Results for running the mochitest testing framework via Javascript function calls"
+ pretty_name: firefox-${version}.mochitest.tests.tar.gz
+ checksums_path: ${path_platform}/${locale}/firefox-${version}.mochitest.tests.tar.gz
+ target.mozinfo.json:
+ <<: *default
+ description: "Various compile and moz_app flags baked together in a json file"
+ pretty_name: firefox-${version}.mozinfo.json
+ checksums_path: ${path_platform}/${locale}/firefox-${version}.mozinfo.json
+ target.reftest.tests.tar.gz:
+ <<: *default
+ description: "Results for running the reftest testing framework via display of two Web pages comparison"
+ pretty_name: firefox-${version}.reftest.tests.tar.gz
+ checksums_path: ${path_platform}/${locale}/firefox-${version}.reftest.tests.tar.gz
+ target.talos.tests.tar.gz:
+ <<: *default
+ description: "Results for running the talos testing framework to measure performance"
+ pretty_name: firefox-${version}.talos.tests.tar.gz
+ checksums_path: ${path_platform}/${locale}/firefox-${version}.talos.tests.tar.gz
+ target.awsy.tests.tar.gz:
+ <<: *default
+ description: "Results for running the awsy testing framework to track memory usage"
+ pretty_name: firefox-${version}.awsy.tests.tar.gz
+ checksums_path: ${path_platform}/${locale}/firefox-${version}.awsy.tests.tar.gz
+ target.test_packages.json:
+ <<: *default
+ description: "File containing metadata about all other files and testing harnesses specifics"
+ pretty_name: firefox-${version}.test_packages.json
+ checksums_path: ${path_platform}/${locale}/firefox-${version}.test_packages.json
+ target.web-platform.tests.tar.gz:
+ <<: *default
+ description: "Results for running the webplatform testing framework to cover standard Web platform features"
+ pretty_name: firefox-${version}.web-platform.tests.tar.gz
+ checksums_path: ${path_platform}/${locale}/firefox-${version}.web-platform.tests.tar.gz
+ target.xpcshell.tests.tar.gz:
+ <<: *default
+ description: "Results for running the xpcshell testing framework to enable XPConnect console application"
+ pretty_name: firefox-${version}.xpcshell.tests.tar.gz
+ checksums_path: ${path_platform}/${locale}/firefox-${version}.xpcshell.tests.tar.gz
+ target_info.txt:
+ <<: *default
+ description: "File containing the buildID"
+ locale_prefix: ''
+ pretty_name: ${filename_platform}_info.txt
+ checksums_path: ${filename_platform}_info.txt
+ destinations:
+ - ${version}-candidates/build${build_number}
+ mozharness.zip:
+ <<: *default
+ description: "File containing the mozharness set of scripts and configuration used by various automation tools"
+ pretty_name: mozharness.zip
+ checksums_path: ${path_platform}/${locale}/mozharness.zip
+ target.jsshell.zip:
+ <<: *default
+ description: "Set of shells to allow test snippets of Javascript code without needing to reload the page"
+ locale_prefix: ''
+ pretty_name: jsshell-${path_platform}.zip
+ checksums_path: jsshell/jsshell-${path_platform}.zip
+ destinations:
+ - ${version}-candidates/build${build_number}/jsshell
+ target.langpack.xpi:
+ <<: *default
+ all_locales: true
+ description: "Localized repack that grabs a packaged en-US Firefox and repackages it as locale-specific Firefox"
+ locale_prefix: ''
+ from:
+ - langpack-copy
+ - signing
+ only_for_platforms:
+ - linux-shippable
+ - linux64-shippable
+ - linux64-devedition
+ - macosx64-shippable
+ - win32-shippable
+ - win64-shippable
+ pretty_name: ${locale}.xpi
+ checksums_path: ${path_platform}/xpi/${locale}.xpi
+ destinations:
+ - ${version}-candidates/build${build_number}/${path_platform}/xpi
+ target.langpack.deb:
+ <<: *default
+ all_locales: true
+ description: "langpack.xpi repackaged as a .deb"
+ locale_prefix: ''
+ from:
+ - repackage-deb-l10n
+ only_for_platforms:
+ - linux-shippable
+ - linux64-shippable
+ - linux-devedition
+ - linux64-devedition
+ pretty_name: ${locale}.deb
+ checksums_path: ${path_platform}/deb-l10n/${locale}.deb
+ destinations:
+ - ${version}-candidates/build${build_number}/${path_platform}/deb-l10n
+ update_balrog_manifest: false
+ mar:
+ <<: *default
+ description: "Alongside `mbsdiff`, a tool used to generate partials"
+ locale_prefix: ''
+ source_path_modifier: 'host/bin'
+ pretty_name: ${tools_platform}/mar
+ checksums_path: mar-tools/${tools_platform}/mar
+ not_for_platforms:
+ - win32-shippable
+ - win64-shippable
+ - win64-aarch64-shippable
+ - win32-devedition
+ - win64-devedition
+ - win64-aarch64-devedition
+ destinations:
+ - ${version}-candidates/build${build_number}/mar-tools
+ mbsdiff:
+ <<: *default
+ description: "Alongside `mar`, a tool used to generate partials"
+ locale_prefix: ''
+ source_path_modifier: 'host/bin'
+ pretty_name: ${tools_platform}/mbsdiff
+ checksums_path: mar-tools/${tools_platform}/mbsdiff
+ not_for_platforms:
+ - win32-shippable
+ - win64-shippable
+ - win64-aarch64-shippable
+ - win32-devedition
+ - win64-devedition
+ - win64-aarch64-devedition
+ destinations:
+ - ${version}-candidates/build${build_number}/mar-tools
+ target.tar.bz2:
+ <<: *default
+ description: "Main installer for Linux platforms"
+ all_locales: true
+ from:
+ - signing
+ only_for_platforms:
+ - linux-shippable
+ - linux64-shippable
+ - linux-devedition
+ - linux64-devedition
+ pretty_name: firefox-${version}.tar.bz2
+ checksums_path: ${path_platform}/${locale}/firefox-${version}.tar.bz2
+ target.tar.bz2.asc:
+ <<: *default
+ description: "Detached signature for the checksums file"
+ all_locales: true
+ from:
+ - signing
+ only_for_platforms:
+ - linux-shippable
+ - linux64-shippable
+ - linux-devedition
+ - linux64-devedition
+ pretty_name: firefox-${version}.tar.bz2.asc
+ checksums_path: ${path_platform}/${locale}/firefox-${version}.tar.bz2.asc
+ target.pkg:
+ <<: *default
+ description: "Main package installer for Mac OS X platforms"
+ all_locales: true
+ from:
+ - signing
+ only_for_platforms:
+ - macosx64-shippable
+ pretty_name: Firefox ${version}.pkg
+ checksums_path: ${path_platform}/${locale}/Firefox ${version}.pkg
+ target.dmg:
+ <<: *default
+ description: "Main package disk image for Mac OS X platforms"
+ all_locales: true
+ # Attribution-l10n jobs don't have locale in the artifact path
+ source_path_modifier: ""
+ from:
+ - attribution
+ - attribution-l10n
+ only_for_platforms:
+ - macosx64-shippable
+ - macosx64-devedition
+ pretty_name: Firefox ${version}.dmg
+ checksums_path: ${path_platform}/${locale}/Firefox ${version}.dmg
+ target.zip:
+ <<: *default
+ description: "Main package installer for Windows platforms"
+ all_locales: true
+ from:
+ - signing
+ only_for_platforms:
+ - win64-shippable
+ - win32-shippable
+ - win64-aarch64-shippable
+ - win64-devedition
+ - win32-devedition
+ - win64-aarch64-devedition
+ pretty_name: firefox-${version}.zip
+ checksums_path: ${path_platform}/${locale}/firefox-${version}.zip
+ target.installer.exe:
+ <<: *default
+ description: "Main installer for Windows platforms"
+ all_locales: true
+ source_path_modifier: ''
+ from:
+ - attribution
+ - attribution-l10n
+ only_for_platforms:
+ - win64-shippable
+ - win32-shippable
+ - win64-aarch64-shippable
+ - win64-devedition
+ - win32-devedition
+ - win64-aarch64-devedition
+ pretty_name: Firefox Setup ${version}.exe
+ checksums_path: ${path_platform}/${locale}/Firefox Setup ${version}.exe
+ target.stub-installer.exe:
+ <<: *default
+ description: "Stub installer for Win32 platforms"
+ all_locales: true
+ source_path_modifier: ''
+ from:
+ - attribution
+ - attribution-l10n
+ only_for_platforms:
+ - win32-shippable
+ - win32-devedition
+ pretty_name: Firefox Installer.exe
+ checksums_path: ${path_platform}/${locale}/Firefox Installer.exe
+ target.installer.msi:
+ <<: *default
+ description: "Windows installer for MSI platform"
+ all_locales: true
+ from:
+ - repackage-signing-msi
+ only_for_platforms:
+ - win64-shippable
+ - win32-shippable
+ - win64-devedition
+ - win32-devedition
+ pretty_name: Firefox Setup ${version}.msi
+ checksums_path: ${path_platform}/${locale}/Firefox Setup ${version}.msi
+ target.installer.msix:
+ <<: *default
+ description: "Windows MSIX installer"
+ from:
+ - repackage-signing-shippable-l10n-msix
+ only_for_platforms:
+ - win64-shippable
+ - win32-shippable
+ - win64-devedition
+ - win32-devedition
+ locale_prefix: 'multi/'
+ pretty_name: Firefox Setup ${version}.msix
+ checksums_path: ${path_platform}/multi/Firefox Setup ${version}.msix
+ target.complete.mar:
+ <<: *default
+ description: "Complete MAR to serve as updates"
+ all_locales: true
+ from:
+ - mar-signing
+ pretty_name: firefox-${version}.complete.mar
+ checksums_path: update/${path_platform}/${locale}/firefox-${version}.complete.mar
+ update_balrog_manifest: true
+ destinations:
+ - ${version}-candidates/build${build_number}/update/${path_platform}
+ target.deb:
+ <<: *default
+ description: "Firefox as a .deb package"
+ only_for_platforms:
+ - linux-shippable
+ - linux64-shippable
+ - linux-devedition
+ - linux64-devedition
+ pretty_name: firefox-${version}.deb
+ checksums_path: ${path_platform}/${locale}/firefox-${version}.deb
+ from:
+ - repackage-deb
+ update_balrog_manifest: false
+ ${partial}:
+ <<: *default
+ description: "Partials MAR files to serve as updates"
+ all_locales: true
+ from:
+ - partials-signing
+ partials_only: true
+ pretty_name: firefox-${previous_version}-${version}.partial.mar
+ checksums_path: update/${path_platform}/${locale}/firefox-${previous_version}-${version}.partial.mar
+ update_balrog_manifest: true
+ from_buildid: ${from_buildid}
+ destinations:
+ - ${version}-candidates/build${build_number}/update/${path_platform}
diff --git a/taskcluster/gecko_taskgraph/manifests/firefox_candidates_checksums.yml b/taskcluster/gecko_taskgraph/manifests/firefox_candidates_checksums.yml
new file mode 100644
index 0000000000..43ba4cbf15
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/manifests/firefox_candidates_checksums.yml
@@ -0,0 +1,94 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+---
+# This file contains exhaustive information about all the release artifacts that
+# are needed within a type of release.
+#
+# Structure
+# --------
+# `s3_bucket_paths` -- prefix to be used per product to correctly access our S3 buckets
+# `default_locales` -- list of locales used when composing upstream artifacts or the list of
+#       destinations; if an artifact carries no locale, these locales are used instead.
+# `tasktype_map` -- mapping between task reference and task type, particularly useful when
+# composing the upstreamArtifacts for scriptworker.
+# `platform_names` -- various platform mappings used in reckoning artifacts or other paths
+# `default` -- a default entry, which the mappings extend and override in such a way that
+# final path full-destinations will be a concatenation of the following:
+# `s3_bucket_paths`, `destinations`, `locale_prefix`, `pretty_name`
+# `from` -- specifies the dependency(ies) from which to expect the particular artifact
+# `all_locales` -- boolean argument to specify whether that particular artifact is to be expected
+# for all locales or just the default one
+# `description` -- brief summary of what that artifact is
+# `locale_prefix` -- prefix to be used in the final destination paths, whether that's for default locale or not
+# `source_path_modifier` -- any parent dir that might be used in between artifact prefix and filename at source location
+# for example `public/build` vs `public/build/ach/`.
+# `destinations` -- final list of directories where to push the artifacts in S3
+# `pretty_name` -- the final name the artifact will have at destination
+# `checksums_path` -- the name to identify one artifact within the checksums file
+# `not_for_platforms` -- filtering option to avoid associating an artifact with a specific platform
+# `only_for_platforms` -- filtering option to exclusively include the association of an artifact for a specific platform
+# `partials_only` -- filtering option to avoid associating an artifact unless this flag is present
+# `update_balrog_manifest`-- flag needed downstream in beetmover jobs to reckon the balrog manifest
+# `from_buildid` -- flag needed downstream in beetmover jobs to reckon the balrog manifest
+
+s3_bucket_paths:
+ by-platform:
+ .*devedition.*:
+ - pub/devedition/candidates
+ default:
+ - pub/firefox/candidates
+default_locales:
+ - en-US
+tasktype_map:
+ beetmover-repackage: beetmover
+ release-beetmover-signed-langpacks: signing
+platform_names:
+ path_platform:
+ by-platform:
+ linux-shippable: 'linux-i686'
+ linux-devedition: 'linux-i686'
+ linux64-shippable: 'linux-x86_64'
+ linux64-devedition: 'linux-x86_64'
+ linux64-asan-reporter-shippable: 'linux-x86_64-asan-reporter'
+ macosx64-shippable: 'mac'
+ macosx64-devedition: 'mac'
+ win32-shippable: 'win32'
+ win32-devedition: 'win32'
+ win64-shippable: 'win64'
+ win64-devedition: 'win64'
+ win64-aarch64-shippable: 'win64-aarch64'
+ win64-aarch64-devedition: 'win64-aarch64'
+ win64-asan-reporter-shippable: 'win64-asan-reporter'
+ linux: 'linux-i686'
+ linux64: 'linux-x86_64'
+ macosx64: 'mac'
+ win32: 'win32'
+ win64: 'win64'
+
+default: &default
+ from:
+ - beetmover-repackage
+ all_locales: true
+ description: "TO_BE_OVERRIDDEN"
+ locale_prefix: '${locale}/'
+ source_path_modifier: ''
+ destinations:
+ - ${version}-candidates/build${build_number}/beetmover-checksums/${path_platform}
+
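+# Example full destination with hypothetical values:
+# pub/firefox/candidates/124.0-candidates/build1/beetmover-checksums/linux-x86_64/en-US/firefox-124.0.checksums.beet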
+mapping:
+ target.checksums:
+ <<: *default
+ description: "Checksums file containing size, hash, sha algorithm and filename"
+ pretty_name: firefox-${version}.checksums.beet
+ checksums_path: beetmover-checksums/${path_platform}/${locale}/firefox-${version}.checksums.beet
+ target-langpack.checksums:
+ <<: *default
+ description: "Checksums file containing size, hash, sha algorithm and filename for the langpack"
+ locale_prefix: ''
+ from:
+ - release-beetmover-signed-langpacks
+ pretty_name: ${locale}.checksums.beet
+ checksums_path: beetmover-checksums/${path_platform}/xpi/${locale}.checksums.beet
+ destinations:
+ - ${version}-candidates/build${build_number}/beetmover-checksums/${path_platform}/xpi
diff --git a/taskcluster/gecko_taskgraph/manifests/firefox_nightly.yml b/taskcluster/gecko_taskgraph/manifests/firefox_nightly.yml
new file mode 100644
index 0000000000..d413ede3bd
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/manifests/firefox_nightly.yml
@@ -0,0 +1,523 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+---
+# This file contains exhaustive information about all the release artifacts that
+# are needed within a type of release.
+#
+# Structure
+# --------
+# `s3_bucket_paths` -- prefix to be used per product to correctly access our S3 buckets
+# `default_locales` -- list of locales used when composing upstream artifacts or the list of
+#       destinations; if an artifact carries no locale, these locales are used instead.
+# `tasktype_map` -- mapping between task reference and task type, particularly useful when
+# composing the upstreamArtifacts for scriptworker.
+# `platform_names` -- various platform mappings used in reckoning artifacts or other paths
+# `default` -- a default entry, which the mappings extend and override in such a way that
+# final path full-destinations will be a concatenation of the following:
+# `s3_bucket_paths`, `destinations`, `locale_prefix`, `pretty_name`
+# `from` -- specifies the dependency(ies) from which to expect the particular artifact
+# `all_locales` -- boolean argument to specify whether that particular artifact is to be expected
+# for all locales or just the default one
+# `description` -- brief summary of what that artifact is
+# `locale_prefix` -- prefix to be used in the final destination paths, whether that's for default locale or not
+# `source_path_modifier` -- any parent dir that might be used in between artifact prefix and filename at source location
+# for example `public/build` vs `public/build/ach/`.
+# `destinations` -- final list of directories where to push the artifacts in S3
+# `pretty_name` -- the final name the artifact will have at destination
+# `checksums_path` -- the name to identify one artifact within the checksums file
+# `not_for_platforms` -- filtering option to avoid associating an artifact with a specific platform
+# `only_for_platforms` -- filtering option to exclusively include the association of an artifact for a specific platform
+# `partials_only` -- filtering option to avoid associating an artifact unless this flag is present
+# `update_balrog_manifest`-- flag needed downstream in beetmover jobs to reckon the balrog manifest
+# `from_buildid` -- flag needed downstream in beetmover jobs to reckon the balrog manifest
+
+s3_bucket_paths:
+ - pub/firefox/nightly
+default_locales:
+ - en-US
+tasktype_map:
+ build: build
+ signing: signing
+ mar-signing: signing
+ partials-signing: signing
+ repackage: repackage
+ repackage-deb: repackage
+ repackage-deb-l10n: repackage
+ repackage-signing: repackage
+ repackage-signing-msi: repackage
+ repackage-signing-shippable-l10n-msix: signing
+ langpack-copy: signing
+ attribution: build
+ attribution-l10n: build
+platform_names:
+ filename_platform:
+ by-platform:
+ linux-shippable: 'linux-i686'
+ linux-devedition: 'linux-i686'
+ linux64-shippable: 'linux-x86_64'
+ linux64-devedition: 'linux-x86_64'
+ linux64-asan-reporter-shippable: 'linux-x86_64-asan-reporter'
+ macosx64-shippable: 'mac'
+ macosx64-devedition: 'mac'
+ win32-shippable: 'win32'
+ win32-devedition: 'win32'
+ win64-shippable: 'win64'
+ win64-devedition: 'win64'
+ win64-aarch64-shippable: 'win64-aarch64'
+ win64-aarch64-devedition: 'win64-aarch64'
+ win64-asan-reporter-shippable: 'win64-asan-reporter'
+ stage_platform:
+ by-platform:
+ linux-shippable: 'linux'
+ linux-devedition: 'linux'
+ linux64-asan-reporter-shippable: 'linux64-asan-reporter'
+ linux64-shippable: 'linux64'
+ linux64-devedition: 'linux64'
+ macosx64-shippable: 'macosx64'
+ macosx64-devedition: 'macosx64'
+ win32-shippable: 'win32'
+ win32-devedition: 'win32'
+ win64-shippable: 'win64'
+ win64-devedition: 'win64'
+ win64-aarch64-shippable: 'win64-aarch64'
+ win64-aarch64-devedition: 'win64-aarch64'
+ win64-asan-reporter-shippable: 'win64-asan-reporter'
+
+default: &default
+ from:
+ - build
+ all_locales: false
+ description: "TO_BE_OVERRIDDEN"
+ locale_prefix: ''
+ source_path_modifier:
+ by-locale:
+ default: '${locale}'
+ en-US: ''
+ destinations:
+ by-locale:
+ en-US:
+ - ${year}/${month}/${upload_date}-${branch}
+ - latest-${branch}
+ default:
+ - ${year}/${month}/${upload_date}-${branch}-l10n
+ - latest-${branch}-l10n
+
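+# Example en-US destination with hypothetical values:
+# pub/firefox/nightly/2024/04/2024-04-19-09-43-21-mozilla-central/firefox-125.0a1.en-US.linux-x86_64.buildhub.json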
+mapping:
+ buildhub.json:
+ <<: *default
+ description: "Build related information to be consumed by Buildhub service"
+ pretty_name: firefox-${version}.${locale}.${filename_platform}.buildhub.json
+ checksums_path: firefox-${version}.${locale}.${filename_platform}.buildhub.json
+ destinations:
+ by-locale:
+ en-US:
+ - ${year}/${month}/${upload_date}-${branch}
+ - latest-${branch}
+ - latest-${branch}-l10n
+ default:
+ - ${year}/${month}/${upload_date}-${branch}-l10n
+ - latest-${branch}-l10n
+ KEY:
+ <<: *default
+ from:
+ - signing
+ description: "Public GPG Key"
+ pretty_name: KEY
+ checksums_path: KEY
+ only_for_platforms:
+ - linux64-shippable
+ destinations:
+ - ${year}/${month}/${upload_date}-${branch}
+ - latest-${branch}
+ target.common.tests.tar.gz:
+ <<: *default
+ description: "Mixture of reftests, mochitests, UI and others, commonly bundled together in a test suite"
+ pretty_name: firefox-${version}.${locale}.${filename_platform}.common.tests.tar.gz
+ checksums_path: firefox-${version}.${locale}.${filename_platform}.common.tests.tar.gz
+ target.cppunittest.tests.tar.gz:
+ <<: *default
+ description: "C++ unittests related in-tree test infrastructure"
+ pretty_name: firefox-${version}.${locale}.${filename_platform}.cppunittest.tests.tar.gz
+ checksums_path: firefox-${version}.${locale}.${filename_platform}.cppunittest.tests.tar.gz
+ target.crashreporter-symbols.zip:
+ <<: *default
+ description: "Crashreporter symbols to be consumed by Socorro"
+ pretty_name: firefox-${version}.${locale}.${filename_platform}.crashreporter-symbols.zip
+ checksums_path: firefox-${version}.${locale}.${filename_platform}.crashreporter-symbols.zip
+ not_for_platforms:
+ - linux64-asan-reporter-shippable
+ - win64-asan-reporter-shippable
+ target.json:
+ <<: *default
+ description: "Various compile and moz_app flags baked together in a json file"
+ pretty_name: firefox-${version}.${locale}.${filename_platform}.json
+ checksums_path: firefox-${version}.${locale}.${filename_platform}.json
+ target.mochitest.tests.tar.gz:
+ <<: *default
+ description: "Results for running the mochitest testing framework via Javascript function calls"
+ pretty_name: firefox-${version}.${locale}.${filename_platform}.mochitest.tests.tar.gz
+ checksums_path: firefox-${version}.${locale}.${filename_platform}.mochitest.tests.tar.gz
+ target.mozinfo.json:
+ <<: *default
+ description: "Various compile and moz_app flags baked together in a json file"
+ pretty_name: firefox-${version}.${locale}.${filename_platform}.mozinfo.json
+ checksums_path: firefox-${version}.${locale}.${filename_platform}.mozinfo.json
+ target.reftest.tests.tar.gz:
+ <<: *default
+ description: "Results for running the reftest testing framework via display of two Web pages comparison"
+ pretty_name: firefox-${version}.${locale}.${filename_platform}.reftest.tests.tar.gz
+ checksums_path: firefox-${version}.${locale}.${filename_platform}.reftest.tests.tar.gz
+ target.talos.tests.tar.gz:
+ <<: *default
+ description: "Results for running the talos testing framework to measure performance"
+ pretty_name: firefox-${version}.${locale}.${filename_platform}.talos.tests.tar.gz
+ checksums_path: firefox-${version}.${locale}.${filename_platform}.talos.tests.tar.gz
+ target.awsy.tests.tar.gz:
+ <<: *default
+ description: "Results for running the awsy testing framework to track memory usage"
+ pretty_name: firefox-${version}.${locale}.${filename_platform}.awsy.tests.tar.gz
+ checksums_path: firefox-${version}.${locale}.${filename_platform}.awsy.tests.tar.gz
+ target.test_packages.json:
+ <<: *default
+ description: "File containing metadata about all other files and testing harnesses specifics"
+ pretty_name: firefox-${version}.${locale}.${filename_platform}.test_packages.json
+ checksums_path: firefox-${version}.${locale}.${filename_platform}.test_packages.json
+ target.txt:
+ <<: *default
+ description: "File containing buildid and revision"
+ pretty_name: firefox-${version}.${locale}.${filename_platform}.txt
+ checksums_path: firefox-${version}.${locale}.${filename_platform}.txt
+ target.web-platform.tests.tar.gz:
+ <<: *default
+ description: "Results for running the webplatform testing framework to cover standard Web platform features"
+ pretty_name: firefox-${version}.${locale}.${filename_platform}.web-platform.tests.tar.gz
+ checksums_path: firefox-${version}.${locale}.${filename_platform}.web-platform.tests.tar.gz
+ target.xpcshell.tests.tar.gz:
+ <<: *default
+ description: "Results for running the xpcshell testing framework to enable XPConnect console application"
+ pretty_name: firefox-${version}.${locale}.${filename_platform}.xpcshell.tests.tar.gz
+ checksums_path: firefox-${version}.${locale}.${filename_platform}.xpcshell.tests.tar.gz
+ target_info.txt:
+ <<: *default
+ description: "File containing the buildID"
+ pretty_name: firefox-${version}.${locale}.${filename_platform}_info.txt
+ checksums_path: firefox-${version}.${locale}.${filename_platform}_info.txt
+ mozharness.zip:
+ <<: *default
+ description: "File containing the mozharness set of scripts and configuration used by various automation tools"
+ pretty_name: mozharness.zip
+ checksums_path: mozharness.zip
+ target.jsshell.zip:
+ <<: *default
+ description: "Set of shells to allow test snippets of Javascript code without needing to reload the page"
+ pretty_name: jsshell-${filename_platform}.zip
+ checksums_path: jsshell-${filename_platform}.zip
+ not_for_platforms:
+ - linux64-asan-reporter-shippable
+ - win64-asan-reporter-shippable
+ target.langpack.xpi:
+ <<: *default
+ all_locales: true
+ description: "Localized repack that grabs a packaged en-US Firefox and repackages it as locale-specific Firefox"
+ from:
+ - langpack-copy
+ - signing
+ only_for_platforms:
+ - linux-shippable
+ - linux64-shippable
+ - macosx64-shippable
+ - win64-shippable
+ - win32-shippable
+ - win64-aarch64-shippable
+ - win64-asan-reporter-shippable
+ - linux64-asan-reporter-shippable
+ pretty_name: firefox-${version}.${locale}.langpack.xpi
+ checksums_path: firefox-${version}.${locale}.langpack.xpi
+ destinations:
+ by-locale:
+ en-US:
+ - ${year}/${month}/${upload_date}-${branch}
+ - latest-${branch}
+ default:
+ - ${year}/${month}/${upload_date}-${branch}-l10n/${filename_platform}/xpi
+ - latest-${branch}-l10n/${filename_platform}/xpi
+ target.langpack.deb:
+ <<: *default
+ all_locales: true
+ description: "langpack.xpi repackaged as a .deb"
+ from:
+ - repackage-deb-l10n
+ only_for_platforms:
+ - linux-shippable
+ - linux64-shippable
+ - linux-devedition
+ - linux64-devedition
+ pretty_name: firefox-${version}.${locale}.langpack.deb
+ checksums_path: firefox-${version}.${locale}.langpack.deb
+ destinations:
+ by-locale:
+ en-US:
+ - ${year}/${month}/${upload_date}-${branch}
+ - latest-${branch}
+ default:
+ - ${year}/${month}/${upload_date}-${branch}-l10n/${filename_platform}/deb-l10n
+ - latest-${branch}-l10n/${filename_platform}/deb-l10n
+ mar:
+ <<: *default
+ description: "Alongside `mbsdiff`, a tool used to generate partials"
+ source_path_modifier: 'host/bin'
+ pretty_name: mar
+ checksums_path: mar
+ not_for_platforms:
+ - win32-shippable
+ - win64-shippable
+ - win64-aarch64-shippable
+ - win64-asan-reporter-shippable
+ destinations:
+ - ${year}/${month}/${upload_date}-${branch}/mar-tools/${stage_platform}
+ - latest-${branch}/mar-tools/${stage_platform}
+ mbsdiff:
+ <<: *default
+ description: "Alongside `mar`, a tool used to generate partials"
+ source_path_modifier: 'host/bin'
+ pretty_name: mbsdiff
+ checksums_path: mbsdiff
+ not_for_platforms:
+ - win32-shippable
+ - win64-shippable
+ - win64-aarch64-shippable
+ - win64-asan-reporter-shippable
+ destinations:
+ - ${year}/${month}/${upload_date}-${branch}/mar-tools/${stage_platform}
+ - latest-${branch}/mar-tools/${stage_platform}
+ target.tar.bz2:
+ <<: *default
+ description: "Main installer for Linux platforms"
+ all_locales: true
+ from:
+ - signing
+ only_for_platforms:
+ - linux-shippable
+ - linux64-shippable
+ - linux64-asan-reporter-shippable
+ pretty_name: firefox-${version}.${locale}.${filename_platform}.tar.bz2
+ checksums_path: firefox-${version}.${locale}.${filename_platform}.tar.bz2
+ destinations:
+ by-locale:
+ en-US:
+ - ${year}/${month}/${upload_date}-${branch}
+ - latest-${branch}
+ - latest-${branch}-l10n
+ default:
+ - ${year}/${month}/${upload_date}-${branch}-l10n
+ - latest-${branch}-l10n
+ target.tar.bz2.asc:
+ <<: *default
+ description: "Detached signature for the checksums file"
+ all_locales: true
+ from:
+ - signing
+ only_for_platforms:
+ - linux-shippable
+ - linux64-shippable
+ - linux64-asan-reporter-shippable
+ pretty_name: firefox-${version}.${locale}.${filename_platform}.tar.bz2.asc
+ checksums_path: firefox-${version}.${locale}.${filename_platform}.tar.bz2.asc
+ destinations:
+ by-locale:
+ en-US:
+ - ${year}/${month}/${upload_date}-${branch}
+ - latest-${branch}
+ - latest-${branch}-l10n
+ default:
+ - ${year}/${month}/${upload_date}-${branch}-l10n
+ - latest-${branch}-l10n
+ target.pkg:
+ <<: *default
+ description: "Main package installer for Mac OS X platforms"
+ all_locales: true
+ from:
+ - signing
+ only_for_platforms:
+ - macosx64-shippable
+ pretty_name: firefox-${version}.${locale}.${filename_platform}.pkg
+ checksums_path: firefox-${version}.${locale}.${filename_platform}.pkg
+ destinations:
+ by-locale:
+ en-US:
+ - ${year}/${month}/${upload_date}-${branch}
+ - latest-${branch}
+ - latest-${branch}-l10n
+ default:
+ - ${year}/${month}/${upload_date}-${branch}-l10n
+ - latest-${branch}-l10n
+ target.dmg:
+ <<: *default
+ description: "Main package disk image for Mac OS X platforms"
+ all_locales: true
+ # Attribution-l10n jobs don't have locale in the artifact path
+ source_path_modifier: ""
+ from:
+ - attribution
+ - attribution-l10n
+ only_for_platforms:
+ - macosx64-shippable
+ pretty_name: firefox-${version}.${locale}.${filename_platform}.dmg
+ checksums_path: firefox-${version}.${locale}.${filename_platform}.dmg
+ destinations:
+ by-locale:
+ en-US:
+ - ${year}/${month}/${upload_date}-${branch}
+ - latest-${branch}
+ - latest-${branch}-l10n
+ default:
+ - ${year}/${month}/${upload_date}-${branch}-l10n
+ - latest-${branch}-l10n
+ target.zip:
+ <<: *default
+ description: "Main package installer for Windows platforms"
+ all_locales: true
+ from:
+ - signing
+ only_for_platforms:
+ - win64-shippable
+ - win32-shippable
+ - win64-aarch64-shippable
+ - win64-asan-reporter-shippable
+ pretty_name: firefox-${version}.${locale}.${filename_platform}.zip
+ checksums_path: firefox-${version}.${locale}.${filename_platform}.zip
+ target.installer.exe:
+ <<: *default
+ description: "Main installer for Windows platforms"
+ all_locales: true
+ source_path_modifier: ''
+ from:
+ by-platform:
+ win64-asan-reporter-shippable:
+ - repackage-signing
+ default:
+ - attribution
+ - attribution-l10n
+ only_for_platforms:
+ - win64-shippable
+ - win32-shippable
+ - win64-aarch64-shippable
+ - win64-asan-reporter-shippable
+ pretty_name: firefox-${version}.${locale}.${filename_platform}.installer.exe
+ checksums_path: firefox-${version}.${locale}.${filename_platform}.installer.exe
+ destinations:
+ by-locale:
+ en-US:
+ - ${year}/${month}/${upload_date}-${branch}
+ - latest-${branch}
+ - latest-${branch}-l10n
+ default:
+ - ${year}/${month}/${upload_date}-${branch}-l10n
+ - latest-${branch}-l10n
+ target.stub-installer.exe:
+ <<: *default
+ description: "Stub installer for Win32 platforms"
+ all_locales: true
+ source_path_modifier: ''
+ from:
+ - attribution
+ - attribution-l10n
+ only_for_platforms:
+ - win32-shippable
+ pretty_name: Firefox Installer.${locale}.exe
+ checksums_path: Firefox Installer.${locale}.exe
+ destinations:
+ by-locale:
+ en-US:
+ - ${year}/${month}/${upload_date}-${branch}
+ - latest-${branch}
+ - latest-${branch}-l10n
+ default:
+ - ${year}/${month}/${upload_date}-${branch}-l10n
+ - latest-${branch}-l10n
+ target.installer.msi:
+ <<: *default
+ description: "Windows installer for MSI platform"
+ all_locales: true
+ from:
+ - repackage-signing-msi
+ only_for_platforms:
+ - win64-shippable
+ - win32-shippable
+ pretty_name: firefox-${version}.${locale}.${filename_platform}.installer.msi
+ checksums_path: firefox-${version}.${locale}.${filename_platform}.installer.msi
+ destinations:
+ by-locale:
+ en-US:
+ - ${year}/${month}/${upload_date}-${branch}
+ - latest-${branch}
+ - latest-${branch}-l10n
+ default:
+ - ${year}/${month}/${upload_date}-${branch}-l10n
+ - latest-${branch}-l10n
+ target.installer.msix:
+ <<: *default
+ description: "Windows MSIX installer"
+ all_locales: true
+ from:
+ - repackage-signing-shippable-l10n-msix
+ only_for_platforms:
+ - win64-shippable
+ - win32-shippable
+ pretty_name: firefox-${version}.multi.${filename_platform}.installer.msix
+ checksums_path: firefox-${version}.multi.${filename_platform}.installer.msix
+ destinations:
+ - ${year}/${month}/${upload_date}-${branch}
+ - latest-${branch}
+ - latest-${branch}-l10n
+ target.complete.mar:
+ <<: *default
+ description: "The main installer we ship our products baked within"
+ all_locales: true
+ from:
+ - mar-signing
+ pretty_name: firefox-${version}.${locale}.${filename_platform}.complete.mar
+ checksums_path: firefox-${version}.${locale}.${filename_platform}.complete.mar
+ update_balrog_manifest: true
+ destinations:
+ by-locale:
+ en-US:
+ - ${year}/${month}/${upload_date}-${branch}
+ - latest-${branch}
+ - latest-${branch}-l10n
+ default:
+ - ${year}/${month}/${upload_date}-${branch}-l10n
+ - latest-${branch}-l10n
+ target.deb:
+ <<: *default
+ description: "Firefox as a .deb package"
+ from:
+ - repackage-deb
+ only_for_platforms:
+ - linux-shippable
+ - linux64-shippable
+ pretty_name: firefox-${version}.${locale}.${filename_platform}.deb
+ checksums_path: firefox-${version}.${locale}.${filename_platform}.deb
+ update_balrog_manifest: false
+ destinations:
+ - ${year}/${month}/${upload_date}-${branch}
+ - latest-${branch}
+ ${partial}:
+ <<: *default
+ description: "Partials MAR files to serve as updates"
+ all_locales: true
+ from:
+ - partials-signing
+ partials_only: true
+ pretty_name: firefox-${branch}-${version}-${filename_platform}-${locale}-${from_buildid}-${buildid}.partial.mar
+ checksums_path: firefox-${branch}-${version}-${filename_platform}-${locale}-${from_buildid}-${buildid}.partial.mar
+ update_balrog_manifest: true
+ from_buildid: ${from_buildid}
+ destinations:
+ by-locale:
+ en-US:
+ - partials/${year}/${month}/${upload_date}-${branch}
+ default:
+ - partials/${year}/${month}/${upload_date}-${branch}-l10n
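+# For example (illustrative values, not taken from a real run): a partial
+# built on mozilla-central could end up named
+# firefox-mozilla-central-125.0a1-linux-x86_64-en-US-20240301000000-20240302000000.partial.mar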
diff --git a/taskcluster/gecko_taskgraph/manifests/firefox_nightly_checksums.yml b/taskcluster/gecko_taskgraph/manifests/firefox_nightly_checksums.yml
new file mode 100644
index 0000000000..f1b81572ab
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/manifests/firefox_nightly_checksums.yml
@@ -0,0 +1,59 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+---
+s3_bucket_paths:
+ by-platform:
+ .*devedition.*:
+ - pub/devedition/nightly
+ default:
+ - pub/firefox/nightly
+default_locales: # if given an empty locale, use these locales
+ - en-US
+tasktype_map: # Map task reference to task type.
+ beetmover-repackage: beetmover
+platform_names:
+ filename_platform:
+ by-platform:
+ linux-shippable: 'linux-i686'
+ linux-devedition: 'linux-i686'
+ linux64-shippable: 'linux-x86_64'
+ linux64-devedition: 'linux-x86_64'
+ linux64-asan-reporter-shippable: 'linux-x86_64-asan-reporter'
+ macosx64-shippable: 'mac'
+ macosx64-devedition: 'mac'
+ win32-shippable: 'win32'
+ win32-devedition: 'win32'
+ win64-shippable: 'win64'
+ win64-devedition: 'win64'
+ win64-aarch64-shippable: 'win64-aarch64'
+ win64-aarch64-devedition: 'win64-aarch64'
+ win64-asan-reporter-shippable: 'win64-asan-reporter'
+
+# A default entry, which the mappings below extend and override.
+# Final 'destinations' will be the product of:
+# s3_bucket_paths + destinations + locale_prefix + pretty_name
+default: &default
+ from:
+ - beetmover-repackage
+ all_locales: true
+ description: "TO_BE_OVERRIDDEN"
+ locale_prefix: ''
+ source_path_modifier: ''
+ destinations: # locale_prefix is appended
+ by-locale:
+ en-US:
+ - ${year}/${month}/${upload_date}-${branch}
+ - latest-${branch}
+ - latest-${branch}-l10n
+ default:
+ - ${year}/${month}/${upload_date}-${branch}-l10n
+ - latest-${branch}-l10n
+
+# Configuration for individual files. Extends 'default', above.
+mapping:
+ target.checksums:
+ <<: *default
+ description: "Checksums file containing size, hash, sha algorithm and filename"
+ pretty_name: firefox-${version}.${locale}.${filename_platform}.checksums
+ checksums_path: firefox-${version}.${locale}.${filename_platform}.checksums
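+# For example (illustrative values): for en-US on linux64-shippable, combining
+# s3_bucket_paths, destinations, locale_prefix and pretty_name yields a final
+# destination such as
+# pub/firefox/nightly/2024/03/2024-03-01-09-03-31-mozilla-central/firefox-125.0a1.en-US.linux-x86_64.checksums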
diff --git a/taskcluster/gecko_taskgraph/manifests/release_checksums.yml b/taskcluster/gecko_taskgraph/manifests/release_checksums.yml
new file mode 100644
index 0000000000..c11e339958
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/manifests/release_checksums.yml
@@ -0,0 +1,70 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+---
+s3_bucket_paths:
+ by-platform:
+ devedition-release:
+ - pub/devedition/candidates
+ firefox-release:
+ - pub/firefox/candidates
+default_locales: # if given an empty locale, use these locales
+ - en-US
+tasktype_map: # Map task reference to task type.
+ release-generate-checksums: build
+ release-generate-checksums-signing: signing
+
+# A default entry, which the mappings below extend and override.
+# Final 'destinations' will be the product of:
+# s3_bucket_paths + destinations + locale_prefix + pretty_name
+default: &default
+ from:
+ - release-generate-checksums-signing
+ all_locales: true
+ description: "TO_BE_OVERRIDDEN"
+ locale_prefix: ''
+ source_path_modifier: ''
+ destinations: # locale_prefix is appended
+ - ${version}-candidates/build${build_number}
+
+# Configuration for individual files. Extends 'default', above.
+mapping:
+ SHA256SUMMARY:
+ <<: *default
+ description: "Merkle-tree for the release artifacts with sha 256 hashes"
+ from:
+ - release-generate-checksums
+ pretty_name: SHA256SUMMARY
+ checksums_path: SHA256SUMMARY
+ SHA512SUMMARY:
+ <<: *default
+ description: "Merkle-tree for the release artifacts with sha 512 hashes"
+ from:
+ - release-generate-checksums
+ pretty_name: SHA512SUMMARY
+ checksums_path: SHA512SUMMARY
+ KEY:
+ <<: *default
+ description: "Public side of the key that was used to sign the release artifacts"
+ pretty_name: KEY
+ checksums_path: KEY
+ SHA256SUMS:
+ <<: *default
+ description: "Aggregated checksums with main installers details per platform in sha512 hashes"
+ pretty_name: SHA256SUMS
+ checksums_path: SHA256SUMS
+ SHA256SUMS.asc:
+ <<: *default
+ description: "Detached signature for the checksums file"
+ pretty_name: SHA256SUMS.asc
+ checksums_path: SHA256SUMS.asc
+ SHA512SUMS:
+ <<: *default
+ description: "Aggregated checksums with main installers details per platform in sha256 hashes"
+ pretty_name: SHA512SUMS
+ checksums_path: SHA512SUMS
+ SHA512SUMS.asc:
+ <<: *default
+ description: "Detached signature for the checksums file"
+ pretty_name: SHA512SUMS.asc
+ checksums_path: SHA512SUMS.asc
diff --git a/taskcluster/gecko_taskgraph/manifests/source_checksums.yml b/taskcluster/gecko_taskgraph/manifests/source_checksums.yml
new file mode 100644
index 0000000000..0789bcfa93
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/manifests/source_checksums.yml
@@ -0,0 +1,52 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+---
+s3_bucket_paths:
+ by-platform:
+ devedition-source:
+ - pub/devedition/candidates
+ firefox-source:
+ - pub/firefox/candidates
+default_locales: # if given an empty locale, use these locales
+ - en-US
+tasktype_map: # Map task reference to task type.
+ release-source-checksums-signing: signing
+
+# A default entry, which the mappings below extend and override.
+# Final 'destinations' will be the product of:
+# s3_bucket_paths + destinations + locale_prefix + pretty_name
+default: &default
+ from:
+ - release-source-checksums-signing
+ all_locales: false
+ description: "TO_BE_OVERRIDDEN"
+ locale_prefix: ''
+ source_path_modifier: ''
+ destinations: # locale_prefix is appended
+ - ${version}-candidates/build${build_number}/beetmover-checksums/source
+
+# Configuration for individual files. Extends 'default', above.
+mapping:
+ target-source.checksums:
+ <<: *default
+ description: "Checksums file for the source zip files"
+ pretty_name:
+ by-platform:
+ firefox-source: firefox-${version}.checksums.beet
+ devedition-source: firefox-${version}.checksums.beet
+ checksums_path:
+ by-platform:
+ firefox-source: firefox-${version}.checksums.beet
+ devedition-source: firefox-${version}.checksums.beet
+ target-source.checksums.asc:
+ <<: *default
+ description: "Detached signature for the checksums file"
+ pretty_name:
+ by-platform:
+ firefox-source: firefox-${version}.checksums.asc
+ devedition-source: firefox-${version}.checksums.asc
+ checksums_path:
+ by-platform:
+ firefox-source: firefox-${version}.checksums.asc
+ devedition-source: firefox-${version}.checksums.asc
diff --git a/taskcluster/gecko_taskgraph/manifests/source_files.yml b/taskcluster/gecko_taskgraph/manifests/source_files.yml
new file mode 100644
index 0000000000..0f5f5b5250
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/manifests/source_files.yml
@@ -0,0 +1,52 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+---
+s3_bucket_paths:
+ by-platform:
+ devedition-source:
+ - pub/devedition/candidates
+ firefox-source:
+ - pub/firefox/candidates
+default_locales: # if given an empty locale, use these locales
+ - en-US
+tasktype_map: # Map task reference to task type.
+ release-source-signing: signing
+
+# A default entry, which the mappings below extend and override.
+# Final 'destinations' will be the product of:
+# s3_bucket_paths + destinations + locale_prefix + pretty_name
+default: &default
+ from:
+ - release-source-signing
+ all_locales: false
+ description: "TO_BE_OVERRIDDEN"
+ locale_prefix: ''
+ source_path_modifier: ''
+ destinations: # locale_prefix is appended
+ - ${version}-candidates/build${build_number}/source
+
+# Configuration for individual files. Extends 'default', above.
+mapping:
+ source.tar.xz:
+ <<: *default
+ description: "Source file with the in-tree code archived"
+ pretty_name:
+ by-platform:
+ firefox-source: firefox-${version}.source.tar.xz
+ devedition-source: firefox-${version}.source.tar.xz
+ checksums_path:
+ by-platform:
+ firefox-source: source/firefox-${version}.source.tar.xz
+ devedition-source: source/firefox-${version}.source.tar.xz
+ source.tar.xz.asc:
+ <<: *default
+ description: "Detached signature for the source file"
+ pretty_name:
+ by-platform:
+ firefox-source: firefox-${version}.source.tar.xz.asc
+ devedition-source: firefox-${version}.source.tar.xz.asc
+ checksums_path:
+ by-platform:
+ firefox-source: source/firefox-${version}.source.tar.xz.asc
+ devedition-source: source/firefox-${version}.source.tar.xz.asc
diff --git a/taskcluster/gecko_taskgraph/morph.py b/taskcluster/gecko_taskgraph/morph.py
new file mode 100644
index 0000000000..1d03ddaab6
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/morph.py
@@ -0,0 +1,263 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+Graph morphs are modifications to task-graphs that take place *after* the
+optimization phase.
+
+These graph morphs are largely invisible to developers running `./mach`
+locally, so they should be limited to changes that do not modify the meaning of
+the graph.
+"""
+
+# Note that the translation of `{'task-reference': '..'}` and
+# `artifact-reference` are handled in the optimization phase (since
+# optimization involves dealing with taskIds directly). Similarly,
+# `{'relative-datestamp': '..'}` is handled at the last possible moment during
+# task creation.
+
+
+import copy
+import logging
+import os
+import re
+
+from slugid import nice as slugid
+from taskgraph.graph import Graph
+from taskgraph.morph import register_morph
+from taskgraph.task import Task
+from taskgraph.taskgraph import TaskGraph
+
+from .util.workertypes import get_worker_type
+
+here = os.path.abspath(os.path.dirname(__file__))
+logger = logging.getLogger(__name__)
+MAX_ROUTES = 10
+
+
+def amend_taskgraph(taskgraph, label_to_taskid, to_add):
+ """Add the given tasks to the taskgraph, returning a new taskgraph"""
+ new_tasks = taskgraph.tasks.copy()
+ new_edges = set(taskgraph.graph.edges)
+ for task in to_add:
+ new_tasks[task.task_id] = task
+ assert task.label not in label_to_taskid
+ label_to_taskid[task.label] = task.task_id
+ for depname, dep in task.dependencies.items():
+ new_edges.add((task.task_id, dep, depname))
+
+ taskgraph = TaskGraph(new_tasks, Graph(set(new_tasks), new_edges))
+ return taskgraph, label_to_taskid
+
+
+def derive_misc_task(
+ target_task,
+ purpose,
+ image,
+ taskgraph,
+ label_to_taskid,
+ parameters,
+ graph_config,
+ dependencies,
+):
+ """Create the shell of a task that depends on `dependencies` and on the given docker
+ image."""
+ label = f"{purpose}-{target_task.label}"
+
+ # this is why all docker image tasks are included in the target task graph: we
+ # need to find them in label_to_taskid, even if nothing else required them
+ image_taskid = label_to_taskid["docker-image-" + image]
+
+ provisioner_id, worker_type = get_worker_type(
+ graph_config,
+ parameters,
+ "misc",
+ )
+
+ deps = copy.copy(dependencies)
+ deps["docker-image"] = image_taskid
+
+ task_def = {
+ "provisionerId": provisioner_id,
+ "workerType": worker_type,
+ "dependencies": [d for d in deps.values()],
+ "created": {"relative-datestamp": "0 seconds"},
+ "deadline": target_task.task["deadline"],
+ # no point existing past the parent task's deadline
+ "expires": target_task.task["deadline"],
+ "metadata": {
+ "name": label,
+ "description": f"{purpose} for {target_task.description}",
+ "owner": target_task.task["metadata"]["owner"],
+ "source": target_task.task["metadata"]["source"],
+ },
+ "scopes": [],
+ "payload": {
+ "image": {
+ "path": "public/image.tar.zst",
+ "taskId": image_taskid,
+ "type": "task-image",
+ },
+ "features": {"taskclusterProxy": True},
+ "maxRunTime": 600,
+ },
+ }
+
+ if image_taskid not in taskgraph.tasks:
+        # The task above depends on the replaced docker-image, not one in
+        # the current graph.
+ del deps["docker-image"]
+
+ task = Task(
+ kind="misc",
+ label=label,
+ attributes={},
+ task=task_def,
+ dependencies=deps,
+ )
+ task.task_id = slugid()
+ return task
+
+
+# these regular expressions capture route prefixes for which we have a star
+# scope, allowing them to be summarized. Each should correspond to a star scope
+# in each Gecko `assume:repo:hg.mozilla.org/...` role.
+SCOPE_SUMMARY_REGEXPS = [
+ re.compile(r"(index:insert-task:docker\.images\.v1\.[^.]*\.).*"),
+ re.compile(r"(index:insert-task:gecko\.v2\.[^.]*\.).*"),
+ re.compile(r"(index:insert-task:comm\.v2\.[^.]*\.).*"),
+]
+
+
+def make_index_task(
+ parent_task,
+ taskgraph,
+ label_to_taskid,
+ parameters,
+ graph_config,
+ index_paths,
+ index_rank,
+ purpose,
+ dependencies,
+):
+ task = derive_misc_task(
+ parent_task,
+ purpose,
+ "index-task",
+ taskgraph,
+ label_to_taskid,
+ parameters,
+ graph_config,
+ dependencies,
+ )
+
+ # we need to "summarize" the scopes, otherwise a particularly
+ # namespace-heavy index task might have more scopes than can fit in a
+ # temporary credential.
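+    # For example (hypothetical path), the scope
+    #   index:insert-task:gecko.v2.mozilla-central.latest.firefox.linux64-opt
+    # is summarized by the second regexp above to
+    #   index:insert-task:gecko.v2.mozilla-central.*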
+ scopes = set()
+ for path in index_paths:
+ scope = f"index:insert-task:{path}"
+ for summ_re in SCOPE_SUMMARY_REGEXPS:
+ match = summ_re.match(scope)
+ if match:
+ scope = match.group(1) + "*"
+ break
+ scopes.add(scope)
+ task.task["scopes"] = sorted(scopes)
+
+ task.task["payload"]["command"] = ["insert-indexes.js"] + index_paths
+ task.task["payload"]["env"] = {
+ "TARGET_TASKID": parent_task.task_id,
+ "INDEX_RANK": index_rank,
+ }
+ return task
+
+
+@register_morph
+def add_index_tasks(taskgraph, label_to_taskid, parameters, graph_config):
+ """
+ The TaskCluster queue only allows 10 routes on a task, but we have tasks
+ with many more routes, for purposes of indexing. This graph morph adds
+ "index tasks" that depend on such tasks and do the index insertions
+ directly, avoiding the limits on task.routes.
+ """
+ logger.debug("Morphing: adding index tasks")
+
+ # Add indexes for tasks that exceed MAX_ROUTES.
+ added = []
+ for label, task in taskgraph.tasks.items():
+ if len(task.task.get("routes", [])) <= MAX_ROUTES:
+ continue
+ index_paths = [
+ r.split(".", 1)[1] for r in task.task["routes"] if r.startswith("index.")
+ ]
+ task.task["routes"] = [
+ r for r in task.task["routes"] if not r.startswith("index.")
+ ]
+ added.append(
+ make_index_task(
+ task,
+ taskgraph,
+ label_to_taskid,
+ parameters,
+ graph_config,
+ index_paths=index_paths,
+ index_rank=task.task.get("extra", {}).get("index", {}).get("rank", 0),
+ purpose="index-task",
+ dependencies={"parent": task.task_id},
+ )
+ )
+
+ if added:
+ taskgraph, label_to_taskid = amend_taskgraph(taskgraph, label_to_taskid, added)
+ logger.info(f"Added {len(added)} index tasks")
+
+ return taskgraph, label_to_taskid
+
+
+@register_morph
+def add_eager_cache_index_tasks(taskgraph, label_to_taskid, parameters, graph_config):
+ """
+    We want some tasks (e.g. cached tasks) to exist in the index before they
+    even run or complete. Our current use case is to allow depending on an
+    unfinished cached task in future pushes. This graph morph adds
+    "eager-index" tasks that depend on the decision task and do the index
+    insertions directly, without needing to wait for the pointed-at task to
+    complete.
+ """
+ logger.debug("Morphing: Adding eager cached index's")
+
+ added = []
+ for label, task in taskgraph.tasks.items():
+ if "eager_indexes" not in task.attributes:
+ continue
+ eager_indexes = task.attributes["eager_indexes"]
+ added.append(
+ make_index_task(
+ task,
+ taskgraph,
+ label_to_taskid,
+ parameters,
+ graph_config,
+ index_paths=eager_indexes,
+ index_rank=0, # Be sure complete tasks get priority
+ purpose="eager-index",
+ dependencies={},
+ )
+ )
+
+ if added:
+ taskgraph, label_to_taskid = amend_taskgraph(taskgraph, label_to_taskid, added)
+ logger.info(f"Added {len(added)} eager index tasks")
+ return taskgraph, label_to_taskid
+
+
+@register_morph
+def add_try_task_duplicates(taskgraph, label_to_taskid, parameters, graph_config):
+ try_config = parameters["try_task_config"]
+ rebuild = try_config.get("rebuild")
+ if rebuild:
+ for task in taskgraph.tasks.values():
+ if task.label in try_config.get("tasks", []):
+ task.attributes["task_duplicates"] = rebuild
+ return taskgraph, label_to_taskid
diff --git a/taskcluster/gecko_taskgraph/optimize/__init__.py b/taskcluster/gecko_taskgraph/optimize/__init__.py
new file mode 100644
index 0000000000..96b067da7b
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/optimize/__init__.py
@@ -0,0 +1,284 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+The objective of optimization is to remove as many tasks from the graph as
+possible, as efficiently as possible, thereby delivering useful results as
+quickly as possible. For example, ideally if only a test script is modified in
+a push, then the resulting graph contains only the corresponding test suite
+task.
+
+See ``taskcluster/docs/optimization.rst`` for more information.
+"""
+
+from taskgraph.optimize.base import Alias, All, Any, Not, register_strategy
+from taskgraph.util.python_path import import_sibling_modules
+
+# Trigger registration in sibling modules.
+import_sibling_modules()
+
+
+def split_bugbug_arg(arg, substrategies):
+ """Split args for bugbug based strategies.
+
+ Many bugbug based optimizations require passing an empty dict by reference
+ to communicate to downstream strategies. This function passes the provided
+ arg to the first (non bugbug) strategies and a shared empty dict to the
+ bugbug strategy and all substrategies after it.
+ """
+ from gecko_taskgraph.optimize.bugbug import BugBugPushSchedules
+
+ index = [
+ i
+ for i, strategy in enumerate(substrategies)
+ if isinstance(strategy, BugBugPushSchedules)
+ ][0]
+
+ return [arg] * index + [{}] * (len(substrategies) - index)
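+
+
+# For example (illustrative): with substrategies
+# [skip-unless-schedules, bugbug-reduced-manifests-fallback-low, platform-disperse],
+# the bugbug strategy sits at index 1, so split_bugbug_arg(arg, substrategies)
+# returns [arg, {}, {}]; note the trailing entries are the *same* dict object,
+# shared by reference as the docstring describes.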
+
+
+# Register composite strategies.
+register_strategy("build", args=("skip-unless-schedules",))(Alias)
+register_strategy("test", args=("skip-unless-schedules",))(Alias)
+register_strategy("test-inclusive", args=("skip-unless-schedules",))(Alias)
+register_strategy("test-verify", args=("skip-unless-schedules",))(Alias)
+register_strategy("upload-symbols", args=("never",))(Alias)
+register_strategy("reprocess-symbols", args=("never",))(Alias)
+
+
+# Strategy overrides used to tweak the default strategies. These are referenced
+# by the `optimize_strategies` parameter.
+
+
+class project:
+ """Strategies that should be applied per-project."""
+
+ autoland = {
+ "test": Any(
+ # This `Any` strategy implements bi-modal behaviour. It allows different
+ # strategies on expanded pushes vs regular pushes.
+ # This first `All` handles "expanded" pushes.
+ All(
+ # There are three substrategies in this `All`, the first two act as barriers
+ # that help determine when to apply the third:
+ # 1. On backstop pushes, `skip-unless-backstop` returns False. Therefore
+ # the overall composite strategy is False and we don't optimize.
+ # 2. On regular pushes, `Not('skip-unless-expanded')` returns False. Therefore
+ # the overall composite strategy is False and we don't optimize.
+ # 3. On expanded pushes, the third strategy will determine whether or
+ # not to optimize each individual task.
+ # The barrier strategies.
+ "skip-unless-backstop",
+ Not("skip-unless-expanded"),
+ # The actual test strategy applied to "expanded" pushes.
+ Any(
+ "skip-unless-schedules",
+ "bugbug-reduced-manifests-fallback-last-10-pushes",
+ "platform-disperse",
+ split_args=split_bugbug_arg,
+ ),
+ ),
+ # This second `All` handles regular (aka not expanded or backstop)
+ # pushes.
+ All(
+ # There are two substrategies in this `All`, the first acts as a barrier
+ # that determines when to apply the second:
+ # 1. On expanded pushes (which includes backstops), `skip-unless-expanded`
+ # returns False. Therefore the overall composite strategy is False and we
+ # don't optimize.
+ # 2. On regular pushes, the second strategy will determine whether or
+ # not to optimize each individual task.
+ # The barrier strategy.
+ "skip-unless-expanded",
+ # The actual test strategy applied to regular pushes.
+ Any(
+ "skip-unless-schedules",
+ "bugbug-reduced-manifests-fallback-low",
+ "platform-disperse",
+ split_args=split_bugbug_arg,
+ ),
+ ),
+ ),
+ "build": All(
+ "skip-unless-expanded",
+ Any(
+ "skip-unless-schedules",
+ "bugbug-reduced-fallback",
+ split_args=split_bugbug_arg,
+ ),
+ ),
+ }
+ """Strategy overrides that apply to autoland."""
+
+
+class experimental:
+ """Experimental strategies either under development or used as benchmarks.
+
+ These run as "shadow-schedulers" on each autoland push (tier 3) and/or can be used
+ with `./mach try auto`. E.g:
+
+ ./mach try auto --strategy relevant_tests
+ """
+
+ bugbug_tasks_medium = {
+ "test": Any(
+ "skip-unless-schedules", "bugbug-tasks-medium", split_args=split_bugbug_arg
+ ),
+ }
+ """Doesn't limit platforms, medium confidence threshold."""
+
+ bugbug_tasks_high = {
+ "test": Any(
+ "skip-unless-schedules", "bugbug-tasks-high", split_args=split_bugbug_arg
+ ),
+ }
+ """Doesn't limit platforms, high confidence threshold."""
+
+ bugbug_debug_disperse = {
+ "test": Any(
+ "skip-unless-schedules",
+ "bugbug-low",
+ "platform-debug",
+ "platform-disperse",
+ split_args=split_bugbug_arg,
+ ),
+ }
+ """Restricts tests to debug platforms."""
+
+ bugbug_disperse_low = {
+ "test": Any(
+ "skip-unless-schedules",
+ "bugbug-low",
+ "platform-disperse",
+ split_args=split_bugbug_arg,
+ ),
+ }
+ """Disperse tests across platforms, low confidence threshold."""
+
+ bugbug_disperse_medium = {
+ "test": Any(
+ "skip-unless-schedules",
+ "bugbug-medium",
+ "platform-disperse",
+ split_args=split_bugbug_arg,
+ ),
+ }
+ """Disperse tests across platforms, medium confidence threshold."""
+
+ bugbug_disperse_reduced_medium = {
+ "test": Any(
+ "skip-unless-schedules",
+ "bugbug-reduced-manifests",
+ "platform-disperse",
+ split_args=split_bugbug_arg,
+ ),
+ }
+ """Disperse tests across platforms, medium confidence threshold with reduced tasks."""
+
+ bugbug_reduced_manifests_config_selection_low = {
+ "test": Any(
+ "skip-unless-schedules",
+ "bugbug-reduced-manifests-config-selection-low",
+ split_args=split_bugbug_arg,
+ ),
+ }
+ """Choose configs selected by bugbug, low confidence threshold with reduced tasks."""
+
+ bugbug_reduced_manifests_config_selection_medium = {
+ "test": Any(
+ "skip-unless-schedules",
+ "bugbug-reduced-manifests-config-selection",
+ split_args=split_bugbug_arg,
+ ),
+ }
+ """Choose configs selected by bugbug, medium confidence threshold with reduced tasks."""
+
+ bugbug_disperse_medium_no_unseen = {
+ "test": Any(
+ "skip-unless-schedules",
+ "bugbug-medium",
+ "platform-disperse-no-unseen",
+ split_args=split_bugbug_arg,
+ ),
+ }
+ """Disperse tests across platforms (no modified for unseen configurations), medium confidence
+ threshold."""
+
+ bugbug_disperse_medium_only_one = {
+ "test": Any(
+ "skip-unless-schedules",
+ "bugbug-medium",
+ "platform-disperse-only-one",
+ split_args=split_bugbug_arg,
+ ),
+ }
+ """Disperse tests across platforms (one platform per group), medium confidence threshold."""
+
+ bugbug_disperse_high = {
+ "test": Any(
+ "skip-unless-schedules",
+ "bugbug-high",
+ "platform-disperse",
+ split_args=split_bugbug_arg,
+ ),
+ }
+ """Disperse tests across platforms, high confidence threshold."""
+
+ bugbug_reduced = {
+ "test": Any(
+ "skip-unless-schedules", "bugbug-reduced", split_args=split_bugbug_arg
+ ),
+ }
+ """Use the reduced set of tasks (and no groups) chosen by bugbug."""
+
+ bugbug_reduced_high = {
+ "test": Any(
+ "skip-unless-schedules", "bugbug-reduced-high", split_args=split_bugbug_arg
+ ),
+ }
+ """Use the reduced set of tasks (and no groups) chosen by bugbug, high
+ confidence threshold."""
+
+ relevant_tests = {
+ "test": Any("skip-unless-schedules", "skip-unless-has-relevant-tests"),
+ }
+ """Runs task containing tests in the same directories as modified files."""
+
+
+class ExperimentalOverride:
+ """Overrides dictionaries that are stored in a container with new values.
+
+ This can be used to modify all strategies in a collection the same way,
+ presumably with strategies affecting kinds of tasks tangential to the
+ current context.
+
+ Args:
+ base (object): A container class supporting attribute access.
+ overrides (dict): Values to update any accessed dictionaries with.
+ """
+
+ def __init__(self, base, overrides):
+ self.base = base
+ self.overrides = overrides
+
+ def __getattr__(self, name):
+ val = getattr(self.base, name).copy()
+        for key, strategy in self.overrides.items():
+            if isinstance(strategy, str) and strategy.startswith("base:"):
+                strategy = val[strategy[len("base:") :]]
+
+            val[key] = strategy
+ return val
+
+
+tryselect = ExperimentalOverride(
+ experimental,
+ {
+ "build": Any(
+ "skip-unless-schedules", "bugbug-reduced", split_args=split_bugbug_arg
+ ),
+ "test-verify": "base:test",
+ "upload-symbols": Alias("always"),
+ "reprocess-symbols": Alias("always"),
+ },
+)
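+
+# For example (illustrative): accessing `tryselect.bugbug_reduced` returns a
+# copy of `experimental.bugbug_reduced` with the 'build', 'test-verify',
+# 'upload-symbols' and 'reprocess-symbols' entries overridden; the "base:test"
+# value for 'test-verify' resolves to that collection's own 'test' strategy.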
diff --git a/taskcluster/gecko_taskgraph/optimize/backstop.py b/taskcluster/gecko_taskgraph/optimize/backstop.py
new file mode 100644
index 0000000000..7b0c86222b
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/optimize/backstop.py
@@ -0,0 +1,47 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+from taskgraph.optimize.base import All, OptimizationStrategy, register_strategy
+
+from gecko_taskgraph.util.backstop import BACKSTOP_PUSH_INTERVAL
+
+
+@register_strategy("skip-unless-backstop")
+class SkipUnlessBackstop(OptimizationStrategy):
+ """Always removes tasks except on backstop pushes."""
+
+ def should_remove_task(self, task, params, _):
+ return not params["backstop"]
+
+
+class SkipUnlessPushInterval(OptimizationStrategy):
+ """Always removes tasks except every N pushes.
+
+ Args:
+ push_interval (int): Number of pushes
+ """
+
+ def __init__(self, push_interval, remove_on_projects=None):
+ self.push_interval = push_interval
+
+ @property
+ def description(self):
+ return f"skip-unless-push-interval-{self.push_interval}"
+
+ def should_remove_task(self, task, params, _):
+        # On every Nth push, we want to run all tasks.
+ return int(params["pushlog_id"]) % self.push_interval != 0
+
+
+# Strategy to run tasks on "expanded" pushes, currently defined as pushes that
+# are half the backstop interval. The 'All' composite strategy means that the
+# "backstop" strategy will prevent "expanded" from applying on backstop pushes.
+register_strategy(
+ "skip-unless-expanded",
+ args=(
+ "skip-unless-backstop",
+ SkipUnlessPushInterval(BACKSTOP_PUSH_INTERVAL / 2),
+ ),
+)(All)
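+
+# For example (assuming BACKSTOP_PUSH_INTERVAL is 20): the composite removes a
+# task only when *both* substrategies would remove it, so tasks survive on
+# backstop pushes and on every 10th push, and are optimized away otherwise.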
diff --git a/taskcluster/gecko_taskgraph/optimize/bugbug.py b/taskcluster/gecko_taskgraph/optimize/bugbug.py
new file mode 100644
index 0000000000..d8603560ef
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/optimize/bugbug.py
@@ -0,0 +1,321 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+from collections import defaultdict
+from fnmatch import fnmatch
+
+from taskgraph.optimize.base import OptimizationStrategy, register_strategy, registry
+
+from gecko_taskgraph.util.bugbug import (
+ CT_HIGH,
+ CT_LOW,
+ CT_MEDIUM,
+ BugbugTimeoutException,
+ push_schedules,
+)
+from gecko_taskgraph.util.hg import get_push_data
+
+FALLBACK = "skip-unless-has-relevant-tests"
+
+
+def merge_bugbug_replies(data, new_data):
+ """Merge a bugbug reply (stored in the `new_data` argument) into another (stored
+ in the `data` argument).
+ """
+ for key, value in new_data.items():
+ if isinstance(value, dict):
+ if key not in data:
+ data[key] = {}
+
+ if len(value) == 0:
+ continue
+
+ dict_value = next(iter(value.values()))
+ if isinstance(dict_value, list):
+ for name, configs in value.items():
+ if name not in data[key]:
+ data[key][name] = set()
+
+ data[key][name].update(configs)
+ else:
+ for name, confidence in value.items():
+ if name not in data[key] or data[key][name] < confidence:
+ data[key][name] = confidence
+ elif isinstance(value, list):
+ if key not in data:
+ data[key] = set()
+
+ data[key].update(value)
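+
+
+# For example (illustrative): merging {"tasks": {"t1": 0.9}} into
+# {"tasks": {"t1": 0.5, "t2": 0.7}} yields {"tasks": {"t1": 0.9, "t2": 0.7}},
+# keeping the highest confidence per task; list values are accumulated into sets.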
+
+
+@register_strategy("bugbug-low", args=(CT_LOW,))
+@register_strategy("bugbug-medium", args=(CT_MEDIUM,))
+@register_strategy("bugbug-high", args=(CT_HIGH,))
+@register_strategy("bugbug-tasks-medium", args=(CT_MEDIUM, True))
+@register_strategy("bugbug-tasks-high", args=(CT_HIGH, True))
+@register_strategy("bugbug-reduced", args=(CT_MEDIUM, True, True))
+@register_strategy("bugbug-reduced-fallback", args=(CT_MEDIUM, True, True, FALLBACK))
+@register_strategy("bugbug-reduced-high", args=(CT_HIGH, True, True))
+@register_strategy("bugbug-reduced-manifests", args=(CT_MEDIUM, False, True))
+@register_strategy(
+ "bugbug-reduced-manifests-config-selection-low",
+ args=(CT_LOW, False, True, None, 1, True),
+)
+@register_strategy(
+ "bugbug-reduced-manifests-config-selection",
+ args=(CT_MEDIUM, False, True, None, 1, True),
+)
+@register_strategy(
+ "bugbug-reduced-manifests-fallback-low", args=(CT_LOW, False, True, FALLBACK)
+)
+@register_strategy(
+ "bugbug-reduced-manifests-fallback", args=(CT_MEDIUM, False, True, FALLBACK)
+)
+@register_strategy(
+ "bugbug-reduced-manifests-fallback-last-10-pushes",
+ args=(0.3, False, True, FALLBACK, 10),
+)
+class BugBugPushSchedules(OptimizationStrategy):
+ """Query the 'bugbug' service to retrieve relevant tasks and manifests.
+
+ Args:
+ confidence_threshold (float): The minimum confidence threshold (in
+ range [0, 1]) needed for a task to be scheduled.
+ tasks_only (bool): Whether or not to only use tasks and no groups
+ (default: False)
+ use_reduced_tasks (bool): Whether or not to use the reduced set of tasks
+ provided by the bugbug service (default: False).
+ fallback (str): The fallback strategy to use if there
+ was a failure in bugbug (default: None)
+ num_pushes (int): The number of pushes to consider for the selection
+ (default: 1).
+ select_configs (bool): Whether to select configurations for manifests
+ too (default: False).
+ """
+
+ def __init__(
+ self,
+ confidence_threshold,
+ tasks_only=False,
+ use_reduced_tasks=False,
+ fallback=None,
+ num_pushes=1,
+ select_configs=False,
+ ):
+ self.confidence_threshold = confidence_threshold
+ self.use_reduced_tasks = use_reduced_tasks
+ self.fallback = fallback
+ self.tasks_only = tasks_only
+ self.num_pushes = num_pushes
+ self.select_configs = select_configs
+ self.timedout = False
+
+ def should_remove_task(self, task, params, importance):
+ project = params["project"]
+
+ if project not in ("autoland", "try"):
+ return False
+
+ current_push_id = int(params["pushlog_id"])
+
+ rev = params["head_rev"]
+
+ if self.timedout:
+ return registry[self.fallback].should_remove_task(task, params, importance)
+
+ data = {}
+
+ start_push_id = current_push_id - self.num_pushes + 1
+ if self.num_pushes != 1:
+ push_data = get_push_data(
+ params["head_repository"], project, start_push_id, current_push_id - 1
+ )
+
+ for push_id in range(start_push_id, current_push_id + 1):
+ if push_id == current_push_id:
+ rev = params["head_rev"]
+ else:
+ rev = push_data[push_id]["changesets"][-1]
+
+ try:
+ new_data = push_schedules(params["project"], rev)
+ merge_bugbug_replies(data, new_data)
+ except BugbugTimeoutException:
+ if not self.fallback:
+ raise
+
+ self.timedout = True
+ return self.should_remove_task(task, params, importance)
+
+ key = "reduced_tasks" if self.use_reduced_tasks else "tasks"
+ tasks = {
+ task
+ for task, confidence in data.get(key, {}).items()
+ if confidence >= self.confidence_threshold
+ }
+
+ test_manifests = task.attributes.get("test_manifests")
+ if test_manifests is None or self.tasks_only:
+ if data.get("known_tasks") and task.label not in data["known_tasks"]:
+ return False
+
+ if task.label not in tasks:
+ return True
+
+ return False
+
+ # If a task contains more than one group, use the max confidence.
+ groups = data.get("groups", {})
+ confidences = [c for g, c in groups.items() if g in test_manifests]
+ if not confidences or max(confidences) < self.confidence_threshold:
+ return True
+
+ # If the task configuration doesn't match the ones selected by bugbug for
+ # the manifests, optimize out.
+ if self.select_configs:
+ selected_groups = [
+ g
+ for g, c in groups.items()
+ if g in test_manifests and c > self.confidence_threshold
+ ]
+
+ config_groups = data.get("config_groups", defaultdict(list))
+
+ # Configurations returned by bugbug are in a format such as
+ # `test-windows10-64/opt-*-e10s`, while task labels are like
+ # test-windows10-64-qr/opt-mochitest-browser-chrome-e10s-6.
+ # In order to match the strings, we need to ignore the chunk number
+ # from the task label.
+ parts = task.label.split("-")
+ label_without_chunk_number = "-".join(
+ parts[:-1] if parts[-1].isdigit() else parts
+ )
+
+ if not any(
+ fnmatch(label_without_chunk_number, config)
+ for group in selected_groups
+ for config in config_groups[group]
+ ):
+ return True
+
+ # Store group importance so future optimizers can access it.
+ for manifest in test_manifests:
+ if manifest not in groups:
+ continue
+
+ confidence = groups[manifest]
+ if confidence >= CT_HIGH:
+ importance[manifest] = "high"
+ elif confidence >= CT_MEDIUM:
+ importance[manifest] = "medium"
+ elif confidence >= CT_LOW:
+ importance[manifest] = "low"
+ else:
+ importance[manifest] = "lowest"
+
+ return False
+
+
+@register_strategy("platform-debug")
+class SkipUnlessDebug(OptimizationStrategy):
+ """Only run debug platforms."""
+
+ def should_remove_task(self, task, params, arg):
+ return (
+ "build_type" in task.attributes and task.attributes["build_type"] != "debug"
+ )
+
+
+@register_strategy("platform-disperse")
+@register_strategy("platform-disperse-no-unseen", args=(None, 0))
+@register_strategy(
+ "platform-disperse-only-one",
+ args=(
+ {
+ "high": 1,
+ "medium": 1,
+ "low": 1,
+ "lowest": 0,
+ },
+ 0,
+ ),
+)
+class DisperseGroups(OptimizationStrategy):
+ """Disperse groups across test configs.
+
+ Each task has an associated 'importance' dict passed in via the arg. This
+ is of the form `{<group>: <importance>}`.
+
+ Where 'group' is a test group id (usually a path to a manifest), and 'importance' is
+ one of `{'lowest', 'low', 'medium', 'high'}`.
+
+ Each importance value has an associated 'count' as defined in
+ `self.target_counts`. It guarantees that 'manifest' will run in at least
+ 'count' different configurations (assuming there are enough tasks
+ containing 'manifest').
+
+ On configurations that haven't been seen before, we'll increase the target
+ count by `self.unseen_modifier` to increase the likelihood of scheduling a
+ task on that configuration.
+
+ Args:
+ target_counts (dict): Override DEFAULT_TARGET_COUNTS with custom counts. This
+ is a dict mapping the importance value ('lowest', 'low', etc) to the
+ minimum number of configurations manifests with this value should run
+ on.
+
+ unseen_modifier (int): Override DEFAULT_UNSEEN_MODIFIER to a custom
+ value. This is the amount we'll increase 'target_count' by for unseen
+ configurations.
+ """
+
+ DEFAULT_TARGET_COUNTS = {
+ "high": 3,
+ "medium": 2,
+ "low": 1,
+ "lowest": 0,
+ }
+ DEFAULT_UNSEEN_MODIFIER = 1
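+
+    # For example (illustrative): a manifest with importance "medium" targets 2
+    # configurations; on a configuration not seen before, the target becomes
+    # 2 + unseen_modifier = 3, making the task more likely to be kept.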
+
+ def __init__(self, target_counts=None, unseen_modifier=DEFAULT_UNSEEN_MODIFIER):
+ self.target_counts = self.DEFAULT_TARGET_COUNTS.copy()
+ if target_counts:
+ self.target_counts.update(target_counts)
+ self.unseen_modifier = unseen_modifier
+
+ self.count = defaultdict(int)
+ self.seen_configurations = set()
+
+ def should_remove_task(self, task, params, importance):
+ test_manifests = task.attributes.get("test_manifests")
+ test_platform = task.attributes.get("test_platform")
+
+ if not importance or not test_manifests or not test_platform:
+ return False
+
+ # Build the test configuration key.
+ key = test_platform
+ if "unittest_variant" in task.attributes:
+ key += "-" + task.attributes["unittest_variant"]
+
+ important_manifests = set(test_manifests) & set(importance)
+ for manifest in important_manifests:
+ target_count = self.target_counts[importance[manifest]]
+
+ # If this configuration hasn't been seen before, increase the
+ # likelihood of scheduling the task.
+ if key not in self.seen_configurations:
+ target_count += self.unseen_modifier
+
+ if self.count[manifest] < target_count:
+ # Update manifest counts and seen configurations.
+ self.seen_configurations.add(key)
+ for manifest in important_manifests:
+ self.count[manifest] += 1
+ return False
+
+ # Should remove task because all manifests have reached their
+ # importance count (or there were no important manifests).
+ return True
diff --git a/taskcluster/gecko_taskgraph/optimize/schema.py b/taskcluster/gecko_taskgraph/optimize/schema.py
new file mode 100644
index 0000000000..a7f878cf60
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/optimize/schema.py
@@ -0,0 +1,60 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import logging
+
+import voluptuous
+from mozbuild import schedules
+
+logger = logging.getLogger(__name__)
+
+
+default_optimizations = (
+ # always run this task (default)
+ None,
+ # always optimize this task
+ {"always": None},
+ # optimize strategy aliases for build kind
+ {"build": list(schedules.ALL_COMPONENTS)},
+ # search the index for the given index namespaces, and replace this task if found
+ # the search occurs in order, with the first match winning
+ {"index-search": [str]},
+ # never optimize this task
+ {"never": None},
+    # skip the task except on every Nth push
+    {"skip-unless-expanded": None},
+    # skip the task except on backstop pushes
+    {"skip-unless-backstop": None},
+ # skip this task if none of the given file patterns match
+ {"skip-unless-changed": [str]},
+    # skip this task unless the changed files' SCHEDULES contain any of these components
+ {"skip-unless-schedules": list(schedules.ALL_COMPONENTS)},
+ # optimize strategy aliases for the test kind
+ {"test": list(schedules.ALL_COMPONENTS)},
+ {"test-inclusive": list(schedules.ALL_COMPONENTS)},
+ # optimize strategy alias for test-verify tasks
+ {"test-verify": list(schedules.ALL_COMPONENTS)},
+ # optimize strategy alias for upload-symbols tasks
+ {"upload-symbols": None},
+ # optimize strategy alias for reprocess-symbols tasks
+ {"reprocess-symbols": None},
+)
+
+OptimizationSchema = voluptuous.Any(*default_optimizations)
+
+
+def set_optimization_schema(schema_tuple):
+ """Sets OptimizationSchema so it can be imported by the task transform.
+ This function is called by projects that extend Firefox's taskgraph.
+ It should be called by the project's taskgraph:register function before
+ any transport or job runner code is imported.
+
+ :param tuple schema_tuple: Tuple of possible optimization strategies
+ """
+ global OptimizationSchema
+ if OptimizationSchema.validators == default_optimizations:
+ logger.info("OptimizationSchema updated.")
+ OptimizationSchema = voluptuous.Any(*schema_tuple)
+ else:
+ raise Exception("Can only call set_optimization_schema once.")
diff --git a/taskcluster/gecko_taskgraph/optimize/strategies.py b/taskcluster/gecko_taskgraph/optimize/strategies.py
new file mode 100644
index 0000000000..4d0d23a5ac
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/optimize/strategies.py
@@ -0,0 +1,77 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import logging
+
+import mozpack.path as mozpath
+from mozbuild.base import MozbuildObject
+from mozbuild.util import memoize
+from taskgraph.optimize.base import OptimizationStrategy, register_strategy
+
+from gecko_taskgraph import files_changed
+
+logger = logging.getLogger(__name__)
+
+
+@register_strategy("skip-unless-schedules")
+class SkipUnlessSchedules(OptimizationStrategy):
+ @memoize
+ def scheduled_by_push(self, repository, revision):
+ changed_files = files_changed.get_changed_files(repository, revision)
+
+ mbo = MozbuildObject.from_environment()
+ # the decision task has a sparse checkout, so, mozbuild_reader will use
+ # a MercurialRevisionFinder with revision '.', which should be the same
+ # as `revision`; in other circumstances, it will use a default reader
+ rdr = mbo.mozbuild_reader(config_mode="empty")
+
+ components = set()
+ for p, m in rdr.files_info(changed_files).items():
+ components |= set(m["SCHEDULES"].components)
+
+ return components
+
+ def should_remove_task(self, task, params, conditions):
+ if params.get("pushlog_id") == -1:
+ return False
+
+ scheduled = self.scheduled_by_push(
+ params["head_repository"], params["head_rev"]
+ )
+ conditions = set(conditions)
+ # if *any* of the condition components are scheduled, do not optimize
+ if conditions & scheduled:
+ return False
+
+ return True
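+
+
+# For example (illustrative): if a push only touches files whose moz.build
+# SCHEDULES resolve to {"windows"}, a task annotated with
+# {"skip-unless-schedules": ["linux", "macosx"]} is optimized away, while one
+# listing "windows" is kept.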
+
+
+@register_strategy("skip-unless-has-relevant-tests")
+class SkipUnlessHasRelevantTests(OptimizationStrategy):
+ """Optimizes tasks that don't run any tests that were
+ in child directories of a modified file.
+ """
+
+ @memoize
+ def get_changed_dirs(self, repo, rev):
+ changed = map(mozpath.dirname, files_changed.get_changed_files(repo, rev))
+ # Filter out empty directories (from files modified in the root).
+ # Otherwise all tasks would be scheduled.
+ return {d for d in changed if d}
+
+ def should_remove_task(self, task, params, _):
+ if not task.attributes.get("test_manifests"):
+ return True
+
+ for d in self.get_changed_dirs(params["head_repository"], params["head_rev"]):
+ for t in task.attributes["test_manifests"]:
+ if t.startswith(d):
+ logger.debug(
+ "{} runs a test path ({}) contained by a modified file ({})".format(
+ task.label, t, d
+ )
+ )
+ return False
+ return True
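+
+
+# For example (hypothetical paths): if dom/media/test/crashtest.html is
+# modified, a task whose test_manifests include a path under dom/media/test
+# is kept, because that manifest path starts with the changed directory.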
diff --git a/taskcluster/gecko_taskgraph/parameters.py b/taskcluster/gecko_taskgraph/parameters.py
new file mode 100644
index 0000000000..2a61a71b96
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/parameters.py
@@ -0,0 +1,137 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import logging
+import os
+
+from taskgraph.parameters import extend_parameters_schema
+from voluptuous import Any, Optional, Required
+
+from gecko_taskgraph import GECKO
+
+logger = logging.getLogger(__name__)
+
+
+gecko_parameters_schema = {
+ Required("app_version"): str,
+ Required("backstop"): bool,
+ Required("build_number"): int,
+ Required("enable_always_target"): Any(bool, [str]),
+ Required("hg_branch"): str,
+ Required("message"): str,
+ Required("next_version"): Any(None, str),
+ Required("optimize_strategies"): Any(None, str),
+ Required("phabricator_diff"): Any(None, str),
+ Required("release_enable_emefree"): bool,
+ Required("release_enable_partner_repack"): bool,
+ Required("release_enable_partner_attribution"): bool,
+ Required("release_eta"): Any(None, str),
+ Required("release_history"): {str: dict},
+ Required("release_partners"): Any(None, [str]),
+ Required("release_partner_config"): Any(None, dict),
+ Required("release_partner_build_number"): int,
+ Required("release_type"): str,
+ Required("release_product"): Any(None, str),
+ Required("required_signoffs"): [str],
+ Required("signoff_urls"): dict,
+ Required("test_manifest_loader"): str,
+ Required("try_mode"): Any(None, str),
+ Required("try_options"): Any(None, dict),
+ Required("try_task_config"): {
+ Optional("tasks"): [str],
+ Optional("browsertime"): bool,
+ Optional("chemspill-prio"): bool,
+ Optional("disable-pgo"): bool,
+ Optional("env"): {str: str},
+ Optional("gecko-profile"): bool,
+ Optional("gecko-profile-interval"): float,
+ Optional("gecko-profile-entries"): int,
+ Optional("gecko-profile-features"): str,
+ Optional("gecko-profile-threads"): str,
+ Optional(
+ "new-test-config",
+ description="adjust parameters, chunks, etc. to speed up the process "
+ "of greening up a new test config.",
+ ): bool,
+ Optional(
+ "perftest-options",
+ description="Options passed from `mach perftest` to try.",
+ ): object,
+ Optional(
+ "optimize-strategies",
+ description="Alternative optimization strategies to use instead of the default. "
+ "A module path pointing to a dict to be use as the `strategy_override` "
+ "argument in `taskgraph.optimize.base.optimize_task_graph`.",
+ ): str,
+ Optional("rebuild"): int,
+ Optional("tasks-regex"): {
+ "include": Any(None, [str]),
+ "exclude": Any(None, [str]),
+ },
+ Optional("use-artifact-builds"): bool,
+ Optional(
+ "worker-overrides",
+ description="Mapping of worker alias to worker pools to use for those aliases.",
+ ): {str: str},
+ Optional("routes"): [str],
+ },
+ Required("version"): str,
+}
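+
+# Illustrative fragment of a try_task_config that validates against the
+# schema above (labels and values are hypothetical):
+#
+#   "try_task_config": {
+#       "tasks": ["build-linux64/opt", "test-linux1804-64-qr/opt-xpcshell-*"],
+#       "env": {"MOZ_LOG": "timestamp"},
+#       "rebuild": 3,
+#   }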
+
+
+def get_contents(path):
+ with open(path, "r") as fh:
+ contents = fh.readline().rstrip()
+ return contents
+
+
+def get_version(product_dir="browser"):
+ version_path = os.path.join(GECKO, product_dir, "config", "version_display.txt")
+ return get_contents(version_path)
+
+
+def get_app_version(product_dir="browser"):
+ app_version_path = os.path.join(GECKO, product_dir, "config", "version.txt")
+ return get_contents(app_version_path)
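+
+# For reference (values are illustrative): version.txt typically holds a
+# plain version such as "124.0.1", while version_display.txt may carry a
+# display suffix, e.g. "124.0b3" on beta.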
+
+
+def get_defaults(repo_root=None):
+ return {
+ "app_version": get_app_version(),
+ "backstop": False,
+ "base_repository": "https://hg.mozilla.org/mozilla-unified",
+ "build_number": 1,
+ "enable_always_target": ["docker-image"],
+ "head_repository": "https://hg.mozilla.org/mozilla-central",
+ "hg_branch": "default",
+ "message": "",
+ "next_version": None,
+ "optimize_strategies": None,
+ "phabricator_diff": None,
+ "project": "mozilla-central",
+ "release_enable_emefree": False,
+ "release_enable_partner_repack": False,
+ "release_enable_partner_attribution": False,
+ "release_eta": "",
+ "release_history": {},
+ "release_partners": [],
+ "release_partner_config": None,
+ "release_partner_build_number": 1,
+ "release_product": None,
+ "release_type": "nightly",
+ # This refers to the upstream repo rather than the local checkout, so
+ # should be hardcoded to 'hg' even with git-cinnabar.
+ "repository_type": "hg",
+ "required_signoffs": [],
+ "signoff_urls": {},
+ "test_manifest_loader": "default",
+ "try_mode": None,
+ "try_options": None,
+ "try_task_config": {},
+ "version": get_version(),
+ }
+
+
+def register_parameters():
+ extend_parameters_schema(gecko_parameters_schema, defaults_fn=get_defaults)
diff --git a/taskcluster/gecko_taskgraph/target_tasks.py b/taskcluster/gecko_taskgraph/target_tasks.py
new file mode 100644
index 0000000000..2f445d3f95
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/target_tasks.py
@@ -0,0 +1,1606 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import itertools
+import os
+import re
+from datetime import datetime, timedelta
+
+from redo import retry
+from taskgraph.parameters import Parameters
+from taskgraph.target_tasks import _target_task, get_method
+from taskgraph.util.taskcluster import find_task_id
+
+from gecko_taskgraph import GECKO, try_option_syntax
+from gecko_taskgraph.util.attributes import (
+ match_run_on_hg_branches,
+ match_run_on_projects,
+)
+from gecko_taskgraph.util.hg import find_hg_revision_push_info, get_hg_commit_message
+from gecko_taskgraph.util.platforms import platform_family
+
+# Some tasks show up in the target task set but are special cases, uncommon
+# tasks, or tasks running against a limited hardware set, so they should only
+# be selectable with --full.
+UNCOMMON_TRY_TASK_LABELS = [
+ # Platforms and/or Build types
+ r"build-.*-gcp", # Bug 1631990
+ r"mingwclang", # Bug 1631990
+ r"valgrind", # Bug 1631990
+ # Android tasks
+ r"android-geckoview-docs",
+ r"android-hw",
+ # Windows tasks
+ r"windows10-64-ref-hw",
+ r"windows10-aarch64-qr",
+ # Linux tasks
+ r"linux-", # hide all linux32 tasks by default - bug 1599197
+ r"linux1804-32", # hide linux32 tests - bug 1599197
+ # Test tasks
+ r"web-platform-tests.*backlog", # hide wpt jobs that are not implemented yet - bug 1572820
+ r"-ccov",
+ r"-profiling-", # talos/raptor profiling jobs are run too often
+ r"-32-.*-webgpu", # webgpu gets little benefit from these tests.
+ r"-asan-.*-webgpu",
+ r"-tsan-.*-webgpu",
+ # Hide shippable versions of tests we have opt versions of because the non-shippable
+ # versions are faster to run. This is mostly perf tests.
+ r"-shippable(?!.*(awsy|browsertime|marionette-headless|mochitest-devtools-chrome-fis|raptor|talos|web-platform-tests-wdspec-headless|mochitest-plain-headless))", # noqa - too long
+]
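+
+# Illustrative check against the patterns above (labels are hypothetical):
+# "test-linux1804-64-shippable/opt-mochitest-plain-1" matches the
+# "-shippable" negative-lookahead pattern (no allow-listed suite follows),
+# so it is hidden; "test-linux1804-64-shippable-qr/opt-talos-g1" does not
+# match ("talos" follows "-shippable"), so it stays selectable.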
+
+
+def index_exists(index_path, reason=""):
+ print(f"Looking for existing index {index_path} {reason}...")
+ try:
+ task_id = find_task_id(index_path)
+ print(f"Index {index_path} exists: taskId {task_id}")
+ return True
+ except KeyError:
+ print(f"Index {index_path} doesn't exist.")
+ return False
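+
+# Usage sketch (the index path is illustrative; see the cron target tasks
+# below for real call sites):
+#
+#   retry(
+#       index_exists,
+#       args=("gecko.v2.mozilla-central.revision.abcdef.taskgraph.decision-nightly-desktop",),
+#       kwargs={"reason": "to avoid triggering multiple nightlies"},
+#   )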
+
+
+def filter_out_shipping_phase(task, parameters):
+ return (
+ # nightly still here because of geckodriver
+ not task.attributes.get("nightly")
+ and task.attributes.get("shipping_phase") in (None, "build")
+ )
+
+
+def filter_out_devedition(task, parameters):
+ return not task.attributes.get("shipping_product") == "devedition"
+
+
+def filter_out_cron(task, parameters):
+ """
+ Filter out tasks that run via cron.
+ """
+ return not task.attributes.get("cron")
+
+
+def filter_for_project(task, parameters):
+ """Filter tasks by project. Optionally enable nightlies."""
+ run_on_projects = set(task.attributes.get("run_on_projects", []))
+ return match_run_on_projects(parameters["project"], run_on_projects)
+
+
+def filter_for_hg_branch(task, parameters):
+ """Filter tasks by hg branch.
+ If `run_on_hg_branch` is not defined, then task runs on all branches"""
+ run_on_hg_branches = set(task.attributes.get("run_on_hg_branches", ["all"]))
+ return match_run_on_hg_branches(parameters["hg_branch"], run_on_hg_branches)
+
+
+def filter_on_platforms(task, platforms):
+ """Filter tasks on the given platform"""
+ platform = task.attributes.get("build_platform")
+ return platform in platforms
+
+
+def filter_by_uncommon_try_tasks(task, optional_filters=None):
+ """Filters tasks that should not be commonly run on try.
+
+ Args:
+ task (str): String representing the task name.
+ optional_filters (list, optional):
+ Additional filters to apply to task filtering.
+
+ Returns:
+ (Boolean): True if task does not match any known filters.
+ False otherwise.
+ """
+ filters = UNCOMMON_TRY_TASK_LABELS
+ if optional_filters:
+ filters = itertools.chain(filters, optional_filters)
+
+ return not any(re.search(pattern, task) for pattern in filters)
+
+
+def filter_by_regex(task_label, regexes, mode="include"):
+ """Filters tasks according to a list of pre-compiled reguar expressions.
+
+ If mode is "include", a task label must match any regex to pass.
+ If it is "exclude", a task label must _not_ match any regex to pass.
+ """
+ if not regexes:
+ return True
+
+ assert mode in ["include", "exclude"]
+
+ any_match = any(r.search(task_label) for r in regexes)
+ if any_match:
+ return mode == "include"
+ return mode != "include"
+
+
+def filter_release_tasks(task, parameters):
+ platform = task.attributes.get("build_platform")
+ if platform in (
+ "linux",
+ "linux64",
+ "macosx64",
+ "win32",
+ "win64",
+ "win64-aarch64",
+ ):
+ if task.attributes["kind"] == "l10n":
+ # This is on-change l10n
+ return True
+ if (
+ task.attributes["build_type"] == "opt"
+ and task.attributes.get("unittest_suite") != "talos"
+ and task.attributes.get("unittest_suite") != "raptor"
+ ):
+ return False
+
+ if task.attributes.get("shipping_phase") not in (None, "build"):
+ return False
+
+ """ No debug on release, keep on ESR with 4 week cycles, release
+ will not be too different from central, but ESR will live for a long time.
+
+ From June 2019 -> June 2020, we found 1 unique regression on ESR debug
+ and 5 unique regressions on beta/release. Keeping spidermonkey and linux
+ debug finds all but 1 unique regressions (windows found on try) for beta/release.
+
+ ...but debug-only failures started showing up on ESR (esr-91, esr-102) so
+ desktop debug tests were added back for beta.
+ """
+ build_type = task.attributes.get("build_type", "")
+ build_platform = task.attributes.get("build_platform", "")
+ test_platform = task.attributes.get("test_platform", "")
+
+ if parameters["release_type"].startswith("esr") or (
+ parameters["release_type"] == "beta" and "android" not in build_platform
+ ):
+ return True
+
+ # code below here is intended to reduce release debug tasks
+ if task.kind == "hazard" or "toolchain" in build_platform:
+ # keep hazard and toolchain builds around
+ return True
+
+ if build_type == "debug":
+ if "linux" not in build_platform:
+ # filter out windows/mac/android
+ return False
+ if task.kind not in ["spidermonkey"] and "-qr" in test_platform:
+ # filter out linux-qr tests, leave spidermonkey
+ return False
+ if "64" not in build_platform:
+ # filter out linux32 builds
+ return False
+
+ # webrender-android-*-debug doesn't have attributes to find 'debug', using task.label.
+ if task.kind == "webrender" and "debug" in task.label:
+ return False
+ return True
+
+
+def filter_out_missing_signoffs(task, parameters):
+ for signoff in parameters["required_signoffs"]:
+ if signoff not in parameters["signoff_urls"] and signoff in task.attributes.get(
+ "required_signoffs", []
+ ):
+ return False
+ return True
+
+
+def filter_tests_without_manifests(task, parameters):
+ """Remove test tasks that have an empty 'test_manifests' attribute.
+
+    This situation can arise when the test loader (e.g. bugbug) decided there
+ weren't any important manifests to run for the given push. We filter tasks
+ out here rather than in the transforms so that the full task graph is still
+ aware that the task exists (which is needed by the backfill action).
+ """
+ if (
+ task.kind == "test"
+ and "test_manifests" in task.attributes
+ and not task.attributes["test_manifests"]
+ ):
+ return False
+ return True
+
+
+def standard_filter(task, parameters):
+ return all(
+ filter_func(task, parameters)
+ for filter_func in (
+ filter_out_cron,
+ filter_for_project,
+ filter_for_hg_branch,
+ filter_tests_without_manifests,
+ )
+ )
+
+
+def accept_raptor_android_build(platform):
+ """Helper function for selecting the correct android raptor builds."""
+ if "android" not in platform:
+ return False
+ if "shippable" not in platform:
+ return False
+ if "p5" in platform and "aarch64" in platform:
+ return False
+ if "p6" in platform and "aarch64" in platform:
+ return False
+ if "s21" in platform and "aarch64" in platform:
+ return False
+ if "a51" in platform:
+ return True
+ return False
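+
+# e.g. (platform strings are illustrative):
+#   accept_raptor_android_build("android-hw-a51-11-0-aarch64-shippable-qr") -> True
+#   accept_raptor_android_build("android-hw-p5-13-0-aarch64-shippable-qr")  -> False
+#   accept_raptor_android_build("android-hw-a51-11-0-arm7-qr")              -> False  (not shippable)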
+
+
+def accept_raptor_desktop_build(platform):
+ """Helper function for selecting correct desktop raptor builds."""
+ if "android" in platform:
+ return False
+ # ignore all windows 7 perf jobs scheduled automatically
+ if "windows7" in platform or "windows10-32" in platform:
+ return False
+ # Completely ignore all non-shippable platforms
+ if "shippable" in platform:
+ return True
+ return False
+
+
+def accept_awsy_task(try_name, platform):
+ if accept_raptor_desktop_build(platform):
+ if "windows" in platform and "windows11-64" not in platform:
+ return False
+ if "dmd" in try_name:
+ return False
+ if "awsy-base" in try_name:
+ return True
+ if "awsy-tp6" in try_name:
+ return True
+ return False
+
+
+def filter_unsupported_artifact_builds(task, parameters):
+ try_config = parameters.get("try_task_config", {})
+ if not try_config.get("use-artifact-builds", False):
+ return True
+
+ supports_artifact_builds = task.attributes.get("supports-artifact-builds", True)
+ return supports_artifact_builds
+
+
+def filter_out_shippable(task):
+ return not task.attributes.get("shippable", False)
+
+
+def _try_task_config(full_task_graph, parameters, graph_config):
+ requested_tasks = parameters["try_task_config"]["tasks"]
+ pattern_tasks = [x for x in requested_tasks if x.endswith("-*")]
+ tasks = list(set(requested_tasks) - set(pattern_tasks))
+ matched_tasks = []
+ for pattern in pattern_tasks:
+ matched_tasks.extend(
+ [
+ t
+ for t in full_task_graph.graph.nodes
+ if t.split(pattern.replace("*", ""))[-1].isnumeric()
+ ]
+ )
+
+ return list(set(tasks) | set(matched_tasks))
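+
+# Illustrative expansion (labels are hypothetical): a requested label
+# "test-linux1804-64-qr/opt-xpcshell-*" matches any node whose remainder
+# after "test-linux1804-64-qr/opt-xpcshell-" is numeric, e.g. chunks -1 and
+# -2, while exact labels pass through unchanged.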
+
+
+def _try_option_syntax(full_task_graph, parameters, graph_config):
+ """Generate a list of target tasks based on try syntax in
+ parameters['message'] and, for context, the full task graph."""
+ options = try_option_syntax.TryOptionSyntax(
+ parameters, full_task_graph, graph_config
+ )
+ target_tasks_labels = [
+ t.label
+ for t in full_task_graph.tasks.values()
+ if options.task_matches(t)
+ and filter_by_uncommon_try_tasks(t.label)
+ and filter_unsupported_artifact_builds(t, parameters)
+ ]
+
+ attributes = {
+ k: getattr(options, k)
+ for k in [
+ "no_retry",
+ "tag",
+ ]
+ }
+
+ for l in target_tasks_labels:
+ task = full_task_graph[l]
+ if "unittest_suite" in task.attributes:
+ task.attributes["task_duplicates"] = options.trigger_tests
+
+ for l in target_tasks_labels:
+ task = full_task_graph[l]
+ # If the developer wants test jobs to be rebuilt N times we add that value here
+ if options.trigger_tests > 1 and "unittest_suite" in task.attributes:
+ task.attributes["task_duplicates"] = options.trigger_tests
+
+ # If the developer wants test talos jobs to be rebuilt N times we add that value here
+ if (
+ options.talos_trigger_tests > 1
+ and task.attributes.get("unittest_suite") == "talos"
+ ):
+ task.attributes["task_duplicates"] = options.talos_trigger_tests
+
+ # If the developer wants test raptor jobs to be rebuilt N times we add that value here
+ if (
+ options.raptor_trigger_tests
+ and options.raptor_trigger_tests > 1
+ and task.attributes.get("unittest_suite") == "raptor"
+ ):
+ task.attributes["task_duplicates"] = options.raptor_trigger_tests
+
+ task.attributes.update(attributes)
+
+ # Add notifications here as well
+ if options.notifications:
+ for task in full_task_graph:
+ owner = parameters.get("owner")
+ routes = task.task.setdefault("routes", [])
+ if options.notifications == "all":
+ routes.append(f"notify.email.{owner}.on-any")
+ elif options.notifications == "failure":
+ routes.append(f"notify.email.{owner}.on-failed")
+ routes.append(f"notify.email.{owner}.on-exception")
+
+ return target_tasks_labels
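+
+# Illustrative result (owner is hypothetical): with owner "dev@example.com"
+# and notifications == "failure", every task gains the routes
+# "notify.email.dev@example.com.on-failed" and
+# "notify.email.dev@example.com.on-exception".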
+
+
+@_target_task("try_tasks")
+def target_tasks_try(full_task_graph, parameters, graph_config):
+ try_mode = parameters["try_mode"]
+ if try_mode == "try_task_config":
+ return _try_task_config(full_task_graph, parameters, graph_config)
+ if try_mode == "try_option_syntax":
+ return _try_option_syntax(full_task_graph, parameters, graph_config)
+ # With no try mode, we schedule nothing, allowing the user to add tasks
+ # later via treeherder.
+ return []
+
+
+@_target_task("try_select_tasks")
+def target_tasks_try_select(full_task_graph, parameters, graph_config):
+ tasks = target_tasks_try_select_uncommon(full_task_graph, parameters, graph_config)
+ return [l for l in tasks if filter_by_uncommon_try_tasks(l)]
+
+
+@_target_task("try_select_tasks_uncommon")
+def target_tasks_try_select_uncommon(full_task_graph, parameters, graph_config):
+ from gecko_taskgraph.decision import PER_PROJECT_PARAMETERS
+
+ projects = ("autoland", "mozilla-central")
+ if parameters["project"] not in projects:
+ projects = (parameters["project"],)
+
+ tasks = set()
+ for project in projects:
+ params = dict(parameters)
+ params["project"] = project
+ parameters = Parameters(**params)
+
+ try:
+ target_tasks_method = PER_PROJECT_PARAMETERS[project]["target_tasks_method"]
+ except KeyError:
+ target_tasks_method = "default"
+
+ tasks.update(
+ get_method(target_tasks_method)(full_task_graph, parameters, graph_config)
+ )
+
+ return sorted(tasks)
+
+
+@_target_task("try_auto")
+def target_tasks_try_auto(full_task_graph, parameters, graph_config):
+ """Target the tasks which have indicated they should be run on autoland
+ (rather than try) via the `run_on_projects` attributes.
+
+ Should do the same thing as the `default` target tasks method.
+ """
+ params = dict(parameters)
+ params["project"] = "autoland"
+ parameters = Parameters(**params)
+
+ regex_filters = parameters["try_task_config"].get("tasks-regex")
+ include_regexes = exclude_regexes = []
+ if regex_filters:
+ include_regexes = [re.compile(r) for r in regex_filters.get("include", [])]
+ exclude_regexes = [re.compile(r) for r in regex_filters.get("exclude", [])]
+
+ return [
+ l
+ for l, t in full_task_graph.tasks.items()
+ if standard_filter(t, parameters)
+ and filter_out_shipping_phase(t, parameters)
+ and filter_out_devedition(t, parameters)
+ and filter_by_uncommon_try_tasks(t.label)
+ and filter_by_regex(t.label, include_regexes, mode="include")
+ and filter_by_regex(t.label, exclude_regexes, mode="exclude")
+ and filter_unsupported_artifact_builds(t, parameters)
+ and filter_out_shippable(t)
+ ]
+
+
+@_target_task("default")
+def target_tasks_default(full_task_graph, parameters, graph_config):
+ """Target the tasks which have indicated they should be run on this project
+ via the `run_on_projects` attributes."""
+ return [
+ l
+ for l, t in full_task_graph.tasks.items()
+ if standard_filter(t, parameters)
+ and filter_out_shipping_phase(t, parameters)
+ and filter_out_devedition(t, parameters)
+ ]
+
+
+@_target_task("autoland_tasks")
+def target_tasks_autoland(full_task_graph, parameters, graph_config):
+ """In addition to doing the filtering by project that the 'default'
+ filter does, also remove any tests running against shippable builds
+ for non-backstop pushes."""
+ filtered_for_project = target_tasks_default(
+ full_task_graph, parameters, graph_config
+ )
+
+ def filter(task):
+ if task.kind != "test":
+ return True
+
+ if parameters["backstop"]:
+ return True
+
+ build_type = task.attributes.get("build_type")
+
+ if not build_type or build_type != "opt" or filter_out_shippable(task):
+ return True
+
+ return False
+
+ return [l for l in filtered_for_project if filter(full_task_graph[l])]
+
+
+@_target_task("mozilla_central_tasks")
+def target_tasks_mozilla_central(full_task_graph, parameters, graph_config):
+ """In addition to doing the filtering by project that the 'default'
+ filter does, also remove any tests running against regular (aka not shippable,
+ asan, etc.) opt builds."""
+ filtered_for_project = target_tasks_default(
+ full_task_graph, parameters, graph_config
+ )
+
+ def filter(task):
+ if task.kind != "test":
+ return True
+
+ build_platform = task.attributes.get("build_platform")
+ build_type = task.attributes.get("build_type")
+ shippable = task.attributes.get("shippable", False)
+
+ if not build_platform or not build_type:
+ return True
+
+ family = platform_family(build_platform)
+ # We need to know whether this test is against a "regular" opt build
+ # (which is to say, not shippable, asan, tsan, or any other opt build
+ # with other properties). There's no positive test for this, so we have to
+ # do it somewhat hackily. Android doesn't have variants other than shippable
+ # so it is pretty straightforward to check for. Other platforms have many
+ # variants, but none of the regular opt builds we're looking for have a "-"
+ # in their platform name, so this works (for now).
+ is_regular_opt = (
+ family == "android" and not shippable
+ ) or "-" not in build_platform
+
+ if build_type != "opt" or not is_regular_opt:
+ return True
+
+ return False
+
+ return [l for l in filtered_for_project if filter(full_task_graph[l])]
+
+
+@_target_task("graphics_tasks")
+def target_tasks_graphics(full_task_graph, parameters, graph_config):
+ """In addition to doing the filtering by project that the 'default'
+ filter does, also remove artifact builds because we have csets on
+ the graphics branch that aren't on the candidate branches of artifact
+ builds"""
+ filtered_for_project = target_tasks_default(
+ full_task_graph, parameters, graph_config
+ )
+
+ def filter(task):
+ if task.attributes["kind"] == "artifact-build":
+ return False
+ return True
+
+ return [l for l in filtered_for_project if filter(full_task_graph[l])]
+
+
+@_target_task("mozilla_beta_tasks")
+def target_tasks_mozilla_beta(full_task_graph, parameters, graph_config):
+ """Select the set of tasks required for a promotable beta or release build
+ of desktop, plus android CI. The candidates build process involves a pipeline
+ of builds and signing, but does not include beetmover or balrog jobs."""
+
+ return [
+ l
+ for l, t in full_task_graph.tasks.items()
+ if filter_release_tasks(t, parameters) and standard_filter(t, parameters)
+ ]
+
+
+@_target_task("mozilla_release_tasks")
+def target_tasks_mozilla_release(full_task_graph, parameters, graph_config):
+ """Select the set of tasks required for a promotable beta or release build
+ of desktop, plus android CI. The candidates build process involves a pipeline
+ of builds and signing, but does not include beetmover or balrog jobs."""
+
+ return [
+ l
+ for l, t in full_task_graph.tasks.items()
+ if filter_release_tasks(t, parameters) and standard_filter(t, parameters)
+ ]
+
+
+@_target_task("mozilla_esr115_tasks")
+def target_tasks_mozilla_esr115(full_task_graph, parameters, graph_config):
+ """Select the set of tasks required for a promotable beta or release build
+ of desktop, without android CI. The candidates build process involves a pipeline
+ of builds and signing, but does not include beetmover or balrog jobs."""
+
+ def filter(task):
+ if not filter_release_tasks(task, parameters):
+ return False
+
+ if not standard_filter(task, parameters):
+ return False
+
+ platform = task.attributes.get("build_platform")
+
+ # Android is not built on esr115.
+ if platform and "android" in platform:
+ return False
+
+ return True
+
+ return [l for l, t in full_task_graph.tasks.items() if filter(t)]
+
+
+@_target_task("promote_desktop")
+def target_tasks_promote_desktop(full_task_graph, parameters, graph_config):
+ """Select the superset of tasks required to promote a beta or release build
+ of a desktop product. This should include all non-android
+ mozilla_{beta,release} tasks, plus l10n, beetmover, balrog, etc."""
+
+ def filter(task):
+ # Bug 1758507 - geckoview ships in the promote phase
+ if not parameters["release_type"].startswith("esr") and is_geckoview(
+ task, parameters
+ ):
+ return True
+
+ if task.attributes.get("shipping_product") != parameters["release_product"]:
+ return False
+
+ # 'secondary' balrog/update verify/final verify tasks only run for RCs
+ if parameters.get("release_type") != "release-rc":
+ if "secondary" in task.kind:
+ return False
+
+ if not filter_out_missing_signoffs(task, parameters):
+ return False
+
+ if task.attributes.get("shipping_phase") == "promote":
+ return True
+
+ return [l for l, t in full_task_graph.tasks.items() if filter(t)]
+
+
+def is_geckoview(task, parameters):
+ return (
+ task.attributes.get("shipping_product") == "fennec"
+ and task.kind in ("beetmover-geckoview", "upload-symbols")
+ and parameters["release_product"] == "firefox"
+ )
+
+
+@_target_task("push_desktop")
+def target_tasks_push_desktop(full_task_graph, parameters, graph_config):
+ """Select the set of tasks required to push a build of desktop to cdns.
+ Previous build deps will be optimized out via action task."""
+ filtered_for_candidates = target_tasks_promote_desktop(
+ full_task_graph,
+ parameters,
+ graph_config,
+ )
+
+ def filter(task):
+ if not filter_out_missing_signoffs(task, parameters):
+ return False
+ # Include promotion tasks; these will be optimized out
+ if task.label in filtered_for_candidates:
+ return True
+
+ if (
+ task.attributes.get("shipping_product") == parameters["release_product"]
+ and task.attributes.get("shipping_phase") == "push"
+ ):
+ return True
+
+ return [l for l, t in full_task_graph.tasks.items() if filter(t)]
+
+
+@_target_task("ship_desktop")
+def target_tasks_ship_desktop(full_task_graph, parameters, graph_config):
+ """Select the set of tasks required to ship desktop.
+ Previous build deps will be optimized out via action task."""
+ is_rc = parameters.get("release_type") == "release-rc"
+ if is_rc:
+ # ship_firefox_rc runs after `promote` rather than `push`; include
+ # all promote tasks.
+ filtered_for_candidates = target_tasks_promote_desktop(
+ full_task_graph,
+ parameters,
+ graph_config,
+ )
+ else:
+ # ship_firefox runs after `push`; include all push tasks.
+ filtered_for_candidates = target_tasks_push_desktop(
+ full_task_graph,
+ parameters,
+ graph_config,
+ )
+
+ def filter(task):
+ if not filter_out_missing_signoffs(task, parameters):
+ return False
+ # Include promotion tasks; these will be optimized out
+ if task.label in filtered_for_candidates:
+ return True
+
+ if (
+ task.attributes.get("shipping_product") != parameters["release_product"]
+ or task.attributes.get("shipping_phase") != "ship"
+ ):
+ return False
+
+ if "secondary" in task.kind:
+ return is_rc
+ return not is_rc
+
+ return [l for l, t in full_task_graph.tasks.items() if filter(t)]
+
+
+@_target_task("pine_tasks")
+def target_tasks_pine(full_task_graph, parameters, graph_config):
+ """Bug 1879960 - no reftests or wpt needed"""
+ filtered_for_project = target_tasks_default(
+ full_task_graph, parameters, graph_config
+ )
+
+ def filter(task):
+ suite = task.attributes.get("unittest_suite", "")
+ if "reftest" in suite or "web-platform" in suite:
+ return False
+ return True
+
+ return [l for l in filtered_for_project if filter(full_task_graph[l])]
+
+
+@_target_task("larch_tasks")
+def target_tasks_larch(full_task_graph, parameters, graph_config):
+ """Bug 1879213 - only run necessary tasks on larch"""
+ filtered_for_project = target_tasks_default(
+ full_task_graph, parameters, graph_config
+ )
+
+ def filter(task):
+ # no localized builds, no android
+ if (
+ "l10n" in task.kind
+ or "msix" in task.kind
+ or "android" in task.attributes.get("build_platform", "")
+ ):
+ return False
+ # otherwise reduce tests only
+ if task.kind != "test":
+ return True
+ return "browser-chrome" in task.label or "xpcshell" in task.label
+
+ return [l for l in filtered_for_project if filter(full_task_graph[l])]
+
+
+@_target_task("kaios_tasks")
+def target_tasks_kaios(full_task_graph, parameters, graph_config):
+ """The set of tasks to run for kaios integration"""
+
+ def filter(task):
+ # We disable everything in central, and adjust downstream.
+ return False
+
+ return [l for l, t in full_task_graph.tasks.items() if filter(t)]
+
+
+@_target_task("ship_geckoview")
+def target_tasks_ship_geckoview(full_task_graph, parameters, graph_config):
+ """Select the set of tasks required to ship geckoview nightly. The
+ nightly build process involves a pipeline of builds and an upload to
+ maven.mozilla.org."""
+ index_path = (
+ f"{graph_config['trust-domain']}.v2.{parameters['project']}.revision."
+ f"{parameters['head_rev']}.taskgraph.decision-ship-geckoview"
+ )
+ if os.environ.get("MOZ_AUTOMATION") and retry(
+ index_exists,
+ args=(index_path,),
+ kwargs={
+ "reason": "to avoid triggering multiple nightlies off the same revision",
+ },
+ ):
+ return []
+
+ def filter(task):
+        # XXX Starting with 69, we don't ship Fennec Nightly anymore; we just
+        # want geckoview to be uploaded.
+ return task.attributes.get("shipping_product") == "fennec" and task.kind in (
+ "beetmover-geckoview",
+ "upload-symbols",
+ )
+
+ return [l for l, t in full_task_graph.tasks.items() if filter(t)]
+
+
+@_target_task("custom-car_perf_testing")
+def target_tasks_custom_car_perf_testing(full_task_graph, parameters, graph_config):
+ """Select tasks required for running daily performance tests for custom chromium-as-release."""
+
+ def filter(task):
+ platform = task.attributes.get("test_platform")
+ attributes = task.attributes
+ if attributes.get("unittest_suite") != "raptor":
+ return False
+
+ try_name = attributes.get("raptor_try_name")
+
+ # Desktop and Android selection for CaR
+ if accept_raptor_desktop_build(platform) or accept_raptor_android_build(
+ platform
+ ):
+ if "browsertime" in try_name and (
+ "custom-car" in try_name or "cstm-car-m" in try_name
+ ):
+ return True
+ return False
+
+ return [l for l, t in full_task_graph.tasks.items() if filter(t)]
+
+
+@_target_task("general_perf_testing")
+def target_tasks_general_perf_testing(full_task_graph, parameters, graph_config):
+ """
+ Select tasks required for running performance tests 3 times a week.
+ """
+
+ def filter(task):
+ platform = task.attributes.get("test_platform")
+ attributes = task.attributes
+ if attributes.get("unittest_suite") != "raptor":
+ return False
+
+ try_name = attributes.get("raptor_try_name")
+
+ if "tp6-bench" in try_name:
+ return False
+
+ # Bug 1867669 - Temporarily disable all live site tests
+ if "live" in try_name and "sheriffed" not in try_name:
+ return False
+
+ # Desktop selection
+ if accept_raptor_desktop_build(platform):
+ # Select some browsertime tasks as desktop smoke-tests
+ if "browsertime" in try_name:
+ if "chrome" in try_name:
+ if "tp6" in try_name and "essential" not in try_name:
+ return False
+ return True
+ if "chromium" in try_name:
+ if "tp6" in try_name and "essential" not in try_name:
+ return False
+ return True
+            # chromium-as-release has its own cron
+ if "custom-car" in try_name:
+ return False
+ if "-live" in try_name:
+ return True
+ if "-fis" in try_name:
+ return False
+ if "linux" in platform:
+ if "speedometer" in try_name:
+ return True
+ if "safari" and "benchmark" in try_name:
+ return True
+ # Android selection
+ elif accept_raptor_android_build(platform):
+ if "chrome-m" in try_name and (
+ ("ebay" in try_name and "live" not in try_name)
+ or (
+ "live" in try_name
+ and ("facebook" in try_name or "dailymail" in try_name)
+ )
+ ):
+ return False
+            # Ignore all fennec tests here; we run those weekly
+ if "fennec" in try_name:
+ return False
+ # Only run webrender tests
+ if "chrome-m" not in try_name and "-qr" not in platform:
+ return False
+ # Select live site tests
+ if "-live" in try_name:
+ return True
+ # Select fenix resource usage tests
+ if "fenix" in try_name:
+ # Bug 1816421 disable fission perf tests
+ if "-fis" in try_name:
+ return False
+ if "-power" in try_name:
+ return True
+ # Select geckoview resource usage tests
+ if "geckoview" in try_name:
+ # Bug 1816421 disable fission perf tests
+ if "-fis" in try_name:
+ return False
+ # Run cpu+memory, and power tests
+ cpu_n_memory_task = "-cpu" in try_name and "-memory" in try_name
+ power_task = "-power" in try_name
+ # Ignore cpu+memory+power tests
+ if power_task and cpu_n_memory_task:
+ return False
+ if cpu_n_memory_task:
+ return False
+ if power_task:
+ return "browsertime" in try_name
+ # Select browsertime-specific tests
+ if "browsertime" in try_name:
+ # Don't run android CaR sp tests as we already have a cron for this.
+ if "m-car" in try_name:
+ return False
+ if "speedometer" in try_name:
+ return True
+ return False
+
+ return [l for l, t in full_task_graph.tasks.items() if filter(t)]
+
+
+def make_desktop_nightly_filter(platforms):
+ """Returns a filter that gets all nightly tasks on the given platform."""
+
+ def filter(task, parameters):
+ return all(
+ [
+ filter_on_platforms(task, platforms),
+ filter_for_project(task, parameters),
+ task.attributes.get("shippable", False),
+                # Tests and nightly-only builds don't have `shipping_product` set
+ task.attributes.get("shipping_product")
+ in {None, "firefox", "thunderbird"},
+ task.kind not in {"l10n"}, # no on-change l10n
+ ]
+ )
+
+ return filter
+
+
+@_target_task("sp-perftests")
+def target_tasks_speedometer_tests(full_task_graph, parameters, graph_config):
+ def filter(task):
+ platform = task.attributes.get("test_platform")
+ attributes = task.attributes
+ if attributes.get("unittest_suite") != "raptor":
+ return False
+
+ if accept_raptor_desktop_build(platform) or accept_raptor_android_build(
+ platform
+ ):
+ try_name = attributes.get("raptor_try_name")
+ if (
+ "browsertime" in try_name
+ and "speedometer" in try_name
+ and "chrome" in try_name
+ ):
+ return True
+
+ return [l for l, t in full_task_graph.tasks.items() if filter(t)]
+
+
+@_target_task("nightly_linux")
+def target_tasks_nightly_linux(full_task_graph, parameters, graph_config):
+ """Select the set of tasks required for a nightly build of linux. The
+ nightly build process involves a pipeline of builds, signing,
+ and, eventually, uploading the tasks to balrog."""
+ filter = make_desktop_nightly_filter({"linux64-shippable", "linux-shippable"})
+ return [l for l, t in full_task_graph.tasks.items() if filter(t, parameters)]
+
+
+@_target_task("nightly_macosx")
+def target_tasks_nightly_macosx(full_task_graph, parameters, graph_config):
+ """Select the set of tasks required for a nightly build of macosx. The
+ nightly build process involves a pipeline of builds, signing,
+ and, eventually, uploading the tasks to balrog."""
+ filter = make_desktop_nightly_filter({"macosx64-shippable"})
+ return [l for l, t in full_task_graph.tasks.items() if filter(t, parameters)]
+
+
+@_target_task("nightly_win32")
+def target_tasks_nightly_win32(full_task_graph, parameters, graph_config):
+ """Select the set of tasks required for a nightly build of win32 and win64.
+ The nightly build process involves a pipeline of builds, signing,
+ and, eventually, uploading the tasks to balrog."""
+ filter = make_desktop_nightly_filter({"win32-shippable"})
+ return [l for l, t in full_task_graph.tasks.items() if filter(t, parameters)]
+
+
+@_target_task("nightly_win64")
+def target_tasks_nightly_win64(full_task_graph, parameters, graph_config):
+ """Select the set of tasks required for a nightly build of win32 and win64.
+ The nightly build process involves a pipeline of builds, signing,
+ and, eventually, uploading the tasks to balrog."""
+ filter = make_desktop_nightly_filter({"win64-shippable"})
+ return [l for l, t in full_task_graph.tasks.items() if filter(t, parameters)]
+
+
+@_target_task("nightly_win64_aarch64")
+def target_tasks_nightly_win64_aarch64(full_task_graph, parameters, graph_config):
+ """Select the set of tasks required for a nightly build of win32 and win64.
+ The nightly build process involves a pipeline of builds, signing,
+ and, eventually, uploading the tasks to balrog."""
+ filter = make_desktop_nightly_filter({"win64-aarch64-shippable"})
+ return [l for l, t in full_task_graph.tasks.items() if filter(t, parameters)]
+
+
+@_target_task("nightly_asan")
+def target_tasks_nightly_asan(full_task_graph, parameters, graph_config):
+ """Select the set of tasks required for a nightly build of asan. The
+ nightly build process involves a pipeline of builds, signing,
+ and, eventually, uploading the tasks to balrog."""
+ filter = make_desktop_nightly_filter(
+ {"linux64-asan-reporter-shippable", "win64-asan-reporter-shippable"}
+ )
+ return [l for l, t in full_task_graph.tasks.items() if filter(t, parameters)]
+
+
+@_target_task("daily_releases")
+def target_tasks_daily_releases(full_task_graph, parameters, graph_config):
+ """Select the set of tasks required to identify if we should release.
+ If we determine that we should the task will communicate to ship-it to
+ schedule the release itself."""
+
+ def filter(task):
+ return task.kind in ["maybe-release"]
+
+ return [l for l, t in full_task_graph.tasks.items() if filter(t)]
+
+
+@_target_task("nightly_desktop")
+def target_tasks_nightly_desktop(full_task_graph, parameters, graph_config):
+ """Select the set of tasks required for a nightly build of linux, mac,
+ windows."""
+ index_path = (
+ f"{graph_config['trust-domain']}.v2.{parameters['project']}.revision."
+ f"{parameters['head_rev']}.taskgraph.decision-nightly-desktop"
+ )
+ if os.environ.get("MOZ_AUTOMATION") and retry(
+ index_exists,
+ args=(index_path,),
+ kwargs={
+ "reason": "to avoid triggering multiple nightlies off the same revision",
+ },
+ ):
+ return []
+
+ # Tasks that aren't platform specific
+ release_filter = make_desktop_nightly_filter({None})
+ release_tasks = [
+ l for l, t in full_task_graph.tasks.items() if release_filter(t, parameters)
+ ]
+ # Avoid duplicate tasks.
+ return list(
+ set(target_tasks_nightly_win32(full_task_graph, parameters, graph_config))
+ | set(target_tasks_nightly_win64(full_task_graph, parameters, graph_config))
+ | set(
+ target_tasks_nightly_win64_aarch64(
+ full_task_graph, parameters, graph_config
+ )
+ )
+ | set(target_tasks_nightly_macosx(full_task_graph, parameters, graph_config))
+ | set(target_tasks_nightly_linux(full_task_graph, parameters, graph_config))
+ | set(target_tasks_nightly_asan(full_task_graph, parameters, graph_config))
+ | set(release_tasks)
+ )
+
+
+# Run Searchfox analysis once daily.
+@_target_task("searchfox_index")
+def target_tasks_searchfox(full_task_graph, parameters, graph_config):
+ """Select tasks required for indexing Firefox for Searchfox web site each day"""
+ return [
+ "searchfox-linux64-searchfox/debug",
+ "searchfox-macosx64-searchfox/debug",
+ "searchfox-win64-searchfox/debug",
+ "searchfox-android-armv7-searchfox/debug",
+ "source-test-file-metadata-bugzilla-components",
+ "source-test-file-metadata-test-info-all",
+ "source-test-wpt-metadata-summary",
+ ]
+
+
+# Run build linux64-plain-clang-trunk/opt on mozilla-central/beta with perf tests
+@_target_task("linux64_clang_trunk_perf")
+def target_tasks_build_linux64_clang_trunk_perf(
+ full_task_graph, parameters, graph_config
+):
+ """Select tasks required to run perf test on linux64 build with clang trunk"""
+
+ # Only keep tasks generated from platform `linux1804-64-clang-trunk-qr/opt`
+ def filter(task_label):
+ if "linux1804-64-clang-trunk-qr/opt" in task_label:
+ return True
+ return False
+
+ return [l for l, t in full_task_graph.tasks.items() if filter(t.label)]
+
+
+# Run Updatebot's cron job 4 times daily.
+@_target_task("updatebot_cron")
+def target_tasks_updatebot_cron(full_task_graph, parameters, graph_config):
+ """Select tasks required to run Updatebot's cron job"""
+ return ["updatebot-cron"]
+
+
+@_target_task("customv8_update")
+def target_tasks_customv8_update(full_task_graph, parameters, graph_config):
+ """Select tasks required for building latest d8/v8 version."""
+ return ["toolchain-linux64-custom-v8"]
+
+
+@_target_task("file_update")
+def target_tasks_file_update(full_task_graph, parameters, graph_config):
+ """Select the set of tasks required to perform nightly in-tree file updates"""
+
+ def filter(task):
+ # For now any task in the repo-update kind is ok
+ return task.kind in ["repo-update"]
+
+ return [l for l, t in full_task_graph.tasks.items() if filter(t)]
+
+
+@_target_task("l10n_bump")
+def target_tasks_l10n_bump(full_task_graph, parameters, graph_config):
+ """Select the set of tasks required to perform l10n bumping."""
+
+ def filter(task):
+ # For now any task in the repo-update kind is ok
+ return task.kind in ["l10n-bump"]
+
+ return [l for l, t in full_task_graph.tasks.items() if filter(t)]
+
+
+@_target_task("merge_automation")
+def target_tasks_merge_automation(full_task_graph, parameters, graph_config):
+ """Select the set of tasks required to perform repository merges."""
+
+ def filter(task):
+ # For now any task in the repo-update kind is ok
+ return task.kind in ["merge-automation"]
+
+ return [l for l, t in full_task_graph.tasks.items() if filter(t)]
+
+
+@_target_task("scriptworker_canary")
+def target_tasks_scriptworker_canary(full_task_graph, parameters, graph_config):
+ """Select the set of tasks required to run scriptworker canaries."""
+
+ def filter(task):
+ # For now any task in the repo-update kind is ok
+ return task.kind in ["scriptworker-canary"]
+
+ return [l for l, t in full_task_graph.tasks.items() if filter(t)]
+
+
+@_target_task("cron_bouncer_check")
+def target_tasks_bouncer_check(full_task_graph, parameters, graph_config):
+ """Select the set of tasks required to perform bouncer version verification."""
+
+ def filter(task):
+ if not filter_for_project(task, parameters):
+ return False
+ # For now any task in the repo-update kind is ok
+ return task.kind in ["cron-bouncer-check"]
+
+ return [l for l, t in full_task_graph.tasks.items() if filter(t)]
+
+
+@_target_task("staging_release_builds")
+def target_tasks_staging_release(full_task_graph, parameters, graph_config):
+ """
+ Select all builds that are part of releases.
+ """
+
+ def filter(task):
+ if not task.attributes.get("shipping_product"):
+ return False
+ if parameters["release_type"].startswith(
+ "esr"
+ ) and "android" in task.attributes.get("build_platform", ""):
+ return False
+ if parameters["release_type"] != "beta" and "devedition" in task.attributes.get(
+ "build_platform", ""
+ ):
+ return False
+ if task.attributes.get("shipping_phase") == "build":
+ return True
+ return False
+
+ return [l for l, t in full_task_graph.tasks.items() if filter(t)]
+
+
+@_target_task("release_simulation")
+def target_tasks_release_simulation(full_task_graph, parameters, graph_config):
+ """
+ Select builds that would run on push on a release branch.
+ """
+ project_by_release = {
+ "nightly": "mozilla-central",
+ "beta": "mozilla-beta",
+ "release": "mozilla-release",
+ "esr115": "mozilla-esr115",
+ }
+ target_project = project_by_release.get(parameters["release_type"])
+ if target_project is None:
+ raise Exception("Unknown or unspecified release type in simulation run.")
+
+ def filter_for_target_project(task):
+ """Filter tasks by project. Optionally enable nightlies."""
+ run_on_projects = set(task.attributes.get("run_on_projects", []))
+ return match_run_on_projects(target_project, run_on_projects)
+
+ def filter_out_android_on_esr(task):
+ if parameters["release_type"].startswith(
+ "esr"
+ ) and "android" in task.attributes.get("build_platform", ""):
+ return False
+ return True
+
+ return [
+ l
+ for l, t in full_task_graph.tasks.items()
+ if filter_release_tasks(t, parameters)
+ and filter_out_cron(t, parameters)
+ and filter_for_target_project(t)
+ and filter_out_android_on_esr(t)
+ ]
+
+
+@_target_task("codereview")
+def target_tasks_codereview(full_task_graph, parameters, graph_config):
+ """Select all code review tasks needed to produce a report"""
+
+ def filter(task):
+ # Ending tasks
+ if task.kind in ["code-review"]:
+ return True
+
+ # Analyzer tasks
+ if task.attributes.get("code-review") is True:
+ return True
+
+ return False
+
+ return [l for l, t in full_task_graph.tasks.items() if filter(t)]
+
+
+@_target_task("nothing")
+def target_tasks_nothing(full_task_graph, parameters, graph_config):
+ """Select nothing, for DONTBUILD pushes"""
+ return []
+
+
+@_target_task("daily_beta_perf")
+def target_tasks_daily_beta_perf(full_task_graph, parameters, graph_config):
+ """
+ Select performance tests on the beta branch to be run daily
+ """
+ index_path = (
+ f"{graph_config['trust-domain']}.v2.{parameters['project']}.revision."
+ f"{parameters['head_rev']}.taskgraph.decision-daily-beta-perf"
+ )
+ if os.environ.get("MOZ_AUTOMATION") and retry(
+ index_exists,
+ args=(index_path,),
+ kwargs={
+ "reason": "to avoid triggering multiple daily beta perftests off of the same revision",
+ },
+ ):
+ return []
+
+ def filter(task):
+ platform = task.attributes.get("test_platform")
+ attributes = task.attributes
+ try_name = attributes.get("raptor_try_name") or task.label
+
+ unittest_suite = attributes.get("unittest_suite")
+ if unittest_suite not in ("raptor", "awsy", "talos"):
+ return False
+ if not platform:
+ return False
+
+ # Select beta tasks for awsy
+ if "awsy" in try_name:
+ if accept_awsy_task(try_name, platform):
+ return True
+ return False
+
+ # Select beta tasks for talos
+ if "talos" == unittest_suite:
+ if accept_raptor_desktop_build(platform):
+ if "windows11-64" in platform:
+ if "xperf" in try_name:
+ return True
+ return False
+ if ("mac" in platform or "windows" in platform) and "g3" in try_name:
+ return False
+ if "-swr" in try_name:
+ if "dromaeo" in try_name:
+ return False
+ if "perf-reftest-singletons" in try_name:
+ return False
+ if "realworldweb" in try_name:
+ return False
+ if any(
+ x in try_name
+ for x in ("prof", "ipc", "gli", "sessionrestore", "tabswitch")
+ ):
+ return False
+ return True
+ return False
+
+ if accept_raptor_desktop_build(platform):
+ if "browsertime" and "firefox" in try_name:
+ if "profiling" in try_name:
+ return False
+ if "bytecode" in try_name:
+ return False
+ if "live" in try_name:
+ return False
+ if "webext" in try_name:
+ return False
+ if "unity" in try_name:
+ return False
+ if "wasm" in try_name:
+ return False
+ if "tp6-bench" in try_name:
+ return False
+ if "tp6" in try_name:
+ return True
+ if "benchmark" in try_name:
+ return True
+ elif accept_raptor_android_build(platform):
+ # Select browsertime & geckoview specific tests
+ if "browsertime" and "geckoview" in try_name:
+ if "power" in try_name:
+ return False
+ if "cpu" in try_name:
+ return False
+ if "profiling" in try_name:
+ return False
+ if "-live" in try_name:
+ return False
+ if "speedometer" in try_name:
+ return True
+ if "webgl" in try_name:
+ return True
+ if "tp6m" in try_name:
+ return True
+
+ return False
+
+ return [l for l, t in full_task_graph.tasks.items() if filter(t)]
+
+
+@_target_task("weekly_release_perf")
+def target_tasks_weekly_release_perf(full_task_graph, parameters, graph_config):
+ """
+ Select performance tests on the release branch to be run weekly
+ """
+
+ def filter(task):
+ platform = task.attributes.get("test_platform")
+ attributes = task.attributes
+ try_name = attributes.get("raptor_try_name") or task.label
+
+ if attributes.get("unittest_suite") not in ("raptor", "awsy"):
+ return False
+ if not platform:
+ return False
+
+ # Select release tasks for awsy
+ if "awsy" in try_name:
+ if accept_awsy_task(try_name, platform):
+ return True
+ return False
+
+ # Select browsertime tests
+ if accept_raptor_desktop_build(platform):
+ if "browsertime" and "firefox" in try_name:
+ if "power" in try_name:
+ return False
+ if "profiling" in try_name:
+ return False
+ if "bytecode" in try_name:
+ return False
+ if "live" in try_name:
+ return False
+ if "webext" in try_name:
+ return False
+ if "tp6-bench" in try_name:
+ return False
+ if "tp6" in try_name:
+ return True
+ if "benchmark" in try_name:
+ return True
+ if "youtube-playback" in try_name:
+ return True
+ elif accept_raptor_android_build(platform):
+ # Select browsertime & geckoview specific tests
+ if "browsertime" and "geckoview" in try_name:
+ if "power" in try_name:
+ return False
+ if "cpu" in try_name:
+ return False
+ if "profiling" in try_name:
+ return False
+ if "-live" in try_name:
+ return False
+ if "speedometer" in try_name:
+ return True
+ if "webgl" in try_name:
+ return True
+ if "tp6m" in try_name:
+ return True
+ if "youtube-playback" in try_name:
+ return True
+
+ return False
+
+ return [l for l, t in full_task_graph.tasks.items() if filter(t)]
+
+
+@_target_task("raptor_tp6m")
+def target_tasks_raptor_tp6m(full_task_graph, parameters, graph_config):
+ """
+    Select tasks required for running raptor cold page-load tests on fenix
+ """
+
+ def filter(task):
+ platform = task.attributes.get("build_platform")
+ attributes = task.attributes
+
+ if platform and "android" not in platform:
+ return False
+ if attributes.get("unittest_suite") != "raptor":
+ return False
+ try_name = attributes.get("raptor_try_name")
+ if "-cold" in try_name and "shippable" in platform:
+ # Get browsertime amazon smoke tests
+ if (
+ "browsertime" in try_name
+ and "amazon" in try_name
+ and "search" not in try_name
+ and "fenix" in try_name
+ ):
+ return True
+
+ return [l for l, t in full_task_graph.tasks.items() if filter(t)]
+
+
+@_target_task("backfill_all_browsertime")
+def target_tasks_backfill_all_browsertime(full_task_graph, parameters, graph_config):
+ """
+    Search for revisions that contain patches that were reviewed by perftest
+    reviewers and landed the day before the cron runs, and trigger the
+    backfill-all-browsertime action task on each of them.
+ """
+ from gecko_taskgraph.actions.util import get_decision_task_id, get_pushes
+
+ def date_is_yesterday(date):
+ yesterday = datetime.today() - timedelta(days=1)
+ date = datetime.fromtimestamp(date)
+ return date.date() == yesterday.date()
+
+ def reviewed_by_perftest(push):
+ try:
+ commit_message = get_hg_commit_message(
+ os.path.join(GECKO, graph_config["product-dir"]), rev=push
+ )
+ except Exception as e:
+ print(e)
+ return False
+
+ for line in commit_message.split("\n\n"):
+ if line.lower().startswith("bug ") and "r=" in line:
+ if "perftest-reviewers" in line.split("r=")[-1]:
+ print(line)
+ return True
+ return False
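+
+    # Illustrative match (the commit description is hypothetical): a line
+    # such as "Bug 1234567 - reduce warmup runs r=perftest-reviewers,sparky"
+    # starts with "bug " (case-insensitively), contains "r=", and names
+    # perftest-reviewers, so the push qualifies for backfilling.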
+
+ pushes = get_pushes(
+ project=parameters["head_repository"],
+ end_id=int(parameters["pushlog_id"]),
+ depth=200,
+ full_response=True,
+ )
+ for push_id in sorted([int(p) for p in pushes.keys()], reverse=True):
+ push_rev = pushes[str(push_id)]["changesets"][-1]
+ push_info = find_hg_revision_push_info(
+ "https://hg.mozilla.org/integration/" + parameters["project"], push_rev
+ )
+ pushdate = int(push_info["pushdate"])
+ if date_is_yesterday(pushdate) and reviewed_by_perftest(push_rev):
+ from gecko_taskgraph.actions.util import trigger_action
+
+ print(
+ f"Revision {push_rev} was created yesterday and was reviewed by "
+ f"#perftest-reviewers."
+ )
+ try:
+ push_decision_task_id = get_decision_task_id(
+ parameters["project"], push_id
+ )
+ except Exception:
+ print(f"Could not find decision task for push {push_id}")
+ continue
+ try:
+ trigger_action(
+ action_name="backfill-all-browsertime",
+ # This lets the action know on which push we want to add a new task
+ decision_task_id=push_decision_task_id,
+ )
+ except Exception as e:
+ print(f"Failed to trigger action for {push_rev}: {e}")
+
+ return []
+
+
+@_target_task("condprof")
+def target_tasks_condprof(full_task_graph, parameters, graph_config):
+ """
+ Select tasks required for building conditioned profiles.
+ """
+ for name, task in full_task_graph.tasks.items():
+ if task.kind == "condprof":
+ if "a51" not in name: # bug 1765348
+ yield name
+
+
+@_target_task("system_symbols")
+def target_tasks_system_symbols(full_task_graph, parameters, graph_config):
+ """
+ Select tasks for scraping and uploading system symbols.
+ """
+ for name, task in full_task_graph.tasks.items():
+ if task.kind in [
+ "system-symbols",
+ "system-symbols-upload",
+ "system-symbols-reprocess",
+ ]:
+ yield name
+
+
+@_target_task("perftest")
+def target_tasks_perftest(full_task_graph, parameters, graph_config):
+ """
+ Select perftest tasks we want to run daily
+ """
+ for name, task in full_task_graph.tasks.items():
+ if task.kind != "perftest":
+ continue
+ if task.attributes.get("cron", False):
+ yield name
+
+
+@_target_task("perftest-on-autoland")
+def target_tasks_perftest_autoland(full_task_graph, parameters, graph_config):
+ """
+ Select perftest tasks we want to run daily
+ """
+ for name, task in full_task_graph.tasks.items():
+ if task.kind != "perftest":
+ continue
+ if task.attributes.get("cron", False) and any(
+ test_name in name for test_name in ["view"]
+ ):
+ yield name
+
+
+@_target_task("l10n-cross-channel")
+def target_tasks_l10n_cross_channel(full_task_graph, parameters, graph_config):
+ """Select the set of tasks required to run l10n cross-channel."""
+
+ def filter(task):
+ return task.kind in ["l10n-cross-channel"]
+
+ return [l for l, t in full_task_graph.tasks.items() if filter(t)]
+
+
+@_target_task("are-we-esmified-yet")
+def target_tasks_are_we_esmified_yet(full_task_graph, parameters, graph_config):
+ """
+    Select the task tracking the progress of the esmification project.
+ """
+ return [
+ l for l, t in full_task_graph.tasks.items() if t.kind == "are-we-esmified-yet"
+ ]
+
+
+@_target_task("eslint-build")
+def target_tasks_eslint_build(full_task_graph, parameters, graph_config):
+ """Select the task to run additional ESLint rules which require a build."""
+
+ for name, task in full_task_graph.tasks.items():
+ if task.kind != "source-test":
+ continue
+ if "eslint-build" in name:
+ yield name
+
+
+@_target_task("holly_tasks")
+def target_tasks_holly(full_task_graph, parameters, graph_config):
+ """Bug 1814661: only run updatebot tasks on holly"""
+
+ def filter(task):
+ return task.kind == "updatebot"
+
+ return [l for l, t in full_task_graph.tasks.items() if filter(t)]
+
+
+@_target_task("snap_upstream_tests")
+def target_tasks_snap_upstream_tests(full_task_graph, parameters, graph_config):
+ """
+    Select tasks for testing the Snap package built as upstream. Omit -try
+    because it does not really make sense on an m-c cron.
+ """
+ for name, task in full_task_graph.tasks.items():
+ if "snap-upstream-test" in name and not "-try" in name:
+ yield name
diff --git a/taskcluster/gecko_taskgraph/test/__init__.py b/taskcluster/gecko_taskgraph/test/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/test/__init__.py
diff --git a/taskcluster/gecko_taskgraph/test/automationrelevance.json b/taskcluster/gecko_taskgraph/test/automationrelevance.json
new file mode 100644
index 0000000000..3bdfa9ed9e
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/test/automationrelevance.json
@@ -0,0 +1,358 @@
+{
+ "changesets": [
+ {
+ "author": "James Long <longster@gmail.com>",
+ "backsoutnodes": [],
+ "bugs": [
+ {
+ "no": "1300866",
+ "url": "https://bugzilla.mozilla.org/show_bug.cgi?id=1300866"
+ }
+ ],
+ "date": [1473196655.0, 14400],
+ "desc": "Bug 1300866 - expose devtools require to new debugger r=jlast,bgrins",
+ "extra": {
+ "branch": "default"
+ },
+ "files": ["devtools/client/debugger/index.html"],
+ "node": "ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
+ "parents": ["37c9349b4e8167a61b08b7e119c21ea177b98942"],
+ "perfherderurl": "https://treeherder.mozilla.org/perf.html#/compare?originalProject=mozilla-central&originalRevision=a14f88a9af7a59e677478694bafd9375ac53683e&newProject=mozilla-central&newRevision=ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
+ "pushdate": [1473261248, 0],
+ "pushhead": "a14f88a9af7a59e677478694bafd9375ac53683e",
+ "pushid": 30664,
+ "pushnodes": [
+ "ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
+ "73a6a267a50a0e1c41e689b265ad3eebe43d7ac6",
+ "16a1a91f9269ab95dd83eb29dc5d0227665f7d94",
+ "99c542fa43a72ee863c813b5624048d1b443549b",
+ "a6b6a93eb41a05e310a11f0172f01ba9b21d3eac",
+ "541c9086c0f27fba60beecc9bc94543103895c86",
+ "041a925171e431bf51fb50193ab19d156088c89a",
+ "a14f88a9af7a59e677478694bafd9375ac53683e"
+ ],
+ "pushuser": "cbook@mozilla.com",
+ "rev": 312890,
+ "reviewers": [
+ {
+ "name": "jlast",
+ "revset": "reviewer(jlast)"
+ },
+ {
+ "name": "bgrins",
+ "revset": "reviewer(bgrins)"
+ }
+ ],
+ "treeherderrepo": "mozilla-central",
+ "treeherderrepourl": "https://treeherder.mozilla.org/#/jobs?repo=mozilla-central"
+ },
+ {
+ "author": "Wes Kocher <wkocher@mozilla.com>",
+ "backsoutnodes": [],
+ "bugs": [],
+ "date": [1473208638.0, 25200],
+ "desc": "Merge m-c to fx-team, a=merge",
+ "extra": {
+ "branch": "default"
+ },
+ "files": ["taskcluster/scripts/builder/build-l10n.sh"],
+ "node": "73a6a267a50a0e1c41e689b265ad3eebe43d7ac6",
+ "parents": [
+ "ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
+ "91c2b9d5c1354ca79e5b174591dbb03b32b15bbf"
+ ],
+ "perfherderurl": "https://treeherder.mozilla.org/perf.html#/compare?originalProject=mozilla-central&originalRevision=a14f88a9af7a59e677478694bafd9375ac53683e&newProject=mozilla-central&newRevision=ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
+ "pushdate": [1473261248, 0],
+ "pushhead": "a14f88a9af7a59e677478694bafd9375ac53683e",
+ "pushid": 30664,
+ "pushnodes": [
+ "ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
+ "73a6a267a50a0e1c41e689b265ad3eebe43d7ac6",
+ "16a1a91f9269ab95dd83eb29dc5d0227665f7d94",
+ "99c542fa43a72ee863c813b5624048d1b443549b",
+ "a6b6a93eb41a05e310a11f0172f01ba9b21d3eac",
+ "541c9086c0f27fba60beecc9bc94543103895c86",
+ "041a925171e431bf51fb50193ab19d156088c89a",
+ "a14f88a9af7a59e677478694bafd9375ac53683e"
+ ],
+ "pushuser": "cbook@mozilla.com",
+ "rev": 312891,
+ "reviewers": [
+ {
+ "name": "merge",
+ "revset": "reviewer(merge)"
+ }
+ ],
+ "treeherderrepo": "mozilla-central",
+ "treeherderrepourl": "https://treeherder.mozilla.org/#/jobs?repo=mozilla-central"
+ },
+ {
+ "author": "Towkir Ahmed <towkir17@gmail.com>",
+ "backsoutnodes": [],
+ "bugs": [
+ {
+ "no": "1296648",
+ "url": "https://bugzilla.mozilla.org/show_bug.cgi?id=1296648"
+ }
+ ],
+ "date": [1472957580.0, 14400],
+ "desc": "Bug 1296648 - Fix direction of .ruleview-expander.theme-twisty in RTL locales. r=ntim",
+ "extra": {
+ "branch": "default"
+ },
+ "files": ["devtools/client/themes/rules.css"],
+ "node": "16a1a91f9269ab95dd83eb29dc5d0227665f7d94",
+ "parents": ["73a6a267a50a0e1c41e689b265ad3eebe43d7ac6"],
+ "perfherderurl": "https://treeherder.mozilla.org/perf.html#/compare?originalProject=mozilla-central&originalRevision=a14f88a9af7a59e677478694bafd9375ac53683e&newProject=mozilla-central&newRevision=ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
+ "pushdate": [1473261248, 0],
+ "pushhead": "a14f88a9af7a59e677478694bafd9375ac53683e",
+ "pushid": 30664,
+ "pushnodes": [
+ "ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
+ "73a6a267a50a0e1c41e689b265ad3eebe43d7ac6",
+ "16a1a91f9269ab95dd83eb29dc5d0227665f7d94",
+ "99c542fa43a72ee863c813b5624048d1b443549b",
+ "a6b6a93eb41a05e310a11f0172f01ba9b21d3eac",
+ "541c9086c0f27fba60beecc9bc94543103895c86",
+ "041a925171e431bf51fb50193ab19d156088c89a",
+ "a14f88a9af7a59e677478694bafd9375ac53683e"
+ ],
+ "pushuser": "cbook@mozilla.com",
+ "rev": 312892,
+ "reviewers": [
+ {
+ "name": "ntim",
+ "revset": "reviewer(ntim)"
+ }
+ ],
+ "treeherderrepo": "mozilla-central",
+ "treeherderrepourl": "https://treeherder.mozilla.org/#/jobs?repo=mozilla-central"
+ },
+ {
+ "author": "Oriol <oriol-bugzilla@hotmail.com>",
+ "backsoutnodes": [],
+ "bugs": [
+ {
+ "no": "1300336",
+ "url": "https://bugzilla.mozilla.org/show_bug.cgi?id=1300336"
+ }
+ ],
+ "date": [1472921160.0, 14400],
+ "desc": "Bug 1300336 - Allow pseudo-arrays to have a length property. r=fitzgen",
+ "extra": {
+ "branch": "default"
+ },
+ "files": [
+ "devtools/client/webconsole/test/browser_webconsole_output_06.js",
+ "devtools/server/actors/object.js"
+ ],
+ "node": "99c542fa43a72ee863c813b5624048d1b443549b",
+ "parents": ["16a1a91f9269ab95dd83eb29dc5d0227665f7d94"],
+ "perfherderurl": "https://treeherder.mozilla.org/perf.html#/compare?originalProject=mozilla-central&originalRevision=a14f88a9af7a59e677478694bafd9375ac53683e&newProject=mozilla-central&newRevision=ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
+ "pushdate": [1473261248, 0],
+ "pushhead": "a14f88a9af7a59e677478694bafd9375ac53683e",
+ "pushid": 30664,
+ "pushnodes": [
+ "ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
+ "73a6a267a50a0e1c41e689b265ad3eebe43d7ac6",
+ "16a1a91f9269ab95dd83eb29dc5d0227665f7d94",
+ "99c542fa43a72ee863c813b5624048d1b443549b",
+ "a6b6a93eb41a05e310a11f0172f01ba9b21d3eac",
+ "541c9086c0f27fba60beecc9bc94543103895c86",
+ "041a925171e431bf51fb50193ab19d156088c89a",
+ "a14f88a9af7a59e677478694bafd9375ac53683e"
+ ],
+ "pushuser": "cbook@mozilla.com",
+ "rev": 312893,
+ "reviewers": [
+ {
+ "name": "fitzgen",
+ "revset": "reviewer(fitzgen)"
+ }
+ ],
+ "treeherderrepo": "mozilla-central",
+ "treeherderrepourl": "https://treeherder.mozilla.org/#/jobs?repo=mozilla-central"
+ },
+ {
+ "author": "Ruturaj Vartak <ruturaj@gmail.com>",
+ "backsoutnodes": [],
+ "bugs": [
+ {
+ "no": "1295010",
+ "url": "https://bugzilla.mozilla.org/show_bug.cgi?id=1295010"
+ }
+ ],
+ "date": [1472854020.0, -7200],
+ "desc": "Bug 1295010 - Don't move the eyedropper to the out of browser window by keyboard navigation. r=pbro\n\nMozReview-Commit-ID: vBwmSxVNXK",
+ "extra": {
+ "amend_source": "6885024ef00cfa33d73c59dc03c48ebcda9ccbdd",
+ "branch": "default",
+ "histedit_source": "c43167f0a7cbe9f4c733b15da726e5150a9529ba",
+ "rebase_source": "b74df421630fc46dab6b6cc026bf3e0ae6b4a651"
+ },
+ "files": [
+ "devtools/client/inspector/test/browser_inspector_highlighter-eyedropper-events.js",
+ "devtools/client/inspector/test/head.js",
+ "devtools/server/actors/highlighters/eye-dropper.js"
+ ],
+ "node": "a6b6a93eb41a05e310a11f0172f01ba9b21d3eac",
+ "parents": ["99c542fa43a72ee863c813b5624048d1b443549b"],
+ "perfherderurl": "https://treeherder.mozilla.org/perf.html#/compare?originalProject=mozilla-central&originalRevision=a14f88a9af7a59e677478694bafd9375ac53683e&newProject=mozilla-central&newRevision=ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
+ "pushdate": [1473261248, 0],
+ "pushhead": "a14f88a9af7a59e677478694bafd9375ac53683e",
+ "pushid": 30664,
+ "pushnodes": [
+ "ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
+ "73a6a267a50a0e1c41e689b265ad3eebe43d7ac6",
+ "16a1a91f9269ab95dd83eb29dc5d0227665f7d94",
+ "99c542fa43a72ee863c813b5624048d1b443549b",
+ "a6b6a93eb41a05e310a11f0172f01ba9b21d3eac",
+ "541c9086c0f27fba60beecc9bc94543103895c86",
+ "041a925171e431bf51fb50193ab19d156088c89a",
+ "a14f88a9af7a59e677478694bafd9375ac53683e"
+ ],
+ "pushuser": "cbook@mozilla.com",
+ "rev": 312894,
+ "reviewers": [
+ {
+ "name": "pbro",
+ "revset": "reviewer(pbro)"
+ }
+ ],
+ "treeherderrepo": "mozilla-central",
+ "treeherderrepourl": "https://treeherder.mozilla.org/#/jobs?repo=mozilla-central"
+ },
+ {
+ "author": "Matteo Ferretti <mferretti@mozilla.com>",
+ "backsoutnodes": [],
+ "bugs": [
+ {
+ "no": "1299154",
+ "url": "https://bugzilla.mozilla.org/show_bug.cgi?id=1299154"
+ }
+ ],
+ "date": [1472629906.0, -7200],
+ "desc": "Bug 1299154 - added Set/GetOverrideDPPX to restorefromHistory; r=mstange\n\nMozReview-Commit-ID: AsyAcG3Igbn\n",
+ "extra": {
+ "branch": "default",
+ "committer": "Matteo Ferretti <mferretti@mozilla.com> 1473236511 -7200"
+ },
+ "files": [
+ "docshell/base/nsDocShell.cpp",
+ "dom/tests/mochitest/general/test_contentViewer_overrideDPPX.html"
+ ],
+ "node": "541c9086c0f27fba60beecc9bc94543103895c86",
+ "parents": ["a6b6a93eb41a05e310a11f0172f01ba9b21d3eac"],
+ "perfherderurl": "https://treeherder.mozilla.org/perf.html#/compare?originalProject=mozilla-central&originalRevision=a14f88a9af7a59e677478694bafd9375ac53683e&newProject=mozilla-central&newRevision=ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
+ "pushdate": [1473261248, 0],
+ "pushhead": "a14f88a9af7a59e677478694bafd9375ac53683e",
+ "pushid": 30664,
+ "pushnodes": [
+ "ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
+ "73a6a267a50a0e1c41e689b265ad3eebe43d7ac6",
+ "16a1a91f9269ab95dd83eb29dc5d0227665f7d94",
+ "99c542fa43a72ee863c813b5624048d1b443549b",
+ "a6b6a93eb41a05e310a11f0172f01ba9b21d3eac",
+ "541c9086c0f27fba60beecc9bc94543103895c86",
+ "041a925171e431bf51fb50193ab19d156088c89a",
+ "a14f88a9af7a59e677478694bafd9375ac53683e"
+ ],
+ "pushuser": "cbook@mozilla.com",
+ "rev": 312895,
+ "reviewers": [
+ {
+ "name": "mstange",
+ "revset": "reviewer(mstange)"
+ }
+ ],
+ "treeherderrepo": "mozilla-central",
+ "treeherderrepourl": "https://treeherder.mozilla.org/#/jobs?repo=mozilla-central"
+ },
+ {
+ "author": "Patrick Brosset <pbrosset@mozilla.com>",
+ "backsoutnodes": [],
+ "bugs": [
+ {
+ "no": "1295010",
+ "url": "https://bugzilla.mozilla.org/show_bug.cgi?id=1295010"
+ }
+ ],
+ "date": [1473239449.0, -7200],
+ "desc": "Bug 1295010 - Removed testActor from highlighterHelper in inspector tests; r=me\n\nMozReview-Commit-ID: GMksl81iGcp",
+ "extra": {
+ "branch": "default"
+ },
+ "files": [
+ "devtools/client/inspector/test/browser_inspector_highlighter-eyedropper-events.js",
+ "devtools/client/inspector/test/head.js"
+ ],
+ "node": "041a925171e431bf51fb50193ab19d156088c89a",
+ "parents": ["541c9086c0f27fba60beecc9bc94543103895c86"],
+ "perfherderurl": "https://treeherder.mozilla.org/perf.html#/compare?originalProject=mozilla-central&originalRevision=a14f88a9af7a59e677478694bafd9375ac53683e&newProject=mozilla-central&newRevision=ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
+ "pushdate": [1473261248, 0],
+ "pushhead": "a14f88a9af7a59e677478694bafd9375ac53683e",
+ "pushid": 30664,
+ "pushnodes": [
+ "ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
+ "73a6a267a50a0e1c41e689b265ad3eebe43d7ac6",
+ "16a1a91f9269ab95dd83eb29dc5d0227665f7d94",
+ "99c542fa43a72ee863c813b5624048d1b443549b",
+ "a6b6a93eb41a05e310a11f0172f01ba9b21d3eac",
+ "541c9086c0f27fba60beecc9bc94543103895c86",
+ "041a925171e431bf51fb50193ab19d156088c89a",
+ "a14f88a9af7a59e677478694bafd9375ac53683e"
+ ],
+ "pushuser": "cbook@mozilla.com",
+ "rev": 312896,
+ "reviewers": [
+ {
+ "name": "me",
+ "revset": "reviewer(me)"
+ }
+ ],
+ "treeherderrepo": "mozilla-central",
+ "treeherderrepourl": "https://treeherder.mozilla.org/#/jobs?repo=mozilla-central"
+ },
+ {
+ "author": "Carsten \"Tomcat\" Book <cbook@mozilla.com>",
+ "backsoutnodes": [],
+ "bugs": [],
+ "date": [1473261233.0, -7200],
+ "desc": "merge fx-team to mozilla-central a=merge",
+ "extra": {
+ "branch": "default"
+ },
+ "files": [],
+ "node": "a14f88a9af7a59e677478694bafd9375ac53683e",
+ "parents": [
+ "3d0b41fdd93bd8233745eadb4e0358e385bf2cb9",
+ "041a925171e431bf51fb50193ab19d156088c89a"
+ ],
+ "perfherderurl": "https://treeherder.mozilla.org/perf.html#/compare?originalProject=mozilla-central&originalRevision=a14f88a9af7a59e677478694bafd9375ac53683e&newProject=mozilla-central&newRevision=ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
+ "pushdate": [1473261248, 0],
+ "pushhead": "a14f88a9af7a59e677478694bafd9375ac53683e",
+ "pushid": 30664,
+ "pushnodes": [
+ "ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
+ "73a6a267a50a0e1c41e689b265ad3eebe43d7ac6",
+ "16a1a91f9269ab95dd83eb29dc5d0227665f7d94",
+ "99c542fa43a72ee863c813b5624048d1b443549b",
+ "a6b6a93eb41a05e310a11f0172f01ba9b21d3eac",
+ "541c9086c0f27fba60beecc9bc94543103895c86",
+ "041a925171e431bf51fb50193ab19d156088c89a",
+ "a14f88a9af7a59e677478694bafd9375ac53683e"
+ ],
+ "pushuser": "cbook@mozilla.com",
+ "rev": 312897,
+ "reviewers": [
+ {
+ "name": "merge",
+ "revset": "reviewer(merge)"
+ }
+ ],
+ "treeherderrepo": "mozilla-central",
+ "treeherderrepourl": "https://treeherder.mozilla.org/#/jobs?repo=mozilla-central"
+ }
+ ],
+ "visible": true
+}
diff --git a/taskcluster/gecko_taskgraph/test/conftest.py b/taskcluster/gecko_taskgraph/test/conftest.py
new file mode 100644
index 0000000000..360c2da65e
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/test/conftest.py
@@ -0,0 +1,218 @@
+# Any copyright is dedicated to the public domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+import os
+
+import pytest
+from mach.logging import LoggingManager
+from responses import RequestsMock
+from taskgraph import generator as generator_mod
+from taskgraph import target_tasks as target_tasks_mod
+from taskgraph.config import GraphConfig, load_graph_config
+from taskgraph.generator import Kind, TaskGraphGenerator
+from taskgraph.optimize import base as optimize_mod
+from taskgraph.optimize.base import OptimizationStrategy
+from taskgraph.parameters import Parameters
+
+from gecko_taskgraph import GECKO
+from gecko_taskgraph.actions import render_actions_json
+from gecko_taskgraph.util.templates import merge
+
+
+@pytest.fixture
+def responses():
+ with RequestsMock() as rsps:
+ yield rsps
+
+
+@pytest.fixture(scope="session", autouse=True)
+def patch_perfherder(request):
+ from _pytest.monkeypatch import MonkeyPatch
+
+ m = MonkeyPatch()
+ m.setattr(
+ "gecko_taskgraph.util.bugbug._write_perfherder_data",
+ lambda lower_is_better: None,
+ )
+ yield
+ m.undo()
+
+
+@pytest.fixture(scope="session", autouse=True)
+def enable_logging():
+ """Ensure logs from gecko_taskgraph are displayed when a test fails."""
+ lm = LoggingManager()
+ lm.add_terminal_logging()
+
+
+@pytest.fixture(scope="session")
+def graph_config():
+ return load_graph_config(os.path.join(GECKO, "taskcluster", "ci"))
+
+
+@pytest.fixture(scope="session")
+def actions_json(graph_config):
+ decision_task_id = "abcdef"
+ return render_actions_json(Parameters(strict=False), graph_config, decision_task_id)
+
+
+def fake_loader(kind, path, config, parameters, loaded_tasks):
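+    """Yield three small tasks for the kind, each depending on the previous
+    one, so graph-generation tests have a predictable chain to assert on."""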
+ for i in range(3):
+ dependencies = {}
+ if i >= 1:
+ dependencies["prev"] = f"{kind}-t-{i - 1}"
+
+ task = {
+ "kind": kind,
+ "label": f"{kind}-t-{i}",
+ "description": f"{kind} task {i}",
+ "attributes": {"_tasknum": str(i)},
+ "task": {
+ "i": i,
+ "metadata": {"name": f"t-{i}"},
+ "deadline": "soon",
+ },
+ "dependencies": dependencies,
+ }
+ if "job-defaults" in config:
+ task = merge(config["job-defaults"], task)
+ yield task
+
+
+class FakeTransform:
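+    """Attribute-backed stand-in for a kind config, used by the 'fullfake' kind."""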
+ transforms = []
+ params = {}
+
+ def __init__(self):
+ pass
+
+ @classmethod
+    def get(cls, field, default):
+        try:
+            return getattr(cls, field)
+ except AttributeError:
+ return default
+
+
+class FakeKind(Kind):
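+    """Kind that loads tasks via fake_loader and records each loaded kind
+    name in FakeKind.loaded_kinds for later assertions."""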
+ def _get_loader(self):
+ return fake_loader
+
+ def load_tasks(self, parameters, loaded_tasks, write_artifacts):
+ FakeKind.loaded_kinds.append(self.name)
+ return super().load_tasks(parameters, loaded_tasks, write_artifacts)
+
+ @staticmethod
+ def create(name, extra_config, graph_config):
+ if name == "fullfake":
+ config = FakeTransform()
+ else:
+ config = {"transforms": []}
+ if extra_config:
+ config.update(extra_config)
+ return FakeKind(name, "/fake", config, graph_config)
+
+
+class WithFakeKind(TaskGraphGenerator):
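+    """TaskGraphGenerator whose kinds come from the '_kinds' parameter."""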
+ def _load_kinds(self, graph_config, target_kinds=None):
+ for kind_name, cfg in self.parameters["_kinds"]:
+ yield FakeKind.create(kind_name, cfg, graph_config)
+
+
+def fake_load_graph_config(root_dir):
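+    """Return a minimal GraphConfig with registration stubbed to a no-op."""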
+ graph_config = GraphConfig(
+ {"trust-domain": "test-domain", "taskgraph": {}}, root_dir
+ )
+ graph_config.__dict__["register"] = lambda: None
+ return graph_config
+
+
+class FakeParameters(dict):
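+    """Minimal dict-backed Parameters stand-in with a stubbed file_url."""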
+ strict = True
+
+ def file_url(self, path, pretty=False):
+ return ""
+
+
+class FakeOptimization(OptimizationStrategy):
+ description = "Fake strategy for testing"
+
+ def __init__(self, mode, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self.mode = mode
+
+ def should_remove_task(self, task, params, arg):
+ if self.mode == "always":
+ return True
+ if self.mode == "even":
+ return task.task["i"] % 2 == 0
+ if self.mode == "odd":
+ return task.task["i"] % 2 != 0
+ return False
+
+
+@pytest.fixture
+def maketgg(monkeypatch):
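+    """Return a factory that builds a TaskGraphGenerator wired with fake
+    kinds, a stub target-tasks method, and fake optimization strategies."""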
+ def inner(target_tasks=None, kinds=[("_fake", [])], params=None):
+ params = params or {}
+ FakeKind.loaded_kinds = loaded_kinds = []
+ target_tasks = target_tasks or []
+
+ def target_tasks_method(full_task_graph, parameters, graph_config):
+ return target_tasks
+
+ fake_registry = {
+ mode: FakeOptimization(mode) for mode in ("always", "never", "even", "odd")
+ }
+
+ target_tasks_mod._target_task_methods["test_method"] = target_tasks_method
+ monkeypatch.setattr(optimize_mod, "registry", fake_registry)
+
+ parameters = FakeParameters(
+ {
+ "_kinds": kinds,
+ "backstop": False,
+ "enable_always_target": False,
+ "target_tasks_method": "test_method",
+ "test_manifest_loader": "default",
+ "try_mode": None,
+ "try_task_config": {},
+ "tasks_for": "hg-push",
+ "project": "mozilla-central",
+ }
+ )
+ parameters.update(params)
+
+ monkeypatch.setattr(generator_mod, "load_graph_config", fake_load_graph_config)
+
+ tgg = WithFakeKind("/root", parameters)
+ tgg.loaded_kinds = loaded_kinds
+ return tgg
+
+ return inner
+
+
+@pytest.fixture
+def run_transform():
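+    """Return a helper that runs a transform over tasks with a fake kind config."""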
+ graph_config = fake_load_graph_config("/root")
+ kind = FakeKind.create("fake", {}, graph_config)
+
+ def inner(xform, tasks):
+ if isinstance(tasks, dict):
+ tasks = [tasks]
+ return xform(kind.config, tasks)
+
+ return inner
+
+
+@pytest.fixture
+def run_full_config_transform():
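+    """Like run_transform, but using the attribute-backed 'fullfake' kind config."""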
+ graph_config = fake_load_graph_config("/root")
+ kind = FakeKind.create("fullfake", {}, graph_config)
+
+ def inner(xform, tasks):
+ if isinstance(tasks, dict):
+ tasks = [tasks]
+ return xform(kind.config, tasks)
+
+ return inner
diff --git a/taskcluster/gecko_taskgraph/test/docs/kinds.rst b/taskcluster/gecko_taskgraph/test/docs/kinds.rst
new file mode 100644
index 0000000000..fdc16db1e3
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/test/docs/kinds.rst
@@ -0,0 +1,12 @@
+Task Kinds
+==========
+
+Fake task kind documentation.
+
+newkind
+-------
+Kind found in separate doc dir.
+
+anotherkind
+-----------
+Here's another.
diff --git a/taskcluster/gecko_taskgraph/test/docs/parameters.rst b/taskcluster/gecko_taskgraph/test/docs/parameters.rst
new file mode 100644
index 0000000000..f943f48e69
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/test/docs/parameters.rst
@@ -0,0 +1,14 @@
+==========
+Parameters
+==========
+
+Fake parameters documentation.
+
+Heading
+-------
+
+``newparameter``
+ A new parameter that could be defined in a project.
+
+``anotherparameter``
+ And here is another one.
diff --git a/taskcluster/gecko_taskgraph/test/python.toml b/taskcluster/gecko_taskgraph/test/python.toml
new file mode 100644
index 0000000000..597a02d8aa
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/test/python.toml
@@ -0,0 +1,42 @@
+[DEFAULT]
+subsuite = "taskgraph"
+
+["test_actions_util.py"]
+
+["test_decision.py"]
+
+["test_files_changed.py"]
+
+["test_main.py"]
+
+["test_morph.py"]
+
+["test_optimize_strategies.py"]
+
+["test_target_tasks.py"]
+
+["test_taskcluster_yml.py"]
+
+["test_transforms_job.py"]
+
+["test_transforms_test.py"]
+
+["test_try_option_syntax.py"]
+
+["test_util_attributes.py"]
+
+["test_util_backstop.py"]
+
+["test_util_bugbug.py"]
+
+["test_util_chunking.py"]
+
+["test_util_docker.py"]
+
+["test_util_partials.py"]
+
+["test_util_runnable_jobs.py"]
+
+["test_util_templates.py"]
+
+["test_util_verify.py"]
diff --git a/taskcluster/gecko_taskgraph/test/test_actions_util.py b/taskcluster/gecko_taskgraph/test/test_actions_util.py
new file mode 100644
index 0000000000..7c38caea57
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/test/test_actions_util.py
@@ -0,0 +1,179 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import json
+import unittest
+from pprint import pprint
+from unittest.mock import patch
+
+import pytest
+from mozunit import MockedOpen, main
+from taskgraph import create
+from taskgraph.util import taskcluster
+
+from gecko_taskgraph import actions
+from gecko_taskgraph.actions.util import combine_task_graph_files, relativize_datestamps
+from gecko_taskgraph.decision import read_artifact
+
+TASK_DEF = {
+ "created": "2017-10-10T18:33:03.460Z",
+ # note that this is not an even number of seconds off!
+ "deadline": "2017-10-11T18:33:03.461Z",
+ "dependencies": [],
+ "expires": "2018-10-10T18:33:04.461Z",
+ "payload": {
+ "artifacts": {
+ "public": {
+ "expires": "2018-10-10T18:33:03.463Z",
+ "path": "/builds/worker/artifacts",
+ "type": "directory",
+ },
+ },
+ "maxRunTime": 1800,
+ },
+}
+
+
+@pytest.fixture(scope="module", autouse=True)
+def enable_test_mode():
+ create.testing = True
+ taskcluster.testing = True
+
+
+class TestRelativize(unittest.TestCase):
+ def test_relativize(self):
+ rel = relativize_datestamps(TASK_DEF)
+
+        pprint(rel)
+ assert rel["created"] == {"relative-datestamp": "0 seconds"}
+ assert rel["deadline"] == {"relative-datestamp": "86400 seconds"}
+ assert rel["expires"] == {"relative-datestamp": "31536001 seconds"}
+ assert rel["payload"]["artifacts"]["public"]["expires"] == {
+ "relative-datestamp": "31536000 seconds"
+ }
+
+
+class TestCombineTaskGraphFiles(unittest.TestCase):
+ def test_no_suffixes(self):
+ with MockedOpen({}):
+ combine_task_graph_files([])
+            self.assertRaises(Exception, open, "artifacts/to-run.json")
+
+ @patch("gecko_taskgraph.actions.util.rename_artifact")
+ def test_one_suffix(self, rename_artifact):
+ combine_task_graph_files(["0"])
+ rename_artifact.assert_any_call("task-graph-0.json", "task-graph.json")
+ rename_artifact.assert_any_call(
+ "label-to-taskid-0.json", "label-to-taskid.json"
+ )
+ rename_artifact.assert_any_call("to-run-0.json", "to-run.json")
+
+ def test_several_suffixes(self):
+ files = {
+ "artifacts/task-graph-0.json": json.dumps({"taska": {}}),
+ "artifacts/label-to-taskid-0.json": json.dumps({"taska": "TASKA"}),
+ "artifacts/to-run-0.json": json.dumps(["taska"]),
+ "artifacts/task-graph-1.json": json.dumps({"taskb": {}}),
+ "artifacts/label-to-taskid-1.json": json.dumps({"taskb": "TASKB"}),
+ "artifacts/to-run-1.json": json.dumps(["taskb"]),
+ }
+ with MockedOpen(files):
+ combine_task_graph_files(["0", "1"])
+ self.assertEqual(
+ read_artifact("task-graph.json"),
+ {
+ "taska": {},
+ "taskb": {},
+ },
+ )
+ self.assertEqual(
+ read_artifact("label-to-taskid.json"),
+ {
+ "taska": "TASKA",
+ "taskb": "TASKB",
+ },
+ )
+ self.assertEqual(
+ sorted(read_artifact("to-run.json")),
+ [
+ "taska",
+ "taskb",
+ ],
+ )
+
+
+def is_subset(subset, superset):
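+    """Recursively check containment: dicts are compared key-by-key, lists and
+    sets require every item to match some superset item, and strings use
+    substring containment; anything else falls back to equality.
+
+    For example, is_subset({"a": 1}, {"a": 1, "b": 2}) is True.
+    """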
+ if isinstance(subset, dict):
+ return all(
+ key in superset and is_subset(val, superset[key])
+ for key, val in subset.items()
+ )
+
+ if isinstance(subset, list) or isinstance(subset, set):
+ return all(
+ any(is_subset(subitem, superitem) for superitem in superset)
+ for subitem in subset
+ )
+
+ if isinstance(subset, str):
+ return subset in superset
+
+ # assume that subset is a plain value if none of the above match
+ return subset == superset
+
+
+@pytest.mark.parametrize(
+ "task_def,expected",
+ [
+ pytest.param(
+ {"tags": {"kind": "decision-task"}},
+ {
+ "hookPayload": {
+ "decision": {
+ "action": {"cb_name": "retrigger-decision"},
+ },
+ },
+ },
+ id="retrigger_decision",
+ ),
+ pytest.param(
+ {"tags": {"action": "backfill-task"}},
+ {
+ "hookPayload": {
+ "decision": {
+ "action": {"cb_name": "retrigger-decision"},
+ },
+ },
+ },
+ id="retrigger_backfill",
+ ),
+ ],
+)
+def test_extract_applicable_action(
+ responses, monkeypatch, actions_json, task_def, expected
+):
+ base_url = "https://taskcluster"
+ decision_task_id = "dddd"
+ task_id = "tttt"
+
+ monkeypatch.setenv("TASK_ID", task_id)
+ monkeypatch.setenv("TASKCLUSTER_ROOT_URL", base_url)
+ monkeypatch.setenv("TASKCLUSTER_PROXY_URL", base_url)
+ responses.add(
+ responses.GET,
+ f"{base_url}/api/queue/v1/task/{task_id}",
+ status=200,
+ json=task_def,
+ )
+ action = actions.util._extract_applicable_action(
+ actions_json, "retrigger", decision_task_id, task_id
+ )
+ pprint(action, indent=2)
+ assert is_subset(expected, action)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/taskcluster/gecko_taskgraph/test/test_decision.py b/taskcluster/gecko_taskgraph/test/test_decision.py
new file mode 100644
index 0000000000..8440b8e13f
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/test/test_decision.py
@@ -0,0 +1,175 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import json
+import os
+import shutil
+import tempfile
+import unittest
+from unittest.mock import patch
+
+import pytest
+from mozunit import MockedOpen, main
+from taskgraph.util.yaml import load_yaml
+
+from gecko_taskgraph import decision
+from gecko_taskgraph.parameters import register_parameters
+
+FAKE_GRAPH_CONFIG = {"product-dir": "browser", "taskgraph": {}}
+
+
+@pytest.fixture(scope="module", autouse=True)
+def register():
+ register_parameters()
+
+
+class TestDecision(unittest.TestCase):
+ def test_write_artifact_json(self):
+ data = [{"some": "data"}]
+ tmpdir = tempfile.mkdtemp()
+ try:
+ decision.ARTIFACTS_DIR = os.path.join(tmpdir, "artifacts")
+ decision.write_artifact("artifact.json", data)
+ with open(os.path.join(decision.ARTIFACTS_DIR, "artifact.json")) as f:
+ self.assertEqual(json.load(f), data)
+ finally:
+ if os.path.exists(tmpdir):
+ shutil.rmtree(tmpdir)
+ decision.ARTIFACTS_DIR = "artifacts"
+
+ def test_write_artifact_yml(self):
+ data = [{"some": "data"}]
+ tmpdir = tempfile.mkdtemp()
+ try:
+ decision.ARTIFACTS_DIR = os.path.join(tmpdir, "artifacts")
+ decision.write_artifact("artifact.yml", data)
+ self.assertEqual(load_yaml(decision.ARTIFACTS_DIR, "artifact.yml"), data)
+ finally:
+ if os.path.exists(tmpdir):
+ shutil.rmtree(tmpdir)
+ decision.ARTIFACTS_DIR = "artifacts"
+
+
+class TestGetDecisionParameters(unittest.TestCase):
+ ttc_file = os.path.join(os.getcwd(), "try_task_config.json")
+
+ def setUp(self):
+ self.options = {
+ "base_repository": "https://hg.mozilla.org/mozilla-unified",
+ "head_repository": "https://hg.mozilla.org/mozilla-central",
+ "head_rev": "abcd",
+ "head_ref": "ef01",
+ "head_tag": "",
+ "message": "",
+ "project": "mozilla-central",
+ "pushlog_id": "143",
+ "pushdate": 1503691511,
+ "owner": "nobody@mozilla.com",
+ "repository_type": "hg",
+ "tasks_for": "hg-push",
+ "level": "3",
+ }
+
+ @patch("gecko_taskgraph.decision.get_hg_revision_branch")
+ @patch("gecko_taskgraph.decision._determine_more_accurate_base_rev")
+ def test_simple_options(
+ self, mock_determine_more_accurate_base_rev, mock_get_hg_revision_branch
+ ):
+ mock_get_hg_revision_branch.return_value = "default"
+ mock_determine_more_accurate_base_rev.return_value = "baserev"
+ with MockedOpen({self.ttc_file: None}):
+ params = decision.get_decision_parameters(FAKE_GRAPH_CONFIG, self.options)
+ self.assertEqual(params["pushlog_id"], "143")
+ self.assertEqual(params["build_date"], 1503691511)
+ self.assertEqual(params["hg_branch"], "default")
+ self.assertEqual(params["moz_build_date"], "20170825200511")
+ self.assertEqual(params["try_mode"], None)
+ self.assertEqual(params["try_options"], None)
+ self.assertEqual(params["try_task_config"], {})
+
+ @patch("gecko_taskgraph.decision.get_hg_revision_branch")
+ @patch("gecko_taskgraph.decision._determine_more_accurate_base_rev")
+ def test_no_email_owner(
+ self, mock_determine_more_accurate_base_rev, mock_get_hg_revision_branch
+ ):
+ mock_get_hg_revision_branch.return_value = "default"
+ mock_determine_more_accurate_base_rev.return_value = "baserev"
+ self.options["owner"] = "ffxbld"
+ with MockedOpen({self.ttc_file: None}):
+ params = decision.get_decision_parameters(FAKE_GRAPH_CONFIG, self.options)
+ self.assertEqual(params["owner"], "ffxbld@noreply.mozilla.org")
+
+ @patch("gecko_taskgraph.decision.get_hg_revision_branch")
+ @patch("gecko_taskgraph.decision.get_hg_commit_message")
+ @patch("gecko_taskgraph.decision._determine_more_accurate_base_rev")
+ def test_try_options(
+ self,
+ mock_determine_more_accurate_base_rev,
+ mock_get_hg_commit_message,
+ mock_get_hg_revision_branch,
+ ):
+ mock_get_hg_commit_message.return_value = "try: -b do -t all --artifact"
+ mock_get_hg_revision_branch.return_value = "default"
+ mock_determine_more_accurate_base_rev.return_value = "baserev"
+ self.options["project"] = "try"
+ with MockedOpen({self.ttc_file: None}):
+ params = decision.get_decision_parameters(FAKE_GRAPH_CONFIG, self.options)
+ self.assertEqual(params["try_mode"], "try_option_syntax")
+ self.assertEqual(params["try_options"]["build_types"], "do")
+ self.assertEqual(params["try_options"]["unittests"], "all")
+ self.assertEqual(
+ params["try_task_config"],
+ {
+ "gecko-profile": False,
+ "use-artifact-builds": True,
+ "env": {},
+ },
+ )
+
+ @patch("gecko_taskgraph.decision.get_hg_revision_branch")
+ @patch("gecko_taskgraph.decision.get_hg_commit_message")
+ @patch("gecko_taskgraph.decision._determine_more_accurate_base_rev")
+ def test_try_task_config(
+ self,
+        mock_determine_more_accurate_base_rev,
+        mock_get_hg_commit_message,
+        mock_get_hg_revision_branch,
+ ):
+ mock_get_hg_commit_message.return_value = "Fuzzy query=foo"
+ mock_get_hg_revision_branch.return_value = "default"
+ mock_determine_more_accurate_base_rev.return_value = "baserev"
+ ttc = {"tasks": ["a", "b"]}
+ self.options["project"] = "try"
+ with MockedOpen({self.ttc_file: json.dumps(ttc)}):
+ params = decision.get_decision_parameters(FAKE_GRAPH_CONFIG, self.options)
+ self.assertEqual(params["try_mode"], "try_task_config")
+ self.assertEqual(params["try_options"], None)
+ self.assertEqual(params["try_task_config"], ttc)
+
+ def test_try_syntax_from_message_empty(self):
+ self.assertEqual(decision.try_syntax_from_message(""), "")
+
+ def test_try_syntax_from_message_no_try_syntax(self):
+ self.assertEqual(decision.try_syntax_from_message("abc | def"), "")
+
+ def test_try_syntax_from_message_initial_try_syntax(self):
+ self.assertEqual(
+ decision.try_syntax_from_message("try: -f -o -o"), "try: -f -o -o"
+ )
+
+ def test_try_syntax_from_message_initial_try_syntax_multiline(self):
+ self.assertEqual(
+ decision.try_syntax_from_message("try: -f -o -o\nabc\ndef"), "try: -f -o -o"
+ )
+
+ def test_try_syntax_from_message_embedded_try_syntax_multiline(self):
+ self.assertEqual(
+ decision.try_syntax_from_message("some stuff\ntry: -f -o -o\nabc\ndef"),
+ "try: -f -o -o",
+ )
+
+
+if __name__ == "__main__":
+ main()
diff --git a/taskcluster/gecko_taskgraph/test/test_files_changed.py b/taskcluster/gecko_taskgraph/test/test_files_changed.py
new file mode 100644
index 0000000000..5b9a016649
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/test/test_files_changed.py
@@ -0,0 +1,90 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import json
+import os
+import unittest
+
+from mozunit import main
+
+from gecko_taskgraph import files_changed
+from gecko_taskgraph.util import hg
+
+PARAMS = {
+ "head_repository": "https://hg.mozilla.org/mozilla-central",
+ "head_rev": "a14f88a9af7a",
+}
+
+FILES_CHANGED = [
+ "devtools/client/debugger/index.html",
+ "devtools/client/inspector/test/browser_inspector_highlighter-eyedropper-events.js",
+ "devtools/client/inspector/test/head.js",
+ "devtools/client/themes/rules.css",
+ "devtools/client/webconsole/test/browser_webconsole_output_06.js",
+ "devtools/server/actors/highlighters/eye-dropper.js",
+ "devtools/server/actors/object.js",
+ "docshell/base/nsDocShell.cpp",
+ "dom/tests/mochitest/general/test_contentViewer_overrideDPPX.html",
+ "taskcluster/scripts/builder/build-l10n.sh",
+]
+
+
+class FakeResponse:
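+    """Mimic a requests Response whose json() method returns the bundled
+    automationrelevance.json fixture."""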
+ def json(self):
+ with open(
+ os.path.join(os.path.dirname(__file__), "automationrelevance.json")
+ ) as f:
+ return json.load(f)
+
+
+class TestGetChangedFiles(unittest.TestCase):
+ def setUp(self):
+ files_changed.get_changed_files.clear()
+ self.old_get = hg.requests.get
+
+ def fake_get(url, **kwargs):
+ return FakeResponse()
+
+ hg.requests.get = fake_get
+
+ def tearDown(self):
+ hg.requests.get = self.old_get
+ files_changed.get_changed_files.clear()
+
+ def test_get_changed_files(self):
+        """get_changed_files correctly extracts the list of changed files in a
+        push from automationrelevance data, using a response captured from
+        hg.mozilla.org as a fixture (see setUp) rather than the live service."""
+ self.assertEqual(
+ sorted(
+ files_changed.get_changed_files(
+ PARAMS["head_repository"], PARAMS["head_rev"]
+ )
+ ),
+ FILES_CHANGED,
+ )
+
+
+class TestCheck(unittest.TestCase):
+ def setUp(self):
+ files_changed.get_changed_files[
+ PARAMS["head_repository"], PARAMS["head_rev"]
+ ] = FILES_CHANGED
+
+ def tearDown(self):
+ files_changed.get_changed_files.clear()
+
+ def test_check_no_params(self):
+ self.assertTrue(files_changed.check({}, ["ignored"]))
+
+ def test_check_no_match(self):
+ self.assertFalse(files_changed.check(PARAMS, ["nosuch/**"]))
+
+ def test_check_match(self):
+ self.assertTrue(files_changed.check(PARAMS, ["devtools/**"]))
+
+
+if __name__ == "__main__":
+ main()
diff --git a/taskcluster/gecko_taskgraph/test/test_main.py b/taskcluster/gecko_taskgraph/test/test_main.py
new file mode 100644
index 0000000000..bb1aa1caeb
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/test/test_main.py
@@ -0,0 +1,67 @@
+# Any copyright is dedicated to the public domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+import pytest
+from mozunit import main as mozunit_main
+
+import gecko_taskgraph
+from gecko_taskgraph.main import main as taskgraph_main
+
+
+@pytest.fixture
+def run_main(maketgg, monkeypatch):
+ def inner(args, **kwargs):
+ kwargs.setdefault("target_tasks", ["_fake-t-0", "_fake-t-1"])
+ tgg = maketgg(**kwargs)
+
+ def fake_get_taskgraph_generator(*args):
+ return tgg
+
+ monkeypatch.setattr(
+ gecko_taskgraph.main,
+ "get_taskgraph_generator",
+ fake_get_taskgraph_generator,
+ )
+ taskgraph_main(args)
+ return tgg
+
+ return inner
+
+
+@pytest.mark.parametrize(
+ "attr,expected",
+ (
+ ("tasks", ["_fake-t-0", "_fake-t-1", "_fake-t-2"]),
+ ("full", ["_fake-t-0", "_fake-t-1", "_fake-t-2"]),
+ ("target", ["_fake-t-0", "_fake-t-1"]),
+ ("target-graph", ["_fake-t-0", "_fake-t-1"]),
+ ("optimized", ["_fake-t-0", "_fake-t-1"]),
+ ("morphed", ["_fake-t-0", "_fake-t-1"]),
+ ),
+)
+def test_show_taskgraph(run_main, capsys, attr, expected):
+ run_main([attr])
+ out, err = capsys.readouterr()
+ assert out.strip() == "\n".join(expected)
+ assert "Dumping result" in err
+
+
+def test_tasks_regex(run_main, capsys):
+ run_main(["full", "--tasks=_.*-t-1"])
+ out, _ = capsys.readouterr()
+ assert out.strip() == "_fake-t-1"
+
+
+def test_output_file(run_main, tmpdir):
+ output_file = tmpdir.join("out.txt")
+ assert not output_file.check()
+
+ run_main(["full", f"--output-file={output_file.strpath}"])
+ assert output_file.check()
+ assert output_file.read_text("utf-8").strip() == "\n".join(
+ ["_fake-t-0", "_fake-t-1", "_fake-t-2"]
+ )
+
+
+if __name__ == "__main__":
+ mozunit_main()
diff --git a/taskcluster/gecko_taskgraph/test/test_morph.py b/taskcluster/gecko_taskgraph/test/test_morph.py
new file mode 100644
index 0000000000..c29fb58207
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/test/test_morph.py
@@ -0,0 +1,108 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import pytest
+from mozunit import main
+from taskgraph.graph import Graph
+from taskgraph.parameters import Parameters
+from taskgraph.task import Task
+from taskgraph.taskgraph import TaskGraph
+
+from gecko_taskgraph import morph
+
+
+@pytest.fixture
+def make_taskgraph():
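+        """Build an edge-less TaskGraph plus a label-to-taskid mapping from a
+        dict of tasks, assigning each task the id '<label>-tid'."""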
+ def inner(tasks):
+ label_to_taskid = {k: k + "-tid" for k in tasks}
+ for label, task_id in label_to_taskid.items():
+ tasks[label].task_id = task_id
+ graph = Graph(nodes=set(tasks), edges=set())
+ taskgraph = TaskGraph(tasks, graph)
+ return taskgraph, label_to_taskid
+
+ return inner
+
+
+def test_make_index_tasks(make_taskgraph, graph_config):
+ task_def = {
+ "routes": [
+ "index.gecko.v2.mozilla-central.latest.firefox-l10n.linux64-opt.es-MX",
+ "index.gecko.v2.mozilla-central.latest.firefox-l10n.linux64-opt.fy-NL",
+ "index.gecko.v2.mozilla-central.latest.firefox-l10n.linux64-opt.sk",
+ "index.gecko.v2.mozilla-central.latest.firefox-l10n.linux64-opt.sl",
+ "index.gecko.v2.mozilla-central.latest.firefox-l10n.linux64-opt.uk",
+ "index.gecko.v2.mozilla-central.latest.firefox-l10n.linux64-opt.zh-CN",
+ "index.gecko.v2.mozilla-central.pushdate."
+ "2017.04.04.20170404100210.firefox-l10n.linux64-opt.es-MX",
+ "index.gecko.v2.mozilla-central.pushdate."
+ "2017.04.04.20170404100210.firefox-l10n.linux64-opt.fy-NL",
+ "index.gecko.v2.mozilla-central.pushdate."
+ "2017.04.04.20170404100210.firefox-l10n.linux64-opt.sk",
+ "index.gecko.v2.mozilla-central.pushdate."
+ "2017.04.04.20170404100210.firefox-l10n.linux64-opt.sl",
+ "index.gecko.v2.mozilla-central.pushdate."
+ "2017.04.04.20170404100210.firefox-l10n.linux64-opt.uk",
+ "index.gecko.v2.mozilla-central.pushdate."
+ "2017.04.04.20170404100210.firefox-l10n.linux64-opt.zh-CN",
+ "index.gecko.v2.mozilla-central.revision."
+ "b5d8b27a753725c1de41ffae2e338798f3b5cacd.firefox-l10n.linux64-opt.es-MX",
+ "index.gecko.v2.mozilla-central.revision."
+ "b5d8b27a753725c1de41ffae2e338798f3b5cacd.firefox-l10n.linux64-opt.fy-NL",
+ "index.gecko.v2.mozilla-central.revision."
+ "b5d8b27a753725c1de41ffae2e338798f3b5cacd.firefox-l10n.linux64-opt.sk",
+ "index.gecko.v2.mozilla-central.revision."
+ "b5d8b27a753725c1de41ffae2e338798f3b5cacd.firefox-l10n.linux64-opt.sl",
+ "index.gecko.v2.mozilla-central.revision."
+ "b5d8b27a753725c1de41ffae2e338798f3b5cacd.firefox-l10n.linux64-opt.uk",
+ "index.gecko.v2.mozilla-central.revision."
+ "b5d8b27a753725c1de41ffae2e338798f3b5cacd.firefox-l10n.linux64-opt.zh-CN",
+ ],
+ "deadline": "soon",
+ "metadata": {
+ "description": "desc",
+ "owner": "owner@foo.com",
+ "source": "https://source",
+ },
+ "extra": {
+ "index": {"rank": 1540722354},
+ },
+ }
+ task = Task(kind="test", label="a", attributes={}, task=task_def)
+ docker_task = Task(
+ kind="docker-image", label="docker-image-index-task", attributes={}, task={}
+ )
+ taskgraph, label_to_taskid = make_taskgraph(
+ {
+ task.label: task,
+ docker_task.label: docker_task,
+ }
+ )
+
+ index_paths = [
+ r.split(".", 1)[1] for r in task_def["routes"] if r.startswith("index.")
+ ]
+ index_task = morph.make_index_task(
+ task,
+ taskgraph,
+ label_to_taskid,
+ Parameters(strict=False),
+ graph_config,
+ index_paths=index_paths,
+ index_rank=1540722354,
+ purpose="index-task",
+ dependencies={},
+ )
+
+ assert index_task.task["payload"]["command"][0] == "insert-indexes.js"
+ assert index_task.task["payload"]["env"]["TARGET_TASKID"] == "a-tid"
+ assert index_task.task["payload"]["env"]["INDEX_RANK"] == 1540722354
+
+ # check the scope summary
+ assert index_task.task["scopes"] == ["index:insert-task:gecko.v2.mozilla-central.*"]
+
+
+if __name__ == "__main__":
+ main()
diff --git a/taskcluster/gecko_taskgraph/test/test_optimize_strategies.py b/taskcluster/gecko_taskgraph/test/test_optimize_strategies.py
new file mode 100644
index 0000000000..1240d71cf8
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/test/test_optimize_strategies.py
@@ -0,0 +1,515 @@
+# Any copyright is dedicated to the public domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+import time
+from datetime import datetime
+from time import mktime
+
+import pytest
+from mozunit import main
+from taskgraph.optimize.base import registry
+from taskgraph.task import Task
+
+from gecko_taskgraph.optimize import project
+from gecko_taskgraph.optimize.backstop import SkipUnlessBackstop, SkipUnlessPushInterval
+from gecko_taskgraph.optimize.bugbug import (
+ FALLBACK,
+ BugBugPushSchedules,
+ DisperseGroups,
+ SkipUnlessDebug,
+)
+from gecko_taskgraph.optimize.strategies import SkipUnlessSchedules
+from gecko_taskgraph.util.backstop import BACKSTOP_PUSH_INTERVAL
+from gecko_taskgraph.util.bugbug import (
+ BUGBUG_BASE_URL,
+ BugbugTimeoutException,
+ push_schedules,
+)
+
+
+@pytest.fixture(autouse=True)
+def clear_push_schedules_memoize():
+ push_schedules.clear()
+
+
+@pytest.fixture
+def params():
+ return {
+ "branch": "autoland",
+ "head_repository": "https://hg.mozilla.org/integration/autoland",
+ "head_rev": "abcdef",
+ "project": "autoland",
+ "pushlog_id": 1,
+ "pushdate": mktime(datetime.now().timetuple()),
+ }
+
+
+def generate_tasks(*tasks):
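+    """Fill in default label/kind/attribute fields for each task dict and
+    yield the results as Task objects."""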
+ for i, task in enumerate(tasks):
+ task.setdefault("label", f"task-{i}-label")
+ task.setdefault("kind", "test")
+ task.setdefault("task", {})
+ task.setdefault("attributes", {})
+ task["attributes"].setdefault("e10s", True)
+
+ for attr in (
+ "optimization",
+ "dependencies",
+ "soft_dependencies",
+ ):
+ task.setdefault(attr, None)
+
+ task["task"].setdefault("label", task["label"])
+ yield Task.from_json(task)
+
+
+# task sets
+
+default_tasks = list(
+ generate_tasks(
+ {"attributes": {"test_manifests": ["foo/test.ini", "bar/test.ini"]}},
+ {"attributes": {"test_manifests": ["bar/test.ini"], "build_type": "debug"}},
+ {"attributes": {"build_type": "debug"}},
+ {"attributes": {"test_manifests": [], "build_type": "opt"}},
+ {"attributes": {"build_type": "opt"}},
+ )
+)
+
+
+disperse_tasks = list(
+ generate_tasks(
+ {
+ "attributes": {
+ "test_manifests": ["foo/test.ini", "bar/test.ini"],
+ "test_platform": "linux/opt",
+ }
+ },
+ {
+ "attributes": {
+ "test_manifests": ["bar/test.ini"],
+ "test_platform": "linux/opt",
+ }
+ },
+ {
+ "attributes": {
+ "test_manifests": ["bar/test.ini"],
+ "test_platform": "windows/debug",
+ }
+ },
+ {
+ "attributes": {
+ "test_manifests": ["bar/test.ini"],
+ "test_platform": "linux/opt",
+ "unittest_variant": "no-fission",
+ }
+ },
+ {
+ "attributes": {
+ "e10s": False,
+ "test_manifests": ["bar/test.ini"],
+ "test_platform": "linux/opt",
+ }
+ },
+ )
+)
+
+
+def idfn(param):
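+    """Use the __name__ of a tuple param's first item as its pytest id, and
+    fall back to None so pytest generates a default id."""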
+ if isinstance(param, tuple):
+ try:
+ return param[0].__name__
+ except AttributeError:
+ return None
+ return None
+
+
+@pytest.mark.parametrize(
+ "opt,tasks,arg,expected",
+ [
+ # debug
+ pytest.param(
+ SkipUnlessDebug(),
+ default_tasks,
+ None,
+ ["task-0-label", "task-1-label", "task-2-label"],
+ ),
+ # disperse with no supplied importance
+ pytest.param(
+ DisperseGroups(),
+ disperse_tasks,
+ None,
+ [t.label for t in disperse_tasks],
+ ),
+ # disperse with low importance
+ pytest.param(
+ DisperseGroups(),
+ disperse_tasks,
+ {"bar/test.ini": "low"},
+ ["task-0-label", "task-2-label"],
+ ),
+ # disperse with medium importance
+ pytest.param(
+ DisperseGroups(),
+ disperse_tasks,
+ {"bar/test.ini": "medium"},
+ ["task-0-label", "task-1-label", "task-2-label"],
+ ),
+ # disperse with high importance
+ pytest.param(
+ DisperseGroups(),
+ disperse_tasks,
+ {"bar/test.ini": "high"},
+ ["task-0-label", "task-1-label", "task-2-label", "task-3-label"],
+ ),
+ ],
+ ids=idfn,
+)
+def test_optimization_strategy_remove(params, opt, tasks, arg, expected):
+ labels = [t.label for t in tasks if not opt.should_remove_task(t, params, arg)]
+ assert sorted(labels) == sorted(expected)
+
+
+@pytest.mark.parametrize(
+ "args,data,expected",
+ [
+ # empty
+ pytest.param(
+ (0.1,),
+ {},
+ [],
+ ),
+ # only tasks without test manifests selected
+ pytest.param(
+ (0.1,),
+ {"tasks": {"task-1-label": 0.9, "task-2-label": 0.1, "task-3-label": 0.5}},
+ ["task-2-label"],
+ ),
+ # tasks which are unknown to bugbug are selected
+ pytest.param(
+ (0.1,),
+ {
+ "tasks": {"task-1-label": 0.9, "task-3-label": 0.5},
+ "known_tasks": ["task-1-label", "task-3-label", "task-4-label"],
+ },
+ ["task-2-label"],
+ ),
+ # tasks containing groups selected
+ pytest.param(
+ (0.1,),
+ {"groups": {"foo/test.ini": 0.4}},
+ ["task-0-label"],
+ ),
+ # tasks matching "tasks" or "groups" selected
+ pytest.param(
+ (0.1,),
+ {
+ "tasks": {"task-2-label": 0.2},
+ "groups": {"foo/test.ini": 0.25, "bar/test.ini": 0.75},
+ },
+ ["task-0-label", "task-1-label", "task-2-label"],
+ ),
+ # tasks matching "tasks" or "groups" selected, when they exceed the confidence threshold
+ pytest.param(
+ (0.5,),
+ {
+ "tasks": {"task-2-label": 0.2, "task-4-label": 0.5},
+ "groups": {"foo/test.ini": 0.65, "bar/test.ini": 0.25},
+ },
+ ["task-0-label", "task-4-label"],
+ ),
+ # tasks matching "reduced_tasks" are selected, when they exceed the confidence threshold
+ pytest.param(
+ (0.7, True, True),
+ {
+ "tasks": {"task-2-label": 0.7, "task-4-label": 0.7},
+ "reduced_tasks": {"task-4-label": 0.7},
+ "groups": {"foo/test.ini": 0.75, "bar/test.ini": 0.25},
+ },
+ ["task-4-label"],
+ ),
+ # tasks matching "groups" selected, only on specific platforms.
+ pytest.param(
+ (0.1, False, False, None, 1, True),
+ {
+ "tasks": {"task-2-label": 0.2},
+ "groups": {"foo/test.ini": 0.25, "bar/test.ini": 0.75},
+ "config_groups": {
+ "foo/test.ini": ["task-1-label", "task-0-label"],
+ "bar/test.ini": ["task-0-label"],
+ },
+ },
+ ["task-0-label", "task-2-label"],
+ ),
+ pytest.param(
+ (0.1, False, False, None, 1, True),
+ {
+ "tasks": {"task-2-label": 0.2},
+ "groups": {"foo/test.ini": 0.25, "bar/test.ini": 0.75},
+ "config_groups": {
+ "foo/test.ini": ["task-1-label", "task-0-label"],
+ "bar/test.ini": ["task-1-label"],
+ },
+ },
+ ["task-0-label", "task-1-label", "task-2-label"],
+ ),
+ pytest.param(
+ (0.1, False, False, None, 1, True),
+ {
+ "tasks": {"task-2-label": 0.2},
+ "groups": {"foo/test.ini": 0.25, "bar/test.ini": 0.75},
+ "config_groups": {
+ "foo/test.ini": ["task-1-label"],
+ "bar/test.ini": ["task-0-label"],
+ },
+ },
+ ["task-0-label", "task-2-label"],
+ ),
+ pytest.param(
+ (0.1, False, False, None, 1, True),
+ {
+ "tasks": {"task-2-label": 0.2},
+ "groups": {"foo/test.ini": 0.25, "bar/test.ini": 0.75},
+ "config_groups": {
+ "foo/test.ini": ["task-1-label"],
+ "bar/test.ini": ["task-3-label"],
+ },
+ },
+ ["task-2-label"],
+ ),
+ ],
+ ids=idfn,
+)
+def test_bugbug_push_schedules(responses, params, args, data, expected):
+ query = "/push/{branch}/{head_rev}/schedules".format(**params)
+ url = BUGBUG_BASE_URL + query
+
+ responses.add(
+ responses.GET,
+ url,
+ json=data,
+ status=200,
+ )
+
+ opt = BugBugPushSchedules(*args)
+ labels = [
+ t.label for t in default_tasks if not opt.should_remove_task(t, params, {})
+ ]
+ assert sorted(labels) == sorted(expected)
+
+
+def test_bugbug_multiple_pushes(responses, params):
+ pushes = {str(pid): {"changesets": [f"c{pid}"]} for pid in range(8, 10)}
+
+ responses.add(
+ responses.GET,
+ "https://hg.mozilla.org/integration/autoland/json-pushes/?version=2&startID=8&endID=9",
+ json={"pushes": pushes},
+ status=200,
+ )
+
+ responses.add(
+ responses.GET,
+ BUGBUG_BASE_URL + "/push/{}/c9/schedules".format(params["branch"]),
+ json={
+ "tasks": {"task-2-label": 0.2, "task-4-label": 0.5},
+ "groups": {"foo/test.ini": 0.2, "bar/test.ini": 0.25},
+ "config_groups": {"foo/test.ini": ["linux-*"], "bar/test.ini": ["task-*"]},
+ "known_tasks": ["task-4-label"],
+ },
+ status=200,
+ )
+
+ # Tasks with a lower confidence don't override task with a higher one.
+ # Tasks with a higher confidence override tasks with a lower one.
+ # Known tasks are merged.
+ responses.add(
+ responses.GET,
+ BUGBUG_BASE_URL + "/push/{branch}/{head_rev}/schedules".format(**params),
+ json={
+ "tasks": {"task-2-label": 0.2, "task-4-label": 0.2},
+ "groups": {"foo/test.ini": 0.65, "bar/test.ini": 0.25},
+ "config_groups": {
+ "foo/test.ini": ["task-*"],
+ "bar/test.ini": ["windows-*"],
+ },
+ "known_tasks": ["task-1-label", "task-3-label"],
+ },
+ status=200,
+ )
+
+ params["pushlog_id"] = 10
+
+ opt = BugBugPushSchedules(0.3, False, False, False, 2)
+ labels = [
+ t.label for t in default_tasks if not opt.should_remove_task(t, params, {})
+ ]
+ assert sorted(labels) == sorted(["task-0-label", "task-2-label", "task-4-label"])
+
+ opt = BugBugPushSchedules(0.3, False, False, False, 2, True)
+ labels = [
+ t.label for t in default_tasks if not opt.should_remove_task(t, params, {})
+ ]
+ assert sorted(labels) == sorted(["task-0-label", "task-2-label", "task-4-label"])
+
+ opt = BugBugPushSchedules(0.2, False, False, False, 2, True)
+ labels = [
+ t.label for t in default_tasks if not opt.should_remove_task(t, params, {})
+ ]
+ assert sorted(labels) == sorted(
+ ["task-0-label", "task-1-label", "task-2-label", "task-4-label"]
+ )
+
+
+def test_bugbug_timeout(monkeypatch, responses, params):
+ query = "/push/{branch}/{head_rev}/schedules".format(**params)
+ url = BUGBUG_BASE_URL + query
+ responses.add(
+ responses.GET,
+ url,
+ json={"ready": False},
+ status=202,
+ )
+
+ # Make sure the test runs fast.
+ monkeypatch.setattr(time, "sleep", lambda i: None)
+
+ opt = BugBugPushSchedules(0.5)
+ with pytest.raises(BugbugTimeoutException):
+ opt.should_remove_task(default_tasks[0], params, None)
+
+
+def test_bugbug_fallback(monkeypatch, responses, params):
+ query = "/push/{branch}/{head_rev}/schedules".format(**params)
+ url = BUGBUG_BASE_URL + query
+ responses.add(
+ responses.GET,
+ url,
+ json={"ready": False},
+ status=202,
+ )
+
+ opt = BugBugPushSchedules(0.5, fallback=FALLBACK)
+
+ # Make sure the test runs fast.
+ monkeypatch.setattr(time, "sleep", lambda i: None)
+
+ def fake_should_remove_task(task, params, _):
+ return task.label == default_tasks[0].label
+
+ monkeypatch.setattr(
+ registry[FALLBACK], "should_remove_task", fake_should_remove_task
+ )
+
+ assert opt.should_remove_task(default_tasks[0], params, None)
+
+ # Make sure we don't hit bugbug more than once.
+ responses.reset()
+
+ assert not opt.should_remove_task(default_tasks[1], params, None)
+
+
+def test_backstop(params):
+ all_labels = {t.label for t in default_tasks}
+ opt = SkipUnlessBackstop()
+
+ params["backstop"] = False
+ scheduled = {
+ t.label for t in default_tasks if not opt.should_remove_task(t, params, None)
+ }
+ assert scheduled == set()
+
+ params["backstop"] = True
+ scheduled = {
+ t.label for t in default_tasks if not opt.should_remove_task(t, params, None)
+ }
+ assert scheduled == all_labels
+
+
+def test_push_interval(params):
+ all_labels = {t.label for t in default_tasks}
+ opt = SkipUnlessPushInterval(10) # every 10th push
+
+ # Only multiples of 10 schedule tasks.
+ params["pushlog_id"] = 9
+ scheduled = {
+ t.label for t in default_tasks if not opt.should_remove_task(t, params, None)
+ }
+ assert scheduled == set()
+
+ params["pushlog_id"] = 10
+ scheduled = {
+ t.label for t in default_tasks if not opt.should_remove_task(t, params, None)
+ }
+ assert scheduled == all_labels
+
+
+def test_expanded(params):
+ all_labels = {t.label for t in default_tasks}
+ opt = registry["skip-unless-expanded"]
+
+ params["backstop"] = False
+ params["pushlog_id"] = BACKSTOP_PUSH_INTERVAL / 2
+ scheduled = {
+ t.label for t in default_tasks if not opt.should_remove_task(t, params, None)
+ }
+ assert scheduled == all_labels
+
+ params["pushlog_id"] += 1
+ scheduled = {
+ t.label for t in default_tasks if not opt.should_remove_task(t, params, None)
+ }
+ assert scheduled == set()
+
+ params["backstop"] = True
+ scheduled = {
+ t.label for t in default_tasks if not opt.should_remove_task(t, params, None)
+ }
+ assert scheduled == all_labels
+
+
+def test_project_autoland_test(monkeypatch, responses, params):
+ """Tests the behaviour of the `project.autoland["test"]` strategy on
+ various types of pushes.
+ """
+ # This is meant to test the composition of substrategies, and not the
+ # actual optimization implementations. So mock them out for simplicity.
+ monkeypatch.setattr(SkipUnlessSchedules, "should_remove_task", lambda *args: False)
+ monkeypatch.setattr(DisperseGroups, "should_remove_task", lambda *args: False)
+
+ def fake_bugbug_should_remove_task(self, task, params, importance):
+ if self.num_pushes > 1:
+ return task.label == "task-4-label"
+ return task.label in ("task-2-label", "task-3-label", "task-4-label")
+
+ monkeypatch.setattr(
+ BugBugPushSchedules, "should_remove_task", fake_bugbug_should_remove_task
+ )
+
+ opt = project.autoland["test"]
+
+ # On backstop pushes, nothing gets optimized.
+ params["backstop"] = True
+ scheduled = {
+ t.label for t in default_tasks if not opt.should_remove_task(t, params, {})
+ }
+ assert scheduled == {t.label for t in default_tasks}
+
+ # On expanded pushes, some things are optimized.
+ params["backstop"] = False
+ params["pushlog_id"] = 10
+ scheduled = {
+ t.label for t in default_tasks if not opt.should_remove_task(t, params, {})
+ }
+ assert scheduled == {"task-0-label", "task-1-label", "task-2-label", "task-3-label"}
+
+ # On regular pushes, more things are optimized.
+ params["pushlog_id"] = 11
+ scheduled = {
+ t.label for t in default_tasks if not opt.should_remove_task(t, params, {})
+ }
+ assert scheduled == {"task-0-label", "task-1-label"}
+
+
+if __name__ == "__main__":
+ main()
diff --git a/taskcluster/gecko_taskgraph/test/test_target_tasks.py b/taskcluster/gecko_taskgraph/test/test_target_tasks.py
new file mode 100644
index 0000000000..2bbc57fcf3
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/test/test_target_tasks.py
@@ -0,0 +1,428 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import contextlib
+import re
+import unittest
+
+import pytest
+from mozunit import main
+from taskgraph.graph import Graph
+from taskgraph.task import Task
+from taskgraph.taskgraph import TaskGraph
+
+from gecko_taskgraph import target_tasks, try_option_syntax
+
+
+class FakeTryOptionSyntax:
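+    """Stub TryOptionSyntax whose task_matches selects only tasks carrying an
+    'at-at' attribute."""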
+ def __init__(self, message, task_graph, graph_config):
+ self.trigger_tests = 0
+ self.talos_trigger_tests = 0
+ self.raptor_trigger_tests = 0
+ self.notifications = None
+ self.env = []
+ self.profile = False
+ self.tag = None
+ self.no_retry = False
+
+ def task_matches(self, task):
+ return "at-at" in task.attributes
+
+
+class TestTargetTasks(unittest.TestCase):
+ def default_matches_project(self, run_on_projects, project):
+ return self.default_matches(
+ attributes={
+ "run_on_projects": run_on_projects,
+ },
+ parameters={
+ "project": project,
+ "hg_branch": "default",
+ },
+ )
+
+ def default_matches_hg_branch(self, run_on_hg_branches, hg_branch):
+ attributes = {"run_on_projects": ["all"]}
+ if run_on_hg_branches is not None:
+ attributes["run_on_hg_branches"] = run_on_hg_branches
+
+ return self.default_matches(
+ attributes=attributes,
+ parameters={
+ "project": "mozilla-central",
+ "hg_branch": hg_branch,
+ },
+ )
+
+ def default_matches(self, attributes, parameters):
+ method = target_tasks.get_method("default")
+ graph = TaskGraph(
+ tasks={
+ "a": Task(kind="build", label="a", attributes=attributes, task={}),
+ },
+ graph=Graph(nodes={"a"}, edges=set()),
+ )
+ return "a" in method(graph, parameters, {})
+
+ def test_default_all(self):
+ """run_on_projects=[all] includes release, integration, and other projects"""
+ self.assertTrue(self.default_matches_project(["all"], "mozilla-central"))
+ self.assertTrue(self.default_matches_project(["all"], "baobab"))
+
+ def test_default_integration(self):
+ """run_on_projects=[integration] includes integration projects"""
+ self.assertFalse(
+ self.default_matches_project(["integration"], "mozilla-central")
+ )
+ self.assertFalse(self.default_matches_project(["integration"], "baobab"))
+
+ def test_default_release(self):
+ """run_on_projects=[release] includes release projects"""
+ self.assertTrue(self.default_matches_project(["release"], "mozilla-central"))
+ self.assertFalse(self.default_matches_project(["release"], "baobab"))
+
+ def test_default_nothing(self):
+ """run_on_projects=[] includes nothing"""
+ self.assertFalse(self.default_matches_project([], "mozilla-central"))
+ self.assertFalse(self.default_matches_project([], "baobab"))
+
+ def test_default_hg_branch(self):
+ self.assertTrue(self.default_matches_hg_branch(None, "default"))
+ self.assertTrue(self.default_matches_hg_branch(None, "GECKOVIEW_62_RELBRANCH"))
+
+ self.assertFalse(self.default_matches_hg_branch([], "default"))
+ self.assertFalse(self.default_matches_hg_branch([], "GECKOVIEW_62_RELBRANCH"))
+
+ self.assertTrue(self.default_matches_hg_branch(["all"], "default"))
+ self.assertTrue(
+ self.default_matches_hg_branch(["all"], "GECKOVIEW_62_RELBRANCH")
+ )
+
+ self.assertTrue(self.default_matches_hg_branch(["default"], "default"))
+ self.assertTrue(self.default_matches_hg_branch([r"default"], "default"))
+ self.assertFalse(
+ self.default_matches_hg_branch([r"default"], "GECKOVIEW_62_RELBRANCH")
+ )
+
+ self.assertTrue(
+ self.default_matches_hg_branch(
+ ["GECKOVIEW_62_RELBRANCH"], "GECKOVIEW_62_RELBRANCH"
+ )
+ )
+ self.assertTrue(
+ self.default_matches_hg_branch(
+ [r"GECKOVIEW_\d+_RELBRANCH"], "GECKOVIEW_62_RELBRANCH"
+ )
+ )
+ self.assertTrue(
+ self.default_matches_hg_branch(
+ [r"GECKOVIEW_\d+_RELBRANCH"], "GECKOVIEW_62_RELBRANCH"
+ )
+ )
+ self.assertFalse(
+ self.default_matches_hg_branch([r"GECKOVIEW_\d+_RELBRANCH"], "default")
+ )
+
+ def make_task_graph(self):
+ tasks = {
+ "a": Task(kind=None, label="a", attributes={}, task={}),
+ "b": Task(kind=None, label="b", attributes={"at-at": "yep"}, task={}),
+ "c": Task(
+ kind=None, label="c", attributes={"run_on_projects": ["try"]}, task={}
+ ),
+ "ddd-1": Task(kind="test", label="ddd-1", attributes={}, task={}),
+ "ddd-2": Task(kind="test", label="ddd-2", attributes={}, task={}),
+ "ddd-1-cf": Task(kind="test", label="ddd-1-cf", attributes={}, task={}),
+ "ddd-2-cf": Task(kind="test", label="ddd-2-cf", attributes={}, task={}),
+ "ddd-var-1": Task(kind="test", label="ddd-var-1", attributes={}, task={}),
+ "ddd-var-2": Task(kind="test", label="ddd-var-2", attributes={}, task={}),
+ }
+ graph = Graph(
+ nodes=set(
+ [
+ "a",
+ "b",
+ "c",
+ "ddd-1",
+ "ddd-2",
+ "ddd-1-cf",
+ "ddd-2-cf",
+ "ddd-var-1",
+ "ddd-var-2",
+ ]
+ ),
+ edges=set(),
+ )
+ return TaskGraph(tasks, graph)
+
+ @contextlib.contextmanager
+ def fake_TryOptionSyntax(self):
+ orig_TryOptionSyntax = try_option_syntax.TryOptionSyntax
+ try:
+ try_option_syntax.TryOptionSyntax = FakeTryOptionSyntax
+ yield
+ finally:
+ try_option_syntax.TryOptionSyntax = orig_TryOptionSyntax
+
+ def test_empty_try(self):
+ "try_mode = None runs nothing"
+ tg = self.make_task_graph()
+ method = target_tasks.get_method("try_tasks")
+ params = {
+ "try_mode": None,
+ "project": "try",
+ "message": "",
+ }
+        # with try_mode=None, nothing is targeted, not even the task with
+        # run_on_projects: try
+ self.assertEqual(method(tg, params, {}), [])
+
+ def test_try_option_syntax(self):
+ "try_mode = try_option_syntax uses TryOptionSyntax"
+ tg = self.make_task_graph()
+ method = target_tasks.get_method("try_tasks")
+ with self.fake_TryOptionSyntax():
+ params = {
+ "try_mode": "try_option_syntax",
+ "message": "try: -p all",
+ }
+ self.assertEqual(method(tg, params, {}), ["b"])
+
+ def test_try_task_config(self):
+ "try_mode = try_task_config uses the try config"
+ tg = self.make_task_graph()
+ method = target_tasks.get_method("try_tasks")
+ params = {
+ "try_mode": "try_task_config",
+ "try_task_config": {"tasks": ["a"]},
+ }
+ self.assertEqual(method(tg, params, {}), ["a"])
+
+ def test_try_task_config_regex(self):
+ "try_mode = try_task_config uses the try config with regex instead of chunk numbers"
+ tg = self.make_task_graph()
+ method = target_tasks.get_method("try_tasks")
+ params = {
+ "try_mode": "try_task_config",
+ "try_task_config": {"new-test-config": True, "tasks": ["ddd-*"]},
+ "project": "try",
+ }
+ self.assertEqual(sorted(method(tg, params, {})), ["ddd-1", "ddd-2"])
+
+ def test_try_task_config_absolute(self):
+ "try_mode = try_task_config uses the try config with full task labels"
+ tg = self.make_task_graph()
+ method = target_tasks.get_method("try_tasks")
+ params = {
+ "try_mode": "try_task_config",
+ "try_task_config": {
+ "new-test-config": True,
+ "tasks": ["ddd-var-2", "ddd-1"],
+ },
+ "project": "try",
+ }
+ self.assertEqual(sorted(method(tg, params, {})), ["ddd-1", "ddd-var-2"])
+
+ def test_try_task_config_regex_var(self):
+ "try_mode = try_task_config uses the try config with regex instead of chunk numbers and a test variant"
+ tg = self.make_task_graph()
+ method = target_tasks.get_method("try_tasks")
+ params = {
+ "try_mode": "try_task_config",
+ "try_task_config": {"new-test-config": True, "tasks": ["ddd-var-*"]},
+ "project": "try",
+ }
+ self.assertEqual(sorted(method(tg, params, {})), ["ddd-var-1", "ddd-var-2"])
+
+
+# tests for specific filters
+
+
+@pytest.mark.parametrize(
+ "name,params,expected",
+ (
+ pytest.param(
+ "filter_tests_without_manifests",
+ {
+ "task": Task(kind="test", label="a", attributes={}, task={}),
+ "parameters": None,
+ },
+ True,
+ id="filter_tests_without_manifests_not_in_attributes",
+ ),
+ pytest.param(
+ "filter_tests_without_manifests",
+ {
+ "task": Task(
+ kind="test",
+ label="a",
+ attributes={"test_manifests": ["foo"]},
+ task={},
+ ),
+ "parameters": None,
+ },
+ True,
+ id="filter_tests_without_manifests_has_test_manifests",
+ ),
+ pytest.param(
+ "filter_tests_without_manifests",
+ {
+ "task": Task(
+ kind="build",
+ label="a",
+ attributes={"test_manifests": None},
+ task={},
+ ),
+ "parameters": None,
+ },
+ True,
+ id="filter_tests_without_manifests_not_a_test",
+ ),
+ pytest.param(
+ "filter_tests_without_manifests",
+ {
+ "task": Task(
+ kind="test", label="a", attributes={"test_manifests": None}, task={}
+ ),
+ "parameters": None,
+ },
+ False,
+ id="filter_tests_without_manifests_has_no_test_manifests",
+ ),
+ pytest.param(
+ "filter_by_regex",
+ {
+ "task_label": "build-linux64-debug",
+ "regexes": [re.compile("build")],
+ "mode": "include",
+ },
+ True,
+ id="filter_regex_simple_include",
+ ),
+ pytest.param(
+ "filter_by_regex",
+ {
+ "task_label": "build-linux64-debug",
+ "regexes": [re.compile("linux(.+)debug")],
+ "mode": "include",
+ },
+ True,
+ id="filter_regex_re_include",
+ ),
+ pytest.param(
+ "filter_by_regex",
+ {
+ "task_label": "build-linux64-debug",
+ "regexes": [re.compile("nothing"), re.compile("linux(.+)debug")],
+ "mode": "include",
+ },
+ True,
+ id="filter_regex_re_include_multiple",
+ ),
+ pytest.param(
+ "filter_by_regex",
+ {
+ "task_label": "build-linux64-debug",
+ "regexes": [re.compile("build")],
+ "mode": "exclude",
+ },
+ False,
+ id="filter_regex_simple_exclude",
+ ),
+ pytest.param(
+ "filter_by_regex",
+ {
+ "task_label": "build-linux64-debug",
+ "regexes": [re.compile("linux(.+)debug")],
+ "mode": "exclude",
+ },
+ False,
+ id="filter_regex_re_exclude",
+ ),
+ pytest.param(
+ "filter_by_regex",
+ {
+ "task_label": "build-linux64-debug",
+ "regexes": [re.compile("linux(.+)debug"), re.compile("nothing")],
+ "mode": "exclude",
+ },
+ False,
+ id="filter_regex_re_exclude_multiple",
+ ),
+ pytest.param(
+ "filter_unsupported_artifact_builds",
+ {
+ "task": Task(
+ kind="test",
+ label="a",
+ attributes={"supports-artifact-builds": False},
+ task={},
+ ),
+ "parameters": {
+ "try_task_config": {
+ "use-artifact-builds": False,
+ },
+ },
+ },
+ True,
+ id="filter_unsupported_artifact_builds_no_artifact_builds",
+ ),
+ pytest.param(
+ "filter_unsupported_artifact_builds",
+ {
+ "task": Task(
+ kind="test",
+ label="a",
+ attributes={"supports-artifact-builds": False},
+ task={},
+ ),
+ "parameters": {
+ "try_task_config": {
+ "use-artifact-builds": True,
+ },
+ },
+ },
+ False,
+ id="filter_unsupported_artifact_builds_removed",
+ ),
+ pytest.param(
+ "filter_unsupported_artifact_builds",
+ {
+ "task": Task(
+ kind="test",
+ label="a",
+ attributes={"supports-artifact-builds": True},
+ task={},
+ ),
+ "parameters": {
+ "try_task_config": {
+ "use-artifact-builds": True,
+ },
+ },
+ },
+ True,
+ id="filter_unsupported_artifact_builds_not_removed",
+ ),
+ pytest.param(
+ "filter_unsupported_artifact_builds",
+ {
+ "task": Task(kind="test", label="a", attributes={}, task={}),
+ "parameters": {
+ "try_task_config": {
+ "use-artifact-builds": True,
+ },
+ },
+ },
+ True,
+ id="filter_unsupported_artifact_builds_not_removed",
+ ),
+ ),
+)
+def test_filters(name, params, expected):
+ func = getattr(target_tasks, name)
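+    # The identity check below also verifies that each filter returns an
+    # actual bool rather than merely a truthy or falsy value.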
+ assert func(**params) is expected
+
+
+if __name__ == "__main__":
+ main()
diff --git a/taskcluster/gecko_taskgraph/test/test_taskcluster_yml.py b/taskcluster/gecko_taskgraph/test/test_taskcluster_yml.py
new file mode 100644
index 0000000000..cdf94ec3e1
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/test/test_taskcluster_yml.py
@@ -0,0 +1,145 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import pprint
+import unittest
+
+import jsone
+import slugid
+from mozunit import main
+from taskgraph.util.time import current_json_time
+from taskgraph.util.yaml import load_yaml
+
+from gecko_taskgraph import GECKO
+
+
+class TestTaskclusterYml(unittest.TestCase):
+ @property
+ def taskcluster_yml(self):
+ return load_yaml(GECKO, ".taskcluster.yml")
+
+ def test_push(self):
+ context = {
+ "tasks_for": "hg-push",
+ "push": {
+ "revision": "e8d2d9aff5026ef1f1777b781b47fdcbdb9d8f20",
+ "base_revision": "e8aebe488b2f2e567940577de25013d00e818f7c",
+ "owner": "dustin@mozilla.com",
+ "pushlog_id": 1556565286,
+ "pushdate": 112957,
+ },
+ "repository": {
+ "url": "https://hg.mozilla.org/mozilla-central",
+ "project": "mozilla-central",
+ "level": "3",
+ },
+ "ownTaskId": slugid.nice(),
+ }
+ rendered = jsone.render(self.taskcluster_yml, context)
+ pprint.pprint(rendered)
+ self.assertEqual(
+ rendered["tasks"][0]["metadata"]["name"], "Gecko Decision Task"
+ )
+ self.assertIn("matrixBody", rendered["tasks"][0]["extra"]["notify"])
+
+ def test_push_non_mc(self):
+ context = {
+ "tasks_for": "hg-push",
+ "push": {
+ "revision": "e8d2d9aff5026ef1f1777b781b47fdcbdb9d8f20",
+ "base_revision": "e8aebe488b2f2e567940577de25013d00e818f7c",
+ "owner": "dustin@mozilla.com",
+ "pushlog_id": 1556565286,
+ "pushdate": 112957,
+ },
+ "repository": {
+ "url": "https://hg.mozilla.org/releases/mozilla-beta",
+ "project": "mozilla-beta",
+ "level": "3",
+ },
+ "ownTaskId": slugid.nice(),
+ }
+ rendered = jsone.render(self.taskcluster_yml, context)
+ pprint.pprint(rendered)
+ self.assertEqual(
+ rendered["tasks"][0]["metadata"]["name"], "Gecko Decision Task"
+ )
+ self.assertNotIn("matrixBody", rendered["tasks"][0]["extra"]["notify"])
+
+ def test_cron(self):
+ context = {
+ "tasks_for": "cron",
+ "repository": {
+ "url": "https://hg.mozilla.org/mozilla-central",
+ "project": "mozilla-central",
+ "level": 3,
+ },
+ "push": {
+ "revision": "e8aebe488b2f2e567940577de25013d00e818f7c",
+ "base_revision": "54cbb3745cdb9a8aa0a4428d405b3b2e1c7d13c2",
+ "pushlog_id": -1,
+ "pushdate": 0,
+ "owner": "cron",
+ },
+ "cron": {
+ "task_id": "<cron task id>",
+ "job_name": "test",
+ "job_symbol": "T",
+ "quoted_args": "abc def",
+ },
+ "now": current_json_time(),
+ "ownTaskId": slugid.nice(),
+ }
+ rendered = jsone.render(self.taskcluster_yml, context)
+ pprint.pprint(rendered)
+ self.assertEqual(
+ rendered["tasks"][0]["metadata"]["name"], "Decision Task for cron job test"
+ )
+
+ def test_action(self):
+ context = {
+ "tasks_for": "action",
+ "repository": {
+ "url": "https://hg.mozilla.org/mozilla-central",
+ "project": "mozilla-central",
+ "level": 3,
+ },
+ "push": {
+ "revision": "e8d2d9aff5026ef1f1777b781b47fdcbdb9d8f20",
+ "base_revision": "e8aebe488b2f2e567940577de25013d00e818f7c",
+ "owner": "dustin@mozilla.com",
+ "pushlog_id": 1556565286,
+ "pushdate": 112957,
+ },
+ "action": {
+ "name": "test-action",
+ "title": "Test Action",
+ "description": "Just testing",
+ "taskGroupId": slugid.nice(),
+ "symbol": "t",
+ "repo_scope": "assume:repo:hg.mozilla.org/try:action:generic",
+ "cb_name": "test_action",
+ },
+ "input": {},
+ "parameters": {},
+ "now": current_json_time(),
+ "taskId": slugid.nice(),
+ "ownTaskId": slugid.nice(),
+ "clientId": "testing/testing/testing",
+ }
+ rendered = jsone.render(self.taskcluster_yml, context)
+ pprint.pprint(rendered)
+ self.assertEqual(
+ rendered["tasks"][0]["metadata"]["name"], "Action: Test Action"
+ )
+
+ def test_unknown(self):
+ context = {"tasks_for": "bitkeeper-push"}
+ rendered = jsone.render(self.taskcluster_yml, context)
+ pprint.pprint(rendered)
+ self.assertEqual(rendered["tasks"], [])
+
+
+if __name__ == "__main__":
+ main()
diff --git a/taskcluster/gecko_taskgraph/test/test_transforms_job.py b/taskcluster/gecko_taskgraph/test/test_transforms_job.py
new file mode 100644
index 0000000000..b032307ea6
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/test/test_transforms_job.py
@@ -0,0 +1,111 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+Tests for the 'job' transform subsystem.
+"""
+
+
+import os
+from copy import deepcopy
+
+import pytest
+from mozunit import main
+from taskgraph.config import load_graph_config
+from taskgraph.transforms.base import TransformConfig
+from taskgraph.util.schema import Schema, validate_schema
+
+from gecko_taskgraph import GECKO
+from gecko_taskgraph.test.conftest import FakeParameters
+from gecko_taskgraph.transforms import job
+from gecko_taskgraph.transforms.job import run_task # noqa: F401
+from gecko_taskgraph.transforms.job.common import add_cache
+from gecko_taskgraph.transforms.task import payload_builders
+
+here = os.path.abspath(os.path.dirname(__file__))
+
+
+TASK_DEFAULTS = {
+ "description": "fake description",
+ "label": "fake-task-label",
+ "run": {
+ "using": "run-task",
+ },
+}
+
+
+@pytest.fixture(scope="module")
+def config():
+ graph_config = load_graph_config(os.path.join(GECKO, "taskcluster", "ci"))
+ params = FakeParameters(
+ {
+ "base_repository": "http://hg.example.com",
+ "head_repository": "http://hg.example.com",
+ "head_rev": "abcdef",
+ "level": 1,
+ "project": "example",
+ }
+ )
+ return TransformConfig(
+ "job_test", here, {}, params, {}, graph_config, write_artifacts=False
+ )
+
+
+@pytest.fixture()
+def transform(monkeypatch, config):
+ """Run the job transforms on the specified task but return the inputs to
+ `configure_taskdesc_for_run` without executing it.
+
+ This gives test functions an easy way to generate the inputs required for
+ many of the `run_using` subsystems.
+ """
+
+ def inner(task_input):
+ task = deepcopy(TASK_DEFAULTS)
+ task.update(task_input)
+ frozen_args = []
+
+ def _configure_taskdesc_for_run(*args):
+ frozen_args.extend(args)
+
+ monkeypatch.setattr(
+ job, "configure_taskdesc_for_run", _configure_taskdesc_for_run
+ )
+
+ for _ in job.transforms(config, [task]):
+ # This forces the generator to be evaluated
+ pass
+
+ return frozen_args
+
+ return inner
+
+
+@pytest.mark.parametrize(
+ "task",
+ [
+ {"worker-type": "b-linux"},
+ {"worker-type": "t-win10-64-hw"},
+ ],
+ ids=lambda t: t["worker-type"],
+)
+def test_worker_caches(task, transform):
+ config, job, taskdesc, impl = transform(task)
+ add_cache(job, taskdesc, "cache1", "/cache1")
+ add_cache(job, taskdesc, "cache2", "/cache2", skip_untrusted=True)
+
+ if impl not in ("docker-worker", "generic-worker"):
+ pytest.xfail(f"caches not implemented for '{impl}'")
+
+ key = "caches" if impl == "docker-worker" else "mounts"
+ assert key in taskdesc["worker"]
+ assert len(taskdesc["worker"][key]) == 2
+
+ # Create a new schema object with just the part relevant to caches.
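+    # (This keeps the test independent of the rest of the payload schema.)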
+ partial_schema = Schema(payload_builders[impl].schema.schema[key])
+ validate_schema(partial_schema, taskdesc["worker"][key], "validation error")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/taskcluster/gecko_taskgraph/test/test_transforms_test.py b/taskcluster/gecko_taskgraph/test/test_transforms_test.py
new file mode 100644
index 0000000000..1e5067a2b5
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/test/test_transforms_test.py
@@ -0,0 +1,330 @@
+# Any copyright is dedicated to the Public Domain.
+# https://creativecommons.org/publicdomain/zero/1.0/
+"""
+Tests for the 'tests.py' transforms
+"""
+
+import hashlib
+import json
+from functools import partial
+from pprint import pprint
+
+import mozunit
+import pytest
+
+from gecko_taskgraph.transforms import test as test_transforms
+
+
+@pytest.fixture
+def make_test_task():
+ """Create a test task definition with required default values."""
+
+ def inner(**extra):
+ task = {
+ "attributes": {},
+ "build-platform": "linux64",
+ "mozharness": {"extra-options": []},
+ "test-platform": "linux64",
+ "treeherder-symbol": "g(t)",
+ "try-name": "task",
+ }
+ task.update(extra)
+ return task
+
+ return inner
+
+
+def test_split_variants(monkeypatch, run_full_config_transform, make_test_task):
+ # mock out variant definitions
+ monkeypatch.setattr(
+ test_transforms.variant,
+ "TEST_VARIANTS",
+ {
+ "foo": {
+ "description": "foo variant",
+ "suffix": "foo",
+ "mozinfo": "foo",
+ "component": "foo bar",
+ "expiration": "never",
+ "merge": {
+ "mozharness": {
+ "extra-options": [
+ "--setpref=foo=1",
+ ],
+ },
+ },
+ },
+ "bar": {
+ "description": "bar variant",
+ "suffix": "bar",
+ "mozinfo": "bar",
+ "component": "foo bar",
+ "expiration": "never",
+ "when": {
+ "$eval": "task['test-platform'][:5] == 'linux'",
+ },
+ "merge": {
+ "mozharness": {
+ "extra-options": [
+ "--setpref=bar=1",
+ ],
+ },
+ },
+ "replace": {"tier": 2},
+ },
+ },
+ )
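+    # Two fake variants: "foo" simply merges an extra mozharness option,
+    # while "bar" also carries a "when" guard (linux-only) and replaces the
+    # task's tier.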
+
+ def make_expected(variant):
+ """Helper to generate expected tasks."""
+ return make_test_task(
+ **{
+ "attributes": {"unittest_variant": variant},
+ "description": f"{variant} variant",
+ "mozharness": {
+ "extra-options": [f"--setpref={variant}=1"],
+ },
+ "treeherder-symbol": f"g-{variant}(t)",
+ "variant-suffix": f"-{variant}",
+ }
+ )
+
+ run_split_variants = partial(
+ run_full_config_transform, test_transforms.variant.split_variants
+ )
+
+ # test no variants
+ input_task = make_test_task(
+ **{
+ "run-without-variant": True,
+ }
+ )
+ tasks = list(run_split_variants(input_task))
+ assert len(tasks) == 1
+ assert tasks[0] == input_task
+
+ # test variants are split into expected tasks
+ input_task = make_test_task(
+ **{
+ "run-without-variant": True,
+ "variants": ["foo", "bar"],
+ }
+ )
+ tasks = list(run_split_variants(input_task))
+ assert len(tasks) == 3
+ assert tasks[0] == make_test_task()
+ assert tasks[1] == make_expected("foo")
+
+ expected = make_expected("bar")
+ expected["tier"] = 2
+ assert tasks[2] == expected
+
+ # test composite variants
+ input_task = make_test_task(
+ **{
+ "run-without-variant": True,
+ "variants": ["foo+bar"],
+ }
+ )
+ tasks = list(run_split_variants(input_task))
+ assert len(tasks) == 2
+ assert tasks[1]["attributes"]["unittest_variant"] == "foo+bar"
+ assert tasks[1]["mozharness"]["extra-options"] == [
+ "--setpref=foo=1",
+ "--setpref=bar=1",
+ ]
+ assert tasks[1]["treeherder-symbol"] == "g-foo-bar(t)"
+
+ # test 'when' filter
+ input_task = make_test_task(
+ **{
+ "run-without-variant": True,
+            # this should cause the task to be filtered out of the 'bar' and 'foo+bar' variants
+ "test-platform": "windows",
+ "variants": ["foo", "bar", "foo+bar"],
+ }
+ )
+ tasks = list(run_split_variants(input_task))
+ assert len(tasks) == 2
+ assert "unittest_variant" not in tasks[0]["attributes"]
+ assert tasks[1]["attributes"]["unittest_variant"] == "foo"
+
+ # test 'run-without-variants=False'
+ input_task = make_test_task(
+ **{
+ "run-without-variant": False,
+ "variants": ["foo"],
+ }
+ )
+ tasks = list(run_split_variants(input_task))
+ assert len(tasks) == 1
+ assert tasks[0]["attributes"]["unittest_variant"] == "foo"
+
+
+@pytest.mark.parametrize(
+ "task,expected",
+ (
+ pytest.param(
+ {
+ "attributes": {"unittest_variant": "webrender-sw+1proc"},
+ "test-platform": "linux1804-64-clang-trunk-qr/opt",
+ },
+ {
+ "platform": {
+ "arch": "64",
+ "os": {
+ "name": "linux",
+ "version": "1804",
+ },
+ },
+ "build": {
+ "type": "opt",
+ "clang-trunk": True,
+ },
+ "runtime": {
+ "1proc": True,
+ "webrender-sw": True,
+ },
+ },
+ id="linux",
+ ),
+ pytest.param(
+ {
+ "attributes": {},
+ "test-platform": "linux2204-64-wayland-shippable/opt",
+ },
+ {
+ "platform": {
+ "arch": "64",
+ "display": "wayland",
+ "os": {
+ "name": "linux",
+ "version": "2204",
+ },
+ },
+ "build": {
+ "type": "opt",
+ "shippable": True,
+ },
+ "runtime": {},
+ },
+ id="linux wayland shippable",
+ ),
+ pytest.param(
+ {
+ "attributes": {},
+ "test-platform": "android-hw-a51-11-0-arm7-shippable-qr/opt",
+ },
+ {
+ "platform": {
+ "arch": "arm7",
+ "device": "a51",
+ "os": {
+ "name": "android",
+ "version": "11.0",
+ },
+ },
+ "build": {
+ "type": "opt",
+ "shippable": True,
+ },
+ "runtime": {},
+ },
+ id="android",
+ ),
+ pytest.param(
+ {
+ "attributes": {},
+ "test-platform": "windows10-64-2004-ref-hw-2017-ccov/debug",
+ },
+ {
+ "platform": {
+ "arch": "64",
+ "machine": "ref-hw-2017",
+ "os": {
+ "build": "2004",
+ "name": "windows",
+ "version": "10",
+ },
+ },
+ "build": {
+ "type": "debug",
+ "ccov": True,
+ },
+ "runtime": {},
+ },
+ id="windows",
+ ),
+ ),
+)
+def test_set_test_setting(run_transform, task, expected):
+ # add hash to 'expected'
+ expected["_hash"] = hashlib.sha256(
+ json.dumps(expected, sort_keys=True).encode("utf-8")
+ ).hexdigest()[:12]
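+    # The transform stamps each setting with a short hash: the first 12 hex
+    # digits of the sha256 of the sorted-key JSON serialization, reproduced
+    # here so the comparison below succeeds.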
+
+ task = list(run_transform(test_transforms.other.set_test_setting, task))[0]
+ assert "test-setting" in task
+ assert task["test-setting"] == expected
+
+
+def assert_spi_not_disabled(task):
+ extra_options = task["mozharness"]["extra-options"]
+ # The pref to enable this gets set outside of this transform, so only
+ # bother asserting that the pref to disable does not exist.
+ assert (
+ "--setpref=media.peerconnection.mtransport_process=false" not in extra_options
+ )
+ assert "--setpref=network.process.enabled=false" not in extra_options
+
+
+def assert_spi_disabled(task):
+ extra_options = task["mozharness"]["extra-options"]
+ assert "--setpref=media.peerconnection.mtransport_process=false" in extra_options
+ assert "--setpref=media.peerconnection.mtransport_process=true" not in extra_options
+ assert "--setpref=network.process.enabled=false" in extra_options
+ assert "--setpref=network.process.enabled=true" not in extra_options
+
+
+@pytest.mark.parametrize(
+ "task,callback",
+ (
+ pytest.param(
+ {"attributes": {"unittest_variant": "socketprocess"}},
+ assert_spi_not_disabled,
+ id="socketprocess",
+ ),
+ pytest.param(
+ {
+ "attributes": {"unittest_variant": "socketprocess_networking"},
+ },
+ assert_spi_not_disabled,
+ id="socketprocess_networking",
+ ),
+ pytest.param({}, assert_spi_disabled, id="no variant"),
+ pytest.param(
+ {"suite": "cppunit", "attributes": {"unittest_variant": "socketprocess"}},
+ assert_spi_not_disabled,
+ id="excluded suite",
+ ),
+ pytest.param(
+ {"attributes": {"unittest_variant": "no-fission+socketprocess"}},
+ assert_spi_not_disabled,
+ id="composite variant",
+ ),
+ ),
+)
+def test_ensure_spi_disabled_on_all_but_spi(
+ make_test_task, run_transform, task, callback
+):
+ task.setdefault("suite", "mochitest-plain")
+ task = make_test_task(**task)
+ task = list(
+ run_transform(test_transforms.other.ensure_spi_disabled_on_all_but_spi, task)
+ )[0]
+ pprint(task)
+ callback(task)
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/taskcluster/gecko_taskgraph/test/test_try_option_syntax.py b/taskcluster/gecko_taskgraph/test/test_try_option_syntax.py
new file mode 100644
index 0000000000..a37de53378
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/test/test_try_option_syntax.py
@@ -0,0 +1,430 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import unittest
+
+from mozunit import main
+from taskgraph.graph import Graph
+from taskgraph.task import Task
+from taskgraph.taskgraph import TaskGraph
+
+from gecko_taskgraph.try_option_syntax import TryOptionSyntax, parse_message
+
+
+def unittest_task(n, tp, bt="opt"):
+ return (
+ n,
+ Task(
+ "test",
+ n,
+ {
+ "unittest_try_name": n,
+ "test_platform": tp.split("/")[0],
+ "build_type": bt,
+ },
+ {},
+ ),
+ )
+
+
+def talos_task(n, tp, bt="opt"):
+ return (
+ n,
+ Task(
+ "test",
+ n,
+ {
+ "talos_try_name": n,
+ "test_platform": tp.split("/")[0],
+ "build_type": bt,
+ },
+ {},
+ ),
+ )
+
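+# For example, unittest_task("gtest", "linux64/asan") yields the pair
+# ("gtest", Task(...)) whose attributes record test_platform="linux64" (the
+# part before the slash) and the default build_type="opt".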
+
+tasks = {
+ k: v
+ for k, v in [
+ unittest_task("mochitest-browser-chrome", "linux/opt"),
+ unittest_task("mochitest-browser-chrome-e10s", "linux64/opt"),
+ unittest_task("mochitest-chrome", "linux/debug", "debug"),
+ unittest_task("mochitest-webgl1-core", "linux/debug", "debug"),
+ unittest_task("mochitest-webgl1-ext", "linux/debug", "debug"),
+ unittest_task("mochitest-webgl2-core", "linux/debug", "debug"),
+ unittest_task("mochitest-webgl2-ext", "linux/debug", "debug"),
+ unittest_task("mochitest-webgl2-deqp", "linux/debug", "debug"),
+ unittest_task("extra1", "linux", "debug/opt"),
+ unittest_task("extra2", "win32/opt"),
+ unittest_task("crashtest-e10s", "linux/other"),
+ unittest_task("gtest", "linux64/asan"),
+ talos_task("dromaeojs", "linux64/psan"),
+ unittest_task("extra3", "linux/opt"),
+ unittest_task("extra4", "linux64/debug", "debug"),
+ unittest_task("extra5", "linux/this"),
+ unittest_task("extra6", "linux/that"),
+ unittest_task("extra7", "linux/other"),
+ unittest_task("extra8", "linux64/asan"),
+ talos_task("extra9", "linux64/psan"),
+ ]
+}
+
+RIDEALONG_BUILDS = {
+ "linux": ["linux-ridealong"],
+ "linux64": ["linux64-ridealong"],
+}
+
+GRAPH_CONFIG = {
+ "try": {"ridealong-builds": RIDEALONG_BUILDS},
+}
+
+for r in RIDEALONG_BUILDS.values():
+ tasks.update({k: v for k, v in [unittest_task(n + "-test", n) for n in r]})
+
+unittest_tasks = {k: v for k, v in tasks.items() if "unittest_try_name" in v.attributes}
+talos_tasks = {k: v for k, v in tasks.items() if "talos_try_name" in v.attributes}
+graph_with_jobs = TaskGraph(tasks, Graph(set(tasks), set()))
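+# The graph has no edges; these tests exercise option parsing and attribute
+# matching only. parse_message() turns a try message into a parameters dict,
+# which TryOptionSyntax then resolves against this graph.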
+
+
+class TestTryOptionSyntax(unittest.TestCase):
+ def test_unknown_args(self):
+ "unknown arguments are ignored"
+ parameters = parse_message("try: --doubledash -z extra")
+ tos = TryOptionSyntax(parameters, graph_with_jobs, GRAPH_CONFIG)
+        # equivalent to "try:"...
+ self.assertEqual(tos.build_types, [])
+ self.assertEqual(tos.jobs, [])
+
+ def test_apostrophe_in_message(self):
+ "apostrophe does not break parsing"
+ parameters = parse_message("Increase spammy log's log level. try: -b do")
+ tos = TryOptionSyntax(parameters, graph_with_jobs, GRAPH_CONFIG)
+ self.assertEqual(sorted(tos.build_types), ["debug", "opt"])
+
+ def test_b_do(self):
+ "-b do should produce both build_types"
+ parameters = parse_message("try: -b do")
+ tos = TryOptionSyntax(parameters, graph_with_jobs, GRAPH_CONFIG)
+ self.assertEqual(sorted(tos.build_types), ["debug", "opt"])
+
+ def test_b_d(self):
+ "-b d should produce build_types=['debug']"
+ parameters = parse_message("try: -b d")
+ tos = TryOptionSyntax(parameters, graph_with_jobs, GRAPH_CONFIG)
+ self.assertEqual(sorted(tos.build_types), ["debug"])
+
+ def test_b_o(self):
+ "-b o should produce build_types=['opt']"
+ parameters = parse_message("try: -b o")
+ tos = TryOptionSyntax(parameters, graph_with_jobs, GRAPH_CONFIG)
+ self.assertEqual(sorted(tos.build_types), ["opt"])
+
+ def test_build_o(self):
+ "--build o should produce build_types=['opt']"
+ parameters = parse_message("try: --build o")
+ tos = TryOptionSyntax(parameters, graph_with_jobs, GRAPH_CONFIG)
+ self.assertEqual(sorted(tos.build_types), ["opt"])
+
+ def test_b_dx(self):
+ "-b dx should produce build_types=['debug'], silently ignoring the x"
+ parameters = parse_message("try: -b dx")
+ tos = TryOptionSyntax(parameters, graph_with_jobs, GRAPH_CONFIG)
+ self.assertEqual(sorted(tos.build_types), ["debug"])
+
+ def test_j_job(self):
+ "-j somejob sets jobs=['somejob']"
+ parameters = parse_message("try: -j somejob")
+ tos = TryOptionSyntax(parameters, graph_with_jobs, GRAPH_CONFIG)
+ self.assertEqual(sorted(tos.jobs), ["somejob"])
+
+ def test_j_jobs(self):
+ "-j job1,job2 sets jobs=['job1', 'job2']"
+ parameters = parse_message("try: -j job1,job2")
+ tos = TryOptionSyntax(parameters, graph_with_jobs, GRAPH_CONFIG)
+ self.assertEqual(sorted(tos.jobs), ["job1", "job2"])
+
+ def test_j_all(self):
+ "-j all sets jobs=None"
+ parameters = parse_message("try: -j all")
+ tos = TryOptionSyntax(parameters, graph_with_jobs, GRAPH_CONFIG)
+ self.assertEqual(tos.jobs, None)
+
+ def test_j_twice(self):
+ "-j job1 -j job2 sets jobs=job1, job2"
+ parameters = parse_message("try: -j job1 -j job2")
+ tos = TryOptionSyntax(parameters, graph_with_jobs, GRAPH_CONFIG)
+ self.assertEqual(sorted(tos.jobs), sorted(["job1", "job2"]))
+
+ def test_p_all(self):
+ "-p all sets platforms=None"
+ parameters = parse_message("try: -p all")
+ tos = TryOptionSyntax(parameters, graph_with_jobs, GRAPH_CONFIG)
+ self.assertEqual(tos.platforms, None)
+
+ def test_p_linux(self):
+ "-p linux sets platforms=['linux', 'linux-ridealong']"
+ parameters = parse_message("try: -p linux")
+ tos = TryOptionSyntax(parameters, graph_with_jobs, GRAPH_CONFIG)
+ self.assertEqual(tos.platforms, ["linux", "linux-ridealong"])
+
+ def test_p_linux_win32(self):
+ "-p linux,win32 sets platforms=['linux', 'linux-ridealong', 'win32']"
+ parameters = parse_message("try: -p linux,win32")
+ tos = TryOptionSyntax(parameters, graph_with_jobs, GRAPH_CONFIG)
+ self.assertEqual(sorted(tos.platforms), ["linux", "linux-ridealong", "win32"])
+
+ def test_p_expands_ridealongs(self):
+ "-p linux,linux64 includes the RIDEALONG_BUILDS"
+ parameters = parse_message("try: -p linux,linux64")
+ tos = TryOptionSyntax(parameters, graph_with_jobs, GRAPH_CONFIG)
+ platforms = set(["linux"] + RIDEALONG_BUILDS["linux"])
+ platforms |= set(["linux64"] + RIDEALONG_BUILDS["linux64"])
+ self.assertEqual(sorted(tos.platforms), sorted(platforms))
+
+ def test_u_none(self):
+ "-u none sets unittests=[]"
+ parameters = parse_message("try: -u none")
+ tos = TryOptionSyntax(parameters, graph_with_jobs, GRAPH_CONFIG)
+ self.assertEqual(tos.unittests, [])
+
+ def test_u_all(self):
+ "-u all sets unittests=[..whole list..]"
+ parameters = parse_message("try: -u all")
+ tos = TryOptionSyntax(parameters, graph_with_jobs, GRAPH_CONFIG)
+ self.assertEqual(tos.unittests, [{"test": t} for t in sorted(unittest_tasks)])
+
+ def test_u_single(self):
+ "-u mochitest-webgl1-core sets unittests=[mochitest-webgl1-core]"
+ parameters = parse_message("try: -u mochitest-webgl1-core")
+ tos = TryOptionSyntax(parameters, graph_with_jobs, GRAPH_CONFIG)
+ self.assertEqual(tos.unittests, [{"test": "mochitest-webgl1-core"}])
+
+ def test_u_alias(self):
+ "-u mochitest-gl sets unittests=[mochitest-webgl*]"
+ parameters = parse_message("try: -u mochitest-gl")
+ tos = TryOptionSyntax(parameters, graph_with_jobs, GRAPH_CONFIG)
+ self.assertEqual(
+ tos.unittests,
+ [
+ {"test": t}
+ for t in [
+ "mochitest-webgl1-core",
+ "mochitest-webgl1-ext",
+ "mochitest-webgl2-core",
+ "mochitest-webgl2-deqp",
+ "mochitest-webgl2-ext",
+ ]
+ ],
+ )
+
+ def test_u_multi_alias(self):
+ "-u e10s sets unittests=[all e10s unittests]"
+ parameters = parse_message("try: -u e10s")
+ tos = TryOptionSyntax(parameters, graph_with_jobs, GRAPH_CONFIG)
+ self.assertEqual(
+ tos.unittests, [{"test": t} for t in sorted(unittest_tasks) if "e10s" in t]
+ )
+
+ def test_u_commas(self):
+ "-u mochitest-webgl1-core,gtest sets unittests=both"
+ parameters = parse_message("try: -u mochitest-webgl1-core,gtest")
+ tos = TryOptionSyntax(parameters, graph_with_jobs, GRAPH_CONFIG)
+ self.assertEqual(
+ tos.unittests,
+ [
+ {"test": "gtest"},
+ {"test": "mochitest-webgl1-core"},
+ ],
+ )
+
+ def test_u_chunks(self):
+ "-u gtest-3,gtest-4 selects the third and fourth chunk of gtest"
+ parameters = parse_message("try: -u gtest-3,gtest-4")
+ tos = TryOptionSyntax(parameters, graph_with_jobs, GRAPH_CONFIG)
+ self.assertEqual(
+ sorted(tos.unittests),
+ sorted(
+ [
+ {"test": "gtest", "only_chunks": set("34")},
+ ]
+ ),
+ )
+
+ def test_u_platform(self):
+ "-u gtest[linux] selects the linux platform for gtest"
+ parameters = parse_message("try: -u gtest[linux]")
+ tos = TryOptionSyntax(parameters, graph_with_jobs, GRAPH_CONFIG)
+ self.assertEqual(
+ sorted(tos.unittests),
+ sorted(
+ [
+ {"test": "gtest", "platforms": ["linux"]},
+ ]
+ ),
+ )
+
+ def test_u_platforms(self):
+ "-u gtest[linux,win32] selects the linux and win32 platforms for gtest"
+ parameters = parse_message("try: -u gtest[linux,win32]")
+ tos = TryOptionSyntax(parameters, graph_with_jobs, GRAPH_CONFIG)
+ self.assertEqual(
+ sorted(tos.unittests),
+ sorted(
+ [
+ {"test": "gtest", "platforms": ["linux", "win32"]},
+ ]
+ ),
+ )
+
+ def test_u_platforms_pretty(self):
+ """-u gtest[Ubuntu] selects the linux, linux64 and linux64-asan
+ platforms for gtest"""
+ parameters = parse_message("try: -u gtest[Ubuntu]")
+ tos = TryOptionSyntax(parameters, graph_with_jobs, GRAPH_CONFIG)
+ self.assertEqual(
+ sorted(tos.unittests),
+ sorted(
+ [
+ {
+ "test": "gtest",
+ "platforms": [
+ "linux32",
+ "linux64",
+ "linux64-asan",
+ "linux1804-64",
+ "linux1804-64-asan",
+ ],
+ },
+ ]
+ ),
+ )
+
+ def test_u_platforms_negated(self):
+ "-u gtest[-linux] selects all platforms but linux for gtest"
+ parameters = parse_message("try: -u gtest[-linux]")
+ tos = TryOptionSyntax(parameters, graph_with_jobs, GRAPH_CONFIG)
+ all_platforms = {x.attributes["test_platform"] for x in unittest_tasks.values()}
+ self.assertEqual(
+ sorted(tos.unittests[0]["platforms"]),
+ sorted(x for x in all_platforms if x != "linux"),
+ )
+
+ def test_u_platforms_negated_pretty(self):
+ "-u gtest[Ubuntu,-x64] selects just linux for gtest"
+ parameters = parse_message("try: -u gtest[Ubuntu,-x64]")
+ tos = TryOptionSyntax(parameters, graph_with_jobs, GRAPH_CONFIG)
+ self.assertEqual(
+ sorted(tos.unittests),
+ sorted(
+ [
+ {"test": "gtest", "platforms": ["linux32"]},
+ ]
+ ),
+ )
+
+ def test_u_chunks_platforms(self):
+ "-u gtest-1[linux,win32] selects the linux and win32 platforms for chunk 1 of gtest"
+ parameters = parse_message("try: -u gtest-1[linux,win32]")
+ tos = TryOptionSyntax(parameters, graph_with_jobs, GRAPH_CONFIG)
+ self.assertEqual(
+ tos.unittests,
+ [
+ {
+ "test": "gtest",
+ "platforms": ["linux", "win32"],
+ "only_chunks": set("1"),
+ },
+ ],
+ )
+
+ def test_t_none(self):
+ "-t none sets talos=[]"
+ parameters = parse_message("try: -t none")
+ tos = TryOptionSyntax(parameters, graph_with_jobs, GRAPH_CONFIG)
+ self.assertEqual(tos.talos, [])
+
+ def test_t_all(self):
+ "-t all sets talos=[..whole list..]"
+ parameters = parse_message("try: -t all")
+ tos = TryOptionSyntax(parameters, graph_with_jobs, GRAPH_CONFIG)
+ self.assertEqual(tos.talos, [{"test": t} for t in sorted(talos_tasks)])
+
+ def test_t_single(self):
+ "-t mochitest-webgl sets talos=[mochitest-webgl]"
+ parameters = parse_message("try: -t mochitest-webgl")
+ tos = TryOptionSyntax(parameters, graph_with_jobs, GRAPH_CONFIG)
+ self.assertEqual(tos.talos, [{"test": "mochitest-webgl"}])
+
+ # -t shares an implementation with -u, so it's not tested heavily
+
+ def test_trigger_tests(self):
+ "--rebuild 10 sets trigger_tests"
+ parameters = parse_message("try: --rebuild 10")
+ tos = TryOptionSyntax(parameters, graph_with_jobs, GRAPH_CONFIG)
+ self.assertEqual(tos.trigger_tests, 10)
+
+ def test_talos_trigger_tests(self):
+ "--rebuild-talos 10 sets talos_trigger_tests"
+ parameters = parse_message("try: --rebuild-talos 10")
+ tos = TryOptionSyntax(parameters, graph_with_jobs, GRAPH_CONFIG)
+ self.assertEqual(tos.talos_trigger_tests, 10)
+
+ def test_interactive(self):
+ "--interactive sets interactive"
+ parameters = parse_message("try: --interactive")
+ tos = TryOptionSyntax(parameters, graph_with_jobs, GRAPH_CONFIG)
+ self.assertEqual(tos.interactive, True)
+
+ def test_all_email(self):
+ "--all-emails sets notifications"
+ parameters = parse_message("try: --all-emails")
+ tos = TryOptionSyntax(parameters, graph_with_jobs, GRAPH_CONFIG)
+ self.assertEqual(tos.notifications, "all")
+
+ def test_fail_email(self):
+ "--failure-emails sets notifications"
+ parameters = parse_message("try: --failure-emails")
+ tos = TryOptionSyntax(parameters, graph_with_jobs, GRAPH_CONFIG)
+ self.assertEqual(tos.notifications, "failure")
+
+ def test_no_email(self):
+ "no email settings don't set notifications"
+ parameters = parse_message("try:")
+ tos = TryOptionSyntax(parameters, graph_with_jobs, GRAPH_CONFIG)
+ self.assertEqual(tos.notifications, None)
+
+ def test_setenv(self):
+ "--setenv VAR=value adds a environment variables setting to env"
+ parameters = parse_message("try: --setenv VAR1=value1 --setenv VAR2=value2")
+ assert parameters["try_task_config"]["env"] == {
+ "VAR1": "value1",
+ "VAR2": "value2",
+ }
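+    # Note the split in assertions here and below: legacy options surface as
+    # attributes on the TryOptionSyntax object, whereas --setenv,
+    # --gecko-profile and --artifact are written by parse_message directly
+    # into parameters["try_task_config"].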
+
+ def test_profile(self):
+ "--gecko-profile sets profile to true"
+ parameters = parse_message("try: --gecko-profile")
+ assert parameters["try_task_config"]["gecko-profile"] is True
+
+ def test_tag(self):
+ "--tag TAG sets tag to TAG value"
+ parameters = parse_message("try: --tag tagName")
+ tos = TryOptionSyntax(parameters, graph_with_jobs, GRAPH_CONFIG)
+ self.assertEqual(tos.tag, "tagName")
+
+ def test_no_retry(self):
+ "--no-retry sets no_retry to true"
+ parameters = parse_message("try: --no-retry")
+ tos = TryOptionSyntax(parameters, graph_with_jobs, GRAPH_CONFIG)
+ self.assertTrue(tos.no_retry)
+
+ def test_artifact(self):
+ "--artifact sets artifact to true"
+ parameters = parse_message("try: --artifact")
+ assert parameters["try_task_config"]["use-artifact-builds"] is True
+
+
+if __name__ == "__main__":
+ main()
diff --git a/taskcluster/gecko_taskgraph/test/test_util_attributes.py b/taskcluster/gecko_taskgraph/test/test_util_attributes.py
new file mode 100644
index 0000000000..eafa4020bf
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/test/test_util_attributes.py
@@ -0,0 +1,99 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import unittest
+
+from mozunit import main
+from taskgraph.util.attributes import attrmatch
+
+from gecko_taskgraph.util.attributes import match_run_on_projects
+
+
+class Attrmatch(unittest.TestCase):
+ def test_trivial_match(self):
+ """Given no conditions, anything matches"""
+ self.assertTrue(attrmatch({}))
+
+ def test_missing_attribute(self):
+ """If a filtering attribute is not present, no match"""
+ self.assertFalse(attrmatch({}, someattr=10))
+
+ def test_literal_attribute(self):
+ """Literal attributes must match exactly"""
+ self.assertTrue(attrmatch({"att": 10}, att=10))
+ self.assertFalse(attrmatch({"att": 10}, att=20))
+
+ def test_set_attribute(self):
+ """Set attributes require set membership"""
+ self.assertTrue(attrmatch({"att": 10}, att={9, 10}))
+ self.assertFalse(attrmatch({"att": 10}, att={19, 20}))
+
+ def test_callable_attribute(self):
+ """Callable attributes are called and any False causes the match to fail"""
+ self.assertTrue(attrmatch({"att": 10}, att=lambda val: True))
+ self.assertFalse(attrmatch({"att": 10}, att=lambda val: False))
+
+ def even(val):
+ return val % 2 == 0
+
+ self.assertTrue(attrmatch({"att": 10}, att=even))
+ self.assertFalse(attrmatch({"att": 11}, att=even))
+
+ def test_all_matches_required(self):
+ """If only one attribute does not match, the result is False"""
+ self.assertFalse(attrmatch({"a": 1}, a=1, b=2, c=3))
+ self.assertFalse(attrmatch({"a": 1, "b": 2}, a=1, b=2, c=3))
+ self.assertTrue(attrmatch({"a": 1, "b": 2, "c": 3}, a=1, b=2, c=3))
+
+
+class MatchRunOnProjects(unittest.TestCase):
+ def test_empty(self):
+ self.assertFalse(match_run_on_projects("birch", []))
+
+ def test_all(self):
+ self.assertTrue(match_run_on_projects("birch", ["all"]))
+ self.assertTrue(match_run_on_projects("larch", ["all"]))
+ self.assertTrue(match_run_on_projects("autoland", ["all"]))
+ self.assertTrue(match_run_on_projects("mozilla-central", ["all"]))
+ self.assertTrue(match_run_on_projects("mozilla-beta", ["all"]))
+ self.assertTrue(match_run_on_projects("mozilla-release", ["all"]))
+
+ def test_release(self):
+ self.assertFalse(match_run_on_projects("birch", ["release"]))
+ self.assertTrue(match_run_on_projects("larch", ["release"]))
+ self.assertFalse(match_run_on_projects("autoland", ["release"]))
+ self.assertTrue(match_run_on_projects("mozilla-central", ["release"]))
+ self.assertTrue(match_run_on_projects("mozilla-beta", ["release"]))
+ self.assertTrue(match_run_on_projects("mozilla-release", ["release"]))
+
+ def test_integration(self):
+ self.assertFalse(match_run_on_projects("birch", ["integration"]))
+ self.assertFalse(match_run_on_projects("larch", ["integration"]))
+ self.assertTrue(match_run_on_projects("autoland", ["integration"]))
+ self.assertFalse(match_run_on_projects("mozilla-central", ["integration"]))
+ self.assertFalse(match_run_on_projects("mozilla-beta", ["integration"]))
+ self.assertFalse(match_run_on_projects("mozilla-integration", ["integration"]))
+
+ def test_combo(self):
+ self.assertTrue(match_run_on_projects("birch", ["release", "birch", "maple"]))
+ self.assertTrue(match_run_on_projects("larch", ["release", "birch", "maple"]))
+ self.assertTrue(match_run_on_projects("maple", ["release", "birch", "maple"]))
+ self.assertFalse(
+ match_run_on_projects("autoland", ["release", "birch", "maple"])
+ )
+ self.assertTrue(
+ match_run_on_projects("mozilla-central", ["release", "birch", "maple"])
+ )
+ self.assertTrue(
+ match_run_on_projects("mozilla-beta", ["release", "birch", "maple"])
+ )
+ self.assertTrue(
+ match_run_on_projects("mozilla-release", ["release", "birch", "maple"])
+ )
+ self.assertTrue(match_run_on_projects("birch", ["birch", "trunk"]))
+
+
+if __name__ == "__main__":
+ main()
diff --git a/taskcluster/gecko_taskgraph/test/test_util_backstop.py b/taskcluster/gecko_taskgraph/test/test_util_backstop.py
new file mode 100644
index 0000000000..af9aabd5af
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/test/test_util_backstop.py
@@ -0,0 +1,155 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+from datetime import datetime
+from textwrap import dedent
+from time import mktime
+
+import pytest
+from mozunit import main
+from taskgraph.util.taskcluster import get_artifact_url, get_index_url, get_task_url
+
+from gecko_taskgraph.util.backstop import (
+ BACKSTOP_INDEX,
+ BACKSTOP_PUSH_INTERVAL,
+ BACKSTOP_TIME_INTERVAL,
+ is_backstop,
+)
+
+LAST_BACKSTOP_ID = 0
+LAST_BACKSTOP_PUSHDATE = mktime(datetime.now().timetuple())
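+# Epoch seconds for "now"; the parameters below are expressed as offsets from
+# this timestamp.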
+DEFAULT_RESPONSES = {
+ "index": {
+ "status": 200,
+ "json": {"taskId": LAST_BACKSTOP_ID},
+ },
+ "artifact": {
+ "status": 200,
+ "body": dedent(
+ """
+ pushdate: {}
+ """.format(
+ LAST_BACKSTOP_PUSHDATE
+ )
+ ),
+ },
+ "status": {
+ "status": 200,
+ "json": {"status": {"state": "complete"}},
+ },
+}
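+# The three endpoints consulted when deciding whether a push is a backstop:
+# the index entry for the last backstop, its parameters.yml artifact (which
+# carries the pushdate) and its task status.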
+
+
+@pytest.fixture
+def params():
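+    # One push and one second past the last backstop: on their own, these
+    # defaults should not qualify as a backstop (see the "not a backstop"
+    # case below).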
+ return {
+ "branch": "integration/autoland",
+ "head_repository": "https://hg.mozilla.org/integration/autoland",
+ "head_rev": "abcdef",
+ "project": "autoland",
+ "pushdate": LAST_BACKSTOP_PUSHDATE + 1,
+ "pushlog_id": LAST_BACKSTOP_ID + 1,
+ }
+
+
+@pytest.mark.parametrize(
+ "response_args,extra_params,expected",
+ (
+ pytest.param(
+ {
+ "index": {"status": 404},
+ },
+ {"pushlog_id": 1},
+ True,
+ id="no previous backstop",
+ ),
+ pytest.param(
+ {
+ "index": DEFAULT_RESPONSES["index"],
+ "status": DEFAULT_RESPONSES["status"],
+ "artifact": {"status": 404},
+ },
+ {"pushlog_id": 1},
+ False,
+ id="previous backstop not finished",
+ ),
+ pytest.param(
+ DEFAULT_RESPONSES,
+ {
+ "pushlog_id": LAST_BACKSTOP_ID + 1,
+ "pushdate": LAST_BACKSTOP_PUSHDATE + 1,
+ },
+ False,
+ id="not a backstop",
+ ),
+ pytest.param(
+ {},
+ {
+ "pushlog_id": BACKSTOP_PUSH_INTERVAL,
+ },
+ True,
+ id="backstop interval",
+ ),
+ pytest.param(
+ DEFAULT_RESPONSES,
+ {
+ "pushdate": LAST_BACKSTOP_PUSHDATE + (BACKSTOP_TIME_INTERVAL * 60),
+ },
+ True,
+ id="time elapsed",
+ ),
+ pytest.param(
+ {},
+ {
+ "project": "try",
+ "pushlog_id": BACKSTOP_PUSH_INTERVAL,
+ },
+ False,
+ id="try not a backstop",
+ ),
+ pytest.param(
+ {},
+ {
+ "project": "mozilla-central",
+ },
+ True,
+ id="release branches always a backstop",
+ ),
+ pytest.param(
+ {
+ "index": DEFAULT_RESPONSES["index"],
+ "status": {
+ "status": 200,
+ "json": {"status": {"state": "failed"}},
+ },
+ },
+ {},
+ True,
+ id="last backstop failed",
+ ),
+ ),
+)
+def test_is_backstop(responses, params, response_args, extra_params, expected):
+ urls = {
+ "index": get_index_url(
+ BACKSTOP_INDEX.format(
+ **{"trust-domain": "gecko", "project": params["project"]}
+ )
+ ),
+ "artifact": get_artifact_url(LAST_BACKSTOP_ID, "public/parameters.yml"),
+ "status": get_task_url(LAST_BACKSTOP_ID) + "/status",
+ }
+
+ for key in ("index", "status", "artifact"):
+ if key in response_args:
+ print(urls[key])
+ responses.add(responses.GET, urls[key], **response_args[key])
+
+ params.update(extra_params)
+ assert is_backstop(params) is expected
+
+
+if __name__ == "__main__":
+ main()
diff --git a/taskcluster/gecko_taskgraph/test/test_util_bugbug.py b/taskcluster/gecko_taskgraph/test/test_util_bugbug.py
new file mode 100644
index 0000000000..7e8865ddde
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/test/test_util_bugbug.py
@@ -0,0 +1,57 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import mozunit
+
+from gecko_taskgraph.util.bugbug import BUGBUG_BASE_URL, push_schedules
+
+
+def test_group_translation(responses):
+ branch = ("integration/autoland",)
+ rev = "abcdef"
+ query = f"/push/{branch}/{rev}/schedules"
+ url = BUGBUG_BASE_URL + query
+
+ responses.add(
+ responses.GET,
+ url,
+ json={
+ "groups": {
+ "dom/indexedDB": 1,
+ "testing/web-platform/tests/IndexedDB": 1,
+ "testing/web-platform/mozilla/tests/IndexedDB": 1,
+ },
+ "config_groups": {
+ "dom/indexedDB": ["label1", "label2"],
+ "testing/web-platform/tests/IndexedDB": ["label3"],
+ "testing/web-platform/mozilla/tests/IndexedDB": ["label4"],
+ },
+ },
+ status=200,
+ )
+
+ assert len(push_schedules) == 0
+ data = push_schedules(branch, rev)
+ print(data)
+ assert sorted(data["groups"]) == [
+ "/IndexedDB",
+ "/_mozilla/IndexedDB",
+ "dom/indexedDB",
+ ]
+ assert data["config_groups"] == {
+ "dom/indexedDB": ["label1", "label2"],
+ "/IndexedDB": ["label3"],
+ "/_mozilla/IndexedDB": ["label4"],
+ }
+ assert len(push_schedules) == 1
+
+ # Value is memoized.
+ responses.reset()
+ push_schedules(branch, rev)
+ assert len(push_schedules) == 1
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/taskcluster/gecko_taskgraph/test/test_util_chunking.py b/taskcluster/gecko_taskgraph/test/test_util_chunking.py
new file mode 100644
index 0000000000..681c499fe2
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/test/test_util_chunking.py
@@ -0,0 +1,411 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import re
+from itertools import combinations
+
+import pytest
+from mozunit import main
+
+from gecko_taskgraph.util import chunking
+
+pytestmark = pytest.mark.slow
+
+
+@pytest.fixture(scope="module")
+def mock_manifest_runtimes():
+ """Deterministically produce a list of simulated manifest runtimes.
+
+ Args:
+        manifests (list): manifests to pair with simulated runtime values.
+
+ Returns:
+ dict of manifest data paired with a float value representing runtime.
+ """
+
+ def inner(manifests):
+ manifests = sorted(manifests)
+ # Generate deterministic runtime data.
+ runtimes = [(i / 10) ** (i / 10) for i in range(len(manifests))]
+ return dict(zip(manifests, runtimes))
+
+ return inner
+
+
+@pytest.fixture(scope="module")
+def unchunked_manifests():
+ """Produce a list of unchunked manifests to be consumed by test method.
+
+ Args:
+ length (int, optional): number of path elements to keep.
+ cutoff (int, optional): number of generated test paths to remove
+            from the test set if the user wants to limit the number of paths.
+
+ Returns:
+ list: list of test paths.
+ """
+ data = ["blueberry", "nashi", "peach", "watermelon"]
+
+ def inner(suite, length=2, cutoff=0):
+ if "web-platform" in suite:
+ suffix = ""
+ prefix = "/"
+ elif "reftest" in suite:
+ suffix = ".list"
+ prefix = ""
+ else:
+ suffix = ".ini"
+ prefix = ""
+ return [prefix + "/".join(p) + suffix for p in combinations(data, length)][
+ cutoff:
+ ]
+
+ return inner
+
+
+@pytest.fixture(scope="module")
+def mock_task_definition():
+ """Builds a mock task definition for use in testing.
+
+ Args:
+ os_name (str): represents the os.
+ os_version (str): represents the os version
+ bits (int): software bits.
+ build_type (str): opt or debug.
+ build_attrs (list, optional): specify build attribute(s)
+ variants (list, optional): specify runtime variant(s)
+
+ Returns:
+ dict: mocked task definition.
+ """
+
+ def inner(os_name, os_version, bits, build_type, build_attrs=None, variants=None):
+ setting = {
+ "platform": {
+ "arch": str(bits),
+ "os": {
+ "name": os_name,
+ "version": os_version,
+ },
+ },
+ "build": {
+ "type": build_type,
+ },
+ "runtime": {},
+ }
+
+ # Optionally set build attributes and runtime variants.
+ if build_attrs:
+ if isinstance(build_attrs, str):
+ build_attrs = [build_attrs]
+ for attr in build_attrs:
+ setting["build"][attr] = True
+
+ if variants:
+ if isinstance(variants, str):
+ variants = [variants]
+ for variant in variants:
+ setting["runtime"][variant] = True
+ return {"test-name": "foo", "test-setting": setting}
+
+ return inner
+
+
+@pytest.fixture(scope="module")
+def mock_mozinfo():
+ """Returns a mocked mozinfo object, similar to guess_mozinfo_from_task().
+
+ Args:
+ os (str): typically one of 'win, linux, mac, android'.
+ processor (str): processor architecture.
+ asan (bool, optional): addressanitizer build.
+ bits (int, optional): defaults to 64.
+ ccov (bool, optional): code coverage build.
+ debug (bool, optional): debug type build.
+ fission (bool, optional): process fission.
+ headless (bool, optional): headless browser testing without displays.
+ tsan (bool, optional): threadsanitizer build.
+
+ Returns:
+        dict: Dictionary mimicking the results from guess_mozinfo_from_task.
+ """
+
+ def inner(
+ os,
+ processor,
+ asan=False,
+ bits=64,
+ ccov=False,
+ debug=False,
+ fission=False,
+ headless=False,
+ tsan=False,
+ ):
+ return {
+ "os": os,
+ "processor": processor,
+ "toolkit": "",
+ "asan": asan,
+ "bits": bits,
+ "ccov": ccov,
+ "debug": debug,
+ "e10s": True,
+ "fission": fission,
+ "headless": headless,
+ "tsan": tsan,
+ "appname": "firefox",
+ "condprof": False,
+ "canvas": False,
+ "webgpu": False,
+ "privatebrowsing": False,
+ }
+
+ return inner
+
+
+@pytest.mark.parametrize(
+ "params,exception",
+ [
+ [("win", "7", 32, "opt"), None],
+ [("win", "10", 64, "opt"), None],
+ [("linux", "1804", 64, "debug"), None],
+ [("macosx", "1015", 64, "debug"), None],
+ [("macosx", "1100", 64, "opt"), None],
+ [("android", "", 64, "debug"), None],
+ [("and", "", 64, "debug"), ValueError],
+ [("", "", 64, "opt"), ValueError],
+ [("linux", "1804", 64, "opt", ["ccov"]), None],
+ [("linux", "1804", 64, "opt", ["asan"]), None],
+ [("win", "10", 64, "opt", ["tsan"]), None],
+ [("mac", "1100", 64, "opt", ["ccov"]), None],
+ [("android", "", 64, "opt", None, ["fission"]), None],
+ [("win", "10", "aarch64", "opt"), None],
+ ],
+)
+def test_guess_mozinfo_from_task(params, exception, mock_task_definition):
+ """Tests the mozinfo guessing process."""
+ # Set up a mocked task object.
+ task = mock_task_definition(*params)
+
+ if exception:
+ with pytest.raises(exception):
+ result = chunking.guess_mozinfo_from_task(task)
+ else:
+ expected_toolkits = {
+ "android": "android",
+ "linux": "gtk",
+ "mac": "cocoa",
+ "win": "windows",
+ }
+ result = chunking.guess_mozinfo_from_task(task)
+ setting = task["test-setting"]
+
+ assert str(result["bits"]) in setting["platform"]["arch"]
+ assert result["os"] in ("android", "linux", "mac", "win")
+ assert result["os"] in setting["platform"]["os"]["name"]
+ assert result["toolkit"] == expected_toolkits[result["os"]]
+
+        # Ensure the presence of special build variants matches what the
+        # guess_mozinfo_from_task method returns for these attributes.
+        assert ("asan" in setting["build"]) == result["asan"]
+        assert ("tsan" in setting["build"]) == result["tsan"]
+        assert ("ccov" in setting["build"]) == result["ccov"]
+
+        # Ensure runtime variants match
+        assert ("fission" in setting["runtime"]) == result["fission"]
+        assert ("1proc" in setting["runtime"]) != result["e10s"]
+
+
+@pytest.mark.parametrize("platform", ["unix", "windows", "android"])
+@pytest.mark.parametrize(
+ "suite", ["crashtest", "reftest", "web-platform-tests", "xpcshell"]
+)
+def test_get_runtimes(platform, suite):
+ """Tests that runtime information is returned for known good configurations."""
+ assert chunking.get_runtimes(platform, suite)
+
+
+@pytest.mark.parametrize(
+ "platform,suite,exception",
+ [
+ ("nonexistent_platform", "nonexistent_suite", KeyError),
+ ("unix", "nonexistent_suite", KeyError),
+ ("unix", "", TypeError),
+ ("", "", TypeError),
+ ("", "nonexistent_suite", TypeError),
+ ],
+)
+def test_get_runtimes_invalid(platform, suite, exception):
+ """Ensure get_runtimes() method raises an exception if improper request is made."""
+ with pytest.raises(exception):
+ chunking.get_runtimes(platform, suite)
+
+
+@pytest.mark.parametrize(
+ "suite",
+ [
+ "web-platform-tests",
+ "web-platform-tests-reftest",
+ "web-platform-tests-wdspec",
+ "web-platform-tests-crashtest",
+ ],
+)
+@pytest.mark.parametrize("chunks", [1, 3, 6, 20])
+def test_mock_chunk_manifests_wpt(unchunked_manifests, suite, chunks):
+ """Tests web-platform-tests and its subsuites chunking process."""
+ # Setup.
+ manifests = unchunked_manifests(suite)
+
+ # Generate the expected results, by generating list of indices that each
+ # manifest should go into and then appending each item to that index.
+ # This method is intentionally different from the way chunking.py performs
+ # chunking for cross-checking.
+ expected = [[] for _ in range(chunks)]
+ indexed = zip(manifests, list(range(0, chunks)) * len(manifests))
+ for i in indexed:
+ expected[i[1]].append(i[0])
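+    # i.e. manifest i lands in chunk i % chunks (round-robin).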
+
+ # Call the method under test on unchunked manifests.
+ chunked_manifests = chunking.chunk_manifests(suite, "unix", chunks, manifests)
+
+ # Assertions and end test.
+ assert chunked_manifests
+ if chunks > len(manifests):
+ # If chunk count exceeds number of manifests, not all chunks will have
+ # manifests.
+ with pytest.raises(AssertionError):
+ assert all(chunked_manifests)
+ else:
+ assert all(chunked_manifests)
+ minimum = min(len(c) for c in chunked_manifests)
+ maximum = max(len(c) for c in chunked_manifests)
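+        # Chunks are expected to be balanced to within one manifest of each
+        # other.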
+ assert maximum - minimum <= 1
+ assert expected == chunked_manifests
+
+
+@pytest.mark.parametrize(
+ "suite",
+ [
+ "mochitest-devtools-chrome",
+ "mochitest-browser-chrome",
+ "mochitest-plain",
+ "mochitest-chrome",
+ "xpcshell",
+ ],
+)
+@pytest.mark.parametrize("chunks", [1, 3, 6, 20])
+def test_mock_chunk_manifests(
+ mock_manifest_runtimes, unchunked_manifests, suite, chunks
+):
+ """Tests non-WPT tests and its subsuites chunking process."""
+ # Setup.
+ manifests = unchunked_manifests(suite)
+
+ # Call the method under test on unchunked manifests.
+ chunked_manifests = chunking.chunk_manifests(suite, "unix", chunks, manifests)
+
+ # Assertions and end test.
+ assert chunked_manifests
+ if chunks > len(manifests):
+ # If chunk count exceeds number of manifests, not all chunks will have
+ # manifests.
+ with pytest.raises(AssertionError):
+ assert all(chunked_manifests)
+ else:
+ assert all(chunked_manifests)
+
+
+@pytest.mark.parametrize(
+ "suite",
+ [
+ "web-platform-tests",
+ "web-platform-tests-reftest",
+ "xpcshell",
+ "mochitest-plain",
+ "mochitest-devtools-chrome",
+ "mochitest-browser-chrome",
+ "mochitest-chrome",
+ ],
+)
+@pytest.mark.parametrize(
+ "platform",
+ [
+ ("mac", "x86_64"),
+ ("win", "x86_64"),
+ ("win", "x86"),
+ ("win", "aarch64"),
+ ("linux", "x86_64"),
+ ("linux", "x86"),
+ ],
+)
+def test_get_manifests(suite, platform, mock_mozinfo):
+ """Tests the DefaultLoader class' ability to load manifests."""
+ mozinfo = mock_mozinfo(*platform)
+
+ loader = chunking.DefaultLoader([])
+ manifests = loader.get_manifests(suite, frozenset(mozinfo.items()))
+
+ assert manifests
+ assert manifests["active"]
+ if "web-platform" in suite:
+ assert manifests["skipped"] == []
+ else:
+ assert manifests["skipped"]
+
+ items = manifests["active"]
+ if suite == "xpcshell":
+ assert all([re.search(r"xpcshell(.*)?(.ini|.toml)", m) for m in items])
+ if "mochitest" in suite:
+ assert all(
+ [
+ re.search(r"(perftest|mochitest|chrome|browser).*(.ini|.toml)", m)
+ for m in items
+ ]
+ )
+ if "web-platform" in suite:
+ assert all([m.startswith("/") and m.count("/") <= 4 for m in items])
+
+
+@pytest.mark.parametrize(
+ "suite",
+ [
+ "mochitest-devtools-chrome",
+ "mochitest-browser-chrome",
+ "mochitest-plain",
+ "mochitest-chrome",
+ "web-platform-tests",
+ "web-platform-tests-reftest",
+ "xpcshell",
+ ],
+)
+@pytest.mark.parametrize(
+ "platform",
+ [
+ ("mac", "x86_64"),
+ ("win", "x86_64"),
+ ("linux", "x86_64"),
+ ],
+)
+@pytest.mark.parametrize("chunks", [1, 3, 6, 20])
+def test_chunk_manifests(suite, platform, chunks, mock_mozinfo):
+ """Tests chunking with real manifests."""
+ mozinfo = mock_mozinfo(*platform)
+
+ loader = chunking.DefaultLoader([])
+ manifests = loader.get_manifests(suite, frozenset(mozinfo.items()))
+
+ chunked_manifests = chunking.chunk_manifests(
+ suite, platform, chunks, manifests["active"]
+ )
+
+ # Assertions and end test.
+ assert chunked_manifests
+ assert len(chunked_manifests) == chunks
+ assert all(chunked_manifests)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/taskcluster/gecko_taskgraph/test/test_util_docker.py b/taskcluster/gecko_taskgraph/test/test_util_docker.py
new file mode 100644
index 0000000000..49c01738fe
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/test/test_util_docker.py
@@ -0,0 +1,255 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import os
+import shutil
+import stat
+import tarfile
+import tempfile
+import unittest
+from unittest import mock
+
+import taskcluster_urls as liburls
+from mozunit import MockedOpen, main
+
+from gecko_taskgraph.util import docker
+
+MODE_STANDARD = stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH
+
+
+@mock.patch.dict("os.environ", {"TASKCLUSTER_ROOT_URL": liburls.test_root_url()})
+class TestDocker(unittest.TestCase):
+ def test_generate_context_hash(self):
+ tmpdir = tempfile.mkdtemp()
+ try:
+ os.makedirs(os.path.join(tmpdir, "docker", "my-image"))
+ p = os.path.join(tmpdir, "docker", "my-image", "Dockerfile")
+ with open(p, "w") as f:
+ f.write("FROM node\nADD a-file\n")
+ os.chmod(p, MODE_STANDARD)
+ p = os.path.join(tmpdir, "docker", "my-image", "a-file")
+ with open(p, "w") as f:
+ f.write("data\n")
+ os.chmod(p, MODE_STANDARD)
+ self.assertIn(
+ docker.generate_context_hash(
+ tmpdir,
+ os.path.join(tmpdir, "docker/my-image"),
+ "my-image",
+ {},
+ ),
+ (
+ "680532a33c845e3b4f8ea8a7bd697da579b647f28c29f7a0a71e51e6cca33983",
+ "cc02f943ae87b283749369fa9c4f6a74639c27a7b9972c99de58e5d9fb3a98ae",
+ ),
+ )
+ finally:
+ shutil.rmtree(tmpdir)
+
+ def test_docker_image_explicit_registry(self):
+ files = {}
+ files[f"{docker.IMAGE_DIR}/myimage/REGISTRY"] = "cool-images"
+ files[f"{docker.IMAGE_DIR}/myimage/VERSION"] = "1.2.3"
+ files[f"{docker.IMAGE_DIR}/myimage/HASH"] = "sha256:434..."
+ with MockedOpen(files):
+ self.assertEqual(
+ docker.docker_image("myimage"), "cool-images/myimage@sha256:434..."
+ )
+
+ def test_docker_image_explicit_registry_by_tag(self):
+ files = {}
+ files[f"{docker.IMAGE_DIR}/myimage/REGISTRY"] = "myreg"
+ files[f"{docker.IMAGE_DIR}/myimage/VERSION"] = "1.2.3"
+ files[f"{docker.IMAGE_DIR}/myimage/HASH"] = "sha256:434..."
+ with MockedOpen(files):
+ self.assertEqual(
+ docker.docker_image("myimage", by_tag=True), "myreg/myimage:1.2.3"
+ )
+
+ def test_docker_image_default_registry(self):
+ files = {}
+ files[f"{docker.IMAGE_DIR}/REGISTRY"] = "mozilla"
+ files[f"{docker.IMAGE_DIR}/myimage/VERSION"] = "1.2.3"
+ files[f"{docker.IMAGE_DIR}/myimage/HASH"] = "sha256:434..."
+ with MockedOpen(files):
+ self.assertEqual(
+ docker.docker_image("myimage"), "mozilla/myimage@sha256:434..."
+ )
+
+ def test_docker_image_default_registry_by_tag(self):
+ files = {}
+ files[f"{docker.IMAGE_DIR}/REGISTRY"] = "mozilla"
+ files[f"{docker.IMAGE_DIR}/myimage/VERSION"] = "1.2.3"
+ files[f"{docker.IMAGE_DIR}/myimage/HASH"] = "sha256:434..."
+ with MockedOpen(files):
+ self.assertEqual(
+ docker.docker_image("myimage", by_tag=True), "mozilla/myimage:1.2.3"
+ )
+
+ def test_create_context_tar_basic(self):
+ tmp = tempfile.mkdtemp()
+ try:
+ d = os.path.join(tmp, "test_image")
+ os.mkdir(d)
+ with open(os.path.join(d, "Dockerfile"), "a"):
+ pass
+ os.chmod(os.path.join(d, "Dockerfile"), MODE_STANDARD)
+
+ with open(os.path.join(d, "extra"), "a"):
+ pass
+ os.chmod(os.path.join(d, "extra"), MODE_STANDARD)
+
+ tp = os.path.join(tmp, "tar")
+ h = docker.create_context_tar(tmp, d, tp, "my_image", {})
+ self.assertIn(
+ h,
+ (
+ "eae3ad00936085eb3e5958912f79fb06ee8e14a91f7157c5f38625f7ddacb9c7",
+ "9ff54ee091c4f346e94e809b03efae5aa49a5c1db152f9f633682cfa005f7422",
+ ),
+ )
+
+ # File prefix should be "my_image"
+ with tarfile.open(tp, "r:gz") as tf:
+ self.assertEqual(
+ tf.getnames(),
+ [
+ "Dockerfile",
+ "extra",
+ ],
+ )
+ finally:
+ shutil.rmtree(tmp)
+
+ def test_create_context_topsrcdir_files(self):
+ tmp = tempfile.mkdtemp()
+ try:
+ d = os.path.join(tmp, "test-image")
+ os.mkdir(d)
+ with open(os.path.join(d, "Dockerfile"), "wb") as fh:
+ fh.write(b"# %include extra/file0\n")
+ os.chmod(os.path.join(d, "Dockerfile"), MODE_STANDARD)
+
+ extra = os.path.join(tmp, "extra")
+ os.mkdir(extra)
+ with open(os.path.join(extra, "file0"), "a"):
+ pass
+ os.chmod(os.path.join(extra, "file0"), MODE_STANDARD)
+
+ tp = os.path.join(tmp, "tar")
+ h = docker.create_context_tar(tmp, d, tp, "test_image", {})
+ self.assertIn(
+ h,
+ (
+ "49dc3827530cd344d7bcc52e1fdd4aefc632568cf442cffd3dd9633a58f271bf",
+ "8f8e3dd2b712003cd12bb39e5a84fc2a7c06e891cf481613a52bf3db472c4ca9",
+ ),
+ )
+
+ with tarfile.open(tp, "r:gz") as tf:
+ self.assertEqual(
+ tf.getnames(),
+ [
+ "Dockerfile",
+ "topsrcdir/extra/file0",
+ ],
+ )
+ finally:
+ shutil.rmtree(tmp)
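+    # As the assertions above show, "# %include <path>" copies checkout-relative
+    # paths into the context tar under a topsrcdir/ prefix.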
+
+ def test_create_context_absolute_path(self):
+ tmp = tempfile.mkdtemp()
+ try:
+ d = os.path.join(tmp, "test-image")
+ os.mkdir(d)
+
+ # Absolute paths in %include syntax are not allowed.
+ with open(os.path.join(d, "Dockerfile"), "wb") as fh:
+ fh.write(b"# %include /etc/shadow\n")
+
+            with self.assertRaisesRegex(Exception, "cannot be absolute"):
+ docker.create_context_tar(tmp, d, os.path.join(tmp, "tar"), "test", {})
+ finally:
+ shutil.rmtree(tmp)
+
+ def test_create_context_outside_topsrcdir(self):
+ tmp = tempfile.mkdtemp()
+ try:
+ d = os.path.join(tmp, "test-image")
+ os.mkdir(d)
+
+ with open(os.path.join(d, "Dockerfile"), "wb") as fh:
+ fh.write(b"# %include foo/../../../etc/shadow\n")
+
+            with self.assertRaisesRegex(Exception, "path outside topsrcdir"):
+ docker.create_context_tar(tmp, d, os.path.join(tmp, "tar"), "test", {})
+ finally:
+ shutil.rmtree(tmp)
+
+ def test_create_context_missing_extra(self):
+ tmp = tempfile.mkdtemp()
+ try:
+ d = os.path.join(tmp, "test-image")
+ os.mkdir(d)
+
+ with open(os.path.join(d, "Dockerfile"), "wb") as fh:
+ fh.write(b"# %include does/not/exist\n")
+
+            with self.assertRaisesRegex(Exception, "path does not exist"):
+ docker.create_context_tar(tmp, d, os.path.join(tmp, "tar"), "test", {})
+ finally:
+ shutil.rmtree(tmp)
+
+ def test_create_context_extra_directory(self):
+ tmp = tempfile.mkdtemp()
+ try:
+ d = os.path.join(tmp, "test-image")
+ os.mkdir(d)
+
+ with open(os.path.join(d, "Dockerfile"), "wb") as fh:
+ fh.write(b"# %include extra\n")
+ fh.write(b"# %include file0\n")
+ os.chmod(os.path.join(d, "Dockerfile"), MODE_STANDARD)
+
+ extra = os.path.join(tmp, "extra")
+ os.mkdir(extra)
+ for i in range(3):
+ p = os.path.join(extra, "file%d" % i)
+ with open(p, "wb") as fh:
+ fh.write(b"file%d" % i)
+ os.chmod(p, MODE_STANDARD)
+
+ with open(os.path.join(tmp, "file0"), "a"):
+ pass
+ os.chmod(os.path.join(tmp, "file0"), MODE_STANDARD)
+
+ tp = os.path.join(tmp, "tar")
+ h = docker.create_context_tar(tmp, d, tp, "my_image", {})
+
+ self.assertIn(
+ h,
+ (
+ "a392f23cd6606ae43116390a4d0113354cff1e688a41d46f48b0fb25e90baa13",
+ "02325bdc508c2e941959170beeb840f6bb91d0675cb8095783a7db7301d136b2",
+ ),
+ )
+
+ with tarfile.open(tp, "r:gz") as tf:
+ self.assertEqual(
+ tf.getnames(),
+ [
+ "Dockerfile",
+ "topsrcdir/extra/file0",
+ "topsrcdir/extra/file1",
+ "topsrcdir/extra/file2",
+ "topsrcdir/file0",
+ ],
+ )
+ finally:
+ shutil.rmtree(tmp)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/taskcluster/gecko_taskgraph/test/test_util_partials.py b/taskcluster/gecko_taskgraph/test/test_util_partials.py
new file mode 100644
index 0000000000..3630d7b0ec
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/test/test_util_partials.py
@@ -0,0 +1,128 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+from unittest import mock
+
+from mozunit import main
+
+from gecko_taskgraph.util import partials
+
+release_blob = {
+ "fileUrls": {
+ "release-localtest": {
+ "completes": {
+ "*": "%OS_FTP%/%LOCALE%/firefox-92.0.1.complete.mar",
+ }
+ }
+ },
+ "platforms": {
+ "WINNT_x86_64-msvc": {
+ "locales": {
+ "en-US": {
+ "buildID": "20210922161155",
+ }
+ }
+ }
+ },
+}
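+# The %OS_FTP% and %LOCALE% placeholders above are expanded by the code under
+# test; the "win64/en-US/..." mar_url expected below reflects that expansion.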
+
+
+def nightly_blob(release):
+ return {
+ "platforms": {
+ "WINNT_x86_64-msvc": {
+ "locales": {
+ "en-US": {
+ "buildID": release[-14:],
+ "completes": [{"fileUrl": release}],
+ }
+ }
+ }
+ }
+ }
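+# e.g. "Firefox-mozilla-central-nightly-20211003201113"[-14:] == "20211003201113"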
+
+
+class TestReleaseHistory(unittest.TestCase):
+ @mock.patch("gecko_taskgraph.util.partials.get_release_builds")
+ @mock.patch("gecko_taskgraph.util.partials.get_sorted_releases")
+ def test_populate_release_history(self, get_sorted_releases, get_release_builds):
+ self.assertEqual(
+ partials.populate_release_history(
+ "Firefox", "mozilla-release", partial_updates={}
+ ),
+ {},
+ )
+ get_sorted_releases.assert_not_called()
+ get_release_builds.assert_not_called()
+
+ def patched_get_sorted_releases(product, branch):
+ assert branch == "mozilla-central"
+ return [
+ "Firefox-mozilla-central-nightly-20211003201113",
+ "Firefox-mozilla-central-nightly-20211003100640",
+ "Firefox-mozilla-central-nightly-20211002213629",
+ "Firefox-mozilla-central-nightly-20211002095048",
+ "Firefox-mozilla-central-nightly-20211001214601",
+ "Firefox-mozilla-central-nightly-20211001093323",
+ ]
+
+ def patched_get_release_builds(release, branch):
+ if branch == "mozilla-central":
+ return nightly_blob(release)
+ if branch == "mozilla-release":
+ return release_blob
+
+ get_sorted_releases.side_effect = patched_get_sorted_releases
+ get_release_builds.side_effect = patched_get_release_builds
+
+ self.assertEqual(
+ partials.populate_release_history(
+ "Firefox",
+ "mozilla-release",
+ partial_updates={"92.0.1": {"buildNumber": 1}},
+ ),
+ {
+ "WINNT_x86_64-msvc": {
+ "en-US": {
+ "target-92.0.1.partial.mar": {
+ "buildid": "20210922161155",
+ "mar_url": "win64/en-US/firefox-92.0.1.complete.mar",
+ "previousVersion": "92.0.1",
+ "previousBuildNumber": 1,
+ "product": "Firefox",
+ }
+ }
+ }
+ },
+ )
+ self.assertEqual(
+ partials.populate_release_history("Firefox", "mozilla-central"),
+ {
+ "WINNT_x86_64-msvc": {
+ "en-US": {
+ "target.partial-1.mar": {
+ "buildid": "20211003201113",
+ "mar_url": "Firefox-mozilla-central-nightly-20211003201113",
+ },
+ "target.partial-2.mar": {
+ "buildid": "20211003100640",
+ "mar_url": "Firefox-mozilla-central-nightly-20211003100640",
+ },
+ "target.partial-3.mar": {
+ "buildid": "20211002213629",
+ "mar_url": "Firefox-mozilla-central-nightly-20211002213629",
+ },
+ "target.partial-4.mar": {
+ "buildid": "20211002095048",
+ "mar_url": "Firefox-mozilla-central-nightly-20211002095048",
+ },
+ }
+ }
+ },
+ )
+
+
+if __name__ == "__main__":
+ main()
diff --git a/taskcluster/gecko_taskgraph/test/test_util_runnable_jobs.py b/taskcluster/gecko_taskgraph/test/test_util_runnable_jobs.py
new file mode 100644
index 0000000000..d1d7b0c06a
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/test/test_util_runnable_jobs.py
@@ -0,0 +1,75 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import unittest
+
+from mozunit import main
+from taskgraph.graph import Graph
+from taskgraph.task import Task
+from taskgraph.taskgraph import TaskGraph
+
+from gecko_taskgraph.decision import full_task_graph_to_runnable_jobs
+
+
+class TestRunnableJobs(unittest.TestCase):
+ tasks = [
+ {
+ "kind": "build",
+ "label": "a",
+ "attributes": {},
+ "task": {
+ "extra": {"treeherder": {"symbol": "B"}},
+ },
+ },
+ {
+ "kind": "test",
+ "label": "b",
+ "attributes": {},
+ "task": {
+ "extra": {
+ "treeherder": {
+ "collection": {"opt": True},
+ "groupName": "Some group",
+ "groupSymbol": "GS",
+ "machine": {"platform": "linux64"},
+ "symbol": "t",
+ }
+ },
+ },
+ },
+ ]
+
+ def make_taskgraph(self, tasks):
+ label_to_taskid = {k: k + "-tid" for k in tasks}
+ for label, task_id in label_to_taskid.items():
+ tasks[label].task_id = task_id
+ graph = Graph(nodes=set(tasks), edges=set())
+ taskgraph = TaskGraph(tasks, graph)
+ return taskgraph, label_to_taskid
+
+ def test_taskgraph_to_runnable_jobs(self):
+ tg, label_to_taskid = self.make_taskgraph(
+ {t["label"]: Task(**t) for t in self.tasks[:]}
+ )
+
+ res = full_task_graph_to_runnable_jobs(tg.to_json())
+
+ self.assertEqual(
+ res,
+ {
+ "a": {"symbol": "B"},
+ "b": {
+ "collection": {"opt": True},
+ "groupName": "Some group",
+ "groupSymbol": "GS",
+ "symbol": "t",
+ "platform": "linux64",
+ },
+ },
+ )
+
+
+if __name__ == "__main__":
+ main()
diff --git a/taskcluster/gecko_taskgraph/test/test_util_templates.py b/taskcluster/gecko_taskgraph/test/test_util_templates.py
new file mode 100644
index 0000000000..edfb13a277
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/test/test_util_templates.py
@@ -0,0 +1,79 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import unittest
+
+import mozunit
+
+from gecko_taskgraph.util.templates import merge, merge_to
+
+
+class MergeTest(unittest.TestCase):
+ def test_merge_to_dicts(self):
+ source = {"a": 1, "b": 2}
+ dest = {"b": "20", "c": 30}
+ expected = {
+ "a": 1, # source only
+ "b": 2, # source overrides dest
+ "c": 30, # dest only
+ }
+ self.assertEqual(merge_to(source, dest), expected)
+ self.assertEqual(dest, expected)
+
+ def test_merge_to_lists(self):
+ source = {"x": [3, 4]}
+ dest = {"x": [1, 2]}
+ expected = {"x": [1, 2, 3, 4]} # dest first
+ self.assertEqual(merge_to(source, dest), expected)
+ self.assertEqual(dest, expected)
+
+ def test_merge_diff_types(self):
+ source = {"x": [1, 2]}
+ dest = {"x": "abc"}
+ expected = {"x": [1, 2]} # source wins
+ self.assertEqual(merge_to(source, dest), expected)
+ self.assertEqual(dest, expected)
+
+ def test_merge(self):
+ first = {"a": 1, "b": 2, "d": 11}
+ second = {"b": 20, "c": 30}
+ third = {"c": 300, "d": 400}
+ expected = {
+ "a": 1,
+ "b": 20,
+ "c": 300,
+ "d": 400,
+ }
+ self.assertEqual(merge(first, second, third), expected)
+
+        # inputs haven't changed.
+ self.assertEqual(first, {"a": 1, "b": 2, "d": 11})
+ self.assertEqual(second, {"b": 20, "c": 30})
+ self.assertEqual(third, {"c": 300, "d": 400})
+
+ def test_merge_by(self):
+ source = {
+ "x": "abc",
+ "y": {"by-foo": {"quick": "fox", "default": ["a", "b", "c"]}},
+ }
+ dest = {"y": {"by-foo": {"purple": "rain", "default": ["x", "y", "z"]}}}
+ expected = {
+ "x": "abc",
+ "y": {"by-foo": {"quick": "fox", "default": ["a", "b", "c"]}},
+ } # source wins
+ self.assertEqual(merge_to(source, dest), expected)
+ self.assertEqual(dest, expected)
+
+ def test_merge_multiple_by(self):
+ source = {"x": {"by-foo": {"quick": "fox", "default": ["a", "b", "c"]}}}
+ dest = {"x": {"by-bar": {"purple": "rain", "default": ["x", "y", "z"]}}}
+ expected = {
+ "x": {"by-foo": {"quick": "fox", "default": ["a", "b", "c"]}}
+ } # source wins
+ self.assertEqual(merge_to(source, dest), expected)
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/taskcluster/gecko_taskgraph/test/test_util_verify.py b/taskcluster/gecko_taskgraph/test/test_util_verify.py
new file mode 100644
index 0000000000..e2f774a315
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/test/test_util_verify.py
@@ -0,0 +1,149 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+There are some basic tests run as part of the Decision task to make sure
+documentation exists for taskgraph functionality.
+These functions are defined in gecko_taskgraph.generator and call
+gecko_taskgraph.util.verify.verify_docs with different parameters to do the
+actual checking.
+"""
+
+
+import os.path
+
+import pytest
+from mozunit import main
+
+import gecko_taskgraph.util.verify
+from gecko_taskgraph import GECKO
+from gecko_taskgraph.util.verify import DocPaths, verify_docs
+
+FF_DOCS_BASE = os.path.join(GECKO, "taskcluster", "docs")
+EXTRA_DOCS_BASE = os.path.abspath(os.path.join(os.path.dirname(__file__), "docs"))
+
+
+@pytest.fixture
+def mock_single_doc_path(monkeypatch):
+ """Set a single path containing documentation"""
+ mocked_documentation_paths = DocPaths()
+ mocked_documentation_paths.add(FF_DOCS_BASE)
+ monkeypatch.setattr(
+ gecko_taskgraph.util.verify, "documentation_paths", mocked_documentation_paths
+ )
+
+
+@pytest.fixture
+def mock_two_doc_paths(monkeypatch):
+ """Set two paths containing documentation"""
+ mocked_documentation_paths = DocPaths()
+ mocked_documentation_paths.add(FF_DOCS_BASE)
+ mocked_documentation_paths.add(EXTRA_DOCS_BASE)
+ monkeypatch.setattr(
+ gecko_taskgraph.util.verify, "documentation_paths", mocked_documentation_paths
+ )
+
+
+@pytest.mark.usefixtures("mock_single_doc_path")
+class PyTestSingleDocPath:
+ """
+    Taskcluster documentation for Firefox lives in a single directory. Verify
+    that the build-time checks ensuring documentation exists actually work.
+ """
+
+ def test_heading(self):
+ """
+        Look for headings in filename matching identifiers. This is used when making sure
+ documentation exists for kinds and attributes.
+ """
+ verify_docs(
+ filename="kinds.rst",
+ identifiers=["build", "packages", "toolchain"],
+ appearing_as="heading",
+ )
+ with pytest.raises(Exception, match="missing from doc file"):
+ verify_docs(
+ filename="kinds.rst",
+ identifiers=["build", "packages", "badvalue"],
+ appearing_as="heading",
+ )
+
+ def test_inline_literal(self):
+ """
+ Look for inline-literals in filename. Used when checking documentation for decision
+ task parameters and run-using functions.
+ """
+ verify_docs(
+ filename="parameters.rst",
+ identifiers=["base_repository", "head_repository", "owner"],
+ appearing_as="inline-literal",
+ )
+ with pytest.raises(Exception, match="missing from doc file"):
+ verify_docs(
+ filename="parameters.rst",
+ identifiers=["base_repository", "head_repository", "badvalue"],
+ appearing_as="inline-literal",
+ )
+
+
+@pytest.mark.usefixtures("mock_two_doc_paths")
+class PyTestTwoDocPaths:
+ """
+ Thunderbird extends Firefox's taskgraph with additional kinds. The documentation
+    for Thunderbird kinds is in its repository, and documentation_paths will have
+ two places to look for files. Run the same tests as for a single documentation
+ path, and cover additional possible scenarios.
+ """
+
+ def test_heading(self):
+ """
+        Look for headings in filename matching identifiers. This is used when
+        making sure documentation exists for kinds and attributes.
+        The first test looks for headings that are all within the first doc path;
+        the second test is new and has a heading found in the second path.
+        The final check has an identifier that will not match and should
+ produce an error.
+ """
+ verify_docs(
+ filename="kinds.rst",
+ identifiers=["build", "packages", "toolchain"],
+ appearing_as="heading",
+ )
+ verify_docs(
+ filename="kinds.rst",
+ identifiers=["build", "packages", "newkind"],
+ appearing_as="heading",
+ )
+ with pytest.raises(Exception, match="missing from doc file"):
+ verify_docs(
+ filename="kinds.rst",
+ identifiers=["build", "packages", "badvalue"],
+ appearing_as="heading",
+ )
+
+ def test_inline_literal(self):
+ """
+ Look for inline-literals in filename. Used when checking documentation for decision
+ task parameters and run-using functions. As with the heading tests,
+ the second check looks for an identifier in the added documentation path.
+ """
+ verify_docs(
+ filename="parameters.rst",
+ identifiers=["base_repository", "head_repository", "owner"],
+ appearing_as="inline-literal",
+ )
+ verify_docs(
+ filename="parameters.rst",
+ identifiers=["base_repository", "head_repository", "newparameter"],
+ appearing_as="inline-literal",
+ )
+ with pytest.raises(Exception, match="missing from doc file"):
+ verify_docs(
+ filename="parameters.rst",
+ identifiers=["base_repository", "head_repository", "badvalue"],
+ appearing_as="inline-literal",
+ )
+
+
+if __name__ == "__main__":
+ main()
diff --git a/taskcluster/gecko_taskgraph/transforms/__init__.py b/taskcluster/gecko_taskgraph/transforms/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/__init__.py
diff --git a/taskcluster/gecko_taskgraph/transforms/artifact.py b/taskcluster/gecko_taskgraph/transforms/artifact.py
new file mode 100644
index 0000000000..559148f7b4
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/artifact.py
@@ -0,0 +1,116 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Apply different expiration dates to different artifacts based on a manifest file (artifacts.yml)
+"""
+import logging
+import os
+import sys
+
+import yaml
+from taskgraph.transforms.base import TransformSequence
+from yaml import YAMLError
+
+from gecko_taskgraph.transforms.job.common import get_expiration
+from gecko_taskgraph.util.workertypes import worker_type_implementation
+
+logger = logging.getLogger(__name__)
+
+transforms = TransformSequence()
+
+
+def read_artifact_manifest(manifest_path):
+ """Read the artifacts.yml manifest file and return it."""
+ try:
+ with open(manifest_path, "r") as ymlf:
+ yml = yaml.safe_load(ymlf.read())
+ return yml
+ except YAMLError as ye:
+        err = f'Failed to parse manifest "{manifest_path}". Invalid YAML: {ye}'
+        raise SystemExit(err)
+ except FileNotFoundError:
+ err = f'Failed to load manifest "{manifest_path}". File not found'
+ raise SystemExit(err)
+ except PermissionError:
+ err = f'Failed to load manifest "{manifest_path}". Permission Error'
+ raise SystemExit(err)
+
+
+@transforms.add
+def set_artifact_expiration(config, jobs):
+ """Set the expiration for certain artifacts based on a manifest file."""
+ """---
+ win:
+ - build_resources.json: short
+
+ linux:
+ - target.crashreporter-symbols-full.tar.zst: medium
+ """
+ transform_dir = os.path.dirname(__file__)
+ manifest = read_artifact_manifest(os.path.join(transform_dir, "artifacts.yml"))
+
+ for job in jobs:
+ try:
+ platform = job["attributes"]["build_platform"]
+ except KeyError:
+ err = "Tried to get build_platfrom for job, but it does not exist. Exiting."
+ raise SystemExit(err)
+ if "worker" in job:
+ if "env" in job["worker"]:
+ if isinstance(job["worker"]["env"], dict):
+ job["worker"]["env"]["MOZ_ARTIFACT_PLATFORM"] = platform
+ else:
+ raise SystemExit(
+ f"Expected env to be a dict, but it was {type(job['worker']['env'])}"
+ )
+ if "artifacts" in job["worker"]:
+ plat = platform.lower()
+ if "plain" in plat or "ccov" in plat or "rusttest" in plat:
+ art_dict = None
+            elif plat in (
+                "toolchain-wasm32-wasi-compiler-rt-trunk",
+                "toolchain-linux64-x64-compiler-rt-trunk",
+                "toolchain-linux64-x86-compiler-rt-trunk",
+                "android-geckoview-docs",
+            ):
+                art_dict = None
+ elif plat.startswith("win"):
+ art_dict = manifest["win"]
+ elif plat.startswith("linux"):
+ art_dict = manifest["linux"]
+ elif plat.startswith("mac"):
+ art_dict = manifest["macos"]
+ elif plat.startswith("android"):
+ art_dict = manifest["android"]
+ else:
+ print(
+                    f'The platform name "{plat}" didn\'t start with',
+ '"win", "mac", "android", or "linux".',
+ file=sys.stderr,
+ )
+ art_dict = None
+ worker_implementation, _ = worker_type_implementation(
+ config.graph_config, config.params, job["worker-type"]
+ )
+ if worker_implementation == "docker-worker":
+ artifact_dest = "/builds/worker/cidata/{}"
+ else:
+ artifact_dest = "cidata/{}"
+
+ if art_dict is not None:
+ for art_name in art_dict.keys():
+                    # The 'artifacts' key of a job is a list at this stage,
+                    # so we must append a new dict to it.
+ expiry_policy = art_dict[art_name]
+ expires = get_expiration(config, policy=expiry_policy)
+ new_art = {
+ "name": f"public/cidata/{art_name}",
+ "path": artifact_dest.format(art_name),
+ "type": "file",
+ "expires-after": expires,
+ }
+ job["worker"]["artifacts"].append(new_art)
+ yield job
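+# For illustration (values hypothetical): on a docker-worker "win" job this
+# appends entries such as
+#     {"name": "public/cidata/sccache.log",
+#      "path": "/builds/worker/cidata/sccache.log",
+#      "type": "file",
+#      "expires-after": get_expiration(config, policy="shortest")}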
diff --git a/taskcluster/gecko_taskgraph/transforms/artifacts.yml b/taskcluster/gecko_taskgraph/transforms/artifacts.yml
new file mode 100644
index 0000000000..26f06640ad
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/artifacts.yml
@@ -0,0 +1,20 @@
+---
+win:
+ target.crashreporter-symbols-full.tar.zst: shortest
+ sccache.log: shortest
+ sccache-stats.json: shortest
+
+linux:
+ target.crashreporter-symbols-full.tar.zst: shortest
+ sccache.log: shortest
+ sccache-stats.json: shortest
+
+macos:
+ target.crashreporter-symbols-full.tar.zst: shortest
+ sccache.log: shortest
+ sccache-stats.json: shortest
+
+android:
+ target.crashreporter-symbols-full.tar.zst: shortest
+ sccache.log: shortest
+ sccache-stats.json: shortest
diff --git a/taskcluster/gecko_taskgraph/transforms/attribution.py b/taskcluster/gecko_taskgraph/transforms/attribution.py
new file mode 100644
index 0000000000..42d0f20f79
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/attribution.py
@@ -0,0 +1,69 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+
+transforms = TransformSequence()
+resolve_keyed_by_transforms = TransformSequence()
+
+
+@resolve_keyed_by_transforms.add
+def attribution_keyed_by(config, jobs):
+ keyed_by_fields = (
+ "fetches",
+ "attributes.release_artifacts",
+ "run.command",
+ "properties-with-locale", # properties-with-locale only exists in the l10n task
+ )
+ for job in jobs:
+ build_platform = {"build-platform": job["attributes"]["build_platform"]}
+ for field in keyed_by_fields:
+ resolve_keyed_by(item=job, field=field, item_name=field, **build_platform)
+ yield job
+
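+# For illustration (hypothetical fragment): a task definition keyed like
+#     fetches:
+#         by-build-platform:
+#             win64.*: [...]
+#             default: [...]
+# collapses to the branch matching the job's attributes.build_platform once
+# resolve_keyed_by has run.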
+
+@transforms.add
+def stub_installer(config, jobs):
+ """Not all windows builds come with a stub installer (only win32, and not
+ on esr), so conditionally add it here based on our dependency's
+ stub-installer attribute."""
+ for job in jobs:
+ dep_name, dep_label = next(iter(job["dependencies"].items()))
+ dep_task = config.kind_dependencies_tasks[dep_label]
+ if dep_task.attributes.get("stub-installer"):
+ locale = job["attributes"].get("locale")
+ if locale:
+ artifact = f"{locale}/target.stub-installer.exe"
+ else:
+ artifact = "target.stub-installer.exe"
+ job["fetches"][dep_name].append(artifact)
+ job["run"]["command"] += [
+ "--input",
+ "/builds/worker/fetches/target.stub-installer.exe",
+ ]
+ job["attributes"]["release_artifacts"].append(
+ "public/build/target.stub-installer.exe"
+ )
+ yield job
+
+
+@transforms.add
+def mac_attribution(config, jobs):
+ """Adds \t padding to the mac attribution data. Implicitly assumes that the
+ attribution data is the last thing in job.run.command
+ """
+ for job in jobs:
+ if "macosx" in job["attributes"]["build_platform"]:
+ # Last argument of command should be the attribution data
+ command = job["run"]["command"]
+ attribution_arg = command[-1]
+ # Attribution length should be aligned with ATTR_CODE_MAX_LENGTH
+            # from browser/components/attribution/AttributionCode.sys.mjs
+ while len(attribution_arg) < 1010:
+ attribution_arg += "\t"
+ # Wrap attribution value in quotes to prevent run-task from removing tabs
+ command[-1] = "'" + attribution_arg + "'"
+ job["run"]["command"] = " ".join(command)
+ yield job
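+# Illustration: a 900-character attribution argument gains 110 trailing tabs so
+# that len(attribution_arg) == 1010, and is single-quoted before the command
+# list is joined back into one string.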
diff --git a/taskcluster/gecko_taskgraph/transforms/balrog_submit.py b/taskcluster/gecko_taskgraph/transforms/balrog_submit.py
new file mode 100644
index 0000000000..2063567345
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/balrog_submit.py
@@ -0,0 +1,155 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the per-locale balrog task into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.dependencies import get_primary_dependency
+from taskgraph.util.schema import Schema, optionally_keyed_by, resolve_keyed_by
+from taskgraph.util.treeherder import replace_group
+from voluptuous import Optional, Required
+
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
+
+balrog_description_schema = Schema(
+ {
+ # unique label to describe this balrog task, defaults to balrog-{dep.label}
+ Required("label"): str,
+ Optional(
+ "update-no-wnp",
+ description="Whether the parallel `-No-WNP` blob should be updated as well.",
+ ): optionally_keyed_by("release-type", bool),
+ # treeherder is allowed here to override any defaults we use for beetmover. See
+ # taskcluster/gecko_taskgraph/transforms/task.py for the schema details, and the
+ # below transforms for defaults of various values.
+ Optional("treeherder"): task_description_schema["treeherder"],
+ Optional("attributes"): task_description_schema["attributes"],
+ Optional("dependencies"): task_description_schema["dependencies"],
+ Optional("job-from"): task_description_schema["job-from"],
+ # Shipping product / phase
+ Optional("shipping-product"): task_description_schema["shipping-product"],
+ Optional("shipping-phase"): task_description_schema["shipping-phase"],
+ }
+)
+
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def remove_name(config, jobs):
+ for job in jobs:
+ if "name" in job:
+ del job["name"]
+ yield job
+
+
+transforms.add_validate(balrog_description_schema)
+
+
+@transforms.add
+def handle_keyed_by(config, jobs):
+ """Resolve fields that can be keyed by platform, etc."""
+ fields = [
+ "update-no-wnp",
+ ]
+ for job in jobs:
+ for field in fields:
+ resolve_keyed_by(
+ item=job,
+ field=field,
+ item_name=job["label"],
+ **{
+ "project": config.params["project"],
+ "release-type": config.params["release_type"],
+ },
+ )
+ yield job
+
+
+@transforms.add
+def make_task_description(config, jobs):
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ assert dep_job
+
+ job["shipping-product"] = dep_job.attributes.get("shipping_product")
+
+ treeherder = job.get("treeherder", {})
+ treeherder.setdefault("symbol", "c-Up(N)")
+ dep_th_platform = (
+ dep_job.task.get("extra", {})
+ .get("treeherder", {})
+ .get("machine", {})
+ .get("platform", "")
+ )
+ treeherder.setdefault("platform", f"{dep_th_platform}/opt")
+ treeherder.setdefault(
+ "tier", dep_job.task.get("extra", {}).get("treeherder", {}).get("tier", 1)
+ )
+ treeherder.setdefault("kind", "build")
+
+ attributes = copy_attributes_from_dependent_job(dep_job)
+
+ treeherder_job_symbol = dep_job.task["extra"]["treeherder"]["symbol"]
+ treeherder["symbol"] = replace_group(treeherder_job_symbol, "c-Up")
+
+ if dep_job.attributes.get("locale"):
+ attributes["locale"] = dep_job.attributes.get("locale")
+
+ label = job["label"]
+
+ description = (
+ "Balrog submission for locale '{locale}' for build '"
+ "{build_platform}/{build_type}'".format(
+ locale=attributes.get("locale", "en-US"),
+ build_platform=attributes.get("build_platform"),
+ build_type=attributes.get("build_type"),
+ )
+ )
+
+ upstream_artifacts = [
+ {
+ "taskId": {"task-reference": "<beetmover>"},
+ "taskType": "beetmover",
+ "paths": ["public/manifest.json"],
+ }
+ ]
+
+ dependencies = {"beetmover": dep_job.label}
+ # don't block on startup-test for release/esr, they block on manual testing anyway
+ if config.params["release_type"] in ("nightly", "beta", "release-rc"):
+ for kind_dep in config.kind_dependencies_tasks.values():
+ if (
+ kind_dep.kind == "startup-test"
+ and kind_dep.attributes["build_platform"]
+ == attributes.get("build_platform")
+ and kind_dep.attributes["build_type"]
+ == attributes.get("build_type")
+ and kind_dep.attributes.get("shipping_product")
+ == job.get("shipping-product")
+ ):
+ dependencies["startup-test"] = kind_dep.label
+
+ task = {
+ "label": label,
+ "description": description,
+ "worker-type": "balrog",
+ "worker": {
+ "implementation": "balrog",
+ "upstream-artifacts": upstream_artifacts,
+ "balrog-action": "v2-submit-locale",
+ "suffixes": ["", "-No-WNP"] if job.get("update-no-wnp") else [""],
+ },
+ "dependencies": dependencies,
+ "attributes": attributes,
+ "run-on-projects": dep_job.attributes.get("run_on_projects"),
+ "treeherder": treeherder,
+ "shipping-phase": job.get("shipping-phase", "promote"),
+ "shipping-product": job.get("shipping-product"),
+ }
+
+ yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/balrog_toplevel.py b/taskcluster/gecko_taskgraph/transforms/balrog_toplevel.py
new file mode 100644
index 0000000000..6b06758f69
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/balrog_toplevel.py
@@ -0,0 +1,42 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the beetmover task into an actual task description.
+"""
+
+from mozilla_version.gecko import GeckoVersion
+from mozrelease.balrog import generate_update_properties
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.yaml import load_yaml
+
+from gecko_taskgraph.util.scriptworker import get_release_config
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def generate_update_line(config, jobs):
+ """Resolve fields that can be keyed by platform, etc."""
+ release_config = get_release_config(config)
+ for job in jobs:
+ config_file = job.pop("whats-new-config")
+ update_config = load_yaml(config_file)
+
+ product = job["shipping-product"]
+ if product == "devedition":
+ product = "firefox"
+ job["worker"]["update-line"] = {}
+ for blob_type, suffix in [("wnp", ""), ("no-wnp", "-No-WNP")]:
+ context = {
+ "release-type": config.params["release_type"],
+ "product": product,
+ "version": GeckoVersion.parse(release_config["appVersion"]),
+ "blob-type": blob_type,
+ "build-id": config.params["moz_build_date"],
+ }
+ job["worker"]["update-line"][suffix] = generate_update_properties(
+ context, update_config
+ )
+
+ yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/beetmover.py b/taskcluster/gecko_taskgraph/transforms/beetmover.py
new file mode 100644
index 0000000000..311e576293
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/beetmover.py
@@ -0,0 +1,178 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the beetmover task into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.dependencies import get_primary_dependency
+from taskgraph.util.schema import Schema
+from taskgraph.util.treeherder import replace_group
+from voluptuous import Optional, Required
+
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
+from gecko_taskgraph.util.scriptworker import (
+ generate_beetmover_artifact_map,
+ generate_beetmover_upstream_artifacts,
+ get_beetmover_action_scope,
+ get_beetmover_bucket_scope,
+)
+
+transforms = TransformSequence()
+
+beetmover_description_schema = Schema(
+ {
+ # unique label to describe this beetmover task
+ Required("label"): str,
+ Required("dependencies"): task_description_schema["dependencies"],
+ # treeherder is allowed here to override any defaults we use for beetmover. See
+ # taskcluster/gecko_taskgraph/transforms/task.py for the schema details, and the
+ # below transforms for defaults of various values.
+ Optional("treeherder"): task_description_schema["treeherder"],
+ # locale is passed only for l10n beetmoving
+ Optional("locale"): str,
+ Required("shipping-phase"): task_description_schema["shipping-phase"],
+ Optional("shipping-product"): task_description_schema["shipping-product"],
+ Optional("attributes"): task_description_schema["attributes"],
+ Optional("job-from"): task_description_schema["job-from"],
+ }
+)
+
+
+@transforms.add
+def remove_name(config, jobs):
+ for job in jobs:
+ if "name" in job:
+ del job["name"]
+ yield job
+
+
+transforms.add_validate(beetmover_description_schema)
+
+
+@transforms.add
+def make_task_description(config, jobs):
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ assert dep_job
+
+ attributes = dep_job.attributes
+
+ treeherder = job.get("treeherder", {})
+ treeherder.setdefault(
+ "symbol", replace_group(dep_job.task["extra"]["treeherder"]["symbol"], "BM")
+ )
+ dep_th_platform = (
+ dep_job.task.get("extra", {})
+ .get("treeherder", {})
+ .get("machine", {})
+ .get("platform", "")
+ )
+ treeherder.setdefault("platform", f"{dep_th_platform}/opt")
+ treeherder.setdefault(
+ "tier", dep_job.task.get("extra", {}).get("treeherder", {}).get("tier", 1)
+ )
+ treeherder.setdefault("kind", "build")
+ label = job["label"]
+ description = (
+ "Beetmover submission for locale '{locale}' for build '"
+ "{build_platform}/{build_type}'".format(
+ locale=attributes.get("locale", "en-US"),
+ build_platform=attributes.get("build_platform"),
+ build_type=attributes.get("build_type"),
+ )
+ )
+
+ dependencies = {dep_job.kind: dep_job.label}
+
+ # XXX release snap-repackage has a variable number of dependencies, depending on how many
+ # "post-beetmover-dummy" jobs there are in the graph.
+ if dep_job.kind != "release-snap-repackage" and len(dep_job.dependencies) > 1:
+ raise NotImplementedError(
+ "Can't beetmove a signing task with multiple dependencies"
+ )
+ signing_dependencies = dep_job.dependencies
+ dependencies.update(signing_dependencies)
+
+ attributes = copy_attributes_from_dependent_job(dep_job)
+ attributes.update(job.get("attributes", {}))
+
+ if job.get("locale"):
+ attributes["locale"] = job["locale"]
+
+ bucket_scope = get_beetmover_bucket_scope(config)
+ action_scope = get_beetmover_action_scope(config)
+
+ task = {
+ "label": label,
+ "description": description,
+ "worker-type": "beetmover",
+ "scopes": [bucket_scope, action_scope],
+ "dependencies": dependencies,
+ "attributes": attributes,
+ "run-on-projects": dep_job.attributes.get("run_on_projects"),
+ "treeherder": treeherder,
+ "shipping-phase": job["shipping-phase"],
+ }
+
+ yield task
+
+
+def craft_release_properties(config, job):
+ params = config.params
+ build_platform = job["attributes"]["build_platform"]
+ build_platform = build_platform.replace("-shippable", "")
+ if build_platform.endswith("-source"):
+ build_platform = build_platform.replace("-source", "-release")
+
+ # XXX This should be explicitly set via build attributes or something
+ if "android" in job["label"] or "fennec" in job["label"]:
+ app_name = "Fennec"
+ elif config.graph_config["trust-domain"] == "comm":
+ app_name = "Thunderbird"
+ else:
+ # XXX Even DevEdition is called Firefox
+ app_name = "Firefox"
+
+ return {
+ "app-name": app_name,
+ "app-version": params["app_version"],
+ "branch": params["project"],
+ "build-id": params["moz_build_date"],
+ "hash-type": "sha512",
+ "platform": build_platform,
+ }
+
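+# Example of the returned shape (values illustrative):
+#     {"app-name": "Firefox", "app-version": "92.0.1",
+#      "branch": "mozilla-release", "build-id": "20210922161155",
+#      "hash-type": "sha512", "platform": "win64"}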
+
+@transforms.add
+def make_task_worker(config, jobs):
+ for job in jobs:
+ valid_beetmover_job = len(job["dependencies"]) == 2 and any(
+ ["signing" in j for j in job["dependencies"]]
+ )
+ # XXX release snap-repackage has a variable number of dependencies, depending on how many
+ # "post-beetmover-dummy" jobs there are in the graph.
+ if "-snap-" not in job["label"] and not valid_beetmover_job:
+ raise NotImplementedError("Beetmover must have two dependencies.")
+
+ locale = job["attributes"].get("locale")
+ platform = job["attributes"]["build_platform"]
+
+ worker = {
+ "implementation": "beetmover",
+ "release-properties": craft_release_properties(config, job),
+ "upstream-artifacts": generate_beetmover_upstream_artifacts(
+ config, job, platform, locale
+ ),
+ "artifact-map": generate_beetmover_artifact_map(
+ config, job, platform=platform, locale=locale
+ ),
+ }
+
+ if locale:
+ worker["locale"] = locale
+ job["worker"] = worker
+
+ yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/beetmover_apt.py b/taskcluster/gecko_taskgraph/transforms/beetmover_apt.py
new file mode 100644
index 0000000000..49836a1785
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/beetmover_apt.py
@@ -0,0 +1,119 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+from itertools import islice
+
+from taskgraph import MAX_DEPENDENCIES
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.dependencies import get_primary_dependency
+
+from gecko_taskgraph.util.platforms import architecture
+from gecko_taskgraph.util.scriptworker import (
+ generate_artifact_registry_gcs_sources,
+ get_beetmover_apt_repo_scope,
+ get_beetmover_repo_action_scope,
+)
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def beetmover_apt(config, tasks):
+ product = (
+ "firefox"
+ if config.params["release_type"] == "nightly"
+ else config.params["release_product"]
+ )
+ filtered_tasks = filter_beetmover_apt_tasks(config, tasks, product)
+ # There are too many beetmover-repackage dependencies for a single task
+ # and we hit the taskgraph dependencies limit.
+    # To work around this limitation, we chunk the would-be task
+    # into tasks dependent on, at most, half of MAX_DEPENDENCIES.
+ batches = batched(filtered_tasks, MAX_DEPENDENCIES // 2)
+ for index, batch in enumerate(batches):
+ dependencies = {}
+ gcs_sources = []
+ for task in batch:
+ dep = get_primary_dependency(config, task)
+ assert dep
+
+ dependencies[dep.label] = dep.label
+ gcs_sources.extend(generate_artifact_registry_gcs_sources(dep))
+ description = f"Batch {index + 1} of beetmover APT submissions for the {config.params['release_type']} .deb packages"
+ platform = "firefox-release/opt"
+ treeherder = {
+ "platform": platform,
+ "tier": 1,
+ "kind": "other",
+ "symbol": f"BM-apt(batch-{index + 1})",
+ }
+ apt_repo_scope = get_beetmover_apt_repo_scope(config)
+ repo_action_scope = get_beetmover_repo_action_scope(config)
+ attributes = {
+ "required_signoffs": ["mar-signing"],
+ "shippable": True,
+ "shipping_product": product,
+ }
+ task = {
+ "label": f"{config.kind}-{index + 1}-{platform}",
+ "description": description,
+ "worker-type": "beetmover",
+ "treeherder": treeherder,
+ "scopes": [apt_repo_scope, repo_action_scope],
+ "attributes": attributes,
+ "shipping-phase": "ship",
+ "shipping-product": product,
+ "dependencies": dependencies,
+ }
+ worker = {
+ "implementation": "beetmover-import-from-gcs-to-artifact-registry",
+ "product": product,
+ "gcs-sources": gcs_sources,
+ }
+ task["worker"] = worker
+ yield task
+
+
+def batched(iterable, n):
+ "Batch data into tuples of length n. The last batch may be shorter."
+ # batched('ABCDEFG', 3) --> ABC DEF G
+ if n < 1:
+ raise ValueError("n must be at least one")
+ it = iter(iterable)
+ batch = tuple(islice(it, n))
+ while batch:
+ yield batch
+ batch = tuple(islice(it, n))
+
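+# e.g. list(batched("ABCDEFG", 3)) == [("A", "B", "C"), ("D", "E", "F"), ("G",)]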
+
+def filter_beetmover_apt_tasks(config, tasks, product):
+ for task in tasks:
+ task["primary-dependency"] = get_primary_dependency(config, task)
+ if filter_beetmover_apt_task(task, product):
+ yield task
+
+
+def filter_beetmover_apt_task(task, product):
+ # We only create beetmover-apt tasks for l10n beetmover-repackage tasks that
+ # beetmove langpack .deb packages. The langpack .deb packages support all
+ # architectures, so we generate them only on x86_64 tasks.
+ return (
+ is_x86_64_l10n_task(task) or is_not_l10n_task(task)
+ ) and is_task_for_product(task, product)
+
+
+def is_x86_64_l10n_task(task):
+ dep = task["primary-dependency"]
+ locale = dep.attributes.get("locale")
+ return locale and architecture(dep.attributes["build_platform"]) == "x86_64"
+
+
+def is_not_l10n_task(task):
+ dep = task["primary-dependency"]
+ locale = dep.attributes.get("locale")
+ return not locale
+
+
+def is_task_for_product(task, product):
+ dep = task["primary-dependency"]
+ return dep.attributes.get("shipping_product") == product
diff --git a/taskcluster/gecko_taskgraph/transforms/beetmover_checksums.py b/taskcluster/gecko_taskgraph/transforms/beetmover_checksums.py
new file mode 100644
index 0000000000..b17cb5a743
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/beetmover_checksums.py
@@ -0,0 +1,145 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the checksums signing task into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.dependencies import get_primary_dependency
+from taskgraph.util.schema import Schema
+from taskgraph.util.treeherder import replace_group
+from voluptuous import Optional, Required
+
+from gecko_taskgraph.transforms.beetmover import craft_release_properties
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
+from gecko_taskgraph.util.scriptworker import (
+ generate_beetmover_artifact_map,
+ generate_beetmover_upstream_artifacts,
+ get_beetmover_action_scope,
+ get_beetmover_bucket_scope,
+)
+
+beetmover_checksums_description_schema = Schema(
+ {
+ Required("attributes"): {str: object},
+ Required("dependencies"): task_description_schema["dependencies"],
+ Optional("label"): str,
+ Optional("treeherder"): task_description_schema["treeherder"],
+ Optional("locale"): str,
+ Optional("shipping-phase"): task_description_schema["shipping-phase"],
+ Optional("shipping-product"): task_description_schema["shipping-product"],
+ Optional("job-from"): task_description_schema["job-from"],
+ }
+)
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def remove_name(config, jobs):
+ for job in jobs:
+ if "name" in job:
+ del job["name"]
+ yield job
+
+
+transforms.add_validate(beetmover_checksums_description_schema)
+
+
+@transforms.add
+def make_beetmover_checksums_description(config, jobs):
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ assert dep_job
+ attributes = dep_job.attributes
+
+ treeherder = job.get("treeherder", {})
+ treeherder.setdefault(
+ "symbol",
+ replace_group(dep_job.task["extra"]["treeherder"]["symbol"], "BMcs"),
+ )
+ dep_th_platform = (
+ dep_job.task.get("extra", {})
+ .get("treeherder", {})
+ .get("machine", {})
+ .get("platform", "")
+ )
+ treeherder.setdefault("platform", f"{dep_th_platform}/opt")
+ treeherder.setdefault(
+ "tier", dep_job.task.get("extra", {}).get("treeherder", {}).get("tier", 1)
+ )
+ treeherder.setdefault("kind", "build")
+
+ label = job["label"]
+ build_platform = attributes.get("build_platform")
+
+ description = (
+ "Beetmover submission of checksums for locale '{locale}' for build '"
+ "{build_platform}/{build_type}'".format(
+ locale=attributes.get("locale", "en-US"),
+ build_platform=build_platform,
+ build_type=attributes.get("build_type"),
+ )
+ )
+
+ extra = {}
+ if "devedition" in build_platform:
+ extra["product"] = "devedition"
+ else:
+ extra["product"] = "firefox"
+
+ attributes = copy_attributes_from_dependent_job(dep_job)
+ attributes.update(job.get("attributes", {}))
+
+ if dep_job.attributes.get("locale"):
+ treeherder["symbol"] = "BMcs({})".format(dep_job.attributes.get("locale"))
+ attributes["locale"] = dep_job.attributes.get("locale")
+
+ bucket_scope = get_beetmover_bucket_scope(config)
+ action_scope = get_beetmover_action_scope(config)
+
+ task = {
+ "label": label,
+ "description": description,
+ "worker-type": "beetmover",
+ "scopes": [bucket_scope, action_scope],
+ "dependencies": job["dependencies"],
+ "attributes": attributes,
+ "run-on-projects": dep_job.attributes.get("run_on_projects"),
+ "treeherder": treeherder,
+ "extra": extra,
+ }
+
+ if "shipping-phase" in job:
+ task["shipping-phase"] = job["shipping-phase"]
+
+ if "shipping-product" in job:
+ task["shipping-product"] = job["shipping-product"]
+
+ yield task
+
+
+@transforms.add
+def make_beetmover_checksums_worker(config, jobs):
+ for job in jobs:
+ locale = job["attributes"].get("locale")
+ platform = job["attributes"]["build_platform"]
+
+ worker = {
+ "implementation": "beetmover",
+ "release-properties": craft_release_properties(config, job),
+ "upstream-artifacts": generate_beetmover_upstream_artifacts(
+ config, job, platform, locale
+ ),
+ "artifact-map": generate_beetmover_artifact_map(
+ config, job, platform=platform, locale=locale
+ ),
+ }
+
+ if locale:
+ worker["locale"] = locale
+ job["worker"] = worker
+
+ yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/beetmover_emefree_checksums.py b/taskcluster/gecko_taskgraph/transforms/beetmover_emefree_checksums.py
new file mode 100644
index 0000000000..11a5fe522c
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/beetmover_emefree_checksums.py
@@ -0,0 +1,154 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform release-beetmover-source-checksums into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.dependencies import get_primary_dependency
+from taskgraph.util.schema import Schema
+from voluptuous import Optional
+
+from gecko_taskgraph.transforms.beetmover import craft_release_properties
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
+
+beetmover_checksums_description_schema = Schema(
+ {
+ Optional("label"): str,
+ Optional("extra"): object,
+ Optional("shipping-phase"): task_description_schema["shipping-phase"],
+ Optional("shipping-product"): task_description_schema["shipping-product"],
+ Optional("job-from"): task_description_schema["job-from"],
+ Optional("attributes"): task_description_schema["attributes"],
+ Optional("dependencies"): task_description_schema["dependencies"],
+ }
+)
+
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def remove_name(config, jobs):
+ for job in jobs:
+ if "name" in job:
+ del job["name"]
+ yield job
+
+
+transforms.add_validate(beetmover_checksums_description_schema)
+
+
+@transforms.add
+def make_beetmover_checksums_description(config, jobs):
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ assert dep_job
+
+ attributes = dep_job.attributes
+ build_platform = attributes.get("build_platform")
+ if not build_platform:
+ raise Exception("Cannot find build platform!")
+ repack_id = dep_job.task.get("extra", {}).get("repack_id")
+ if not repack_id:
+ raise Exception("Cannot find repack id!")
+
+ label = dep_job.label.replace("beetmover-", "beetmover-checksums-")
+ description = (
+ "Beetmove checksums for repack_id '{repack_id}' for build '"
+ "{build_platform}/{build_type}'".format(
+ repack_id=repack_id,
+ build_platform=build_platform,
+ build_type=attributes.get("build_type"),
+ )
+ )
+
+ extra = {}
+ extra["partner_path"] = dep_job.task["payload"]["upstreamArtifacts"][0][
+ "locale"
+ ]
+ extra["repack_id"] = repack_id
+
+ dependencies = {dep_job.kind: dep_job.label}
+ for k, v in dep_job.dependencies.items():
+ if k.startswith("beetmover"):
+ dependencies[k] = v
+
+ attributes = copy_attributes_from_dependent_job(dep_job)
+
+ task = {
+ "label": label,
+ "description": description,
+ "worker-type": "{}/{}".format(
+ dep_job.task["provisionerId"],
+ dep_job.task["workerType"],
+ ),
+ "scopes": dep_job.task["scopes"],
+ "dependencies": dependencies,
+ "attributes": attributes,
+ "run-on-projects": dep_job.attributes.get("run_on_projects"),
+ "extra": extra,
+ }
+
+ if "shipping-phase" in job:
+ task["shipping-phase"] = job["shipping-phase"]
+
+ if "shipping-product" in job:
+ task["shipping-product"] = job["shipping-product"]
+
+ yield task
+
+
+def generate_upstream_artifacts(refs, partner_path):
+ # Until bug 1331141 is fixed, if you are adding any new artifacts here that
+    # need to be transferred to S3, please be aware you also need to follow up
+ # with a beetmover patch in https://github.com/mozilla-releng/beetmoverscript/.
+ # See example in bug 1348286
+ common_paths = [
+ "public/target.checksums",
+ ]
+
+ upstream_artifacts = [
+ {
+ "taskId": {"task-reference": refs["beetmover"]},
+ "taskType": "signing",
+ "paths": common_paths,
+ "locale": f"beetmover-checksums/{partner_path}",
+ }
+ ]
+
+ return upstream_artifacts
+
+
+@transforms.add
+def make_beetmover_checksums_worker(config, jobs):
+ for job in jobs:
+ valid_beetmover_job = len(job["dependencies"]) == 1
+ if not valid_beetmover_job:
+ raise NotImplementedError("Beetmover checksums must have one dependency.")
+
+ refs = {
+ "beetmover": None,
+ }
+ for dependency in job["dependencies"].keys():
+ if dependency.endswith("beetmover"):
+ refs["beetmover"] = f"<{dependency}>"
+ if None in refs.values():
+ raise NotImplementedError(
+ "Beetmover checksums must have a beetmover dependency!"
+ )
+
+ worker = {
+ "implementation": "beetmover",
+ "release-properties": craft_release_properties(config, job),
+ "upstream-artifacts": generate_upstream_artifacts(
+ refs,
+ job["extra"]["partner_path"],
+ ),
+ }
+
+ job["worker"] = worker
+
+ yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/beetmover_geckoview.py b/taskcluster/gecko_taskgraph/transforms/beetmover_geckoview.py
new file mode 100644
index 0000000000..1ae884d10c
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/beetmover_geckoview.py
@@ -0,0 +1,181 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the beetmover task into an actual task description.
+"""
+
+
+from copy import deepcopy
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.dependencies import get_primary_dependency
+from taskgraph.util.schema import Schema, optionally_keyed_by, resolve_keyed_by
+from voluptuous import Optional, Required
+
+from gecko_taskgraph.transforms.beetmover import (
+ craft_release_properties as beetmover_craft_release_properties,
+)
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import (
+ copy_attributes_from_dependent_job,
+ release_level,
+)
+from gecko_taskgraph.util.declarative_artifacts import (
+ get_geckoview_artifact_id,
+ get_geckoview_artifact_map,
+ get_geckoview_upstream_artifacts,
+)
+
+beetmover_description_schema = Schema(
+ {
+ Required("label"): str,
+ Required("dependencies"): task_description_schema["dependencies"],
+ Optional("treeherder"): task_description_schema["treeherder"],
+ Required("run-on-projects"): task_description_schema["run-on-projects"],
+ Required("run-on-hg-branches"): task_description_schema["run-on-hg-branches"],
+ Optional("bucket-scope"): optionally_keyed_by("release-level", str),
+ Optional("shipping-phase"): optionally_keyed_by(
+ "project", task_description_schema["shipping-phase"]
+ ),
+ Optional("shipping-product"): task_description_schema["shipping-product"],
+ Optional("attributes"): task_description_schema["attributes"],
+ Optional("job-from"): task_description_schema["job-from"],
+ }
+)
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def remove_name(config, jobs):
+ for job in jobs:
+ if "name" in job:
+ del job["name"]
+ yield job
+
+
+transforms.add_validate(beetmover_description_schema)
+
+
+@transforms.add
+def resolve_keys(config, jobs):
+ for job in jobs:
+ resolve_keyed_by(
+ job,
+ "run-on-hg-branches",
+ item_name=job["label"],
+ project=config.params["project"],
+ )
+ resolve_keyed_by(
+ job,
+ "shipping-phase",
+ item_name=job["label"],
+ project=config.params["project"],
+ )
+ resolve_keyed_by(
+ job,
+ "bucket-scope",
+ item_name=job["label"],
+ **{"release-level": release_level(config.params["project"])},
+ )
+ yield job
+
+
+@transforms.add
+def split_maven_packages(config, jobs):
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ assert dep_job
+
+ attributes = copy_attributes_from_dependent_job(dep_job)
+ for package in attributes["maven_packages"]:
+ package_job = deepcopy(job)
+ package_job["maven-package"] = package
+ yield package_job
+
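+# For illustration: a dependency whose attributes carry
+# maven_packages == ["geckoview", "geckoview-omnijar"] (names hypothetical) fans
+# this job out into one copy per package, each tagged with "maven-package".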
+
+@transforms.add
+def make_task_description(config, jobs):
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ assert dep_job
+
+ attributes = copy_attributes_from_dependent_job(dep_job)
+ attributes.update(job.get("attributes", {}))
+
+ treeherder = job.get("treeherder", {})
+ dep_th_platform = (
+ dep_job.task.get("extra", {})
+ .get("treeherder", {})
+ .get("machine", {})
+ .get("platform", "")
+ )
+ treeherder.setdefault("platform", f"{dep_th_platform}/opt")
+ treeherder.setdefault("tier", 2)
+ treeherder.setdefault("kind", "build")
+ package = job["maven-package"]
+ treeherder.setdefault("symbol", f"BM-{package}")
+ label = job["label"]
+ description = (
+ "Beetmover submission for geckoview"
+ "{build_platform}/{build_type}'".format(
+ build_platform=attributes.get("build_platform"),
+ build_type=attributes.get("build_type"),
+ )
+ )
+
+ job["dependencies"].update(deepcopy(dep_job.dependencies))
+
+ if job.get("locale"):
+ attributes["locale"] = job["locale"]
+
+ attributes["run_on_hg_branches"] = job["run-on-hg-branches"]
+
+ task = {
+ "label": f"{package}-{label}",
+ "description": description,
+ "worker-type": "beetmover",
+ "scopes": [
+ job["bucket-scope"],
+ "project:releng:beetmover:action:push-to-maven",
+ ],
+ "dependencies": job["dependencies"],
+ "attributes": attributes,
+ "run-on-projects": job["run-on-projects"],
+ "treeherder": treeherder,
+ "shipping-phase": job["shipping-phase"],
+ "maven-package": package,
+ }
+
+ yield task
+
+
+@transforms.add
+def make_task_worker(config, jobs):
+ for job in jobs:
+ job["worker"] = {
+ "artifact-map": get_geckoview_artifact_map(config, job),
+ "implementation": "beetmover-maven",
+ "release-properties": craft_release_properties(config, job),
+ "upstream-artifacts": get_geckoview_upstream_artifacts(
+ config, job, job["maven-package"]
+ ),
+ }
+ del job["maven-package"]
+
+ yield job
+
+
+def craft_release_properties(config, job):
+ release_properties = beetmover_craft_release_properties(config, job)
+
+ release_properties["artifact-id"] = get_geckoview_artifact_id(
+ config,
+ job["attributes"]["build_platform"],
+ job["maven-package"],
+ job["attributes"].get("update-channel"),
+ )
+ release_properties["app-name"] = "geckoview"
+
+ return release_properties
diff --git a/taskcluster/gecko_taskgraph/transforms/beetmover_langpack_checksums.py b/taskcluster/gecko_taskgraph/transforms/beetmover_langpack_checksums.py
new file mode 100644
index 0000000000..9e59621de4
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/beetmover_langpack_checksums.py
@@ -0,0 +1,143 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform release-beetmover-langpack-checksums into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.dependencies import get_primary_dependency
+from taskgraph.util.schema import Schema
+from taskgraph.util.treeherder import inherit_treeherder_from_dep
+from voluptuous import Optional, Required
+
+from gecko_taskgraph.transforms.beetmover import craft_release_properties
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
+from gecko_taskgraph.util.scriptworker import (
+ generate_beetmover_artifact_map,
+ generate_beetmover_upstream_artifacts,
+ get_beetmover_action_scope,
+ get_beetmover_bucket_scope,
+)
+
+beetmover_checksums_description_schema = Schema(
+ {
+ Required("attributes"): {str: object},
+ Optional("label"): str,
+ Optional("treeherder"): task_description_schema["treeherder"],
+ Optional("locale"): str,
+ Optional("dependencies"): task_description_schema["dependencies"],
+ Optional("job-from"): task_description_schema["job-from"],
+ Optional("shipping-phase"): task_description_schema["shipping-phase"],
+ Optional("shipping-product"): task_description_schema["shipping-product"],
+ }
+)
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def remove_name(config, jobs):
+ for job in jobs:
+ if "name" in job:
+ del job["name"]
+ yield job
+
+
+transforms.add_validate(beetmover_checksums_description_schema)
+
+
+@transforms.add
+def make_beetmover_checksums_description(config, jobs):
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ assert dep_job
+
+ attributes = dep_job.attributes
+
+ treeherder = inherit_treeherder_from_dep(job, dep_job)
+ treeherder.setdefault(
+ "symbol", "BMcslang(N{})".format(attributes.get("l10n_chunk", ""))
+ )
+
+ label = job["label"]
+ build_platform = attributes.get("build_platform")
+
+ description = "Beetmover submission of checksums for langpack files"
+
+ extra = {}
+ if "devedition" in build_platform:
+ extra["product"] = "devedition"
+ else:
+ extra["product"] = "firefox"
+
+ dependencies = {dep_job.kind: dep_job.label}
+ for k, v in dep_job.dependencies.items():
+ if k.startswith("beetmover"):
+ dependencies[k] = v
+
+ attributes = copy_attributes_from_dependent_job(dep_job)
+ if "chunk_locales" in dep_job.attributes:
+ attributes["chunk_locales"] = dep_job.attributes["chunk_locales"]
+ attributes.update(job.get("attributes", {}))
+
+ bucket_scope = get_beetmover_bucket_scope(config)
+ action_scope = get_beetmover_action_scope(config)
+
+ task = {
+ "label": label,
+ "description": description,
+ "worker-type": "beetmover",
+ "scopes": [bucket_scope, action_scope],
+ "dependencies": dependencies,
+ "attributes": attributes,
+ "run-on-projects": dep_job.attributes.get("run_on_projects"),
+ "treeherder": treeherder,
+ "extra": extra,
+ }
+
+ if "shipping-phase" in job:
+ task["shipping-phase"] = job["shipping-phase"]
+
+ if "shipping-product" in job:
+ task["shipping-product"] = job["shipping-product"]
+
+ yield task
+
+
+@transforms.add
+def make_beetmover_checksums_worker(config, jobs):
+ for job in jobs:
+ valid_beetmover_job = len(job["dependencies"]) == 1
+ if not valid_beetmover_job:
+ raise NotImplementedError("Beetmover checksums must have one dependency.")
+
+ locales = job["attributes"].get("chunk_locales")
+ platform = job["attributes"]["build_platform"]
+
+ refs = {
+ "beetmover": None,
+ }
+ for dependency in job["dependencies"].keys():
+ if dependency.startswith("release-beetmover"):
+ refs["beetmover"] = f"<{dependency}>"
+ if None in refs.values():
+ raise NotImplementedError(
+ "Beetmover checksums must have a beetmover dependency!"
+ )
+
+ worker = {
+ "implementation": "beetmover",
+ "release-properties": craft_release_properties(config, job),
+ "upstream-artifacts": generate_beetmover_upstream_artifacts(
+ config, job, platform, locales
+ ),
+ "artifact-map": generate_beetmover_artifact_map(
+ config, job, platform=platform, locale=locales
+ ),
+ }
+
+ job["worker"] = worker
+
+ yield job
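+
+
+ # Illustrative note: the "<kind-name>" strings above follow the
+ # task-reference convention; elsewhere a payload value such as
+ #     {"task-reference": "<release-beetmover-firefox>"}    # hypothetical name
+ # is replaced at submission time with the taskId of that dependency.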
diff --git a/taskcluster/gecko_taskgraph/transforms/beetmover_push_to_release.py b/taskcluster/gecko_taskgraph/transforms/beetmover_push_to_release.py
new file mode 100644
index 0000000000..b6307d93cf
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/beetmover_push_to_release.py
@@ -0,0 +1,93 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the beetmover-push-to-release task into a task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import Schema, taskref_or_string
+from voluptuous import Optional, Required
+
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.scriptworker import (
+ add_scope_prefix,
+ get_beetmover_bucket_scope,
+)
+
+beetmover_push_to_release_description_schema = Schema(
+ {
+ Required("name"): str,
+ Required("product"): str,
+ Required("treeherder-platform"): str,
+ Optional("attributes"): {str: object},
+ Optional("job-from"): task_description_schema["job-from"],
+ Optional("run"): {str: object},
+ Optional("run-on-projects"): task_description_schema["run-on-projects"],
+ Optional("dependencies"): {str: taskref_or_string},
+ Optional("index"): {str: str},
+ Optional("routes"): [str],
+ Required("shipping-phase"): task_description_schema["shipping-phase"],
+ Required("shipping-product"): task_description_schema["shipping-product"],
+ Optional("extra"): task_description_schema["extra"],
+ Optional("worker"): {
+ Optional("max-run-time"): int,
+ },
+ }
+)
+
+
+transforms = TransformSequence()
+transforms.add_validate(beetmover_push_to_release_description_schema)
+
+
+@transforms.add
+def make_beetmover_push_to_release_description(config, jobs):
+ for job in jobs:
+ treeherder = job.get("treeherder", {})
+ treeherder.setdefault("symbol", "Rel(BM-C)")
+ treeherder.setdefault("tier", 1)
+ treeherder.setdefault("kind", "build")
+ treeherder.setdefault("platform", job["treeherder-platform"])
+
+ label = job["name"]
+ description = "Beetmover push to release for '{product}'".format(
+ product=job["product"]
+ )
+
+ bucket_scope = get_beetmover_bucket_scope(config)
+ action_scope = add_scope_prefix(config, "beetmover:action:push-to-releases")
+
+ task = {
+ "label": label,
+ "description": description,
+ "worker-type": "beetmover",
+ "scopes": [bucket_scope, action_scope],
+ "product": job["product"],
+ "dependencies": job["dependencies"],
+ "attributes": job.get("attributes", {}),
+ "run-on-projects": job.get("run-on-projects"),
+ "treeherder": treeherder,
+ "shipping-phase": job.get("shipping-phase", "push"),
+ "shipping-product": job.get("shipping-product"),
+ "routes": job.get("routes", []),
+ "extra": job.get("extra", {}),
+ "worker": job.get("worker", {}),
+ }
+
+ yield task
+
+
+@transforms.add
+def make_beetmover_push_to_release_worker(config, jobs):
+ for job in jobs:
+ worker = {
+ "implementation": "beetmover-push-to-release",
+ "product": job["product"],
+ }
+ if job.get("worker", {}).get("max-run-time"):
+ worker["max-run-time"] = job["worker"]["max-run-time"]
+ job["worker"] = worker
+ del job["product"]
+
+ yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/beetmover_repackage.py b/taskcluster/gecko_taskgraph/transforms/beetmover_repackage.py
new file mode 100644
index 0000000000..700b136f4b
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/beetmover_repackage.py
@@ -0,0 +1,358 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the beetmover task into an actual task description.
+"""
+
+import logging
+from typing import List
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.dependencies import get_dependencies, get_primary_dependency
+from taskgraph.util.schema import Schema
+from taskgraph.util.taskcluster import get_artifact_prefix
+from taskgraph.util.treeherder import inherit_treeherder_from_dep, replace_group
+from voluptuous import Optional, Required
+
+from gecko_taskgraph.transforms.beetmover import craft_release_properties
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import (
+ copy_attributes_from_dependent_job,
+ sorted_unique_list,
+)
+from gecko_taskgraph.util.partials import (
+ get_balrog_platform_name,
+ get_partials_artifacts_from_params,
+ get_partials_info_from_params,
+)
+from gecko_taskgraph.util.scriptworker import (
+ generate_beetmover_artifact_map,
+ generate_beetmover_partials_artifact_map,
+ generate_beetmover_upstream_artifacts,
+ get_beetmover_action_scope,
+ get_beetmover_bucket_scope,
+)
+
+logger = logging.getLogger(__name__)
+
+
+beetmover_description_schema = Schema(
+ {
+ # unique label to describe this beetmover task, defaults to {dep.label}-beetmover
+ Required("label"): str,
+ Required("dependencies"): task_description_schema["dependencies"],
+ # treeherder is allowed here to override any defaults we use for beetmover. See
+ # taskcluster/gecko_taskgraph/transforms/task.py for the schema details, and the
+ # below transforms for defaults of various values.
+ Optional("treeherder"): task_description_schema["treeherder"],
+ Optional("attributes"): task_description_schema["attributes"],
+ # locale is passed only for l10n beetmoving
+ Optional("locale"): str,
+ Required("shipping-phase"): task_description_schema["shipping-phase"],
+ Optional("job-from"): task_description_schema["job-from"],
+ }
+)
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def remove_name(config, jobs):
+ for job in jobs:
+ if "name" in job:
+ del job["name"]
+ yield job
+
+
+transforms.add_validate(beetmover_description_schema)
+
+
+def get_label_by_suffix(labels: List, suffix: str):
+ """
+ Given a list of labels, return the single label with the provided suffix.
+ Raises an exception if more than one matching label is found, and an
+ IndexError if none is found.
+
+ Args:
+ labels (List): List of labels
+ suffix (str): Suffix for the desired label
+
+ Returns:
+ str: The desired label
+ """
+ labels = [l for l in labels if l.endswith(suffix)]
+ if len(labels) > 1:
+ raise Exception(
+ f"There should only be a single label with suffix: {suffix} - found {len(labels)}"
+ )
+ return labels[0]
+
+
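+ # Usage sketch with made-up labels:
+ #     get_label_by_suffix(
+ #         ["build-macosx64/opt", "signing-macosx64-mac-signing"],
+ #         "-mac-signing",
+ #     )
+ # returns "signing-macosx64-mac-signing". No match raises an IndexError;
+ # several matches raise the explicit Exception above.
+
+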
+@transforms.add
+def gather_required_signoffs(config, jobs):
+ for job in jobs:
+ job.setdefault("attributes", {})["required_signoffs"] = sorted_unique_list(
+ *(
+ dep.attributes.get("required_signoffs", [])
+ for dep in get_dependencies(config, job)
+ )
+ )
+ yield job
+
+
+@transforms.add
+def make_task_description(config, jobs):
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ assert dep_job
+
+ attributes = dep_job.attributes
+
+ treeherder = inherit_treeherder_from_dep(job, dep_job)
+ upstream_symbol = dep_job.task["extra"]["treeherder"]["symbol"]
+ if "build" in job["dependencies"]:
+ build_label = job["dependencies"]["build"]
+ build_dep = config.kind_dependencies_tasks[build_label]
+ upstream_symbol = build_dep.task["extra"]["treeherder"]["symbol"]
+ treeherder.setdefault("symbol", replace_group(upstream_symbol, "BMR"))
+ label = job["label"]
+ description = (
+ "Beetmover submission for locale '{locale}' for build '"
+ "{build_platform}/{build_type}'".format(
+ locale=attributes.get("locale", "en-US"),
+ build_platform=attributes.get("build_platform"),
+ build_type=attributes.get("build_type"),
+ )
+ )
+
+ upstream_deps = {
+ k: config.kind_dependencies_tasks[v] for k, v in job["dependencies"].items()
+ }
+
+ signing_name = "build-signing"
+ build_name = "build"
+ repackage_name = "repackage"
+ repackage_signing_name = "repackage-signing"
+ msi_signing_name = "repackage-signing-msi"
+ msix_signing_name = "repackage-signing-shippable-l10n-msix"
+ mar_signing_name = "mar-signing"
+ attribution_name = "attribution"
+ repackage_deb_name = "repackage-deb"
+ if job.get("locale"):
+ signing_name = "shippable-l10n-signing"
+ build_name = "shippable-l10n"
+ repackage_name = "repackage-l10n"
+ repackage_signing_name = "repackage-signing-l10n"
+ mar_signing_name = "mar-signing-l10n"
+ attribution_name = "attribution-l10n"
+ repackage_deb_name = "repackage-deb-l10n"
+
+ # The upstream "signing" task for macosx is either *-mac-signing or *-mac-notarization
+ if attributes.get("build_platform", "").startswith("macosx"):
+ signing_name = None
+ # We use the signing task on level 1 and notarization on level 3
+ if int(config.params.get("level", 0)) < 3:
+ signing_name = get_label_by_suffix(job["dependencies"], "-mac-signing")
+ else:
+ signing_name = get_label_by_suffix(
+ job["dependencies"], "-mac-notarization"
+ )
+ if not signing_name:
+ raise Exception("Could not find upstream kind for mac signing.")
+
+ dependencies = {
+ "build": upstream_deps[build_name],
+ "repackage": upstream_deps[repackage_name],
+ "signing": upstream_deps[signing_name],
+ "mar-signing": upstream_deps[mar_signing_name],
+ }
+ if "partials-signing" in upstream_deps:
+ dependencies["partials-signing"] = upstream_deps["partials-signing"]
+ if msi_signing_name in upstream_deps:
+ dependencies[msi_signing_name] = upstream_deps[msi_signing_name]
+ if msix_signing_name in upstream_deps:
+ dependencies[msix_signing_name] = upstream_deps[msix_signing_name]
+ if repackage_signing_name in upstream_deps:
+ dependencies["repackage-signing"] = upstream_deps[repackage_signing_name]
+ if attribution_name in upstream_deps:
+ dependencies[attribution_name] = upstream_deps[attribution_name]
+ if repackage_deb_name in upstream_deps:
+ dependencies[repackage_deb_name] = upstream_deps[repackage_deb_name]
+
+ attributes = copy_attributes_from_dependent_job(dep_job)
+ attributes.update(job.get("attributes", {}))
+ if job.get("locale"):
+ attributes["locale"] = job["locale"]
+
+ bucket_scope = get_beetmover_bucket_scope(config)
+ action_scope = get_beetmover_action_scope(config)
+
+ task = {
+ "label": label,
+ "description": description,
+ "worker-type": "beetmover",
+ "scopes": [bucket_scope, action_scope],
+ "dependencies": dependencies,
+ "attributes": attributes,
+ "run-on-projects": dep_job.attributes.get("run_on_projects"),
+ "treeherder": treeherder,
+ "shipping-phase": job["shipping-phase"],
+ "shipping-product": job.get("shipping-product"),
+ }
+
+ yield task
+
+
+def generate_partials_upstream_artifacts(job, artifacts, platform, locale=None):
+ artifact_prefix = get_artifact_prefix(job)
+ if locale and locale != "en-US":
+ artifact_prefix = f"{artifact_prefix}/{locale}"
+
+ upstream_artifacts = [
+ {
+ "taskId": {"task-reference": "<partials-signing>"},
+ "taskType": "signing",
+ "paths": [f"{artifact_prefix}/{path}" for path, _ in artifacts],
+ "locale": locale or "en-US",
+ }
+ ]
+
+ return upstream_artifacts
+
+
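+ # Illustrative result with hypothetical inputs: for locale "de" and
+ # artifacts [("firefox-108.0-109.0.partial.mar", info)], the function yields
+ #     [{"taskId": {"task-reference": "<partials-signing>"},
+ #       "taskType": "signing",
+ #       "paths": ["<artifact_prefix>/de/firefox-108.0-109.0.partial.mar"],
+ #       "locale": "de"}]
+ # where <artifact_prefix> comes from get_artifact_prefix(job).
+
+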
+@transforms.add
+def make_task_worker(config, jobs):
+ for job in jobs:
+ locale = job["attributes"].get("locale")
+ platform = job["attributes"]["build_platform"]
+
+ worker = {
+ "implementation": "beetmover",
+ "release-properties": craft_release_properties(config, job),
+ "upstream-artifacts": generate_beetmover_upstream_artifacts(
+ config, job, platform, locale
+ ),
+ "artifact-map": generate_beetmover_artifact_map(
+ config, job, platform=platform, locale=locale
+ ),
+ }
+
+ if locale:
+ worker["locale"] = locale
+ job["worker"] = worker
+
+ yield job
+
+
+@transforms.add
+def strip_unwanted_langpacks_from_worker(config, jobs):
+ """Strips out langpacks where we didn't sign them.
+
+ This explicitly deletes langpacks from upstream artifacts and from artifact-maps.
+ Due to limitations in declarative artifacts, doing this was our easiest way right now.
+ """
+ ALWAYS_OK_PLATFORMS = {"linux64-shippable", "linux64-devedition"}
+ OSX_OK_PLATFORMS = {"macosx64-shippable", "macosx64-devedition"}
+ for job in jobs:
+ platform = job["attributes"].get("build_platform")
+ if platform in ALWAYS_OK_PLATFORMS:
+ # No need to strip anything
+ yield job
+ continue
+
+ for map in job["worker"].get("artifact-map", [])[:]:
+ if not any([path.endswith("target.langpack.xpi") for path in map["paths"]]):
+ continue
+ if map["locale"] == "ja-JP-mac":
+ # This locale should only exist on mac
+ assert platform in OSX_OK_PLATFORMS
+ continue
+ # map[paths] is being modified while iterating, so we need to resolve the
+ # ".keys()" iterator up front by throwing it into a list.
+ for path in list(map["paths"].keys()):
+ if path.endswith("target.langpack.xpi"):
+ del map["paths"][path]
+ if map["paths"] == {}:
+ job["worker"]["artifact-map"].remove(map)
+
+ for artifact in job["worker"].get("upstream-artifacts", []):
+ if not any(
+ [path.endswith("target.langpack.xpi") for path in artifact["paths"]]
+ ):
+ continue
+ if artifact["locale"] == "ja-JP-mac":
+ # This locale should only exist on mac
+ assert platform in OSX_OK_PLATFORMS
+ continue
+ artifact["paths"] = [
+ path
+ for path in artifact["paths"]
+ if not path.endswith("target.langpack.xpi")
+ ]
+ if artifact["paths"] == []:
+ job["worker"]["upstream-artifacts"].remove(artifact)
+
+ yield job
+
+
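+ # Illustrative before/after with hypothetical paths: on a platform outside
+ # ALWAYS_OK_PLATFORMS, an artifact-map entry like
+ #     {"locale": "de", "paths": {"public/build/target.langpack.xpi": ...,
+ #                                "public/build/target.dmg": ...}}
+ # keeps only the target.dmg path; entries whose "paths" end up empty are
+ # dropped from the artifact-map entirely, and upstream-artifacts are
+ # filtered the same way.
+
+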
+@transforms.add
+def make_partials_artifacts(config, jobs):
+ for job in jobs:
+ locale = job["attributes"].get("locale")
+ if not locale:
+ locale = "en-US"
+
+ platform = job["attributes"]["build_platform"]
+
+ if "partials-signing" not in job["dependencies"]:
+ yield job
+ continue
+
+ balrog_platform = get_balrog_platform_name(platform)
+ artifacts = get_partials_artifacts_from_params(
+ config.params.get("release_history"), balrog_platform, locale
+ )
+
+ upstream_artifacts = generate_partials_upstream_artifacts(
+ job, artifacts, balrog_platform, locale
+ )
+
+ job["worker"]["upstream-artifacts"].extend(upstream_artifacts)
+
+ extra = list()
+
+ partials_info = get_partials_info_from_params(
+ config.params.get("release_history"), balrog_platform, locale
+ )
+
+ job["worker"]["artifact-map"].extend(
+ generate_beetmover_partials_artifact_map(
+ config, job, partials_info, platform=platform, locale=locale
+ )
+ )
+
+ for artifact in partials_info:
+ artifact_extra = {
+ "locale": locale,
+ "artifact_name": artifact,
+ "buildid": partials_info[artifact]["buildid"],
+ "platform": balrog_platform,
+ }
+ for rel_attr in ("previousBuildNumber", "previousVersion"):
+ if partials_info[artifact].get(rel_attr):
+ artifact_extra[rel_attr] = partials_info[artifact][rel_attr]
+ extra.append(artifact_extra)
+
+ job.setdefault("extra", {})
+ job["extra"]["partials"] = extra
+
+ yield job
+
+
+@transforms.add
+def convert_deps(config, jobs):
+ for job in jobs:
+ job["dependencies"] = {
+ name: dep_job.label for name, dep_job in job["dependencies"].items()
+ }
+ yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/beetmover_repackage_l10n.py b/taskcluster/gecko_taskgraph/transforms/beetmover_repackage_l10n.py
new file mode 100644
index 0000000000..2630b34f62
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/beetmover_repackage_l10n.py
@@ -0,0 +1,44 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the per-locale beetmover-repackage task, adding locale and treeherder info.
+"""
+
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.dependencies import get_primary_dependency
+from taskgraph.util.treeherder import join_symbol
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def make_beetmover_description(config, jobs):
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ assert dep_job
+
+ locale = dep_job.attributes.get("locale")
+ if not locale:
+ yield job
+ continue
+
+ group = "BMR"
+
+ # add the locale code
+ symbol = locale
+
+ treeherder = {
+ "symbol": join_symbol(group, symbol),
+ }
+
+ beet_description = {
+ "label": job["label"],
+ "attributes": job["attributes"],
+ "dependencies": job["dependencies"],
+ "treeherder": treeherder,
+ "locale": locale,
+ "shipping-phase": job["shipping-phase"],
+ }
+ yield beet_description
diff --git a/taskcluster/gecko_taskgraph/transforms/beetmover_repackage_partner.py b/taskcluster/gecko_taskgraph/transforms/beetmover_repackage_partner.py
new file mode 100644
index 0000000000..769f4035df
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/beetmover_repackage_partner.py
@@ -0,0 +1,288 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the beetmover task into an actual task description.
+"""
+
+import logging
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.dependencies import get_primary_dependency
+from taskgraph.util.schema import Schema
+from taskgraph.util.taskcluster import get_artifact_prefix
+from voluptuous import Optional, Required
+
+from gecko_taskgraph.transforms.beetmover import craft_release_properties
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import (
+ copy_attributes_from_dependent_job,
+)
+from gecko_taskgraph.util.partners import get_ftp_platform, get_partner_config_by_kind
+from gecko_taskgraph.util.scriptworker import (
+ add_scope_prefix,
+ get_beetmover_bucket_scope,
+)
+
+logger = logging.getLogger(__name__)
+
+
+beetmover_description_schema = Schema(
+ {
+ # unique label to describe this beetmover task, defaults to {dep.label}-beetmover
+ Optional("label"): str,
+ Required("partner-path"): str,
+ Optional("extra"): object,
+ Optional("attributes"): task_description_schema["attributes"],
+ Optional("dependencies"): task_description_schema["dependencies"],
+ Required("shipping-phase"): task_description_schema["shipping-phase"],
+ Optional("shipping-product"): task_description_schema["shipping-product"],
+ Optional("priority"): task_description_schema["priority"],
+ Optional("job-from"): task_description_schema["job-from"],
+ }
+)
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def remove_name(config, jobs):
+ for job in jobs:
+ if "name" in job:
+ del job["name"]
+ yield job
+
+
+transforms.add_validate(beetmover_description_schema)
+
+
+@transforms.add
+def make_task_description(config, jobs):
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ assert dep_job
+
+ repack_id = dep_job.task.get("extra", {}).get("repack_id")
+ if not repack_id:
+ raise Exception("Cannot find repack id!")
+
+ attributes = dep_job.attributes
+ build_platform = attributes.get("build_platform")
+ if not build_platform:
+ raise Exception("Cannot find build platform!")
+
+ label = dep_job.label.replace("repackage-signing-l10n", "beetmover-")
+ label = label.replace("repackage-signing-", "beetmover-")
+ label = label.replace("repackage-", "beetmover-")
+ label = label.replace("chunking-dummy-", "beetmover-")
+ description = (
+ "Beetmover submission for repack_id '{repack_id}' for build '"
+ "{build_platform}/{build_type}'".format(
+ repack_id=repack_id,
+ build_platform=build_platform,
+ build_type=attributes.get("build_type"),
+ )
+ )
+
+ dependencies = {}
+
+ base_label = "release-partner-repack"
+ if "eme" in config.kind:
+ base_label = "release-eme-free-repack"
+ dependencies["build"] = f"{base_label}-{build_platform}"
+ if "macosx" in build_platform or "win" in build_platform:
+ dependencies["repackage"] = "{}-repackage-{}-{}".format(
+ base_label, build_platform, repack_id.replace("/", "-")
+ )
+ dependencies["repackage-signing"] = "{}-repackage-signing-{}-{}".format(
+ base_label, build_platform, repack_id.replace("/", "-")
+ )
+
+ attributes = copy_attributes_from_dependent_job(dep_job)
+
+ task = {
+ "label": label,
+ "description": description,
+ "dependencies": dependencies,
+ "attributes": attributes,
+ "run-on-projects": dep_job.attributes.get("run_on_projects"),
+ "shipping-phase": job["shipping-phase"],
+ "shipping-product": job.get("shipping-product"),
+ "partner-path": job["partner-path"],
+ "extra": {
+ "repack_id": repack_id,
+ },
+ }
+ # we may have reduced the priority for partner jobs, otherwise task.py will set it
+ if job.get("priority"):
+ task["priority"] = job["priority"]
+
+ yield task
+
+
+@transforms.add
+def populate_scopes_and_worker_type(config, jobs):
+ bucket_scope = get_beetmover_bucket_scope(config)
+ action_scope = add_scope_prefix(config, "beetmover:action:push-to-partner")
+
+ for job in jobs:
+ job["scopes"] = [bucket_scope, action_scope]
+ job["worker-type"] = "beetmover"
+ yield job
+
+
+def generate_upstream_artifacts(
+ job,
+ build_task_ref,
+ repackage_task_ref,
+ repackage_signing_task_ref,
+ platform,
+ repack_id,
+ partner_path,
+ repack_stub_installer=False,
+):
+ upstream_artifacts = []
+ artifact_prefix = get_artifact_prefix(job)
+
+ if "linux" in platform:
+ upstream_artifacts.append(
+ {
+ "taskId": {"task-reference": build_task_ref},
+ "taskType": "build",
+ "paths": [f"{artifact_prefix}/{repack_id}/target.tar.bz2"],
+ "locale": partner_path,
+ }
+ )
+ upstream_artifacts.append(
+ {
+ "taskId": {"task-reference": repackage_signing_task_ref},
+ "taskType": "repackage",
+ "paths": [f"{artifact_prefix}/{repack_id}/target.tar.bz2.asc"],
+ "locale": partner_path,
+ }
+ )
+ elif "macosx" in platform:
+ upstream_artifacts.append(
+ {
+ "taskId": {"task-reference": repackage_task_ref},
+ "taskType": "repackage",
+ "paths": [f"{artifact_prefix}/{repack_id}/target.dmg"],
+ "locale": partner_path,
+ }
+ )
+ upstream_artifacts.append(
+ {
+ "taskId": {"task-reference": repackage_signing_task_ref},
+ "taskType": "repackage",
+ "paths": [f"{artifact_prefix}/{repack_id}/target.dmg.asc"],
+ "locale": partner_path,
+ }
+ )
+ elif "win" in platform:
+ upstream_artifacts.append(
+ {
+ "taskId": {"task-reference": repackage_signing_task_ref},
+ "taskType": "repackage",
+ "paths": [f"{artifact_prefix}/{repack_id}/target.installer.exe"],
+ "locale": partner_path,
+ }
+ )
+ upstream_artifacts.append(
+ {
+ "taskId": {"task-reference": repackage_signing_task_ref},
+ "taskType": "repackage",
+ "paths": [f"{artifact_prefix}/{repack_id}/target.installer.exe.asc"],
+ "locale": partner_path,
+ }
+ )
+ if platform.startswith("win32") and repack_stub_installer:
+ upstream_artifacts.append(
+ {
+ "taskId": {"task-reference": repackage_signing_task_ref},
+ "taskType": "repackage",
+ "paths": [
+ "{}/{}/target.stub-installer.exe".format(
+ artifact_prefix, repack_id
+ )
+ ],
+ "locale": partner_path,
+ }
+ )
+ upstream_artifacts.append(
+ {
+ "taskId": {"task-reference": repackage_signing_task_ref},
+ "taskType": "repackage",
+ "paths": [
+ "{}/{}/target.stub-installer.exe.asc".format(
+ artifact_prefix, repack_id
+ )
+ ],
+ "locale": partner_path,
+ }
+ )
+
+ if not upstream_artifacts:
+ raise Exception("Couldn't find any upstream artifacts.")
+
+ return upstream_artifacts
+
+
+@transforms.add
+def make_task_worker(config, jobs):
+ for job in jobs:
+ platform = job["attributes"]["build_platform"]
+ repack_id = job["extra"]["repack_id"]
+ partner, subpartner, locale = job["extra"]["repack_id"].split("/")
+ partner_config = get_partner_config_by_kind(config, config.kind)
+ repack_stub_installer = partner_config[partner][subpartner].get(
+ "repack_stub_installer"
+ )
+ build_task = None
+ repackage_task = None
+ repackage_signing_task = None
+
+ for dependency in job["dependencies"].keys():
+ if "repackage-signing" in dependency:
+ repackage_signing_task = dependency
+ elif "repackage" in dependency:
+ repackage_task = dependency
+ else:
+ build_task = "build"
+
+ build_task_ref = "<" + str(build_task) + ">"
+ repackage_task_ref = "<" + str(repackage_task) + ">"
+ repackage_signing_task_ref = "<" + str(repackage_signing_task) + ">"
+
+ # generate the partner path; we'll send this to beetmover as the "locale"
+ ftp_platform = get_ftp_platform(platform)
+ repl_dict = {
+ "build_number": config.params["build_number"],
+ "locale": locale,
+ "partner": partner,
+ "platform": ftp_platform,
+ "release_partner_build_number": config.params[
+ "release_partner_build_number"
+ ],
+ "subpartner": subpartner,
+ "version": config.params["version"],
+ }
+ partner_path = job["partner-path"].format(**repl_dict)
+ del job["partner-path"]
+
+ worker = {
+ "implementation": "beetmover",
+ "release-properties": craft_release_properties(config, job),
+ "upstream-artifacts": generate_upstream_artifacts(
+ job,
+ build_task_ref,
+ repackage_task_ref,
+ repackage_signing_task_ref,
+ platform,
+ repack_id,
+ partner_path,
+ repack_stub_installer,
+ ),
+ }
+ job["worker"] = worker
+
+ yield job
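+
+
+ # Illustrative partner path, all values hypothetical: a kind.yml
+ # partner-path template such as
+ #     "{partner}/{subpartner}/{platform}/{locale}"
+ # combined with repack_id "acme/widget/de" and ftp platform "win64"
+ # renders to "acme/widget/win64/de", which beetmover then treats as the
+ # "locale".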
diff --git a/taskcluster/gecko_taskgraph/transforms/beetmover_snap.py b/taskcluster/gecko_taskgraph/transforms/beetmover_snap.py
new file mode 100644
index 0000000000..40f5132cc1
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/beetmover_snap.py
@@ -0,0 +1,42 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the snap beetmover kind into an actual task description.
+"""
+
+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def leave_snap_repackage_dependencies_only(config, jobs):
+ for job in jobs:
+ # XXX: We keep only the snap repackage dependency because, unlike the other
+ # beetmover tasks, the snap artifact doesn't come straight from a build task.
+ # This hack should go away when we rewrite beetmover transforms to allow more
+ # flexibility in deps
+
+ job["dependencies"] = {
+ key: value
+ for key, value in job["dependencies"].items()
+ if key == "release-snap-repackage"
+ }
+
+ job["worker"]["upstream-artifacts"] = [
+ upstream_artifact
+ for upstream_artifact in job["worker"]["upstream-artifacts"]
+ if upstream_artifact["taskId"]["task-reference"]
+ == "<release-snap-repackage>"
+ ]
+
+ yield job
+
+
+@transforms.add
+def set_custom_treeherder_job_name(config, jobs):
+ for job in jobs:
+ job.get("treeherder", {})["symbol"] = "Snap(BM)"
+
+ yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/beetmover_source.py b/taskcluster/gecko_taskgraph/transforms/beetmover_source.py
new file mode 100644
index 0000000000..573f684a98
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/beetmover_source.py
@@ -0,0 +1,35 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the beetmover-source task to drop the `build` dependency and its upstream artifacts
+"""
+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def remove_build_dependency_in_beetmover_source(config, jobs):
+ for job in jobs:
+ # XXX: We delete the build dependency because, unlike the other beetmover
+ # tasks, source doesn't depend on any build task at all. This hack should
+ # go away when we rewrite beetmover transforms to allow more flexibility in deps
+ # Essentially, we should use multi_dep for beetmover.
+ for depname in job["dependencies"]:
+ if "signing" not in depname:
+ del job["dependencies"][depname]
+ break
+ else:
+ raise Exception("Can't find build dep in beetmover source!")
+
+ all_upstream_artifacts = job["worker"]["upstream-artifacts"]
+ upstream_artifacts_without_build = [
+ upstream_artifact
+ for upstream_artifact in all_upstream_artifacts
+ if upstream_artifact["taskId"]["task-reference"] != f"<{depname}>"
+ ]
+ job["worker"]["upstream-artifacts"] = upstream_artifacts_without_build
+
+ yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/beetmover_source_checksums.py b/taskcluster/gecko_taskgraph/transforms/beetmover_source_checksums.py
new file mode 100644
index 0000000000..766587de6f
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/beetmover_source_checksums.py
@@ -0,0 +1,152 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform release-beetmover-source-checksums into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.dependencies import get_primary_dependency
+from taskgraph.util.schema import Schema
+from voluptuous import Optional
+
+from gecko_taskgraph.transforms.beetmover import craft_release_properties
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
+from gecko_taskgraph.util.scriptworker import (
+ generate_beetmover_artifact_map,
+ generate_beetmover_upstream_artifacts,
+ get_beetmover_action_scope,
+ get_beetmover_bucket_scope,
+)
+
+beetmover_checksums_description_schema = Schema(
+ {
+ Optional("label"): str,
+ Optional("treeherder"): task_description_schema["treeherder"],
+ Optional("locale"): str,
+ Optional("shipping-phase"): task_description_schema["shipping-phase"],
+ Optional("shipping-product"): task_description_schema["shipping-product"],
+ Optional("attributes"): task_description_schema["attributes"],
+ Optional("dependencies"): task_description_schema["dependencies"],
+ Optional("job-from"): task_description_schema["job-from"],
+ }
+)
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def remove_name(config, jobs):
+ for job in jobs:
+ if "name" in job:
+ del job["name"]
+ yield job
+
+
+transforms.add_validate(beetmover_checksums_description_schema)
+
+
+@transforms.add
+def make_beetmover_checksums_description(config, jobs):
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ assert dep_job
+
+ attributes = dep_job.attributes
+
+ treeherder = job.get("treeherder", {})
+ treeherder.setdefault("symbol", "BMcss(N)")
+ dep_th_platform = (
+ dep_job.task.get("extra", {})
+ .get("treeherder", {})
+ .get("machine", {})
+ .get("platform", "")
+ )
+ treeherder.setdefault("platform", f"{dep_th_platform}/opt")
+ treeherder.setdefault("tier", 1)
+ treeherder.setdefault("kind", "build")
+
+ label = job["label"]
+ build_platform = attributes.get("build_platform")
+
+ description = "Beetmover submission of checksums for source file"
+
+ extra = {}
+ if "devedition" in build_platform:
+ extra["product"] = "devedition"
+ else:
+ extra["product"] = "firefox"
+
+ dependencies = {dep_job.kind: dep_job.label}
+ for k, v in dep_job.dependencies.items():
+ if k.startswith("beetmover"):
+ dependencies[k] = v
+
+ attributes = copy_attributes_from_dependent_job(dep_job)
+ attributes.update(job.get("attributes", {}))
+
+ bucket_scope = get_beetmover_bucket_scope(config)
+ action_scope = get_beetmover_action_scope(config)
+
+ task = {
+ "label": label,
+ "description": description,
+ "worker-type": "beetmover",
+ "scopes": [bucket_scope, action_scope],
+ "dependencies": dependencies,
+ "attributes": attributes,
+ "run-on-projects": dep_job.attributes.get("run_on_projects"),
+ "treeherder": treeherder,
+ "extra": extra,
+ }
+
+ if "shipping-phase" in job:
+ task["shipping-phase"] = job["shipping-phase"]
+
+ if "shipping-product" in job:
+ task["shipping-product"] = job["shipping-product"]
+
+ yield task
+
+
+@transforms.add
+def make_beetmover_checksums_worker(config, jobs):
+ for job in jobs:
+ valid_beetmover_job = len(job["dependencies"]) == 2
+ if not valid_beetmover_job:
+ raise NotImplementedError("Beetmover checksums must have two dependencies.")
+
+ locale = job["attributes"].get("locale")
+ platform = job["attributes"]["build_platform"]
+
+ refs = {
+ "beetmover": None,
+ "signing": None,
+ }
+ for dependency in job["dependencies"].keys():
+ if dependency.startswith("beetmover"):
+ refs["beetmover"] = f"<{dependency}>"
+ else:
+ refs["signing"] = f"<{dependency}>"
+ if None in refs.values():
+ raise NotImplementedError(
+ "Beetmover checksums must have a beetmover and signing dependency!"
+ )
+
+ worker = {
+ "implementation": "beetmover",
+ "release-properties": craft_release_properties(config, job),
+ "upstream-artifacts": generate_beetmover_upstream_artifacts(
+ config, job, platform, locale
+ ),
+ "artifact-map": generate_beetmover_artifact_map(
+ config, job, platform=platform
+ ),
+ }
+
+ if locale:
+ worker["locale"] = locale
+ job["worker"] = worker
+
+ yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/bootstrap.py b/taskcluster/gecko_taskgraph/transforms/bootstrap.py
new file mode 100644
index 0000000000..e4537cab01
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/bootstrap.py
@@ -0,0 +1,132 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import Schema
+from voluptuous import Any, Optional, Required
+
+transforms = TransformSequence()
+
+bootstrap_schema = Schema(
+ {
+ # Name of the bootstrap task.
+ Required("name"): str,
+ # Name of the docker image. Ideally, we'd also have tasks for mac and windows,
+ # but we unfortunately don't have workers barebones enough for such testing
+ # to be satisfactory.
+ Required("image"): Any(str, {"in-tree": str}),
+ # Initialization commands.
+ Required("pre-commands"): [str],
+ # relative path (from config.path) to the file task was defined in
+ Optional("job-from"): str,
+ }
+)
+
+
+transforms.add_validate(bootstrap_schema)
+
+
+@transforms.add
+def bootstrap_tasks(config, tasks):
+ for task in tasks:
+ name = task.pop("name")
+ image = task.pop("image")
+ pre_commands = task.pop("pre-commands")
+
+ head_repo = config.params["head_repository"]
+ head_rev = config.params["head_rev"]
+
+ # Get all the non macos/windows local toolchains (the only ones bootstrap can use),
+ # and use them as dependencies for the tasks we create, so that they don't start
+ # before any potential toolchain task that would be triggered on the same push
+ # (which would lead to bootstrap failing).
+ dependencies = {
+ name: name
+ for name, task in config.kind_dependencies_tasks.items()
+ if task.attributes.get("local-toolchain")
+ and not name.startswith(("toolchain-macos", "toolchain-win"))
+ }
+ # We don't test the artifacts variants, or js, because they are essentially subsets.
+ # Mobile and browser are different enough to warrant testing them separately.
+ for app in ("browser", "mobile_android"):
+ commands = pre_commands + [
+ # MOZ_AUTOMATION changes the behavior, and we want something closer to user
+ # machines.
+ "unset MOZ_AUTOMATION",
+ f"curl -O {head_repo}/raw-file/{head_rev}/python/mozboot/bin/bootstrap.py",
+ f"python3 bootstrap.py --no-interactive --application-choice {app}",
+ "cd mozilla-unified",
+ # After bootstrap, configure should go through without its own auto-bootstrap.
+ "./mach configure --disable-bootstrap",
+ # Then a build should go through too.
+ "./mach build",
+ ]
+
+ os_specific = []
+ if app == "mobile_android":
+ os_specific += ["android*"]
+ for os, filename in (
+ ("debian", "debian.py"),
+ ("ubuntu", "debian.py"),
+ ("fedora", "centosfedora.py"),
+ ("rockylinux", "centosfedora.py"),
+ ("opensuse", "opensuse.py"),
+ ("gentoo", "gentoo.py"),
+ ("archlinux", "archlinux.py"),
+ ("voidlinux", "void.py"),
+ ):
+ if name.startswith(os):
+ os_specific.append(filename)
+ break
+ else:
+ raise Exception(f"Missing OS specific bootstrap file for {name}")
+
+ taskdesc = {
+ "label": f"{config.kind}-{name}-{app}",
+ "description": f"Bootstrap {app} build on {name}",
+ "always-target": True,
+ "scopes": [],
+ "treeherder": {
+ "symbol": f"Boot({name})",
+ "platform": {
+ "browser": "linux64/opt",
+ "mobile_android": "android-5-0-armv7/opt",
+ }[app],
+ "kind": "other",
+ "tier": 2,
+ },
+ "run-on-projects": ["trunk"],
+ "worker-type": "b-linux-gcp",
+ "worker": {
+ "implementation": "docker-worker",
+ "docker-image": image,
+ "os": "linux",
+ "env": {
+ "GECKO_HEAD_REPOSITORY": head_repo,
+ "GECKO_HEAD_REV": head_rev,
+ "MACH_NO_TERMINAL_FOOTER": "1",
+ "MOZ_SCM_LEVEL": config.params["level"],
+ },
+ "command": ["sh", "-c", "-x", "-e", " && ".join(commands)],
+ "max-run-time": 7200,
+ },
+ "dependencies": dependencies,
+ "optimization": {
+ "skip-unless-changed": [
+ "python/mozboot/bin/bootstrap.py",
+ "python/mozboot/mozboot/base.py",
+ "python/mozboot/mozboot/bootstrap.py",
+ "python/mozboot/mozboot/linux_common.py",
+ "python/mozboot/mozboot/mach_commands.py",
+ "python/mozboot/mozboot/mozconfig.py",
+ "python/mozboot/mozboot/rust.py",
+ "python/mozboot/mozboot/sccache.py",
+ "python/mozboot/mozboot/util.py",
+ ]
+ + [f"python/mozboot/mozboot/{f}" for f in os_specific]
+ },
+ }
+
+ yield taskdesc
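+
+
+ # Illustrative note: the command list is joined with " && ", so with a
+ # hypothetical pre-command list ["apt-get update"] the worker effectively
+ # runs
+ #     sh -c -x -e "apt-get update && unset MOZ_AUTOMATION && curl ... && ..."
+ # and the first failing step aborts the whole bootstrap task.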
diff --git a/taskcluster/gecko_taskgraph/transforms/bouncer_aliases.py b/taskcluster/gecko_taskgraph/transforms/bouncer_aliases.py
new file mode 100644
index 0000000000..ec2e89ea6d
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/bouncer_aliases.py
@@ -0,0 +1,108 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Add from parameters.yml into bouncer submission tasks.
+"""
+
+
+import logging
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+
+from gecko_taskgraph.transforms.bouncer_submission import craft_bouncer_product_name
+from gecko_taskgraph.transforms.bouncer_submission_partners import (
+ craft_partner_bouncer_product_name,
+)
+from gecko_taskgraph.util.attributes import release_level
+from gecko_taskgraph.util.partners import get_partners_to_be_published
+from gecko_taskgraph.util.scriptworker import get_release_config
+
+logger = logging.getLogger(__name__)
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def make_task_worker(config, jobs):
+ for job in jobs:
+ resolve_keyed_by(
+ job,
+ "worker-type",
+ item_name=job["name"],
+ **{"release-level": release_level(config.params["project"])},
+ )
+ resolve_keyed_by(
+ job,
+ "scopes",
+ item_name=job["name"],
+ **{"release-level": release_level(config.params["project"])},
+ )
+ resolve_keyed_by(
+ job,
+ "bouncer-products-per-alias",
+ item_name=job["name"],
+ **{"release-type": config.params["release_type"]},
+ )
+ if "partner-bouncer-products-per-alias" in job:
+ resolve_keyed_by(
+ job,
+ "partner-bouncer-products-per-alias",
+ item_name=job["name"],
+ **{"release-type": config.params["release_type"]},
+ )
+
+ job["worker"]["entries"] = craft_bouncer_entries(config, job)
+
+ del job["bouncer-products-per-alias"]
+ if "partner-bouncer-products-per-alias" in job:
+ del job["partner-bouncer-products-per-alias"]
+
+ if job["worker"]["entries"]:
+ yield job
+ else:
+ logger.warning(
+ 'No bouncer entries defined in bouncer submission task for "{}". \
+Job deleted.'.format(
+ job["name"]
+ )
+ )
+
+
+def craft_bouncer_entries(config, job):
+ release_config = get_release_config(config)
+
+ product = job["shipping-product"]
+ current_version = release_config["version"]
+ bouncer_products_per_alias = job["bouncer-products-per-alias"]
+
+ entries = {
+ bouncer_alias: craft_bouncer_product_name(
+ product,
+ bouncer_product,
+ current_version,
+ )
+ for bouncer_alias, bouncer_product in bouncer_products_per_alias.items()
+ }
+
+ partner_bouncer_products_per_alias = job.get("partner-bouncer-products-per-alias")
+ if partner_bouncer_products_per_alias:
+ partners = get_partners_to_be_published(config)
+ for partner, sub_config_name, _ in partners:
+ entries.update(
+ {
+ bouncer_alias.replace(
+ "PARTNER", f"{partner}-{sub_config_name}"
+ ): craft_partner_bouncer_product_name(
+ product,
+ bouncer_product,
+ current_version,
+ partner,
+ sub_config_name,
+ )
+ for bouncer_alias, bouncer_product in partner_bouncer_products_per_alias.items() # NOQA: E501
+ }
+ )
+
+ return entries
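+
+
+ # Illustrative result with a hypothetical alias and version: given
+ # bouncer-products-per-alias {"firefox-latest": "installer"} and version
+ # "109.0", craft_bouncer_entries returns {"firefox-latest": "Firefox-109.0"}.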
diff --git a/taskcluster/gecko_taskgraph/transforms/bouncer_check.py b/taskcluster/gecko_taskgraph/transforms/bouncer_check.py
new file mode 100644
index 0000000000..4b5d72da51
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/bouncer_check.py
@@ -0,0 +1,111 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import json
+import logging
+from shlex import quote as shell_quote
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+
+from gecko_taskgraph.util.attributes import release_level
+from gecko_taskgraph.util.scriptworker import get_release_config
+
+logger = logging.getLogger(__name__)
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def add_command(config, jobs):
+ for job in jobs:
+ command = [
+ "python",
+ "testing/mozharness/scripts/release/bouncer_check.py",
+ ]
+ job["run"].update(
+ {
+ "using": "mach",
+ "mach": command,
+ }
+ )
+ yield job
+
+
+@transforms.add
+def add_previous_versions(config, jobs):
+ release_config = get_release_config(config)
+ if not release_config.get("partial_versions"):
+ for job in jobs:
+ yield job
+ else:
+ extra_params = []
+ for partial in release_config["partial_versions"].split(","):
+ extra_params.append(
+ "--previous-version={}".format(partial.split("build")[0].strip())
+ )
+
+ for job in jobs:
+ job["run"]["mach"].extend(extra_params)
+ yield job
+
+
+@transforms.add
+def handle_keyed_by(config, jobs):
+ """Resolve fields that can be keyed by project, etc."""
+ fields = [
+ "run.config",
+ "run.product-field",
+ "run.extra-config",
+ ]
+
+ release_config = get_release_config(config)
+ version = release_config["version"]
+
+ for job in jobs:
+ for field in fields:
+ resolve_keyed_by(
+ item=job,
+ field=field,
+ item_name=job["name"],
+ **{
+ "project": config.params["project"],
+ "release-level": release_level(config.params["project"]),
+ "release-type": config.params["release_type"],
+ },
+ )
+
+ for cfg in job["run"]["config"]:
+ job["run"]["mach"].extend(["--config", cfg])
+
+ if config.kind == "cron-bouncer-check":
+ job["run"]["mach"].extend(
+ [
+ "--product-field={}".format(job["run"]["product-field"]),
+ "--products-url={}".format(job["run"]["products-url"]),
+ ]
+ )
+ del job["run"]["product-field"]
+ del job["run"]["products-url"]
+ elif config.kind == "release-bouncer-check":
+ job["run"]["mach"].append(f"--version={version}")
+
+ del job["run"]["config"]
+
+ if "extra-config" in job["run"]:
+ env = job["worker"].setdefault("env", {})
+ env["EXTRA_MOZHARNESS_CONFIG"] = json.dumps(
+ job["run"]["extra-config"], sort_keys=True
+ )
+ del job["run"]["extra-config"]
+
+ yield job
+
+
+@transforms.add
+def command_to_string(config, jobs):
+ """Convert command to string to make it work properly with run-task"""
+ for job in jobs:
+ job["run"]["mach"] = " ".join(map(shell_quote, job["run"]["mach"]))
+ yield job
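+
+
+ # Illustrative example: shell_quote only quotes arguments that need it, so a
+ # hypothetical command list
+ #     ["python", "testing/mozharness/scripts/release/bouncer_check.py",
+ #      "--version=109.0"]
+ # becomes the single string
+ #     python testing/mozharness/scripts/release/bouncer_check.py --version=109.0
+ # with any argument containing spaces or shell metacharacters single-quoted.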
diff --git a/taskcluster/gecko_taskgraph/transforms/bouncer_locations.py b/taskcluster/gecko_taskgraph/transforms/bouncer_locations.py
new file mode 100644
index 0000000000..e755b73c27
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/bouncer_locations.py
@@ -0,0 +1,35 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import logging
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+
+logger = logging.getLogger(__name__)
+
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def make_task_worker(config, jobs):
+ for job in jobs:
+ resolve_keyed_by(
+ job, "worker-type", item_name=job["name"], project=config.params["project"]
+ )
+ resolve_keyed_by(
+ job, "scopes", item_name=job["name"], project=config.params["project"]
+ )
+ resolve_keyed_by(
+ job,
+ "bouncer-products",
+ item_name=job["name"],
+ project=config.params["project"],
+ )
+
+ job["worker"]["bouncer-products"] = job["bouncer-products"]
+
+ del job["bouncer-products"]
+ yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/bouncer_submission.py b/taskcluster/gecko_taskgraph/transforms/bouncer_submission.py
new file mode 100644
index 0000000000..fb5b17d3b3
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/bouncer_submission.py
@@ -0,0 +1,335 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Add from parameters.yml into bouncer submission tasks.
+"""
+
+
+import copy
+import logging
+
+import attr
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+
+from gecko_taskgraph.transforms.l10n import parse_locales_file
+from gecko_taskgraph.util.attributes import release_level
+from gecko_taskgraph.util.scriptworker import get_release_config
+
+logger = logging.getLogger(__name__)
+
+
+FTP_PLATFORMS_PER_BOUNCER_PLATFORM = {
+ "linux": "linux-i686",
+ "linux64": "linux-x86_64",
+ "osx": "mac",
+ "win": "win32",
+ "win64": "win64",
+ "win64-aarch64": "win64-aarch64",
+}
+
+# :lang is interpolated by bouncer at runtime
+CANDIDATES_PATH_TEMPLATE = "/{ftp_product}/candidates/{version}-candidates/build{build_number}/\
+{update_folder}{ftp_platform}/:lang/{file}"
+RELEASES_PATH_TEMPLATE = "/{ftp_product}/releases/{version}/\
+{update_folder}{ftp_platform}/:lang/{file}"
+
+
+CONFIG_PER_BOUNCER_PRODUCT = {
+ "complete-mar": {
+ "name_postfix": "-Complete",
+ "path_template": RELEASES_PATH_TEMPLATE,
+ "file_names": {
+ "default": "{product}-{version}.complete.mar",
+ },
+ },
+ "complete-mar-candidates": {
+ "name_postfix": "build{build_number}-Complete",
+ "path_template": CANDIDATES_PATH_TEMPLATE,
+ "file_names": {
+ "default": "{product}-{version}.complete.mar",
+ },
+ },
+ "installer": {
+ "path_template": RELEASES_PATH_TEMPLATE,
+ "file_names": {
+ "linux": "{product}-{version}.tar.bz2",
+ "linux64": "{product}-{version}.tar.bz2",
+ "osx": "{pretty_product}%20{version}.dmg",
+ "win": "{pretty_product}%20Setup%20{version}.exe",
+ "win64": "{pretty_product}%20Setup%20{version}.exe",
+ "win64-aarch64": "{pretty_product}%20Setup%20{version}.exe",
+ },
+ },
+ "partial-mar": {
+ "name_postfix": "-Partial-{previous_version}",
+ "path_template": RELEASES_PATH_TEMPLATE,
+ "file_names": {
+ "default": "{product}-{previous_version}-{version}.partial.mar",
+ },
+ },
+ "partial-mar-candidates": {
+ "name_postfix": "build{build_number}-Partial-{previous_version}build{previous_build}",
+ "path_template": CANDIDATES_PATH_TEMPLATE,
+ "file_names": {
+ "default": "{product}-{previous_version}-{version}.partial.mar",
+ },
+ },
+ "stub-installer": {
+ "name_postfix": "-stub",
+ # We currently ship a single win32 stub installer that is used on all
+ # Windows platforms to pick between the full installers
+ "path_template": RELEASES_PATH_TEMPLATE.replace("{ftp_platform}", "win32"),
+ "file_names": {
+ "win": "{pretty_product}%20Installer.exe",
+ "win64": "{pretty_product}%20Installer.exe",
+ "win64-aarch64": "{pretty_product}%20Installer.exe",
+ },
+ },
+ "msi": {
+ "name_postfix": "-msi-SSL",
+ "path_template": RELEASES_PATH_TEMPLATE,
+ "file_names": {
+ "win": "{pretty_product}%20Setup%20{version}.msi",
+ "win64": "{pretty_product}%20Setup%20{version}.msi",
+ },
+ },
+ "msix": {
+ "name_postfix": "-msix-SSL",
+ "path_template": RELEASES_PATH_TEMPLATE.replace(":lang", "multi"),
+ "file_names": {
+ "win": "{pretty_product}%20Setup%20{version}.msix",
+ "win64": "{pretty_product}%20Setup%20{version}.msix",
+ },
+ },
+ "pkg": {
+ "name_postfix": "-pkg-SSL",
+ "path_template": RELEASES_PATH_TEMPLATE,
+ "file_names": {
+ "osx": "{pretty_product}%20{version}.pkg",
+ },
+ },
+ "langpack": {
+ "name_postfix": "-langpack-SSL",
+ "path_template": RELEASES_PATH_TEMPLATE.replace(":lang", "xpi"),
+ "file_names": {"default": ":lang.xpi"},
+ },
+}
+CONFIG_PER_BOUNCER_PRODUCT["installer-ssl"] = copy.deepcopy(
+ CONFIG_PER_BOUNCER_PRODUCT["installer"]
+)
+CONFIG_PER_BOUNCER_PRODUCT["installer-ssl"]["name_postfix"] = "-SSL"
+
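+ # Illustrative resolved path with a hypothetical version: the "installer"
+ # template for bouncer platform "linux64" and Firefox 109.0 expands to
+ #     /firefox/releases/109.0/linux-x86_64/:lang/firefox-109.0.tar.bz2
+ # where ":lang" is left verbatim for bouncer to interpolate at download time.
+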
+transforms = TransformSequence()
+
+
+@transforms.add
+def make_task_worker(config, jobs):
+ for job in jobs:
+ resolve_keyed_by(
+ job,
+ "worker-type",
+ item_name=job["name"],
+ **{"release-level": release_level(config.params["project"])}
+ )
+ resolve_keyed_by(
+ job,
+ "scopes",
+ item_name=job["name"],
+ **{"release-level": release_level(config.params["project"])}
+ )
+ resolve_keyed_by(
+ job,
+ "bouncer-products",
+ item_name=job["name"],
+ **{"release-type": config.params["release_type"]}
+ )
+
+ # No need to filter out ja-JP-mac, we need to upload both; but we do
+ # need to filter out the platforms they come with
+ all_locales = sorted(
+ locale
+ for locale in parse_locales_file(job["locales-file"]).keys()
+ if locale not in ("linux", "win32", "osx")
+ )
+
+ job["worker"]["locales"] = all_locales
+ job["worker"]["entries"] = craft_bouncer_entries(config, job)
+
+ del job["locales-file"]
+ del job["bouncer-platforms"]
+ del job["bouncer-products"]
+
+ if job["worker"]["entries"]:
+ yield job
+ else:
+ logger.warning(
+ 'No bouncer entries defined in bouncer submission task for "{}". \
+Job deleted.'.format(
+ job["name"]
+ )
+ )
+
+
+def craft_bouncer_entries(config, job):
+ release_config = get_release_config(config)
+
+ product = job["shipping-product"]
+ bouncer_platforms = job["bouncer-platforms"]
+
+ current_version = release_config["version"]
+ current_build_number = release_config["build_number"]
+
+ bouncer_products = job["bouncer-products"]
+ previous_versions_string = release_config.get("partial_versions", None)
+ if previous_versions_string:
+ previous_versions = previous_versions_string.split(", ")
+ else:
+ logger.warning(
+ 'No partials defined! Bouncer submission task won\'t send any \
+partial-related entry for "{}"'.format(
+ job["name"]
+ )
+ )
+ bouncer_products = [
+ bouncer_product
+ for bouncer_product in bouncer_products
+ if "partial" not in bouncer_product
+ ]
+ previous_versions = [None]
+
+ project = config.params["project"]
+
+ return {
+ craft_bouncer_product_name(
+ product,
+ bouncer_product,
+ current_version,
+ current_build_number,
+ previous_version,
+ ): {
+ "options": {
+ "add_locales": False if "msix" in bouncer_product else True,
+ "ssl_only": craft_ssl_only(bouncer_product, project),
+ },
+ "paths_per_bouncer_platform": craft_paths_per_bouncer_platform(
+ product,
+ bouncer_product,
+ bouncer_platforms,
+ current_version,
+ current_build_number,
+ previous_version,
+ ),
+ }
+ for bouncer_product in bouncer_products
+ for previous_version in previous_versions
+ }
+
+
+def craft_paths_per_bouncer_platform(
+ product,
+ bouncer_product,
+ bouncer_platforms,
+ current_version,
+ current_build_number,
+ previous_version=None,
+):
+ paths_per_bouncer_platform = {}
+ for bouncer_platform in bouncer_platforms:
+ file_names_per_platform = CONFIG_PER_BOUNCER_PRODUCT[bouncer_product][
+ "file_names"
+ ]
+ file_name_template = file_names_per_platform.get(
+ bouncer_platform, file_names_per_platform.get("default", None)
+ )
+ if not file_name_template:
+            # Some bouncer products, like stub-installer, are Windows-only,
+            # so no default value is defined for them.
+ continue
+
+ file_name_product = _craft_filename_product(product)
+ file_name = file_name_template.format(
+ product=file_name_product,
+ pretty_product=file_name_product.capitalize(),
+ version=current_version,
+ previous_version=split_build_data(previous_version)[0],
+ )
+
+ path_template = CONFIG_PER_BOUNCER_PRODUCT[bouncer_product]["path_template"]
+ file_relative_location = path_template.format(
+ ftp_product=_craft_ftp_product(product),
+ version=current_version,
+ build_number=current_build_number,
+ update_folder="update/" if "-mar" in bouncer_product else "",
+ ftp_platform=FTP_PLATFORMS_PER_BOUNCER_PLATFORM[bouncer_platform],
+ file=file_name,
+ )
+
+ paths_per_bouncer_platform[bouncer_platform] = file_relative_location
+
+ return paths_per_bouncer_platform
+
+
+def _craft_ftp_product(product):
+ return product.lower()
+
+
+def _craft_filename_product(product):
+ return "firefox" if product == "devedition" else product
+
+
+@attr.s
+class InvalidSubstitution:
+ error = attr.ib(type=str)
+
+    def __str__(self):
+        # Raise the stored message so a stray substitution fails loudly.
+        raise Exception(self.error)
+
+
+def craft_bouncer_product_name(
+ product,
+ bouncer_product,
+ current_version,
+ current_build_number=None,
+ previous_version=None,
+):
+ if previous_version is None:
+ previous_version = previous_build = InvalidSubstitution(
+ "Partial is being processed, but no previous version defined."
+ )
+ else:
+ previous_version, previous_build = split_build_data(previous_version)
+ postfix = (
+ CONFIG_PER_BOUNCER_PRODUCT[bouncer_product]
+ .get("name_postfix", "")
+ .format(
+ build_number=current_build_number,
+ previous_version=previous_version,
+ previous_build=previous_build,
+ )
+ )
+
+ return "{product}-{version}{postfix}".format(
+ product=product.capitalize(), version=current_version, postfix=postfix
+ )
+
+
+def craft_ssl_only(bouncer_product, project):
+ # XXX ESR is the only channel where we force serve the installer over SSL
+ if "-esr" in project and bouncer_product == "installer":
+ return True
+
+ return bouncer_product not in (
+ "complete-mar",
+ "complete-mar-candidates",
+ "installer",
+ "partial-mar",
+ "partial-mar-candidates",
+ )
+
+
+def split_build_data(version):
+ if version and "build" in version:
+ return version.split("build")
+ return version, InvalidSubstitution("k")
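To make the naming and version-splitting concrete, here is a rough sketch using only helpers from this file; the product and version values are made up:

    # Made-up inputs, for illustration only.
    split_build_data("123.0build2")
    # -> ["123.0", "2"]: the version string is split on the literal "build"

    craft_bouncer_product_name("firefox", "pkg", "124.0.1")
    # -> "Firefox-124.0.1-pkg-SSL": capitalized product, the version, then
    #    the "pkg" entry's name_postfix from CONFIG_PER_BOUNCER_PRODUCT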
diff --git a/taskcluster/gecko_taskgraph/transforms/bouncer_submission_partners.py b/taskcluster/gecko_taskgraph/transforms/bouncer_submission_partners.py
new file mode 100644
index 0000000000..0f298cb120
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/bouncer_submission_partners.py
@@ -0,0 +1,193 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Add partner information from parameters.yml into bouncer submission tasks.
+"""
+
+
+import logging
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+
+from gecko_taskgraph.transforms.bouncer_submission import (
+ CONFIG_PER_BOUNCER_PRODUCT as CONFIG_PER_BOUNCER_PRODUCT_VANILLA,
+)
+from gecko_taskgraph.transforms.bouncer_submission import (
+ FTP_PLATFORMS_PER_BOUNCER_PLATFORM,
+ _craft_filename_product,
+ _craft_ftp_product,
+)
+from gecko_taskgraph.util.attributes import release_level
+from gecko_taskgraph.util.partners import (
+ check_if_partners_enabled,
+ get_partners_to_be_published,
+)
+from gecko_taskgraph.util.scriptworker import get_release_config
+
+logger = logging.getLogger(__name__)
+
+
+PARTNER_PLATFORMS_TO_BOUNCER = {
+ "linux-shippable": "linux",
+ "linux64-shippable": "linux64",
+ "macosx64-shippable": "osx",
+ "win32-shippable": "win",
+ "win64-shippable": "win64",
+ "win64-aarch64-shippable": "win64-aarch64",
+}
+
+# :lang is interpolated by bouncer at runtime
+RELEASES_PARTNERS_PATH_TEMPLATE = "/{ftp_product}/releases/partners/{partner}/{sub_config}/\
+{version}/{ftp_platform}/:lang/{file}"
+
+CONFIG_PER_BOUNCER_PRODUCT = {
+ "installer": {
+ "name_postfix": "-{partner}-{sub_config}",
+ "path_template": RELEASES_PARTNERS_PATH_TEMPLATE,
+ "file_names": CONFIG_PER_BOUNCER_PRODUCT_VANILLA["installer"]["file_names"],
+ },
+ "stub-installer": {
+ "name_postfix": "-{partner}-{sub_config}-stub",
+        # We currently have a single win32 stub installer that is used on
+        # all Windows platforms to select the appropriate full installer
+ "path_template": RELEASES_PARTNERS_PATH_TEMPLATE.replace(
+ "{ftp_platform}", "win32"
+ ),
+ "file_names": CONFIG_PER_BOUNCER_PRODUCT_VANILLA["stub-installer"][
+ "file_names"
+ ],
+ },
+}
+
+transforms = TransformSequence()
+transforms.add(check_if_partners_enabled)
+
+
+@transforms.add
+def make_task_worker(config, jobs):
+ for job in jobs:
+ resolve_keyed_by(
+ job,
+ "worker-type",
+ item_name=job["name"],
+ **{"release-level": release_level(config.params["project"])}
+ )
+ resolve_keyed_by(
+ job,
+ "scopes",
+ item_name=job["name"],
+ **{"release-level": release_level(config.params["project"])}
+ )
+ resolve_keyed_by(
+ job,
+ "bouncer-products",
+ item_name=job["name"],
+ **{"release-type": config.params["release_type"]}
+ )
+
+ # the schema requires at least one locale but this will not be used
+ job["worker"]["locales"] = ["fake"]
+ job["worker"]["entries"] = craft_bouncer_entries(config, job)
+
+ del job["locales-file"]
+ del job["bouncer-platforms"]
+ del job["bouncer-products"]
+
+ if job["worker"]["entries"]:
+ yield job
+
+
+def craft_bouncer_entries(config, job):
+ release_config = get_release_config(config)
+
+ product = job["shipping-product"]
+ current_version = release_config["version"]
+ bouncer_products = job["bouncer-products"]
+
+ partners = get_partners_to_be_published(config)
+ entries = {}
+ for partner, sub_config_name, platforms in partners:
+ platforms = [PARTNER_PLATFORMS_TO_BOUNCER[p] for p in platforms]
+ entries.update(
+ {
+ craft_partner_bouncer_product_name(
+ product, bouncer_product, current_version, partner, sub_config_name
+ ): {
+ "options": {
+ "add_locales": False, # partners may use different sets of locales
+ "ssl_only": craft_ssl_only(bouncer_product),
+ },
+ "paths_per_bouncer_platform": craft_paths_per_bouncer_platform(
+ product,
+ bouncer_product,
+ platforms,
+ current_version,
+ partner,
+ sub_config_name,
+ ),
+ }
+ for bouncer_product in bouncer_products
+ }
+ )
+ return entries
+
+
+def craft_paths_per_bouncer_platform(
+ product, bouncer_product, bouncer_platforms, current_version, partner, sub_config
+):
+ paths_per_bouncer_platform = {}
+ for bouncer_platform in bouncer_platforms:
+ file_names_per_platform = CONFIG_PER_BOUNCER_PRODUCT[bouncer_product][
+ "file_names"
+ ]
+ file_name_template = file_names_per_platform.get(
+ bouncer_platform, file_names_per_platform.get("default", None)
+ )
+ if not file_name_template:
+ # Some bouncer product like stub-installer are only meant to be on Windows.
+ # Thus no default value is defined there
+ continue
+
+ file_name_product = _craft_filename_product(product)
+ file_name = file_name_template.format(
+ product=file_name_product,
+ pretty_product=file_name_product.capitalize(),
+ version=current_version,
+ )
+
+ path_template = CONFIG_PER_BOUNCER_PRODUCT[bouncer_product]["path_template"]
+ file_relative_location = path_template.format(
+ ftp_product=_craft_ftp_product(product),
+ version=current_version,
+ ftp_platform=FTP_PLATFORMS_PER_BOUNCER_PLATFORM[bouncer_platform],
+ partner=partner,
+ sub_config=sub_config,
+ file=file_name,
+ )
+
+ paths_per_bouncer_platform[bouncer_platform] = file_relative_location
+
+ return paths_per_bouncer_platform
+
+
+def craft_partner_bouncer_product_name(
+ product, bouncer_product, current_version, partner, sub_config
+):
+ postfix = (
+ CONFIG_PER_BOUNCER_PRODUCT[bouncer_product]
+ .get("name_postfix", "")
+ .format(
+ partner=partner,
+ sub_config=sub_config,
+ )
+ )
+
+ return "{product}-{version}{postfix}".format(
+ product=product.capitalize(), version=current_version, postfix=postfix
+ )
+
+
+def craft_ssl_only(bouncer_product):
+ return bouncer_product == "stub-installer"
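For a concrete sense of the partner path template, here is a sketch with made-up partner values ("acme", "acme-001") and a hypothetical installer file name; note that `:lang` contains no braces, so `str.format` leaves it for bouncer to interpolate at runtime:

    RELEASES_PARTNERS_PATH_TEMPLATE.format(
        ftp_product="firefox",
        partner="acme",
        sub_config="acme-001",
        version="124.0.1",
        ftp_platform="win32",
        file="Firefox%20Setup%20124.0.1.exe",
    )
    # -> "/firefox/releases/partners/acme/acme-001/124.0.1/win32/:lang/"
    #    "Firefox%20Setup%20124.0.1.exe"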
diff --git a/taskcluster/gecko_taskgraph/transforms/build.py b/taskcluster/gecko_taskgraph/transforms/build.py
new file mode 100644
index 0000000000..4e73c5aef2
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/build.py
@@ -0,0 +1,235 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Apply some defaults and minor modifications to the jobs defined in the build
+kind.
+"""
+import logging
+
+from mozbuild.artifact_builds import JOB_CHOICES as ARTIFACT_JOBS
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+from taskgraph.util.treeherder import add_suffix
+
+from gecko_taskgraph.util.attributes import RELEASE_PROJECTS, is_try, release_level
+from gecko_taskgraph.util.workertypes import worker_type_implementation
+
+logger = logging.getLogger(__name__)
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def set_defaults(config, jobs):
+ """Set defaults, including those that differ per worker implementation"""
+ for job in jobs:
+ job["treeherder"].setdefault("kind", "build")
+ job["treeherder"].setdefault("tier", 1)
+ _, worker_os = worker_type_implementation(
+ config.graph_config, config.params, job["worker-type"]
+ )
+ worker = job.setdefault("worker", {})
+ worker.setdefault("env", {})
+ worker["chain-of-trust"] = True
+ yield job
+
+
+@transforms.add
+def stub_installer(config, jobs):
+ for job in jobs:
+ resolve_keyed_by(
+ job,
+ "stub-installer",
+ item_name=job["name"],
+ project=config.params["project"],
+ **{
+ "release-type": config.params["release_type"],
+ },
+ )
+ job.setdefault("attributes", {})
+ if job.get("stub-installer"):
+ job["attributes"]["stub-installer"] = job["stub-installer"]
+ job["worker"]["env"].update({"USE_STUB_INSTALLER": "1"})
+ if "stub-installer" in job:
+ del job["stub-installer"]
+ yield job
+
+
+@transforms.add
+def resolve_shipping_product(config, jobs):
+ for job in jobs:
+ resolve_keyed_by(
+ job,
+ "shipping-product",
+ item_name=job["name"],
+ **{
+ "release-type": config.params["release_type"],
+ },
+ )
+ yield job
+
+
+@transforms.add
+def update_channel(config, jobs):
+ keys = [
+ "run.update-channel",
+ "run.mar-channel-id",
+ "run.accepted-mar-channel-ids",
+ ]
+ for job in jobs:
+ job["worker"].setdefault("env", {})
+ for key in keys:
+ resolve_keyed_by(
+ job,
+ key,
+ item_name=job["name"],
+ **{
+ "project": config.params["project"],
+ "release-type": config.params["release_type"],
+ },
+ )
+ update_channel = job["run"].pop("update-channel", None)
+ if update_channel:
+ job["run"].setdefault("extra-config", {})["update_channel"] = update_channel
+ job["attributes"]["update-channel"] = update_channel
+ mar_channel_id = job["run"].pop("mar-channel-id", None)
+ if mar_channel_id:
+ job["attributes"]["mar-channel-id"] = mar_channel_id
+ job["worker"]["env"]["MAR_CHANNEL_ID"] = mar_channel_id
+ accepted_mar_channel_ids = job["run"].pop("accepted-mar-channel-ids", None)
+ if accepted_mar_channel_ids:
+ job["attributes"]["accepted-mar-channel-ids"] = accepted_mar_channel_ids
+ job["worker"]["env"]["ACCEPTED_MAR_CHANNEL_IDS"] = accepted_mar_channel_ids
+
+ yield job
+
+
+@transforms.add
+def mozconfig(config, jobs):
+ for job in jobs:
+ resolve_keyed_by(
+ job,
+ "run.mozconfig-variant",
+ item_name=job["name"],
+ **{
+ "release-type": config.params["release_type"],
+ },
+ )
+ mozconfig_variant = job["run"].pop("mozconfig-variant", None)
+ if mozconfig_variant:
+ job["run"].setdefault("extra-config", {})[
+ "mozconfig_variant"
+ ] = mozconfig_variant
+ yield job
+
+
+@transforms.add
+def use_artifact(config, jobs):
+ if is_try(config.params):
+ use_artifact = config.params["try_task_config"].get(
+ "use-artifact-builds", False
+ )
+ else:
+ use_artifact = False
+ for job in jobs:
+ if (
+ config.kind == "build"
+ and use_artifact
+ and job.get("index", {}).get("job-name") in ARTIFACT_JOBS
+ # If tests aren't packaged, then we are not able to rebuild all the packages
+ and job["worker"]["env"].get("MOZ_AUTOMATION_PACKAGE_TESTS") == "1"
+ ):
+ job["treeherder"]["symbol"] = add_suffix(job["treeherder"]["symbol"], "a")
+ job["worker"]["env"]["USE_ARTIFACT"] = "1"
+ job["attributes"]["artifact-build"] = True
+ yield job
+
+
+@transforms.add
+def use_profile_data(config, jobs):
+ for job in jobs:
+ use_pgo = job.pop("use-pgo", False)
+ disable_pgo = config.params["try_task_config"].get("disable-pgo", False)
+ artifact_build = job["attributes"].get("artifact-build")
+ if not use_pgo or disable_pgo or artifact_build:
+ yield job
+ continue
+
+        # If use-pgo is True, the task depends on the generate-profile task
+        # of the same name. Otherwise, the use-pgo field names the specific
+        # generate-profile task to use.
+ if use_pgo is True:
+ name = job["name"]
+ else:
+ name = use_pgo
+ dependencies = f"generate-profile-{name}"
+ job.setdefault("dependencies", {})["generate-profile"] = dependencies
+ job.setdefault("fetches", {})["generate-profile"] = ["profdata.tar.xz"]
+ job["worker"]["env"].update({"TASKCLUSTER_PGO_PROFILE_USE": "1"})
+
+ _, worker_os = worker_type_implementation(
+ config.graph_config, config.params, job["worker-type"]
+ )
+ if worker_os == "linux":
+ # LTO linkage needs more open files than the default from run-task.
+ job["worker"]["env"].update({"MOZ_LIMIT_NOFILE": "8192"})
+
+ if job.get("use-sccache"):
+ raise Exception(
+ "use-sccache is incompatible with use-pgo in {}".format(job["name"])
+ )
+
+ yield job
+
+
+@transforms.add
+def resolve_keys(config, jobs):
+ for job in jobs:
+ resolve_keyed_by(
+ job,
+ "use-sccache",
+ item_name=job["name"],
+ **{"release-level": release_level(config.params["project"])},
+ )
+ yield job
+
+
+@transforms.add
+def enable_full_crashsymbols(config, jobs):
+ """Enable full crashsymbols on jobs with
+ 'enable-full-crashsymbols' set to True and on release branches, or
+ on try"""
+ branches = RELEASE_PROJECTS | {
+ "toolchains",
+ "try",
+ }
+ for job in jobs:
+ enable_full_crashsymbols = job["attributes"].get("enable-full-crashsymbols")
+ if enable_full_crashsymbols and config.params["project"] in branches:
+ logger.debug("Enabling full symbol generation for %s", job["name"])
+ job["worker"]["env"]["MOZ_ENABLE_FULL_SYMBOLS"] = "1"
+ else:
+ logger.debug("Disabling full symbol generation for %s", job["name"])
+ job["attributes"].pop("enable-full-crashsymbols", None)
+ yield job
+
+
+@transforms.add
+def set_expiry(config, jobs):
+ for job in jobs:
+ attributes = job["attributes"]
+ if (
+ "shippable" in attributes
+ and attributes["shippable"]
+ and config.kind
+ in {
+ "build",
+ }
+ ):
+ expiration_policy = "long"
+ else:
+ expiration_policy = "medium"
+
+ job["expiration-policy"] = expiration_policy
+ yield job
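Several transforms in this file rely on `resolve_keyed_by` from upstream taskgraph. A minimal sketch of its behavior, with a made-up job stanza:

    from taskgraph.util.schema import resolve_keyed_by

    job = {
        "name": "linux64/opt",
        "use-sccache": {"by-release-level": {"production": False, "staging": True}},
    }
    # Collapses the by-release-level stanza in place, picking "staging".
    resolve_keyed_by(
        job, "use-sccache", item_name=job["name"], **{"release-level": "staging"}
    )
    assert job["use-sccache"] is True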
diff --git a/taskcluster/gecko_taskgraph/transforms/build_attrs.py b/taskcluster/gecko_taskgraph/transforms/build_attrs.py
new file mode 100644
index 0000000000..9cda71718a
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/build_attrs.py
@@ -0,0 +1,50 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from taskgraph.transforms.base import TransformSequence
+
+from gecko_taskgraph.util.platforms import platform_family
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def set_build_attributes(config, jobs):
+ """
+ Set the build_platform and build_type attributes based on the job name.
+ Although not all jobs using this transform are actual "builds", the try
+ option syntax treats them as such, and this arranges the attributes
+ appropriately for that purpose.
+ """
+ for job in jobs:
+ build_platform, build_type = job["name"].split("/")
+
+ # pgo builds are represented as a different platform, type opt
+ if build_type == "pgo":
+ build_platform = build_platform + "-pgo"
+ build_type = "opt"
+
+ attributes = job.setdefault("attributes", {})
+ attributes.update(
+ {
+ "build_platform": build_platform,
+ "build_type": build_type,
+ }
+ )
+
+ yield job
+
+
+@transforms.add
+def set_schedules_optimization(config, jobs):
+ """Set the `skip-unless-affected` optimization based on the build platform."""
+ for job in jobs:
+        # don't add the build optimization if the job already defines a "when"
+ if "when" in job:
+ yield job
+ continue
+
+ build_platform = job["attributes"]["build_platform"]
+ job.setdefault("optimization", {"build": [platform_family(build_platform)]})
+ yield job
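A quick sketch of the name-splitting rule implemented above, with a made-up job name:

    build_platform, build_type = "linux64/pgo".split("/")
    if build_type == "pgo":
        build_platform, build_type = build_platform + "-pgo", "opt"
    assert (build_platform, build_type) == ("linux64-pgo", "opt")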
diff --git a/taskcluster/gecko_taskgraph/transforms/build_fat_aar.py b/taskcluster/gecko_taskgraph/transforms/build_fat_aar.py
new file mode 100644
index 0000000000..61df2111d2
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/build_fat_aar.py
@@ -0,0 +1,78 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import copy
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.taskcluster import get_artifact_prefix
+
+from gecko_taskgraph.util.declarative_artifacts import get_geckoview_upstream_artifacts
+
+transforms = TransformSequence()
+
+
+MOZ_ANDROID_FAT_AAR_ENV_MAP = {
+ "android-arm-shippable": "MOZ_ANDROID_FAT_AAR_ARMEABI_V7A",
+ "android-arm-shippable-lite": "MOZ_ANDROID_FAT_AAR_ARMEABI_V7A",
+ "android-aarch64-shippable": "MOZ_ANDROID_FAT_AAR_ARM64_V8A",
+ "android-aarch64-shippable-lite": "MOZ_ANDROID_FAT_AAR_ARM64_V8A",
+ "android-x86-shippable": "MOZ_ANDROID_FAT_AAR_X86",
+ "android-x86-shippable-lite": "MOZ_ANDROID_FAT_AAR_X86",
+ "android-x86_64-shippable": "MOZ_ANDROID_FAT_AAR_X86_64",
+ "android-x86_64-shippable-lite": "MOZ_ANDROID_FAT_AAR_X86_64",
+ "android-arm-opt": "MOZ_ANDROID_FAT_AAR_ARMEABI_V7A",
+ "android-aarch64-opt": "MOZ_ANDROID_FAT_AAR_ARM64_V8A",
+ "android-x86-opt": "MOZ_ANDROID_FAT_AAR_X86",
+ "android-x86_64-opt": "MOZ_ANDROID_FAT_AAR_X86_64",
+}
+
+
+@transforms.add
+def set_fetches_and_locations(config, jobs):
+ """Set defaults, including those that differ per worker implementation"""
+ for job in jobs:
+ dependencies = copy.deepcopy(job["dependencies"])
+
+ for platform, label in dependencies.items():
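+            # Temporarily narrow the job to this one dependency so the
+            # artifact lookup below resolves against it; the full mapping
+            # is restored after the loop.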
+ job["dependencies"] = {"build": label}
+
+ aar_location = _get_aar_location(config, job, platform)
+ prefix = get_artifact_prefix(job)
+ if not prefix.endswith("/"):
+ prefix = prefix + "/"
+ if aar_location.startswith(prefix):
+ aar_location = aar_location[len(prefix) :]
+
+ job.setdefault("fetches", {}).setdefault(platform, []).append(
+ {
+ "artifact": aar_location,
+ "extract": False,
+ }
+ )
+
+ aar_file_name = aar_location.split("/")[-1]
+ env_var = MOZ_ANDROID_FAT_AAR_ENV_MAP[platform]
+ job["worker"]["env"][env_var] = aar_file_name
+
+ job["dependencies"] = dependencies
+
+ yield job
+
+
+def _get_aar_location(config, job, platform):
+ artifacts_locations = []
+
+ for package in job["attributes"]["maven_packages"]:
+ artifacts_locations += get_geckoview_upstream_artifacts(
+ config, job, package, platform=platform
+ )
+
+ aar_locations = [
+ path for path in artifacts_locations[0]["paths"] if path.endswith(".aar")
+ ]
+ if len(aar_locations) != 1:
+ raise ValueError(f"Only a single AAR must be given. Got: {aar_locations}")
+
+ return aar_locations[0]
diff --git a/taskcluster/gecko_taskgraph/transforms/build_lints.py b/taskcluster/gecko_taskgraph/transforms/build_lints.py
new file mode 100644
index 0000000000..d1bd276059
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/build_lints.py
@@ -0,0 +1,59 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Lint the jobs defined in the build kind for configuration problems.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def check_mozharness_perfherder_options(config, jobs):
+ """Verify that multiple jobs don't use the same perfherder bucket.
+
+    Build jobs record Perfherder metrics by default, and those metrics go
+    to a bucket derived from the platform. The bucket name can further be
+    customized by "extra options" defined either in mozharness sub-configs
+    or in an environment variable.
+
+    This linter tries to verify that no two jobs will send Perfherder metrics
+    to the same bucket, by looking for jobs that don't define extra options
+    while their platform and mozharness config are otherwise identical.
+ """
+
+ SEEN_CONFIGS = {}
+
+ for job in jobs:
+ if job["run"]["using"] != "mozharness":
+ yield job
+ continue
+
+ worker = job.get("worker", {})
+
+ platform = job["treeherder"]["platform"]
+ primary_config = job["run"]["config"][0]
+ options = worker.get("env", {}).get("PERFHERDER_EXTRA_OPTIONS")
+ shippable = job.get("attributes", {}).get("shippable", False)
+
+ # This isn't strictly necessary. But the Perfherder code looking at the
+ # values we care about is only active on builds. So it doesn't make
+ # sense to run this linter elsewhere.
+ assert primary_config.startswith("builds/")
+
+ key = (platform, primary_config, shippable, options)
+
+ if key in SEEN_CONFIGS:
+ raise Exception(
+ "Non-unique Perfherder data collection for jobs %s-%s and %s: "
+ "set PERFHERDER_EXTRA_OPTIONS in worker environment variables "
+ "or use different mozconfigs"
+ % (config.kind, job["name"], SEEN_CONFIGS[key])
+ )
+
+ SEEN_CONFIGS[key] = "{}-{}".format(config.kind, job["name"])
+
+ yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/build_signing.py b/taskcluster/gecko_taskgraph/transforms/build_signing.py
new file mode 100644
index 0000000000..bea7ac9ff8
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/build_signing.py
@@ -0,0 +1,74 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the signing task into an actual task description.
+"""
+
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.dependencies import get_primary_dependency
+
+from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
+from gecko_taskgraph.util.signed_artifacts import (
+ generate_specifications_of_artifacts_to_sign,
+)
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def add_signed_routes(config, jobs):
+ """Add routes corresponding to the routes of the build task
+ this corresponds to, with .signed inserted, for all gecko.v2 routes"""
+
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ enable_signing_routes = job.pop("enable-signing-routes", True)
+
+ job["routes"] = []
+ if dep_job.attributes.get("shippable") and enable_signing_routes:
+ for dep_route in dep_job.task.get("routes", []):
+ if not dep_route.startswith("index.gecko.v2"):
+ continue
+ branch = dep_route.split(".")[3]
+ rest = ".".join(dep_route.split(".")[4:])
+ job["routes"].append(f"index.gecko.v2.{branch}.signed.{rest}")
+
+ yield job
+
+
+@transforms.add
+def define_upstream_artifacts(config, jobs):
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ upstream_artifact_task = job.pop("upstream-artifact-task", dep_job)
+
+ job.setdefault("attributes", {}).update(
+ copy_attributes_from_dependent_job(dep_job)
+ )
+
+ artifacts_specifications = generate_specifications_of_artifacts_to_sign(
+ config,
+ job,
+ keep_locale_template=False,
+ kind=config.kind,
+ dep_kind=upstream_artifact_task.kind,
+ )
+
+ task_ref = f"<{upstream_artifact_task.kind}>"
+ task_type = "build"
+ if "notarization" in upstream_artifact_task.kind:
+ task_type = "scriptworker"
+
+ job["upstream-artifacts"] = [
+ {
+ "taskId": {"task-reference": task_ref},
+ "taskType": task_type,
+ "paths": spec["artifacts"],
+ "formats": spec["formats"],
+ }
+ for spec in artifacts_specifications
+ ]
+
+ yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/cached_tasks.py b/taskcluster/gecko_taskgraph/transforms/cached_tasks.py
new file mode 100644
index 0000000000..bb7e6e6778
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/cached_tasks.py
@@ -0,0 +1,101 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+from collections import deque
+
+import taskgraph
+from taskgraph.transforms.base import TransformSequence
+
+from gecko_taskgraph.util.cached_tasks import add_optimization
+
+transforms = TransformSequence()
+
+
+def order_tasks(config, tasks):
+ """Iterate image tasks in an order where parent tasks come first."""
+ kind_prefix = config.kind + "-"
+
+ pending = deque(tasks)
+ task_labels = {task["label"] for task in pending}
+ emitted = set()
+ while True:
+ try:
+ task = pending.popleft()
+ except IndexError:
+ break
+ parents = {
+ task
+ for task in task.get("dependencies", {}).values()
+ if task.startswith(kind_prefix)
+ }
+ if parents and not emitted.issuperset(parents & task_labels):
+ pending.append(task)
+ continue
+ emitted.add(task["label"])
+ yield task
+
+
+def format_task_digest(cached_task):
+ return "/".join(
+ [
+ cached_task["type"],
+ cached_task["name"],
+ cached_task["digest"],
+ ]
+ )
+
+
+@transforms.add
+def cache_task(config, tasks):
+ if taskgraph.fast:
+ for task in tasks:
+ yield task
+ return
+
+ digests = {}
+ for task in config.kind_dependencies_tasks.values():
+ if (
+ "cached_task" in task.attributes
+ and task.attributes["cached_task"] is not False
+ ):
+ digests[task.label] = format_task_digest(task.attributes["cached_task"])
+
+ for task in order_tasks(config, tasks):
+ cache = task.pop("cache", None)
+ if cache is None:
+ yield task
+ continue
+
+ dependency_digests = []
+ for p in task.get("dependencies", {}).values():
+ if p in digests:
+ dependency_digests.append(digests[p])
+ elif config.params["project"] == "toolchains":
+ # The toolchains repository uses non-cached toolchain artifacts. Allow
+ # tasks to use them.
+ cache = None
+ break
+ else:
+ raise Exception(
+ "Cached task {} has uncached parent task: {}".format(
+ task["label"], p
+ )
+ )
+
+ if cache is None:
+ yield task
+ continue
+
+ digest_data = cache["digest-data"] + sorted(dependency_digests)
+ add_optimization(
+ config,
+ task,
+ cache_type=cache["type"],
+ cache_name=cache["name"],
+ digest_data=digest_data,
+ )
+ digests[task["label"]] = format_task_digest(task["attributes"]["cached_task"])
+
+ yield task
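To illustrate how digests chain, a sketch with a fabricated digest value:

    parent = {"type": "docker-images.v2", "name": "base", "digest": "ab" * 32}
    format_task_digest(parent)
    # -> "docker-images.v2/base/abab...": parent digests are appended
    #    (sorted) to a child's digest-data, so changing any ancestor's
    #    digest invalidates every cached descendant.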
diff --git a/taskcluster/gecko_taskgraph/transforms/chunk_partners.py b/taskcluster/gecko_taskgraph/transforms/chunk_partners.py
new file mode 100644
index 0000000000..b1ebdd4f12
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/chunk_partners.py
@@ -0,0 +1,78 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Chunk the partner repack tasks by subpartner and locale
+"""
+
+
+import copy
+
+from mozbuild.chunkify import chunkify
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.dependencies import get_primary_dependency
+
+from gecko_taskgraph.util.partners import (
+ apply_partner_priority,
+ get_repack_ids_by_platform,
+)
+
+transforms = TransformSequence()
+transforms.add(apply_partner_priority)
+
+
+@transforms.add
+def chunk_partners(config, jobs):
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ assert dep_job
+
+ build_platform = dep_job.attributes["build_platform"]
+ repack_id = dep_job.task.get("extra", {}).get("repack_id")
+ repack_ids = dep_job.task.get("extra", {}).get("repack_ids")
+ copy_repack_ids = job.pop("copy-repack-ids", False)
+
+ if copy_repack_ids:
+ assert repack_ids, "dep_job {} doesn't have repack_ids!".format(
+ dep_job.label
+ )
+ job.setdefault("extra", {})["repack_ids"] = repack_ids
+ yield job
+ # first downstream of the repack task, no chunking or fanout has been done yet
+ elif not any([repack_id, repack_ids]):
+ platform_repack_ids = get_repack_ids_by_platform(config, build_platform)
+ # we chunk mac signing
+ if config.kind in (
+ "release-partner-repack-signing",
+ "release-eme-free-repack-signing",
+ "release-eme-free-repack-mac-signing",
+ "release-partner-repack-mac-signing",
+ ):
+ repacks_per_chunk = job.get("repacks-per-chunk")
+ chunks, remainder = divmod(len(platform_repack_ids), repacks_per_chunk)
+ if remainder:
+                    chunks += 1
+ for this_chunk in range(1, chunks + 1):
+ chunk = chunkify(platform_repack_ids, this_chunk, chunks)
+ partner_job = copy.deepcopy(job)
+ partner_job.setdefault("extra", {}).setdefault("repack_ids", chunk)
+ partner_job["extra"]["repack_suffix"] = str(this_chunk)
+ yield partner_job
+ # linux and windows we fan out immediately to one task per partner-sub_partner-locale
+ else:
+ for repack_id in platform_repack_ids:
+                partner_job = copy.deepcopy(job)  # deep copy so fanned-out jobs don't share dicts
+ partner_job.setdefault("extra", {})
+ partner_job["extra"]["repack_id"] = repack_id
+ yield partner_job
+ # fan out chunked mac signing for repackage
+ elif repack_ids:
+ for repack_id in repack_ids:
+ partner_job = copy.deepcopy(job)
+ partner_job.setdefault("extra", {}).setdefault("repack_id", repack_id)
+ yield partner_job
+ # otherwise we've fully fanned out already, continue by passing repack_id on
+ else:
+ partner_job = copy.deepcopy(job)
+ partner_job.setdefault("extra", {}).setdefault("repack_id", repack_id)
+ yield partner_job
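The chunk count computed above is plain ceiling division, for example:

    # 23 repack ids at 10 repacks per chunk -> 3 chunks
    chunks, remainder = divmod(23, 10)  # (2, 3)
    if remainder:
        chunks += 1  # 3
    # chunkify() then returns slice `this_chunk` (1-based) of `chunks` total.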
diff --git a/taskcluster/gecko_taskgraph/transforms/code_review.py b/taskcluster/gecko_taskgraph/transforms/code_review.py
new file mode 100644
index 0000000000..d644e17d0e
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/code_review.py
@@ -0,0 +1,33 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Add soft dependencies and configuration to code-review tasks.
+"""
+
+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def add_dependencies(config, jobs):
+ for job in jobs:
+ job.setdefault("soft-dependencies", [])
+ job["soft-dependencies"] += [
+ dep_task.label
+ for dep_task in config.kind_dependencies_tasks.values()
+ if dep_task.attributes.get("code-review") is True
+ ]
+ yield job
+
+
+@transforms.add
+def add_phabricator_config(config, jobs):
+ for job in jobs:
+ diff = config.params.get("phabricator_diff")
+ if diff is not None:
+ code_review = job.setdefault("extra", {}).setdefault("code-review", {})
+ code_review["phabricator-diff"] = diff
+ yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/condprof.py b/taskcluster/gecko_taskgraph/transforms/condprof.py
new file mode 100644
index 0000000000..516c1d8f20
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/condprof.py
@@ -0,0 +1,91 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+This transform constructs tasks that generate conditioned profiles, as
+defined in the condprof/kind.yml file.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import Schema
+from voluptuous import Optional
+
+from gecko_taskgraph.transforms.job import job_description_schema
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.copy_task import copy_task
+
+diff_description_schema = Schema(
+ {
+ # default is settled, but add 'full' to get both
+ Optional("scenarios"): [str],
+ Optional("description"): task_description_schema["description"],
+ Optional("dependencies"): task_description_schema["dependencies"],
+ Optional("fetches"): job_description_schema["fetches"],
+ Optional("index"): task_description_schema["index"],
+ Optional("job-from"): str,
+ Optional("name"): str,
+ Optional("run"): job_description_schema["run"],
+ Optional("run-on-projects"): task_description_schema["run-on-projects"],
+ Optional("scopes"): task_description_schema["scopes"],
+ Optional("treeherder"): task_description_schema["treeherder"],
+ Optional("use-system-python"): bool,
+ Optional("worker"): job_description_schema["worker"],
+ Optional("worker-type"): task_description_schema["worker-type"],
+ }
+)
+
+transforms = TransformSequence()
+transforms.add_validate(diff_description_schema)
+
+
+@transforms.add
+def generate_scenarios(config, tasks):
+ for task in tasks:
+ cmds = task["run"]["command"]
+ symbol = task["treeherder"]["symbol"].split(")")[0]
+ index = task["index"]
+ jobname = index["job-name"]
+ label = task["name"]
+ run_as_root = task["run"].get("run-as-root", False)
+
+ for scenario in set(task["scenarios"]):
+ extra_args = ""
+ if scenario == "settled":
+ extra_args = " --force-new "
+
+ tcmd = cmds.replace("${EXTRA_ARGS}", extra_args)
+ tcmd = tcmd.replace("${SCENARIO}", scenario)
+
+ index["job-name"] = "%s-%s" % (jobname, scenario)
+
+ taskdesc = {
+ "name": "%s-%s" % (label, scenario),
+ "description": task["description"],
+ "treeherder": {
+ "symbol": "%s-%s)" % (symbol, scenario),
+ "platform": task["treeherder"]["platform"],
+ "kind": task["treeherder"]["kind"],
+ "tier": task["treeherder"]["tier"],
+ },
+ "worker-type": copy_task(task["worker-type"]),
+ "worker": copy_task(task["worker"]),
+ "index": copy_task(index),
+ "run": {
+ "using": "run-task",
+ "cwd": task["run"]["cwd"],
+ "checkout": task["run"]["checkout"],
+ "tooltool-downloads": copy_task(task["run"]["tooltool-downloads"]),
+ "command": tcmd,
+ "run-as-root": run_as_root,
+ },
+ "run-on-projects": copy_task(task["run-on-projects"]),
+ "scopes": copy_task(task["scopes"]),
+ "dependencies": copy_task(task["dependencies"]),
+ "fetches": copy_task(task["fetches"]),
+ }
+
+ use_system_python = task.get("use-system-python", None)
+ if use_system_python is not None:
+ taskdesc["use-system-python"] = use_system_python
+
+ yield taskdesc
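The scenario fan-out is plain string substitution on the command template. A sketch with a made-up command:

    cmds = "./mach python condprof.py ${EXTRA_ARGS} --scenario ${SCENARIO}"
    tcmd = cmds.replace("${EXTRA_ARGS}", " --force-new ")
    tcmd = tcmd.replace("${SCENARIO}", "settled")
    # -> "./mach python condprof.py  --force-new  --scenario settled"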
diff --git a/taskcluster/gecko_taskgraph/transforms/cross_channel.py b/taskcluster/gecko_taskgraph/transforms/cross_channel.py
new file mode 100644
index 0000000000..d491ffae4d
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/cross_channel.py
@@ -0,0 +1,44 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Build a command to run `mach l10n-cross-channel`.
+"""
+
+
+from shlex import quote as shell_quote
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def resolve_keys(config, jobs):
+ for job in jobs:
+ for item in ["ssh-key-secret", "run.actions"]:
+ resolve_keyed_by(job, item, item, **{"level": str(config.params["level"])})
+ yield job
+
+
+@transforms.add
+def build_command(config, jobs):
+ for job in jobs:
+ command = [
+ "l10n-cross-channel",
+ "-o",
+ "/builds/worker/artifacts/outgoing.diff",
+ "--attempts",
+ "5",
+ ]
+ ssh_key_secret = job.pop("ssh-key-secret")
+ if ssh_key_secret:
+ command.extend(["--ssh-secret", ssh_key_secret])
+ job.setdefault("scopes", []).append(f"secrets:get:{ssh_key_secret}")
+
+ command.extend(job["run"].pop("actions", []))
+ job.setdefault("run", {}).update(
+ {"using": "mach", "mach": " ".join(map(shell_quote, command))}
+ )
+ yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/diffoscope.py b/taskcluster/gecko_taskgraph/transforms/diffoscope.py
new file mode 100644
index 0000000000..b74dc5bb8f
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/diffoscope.py
@@ -0,0 +1,172 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+This transform constructs tasks that perform diffs between builds, as
+defined in kind.yml.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import Schema
+from taskgraph.util.taskcluster import get_artifact_path
+from voluptuous import Any, Optional, Required
+
+from gecko_taskgraph.transforms.task import task_description_schema
+
+index_or_string = Any(
+ str,
+ {Required("index-search"): str},
+)
+
+diff_description_schema = Schema(
+ {
+ # Name of the diff task.
+ Required("name"): str,
+ # Treeherder tier.
+ Required("tier"): int,
+ # Treeherder symbol.
+ Required("symbol"): str,
+ # relative path (from config.path) to the file the task was defined in.
+ Optional("job-from"): str,
+ # Original and new builds to compare.
+ Required("original"): index_or_string,
+ Required("new"): index_or_string,
+ # Arguments to pass to diffoscope, used for job-defaults in
+ # taskcluster/ci/diffoscope/kind.yml
+ Optional("args"): str,
+ # Extra arguments to pass to diffoscope, that can be set per job.
+ Optional("extra-args"): str,
+ # Fail the task when differences are detected.
+ Optional("fail-on-diff"): bool,
+ # What artifact to check the differences of. Defaults to target.tar.bz2
+ # for Linux, target.dmg for Mac, target.zip for Windows, target.apk for
+ # Android.
+ Optional("artifact"): str,
+ # Whether to unpack first. Diffoscope can normally work without unpacking,
+ # but when one needs to --exclude some contents, that doesn't work out well
+ # if said content is packed (e.g. in omni.ja).
+ Optional("unpack"): bool,
+ # Commands to run before performing the diff.
+ Optional("pre-diff-commands"): [str],
+ # Only run the task on a set of projects/branches.
+ Optional("run-on-projects"): task_description_schema["run-on-projects"],
+ Optional("optimization"): task_description_schema["optimization"],
+ }
+)
+
+transforms = TransformSequence()
+transforms.add_validate(diff_description_schema)
+
+
+@transforms.add
+def fill_template(config, tasks):
+ dummy_tasks = {}
+
+ for task in tasks:
+ name = task["name"]
+
+ deps = {}
+ urls = {}
+ previous_artifact = None
+ artifact = task.get("artifact")
+ for k in ("original", "new"):
+ value = task[k]
+ if isinstance(value, str):
+ deps[k] = value
+ dep_name = k
+ os_hint = value
+ else:
+ index = value["index-search"]
+ if index not in dummy_tasks:
+ dummy_tasks[index] = {
+ "label": "index-search-" + index,
+ "description": index,
+ "worker-type": "invalid/always-optimized",
+ "run": {
+ "using": "always-optimized",
+ },
+ "optimization": {
+ "index-search": [index],
+ },
+ }
+ yield dummy_tasks[index]
+ deps[index] = "index-search-" + index
+ dep_name = index
+ os_hint = index.split(".")[-1]
+ if artifact:
+ pass
+ elif "linux" in os_hint:
+ artifact = "target.tar.bz2"
+ elif "macosx" in os_hint:
+ artifact = "target.dmg"
+ elif "android" in os_hint:
+ artifact = "target.apk"
+ elif "win" in os_hint:
+ artifact = "target.zip"
+ else:
+ raise Exception(f"Cannot figure out the OS for {value!r}")
+ if previous_artifact is not None and previous_artifact != artifact:
+ raise Exception("Cannot compare builds from different OSes")
+ urls[k] = {
+ "artifact-reference": "<{}/{}>".format(
+ dep_name, get_artifact_path(task, artifact)
+ ),
+ }
+ previous_artifact = artifact
+
+ taskdesc = {
+ "label": "diff-" + name,
+ "description": name,
+ "treeherder": {
+ "symbol": task["symbol"],
+ "platform": "diff/opt",
+ "kind": "other",
+ "tier": task["tier"],
+ },
+ "worker-type": "b-linux-gcp",
+ "worker": {
+ "docker-image": {"in-tree": "diffoscope"},
+ "artifacts": [
+ {
+ "type": "file",
+ "path": f"/builds/worker/{f}",
+ "name": f"public/{f}",
+ }
+ for f in (
+ "diff.html",
+ "diff.txt",
+ )
+ ],
+ "env": {
+ "ORIG_URL": urls["original"],
+ "NEW_URL": urls["new"],
+ "DIFFOSCOPE_ARGS": " ".join(
+ task[k] for k in ("args", "extra-args") if k in task
+ ),
+ "PRE_DIFF": "; ".join(task.get("pre-diff-commands", [])),
+ },
+ "max-run-time": 1800,
+ },
+ "run": {
+ "using": "run-task",
+ "checkout": task.get("unpack", False),
+ "command": "/builds/worker/bin/get_and_diffoscope{}{}".format(
+ " --unpack" if task.get("unpack") else "",
+ " --fail" if task.get("fail-on-diff") else "",
+ ),
+ },
+ "dependencies": deps,
+ "optimization": task.get("optimization"),
+ }
+ if "run-on-projects" in task:
+ taskdesc["run-on-projects"] = task["run-on-projects"]
+
+ if artifact.endswith(".dmg"):
+ taskdesc.setdefault("fetches", {}).setdefault("toolchain", []).extend(
+ [
+ "linux64-cctools-port",
+ "linux64-libdmg",
+ ]
+ )
+
+ yield taskdesc
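For reference, a sketch of the two accepted forms of `original`/`new`; the index route and task label are hypothetical:

    task = {
        "original": {
            "index-search": "gecko.v2.mozilla-central.latest.firefox.linux64-opt"
        },
        "new": "build-linux64/opt",
    }
    # The index-search form yields an always-optimized dummy task that gets
    # replaced by the indexed task at optimization time. The OS hint is the
    # last dot-separated component ("linux64-opt"), so both sides resolve
    # to target.tar.bz2 here.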
diff --git a/taskcluster/gecko_taskgraph/transforms/docker_image.py b/taskcluster/gecko_taskgraph/transforms/docker_image.py
new file mode 100644
index 0000000000..635573aa78
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/docker_image.py
@@ -0,0 +1,210 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import json
+import logging
+import os
+import re
+
+import mozpack.path as mozpath
+import taskgraph
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import Schema
+from voluptuous import Optional, Required
+
+from gecko_taskgraph.util.docker import (
+ create_context_tar,
+ generate_context_hash,
+ image_path,
+)
+
+from .. import GECKO
+from .task import task_description_schema
+
+logger = logging.getLogger(__name__)
+
+CONTEXTS_DIR = "docker-contexts"
+
+DIGEST_RE = re.compile("^[0-9a-f]{64}$")
+
+IMAGE_BUILDER_IMAGE = (
+ "mozillareleases/image_builder:5.0.0"
+ "@sha256:"
+ "e510a9a9b80385f71c112d61b2f2053da625aff2b6d430411ac42e424c58953f"
+)
+
+transforms = TransformSequence()
+
+docker_image_schema = Schema(
+ {
+ # Name of the docker image.
+ Required("name"): str,
+ # Name of the parent docker image.
+ Optional("parent"): str,
+ # Treeherder symbol.
+ Required("symbol"): str,
+ # relative path (from config.path) to the file the docker image was defined
+ # in.
+ Optional("job-from"): str,
+ # Arguments to use for the Dockerfile.
+ Optional("args"): {str: str},
+ # Name of the docker image definition under taskcluster/docker, when
+ # different from the docker image name.
+ Optional("definition"): str,
+ # List of package tasks this docker image depends on.
+ Optional("packages"): [str],
+ Optional(
+ "index",
+ description="information for indexing this build so its artifacts can be discovered",
+ ): task_description_schema["index"],
+ Optional(
+ "cache",
+ description="Whether this image should be cached based on inputs.",
+ ): bool,
+ }
+)
+
+
+transforms.add_validate(docker_image_schema)
+
+
+@transforms.add
+def fill_template(config, tasks):
+ if not taskgraph.fast and config.write_artifacts:
+ if not os.path.isdir(CONTEXTS_DIR):
+ os.makedirs(CONTEXTS_DIR)
+
+ for task in tasks:
+ image_name = task.pop("name")
+ job_symbol = task.pop("symbol")
+ args = task.pop("args", {})
+ packages = task.pop("packages", [])
+ parent = task.pop("parent", None)
+
+ for p in packages:
+ if f"packages-{p}" not in config.kind_dependencies_tasks:
+ raise Exception(
+ "Missing package job for {}-{}: {}".format(
+ config.kind, image_name, p
+ )
+ )
+
+ if not taskgraph.fast:
+ context_path = mozpath.relpath(image_path(image_name), GECKO)
+ if config.write_artifacts:
+ context_file = os.path.join(CONTEXTS_DIR, f"{image_name}.tar.gz")
+ logger.info(f"Writing {context_file} for docker image {image_name}")
+ context_hash = create_context_tar(
+ GECKO, context_path, context_file, image_name, args
+ )
+ else:
+ context_hash = generate_context_hash(
+ GECKO, context_path, image_name, args
+ )
+ else:
+ if config.write_artifacts:
+ raise Exception("Can't write artifacts if `taskgraph.fast` is set.")
+ context_hash = "0" * 40
+ digest_data = [context_hash]
+ digest_data += [json.dumps(args, sort_keys=True)]
+
+ description = "Build the docker image {} for use by dependent tasks".format(
+ image_name
+ )
+
+ args["DOCKER_IMAGE_PACKAGES"] = " ".join(f"<{p}>" for p in packages)
+
+ # Adjust the zstandard compression level based on the execution level.
+ # We use faster compression for level 1 because we care more about
+ # end-to-end times. We use slower/better compression for other levels
+ # because images are read more often and it is worth the trade-off to
+ # burn more CPU once to reduce image size.
+ zstd_level = "3" if int(config.params["level"]) == 1 else "10"
+
+ # include some information that is useful in reconstructing this task
+ # from JSON
+ taskdesc = {
+ "label": f"{config.kind}-{image_name}",
+ "description": description,
+ "attributes": {
+ "image_name": image_name,
+ "artifact_prefix": "public",
+ },
+ "always-target": True,
+ "expiration-policy": "long",
+ "scopes": [],
+ "treeherder": {
+ "symbol": job_symbol,
+ "platform": "taskcluster-images/opt",
+ "kind": "other",
+ "tier": 1,
+ },
+ "run-on-projects": [],
+ "worker-type": "images-gcp",
+ "worker": {
+ "implementation": "docker-worker",
+ "os": "linux",
+ "artifacts": [
+ {
+ "type": "file",
+ "path": "/workspace/image.tar.zst",
+ "name": "public/image.tar.zst",
+ }
+ ],
+ "env": {
+ "CONTEXT_TASK_ID": {"task-reference": "<decision>"},
+ "CONTEXT_PATH": "public/docker-contexts/{}.tar.gz".format(
+ image_name
+ ),
+ "HASH": context_hash,
+ "PROJECT": config.params["project"],
+ "IMAGE_NAME": image_name,
+ "DOCKER_IMAGE_ZSTD_LEVEL": zstd_level,
+ "DOCKER_BUILD_ARGS": {"task-reference": json.dumps(args)},
+ "GECKO_BASE_REPOSITORY": config.params["base_repository"],
+ "GECKO_HEAD_REPOSITORY": config.params["head_repository"],
+ "GECKO_HEAD_REV": config.params["head_rev"],
+ },
+ "chain-of-trust": True,
+ "max-run-time": 7200,
+ # FIXME: We aren't currently propagating the exit code
+ },
+ }
+ # Retry for 'funsize-update-generator' if exit status code is -1
+ if image_name in ["funsize-update-generator"]:
+ taskdesc["worker"]["retry-exit-status"] = [-1]
+
+ worker = taskdesc["worker"]
+
+ if image_name == "image_builder":
+ worker["docker-image"] = IMAGE_BUILDER_IMAGE
+ digest_data.append(f"image-builder-image:{IMAGE_BUILDER_IMAGE}")
+ else:
+ worker["docker-image"] = {"in-tree": "image_builder"}
+ deps = taskdesc.setdefault("dependencies", {})
+ deps["docker-image"] = f"{config.kind}-image_builder"
+
+ if packages:
+ deps = taskdesc.setdefault("dependencies", {})
+ for p in sorted(packages):
+ deps[p] = f"packages-{p}"
+
+ if parent:
+ deps = taskdesc.setdefault("dependencies", {})
+ deps["parent"] = f"{config.kind}-{parent}"
+ worker["env"]["PARENT_TASK_ID"] = {
+ "task-reference": "<parent>",
+ }
+ if "index" in task:
+ taskdesc["index"] = task["index"]
+
+ if task.get("cache", True) and not taskgraph.fast:
+ taskdesc["cache"] = {
+ "type": "docker-images.v2",
+ "name": image_name,
+ "digest-data": digest_data,
+ }
+
+ yield taskdesc
diff --git a/taskcluster/gecko_taskgraph/transforms/fetch.py b/taskcluster/gecko_taskgraph/transforms/fetch.py
new file mode 100644
index 0000000000..90a02a8043
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/fetch.py
@@ -0,0 +1,388 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# Support for running tasks that download remote content and re-export
+# it as task artifacts.
+
+
+import os
+import re
+
+import attr
+import taskgraph
+from mozbuild.shellutil import quote as shell_quote
+from mozpack import path as mozpath
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import Schema, validate_schema
+from taskgraph.util.treeherder import join_symbol
+from voluptuous import Any, Extra, Optional, Required
+
+import gecko_taskgraph
+
+from ..util.cached_tasks import add_optimization
+
+CACHE_TYPE = "content.v1"
+
+FETCH_SCHEMA = Schema(
+ {
+ # Name of the task.
+ Required("name"): str,
+ # Relative path (from config.path) to the file the task was defined
+ # in.
+ Optional("job-from"): str,
+ # Description of the task.
+ Required("description"): str,
+ Optional(
+ "fetch-alias",
+ description="An alias that can be used instead of the real fetch job name in "
+ "fetch stanzas for jobs.",
+ ): str,
+ Optional(
+ "artifact-prefix",
+ description="The prefix of the taskcluster artifact being uploaded. "
+ "Defaults to `public/`; if it starts with something other than "
+ "`public/` the artifact will require scopes to access.",
+ ): str,
+ Optional("attributes"): {str: object},
+ Required("fetch"): {
+ Required("type"): str,
+ Extra: object,
+ },
+ }
+)
+
+
+# define a collection of payload builders, depending on the worker implementation
+fetch_builders = {}
+
+
+@attr.s(frozen=True)
+class FetchBuilder:
+ schema = attr.ib(type=Schema)
+ builder = attr.ib()
+
+
+def fetch_builder(name, schema):
+ schema = Schema({Required("type"): name}).extend(schema)
+
+ def wrap(func):
+ fetch_builders[name] = FetchBuilder(schema, func)
+ return func
+
+ return wrap
+
+
+transforms = TransformSequence()
+transforms.add_validate(FETCH_SCHEMA)
+
+
+@transforms.add
+def process_fetch_job(config, jobs):
+ # Converts fetch-url entries to the job schema.
+ for job in jobs:
+ typ = job["fetch"]["type"]
+ name = job["name"]
+ fetch = job.pop("fetch")
+
+ if typ not in fetch_builders:
+ raise Exception(f"Unknown fetch type {typ} in fetch {name}")
+ validate_schema(fetch_builders[typ].schema, fetch, f"In task.fetch {name!r}:")
+
+ job.update(configure_fetch(config, typ, name, fetch))
+
+ yield job
+
+
+def configure_fetch(config, typ, name, fetch):
+ if typ not in fetch_builders:
+ raise Exception(f"No fetch type {typ} in fetch {name}")
+ validate_schema(fetch_builders[typ].schema, fetch, f"In task.fetch {name!r}:")
+
+ return fetch_builders[typ].builder(config, name, fetch)
+
+
+@transforms.add
+def make_task(config, jobs):
+ # Fetch tasks are idempotent and immutable. Have them live for
+ # essentially forever.
+ if config.params["level"] == "3":
+ expires = "1000 years"
+ else:
+ expires = "28 days"
+
+ for job in jobs:
+ name = job["name"]
+ artifact_prefix = job.get("artifact-prefix", "public")
+ env = job.get("env", {})
+ env.update({"UPLOAD_DIR": "/builds/worker/artifacts"})
+ attributes = job.get("attributes", {})
+ attributes["artifact_prefix"] = artifact_prefix
+ attributes["fetch-artifact"] = mozpath.join(
+ artifact_prefix, job["artifact_name"]
+ )
+ alias = job.get("fetch-alias")
+ if alias:
+ attributes["fetch-alias"] = alias
+
+ task_expires = "28 days" if attributes.get("cached_task") is False else expires
+ artifact_expires = (
+ "2 days" if attributes.get("cached_task") is False else expires
+ )
+
+ task = {
+ "attributes": attributes,
+ "name": name,
+ "description": job["description"],
+ "expires-after": task_expires,
+ "label": "fetch-%s" % name,
+ "run-on-projects": [],
+ "treeherder": {
+ "symbol": join_symbol("Fetch", name),
+ "kind": "build",
+ "platform": "fetch/opt",
+ "tier": 1,
+ },
+ "run": {
+ "using": "run-task",
+ "checkout": False,
+ "command": job["command"],
+ },
+ "worker-type": "b-linux-gcp",
+ "worker": {
+ "chain-of-trust": True,
+ "docker-image": {"in-tree": "fetch"},
+ "env": env,
+ "max-run-time": 900,
+ "artifacts": [
+ {
+ "type": "directory",
+ "name": artifact_prefix,
+ "path": "/builds/worker/artifacts",
+ "expires-after": artifact_expires,
+ }
+ ],
+ },
+ }
+
+ if job.get("secret", None):
+ task["scopes"] = ["secrets:get:" + job.get("secret")]
+ task["worker"]["taskcluster-proxy"] = True
+
+ if not taskgraph.fast:
+ cache_name = task["label"].replace(f"{config.kind}-", "", 1)
+
+ # This adds the level to the index path automatically.
+ add_optimization(
+ config,
+ task,
+ cache_type=CACHE_TYPE,
+ cache_name=cache_name,
+ digest_data=job["digest_data"],
+ )
+ yield task
+
+
+@fetch_builder(
+ "static-url",
+ schema={
+ # The URL to download.
+ Required("url"): str,
+ # The SHA-256 of the downloaded content.
+ Required("sha256"): str,
+ # Size of the downloaded entity, in bytes.
+ Required("size"): int,
+ # GPG signature verification.
+ Optional("gpg-signature"): {
+ # URL where GPG signature document can be obtained. Can contain the
+ # value ``{url}``, which will be substituted with the value from
+ # ``url``.
+ Required("sig-url"): str,
+ # Path to file containing GPG public key(s) used to validate
+ # download.
+ Required("key-path"): str,
+ },
+ # The name to give to the generated artifact. Defaults to the file
+ # portion of the URL. Using a different extension converts the
+ # archive to the given type. Only conversion to .tar.zst is
+ # supported.
+ Optional("artifact-name"): str,
+ # Strip the given number of path components at the beginning of
+ # each file entry in the archive.
+ # Requires an artifact-name ending with .tar.zst.
+ Optional("strip-components"): int,
+ # Add the given prefix to each file entry in the archive.
+ # Requires an artifact-name ending with .tar.zst.
+ Optional("add-prefix"): str,
+ # IMPORTANT: when adding anything that changes the behavior of the task,
+ # it is important to update the digest data used to compute cache hits.
+ },
+)
+def create_fetch_url_task(config, name, fetch):
+ artifact_name = fetch.get("artifact-name")
+ if not artifact_name:
+ artifact_name = fetch["url"].split("/")[-1]
+
+ command = [
+ "/builds/worker/bin/fetch-content",
+ "static-url",
+ ]
+
+ # Arguments that matter to the cache digest
+ args = [
+ "--sha256",
+ fetch["sha256"],
+ "--size",
+ "%d" % fetch["size"],
+ ]
+
+ if fetch.get("strip-components"):
+ args.extend(["--strip-components", "%d" % fetch["strip-components"]])
+
+ if fetch.get("add-prefix"):
+ args.extend(["--add-prefix", fetch["add-prefix"]])
+
+ command.extend(args)
+
+ env = {}
+
+ if "gpg-signature" in fetch:
+ sig_url = fetch["gpg-signature"]["sig-url"].format(url=fetch["url"])
+ key_path = os.path.join(
+ gecko_taskgraph.GECKO, fetch["gpg-signature"]["key-path"]
+ )
+
+ with open(key_path, "r") as fh:
+ gpg_key = fh.read()
+
+ env["FETCH_GPG_KEY"] = gpg_key
+ command.extend(
+ [
+ "--gpg-sig-url",
+ sig_url,
+ "--gpg-key-env",
+ "FETCH_GPG_KEY",
+ ]
+ )
+
+ command.extend(
+ [
+ fetch["url"],
+ "/builds/worker/artifacts/%s" % artifact_name,
+ ]
+ )
+
+ return {
+ "command": command,
+ "artifact_name": artifact_name,
+ "env": env,
+ # We don't include the GPG signature in the digest because it isn't
+ # materially important for caching: GPG signatures are supplemental
+ # trust checking beyond what the shasum already provides.
+ "digest_data": args + [artifact_name],
+ }
+
+
+@fetch_builder(
+ "git",
+ schema={
+ Required("repo"): str,
+ Required(Any("revision", "branch")): str,
+ Optional("include-dot-git"): bool,
+ Optional("artifact-name"): str,
+ Optional("path-prefix"): str,
+ # ssh-key is a taskcluster secret path (e.g. project/civet/github-deploy-key)
+ # In the secret dictionary, the key should be specified as
+ # "ssh_privkey": "-----BEGIN OPENSSH PRIVATE KEY-----\nkfksnb3jc..."
+ # n.b. The OpenSSH private key file format requires a newline at the end of the file.
+ Optional("ssh-key"): str,
+ },
+)
+def create_git_fetch_task(config, name, fetch):
+ path_prefix = fetch.get("path-prefix")
+ if not path_prefix:
+ path_prefix = fetch["repo"].rstrip("/").rsplit("/", 1)[-1]
+ artifact_name = fetch.get("artifact-name")
+ if not artifact_name:
+ artifact_name = f"{path_prefix}.tar.zst"
+
+ if "revision" in fetch and "branch" in fetch:
+ raise Exception("revision and branch cannot be used in the same context")
+
+ revision_or_branch = None
+
+ if "revision" in fetch:
+ revision_or_branch = fetch["revision"]
+ if not re.match(r"[0-9a-fA-F]{40}", fetch["revision"]):
+ raise Exception(f'Revision is not a sha1 in fetch task "{name}"')
+ else:
+ # we are sure we are dealing with a branch
+ revision_or_branch = fetch["branch"]
+
+ args = [
+ "/builds/worker/bin/fetch-content",
+ "git-checkout-archive",
+ "--path-prefix",
+ path_prefix,
+ fetch["repo"],
+ revision_or_branch,
+ "/builds/worker/artifacts/%s" % artifact_name,
+ ]
+
+ ssh_key = fetch.get("ssh-key")
+ if ssh_key:
+ args.append("--ssh-key-secret")
+ args.append(ssh_key)
+
+ digest_data = [revision_or_branch, path_prefix, artifact_name]
+ if fetch.get("include-dot-git", False):
+ args.append("--include-dot-git")
+ digest_data.append(".git")
+
+ return {
+ "command": args,
+ "artifact_name": artifact_name,
+ "digest_data": digest_data,
+ "secret": ssh_key,
+ }
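+
+# An illustrative "fetch" section for this builder (values hypothetical):
+#
+#   fetch:
+#     type: git
+#     repo: https://github.com/mozilla/example
+#     revision: 0123456789abcdef0123456789abcdef01234567
+#     artifact-name: example.tar.zst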
+
+
+@fetch_builder(
+ "chromium-fetch",
+ schema={
+ Required("script"): str,
+ # Platform type for chromium build
+ Required("platform"): str,
+ # Chromium revision to obtain
+ Optional("revision"): str,
+ # The name to give to the generated artifact.
+ Required("artifact-name"): str,
+ },
+)
+def create_chromium_fetch_task(config, name, fetch):
+ artifact_name = fetch.get("artifact-name")
+
+ workdir = "/builds/worker"
+
+ platform = fetch.get("platform")
+ revision = fetch.get("revision")
+
+ args = "--platform " + shell_quote(platform)
+ if revision:
+ args += " --revision " + shell_quote(revision)
+
+ cmd = [
+ "bash",
+ "-c",
+        "cd {} && /usr/bin/python3 {} {}".format(workdir, fetch["script"], args),
+ ]
+
+ return {
+ "command": cmd,
+ "artifact_name": artifact_name,
+ "digest_data": [
+ f"revision={revision}",
+ f"platform={platform}",
+ f"artifact_name={artifact_name}",
+ ],
+ }
diff --git a/taskcluster/gecko_taskgraph/transforms/final_verify.py b/taskcluster/gecko_taskgraph/transforms/final_verify.py
new file mode 100644
index 0000000000..aa8be35a0d
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/final_verify.py
@@ -0,0 +1,35 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the final-verify task into an actual task description.
+"""
+
+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def add_command(config, tasks):
+ for task in tasks:
+ if not task["worker"].get("env"):
+ task["worker"]["env"] = {}
+
+ final_verify_configs = []
+ for upstream in sorted(task.get("dependencies", {}).keys()):
+ if "update-verify-config" in upstream:
+ final_verify_configs.append(
+ f"<{upstream}/public/build/update-verify.cfg>",
+ )
+ task["run"] = {
+ "using": "run-task",
+ "cwd": "{checkout}",
+ "command": {
+ "artifact-reference": "tools/update-verify/release/final-verification.sh "
+ + " ".join(final_verify_configs),
+ },
+ "sparse-profile": "update-verify",
+ }
+ yield task
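+
+# For illustration: given an upstream dependency named, say,
+# "update-verify-config-firefox-linux64", the generated command would be
+# "tools/update-verify/release/final-verification.sh
+# <update-verify-config-firefox-linux64/public/build/update-verify.cfg>",
+# with the <...> artifact reference resolved when the task is submitted.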
diff --git a/taskcluster/gecko_taskgraph/transforms/fxrecord.py b/taskcluster/gecko_taskgraph/transforms/fxrecord.py
new file mode 100644
index 0000000000..6ae569969e
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/fxrecord.py
@@ -0,0 +1,30 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.dependencies import get_primary_dependency
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def fxrecord(config, jobs):
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ assert dep_job
+
+ job["dependencies"] = {dep_job.label: dep_job.label}
+ job["treeherder"]["platform"] = dep_job.task["extra"]["treeherder-platform"]
+ job["worker"].setdefault("env", {})["FXRECORD_TASK_ID"] = {
+ "task-reference": f"<{dep_job.label}>"
+ }
+
+ # copy shipping_product from upstream
+ product = dep_job.attributes.get(
+ "shipping_product", dep_job.task.get("shipping-product")
+ )
+ if product:
+ job.setdefault("shipping-product", product)
+
+ yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/geckodriver_mac_notarization.py b/taskcluster/gecko_taskgraph/transforms/geckodriver_mac_notarization.py
new file mode 100644
index 0000000000..2f0d8dd2aa
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/geckodriver_mac_notarization.py
@@ -0,0 +1,83 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the geckodriver mac notarization task into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.dependencies import get_primary_dependency
+from taskgraph.util.schema import Schema
+from voluptuous import Optional
+
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
+from gecko_taskgraph.util.scriptworker import add_scope_prefix
+
+geckodriver_notarization_description_schema = Schema(
+ {
+ Optional("label"): str,
+ Optional("treeherder"): task_description_schema["treeherder"],
+ Optional("shipping-phase"): task_description_schema["shipping-phase"],
+ Optional("worker"): task_description_schema["worker"],
+ Optional("worker-type"): task_description_schema["worker-type"],
+ Optional("job-from"): task_description_schema["job-from"],
+ Optional("attributes"): task_description_schema["attributes"],
+ Optional("dependencies"): task_description_schema["dependencies"],
+ }
+)
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def remove_name(config, jobs):
+ for job in jobs:
+ if "name" in job:
+ del job["name"]
+ yield job
+
+
+transforms.add_validate(geckodriver_notarization_description_schema)
+
+
+@transforms.add
+def geckodriver_mac_notarization(config, jobs):
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ assert dep_job
+
+ attributes = copy_attributes_from_dependent_job(dep_job)
+ treeherder = job.get("treeherder", {})
+ dep_treeherder = dep_job.task.get("extra", {}).get("treeherder", {})
+ treeherder.setdefault(
+ "platform", dep_job.task.get("extra", {}).get("treeherder-platform")
+ )
+ treeherder.setdefault("tier", dep_treeherder.get("tier", 1))
+ treeherder.setdefault("kind", "build")
+
+ dependencies = {dep_job.kind: dep_job.label}
+
+ description = "Mac notarization - Geckodriver for build '{}'".format(
+ attributes.get("build_platform"),
+ )
+
+ build_platform = dep_job.attributes.get("build_platform")
+
+ scopes = [add_scope_prefix(config, "signing:cert:release-apple-notarization")]
+
+ platform = build_platform.rsplit("-", 1)[0]
+
+ task = {
+ "label": job["label"],
+ "description": description,
+ "worker-type": job["worker-type"],
+ "worker": job["worker"],
+ "scopes": scopes,
+ "dependencies": dependencies,
+ "attributes": attributes,
+ "treeherder": treeherder,
+ "run-on-projects": ["mozilla-central"],
+ "index": {"product": "geckodriver", "job-name": f"{platform}-notarized"},
+ }
+ yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/geckodriver_signing.py b/taskcluster/gecko_taskgraph/transforms/geckodriver_signing.py
new file mode 100644
index 0000000000..95b8d3dd54
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/geckodriver_signing.py
@@ -0,0 +1,139 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the geckodriver signing task into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.dependencies import get_primary_dependency
+from taskgraph.util.schema import Schema
+from voluptuous import Optional
+
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
+from gecko_taskgraph.util.scriptworker import get_signing_cert_scope_per_platform
+
+geckodriver_signing_description_schema = Schema(
+ {
+ Optional("label"): str,
+ Optional("attributes"): task_description_schema["attributes"],
+ Optional("dependencies"): task_description_schema["dependencies"],
+ Optional("treeherder"): task_description_schema["treeherder"],
+ Optional("shipping-phase"): task_description_schema["shipping-phase"],
+ Optional("job-from"): task_description_schema["job-from"],
+ }
+)
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def remove_name(config, jobs):
+ for job in jobs:
+ if "name" in job:
+ del job["name"]
+ yield job
+
+
+transforms.add_validate(geckodriver_signing_description_schema)
+
+
+@transforms.add
+def make_signing_description(config, jobs):
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ assert dep_job
+
+ attributes = copy_attributes_from_dependent_job(dep_job)
+ attributes["repackage_type"] = "repackage-signing"
+
+ treeherder = job.get("treeherder", {})
+ dep_treeherder = dep_job.task.get("extra", {}).get("treeherder", {})
+ treeherder.setdefault(
+ "symbol", "{}(gd-s)".format(dep_treeherder["groupSymbol"])
+ )
+ treeherder.setdefault(
+ "platform", dep_job.task.get("extra", {}).get("treeherder-platform")
+ )
+ treeherder.setdefault("tier", dep_treeherder.get("tier", 1))
+ treeherder.setdefault("kind", "build")
+
+ dependencies = {dep_job.kind: dep_job.label}
+ signing_dependencies = dep_job.dependencies
+ dependencies.update(
+ {k: v for k, v in signing_dependencies.items() if k != "docker-image"}
+ )
+
+ description = "Signing Geckodriver for build '{}'".format(
+ attributes.get("build_platform"),
+ )
+
+ build_platform = dep_job.attributes.get("build_platform")
+ is_shippable = dep_job.attributes.get("shippable")
+ signing_cert_scope = get_signing_cert_scope_per_platform(
+ build_platform, is_shippable, config
+ )
+
+ upstream_artifacts = _craft_upstream_artifacts(
+ dep_job, dep_job.kind, build_platform
+ )
+
+ scopes = [signing_cert_scope]
+
+ platform = build_platform.rsplit("-", 1)[0]
+
+ task = {
+ "label": job["label"],
+ "description": description,
+ "worker-type": "linux-signing",
+ "worker": {
+ "implementation": "scriptworker-signing",
+ "upstream-artifacts": upstream_artifacts,
+ },
+ "scopes": scopes,
+ "dependencies": dependencies,
+ "attributes": attributes,
+ "treeherder": treeherder,
+ "run-on-projects": ["mozilla-central"],
+ "index": {"product": "geckodriver", "job-name": platform},
+ }
+
+ if build_platform.startswith("macosx"):
+ worker_type = task["worker-type"]
+ worker_type_alias_map = {
+ "linux-depsigning": "mac-depsigning",
+ "linux-signing": "mac-signing",
+ }
+
+ assert worker_type in worker_type_alias_map, (
+ "Make sure to adjust the below worker_type_alias logic for "
+ "mac if you change the signing workerType aliases!"
+ " ({} not found in mapping)".format(worker_type)
+ )
+            task["worker-type"] = worker_type_alias_map[worker_type]
+ task["worker"]["mac-behavior"] = "mac_geckodriver"
+
+ yield task
+
+
+def _craft_upstream_artifacts(dep_job, dependency_kind, build_platform):
+ if build_platform.startswith("win"):
+ signing_format = "autograph_authenticode_sha2"
+ elif build_platform.startswith("linux"):
+ signing_format = "autograph_gpg"
+ elif build_platform.startswith("macosx"):
+ signing_format = "mac_geckodriver"
+ else:
+ raise ValueError(f'Unsupported build platform "{build_platform}"')
+
+ return [
+ {
+ "taskId": {"task-reference": f"<{dependency_kind}>"},
+ "taskType": "build",
+ "paths": [dep_job.attributes["toolchain-artifact"]],
+ "formats": [signing_format],
+ }
+ ]
diff --git a/taskcluster/gecko_taskgraph/transforms/github_sync.py b/taskcluster/gecko_taskgraph/transforms/github_sync.py
new file mode 100644
index 0000000000..6f48f794ce
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/github_sync.py
@@ -0,0 +1,23 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def sync_github(config, tasks):
+ """Do transforms specific to github-sync tasks."""
+ for task in tasks:
+ # Add the secret to the scopes, only in m-c.
+ # Doing this on any other tree will result in decision task failure
+ # because m-c is the only one allowed to have that scope.
+ secret = task["secret"]
+ if config.params["project"] == "mozilla-central":
+ task.setdefault("scopes", [])
+ task["scopes"].append("secrets:get:" + secret)
+ task["worker"].setdefault("env", {})["GITHUB_SECRET"] = secret
+ del task["secret"]
+ yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/hardened_signing.py b/taskcluster/gecko_taskgraph/transforms/hardened_signing.py
new file mode 100644
index 0000000000..edc8aaf68e
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/hardened_signing.py
@@ -0,0 +1,111 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the signing task into an actual task description.
+"""
+
+import copy
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.dependencies import get_primary_dependency
+from taskgraph.util.keyed_by import evaluate_keyed_by
+
+from gecko_taskgraph.util.attributes import release_level
+
+transforms = TransformSequence()
+
+PROVISIONING_PROFILE_FILENAMES = {
+ "firefox": "orgmozillafirefox.provisionprofile",
+ "devedition": "orgmozillafirefoxdeveloperedition.provisionprofile",
+ "nightly": "orgmozillanightly.provisionprofile",
+}
+
+
+@transforms.add
+def add_hardened_sign_config(config, jobs):
+ for job in jobs:
+ if (
+ "signing" not in config.kind
+ or "macosx" not in job["attributes"]["build_platform"]
+ ):
+ yield job
+ continue
+
+ dep_job = get_primary_dependency(config, job)
+ assert dep_job
+ project_level = release_level(config.params["project"])
+ is_shippable = dep_job.attributes.get("shippable", False)
+ hardened_signing_type = "developer"
+
+ # If project is production AND shippable build, then use production entitlements
+ # Note: debug builds require developer entitlements
+ if project_level == "production" and is_shippable:
+ hardened_signing_type = "production"
+
+ # Evaluating can mutate the original config, so we must deepcopy
+ hardened_sign_config = evaluate_keyed_by(
+ copy.deepcopy(config.graph_config["mac-signing"]["hardened-sign-config"]),
+ "hardened-sign-config",
+ {"hardened-signing-type": hardened_signing_type},
+ )
+ if not isinstance(hardened_sign_config, list):
+ raise Exception("hardened-sign-config must be a list")
+
+ for sign_cfg in hardened_sign_config:
+ if isinstance(sign_cfg.get("entitlements"), dict):
+ sign_cfg["entitlements"] = evaluate_keyed_by(
+ sign_cfg["entitlements"],
+ "entitlements",
+ {
+ "build-platform": dep_job.attributes.get("build_platform"),
+ "project": config.params["project"],
+ },
+ )
+
+ if "entitlements" in sign_cfg and not sign_cfg.get(
+ "entitlements", ""
+ ).startswith("http"):
+ sign_cfg["entitlements"] = config.params.file_url(
+ sign_cfg["entitlements"]
+ )
+
+ job["worker"]["hardened-sign-config"] = hardened_sign_config
+ job["worker"]["mac-behavior"] = "mac_sign_and_pkg_hardened"
+ yield job
+
+
+@transforms.add
+def add_provisioning_profile_config(config, jobs):
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ assert dep_job
+ if (
+ # Ensure signing task
+ "signing" in config.kind
+ # Ensure macosx platform
+ and "macosx" in job["attributes"]["build_platform"]
+ # Ensure project is considered production
+ and release_level(config.params["project"]) == "production"
+ # Ensure build is shippable
+ and dep_job.attributes.get("shippable", False)
+ ):
+ # Note that the check order here is important, as mozilla-central can build devedition
+ if "devedition" in dep_job.attributes.get("build_platform", ""):
+ # Devedition
+ filename = PROVISIONING_PROFILE_FILENAMES["devedition"]
+ elif config.params["project"] == "mozilla-central":
+ # Nightly
+ filename = PROVISIONING_PROFILE_FILENAMES["nightly"]
+ else:
+ # Release, beta, esr and variants should all use default firefox app id
+ # For full list of projects, see RELEASE_PROJECTS in taskcluster/gecko_taskgraph/util/attributes.py
+ filename = PROVISIONING_PROFILE_FILENAMES["firefox"]
+
+ job["worker"]["provisioning-profile-config"] = [
+ {
+ "profile_name": filename,
+ "target_path": "/Contents/embedded.provisionprofile",
+ },
+ ]
+ yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/job/__init__.py b/taskcluster/gecko_taskgraph/transforms/job/__init__.py
new file mode 100644
index 0000000000..b87f7e0955
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/job/__init__.py
@@ -0,0 +1,507 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Convert a job description into a task description.
+
+Job descriptions are similar to task descriptions, but they specify how to run
+the job at a higher level, using a "run" field that can be interpreted by
+run-using handlers in `taskcluster/gecko_taskgraph/transforms/job`.
+"""
+
+
+import json
+import logging
+
+import mozpack.path as mozpath
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.python_path import import_sibling_modules
+from taskgraph.util.schema import Schema, validate_schema
+from taskgraph.util.taskcluster import get_artifact_prefix
+from voluptuous import Any, Exclusive, Extra, Optional, Required
+
+from gecko_taskgraph.transforms.cached_tasks import order_tasks
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.copy_task import copy_task
+from gecko_taskgraph.util.workertypes import worker_type_implementation
+
+logger = logging.getLogger(__name__)
+
+# Schema for a job description
+job_description_schema = Schema(
+ {
+ # The name of the job and the job's label. At least one must be specified,
+ # and the label will be generated from the name if necessary, by prepending
+ # the kind.
+ Optional("name"): str,
+ Optional("label"): str,
+ # the following fields are passed directly through to the task description,
+ # possibly modified by the run implementation. See
+ # taskcluster/gecko_taskgraph/transforms/task.py for the schema details.
+ Required("description"): task_description_schema["description"],
+ Optional("attributes"): task_description_schema["attributes"],
+ Optional("job-from"): task_description_schema["job-from"],
+ Optional("dependencies"): task_description_schema["dependencies"],
+        Optional("if-dependencies"): task_description_schema["if-dependencies"],
+        Optional("soft-dependencies"): task_description_schema["soft-dependencies"],
+ Optional("requires"): task_description_schema["requires"],
+ Optional("expires-after"): task_description_schema["expires-after"],
+ Optional("expiration-policy"): task_description_schema["expiration-policy"],
+ Optional("routes"): task_description_schema["routes"],
+ Optional("scopes"): task_description_schema["scopes"],
+ Optional("tags"): task_description_schema["tags"],
+ Optional("extra"): task_description_schema["extra"],
+ Optional("treeherder"): task_description_schema["treeherder"],
+ Optional("index"): task_description_schema["index"],
+ Optional("run-on-projects"): task_description_schema["run-on-projects"],
+ Optional("shipping-phase"): task_description_schema["shipping-phase"],
+ Optional("shipping-product"): task_description_schema["shipping-product"],
+ Optional("always-target"): task_description_schema["always-target"],
+ Exclusive("optimization", "optimization"): task_description_schema[
+ "optimization"
+ ],
+ Optional("use-sccache"): task_description_schema["use-sccache"],
+ Optional("use-system-python"): bool,
+ Optional("priority"): task_description_schema["priority"],
+ # The "when" section contains descriptions of the circumstances under which
+ # this task should be included in the task graph. This will be converted
+ # into an optimization, so it cannot be specified in a job description that
+ # also gives 'optimization'.
+ Exclusive("when", "optimization"): Any(
+ None,
+ {
+ # This task only needs to be run if a file matching one of the given
+ # patterns has changed in the push. The patterns use the mozpack
+ # match function (python/mozbuild/mozpack/path.py).
+ Optional("files-changed"): [str],
+ },
+ ),
+ # A list of artifacts to install from 'fetch' tasks.
+ Optional("fetches"): {
+ str: [
+ str,
+ {
+ Required("artifact"): str,
+ Optional("dest"): str,
+ Optional("extract"): bool,
+ Optional("verify-hash"): bool,
+ },
+ ],
+ },
+ # A description of how to run this job.
+ "run": {
+ # The key to a job implementation in a peer module to this one
+ "using": str,
+ # Base work directory used to set up the task.
+ Optional("workdir"): str,
+ # Any remaining content is verified against that job implementation's
+ # own schema.
+ Extra: object,
+ },
+ Required("worker-type"): task_description_schema["worker-type"],
+ # This object will be passed through to the task description, with additions
+ # provided by the job's run-using function
+ Optional("worker"): dict,
+ }
+)
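+
+# An illustrative job description accepted by this schema (values
+# hypothetical):
+#
+#   example-job:
+#     description: "build something"
+#     worker-type: b-linux
+#     run:
+#       using: run-task
+#       command: ./mach build
+#     when:
+#       files-changed:
+#         - "python/**"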
+
+transforms = TransformSequence()
+transforms.add_validate(job_description_schema)
+
+
+@transforms.add
+def rewrite_when_to_optimization(config, jobs):
+ for job in jobs:
+ when = job.pop("when", {})
+ if not when:
+ yield job
+ continue
+
+ files_changed = when.get("files-changed")
+
+ # implicitly add task config directory.
+ files_changed.append(f"{config.path}/**")
+
+ # "only when files changed" implies "skip if files have not changed"
+ job["optimization"] = {"skip-unless-changed": files_changed}
+
+ assert "when" not in job
+ yield job
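+
+# For example, a job declaring `when: {files-changed: ["python/**"]}` in a
+# kind loaded from, say, taskcluster/ci/example ends up with
+# `optimization: {skip-unless-changed: ["python/**", "taskcluster/ci/example/**"]}`.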
+
+
+@transforms.add
+def set_implementation(config, jobs):
+ for job in jobs:
+ impl, os = worker_type_implementation(
+ config.graph_config, config.params, job["worker-type"]
+ )
+ if os:
+ job.setdefault("tags", {})["os"] = os
+ if impl:
+ job.setdefault("tags", {})["worker-implementation"] = impl
+ worker = job.setdefault("worker", {})
+ assert "implementation" not in worker
+ worker["implementation"] = impl
+ if os:
+ worker["os"] = os
+ yield job
+
+
+@transforms.add
+def set_label(config, jobs):
+ for job in jobs:
+ if "label" not in job:
+ if "name" not in job:
+ raise Exception("job has neither a name nor a label")
+ job["label"] = "{}-{}".format(config.kind, job["name"])
+ if job.get("name"):
+ del job["name"]
+ yield job
+
+
+@transforms.add
+def add_resource_monitor(config, jobs):
+ for job in jobs:
+ if job.get("attributes", {}).get("resource-monitor"):
+ worker_implementation, worker_os = worker_type_implementation(
+ config.graph_config, config.params, job["worker-type"]
+ )
+ # Normalise worker os so that linux-bitbar and similar use linux tools.
+ worker_os = worker_os.split("-")[0]
+ # We don't currently support an Arm worker, due to gopsutil's indirect
+ # dependencies (go-ole)
+ if "aarch64" in job["worker-type"]:
+ yield job
+ continue
+ elif "win7" in job["worker-type"]:
+ arch = "32"
+ else:
+ arch = "64"
+ job.setdefault("fetches", {})
+ job["fetches"].setdefault("toolchain", [])
+ job["fetches"]["toolchain"].append(f"{worker_os}{arch}-resource-monitor")
+
+ if worker_implementation == "docker-worker":
+ artifact_source = "/builds/worker/monitoring/resource-monitor.json"
+ else:
+ artifact_source = "monitoring/resource-monitor.json"
+ job["worker"].setdefault("artifacts", [])
+ job["worker"]["artifacts"].append(
+ {
+ "name": "public/monitoring/resource-monitor.json",
+ "type": "file",
+ "path": artifact_source,
+ }
+ )
+ # Set env for output file
+ job["worker"].setdefault("env", {})
+ job["worker"]["env"]["RESOURCE_MONITOR_OUTPUT"] = artifact_source
+
+ yield job
+
+
+@transforms.add
+def make_task_description(config, jobs):
+ """Given a build description, create a task description"""
+ # import plugin modules first, before iterating over jobs
+ import_sibling_modules(exceptions=("common.py",))
+
+ for job in jobs:
+ # only docker-worker uses a fixed absolute path to find directories
+ if job["worker"]["implementation"] == "docker-worker":
+ job["run"].setdefault("workdir", "/builds/worker")
+
+ taskdesc = copy_task(job)
+
+ # fill in some empty defaults to make run implementations easier
+ taskdesc.setdefault("attributes", {})
+ taskdesc.setdefault("dependencies", {})
+ taskdesc.setdefault("if-dependencies", [])
+ taskdesc.setdefault("soft-dependencies", [])
+ taskdesc.setdefault("routes", [])
+ taskdesc.setdefault("scopes", [])
+ taskdesc.setdefault("extra", {})
+
+ # give the function for job.run.using on this worker implementation a
+ # chance to set up the task description.
+ configure_taskdesc_for_run(
+ config, job, taskdesc, job["worker"]["implementation"]
+ )
+ del taskdesc["run"]
+
+ # yield only the task description, discarding the job description
+ yield taskdesc
+
+
+def get_attribute(target_dict, key, attributes, attribute_name):
+    """Get `attribute_name` from the given `attributes` dict, and if there
+    is a corresponding value, set `key` in `target_dict` to that value."""
+    value = attributes.get(attribute_name)
+    if value:
+        target_dict[key] = value
+
+
+@transforms.add
+def use_system_python(config, jobs):
+ for job in jobs:
+ if job.pop("use-system-python", True):
+ yield job
+ else:
+ fetches = job.setdefault("fetches", {})
+ toolchain = fetches.setdefault("toolchain", [])
+ if "win" in job["worker"]["os"]:
+ platform = "win64"
+ elif "linux" in job["worker"]["os"]:
+ platform = "linux64"
+ elif "macosx" in job["worker"]["os"]:
+ platform = "macosx64"
+ else:
+                # `platform` is never assigned on this path; report the os value
+                raise ValueError(
+                    "unexpected worker.os value {}".format(job["worker"]["os"])
+                )
+
+ toolchain.append("{}-python".format(platform))
+
+ worker = job.setdefault("worker", {})
+ env = worker.setdefault("env", {})
+
+ moz_fetches_dir = env.get("MOZ_FETCHES_DIR", "fetches")
+ moz_python_home = mozpath.join(moz_fetches_dir, "python")
+ env["MOZ_PYTHON_HOME"] = moz_python_home
+
+ yield job
+
+
+@transforms.add
+def use_fetches(config, jobs):
+ artifact_names = {}
+ extra_env = {}
+ aliases = {}
+ tasks = []
+
+ if config.kind in ("toolchain", "fetch"):
+ jobs = list(jobs)
+ tasks.extend((config.kind, j) for j in jobs)
+
+ tasks.extend(
+ (task.kind, task.__dict__)
+ for task in config.kind_dependencies_tasks.values()
+ if task.kind in ("fetch", "toolchain")
+ )
+ for kind, task in tasks:
+ get_attribute(
+ artifact_names, task["label"], task["attributes"], f"{kind}-artifact"
+ )
+ get_attribute(extra_env, task["label"], task["attributes"], f"{kind}-env")
+ value = task["attributes"].get(f"{kind}-alias")
+ if not value:
+ value = []
+ elif isinstance(value, str):
+ value = [value]
+ for alias in value:
+ fully_qualified = f"{kind}-{alias}"
+ label = task["label"]
+ if fully_qualified == label:
+ raise Exception(f"The alias {alias} of task {label} points to itself!")
+ aliases[fully_qualified] = label
+
+ artifact_prefixes = {}
+ for job in order_tasks(config, jobs):
+ artifact_prefixes[job["label"]] = get_artifact_prefix(job)
+
+ fetches = job.pop("fetches", None)
+ if not fetches:
+ yield job
+ continue
+
+ job_fetches = []
+ name = job.get("name") or job.get("label").replace(f"{config.kind}-", "")
+ dependencies = job.setdefault("dependencies", {})
+ worker = job.setdefault("worker", {})
+ env = worker.setdefault("env", {})
+ prefix = get_artifact_prefix(job)
+ has_sccache = False
+ for kind, artifacts in fetches.items():
+ if kind in ("fetch", "toolchain"):
+ for fetch_name in artifacts:
+ label = f"{kind}-{fetch_name}"
+ label = aliases.get(label, label)
+ if label not in artifact_names:
+ raise Exception(
+ "Missing fetch job for {kind}-{name}: {fetch}".format(
+ kind=config.kind, name=name, fetch=fetch_name
+ )
+ )
+ if label in extra_env:
+ env.update(extra_env[label])
+
+ path = artifact_names[label]
+
+ dependencies[label] = label
+ job_fetches.append(
+ {
+ "artifact": path,
+ "task": f"<{label}>",
+ "extract": True,
+ }
+ )
+
+ if kind == "toolchain" and fetch_name.endswith("-sccache"):
+ has_sccache = True
+ else:
+ if kind not in dependencies:
+ raise Exception(
+ "{name} can't fetch {kind} artifacts because "
+ "it has no {kind} dependencies!".format(name=name, kind=kind)
+ )
+ dep_label = dependencies[kind]
+ if dep_label in artifact_prefixes:
+ prefix = artifact_prefixes[dep_label]
+ else:
+ if dep_label not in config.kind_dependencies_tasks:
+ raise Exception(
+ "{name} can't fetch {kind} artifacts because "
+ "there are no tasks with label {label} in kind dependencies!".format(
+ name=name,
+ kind=kind,
+ label=dependencies[kind],
+ )
+ )
+
+ prefix = get_artifact_prefix(
+ config.kind_dependencies_tasks[dep_label]
+ )
+
+ for artifact in artifacts:
+ if isinstance(artifact, str):
+ path = artifact
+ dest = None
+ extract = True
+ verify_hash = False
+ else:
+ path = artifact["artifact"]
+ dest = artifact.get("dest")
+ extract = artifact.get("extract", True)
+ verify_hash = artifact.get("verify-hash", False)
+
+ fetch = {
+ "artifact": f"{prefix}/{path}"
+ if not path.startswith("/")
+ else path[1:],
+ "task": f"<{kind}>",
+ "extract": extract,
+ }
+ if dest is not None:
+ fetch["dest"] = dest
+ if verify_hash:
+ fetch["verify-hash"] = verify_hash
+ job_fetches.append(fetch)
+
+ if job.get("use-sccache") and not has_sccache:
+ raise Exception("Must provide an sccache toolchain if using sccache.")
+
+ job_artifact_prefixes = {
+ mozpath.dirname(fetch["artifact"])
+ for fetch in job_fetches
+ if not fetch["artifact"].startswith("public/")
+ }
+ if job_artifact_prefixes:
+ # Use taskcluster-proxy and request appropriate scope. For example, add
+ # 'scopes: [queue:get-artifact:path/to/*]' for 'path/to/artifact.tar.xz'.
+ worker["taskcluster-proxy"] = True
+ for prefix in sorted(job_artifact_prefixes):
+ scope = f"queue:get-artifact:{prefix}/*"
+ if scope not in job.setdefault("scopes", []):
+ job["scopes"].append(scope)
+
+ artifacts = {}
+ for f in job_fetches:
+ _, __, artifact = f["artifact"].rpartition("/")
+ if "dest" in f:
+ artifact = f"{f['dest']}/{artifact}"
+ task = f["task"][1:-1]
+ if artifact in artifacts:
+ raise Exception(
+ f"Task {name} depends on {artifacts[artifact]} and {task} "
+ f"that both provide {artifact}"
+ )
+ artifacts[artifact] = task
+
+ env["MOZ_FETCHES"] = {
+ "task-reference": json.dumps(
+ sorted(job_fetches, key=lambda x: sorted(x.items())), sort_keys=True
+ )
+ }
+ # The path is normalized to an absolute path in run-task
+ env.setdefault("MOZ_FETCHES_DIR", "fetches")
+
+ yield job
+
+
+# A registry of all functions decorated with run_job_using
+registry = {}
+
+
+def run_job_using(worker_implementation, run_using, schema=None, defaults=None):
+ """Register the decorated function as able to set up a task description for
+ jobs with the given worker implementation and `run.using` property. If
+ `schema` is given, the job's run field will be verified to match it.
+
+ The decorated function should have the signature `using_foo(config, job, taskdesc)`
+ and should modify the task description in-place. The skeleton of
+ the task description is already set up, but without a payload."""
+
+ def wrap(func):
+ for_run_using = registry.setdefault(run_using, {})
+ if worker_implementation in for_run_using:
+ raise Exception(
+ "run_job_using({!r}, {!r}) already exists: {!r}".format(
+ run_using,
+ worker_implementation,
+ for_run_using[worker_implementation],
+ )
+ )
+        for_run_using[worker_implementation] = (func, schema, defaults or {})
+ return func
+
+ return wrap
+
+
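+# For example, the trivial handler below registers the "always-optimized"
+# run-using; real handlers (run-task, mozharness, mach, ...) follow the same
+# pattern and fill in taskdesc["worker"] from job["run"].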
+@run_job_using(
+ "always-optimized", "always-optimized", Schema({"using": "always-optimized"})
+)
+def always_optimized(config, job, taskdesc):
+ pass
+
+
+def configure_taskdesc_for_run(config, job, taskdesc, worker_implementation):
+ """
+ Run the appropriate function for this job against the given task
+ description.
+
+ This will raise an appropriate error if no function exists, or if the job's
+ run is not valid according to the schema.
+ """
+ run_using = job["run"]["using"]
+ if run_using not in registry:
+ raise Exception(f"no functions for run.using {run_using!r}")
+
+ if worker_implementation not in registry[run_using]:
+ raise Exception(
+ "no functions for run.using {!r} on {!r}".format(
+ run_using, worker_implementation
+ )
+ )
+
+ func, schema, defaults = registry[run_using][worker_implementation]
+ for k, v in defaults.items():
+ job["run"].setdefault(k, v)
+
+ if schema:
+ validate_schema(
+ schema,
+ job["run"],
+ "In job.run using {!r}/{!r} for job {!r}:".format(
+ job["run"]["using"], worker_implementation, job["label"]
+ ),
+ )
+ func(config, job, taskdesc)
diff --git a/taskcluster/gecko_taskgraph/transforms/job/common.py b/taskcluster/gecko_taskgraph/transforms/job/common.py
new file mode 100644
index 0000000000..0c6289a6db
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/job/common.py
@@ -0,0 +1,269 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Common support for various job types. These functions are all named after the
+worker implementation they operate on, and take the same three parameters, for
+consistency.
+"""
+
+
+from taskgraph.util.keyed_by import evaluate_keyed_by
+from taskgraph.util.taskcluster import get_artifact_prefix
+
+SECRET_SCOPE = "secrets:get:project/releng/{trust_domain}/{kind}/level-{level}/{secret}"
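+# e.g., with trust_domain="gecko", kind="build", level="3" and
+# secret="gapi.data" (values illustrative), this expands to
+# "secrets:get:project/releng/gecko/build/level-3/gapi.data"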
+
+
+def add_cache(job, taskdesc, name, mount_point, skip_untrusted=False):
+ """Adds a cache based on the worker's implementation.
+
+ Args:
+ job (dict): Task's job description.
+ taskdesc (dict): Target task description to modify.
+ name (str): Name of the cache.
+ mount_point (path): Path on the host to mount the cache.
+ skip_untrusted (bool): Whether cache is used in untrusted environments
+ (default: False). Only applies to docker-worker.
+ """
+ if not job["run"].get("use-caches", True):
+ return
+
+ worker = job["worker"]
+
+ if worker["implementation"] == "docker-worker":
+ taskdesc["worker"].setdefault("caches", []).append(
+ {
+ "type": "persistent",
+ "name": name,
+ "mount-point": mount_point,
+ "skip-untrusted": skip_untrusted,
+ }
+ )
+
+ elif worker["implementation"] == "generic-worker":
+ taskdesc["worker"].setdefault("mounts", []).append(
+ {
+ "cache-name": name,
+ "directory": mount_point,
+ }
+ )
+
+ else:
+ # Caches not implemented
+ pass
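+
+# A typical call from a run-using handler (illustrative):
+#
+#   add_cache(job, taskdesc, "checkouts", "{workdir}/checkouts".format(**job["run"]))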
+
+
+def add_artifacts(config, job, taskdesc, path):
+ taskdesc["worker"].setdefault("artifacts", []).append(
+ {
+ "name": get_artifact_prefix(taskdesc),
+ "path": path,
+ "type": "directory",
+ }
+ )
+
+
+def docker_worker_add_artifacts(config, job, taskdesc):
+ """Adds an artifact directory to the task"""
+ path = "{workdir}/artifacts/".format(**job["run"])
+ taskdesc["worker"].setdefault("env", {})["UPLOAD_DIR"] = path
+ add_artifacts(config, job, taskdesc, path)
+
+
+def generic_worker_add_artifacts(config, job, taskdesc):
+ """Adds an artifact directory to the task"""
+ # The path is the location on disk; it doesn't necessarily
+ # mean the artifacts will be public or private; that is set via the name
+ # attribute in add_artifacts.
+ path = get_artifact_prefix(taskdesc)
+ taskdesc["worker"].setdefault("env", {})["UPLOAD_DIR"] = path
+ add_artifacts(config, job, taskdesc, path=path)
+
+
+def support_vcs_checkout(config, job, taskdesc, sparse=False):
+ """Update a job/task with parameters to enable a VCS checkout.
+
+ This can only be used with ``run-task`` tasks, as the cache name is
+ reserved for ``run-task`` tasks.
+ """
+ worker = job["worker"]
+ is_mac = worker["os"] == "macosx"
+ is_win = worker["os"] == "windows"
+    is_linux = worker["os"] in ("linux", "linux-bitbar")
+ is_docker = worker["implementation"] == "docker-worker"
+ assert is_mac or is_win or is_linux
+
+ if is_win:
+ checkoutdir = "./build"
+ geckodir = f"{checkoutdir}/src"
+ hgstore = "y:/hg-shared"
+ elif is_docker:
+ checkoutdir = "{workdir}/checkouts".format(**job["run"])
+ geckodir = f"{checkoutdir}/gecko"
+ hgstore = f"{checkoutdir}/hg-store"
+ else:
+ checkoutdir = "./checkouts"
+ geckodir = f"{checkoutdir}/gecko"
+ hgstore = f"{checkoutdir}/hg-shared"
+
+ cache_name = "checkouts"
+
+ # Sparse checkouts need their own cache because they can interfere
+ # with clients that aren't sparse aware.
+ if sparse:
+ cache_name += "-sparse"
+
+ # Workers using Mercurial >= 5.8 will enable revlog-compression-zstd, which
+ # workers using older versions can't understand, so they can't share cache.
+ # At the moment, only docker workers use the newer version.
+ if is_docker:
+ cache_name += "-hg58"
+
+ add_cache(job, taskdesc, cache_name, checkoutdir)
+
+ taskdesc["worker"].setdefault("env", {}).update(
+ {
+ "GECKO_BASE_REPOSITORY": config.params["base_repository"],
+ "GECKO_HEAD_REPOSITORY": config.params["head_repository"],
+ "GECKO_HEAD_REV": config.params["head_rev"],
+ "HG_STORE_PATH": hgstore,
+ }
+ )
+ taskdesc["worker"]["env"].setdefault("GECKO_PATH", geckodir)
+
+ if "comm_base_repository" in config.params:
+ taskdesc["worker"]["env"].update(
+ {
+ "COMM_BASE_REPOSITORY": config.params["comm_base_repository"],
+ "COMM_HEAD_REPOSITORY": config.params["comm_head_repository"],
+ "COMM_HEAD_REV": config.params["comm_head_rev"],
+ }
+ )
+ elif job["run"].get("comm-checkout", False):
+ raise Exception(
+ "Can't checkout from comm-* repository if not given a repository."
+ )
+
+ # Give task access to hgfingerprint secret so it can pin the certificate
+ # for hg.mozilla.org.
+ taskdesc["scopes"].append("secrets:get:project/taskcluster/gecko/hgfingerprint")
+ taskdesc["scopes"].append("secrets:get:project/taskcluster/gecko/hgmointernal")
+
+ # only some worker platforms have taskcluster-proxy enabled
+ if job["worker"]["implementation"] in ("docker-worker",):
+ taskdesc["worker"]["taskcluster-proxy"] = True
+
+
+def generic_worker_hg_commands(
+ base_repo, head_repo, head_rev, path, sparse_profile=None
+):
+ """Obtain commands needed to obtain a Mercurial checkout on generic-worker.
+
+    Returns two command strings: one performs the checkout, the other logs it.
+ """
+ args = [
+ r'"c:\Program Files\Mercurial\hg.exe"',
+ "robustcheckout",
+ "--sharebase",
+ r"y:\hg-shared",
+ "--purge",
+ "--upstream",
+ base_repo,
+ "--revision",
+ head_rev,
+ ]
+
+ if sparse_profile:
+ args.extend(["--config", "extensions.sparse="])
+ args.extend(["--sparseprofile", sparse_profile])
+
+ args.extend(
+ [
+ head_repo,
+ path,
+ ]
+ )
+
+ logging_args = [
+        # str, not bytes: bytes objects have no .format() in Python 3
+        ":: TinderboxPrint:<a href={source_repo}/rev/{revision} "
+        "title='Built from {repo_name} revision {revision}'>{revision}</a>"
+        "\n".format(
+ revision=head_rev, source_repo=head_repo, repo_name=head_repo.split("/")[-1]
+ ),
+ ]
+
+ return [" ".join(args), " ".join(logging_args)]
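+
+# Illustrative first returned string (wrapped here for readability):
+#
+#   "c:\Program Files\Mercurial\hg.exe" robustcheckout --sharebase y:\hg-shared
+#   --purge --upstream <base_repo> --revision <head_rev> <head_repo> <path>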
+
+
+def setup_secrets(config, job, taskdesc):
+ """Set up access to secrets via taskcluster-proxy. The value of
+ run['secrets'] should be a boolean or a list of secret names that
+ can be accessed."""
+ if not job["run"].get("secrets"):
+ return
+
+ taskdesc["worker"]["taskcluster-proxy"] = True
+ secrets = job["run"]["secrets"]
+ if secrets is True:
+ secrets = ["*"]
+ for secret in secrets:
+ taskdesc["scopes"].append(
+ SECRET_SCOPE.format(
+ trust_domain=config.graph_config["trust-domain"],
+ kind=job["treeherder"]["kind"],
+ level=config.params["level"],
+ secret=secret,
+ )
+ )
+
+
+def add_tooltool(config, job, taskdesc, internal=False):
+ """Give the task access to tooltool.
+
+ Enables the tooltool cache. Adds releng proxy. Configures scopes.
+
+ By default, only public tooltool access will be granted. Access to internal
+ tooltool can be enabled via ``internal=True``.
+
+ This can only be used with ``run-task`` tasks, as the cache name is
+ reserved for use with ``run-task``.
+ """
+
+ if job["worker"]["implementation"] in ("docker-worker",):
+ add_cache(
+ job,
+ taskdesc,
+ "tooltool-cache",
+ "{workdir}/tooltool-cache".format(**job["run"]),
+ )
+
+ taskdesc["worker"].setdefault("env", {}).update(
+ {
+ "TOOLTOOL_CACHE": "{workdir}/tooltool-cache".format(**job["run"]),
+ }
+ )
+ elif not internal:
+ return
+
+ taskdesc["worker"]["taskcluster-proxy"] = True
+ taskdesc["scopes"].extend(
+ [
+ "project:releng:services/tooltool/api/download/public",
+ ]
+ )
+
+ if internal:
+ taskdesc["scopes"].extend(
+ [
+ "project:releng:services/tooltool/api/download/internal",
+ ]
+ )
+
+
+def get_expiration(config, policy="default"):
+ expires = evaluate_keyed_by(
+ config.graph_config["expiration-policy"],
+ "artifact expiration",
+ {"project": config.params["project"]},
+ )[policy]
+ return expires
diff --git a/taskcluster/gecko_taskgraph/transforms/job/distro_package.py b/taskcluster/gecko_taskgraph/transforms/job/distro_package.py
new file mode 100644
index 0000000000..c9d0d1d518
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/job/distro_package.py
@@ -0,0 +1,240 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Support for building Debian and Ubuntu packages in dedicated docker images
+"""
+
+
+import os
+import re
+
+import taskgraph
+from taskgraph.util.schema import Schema
+from taskgraph.util.taskcluster import get_root_url
+from voluptuous import Any, Optional, Required
+
+from gecko_taskgraph import GECKO
+from gecko_taskgraph.transforms.job import run_job_using
+from gecko_taskgraph.transforms.job.common import add_artifacts
+from gecko_taskgraph.util.hash import hash_path
+
+DSC_PACKAGE_RE = re.compile(".*(?=_)")
+SOURCE_PACKAGE_RE = re.compile(r".*(?=[-_]\d)")
+
+source_definition = {
+ Required("url"): str,
+ Required("sha256"): str,
+}
+
+common_schema = Schema(
+ {
+        # URL/SHA256 of a source file to build, which can either be a Debian
+        # source control file (.dsc) or a tarball.
+ Required(Any("dsc", "tarball")): source_definition,
+ # Package name. Normally derived from the source control or tarball file
+ # name. Use in case the name doesn't match DSC_PACKAGE_RE or
+ # SOURCE_PACKAGE_RE.
+ Optional("name"): str,
+ # Patch to apply to the extracted source.
+ Optional("patch"): str,
+ # Command to run before dpkg-buildpackage.
+ Optional("pre-build-command"): str,
+ # Architecture to build the package for.
+ Optional("arch"): str,
+ # List of package tasks to get build dependencies from.
+ Optional("packages"): [str],
+ # What resolver to use to install build dependencies. The default
+ # (apt-get) is good in most cases, but in subtle cases involving
+ # a *-backports archive, its solver might not be able to find a
+ # solution that satisfies the build dependencies.
+ Optional("resolver"): Any("apt-get", "aptitude"),
+ # Base work directory used to set up the task.
+ Required("workdir"): str,
+ }
+)
+
+debian_schema = common_schema.extend(
+ {
+ Required("using"): "debian-package",
+ # Debian distribution
+ Required("dist"): str,
+ }
+)
+
+ubuntu_schema = common_schema.extend(
+ {
+ Required("using"): "ubuntu-package",
+ # Ubuntu distribution
+ Required("dist"): str,
+ }
+)
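+
+# An illustrative "run" section for a debian-package task (URL and hash
+# hypothetical):
+#
+#   run:
+#     using: debian-package
+#     dist: bookworm
+#     dsc:
+#       url: https://example.debian.org/pool/main/e/example/example_1.0-1.dsc
+#       sha256: <sha256 of the .dsc file>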
+
+
+def common_package(config, job, taskdesc, distro, version):
+ run = job["run"]
+
+ name = taskdesc["label"].replace(f"{config.kind}-", "", 1)
+
+ arch = run.get("arch", "amd64")
+
+ worker = taskdesc["worker"]
+ worker.setdefault("artifacts", [])
+
+ image = "%s%d" % (distro, version)
+ if arch != "amd64":
+ image += "-" + arch
+ image += "-packages"
+ worker["docker-image"] = {"in-tree": image}
+
+ add_artifacts(config, job, taskdesc, path="/tmp/artifacts")
+
+ env = worker.setdefault("env", {})
+ env["DEBFULLNAME"] = "Mozilla build team"
+ env["DEBEMAIL"] = "dev-builds@lists.mozilla.org"
+
+ if "dsc" in run:
+ src = run["dsc"]
+ unpack = "dpkg-source -x {src_file} {package}"
+ package_re = DSC_PACKAGE_RE
+ elif "tarball" in run:
+ src = run["tarball"]
+ unpack = (
+ "mkdir {package} && "
+ "tar -C {package} -axf {src_file} --strip-components=1"
+ )
+ package_re = SOURCE_PACKAGE_RE
+ else:
+ raise RuntimeError("Unreachable")
+ src_url = src["url"]
+ src_file = os.path.basename(src_url)
+ src_sha256 = src["sha256"]
+ package = run.get("name")
+ if not package:
+ package = package_re.match(src_file).group(0)
+ unpack = unpack.format(src_file=src_file, package=package)
+
+ resolver = run.get("resolver", "apt-get")
+ if resolver == "apt-get":
+ resolver = "apt-get -yyq --no-install-recommends"
+ elif resolver == "aptitude":
+ resolver = (
+ "aptitude -y --without-recommends -o "
+ "Aptitude::ProblemResolver::Hints::KeepBuildDeps="
+ '"reject {}-build-deps :UNINST"'
+ ).format(package)
+ else:
+ raise RuntimeError("Unreachable")
+
+ adjust = ""
+ if "patch" in run:
+ # We don't use robustcheckout or run-task to get a checkout. So for
+ # this one file we'd need from a checkout, download it.
+ env["PATCH_URL"] = config.params.file_url(
+ "build/debian-packages/{patch}".format(patch=run["patch"]),
+ )
+ adjust += "curl -sL $PATCH_URL | patch -p1 && "
+ if "pre-build-command" in run:
+ adjust += run["pre-build-command"] + " && "
+ if "tarball" in run:
+ adjust += "mv ../{src_file} ../{package}_{ver}.orig.tar.gz && ".format(
+ src_file=src_file,
+ package=package,
+ ver="$(dpkg-parsechangelog | awk '$1==\"Version:\"{print $2}' | cut -f 1 -d -)",
+ )
+ if "patch" not in run and "pre-build-command" not in run:
+ adjust += (
+ 'debchange -l ".{prefix}moz" --distribution "{dist}"'
+ ' "Mozilla backport for {dist}." < /dev/null && '
+ ).format(
+ prefix=name.split("-", 1)[0],
+ dist=run["dist"],
+ )
+
+ worker["command"] = [
+ "sh",
+ "-x",
+ "-c",
+ # Add sources for packages coming from other package tasks.
+ "/usr/local/sbin/setup_packages.sh {root_url} $PACKAGES && "
+ "apt-get update && "
+ # Upgrade packages that might have new versions in package tasks.
+        "apt-get dist-upgrade && "
+        "cd /tmp && "
+ # Get, validate and extract the package source.
+ "(dget -d -u {src_url} || exit 100) && "
+ 'echo "{src_sha256} {src_file}" | sha256sum -c && '
+ "{unpack} && "
+ "cd {package} && "
+ # Optionally apply patch and/or pre-build command.
+ "{adjust}"
+ # Install the necessary build dependencies.
+ "(cd ..; mk-build-deps -i -r {package}/debian/control -t '{resolver}' || exit 100) && "
+ # Build the package
+ 'DEB_BUILD_OPTIONS="parallel=$(nproc) nocheck" dpkg-buildpackage -sa && '
+ # Copy the artifacts
+ "mkdir -p {artifacts}/apt && "
+ "dcmd cp ../{package}_*.changes {artifacts}/apt/ && "
+ "cd {artifacts} && "
+ # Make the artifacts directory usable as an APT repository.
+ "apt-ftparchive sources apt | gzip -c9 > apt/Sources.gz && "
+ "apt-ftparchive packages apt | gzip -c9 > apt/Packages.gz".format(
+ root_url=get_root_url(False),
+ package=package,
+ src_url=src_url,
+ src_file=src_file,
+ src_sha256=src_sha256,
+ unpack=unpack,
+ adjust=adjust,
+ artifacts="/tmp/artifacts",
+ resolver=resolver,
+ ),
+ ]
+
+ if run.get("packages"):
+ env = worker.setdefault("env", {})
+ env["PACKAGES"] = {
+ "task-reference": " ".join(f"<{p}>" for p in run["packages"])
+ }
+ deps = taskdesc.setdefault("dependencies", {})
+ for p in run["packages"]:
+ deps[p] = f"packages-{p}"
+
+ # Use the command generated above as the base for the index hash.
+ # We rely on it not varying depending on the head_repository or head_rev.
+ digest_data = list(worker["command"])
+ if "patch" in run:
+ digest_data.append(
+ hash_path(os.path.join(GECKO, "build", "debian-packages", run["patch"]))
+ )
+
+ if not taskgraph.fast:
+ taskdesc["cache"] = {
+ "type": "packages.v1",
+ "name": name,
+ "digest-data": digest_data,
+ }
+
+
+@run_job_using("docker-worker", "debian-package", schema=debian_schema)
+def docker_worker_debian_package(config, job, taskdesc):
+ run = job["run"]
+ version = {
+ "wheezy": 7,
+ "jessie": 8,
+ "stretch": 9,
+ "buster": 10,
+ "bullseye": 11,
+ "bookworm": 12,
+ }[run["dist"]]
+ common_package(config, job, taskdesc, "debian", version)
+
+
+@run_job_using("docker-worker", "ubuntu-package", schema=ubuntu_schema)
+def docker_worker_ubuntu_package(config, job, taskdesc):
+ run = job["run"]
+ version = {
+ "bionic": 1804,
+ "focal": 2004,
+ "jammy": 2204,
+ }[run["dist"]]
+ common_package(config, job, taskdesc, "ubuntu", version)
diff --git a/taskcluster/gecko_taskgraph/transforms/job/hazard.py b/taskcluster/gecko_taskgraph/transforms/job/hazard.py
new file mode 100644
index 0000000000..af0e8616e0
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/job/hazard.py
@@ -0,0 +1,66 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Support for running hazard jobs via dedicated scripts
+"""
+
+
+from taskgraph.util.schema import Schema
+from voluptuous import Any, Optional, Required
+
+from gecko_taskgraph.transforms.job import configure_taskdesc_for_run, run_job_using
+from gecko_taskgraph.transforms.job.common import (
+ add_tooltool,
+ docker_worker_add_artifacts,
+ setup_secrets,
+)
+
+haz_run_schema = Schema(
+ {
+ Required("using"): "hazard",
+ # The command to run within the task image (passed through to the worker)
+ Required("command"): str,
+ # The mozconfig to use; default in the script is used if omitted
+ Optional("mozconfig"): str,
+ # The set of secret names to which the task has access; these are prefixed
+ # with `project/releng/gecko/{treeherder.kind}/level-{level}/`. Setting
+ # this will enable any worker features required and set the task's scopes
+ # appropriately. `true` here means ['*'], all secrets. Not supported on
+ # Windows
+ Optional("secrets"): Any(bool, [str]),
+ # Base work directory used to set up the task.
+ Optional("workdir"): str,
+ }
+)
+
+
+@run_job_using("docker-worker", "hazard", schema=haz_run_schema)
+def docker_worker_hazard(config, job, taskdesc):
+ run = job["run"]
+
+ worker = taskdesc["worker"] = job["worker"]
+ worker.setdefault("artifacts", [])
+
+ docker_worker_add_artifacts(config, job, taskdesc)
+ worker.setdefault("required-volumes", []).append(
+ "{workdir}/workspace".format(**run)
+ )
+ add_tooltool(config, job, taskdesc)
+ setup_secrets(config, job, taskdesc)
+
+ env = worker["env"]
+ env.update(
+ {
+ "MOZ_BUILD_DATE": config.params["moz_build_date"],
+ "MOZ_SCM_LEVEL": config.params["level"],
+ }
+ )
+
+ # script parameters
+ if run.get("mozconfig"):
+ env["MOZCONFIG"] = run.pop("mozconfig")
+
+ run["using"] = "run-task"
+ run["cwd"] = run["workdir"]
+ configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
diff --git a/taskcluster/gecko_taskgraph/transforms/job/mach.py b/taskcluster/gecko_taskgraph/transforms/job/mach.py
new file mode 100644
index 0000000000..775213f8fe
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/job/mach.py
@@ -0,0 +1,80 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Support for running mach tasks (via run-task)
+"""
+
+from taskgraph.util.schema import Schema, taskref_or_string
+from voluptuous import Any, Optional, Required
+
+from gecko_taskgraph.transforms.job import configure_taskdesc_for_run, run_job_using
+
+mach_schema = Schema(
+ {
+ Required("using"): "mach",
+ # The mach command (omitting `./mach`) to run
+ Required("mach"): taskref_or_string,
+ # The version of Python to run with. Either an absolute path to the binary
+        # The version of Python to run with. Either an absolute path to the binary
+        # on the worker, or a version identifier (e.g. python2.7 or 3.8). There is no
+ Optional("python-version"): Any(str, int, float),
+ # The sparse checkout profile to use. Value is the filename relative to the
+ # directory where sparse profiles are defined (build/sparse-profiles/).
+ Optional("sparse-profile"): Any(str, None),
+ # if true, perform a checkout of a comm-central based branch inside the
+ # gecko checkout
+ Required("comm-checkout"): bool,
+ # Base work directory used to set up the task.
+ Optional("workdir"): str,
+ }
+)
+
+
+defaults = {
+ "comm-checkout": False,
+}
+
+
+@run_job_using("docker-worker", "mach", schema=mach_schema, defaults=defaults)
+@run_job_using("generic-worker", "mach", schema=mach_schema, defaults=defaults)
+def configure_mach(config, job, taskdesc):
+ run = job["run"]
+ worker = job["worker"]
+
+ additional_prefix = []
+ if worker["os"] == "macosx":
+ additional_prefix = ["LC_ALL=en_US.UTF-8", "LANG=en_US.UTF-8"]
+
+ python = run.get("python-version")
+ if python:
+ del run["python-version"]
+
+ if taskdesc.get("use-system-python"):
+ if worker["os"] == "macosx" and python == 3:
+ python = "/usr/local/bin/python3"
+
+ python = str(python)
+ try:
+ float(python)
+ python = "python" + python
+ except ValueError:
+ pass
+
+ additional_prefix.append(python)
+
+ command_prefix = " ".join(additional_prefix + ["./mach "])
+
+ mach = run["mach"]
+ if isinstance(mach, dict):
+ ref, pattern = next(iter(mach.items()))
+ command = {ref: command_prefix + pattern}
+ else:
+ command = command_prefix + mach
+
+ # defer to the run_task implementation
+ run["command"] = command
+ run["cwd"] = "{checkout}"
+ run["using"] = "run-task"
+ del run["mach"]
+ configure_taskdesc_for_run(config, job, taskdesc, job["worker"]["implementation"])
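+
+# An illustrative "run" section this handler accepts (command hypothetical):
+#
+#   run:
+#     using: mach
+#     mach: python-test --subsuite example
+#     python-version: 3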
diff --git a/taskcluster/gecko_taskgraph/transforms/job/mozharness.py b/taskcluster/gecko_taskgraph/transforms/job/mozharness.py
new file mode 100644
index 0000000000..4d7293ec51
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/job/mozharness.py
@@ -0,0 +1,366 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+
+Support for running jobs via mozharness. Ideally, most stuff gets run this
+way, and certainly anything using mozharness should use this approach.
+
+"""
+
+import json
+from textwrap import dedent
+
+from mozpack import path as mozpath
+from taskgraph.util.schema import Schema
+from voluptuous import Any, Optional, Required
+from voluptuous.validators import Match
+
+from gecko_taskgraph.transforms.job import configure_taskdesc_for_run, run_job_using
+from gecko_taskgraph.transforms.job.common import (
+ docker_worker_add_artifacts,
+ generic_worker_add_artifacts,
+ get_expiration,
+ setup_secrets,
+)
+from gecko_taskgraph.transforms.task import get_branch_repo, get_branch_rev
+from gecko_taskgraph.util.attributes import is_try
+
+mozharness_run_schema = Schema(
+ {
+ Required("using"): "mozharness",
+ # the mozharness script used to run this task, relative to the testing/
+ # directory and using forward slashes even on Windows
+ Required("script"): str,
+ # Additional paths to look for mozharness configs in. These should be
+ # relative to the base of the source checkout
+ Optional("config-paths"): [str],
+ # the config files required for the task, relative to
+ # testing/mozharness/configs or one of the paths specified in
+ # `config-paths` and using forward slashes even on Windows
+ Required("config"): [str],
+ # any additional actions to pass to the mozharness command
+ Optional("actions"): [
+        Match("^[a-z0-9-]+$", "actions must be `-` separated alphanumeric strings")
+ ],
+ # any additional options (without leading --) to be passed to mozharness
+ Optional("options"): [
+ Match(
+ "^[a-z0-9-]+(=[^ ]+)?$",
+                "options must be `-` separated alphanumeric strings (with optional argument)",
+ )
+ ],
+ # --custom-build-variant-cfg value
+ Optional("custom-build-variant-cfg"): str,
+ # Extra configuration options to pass to mozharness.
+ Optional("extra-config"): dict,
+ # If not false, tooltool downloads will be enabled via relengAPIProxy
+ # for either just public files, or all files. Not supported on Windows
+ Required("tooltool-downloads"): Any(
+ False,
+ "public",
+ "internal",
+ ),
+ # The set of secret names to which the task has access; these are prefixed
+ # with `project/releng/gecko/{treeherder.kind}/level-{level}/`. Setting
+ # this will enable any worker features required and set the task's scopes
+ # appropriately. `true` here means ['*'], all secrets. Not supported on
+ # Windows
+ Required("secrets"): Any(bool, [str]),
+ # If true, taskcluster proxy will be enabled; note that it may also be enabled
+ # automatically e.g., for secrets support. Not supported on Windows.
+ Required("taskcluster-proxy"): bool,
+ # If false, indicate that builds should skip producing artifacts. Not
+ # supported on Windows.
+ Required("keep-artifacts"): bool,
+ # If specified, use the in-tree job script specified.
+ Optional("job-script"): str,
+ Required("requires-signed-builds"): bool,
+ # Whether or not to use caches.
+ Optional("use-caches"): bool,
+ # If false, don't set MOZ_SIMPLE_PACKAGE_NAME.
+ # Can only be disabled on Windows.
+ Required("use-simple-package"): bool,
+ # If false, don't pass --branch to the mozharness script.
+ # Can only be disabled on Windows.
+ Required("use-magic-mh-args"): bool,
+ # if true, perform a checkout of a comm-central based branch inside the
+ # gecko checkout
+ Required("comm-checkout"): bool,
+ # Base work directory used to set up the task.
+ Optional("workdir"): str,
+ Optional("run-as-root"): bool,
+ }
+)
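+
+# An illustrative `run` stanza using this schema (values are examples, not
+# taken from any particular in-tree kind):
+#
+#   run:
+#       using: mozharness
+#       script: fx_desktop_build.py
+#       config:
+#           - builds/releng_base_firefox.py
+#       actions: [build]
+#       tooltool-downloads: public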
+
+
+mozharness_defaults = {
+ "tooltool-downloads": False,
+ "secrets": False,
+ "taskcluster-proxy": False,
+ "keep-artifacts": True,
+ "requires-signed-builds": False,
+ "use-simple-package": True,
+ "use-magic-mh-args": True,
+ "comm-checkout": False,
+ "run-as-root": False,
+}
+
+
+@run_job_using(
+ "docker-worker",
+ "mozharness",
+ schema=mozharness_run_schema,
+ defaults=mozharness_defaults,
+)
+def mozharness_on_docker_worker_setup(config, job, taskdesc):
+ run = job["run"]
+
+ worker = taskdesc["worker"] = job["worker"]
+
+ if not run.pop("use-simple-package", None):
+ raise NotImplementedError(
+ "Simple packaging cannot be disabled via"
+ "'use-simple-package' on docker-workers"
+ )
+ if not run.pop("use-magic-mh-args", None):
+ raise NotImplementedError(
+ "Cannot disabled mh magic arg passing via"
+ "'use-magic-mh-args' on docker-workers"
+ )
+
+ # Running via mozharness assumes an image that contains build.sh:
+ # by default, debian12-amd64-build, but it could be another image (like
+ # android-build).
+ worker.setdefault("docker-image", {"in-tree": "debian12-amd64-build"})
+
+ worker.setdefault("artifacts", []).append(
+ {
+ "name": "public/logs",
+ "path": "{workdir}/logs/".format(**run),
+ "type": "directory",
+ "expires-after": get_expiration(config, "medium"),
+ }
+ )
+ worker["taskcluster-proxy"] = run.pop("taskcluster-proxy", None)
+ docker_worker_add_artifacts(config, job, taskdesc)
+
+ env = worker.setdefault("env", {})
+ env.update(
+ {
+ "WORKSPACE": "{workdir}/workspace".format(**run),
+ "MOZHARNESS_CONFIG": " ".join(run.pop("config")),
+ "MOZHARNESS_SCRIPT": run.pop("script"),
+ "MH_BRANCH": config.params["project"],
+ "MOZ_SOURCE_CHANGESET": get_branch_rev(config),
+ "MOZ_SOURCE_REPO": get_branch_repo(config),
+ "MH_BUILD_POOL": "taskcluster",
+ "MOZ_BUILD_DATE": config.params["moz_build_date"],
+ "MOZ_SCM_LEVEL": config.params["level"],
+ "PYTHONUNBUFFERED": "1",
+ }
+ )
+
+ worker.setdefault("required-volumes", []).append(env["WORKSPACE"])
+
+ if "actions" in run:
+ env["MOZHARNESS_ACTIONS"] = " ".join(run.pop("actions"))
+
+ if "options" in run:
+ env["MOZHARNESS_OPTIONS"] = " ".join(run.pop("options"))
+
+ if "config-paths" in run:
+ env["MOZHARNESS_CONFIG_PATHS"] = " ".join(run.pop("config-paths"))
+
+ if "custom-build-variant-cfg" in run:
+ env["MH_CUSTOM_BUILD_VARIANT_CFG"] = run.pop("custom-build-variant-cfg")
+
+ extra_config = run.pop("extra-config", {})
+ extra_config["objdir"] = "obj-build"
+ env["EXTRA_MOZHARNESS_CONFIG"] = json.dumps(extra_config, sort_keys=True)
+
+ if "job-script" in run:
+ env["JOB_SCRIPT"] = run["job-script"]
+
+ if is_try(config.params):
+ env["TRY_COMMIT_MSG"] = config.params["message"]
+
+ # if we're not keeping artifacts, set some env variables to empty values
+ # that will cause the build process to skip copying the results to the
+ # artifacts directory. This will have no effect for operations that are
+ # not builds.
+ if not run.pop("keep-artifacts"):
+ env["DIST_TARGET_UPLOADS"] = ""
+ env["DIST_UPLOADS"] = ""
+
+ # Retry if mozharness returns TBPL_RETRY
+ worker["retry-exit-status"] = [4]
+
+ setup_secrets(config, job, taskdesc)
+
+ run["using"] = "run-task"
+ run["command"] = mozpath.join(
+ "${GECKO_PATH}",
+ run.pop("job-script", "taskcluster/scripts/builder/build-linux.sh"),
+ )
+ run.pop("secrets")
+ run.pop("requires-signed-builds")
+
+ configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
+
+
+@run_job_using(
+ "generic-worker",
+ "mozharness",
+ schema=mozharness_run_schema,
+ defaults=mozharness_defaults,
+)
+def mozharness_on_generic_worker(config, job, taskdesc):
+ assert job["worker"]["os"] in (
+ "windows",
+ "macosx",
+ ), "only supports windows and macOS right now: {}".format(job["label"])
+
+ run = job["run"]
+
+ # fail if invalid run options are included
+ invalid = []
+ if not run.pop("keep-artifacts", True):
+ invalid.append("keep-artifacts")
+ if invalid:
+ raise Exception(
+ "Jobs run using mozharness on Windows do not support properties "
+ + ", ".join(invalid)
+ )
+
+ worker = taskdesc["worker"] = job["worker"]
+
+ worker["taskcluster-proxy"] = run.pop("taskcluster-proxy", None)
+
+ setup_secrets(config, job, taskdesc)
+
+ taskdesc["worker"].setdefault("artifacts", []).append(
+ {
+ "name": "public/logs",
+ "path": "logs",
+ "type": "directory",
+ "expires-after": get_expiration(config, "medium"),
+ }
+ )
+
+ if not worker.get("skip-artifacts", False):
+ generic_worker_add_artifacts(config, job, taskdesc)
+
+ env = worker.setdefault("env", {})
+ env.update(
+ {
+ "MOZ_BUILD_DATE": config.params["moz_build_date"],
+ "MOZ_SCM_LEVEL": config.params["level"],
+ "MH_BRANCH": config.params["project"],
+ "MOZ_SOURCE_CHANGESET": get_branch_rev(config),
+ "MOZ_SOURCE_REPO": get_branch_repo(config),
+ }
+ )
+ if run.pop("use-simple-package"):
+ env.update({"MOZ_SIMPLE_PACKAGE_NAME": "target"})
+
+ extra_config = run.pop("extra-config", {})
+ extra_config["objdir"] = "obj-build"
+ env["EXTRA_MOZHARNESS_CONFIG"] = json.dumps(extra_config, sort_keys=True)
+
+ # The Windows generic worker uses batch files to pass environment variables
+ # to commands. Setting a variable to empty in a batch file unsets it, so if
+ # there is no commit message, pass a placeholder value in `TRY_COMMIT_MSG`
+ # so that mozharness doesn't try to find the commit message on its own.
+ if is_try(config.params):
+ env["TRY_COMMIT_MSG"] = config.params["message"] or "no commit message"
+
+ if not job["attributes"]["build_platform"].startswith(("win", "macosx")):
+ raise Exception(
+ "Task generation for mozharness build jobs currently only supported on "
+ "Windows and macOS"
+ )
+
+ mh_command = []
+ if job["worker"]["os"] == "windows":
+ system_python_dir = "c:/mozilla-build/python3/"
+ gecko_path = "%GECKO_PATH%"
+ else:
+ system_python_dir = ""
+ gecko_path = "$GECKO_PATH"
+
+ if run.get("use-system-python", True):
+ python_bindir = system_python_dir
+ else:
+ # $MOZ_PYTHON_HOME is going to be substituted in run-task, when we
+ # know the actual MOZ_PYTHON_HOME value.
+ is_windows = job["worker"]["os"] == "windows"
+ if is_windows:
+ python_bindir = "%MOZ_PYTHON_HOME%/"
+ else:
+ python_bindir = "${MOZ_PYTHON_HOME}/bin/"
+
+ mh_command = ["{}python3".format(python_bindir)]
+
+ mh_command += [
+ f"{gecko_path}/mach",
+ "python",
+ "{}/testing/{}".format(gecko_path, run.pop("script")),
+ ]
+
+ for path in run.pop("config-paths", []):
+ mh_command.append(f"--extra-config-path {gecko_path}/{path}")
+
+ for cfg in run.pop("config"):
+ mh_command.extend(("--config", cfg))
+ if run.pop("use-magic-mh-args"):
+ mh_command.extend(("--branch", config.params["project"]))
+ if job["worker"]["os"] == "windows":
+ mh_command.extend(("--work-dir", r"%cd:Z:=z:%\workspace"))
+ for action in run.pop("actions", []):
+ mh_command.append("--" + action)
+
+ for option in run.pop("options", []):
+ mh_command.append("--" + option)
+ if run.get("custom-build-variant-cfg"):
+ mh_command.append("--custom-build-variant")
+ mh_command.append(run.pop("custom-build-variant-cfg"))
+
+ if job["worker"]["os"] == "macosx":
+ # Ideally, we'd use shellutil.quote, but that would single-quote
+ # $GECKO_PATH, which would defeat having the variable in the command
+ # in the first place, as it wouldn't be expanded.
+ # In practice, arguments are expected not to contain characters that
+ # would require quoting.
+ mh_command = " ".join(mh_command)
+
+ run["using"] = "run-task"
+ run["command"] = mh_command
+ run.pop("secrets")
+ run.pop("requires-signed-builds")
+ run.pop("job-script", None)
+ configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
+
+ # Everything past this point is Windows-specific.
+ if job["worker"]["os"] == "macosx":
+ return
+
+ if taskdesc.get("use-sccache"):
+ worker["command"] = (
+ [
+ # Make the comment part of the first command, as it will help users to
+ # understand what is going on, and why these steps are implemented.
+ dedent(
+ """\
+ :: sccache currently uses the full compiler commandline as input to the
+ :: cache hash key, so create a symlink to the task dir and build from
+ :: the symlink dir to get consistent paths.
+ if exist z:\\build rmdir z:\\build"""
+ ),
+ r"mklink /d z:\build %cd%",
+ # Grant delete permission on the link to everyone.
+ r"icacls z:\build /grant *S-1-1-0:D /L",
+ r"cd /d z:\build",
+ ]
+ + worker["command"]
+ )
diff --git a/taskcluster/gecko_taskgraph/transforms/job/mozharness_test.py b/taskcluster/gecko_taskgraph/transforms/job/mozharness_test.py
new file mode 100644
index 0000000000..eb4aea609f
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/job/mozharness_test.py
@@ -0,0 +1,477 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import json
+import os
+import re
+
+from taskgraph.util.schema import Schema
+from taskgraph.util.taskcluster import get_artifact_path, get_artifact_url
+from voluptuous import Extra, Optional, Required
+
+from gecko_taskgraph.transforms.job import configure_taskdesc_for_run, run_job_using
+from gecko_taskgraph.transforms.job.common import get_expiration, support_vcs_checkout
+from gecko_taskgraph.transforms.test import normpath, test_description_schema
+from gecko_taskgraph.util.attributes import is_try
+
+VARIANTS = [
+ "shippable",
+ "shippable-qr",
+ "shippable-lite",
+ "shippable-lite-qr",
+ "devedition",
+ "pgo",
+ "asan",
+ "stylo",
+ "qr",
+ "ccov",
+]
+
+
+def get_variant(test_platform):
+ for v in VARIANTS:
+ if f"-{v}/" in test_platform:
+ return v
+ return ""
+
+
+mozharness_test_run_schema = Schema(
+ {
+ Required("using"): "mozharness-test",
+ Required("test"): {
+ Required("test-platform"): str,
+ Required("mozharness"): test_description_schema["mozharness"],
+ Required("docker-image"): test_description_schema["docker-image"],
+ Required("loopback-video"): test_description_schema["loopback-video"],
+ Required("loopback-audio"): test_description_schema["loopback-audio"],
+ Required("max-run-time"): test_description_schema["max-run-time"],
+ Optional("retry-exit-status"): test_description_schema["retry-exit-status"],
+ Extra: object,
+ },
+ # Base work directory used to set up the task.
+ Optional("workdir"): str,
+ }
+)
+
+
+def test_packages_url(taskdesc):
+ """Account for different platforms that name their test packages differently"""
+ artifact_url = get_artifact_url(
+ "<build>", get_artifact_path(taskdesc, "target.test_packages.json")
+ )
+ # for android shippable we need to add 'en-US' to the artifact url
+ test = taskdesc["run"]["test"]
+ if "android" in test["test-platform"] and (
+ get_variant(test["test-platform"])
+ in ("shippable", "shippable-qr", "shippable-lite", "shippable-lite-qr")
+ ):
+ head, tail = os.path.split(artifact_url)
+ artifact_url = os.path.join(head, "en-US", tail)
+ return artifact_url
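+# For example (illustrative prefixes): on desktop platforms this resolves to
+# .../<build>/artifacts/public/build/target.test_packages.json, while for an
+# android shippable platform the en-US component is inserted, giving
+# .../<build>/artifacts/public/build/en-US/target.test_packages.json.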
+
+
+def installer_url(taskdesc):
+ test = taskdesc["run"]["test"]
+ mozharness = test["mozharness"]
+
+ if "installer-url" in mozharness:
+ installer_url = mozharness["installer-url"]
+ else:
+ upstream_task = (
+ "<build-signing>" if mozharness["requires-signed-builds"] else "<build>"
+ )
+ installer_url = get_artifact_url(
+ upstream_task, mozharness["build-artifact-name"]
+ )
+
+ return installer_url
+
+
+@run_job_using("docker-worker", "mozharness-test", schema=mozharness_test_run_schema)
+def mozharness_test_on_docker(config, job, taskdesc):
+ run = job["run"]
+ test = taskdesc["run"]["test"]
+ mozharness = test["mozharness"]
+ worker = taskdesc["worker"] = job["worker"]
+
+ # apply some defaults
+ worker["docker-image"] = test["docker-image"]
+ worker["allow-ptrace"] = True # required for all tests, for crashreporter
+ worker["loopback-video"] = test["loopback-video"]
+ worker["loopback-audio"] = test["loopback-audio"]
+ worker["max-run-time"] = test["max-run-time"]
+ worker["retry-exit-status"] = test["retry-exit-status"]
+ if "android-em-7.0-x86" in test["test-platform"]:
+ worker["privileged"] = True
+
+ artifacts = [
+ # (artifact name prefix, in-image path)
+ ("public/logs", "{workdir}/workspace/logs/".format(**run)),
+ ("public/test", "{workdir}/artifacts/".format(**run)),
+ (
+ "public/test_info",
+ "{workdir}/workspace/build/blobber_upload_dir/".format(**run),
+ ),
+ ]
+
+ installer = installer_url(taskdesc)
+
+ mozharness_url = get_artifact_url(
+ "<build>", get_artifact_path(taskdesc, "mozharness.zip")
+ )
+
+ worker.setdefault("artifacts", [])
+ worker["artifacts"].extend(
+ [
+ {
+ "name": prefix,
+ "path": os.path.join("{workdir}/workspace".format(**run), path),
+ "type": "directory",
+ "expires-after": get_expiration(config, "default"),
+ }
+ for (prefix, path) in artifacts
+ ]
+ )
+
+ env = worker.setdefault("env", {})
+ env.update(
+ {
+ "MOZHARNESS_CONFIG": " ".join(mozharness["config"]),
+ "MOZHARNESS_SCRIPT": mozharness["script"],
+ "MOZILLA_BUILD_URL": {"task-reference": installer},
+ "NEED_PULSEAUDIO": "true",
+ "NEED_WINDOW_MANAGER": "true",
+ "ENABLE_E10S": str(bool(test.get("e10s"))).lower(),
+ "WORKING_DIR": "/builds/worker",
+ }
+ )
+
+ env["PYTHON"] = "python3"
+
+ # Legacy linux64 tests rely on compiz.
+ if test.get("docker-image", {}).get("in-tree") == "desktop1604-test":
+ env.update({"NEED_COMPIZ": "true"})
+
+ # Bug 1602701/1601828 - use compiz on ubuntu1804 due to GTK asynchrony
+ # when manipulating windows.
+ if test.get("docker-image", {}).get("in-tree") == "ubuntu1804-test":
+ if "wdspec" in job["run"]["test"]["suite"] or (
+ "marionette" in job["run"]["test"]["suite"]
+ and "headless" not in job["label"]
+ ):
+ env.update({"NEED_COMPIZ": "true"})
+
+ # Set MOZ_ENABLE_WAYLAND env variables to enable Wayland backend.
+ if "wayland" in job["label"]:
+ env["MOZ_ENABLE_WAYLAND"] = "1"
+
+ if mozharness.get("mochitest-flavor"):
+ env["MOCHITEST_FLAVOR"] = mozharness["mochitest-flavor"]
+
+ if mozharness["set-moz-node-path"]:
+ env["MOZ_NODE_PATH"] = "/usr/local/bin/node"
+
+ if "actions" in mozharness:
+ env["MOZHARNESS_ACTIONS"] = " ".join(mozharness["actions"])
+
+ if is_try(config.params):
+ env["TRY_COMMIT_MSG"] = config.params["message"]
+
+ # handle some of the mozharness-specific options
+ if test["reboot"]:
+ raise Exception(
+ "reboot: {} not supported on generic-worker".format(test["reboot"])
+ )
+
+ # Support vcs checkouts regardless of whether the task runs from
+ # source or not in case it is needed on an interactive loaner.
+ support_vcs_checkout(config, job, taskdesc)
+
+ # If we have a source checkout, run mozharness from it instead of
+ # downloading a zip file with the same content.
+ if test["checkout"]:
+ env["MOZHARNESS_PATH"] = "{workdir}/checkouts/gecko/testing/mozharness".format(
+ **run
+ )
+ else:
+ env["MOZHARNESS_URL"] = {"task-reference": mozharness_url}
+
+ extra_config = {
+ "installer_url": installer,
+ "test_packages_url": test_packages_url(taskdesc),
+ }
+ env["EXTRA_MOZHARNESS_CONFIG"] = {
+ "task-reference": json.dumps(extra_config, sort_keys=True)
+ }
+
+ # Bug 1634554 - pass in decision task artifact URL to mozharness for WPT.
+ # Bug 1645974 - test-verify-wpt and test-coverage-wpt need artifact URL.
+ if "web-platform-tests" in test["suite"] or re.match(
+ "test-(coverage|verify)-wpt", test["suite"]
+ ):
+ env["TESTS_BY_MANIFEST_URL"] = {
+ "artifact-reference": "<decision/public/tests-by-manifest.json.gz>"
+ }
+
+ command = [
+ "{workdir}/bin/test-linux.sh".format(**run),
+ ]
+ command.extend(mozharness.get("extra-options", []))
+
+ if test.get("test-manifests"):
+ env["MOZHARNESS_TEST_PATHS"] = json.dumps(
+ {test["suite"]: test["test-manifests"]}, sort_keys=True
+ )
+
+ # TODO: remove the need for run['chunked']
+ elif mozharness.get("chunked") or test["chunks"] > 1:
+ command.append("--total-chunk={}".format(test["chunks"]))
+ command.append("--this-chunk={}".format(test["this-chunk"]))
+
+ if "download-symbols" in mozharness:
+ download_symbols = mozharness["download-symbols"]
+ download_symbols = {True: "true", False: "false"}.get(
+ download_symbols, download_symbols
+ )
+ command.append("--download-symbols=" + download_symbols)
+
+ job["run"] = {
+ "workdir": run["workdir"],
+ "tooltool-downloads": mozharness["tooltool-downloads"],
+ "checkout": test["checkout"],
+ "command": command,
+ "using": "run-task",
+ }
+ configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
+
+
+@run_job_using("generic-worker", "mozharness-test", schema=mozharness_test_run_schema)
+def mozharness_test_on_generic_worker(config, job, taskdesc):
+ test = taskdesc["run"]["test"]
+ mozharness = test["mozharness"]
+ worker = taskdesc["worker"] = job["worker"]
+
+ bitbar_script = "test-linux.sh"
+
+ is_macosx = worker["os"] == "macosx"
+ is_windows = worker["os"] == "windows"
+ is_linux = worker["os"] == "linux" or worker["os"] == "linux-bitbar"
+ is_bitbar = worker["os"] == "linux-bitbar"
+ assert is_macosx or is_windows or is_linux
+
+ artifacts = [
+ {
+ "name": "public/logs",
+ "path": "logs",
+ "type": "directory",
+ "expires-after": get_expiration(config, "default"),
+ }
+ ]
+
+ # jittest doesn't have blob_upload_dir
+ if test["test-name"] != "jittest":
+ artifacts.append(
+ {
+ "name": "public/test_info",
+ "path": "build/blobber_upload_dir",
+ "type": "directory",
+ "expires-after": get_expiration(config, "default"),
+ }
+ )
+
+ if is_bitbar:
+ artifacts = [
+ {
+ "name": "public/test/",
+ "path": "artifacts/public",
+ "type": "directory",
+ "expires-after": get_expiration(config, "default"),
+ },
+ {
+ "name": "public/logs/",
+ "path": "workspace/logs",
+ "type": "directory",
+ "expires-after": get_expiration(config, "default"),
+ },
+ {
+ "name": "public/test_info/",
+ "path": "workspace/build/blobber_upload_dir",
+ "type": "directory",
+ "expires-after": get_expiration(config, "default"),
+ },
+ ]
+
+ installer = installer_url(taskdesc)
+
+ worker["os-groups"] = test["os-groups"]
+
+ # run-as-administrator is a feature for workers with UAC enabled and as such should not be
+ # included in tasks on workers that have UAC disabled. Currently UAC is only enabled on
+ # gecko Windows 10 and 11 workers, however this may be subject to change. Worker type
+ # environment definitions can be found in https://github.com/mozilla-releng/OpenCloudConfig
+ # See https://docs.microsoft.com/en-us/windows/desktop/secauthz/user-account-control
+ # for more information about UAC.
+ if test.get("run-as-administrator", False):
+ if job["worker-type"].startswith("win10-64") or job["worker-type"].startswith(
+ "win11-64"
+ ):
+ worker["run-as-administrator"] = True
+ else:
+ raise Exception(
+ "run-as-administrator not supported on {}".format(job["worker-type"])
+ )
+
+ if test["reboot"]:
+ raise Exception(
+ "reboot: {} not supported on generic-worker".format(test["reboot"])
+ )
+
+ worker["max-run-time"] = test["max-run-time"]
+ worker["retry-exit-status"] = test["retry-exit-status"]
+ worker.setdefault("artifacts", [])
+ worker["artifacts"].extend(artifacts)
+
+ env = worker.setdefault("env", {})
+ env["GECKO_HEAD_REPOSITORY"] = config.params["head_repository"]
+ env["GECKO_HEAD_REV"] = config.params["head_rev"]
+
+ # this list will get cleaned up / reduced / removed in bug 1354088
+ if is_macosx:
+ env.update(
+ {
+ "LC_ALL": "en_US.UTF-8",
+ "LANG": "en_US.UTF-8",
+ "MOZ_NODE_PATH": "/usr/local/bin/node",
+ "PATH": "/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin",
+ "SHELL": "/bin/bash",
+ }
+ )
+ elif is_bitbar:
+ env.update(
+ {
+ "LANG": "en_US.UTF-8",
+ "MOZHARNESS_CONFIG": " ".join(mozharness["config"]),
+ "MOZHARNESS_SCRIPT": mozharness["script"],
+ "MOZHARNESS_URL": {
+ "artifact-reference": "<build/public/build/mozharness.zip>"
+ },
+ "MOZILLA_BUILD_URL": {"task-reference": installer},
+ "MOZ_NO_REMOTE": "1",
+ "NEED_XVFB": "false",
+ "XPCOM_DEBUG_BREAK": "warn",
+ "NO_FAIL_ON_TEST_ERRORS": "1",
+ "MOZ_HIDE_RESULTS_TABLE": "1",
+ "MOZ_NODE_PATH": "/usr/local/bin/node",
+ "TASKCLUSTER_WORKER_TYPE": job["worker-type"],
+ }
+ )
+
+ extra_config = {
+ "installer_url": installer,
+ "test_packages_url": test_packages_url(taskdesc),
+ }
+ env["EXTRA_MOZHARNESS_CONFIG"] = {
+ "task-reference": json.dumps(extra_config, sort_keys=True)
+ }
+
+ # Bug 1634554 - pass in decision task artifact URL to mozharness for WPT.
+ # Bug 1645974 - test-verify-wpt and test-coverage-wpt need artifact URL.
+ if "web-platform-tests" in test["suite"] or re.match(
+ "test-(coverage|verify)-wpt", test["suite"]
+ ):
+ env["TESTS_BY_MANIFEST_URL"] = {
+ "artifact-reference": "<decision/public/tests-by-manifest.json.gz>"
+ }
+
+ if is_windows:
+ py_binary = "c:\\mozilla-build\\{python}\\{python}.exe".format(python="python3")
+ mh_command = [
+ py_binary,
+ "-u",
+ "mozharness\\scripts\\" + normpath(mozharness["script"]),
+ ]
+ elif is_bitbar:
+ py_binary = "python3"
+ mh_command = ["bash", f"./{bitbar_script}"]
+ elif is_macosx:
+ py_binary = "/usr/local/bin/{}".format("python3")
+ mh_command = [
+ py_binary,
+ "-u",
+ "mozharness/scripts/" + mozharness["script"],
+ ]
+ else:
+ # is_linux
+ py_binary = "/usr/bin/{}".format("python3")
+ mh_command = [
+ # Using /usr/bin/python2.7 rather than python2.7 because
+ # /usr/local/bin/python2.7 is broken on the mac workers.
+ # See bug #1547903.
+ py_binary,
+ "-u",
+ "mozharness/scripts/" + mozharness["script"],
+ ]
+
+ env["PYTHON"] = py_binary
+
+ for mh_config in mozharness["config"]:
+ cfg_path = "mozharness/configs/" + mh_config
+ if is_windows:
+ cfg_path = normpath(cfg_path)
+ mh_command.extend(["--cfg", cfg_path])
+ mh_command.extend(mozharness.get("extra-options", []))
+ if mozharness.get("download-symbols"):
+ if isinstance(mozharness["download-symbols"], str):
+ mh_command.extend(["--download-symbols", mozharness["download-symbols"]])
+ else:
+ mh_command.extend(["--download-symbols", "true"])
+ if mozharness.get("include-blob-upload-branch"):
+ mh_command.append("--blob-upload-branch=" + config.params["project"])
+
+ if test.get("test-manifests"):
+ env["MOZHARNESS_TEST_PATHS"] = json.dumps(
+ {test["suite"]: test["test-manifests"]}, sort_keys=True
+ )
+
+ # TODO: remove the need for run['chunked']
+ elif mozharness.get("chunked") or test["chunks"] > 1:
+ mh_command.append("--total-chunk={}".format(test["chunks"]))
+ mh_command.append("--this-chunk={}".format(test["this-chunk"]))
+
+ if is_try(config.params):
+ env["TRY_COMMIT_MSG"] = config.params["message"]
+
+ worker["mounts"] = [
+ {
+ "directory": "mozharness",
+ "content": {
+ "artifact": get_artifact_path(taskdesc, "mozharness.zip"),
+ "task-id": {"task-reference": "<build>"},
+ },
+ "format": "zip",
+ }
+ ]
+ if is_bitbar:
+ a_url = config.params.file_url(
+ f"taskcluster/scripts/tester/{bitbar_script}",
+ )
+ worker["mounts"] = [
+ {
+ "file": bitbar_script,
+ "content": {
+ "url": a_url,
+ },
+ }
+ ]
+
+ job["run"] = {
+ "tooltool-downloads": mozharness["tooltool-downloads"],
+ "checkout": test["checkout"],
+ "command": mh_command,
+ "using": "run-task",
+ }
+ if is_bitbar:
+ job["run"]["run-as-root"] = True
+ configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
diff --git a/taskcluster/gecko_taskgraph/transforms/job/python_test.py b/taskcluster/gecko_taskgraph/transforms/job/python_test.py
new file mode 100644
index 0000000000..b572061217
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/job/python_test.py
@@ -0,0 +1,47 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Support for running mach python-test tasks (via run-task)
+"""
+
+
+from taskgraph.util.schema import Schema
+from voluptuous import Optional, Required
+
+from gecko_taskgraph.transforms.job import configure_taskdesc_for_run, run_job_using
+
+python_test_schema = Schema(
+ {
+ Required("using"): "python-test",
+ # Python version to use
+ Required("python-version"): int,
+ # The subsuite to run
+ Required("subsuite"): str,
+ # Base work directory used to set up the task.
+ Optional("workdir"): str,
+ }
+)
+
+
+defaults = {
+ "python-version": 3,
+ "subsuite": "default",
+}
+
+
+@run_job_using(
+ "docker-worker", "python-test", schema=python_test_schema, defaults=defaults
+)
+@run_job_using(
+ "generic-worker", "python-test", schema=python_test_schema, defaults=defaults
+)
+def configure_python_test(config, job, taskdesc):
+ run = job["run"]
+ worker = job["worker"]
+
+ # defer to the mach implementation
+ run["mach"] = ("python-test --subsuite {subsuite} --run-slow").format(**run)
+ run["using"] = "mach"
+ del run["subsuite"]
+ configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
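+
+# With the defaults above, run["mach"] resolves to
+# "python-test --subsuite default --run-slow"; the `mach` run-using then
+# expands this into the full mach command on the worker. A job overriding
+# subsuite to e.g. "mozbase" (hypothetical) would run
+# "python-test --subsuite mozbase --run-slow" instead.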
diff --git a/taskcluster/gecko_taskgraph/transforms/job/run_task.py b/taskcluster/gecko_taskgraph/transforms/job/run_task.py
new file mode 100644
index 0000000000..ce411693d3
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/job/run_task.py
@@ -0,0 +1,268 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Support for running jobs that are invoked via the `run-task` script.
+"""
+
+
+import os
+
+from mozbuild.util import memoize
+from mozpack import path
+from taskgraph.util.schema import Schema
+from taskgraph.util.yaml import load_yaml
+from voluptuous import Any, Optional, Required
+
+from gecko_taskgraph.transforms.job import run_job_using
+from gecko_taskgraph.transforms.job.common import add_tooltool, support_vcs_checkout
+from gecko_taskgraph.transforms.task import taskref_or_string
+
+run_task_schema = Schema(
+ {
+ Required("using"): "run-task",
+ # if true, add a cache at ~worker/.cache, which is where things like pip
+ # tend to hide their caches. This cache is never added for level-1 jobs.
+ # TODO Once bug 1526028 is fixed, this and 'use-caches' should be merged.
+ Required("cache-dotcache"): bool,
+ # Whether or not to use caches.
+ Optional("use-caches"): bool,
+ # if true (the default), perform a checkout of gecko on the worker
+ Required("checkout"): bool,
+ Optional(
+ "cwd",
+ description="Path to run command in. If a checkout is present, the path "
+ "to the checkout will be interpolated with the key `checkout`",
+ ): str,
+ # The sparse checkout profile to use. Value is the filename relative to
+ # "sparse-profile-prefix" which defaults to "build/sparse-profiles/".
+ Required("sparse-profile"): Any(str, None),
+ # The directory prefix under which sparse profiles are found.
+ Optional("sparse-profile-prefix"): str,
+ # if true, perform a checkout of a comm-central based branch inside the
+ # gecko checkout
+ Required("comm-checkout"): bool,
+ # The command arguments to pass to the `run-task` script, after the
+ # checkout arguments. If a list, it will be passed directly; otherwise
+ # it will be included in a single argument to `bash -cx`.
+ Required("command"): Any([taskref_or_string], taskref_or_string),
+ # Base work directory used to set up the task.
+ Optional("workdir"): str,
+ # If not false, tooltool downloads will be enabled via relengAPIProxy
+ # for either just public files, or all files. Only supported on
+ # docker-worker.
+ Required("tooltool-downloads"): Any(
+ False,
+ "public",
+ "internal",
+ ),
+ # Whether to run as root. (defaults to False)
+ Optional("run-as-root"): bool,
+ }
+)
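+
+# An illustrative `run` stanza using this schema (values are hypothetical):
+#
+#   run:
+#       using: run-task
+#       cwd: '{checkout}'
+#       command: ./mach lint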
+
+
+def common_setup(config, job, taskdesc, command):
+ run = job["run"]
+ if run["checkout"]:
+ support_vcs_checkout(config, job, taskdesc, sparse=bool(run["sparse-profile"]))
+ command.append(
+ "--gecko-checkout={}".format(taskdesc["worker"]["env"]["GECKO_PATH"])
+ )
+
+ if run["sparse-profile"]:
+ sparse_profile_prefix = run.pop(
+ "sparse-profile-prefix", "build/sparse-profiles"
+ )
+ sparse_profile_path = path.join(sparse_profile_prefix, run["sparse-profile"])
+ command.append(f"--gecko-sparse-profile={sparse_profile_path}")
+
+ taskdesc["worker"].setdefault("env", {})["MOZ_SCM_LEVEL"] = config.params["level"]
+
+
+worker_defaults = {
+ "cache-dotcache": False,
+ "checkout": True,
+ "comm-checkout": False,
+ "sparse-profile": None,
+ "tooltool-downloads": False,
+ "run-as-root": False,
+}
+
+
+load_yaml = memoize(load_yaml)
+
+
+def script_url(config, script):
+ if "MOZ_AUTOMATION" in os.environ and "TASK_ID" not in os.environ:
+ raise Exception("TASK_ID must be defined to use run-task on generic-worker")
+ task_id = os.environ.get("TASK_ID", "<TASK_ID>")
+ tc_url = "http://firefox-ci-tc.services.mozilla.com"
+ return f"{tc_url}/api/queue/v1/task/{task_id}/artifacts/public/{script}"
+
+
+@run_job_using(
+ "docker-worker", "run-task", schema=run_task_schema, defaults=worker_defaults
+)
+def docker_worker_run_task(config, job, taskdesc):
+ run = job["run"]
+ worker = taskdesc["worker"] = job["worker"]
+ command = ["/builds/worker/bin/run-task"]
+ common_setup(config, job, taskdesc, command)
+
+ if run["tooltool-downloads"]:
+ internal = run["tooltool-downloads"] == "internal"
+ add_tooltool(config, job, taskdesc, internal=internal)
+
+ if run.get("cache-dotcache"):
+ worker["caches"].append(
+ {
+ "type": "persistent",
+ "name": "{project}-dotcache".format(**config.params),
+ "mount-point": "{workdir}/.cache".format(**run),
+ "skip-untrusted": True,
+ }
+ )
+
+ run_command = run["command"]
+
+ run_cwd = run.get("cwd")
+ if run_cwd and run["checkout"]:
+ run_cwd = path.normpath(
+ run_cwd.format(checkout=taskdesc["worker"]["env"]["GECKO_PATH"])
+ )
+ elif run_cwd and "{checkout}" in run_cwd:
+ raise Exception(
+ "Found `{{checkout}}` interpolation in `cwd` for task {name} "
+ "but the task doesn't have a checkout: {cwd}".format(
+ cwd=run_cwd, name=job.get("name", job.get("label"))
+ )
+ )
+
+ # dict is for the case of `{'task-reference': str}`.
+ if isinstance(run_command, (str, dict)):
+ run_command = ["bash", "-cx", run_command]
+ if run["comm-checkout"]:
+ command.append(
+ "--comm-checkout={}/comm".format(taskdesc["worker"]["env"]["GECKO_PATH"])
+ )
+ if run["run-as-root"]:
+ command.extend(("--user", "root", "--group", "root"))
+ if run_cwd:
+ command.extend(("--task-cwd", run_cwd))
+ command.append("--")
+ command.extend(run_command)
+ worker["command"] = command
+
+
+@run_job_using(
+ "generic-worker", "run-task", schema=run_task_schema, defaults=worker_defaults
+)
+def generic_worker_run_task(config, job, taskdesc):
+ run = job["run"]
+ worker = taskdesc["worker"] = job["worker"]
+ is_win = worker["os"] == "windows"
+ is_mac = worker["os"] == "macosx"
+ is_bitbar = worker["os"] == "linux-bitbar"
+
+ if run["tooltool-downloads"]:
+ internal = run["tooltool-downloads"] == "internal"
+ add_tooltool(config, job, taskdesc, internal=internal)
+
+ if is_win:
+ command = ["C:/mozilla-build/python3/python3.exe", "run-task"]
+ elif is_mac:
+ command = ["/usr/local/bin/python3", "run-task"]
+ else:
+ command = ["./run-task"]
+
+ common_setup(config, job, taskdesc, command)
+
+ worker.setdefault("mounts", [])
+ if run.get("cache-dotcache"):
+ worker["mounts"].append(
+ {
+ "cache-name": "{project}-dotcache".format(**config.params),
+ "directory": "{workdir}/.cache".format(**run),
+ }
+ )
+ worker["mounts"].append(
+ {
+ "content": {
+ "url": script_url(config, "run-task"),
+ },
+ "file": "./run-task",
+ }
+ )
+ if job.get("fetches", {}):
+ worker["mounts"].append(
+ {
+ "content": {
+ "url": script_url(config, "fetch-content"),
+ },
+ "file": "./fetch-content",
+ }
+ )
+
+ run_command = run["command"]
+ run_cwd = run.get("cwd")
+ if run_cwd and run["checkout"]:
+ run_cwd = path.normpath(
+ run_cwd.format(checkout=taskdesc["worker"]["env"]["GECKO_PATH"])
+ )
+ elif run_cwd and "{checkout}" in run_cwd:
+ raise Exception(
+ "Found `{{checkout}}` interpolation in `cwd` for task {name} "
+ "but the task doesn't have a checkout: {cwd}".format(
+ cwd=run_cwd, name=job.get("name", job.get("label"))
+ )
+ )
+
+ # dict is for the case of `{'task-reference': str}`.
+ if isinstance(run_command, (str, dict)):
+ if is_win:
+ if isinstance(run_command, dict):
+ for k in run_command.keys():
+ run_command[k] = f'"{run_command[k]}"'
+ else:
+ run_command = f'"{run_command}"'
+ run_command = ["bash", "-cx", run_command]
+
+ if run["comm-checkout"]:
+ command.append(
+ "--comm-checkout={}/comm".format(taskdesc["worker"]["env"]["GECKO_PATH"])
+ )
+
+ if run["run-as-root"]:
+ command.extend(("--user", "root", "--group", "root"))
+ if run_cwd:
+ command.extend(("--task-cwd", run_cwd))
+ command.append("--")
+ if is_bitbar:
+ # Use the bitbar wrapper script which sets up the device and adb
+ # environment variables
+ command.append("/builds/taskcluster/script.py")
+ command.extend(run_command)
+
+ if is_win:
+ taskref = False
+ for c in command:
+ if isinstance(c, dict):
+ taskref = True
+
+ if taskref:
+ cmd = []
+ for c in command:
+ if isinstance(c, dict):
+ for v in c.values():
+ cmd.append(v)
+ else:
+ cmd.append(c)
+ worker["command"] = [{"artifact-reference": " ".join(cmd)}]
+ else:
+ worker["command"] = [" ".join(command)]
+ else:
+ worker["command"] = [
+ ["chmod", "+x", "run-task"],
+ command,
+ ]
diff --git a/taskcluster/gecko_taskgraph/transforms/job/spidermonkey.py b/taskcluster/gecko_taskgraph/transforms/job/spidermonkey.py
new file mode 100644
index 0000000000..91c7e93bd6
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/job/spidermonkey.py
@@ -0,0 +1,109 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Support for running spidermonkey jobs via dedicated scripts
+"""
+
+
+from taskgraph.util.schema import Schema
+from voluptuous import Any, Optional, Required
+
+from gecko_taskgraph.transforms.job import configure_taskdesc_for_run, run_job_using
+from gecko_taskgraph.transforms.job.common import (
+ docker_worker_add_artifacts,
+ generic_worker_add_artifacts,
+)
+
+sm_run_schema = Schema(
+ {
+ Required("using"): Any(
+ "spidermonkey",
+ "spidermonkey-package",
+ ),
+ # SPIDERMONKEY_VARIANT and SPIDERMONKEY_PLATFORM
+ Required("spidermonkey-variant"): str,
+ Optional("spidermonkey-platform"): str,
+ # Base work directory used to set up the task.
+ Optional("workdir"): str,
+ Required("tooltool-downloads"): Any(
+ False,
+ "public",
+ "internal",
+ ),
+ }
+)
+
+
+@run_job_using("docker-worker", "spidermonkey", schema=sm_run_schema)
+@run_job_using("docker-worker", "spidermonkey-package", schema=sm_run_schema)
+def docker_worker_spidermonkey(config, job, taskdesc):
+ run = job["run"]
+
+ worker = taskdesc["worker"] = job["worker"]
+ worker.setdefault("artifacts", [])
+
+ docker_worker_add_artifacts(config, job, taskdesc)
+
+ env = worker.setdefault("env", {})
+ env.update(
+ {
+ "MOZHARNESS_DISABLE": "true",
+ "SPIDERMONKEY_VARIANT": run.pop("spidermonkey-variant"),
+ "MOZ_BUILD_DATE": config.params["moz_build_date"],
+ "MOZ_SCM_LEVEL": config.params["level"],
+ }
+ )
+ if "spidermonkey-platform" in run:
+ env["SPIDERMONKEY_PLATFORM"] = run.pop("spidermonkey-platform")
+
+ script = "build-sm.sh"
+ if run["using"] == "spidermonkey-package":
+ script = "build-sm-package.sh"
+
+ run["using"] = "run-task"
+ run["cwd"] = run["workdir"]
+ run["command"] = [f"./checkouts/gecko/taskcluster/scripts/builder/{script}"]
+
+ configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
+
+
+@run_job_using("generic-worker", "spidermonkey", schema=sm_run_schema)
+def generic_worker_spidermonkey(config, job, taskdesc):
+ assert job["worker"]["os"] == "windows", "only supports windows right now"
+
+ run = job["run"]
+
+ worker = taskdesc["worker"] = job["worker"]
+
+ generic_worker_add_artifacts(config, job, taskdesc)
+
+ env = worker.setdefault("env", {})
+ env.update(
+ {
+ "MOZHARNESS_DISABLE": "true",
+ "SPIDERMONKEY_VARIANT": run.pop("spidermonkey-variant"),
+ "MOZ_BUILD_DATE": config.params["moz_build_date"],
+ "MOZ_SCM_LEVEL": config.params["level"],
+ "SCCACHE_DISABLE": "1",
+ "WORK": ".", # Override the defaults in build scripts
+ "GECKO_PATH": "./src", # with values suiteable for windows generic worker
+ "UPLOAD_DIR": "./public/build",
+ }
+ )
+ if "spidermonkey-platform" in run:
+ env["SPIDERMONKEY_PLATFORM"] = run.pop("spidermonkey-platform")
+
+ script = "build-sm.sh"
+ if run["using"] == "spidermonkey-package":
+ script = "build-sm-package.sh"
+ # Don't allow untested configurations yet
+ raise Exception("spidermonkey-package is not a supported configuration")
+
+ run["using"] = "run-task"
+ run["command"] = [
+ "c:\\mozilla-build\\msys2\\usr\\bin\\bash.exe " # string concat
+ '"./src/taskcluster/scripts/builder/%s"' % script
+ ]
+
+ configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
diff --git a/taskcluster/gecko_taskgraph/transforms/job/toolchain.py b/taskcluster/gecko_taskgraph/transforms/job/toolchain.py
new file mode 100644
index 0000000000..715c1577c5
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/job/toolchain.py
@@ -0,0 +1,257 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Support for running toolchain-building jobs via dedicated scripts
+"""
+
+
+import os
+
+import taskgraph
+from mozbuild.shellutil import quote as shell_quote
+from taskgraph.util.schema import Schema, optionally_keyed_by, resolve_keyed_by
+from voluptuous import Any, Optional, Required
+
+from gecko_taskgraph import GECKO
+from gecko_taskgraph.transforms.job import configure_taskdesc_for_run, run_job_using
+from gecko_taskgraph.transforms.job.common import (
+ docker_worker_add_artifacts,
+ generic_worker_add_artifacts,
+)
+from gecko_taskgraph.util.attributes import RELEASE_PROJECTS
+from gecko_taskgraph.util.hash import hash_paths
+
+CACHE_TYPE = "toolchains.v3"
+
+toolchain_run_schema = Schema(
+ {
+ Required("using"): "toolchain-script",
+ # The script (in taskcluster/scripts/misc) to run.
+ # Python scripts are invoked with `mach python` so vendored libraries
+ # are available.
+ Required("script"): str,
+ # Arguments to pass to the script.
+ Optional("arguments"): [str],
+ # If not false, tooltool downloads will be enabled via relengAPIProxy
+ # for either just public files, or all files. Not supported on Windows
+ Required("tooltool-downloads"): Any(
+ False,
+ "public",
+ "internal",
+ ),
+ # Sparse profile to give to checkout using `run-task`. Defaults to
+ # "toolchain-build". The value is relative to "sparse-profile-prefix"
+ # (optionally defined below), which defaults to "build/sparse-profiles",
+ # i.e. `build/sparse-profiles/toolchain-build` by default.
+ # If `None`, instructs `run-task` to not use a sparse profile at all.
+ Required("sparse-profile"): Any(str, None),
+ # The directory prefix under which sparse profiles are found.
+ Optional("sparse-profile-prefix"): str,
+ # Paths/patterns pointing to files that influence the outcome of a
+ # toolchain build.
+ Optional("resources"): [str],
+ # Path to the artifact produced by the toolchain job
+ Required("toolchain-artifact"): str,
+ Optional(
+ "toolchain-alias",
+ description="An alias that can be used instead of the real toolchain job name in "
+ "fetch stanzas for jobs.",
+ ): optionally_keyed_by("project", Any(None, str, [str])),
+ Optional(
+ "toolchain-env",
+ description="Additional env variables to add to the worker when using this toolchain",
+ ): {str: object},
+ # Base work directory used to set up the task.
+ Optional("workdir"): str,
+ }
+)
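+
+# An illustrative `run` stanza using this schema (names are hypothetical):
+#
+#   run:
+#       using: toolchain-script
+#       script: build-clang.sh
+#       toolchain-artifact: public/build/clang.tar.zst
+#       resources:
+#           - build/build-clang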
+
+
+def get_digest_data(config, run, taskdesc):
+ files = list(run.pop("resources", []))
+ # The script
+ files.append("taskcluster/scripts/misc/{}".format(run["script"]))
+ # Tooltool manifest if any is defined:
+ tooltool_manifest = taskdesc["worker"]["env"].get("TOOLTOOL_MANIFEST")
+ if tooltool_manifest:
+ files.append(tooltool_manifest)
+
+ # Accumulate dependency hashes for index generation.
+ data = [hash_paths(GECKO, files)]
+
+ data.append(taskdesc["attributes"]["toolchain-artifact"])
+
+ # If the task uses an in-tree docker image, we want it to influence
+ # the index path as well. Ideally, the content of the docker image itself
+ # should have an influence, but at the moment, we can't get that
+ # information here. So use the docker image name as a proxy. Not a lot of
+ # changes to docker images actually have an impact on the resulting
+ # toolchain artifact, so we'll just rely on such important changes being
+ # accompanied by a docker image name change.
+ image = taskdesc["worker"].get("docker-image", {}).get("in-tree")
+ if image:
+ data.append(image)
+
+ # Likewise script arguments should influence the index.
+ args = run.get("arguments")
+ if args:
+ data.extend(args)
+
+ if taskdesc["attributes"].get("rebuild-on-release"):
+ # Add whether this is a release branch or not
+ data.append(str(config.params["project"] in RELEASE_PROJECTS))
+ return data
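+# The digest data ends up as a flat, ordered list, e.g. (illustrative):
+#   [<hash of script and resources>, "public/build/clang.tar.zst",
+#    "deb12-toolchain-build", "--flag"]
+# Any change to an entry yields a new cache digest and thus a rebuild.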
+
+
+def common_toolchain(config, job, taskdesc, is_docker):
+ run = job["run"]
+
+ worker = taskdesc["worker"] = job["worker"]
+ worker["chain-of-trust"] = True
+
+ if is_docker:
+ # If the task doesn't have a docker-image, set a default
+ worker.setdefault("docker-image", {"in-tree": "deb12-toolchain-build"})
+
+ if job["worker"]["os"] == "windows":
+ # There were no caches on generic-worker before bug 1519472, and they cause
+ # all sorts of problems with Windows toolchain tasks; disable them until
+ # the tasks are ready.
+ run["use-caches"] = False
+
+ env = worker.setdefault("env", {})
+ env.update(
+ {
+ "MOZ_BUILD_DATE": config.params["moz_build_date"],
+ "MOZ_SCM_LEVEL": config.params["level"],
+ "TOOLCHAIN_ARTIFACT": run["toolchain-artifact"],
+ }
+ )
+
+ if is_docker:
+ # Toolchain checkouts don't live under {workdir}/checkouts
+ workspace = "{workdir}/workspace/build".format(**run)
+ env["GECKO_PATH"] = f"{workspace}/src"
+
+ attributes = taskdesc.setdefault("attributes", {})
+ attributes["toolchain-artifact"] = run.pop("toolchain-artifact")
+ toolchain_artifact = attributes["toolchain-artifact"]
+ if not toolchain_artifact.startswith("public/build/"):
+ if "artifact_prefix" in attributes:
+ raise Exception(
+ "Toolchain {} has an artifact_prefix attribute. That is not"
+ " allowed on toolchain tasks.".format(taskdesc["label"])
+ )
+ attributes["artifact_prefix"] = os.path.dirname(toolchain_artifact)
+
+ resolve_keyed_by(
+ run,
+ "toolchain-alias",
+ item_name=taskdesc["label"],
+ project=config.params["project"],
+ )
+ alias = run.pop("toolchain-alias", None)
+ if alias:
+ attributes["toolchain-alias"] = alias
+ if "toolchain-env" in run:
+ attributes["toolchain-env"] = run.pop("toolchain-env")
+
+ # Allow the job to specify where artifacts come from, but add
+ # public/build if it's not there already.
+ artifacts = worker.setdefault("artifacts", [])
+ if not artifacts:
+ if is_docker:
+ docker_worker_add_artifacts(config, job, taskdesc)
+ else:
+ generic_worker_add_artifacts(config, job, taskdesc)
+
+ digest_data = get_digest_data(config, run, taskdesc)
+
+ if job.get("attributes", {}).get("cached_task") is not False and not taskgraph.fast:
+ name = taskdesc["label"].replace(f"{config.kind}-", "", 1)
+ taskdesc["cache"] = {
+ "type": CACHE_TYPE,
+ "name": name,
+ "digest-data": digest_data,
+ }
+
+ # Toolchains that are used for local development need to be built on a
+ # level-3 branch to be installable via `mach bootstrap`.
+ local_toolchain = taskdesc["attributes"].get("local-toolchain")
+ if local_toolchain:
+ if taskdesc.get("run-on-projects"):
+ raise Exception(
+ "Toolchain {} used for local developement must not have"
+ " run-on-projects set".format(taskdesc["label"])
+ )
+ taskdesc["run-on-projects"] = ["integration", "release"]
+
+ script = run.pop("script")
+ arguments = run.pop("arguments", [])
+ if local_toolchain and not attributes["toolchain-artifact"].startswith("public/"):
+ # Local toolchains with private artifacts are expected to have a script that
+ # fills a directory given as the final command line argument. That script, and the
+ # arguments provided, are used by the build system bootstrap code, and for the
+ # corresponding CI tasks, the command is wrapped with a script that creates an
+ # artifact based on that filled directory.
+ # We prefer automatic wrapping rather than manual wrapping in the yaml because
+ # it makes the index independent of the wrapper script, which is irrelevant.
+ # Also, an attribute is added for the bootstrap code to be able to easily parse
+ # the command.
+ attributes["toolchain-command"] = {
+ "script": script,
+ "arguments": list(arguments),
+ }
+ arguments.insert(0, script)
+ script = "private_local_toolchain.sh"
+
+ run["using"] = "run-task"
+ if is_docker:
+ gecko_path = "workspace/build/src"
+ elif job["worker"]["os"] == "windows":
+ gecko_path = "%GECKO_PATH%"
+ else:
+ gecko_path = "$GECKO_PATH"
+
+ if is_docker:
+ run["cwd"] = run["workdir"]
+ run["command"] = [
+ "{}/taskcluster/scripts/misc/{}".format(gecko_path, script)
+ ] + arguments
+ if not is_docker:
+ # Don't quote the first item in the command because it purposely contains
+ # an environment variable that is not meant to be quoted.
+ if len(run["command"]) > 1:
+ run["command"] = run["command"][0] + " " + shell_quote(*run["command"][1:])
+ else:
+ run["command"] = run["command"][0]
+
+ configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
+
+
+toolchain_defaults = {
+ "tooltool-downloads": False,
+ "sparse-profile": "toolchain-build",
+}
+
+
+@run_job_using(
+ "docker-worker",
+ "toolchain-script",
+ schema=toolchain_run_schema,
+ defaults=toolchain_defaults,
+)
+def docker_worker_toolchain(config, job, taskdesc):
+ common_toolchain(config, job, taskdesc, is_docker=True)
+
+
+@run_job_using(
+ "generic-worker",
+ "toolchain-script",
+ schema=toolchain_run_schema,
+ defaults=toolchain_defaults,
+)
+def generic_worker_toolchain(config, job, taskdesc):
+ common_toolchain(config, job, taskdesc, is_docker=False)
diff --git a/taskcluster/gecko_taskgraph/transforms/l10n.py b/taskcluster/gecko_taskgraph/transforms/l10n.py
new file mode 100644
index 0000000000..151d7d585a
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/l10n.py
@@ -0,0 +1,423 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transforms specific to the l10n kind.
+"""
+
+
+import json
+
+from mozbuild.chunkify import chunkify
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.dependencies import get_dependencies, get_primary_dependency
+from taskgraph.util.schema import (
+ Schema,
+ optionally_keyed_by,
+ resolve_keyed_by,
+ taskref_or_string,
+)
+from taskgraph.util.taskcluster import get_artifact_prefix
+from taskgraph.util.treeherder import add_suffix
+from voluptuous import Any, Optional, Required
+
+from gecko_taskgraph.transforms.job import job_description_schema
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import (
+ copy_attributes_from_dependent_job,
+ sorted_unique_list,
+ task_name,
+)
+from gecko_taskgraph.util.copy_task import copy_task
+
+
+def _by_platform(arg):
+ return optionally_keyed_by("build-platform", arg)
+
+
+l10n_description_schema = Schema(
+ {
+ # Name for this job, inferred from the dependent job before validation
+ Required("name"): str,
+ # build-platform, inferred from dependent job before validation
+ Required("build-platform"): str,
+ # max run time of the task
+ Required("run-time"): _by_platform(int),
+ # Locales not to repack for
+ Required("ignore-locales"): _by_platform([str]),
+ # All l10n jobs use mozharness
+ Required("mozharness"): {
+ # Script to invoke for mozharness
+ Required("script"): _by_platform(str),
+ # Config files passed to the mozharness script
+ Required("config"): _by_platform([str]),
+ # Additional paths to look for mozharness configs in. These should be
+ # relative to the base of the source checkout
+ Optional("config-paths"): [str],
+ # Options to pass to the mozharness script
+ Optional("options"): _by_platform([str]),
+ # Action commands to provide to mozharness script
+ Required("actions"): _by_platform([str]),
+ # if true, perform a checkout of a comm-central based branch inside the
+ # gecko checkout
+ Optional("comm-checkout"): bool,
+ },
+ # Items for the taskcluster index
+ Optional("index"): {
+ # Product to identify as in the taskcluster index
+ Required("product"): _by_platform(str),
+ # Job name to identify as in the taskcluster index
+ Required("job-name"): _by_platform(str),
+ # Type of index
+ Optional("type"): _by_platform(str),
+ },
+ # Description of the localized task
+ Required("description"): _by_platform(str),
+ Optional("run-on-projects"): job_description_schema["run-on-projects"],
+ # worker-type to utilize
+ Required("worker-type"): _by_platform(str),
+ # File which contains the used locales
+ Required("locales-file"): _by_platform(str),
+ # Tooltool visibility required for task.
+ Required("tooltool"): _by_platform(Any("internal", "public")),
+ # Docker image required for task. We accept only in-tree images
+ # -- generally desktop-build or android-build -- for now.
+ Optional("docker-image"): _by_platform(
+ # an in-tree generated docker image (from `taskcluster/docker/<name>`)
+ {"in-tree": str},
+ ),
+ Optional("fetches"): {
+ str: _by_platform([str]),
+ },
+ # The set of secret names to which the task has access; these are prefixed
+ # with `project/releng/gecko/{treeherder.kind}/level-{level}/`. Setting
+ # this will enable any worker features required and set the task's scopes
+ # appropriately. `true` here means ['*'], all secrets. Not supported on
+ # Windows
+ Optional("secrets"): _by_platform(Any(bool, [str])),
+ # Information for treeherder
+ Required("treeherder"): {
+ # Platform to display the task on in treeherder
+ Required("platform"): _by_platform(str),
+ # Symbol to use
+ Required("symbol"): str,
+ # Tier this task is
+ Required("tier"): _by_platform(int),
+ },
+ # Extra environment values to pass to the worker
+ Optional("env"): _by_platform({str: taskref_or_string}),
+ # Max number of locales per chunk
+ Optional("locales-per-chunk"): _by_platform(int),
+ # Task deps to chain this task with, added in transforms from primary dependency
+ # if this is a shippable-style build
+ Optional("dependencies"): {str: str},
+ # Run the task when the listed files change (if present).
+ Optional("when"): {"files-changed": [str]},
+ # passed through directly to the job description
+ Optional("attributes"): job_description_schema["attributes"],
+ Optional("extra"): job_description_schema["extra"],
+ # Shipping product and phase
+ Optional("shipping-product"): task_description_schema["shipping-product"],
+ Optional("shipping-phase"): task_description_schema["shipping-phase"],
+ Optional("job-from"): task_description_schema["job-from"],
+ }
+)
+
+transforms = TransformSequence()
+
+
+def parse_locales_file(locales_file, platform=None):
+ """Parse the passed locales file for a list of locales."""
+ locales = []
+
+ with open(locales_file, mode="r") as f:
+ if locales_file.endswith("json"):
+ all_locales = json.load(f)
+ # XXX Only single locales are fetched
+ locales = {
+ locale: data["revision"]
+ for locale, data in all_locales.items()
+ if platform is None or platform in data["platforms"]
+ }
+ else:
+ all_locales = f.read().split()
+ # in this context, 'default' is the revision at the tip of the hg repo
+ locales = {locale: "default" for locale in all_locales}
+ return locales
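+# Illustrative inputs: a JSON locales file maps each locale to metadata, e.g.
+#   {"de": {"revision": "abcdef123456", "platforms": ["linux64", "win64"]}}
+# yielding {"de": "abcdef123456"} when the platform matches, while a plain
+# text file is just whitespace-separated locale codes, e.g. "de fr it",
+# yielding {"de": "default", "fr": "default", "it": "default"}.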
+
+
+def _remove_locales(locales, to_remove=None):
+ # ja-JP-mac is a mac-only locale, but there are no mac builds being repacked,
+ # so just omit it unconditionally
+ to_remove = to_remove or []
+ return {
+ locale: revision
+ for locale, revision in locales.items()
+ if locale not in to_remove
+ }
+
+
+@transforms.add
+def setup_name(config, jobs):
+ for job in jobs:
+ dep = get_primary_dependency(config, job)
+ assert dep
+ # Set the name to the same as the dep task, without the kind name.
+ # The label will get set automatically with this kind's name.
+ job["name"] = job.get("name", task_name(dep))
+ yield job
+
+
+@transforms.add
+def copy_in_useful_magic(config, jobs):
+ for job in jobs:
+ dep = get_primary_dependency(config, job)
+ assert dep
+ attributes = copy_attributes_from_dependent_job(dep)
+ attributes.update(job.get("attributes", {}))
+ # build-platform is needed on `job` for by-build-platform
+ job["build-platform"] = attributes.get("build_platform")
+ job["attributes"] = attributes
+ yield job
+
+
+transforms.add_validate(l10n_description_schema)
+
+
+@transforms.add
+def gather_required_signoffs(config, jobs):
+ for job in jobs:
+ job.setdefault("attributes", {})["required_signoffs"] = sorted_unique_list(
+ *(
+ dep.attributes.get("required_signoffs", [])
+ for dep in get_dependencies(config, job)
+ )
+ )
+ yield job
+
+
+@transforms.add
+def remove_repackage_dependency(config, jobs):
+ for job in jobs:
+ build_platform = job["attributes"]["build_platform"]
+ if not build_platform.startswith("macosx"):
+ del job["dependencies"]["repackage"]
+
+ yield job
+
+
+@transforms.add
+def handle_keyed_by(config, jobs):
+ """Resolve fields that can be keyed by platform, etc."""
+ fields = [
+ "locales-file",
+ "locales-per-chunk",
+ "worker-type",
+ "description",
+ "run-time",
+ "docker-image",
+ "secrets",
+ "fetches.toolchain",
+ "fetches.fetch",
+ "tooltool",
+ "env",
+ "ignore-locales",
+ "mozharness.config",
+ "mozharness.options",
+ "mozharness.actions",
+ "mozharness.script",
+ "treeherder.tier",
+ "treeherder.platform",
+ "index.type",
+ "index.product",
+ "index.job-name",
+ "when.files-changed",
+ ]
+ for job in jobs:
+ job = copy_task(job) # don't overwrite dict values here
+ for field in fields:
+ resolve_keyed_by(item=job, field=field, item_name=job["name"])
+ yield job
+
+
+@transforms.add
+def handle_artifact_prefix(config, jobs):
+ """Resolve ``artifact_prefix`` in env vars"""
+ for job in jobs:
+ artifact_prefix = get_artifact_prefix(job)
+ for k1, v1 in job.get("env", {}).items():
+ if isinstance(v1, str):
+ job["env"][k1] = v1.format(artifact_prefix=artifact_prefix)
+ elif isinstance(v1, dict):
+ for k2, v2 in v1.items():
+ job["env"][k1][k2] = v2.format(artifact_prefix=artifact_prefix)
+ yield job
+
+
+@transforms.add
+def all_locales_attribute(config, jobs):
+ for job in jobs:
+ locales_platform = job["attributes"]["build_platform"].replace("-shippable", "")
+ locales_platform = locales_platform.replace("-pgo", "")
+ locales_with_changesets = parse_locales_file(
+ job["locales-file"], platform=locales_platform
+ )
+ locales_with_changesets = _remove_locales(
+ locales_with_changesets, to_remove=job["ignore-locales"]
+ )
+
+ locales = sorted(locales_with_changesets.keys())
+ attributes = job.setdefault("attributes", {})
+ attributes["all_locales"] = locales
+ attributes["all_locales_with_changesets"] = locales_with_changesets
+ if job.get("shipping-product"):
+ attributes["shipping_product"] = job["shipping-product"]
+ yield job
+
+
+@transforms.add
+def chunk_locales(config, jobs):
+ """Utilizes chunking for l10n stuff"""
+ for job in jobs:
+ locales_per_chunk = job.get("locales-per-chunk")
+ locales_with_changesets = job["attributes"]["all_locales_with_changesets"]
+ if locales_per_chunk:
+ chunks, remainder = divmod(len(locales_with_changesets), locales_per_chunk)
+ if remainder:
+ chunks += 1
+ for this_chunk in range(1, chunks + 1):
+ chunked = copy_task(job)
+ chunked["name"] = chunked["name"].replace("/", f"-{this_chunk}/", 1)
+ chunked["mozharness"]["options"] = chunked["mozharness"].get(
+ "options", []
+ )
+ # chunkify doesn't work with dicts
+ locales_with_changesets_as_list = sorted(
+ locales_with_changesets.items()
+ )
+ chunked_locales = chunkify(
+ locales_with_changesets_as_list, this_chunk, chunks
+ )
+ chunked["mozharness"]["options"].extend(
+ [
+ f"locale={locale}:{changeset}"
+ for locale, changeset in chunked_locales
+ ]
+ )
+ chunked["attributes"]["l10n_chunk"] = str(this_chunk)
+ # strip revision
+ chunked["attributes"]["chunk_locales"] = [
+ locale for locale, _ in chunked_locales
+ ]
+
+ # add the chunk number to the TH symbol
+ chunked["treeherder"]["symbol"] = add_suffix(
+ chunked["treeherder"]["symbol"], this_chunk
+ )
+ yield chunked
+ else:
+ job["mozharness"]["options"] = job["mozharness"].get("options", [])
+ job["mozharness"]["options"].extend(
+ [
+ f"locale={locale}:{changeset}"
+ for locale, changeset in sorted(locales_with_changesets.items())
+ ]
+ )
+ yield job
+
+
+transforms.add_validate(l10n_description_schema)
+
+
+@transforms.add
+def stub_installer(config, jobs):
+ for job in jobs:
+ job.setdefault("attributes", {})
+ job.setdefault("env", {})
+ if job["attributes"].get("stub-installer"):
+ job["env"].update({"USE_STUB_INSTALLER": "1"})
+ yield job
+
+
+@transforms.add
+def set_extra_config(config, jobs):
+ for job in jobs:
+ job["mozharness"].setdefault("extra-config", {})["branch"] = config.params[
+ "project"
+ ]
+ if "update-channel" in job["attributes"]:
+ job["mozharness"]["extra-config"]["update_channel"] = job["attributes"][
+ "update-channel"
+ ]
+ yield job
+
+
+@transforms.add
+def make_job_description(config, jobs):
+ for job in jobs:
+ job["mozharness"].update(
+ {
+ "using": "mozharness",
+ "job-script": "taskcluster/scripts/builder/build-l10n.sh",
+ "secrets": job.get("secrets", False),
+ }
+ )
+ job_description = {
+ "name": job["name"],
+ "worker-type": job["worker-type"],
+ "description": job["description"],
+ "run": job["mozharness"],
+ "attributes": job["attributes"],
+ "treeherder": {
+ "kind": "build",
+ "tier": job["treeherder"]["tier"],
+ "symbol": job["treeherder"]["symbol"],
+ "platform": job["treeherder"]["platform"],
+ },
+ "run-on-projects": job.get("run-on-projects")
+ if job.get("run-on-projects")
+ else [],
+ }
+ if job.get("extra"):
+ job_description["extra"] = job["extra"]
+
+ job_description["run"]["tooltool-downloads"] = job["tooltool"]
+
+ job_description["worker"] = {
+ "max-run-time": job["run-time"],
+ "chain-of-trust": True,
+ }
+ if job["worker-type"] in ["b-win2012", "b-win2022"]:
+ job_description["worker"]["os"] = "windows"
+ job_description["run"]["use-simple-package"] = False
+ job_description["run"]["use-magic-mh-args"] = False
+
+ if job.get("docker-image"):
+ job_description["worker"]["docker-image"] = job["docker-image"]
+
+ if job.get("fetches"):
+ job_description["fetches"] = job["fetches"]
+
+ if job.get("index"):
+ job_description["index"] = {
+ "product": job["index"]["product"],
+ "job-name": job["index"]["job-name"],
+ "type": job["index"].get("type", "generic"),
+ }
+
+ if job.get("dependencies"):
+ job_description["dependencies"] = job["dependencies"]
+ if job.get("env"):
+ job_description["worker"]["env"] = job["env"]
+ if job.get("when", {}).get("files-changed"):
+ job_description.setdefault("when", {})
+ job_description["when"]["files-changed"] = [job["locales-file"]] + job[
+ "when"
+ ]["files-changed"]
+
+ if "shipping-phase" in job:
+ job_description["shipping-phase"] = job["shipping-phase"]
+
+ if "shipping-product" in job:
+ job_description["shipping-product"] = job["shipping-product"]
+
+ yield job_description
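
The chunking arithmetic in chunk_locales above can be checked in isolation. A minimal standalone sketch, with hypothetical locale names:

    locales = ["ach", "af", "an", "ar", "ast"]
    locales_per_chunk = 2

    chunks, remainder = divmod(len(locales), locales_per_chunk)
    if remainder:
        chunks += 1  # a short final chunk absorbs the leftover locales

    assert chunks == 3  # three l10n tasks, each repacking its own slice of locales
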
diff --git a/taskcluster/gecko_taskgraph/transforms/mac_dummy.py b/taskcluster/gecko_taskgraph/transforms/mac_dummy.py
new file mode 100644
index 0000000000..f134ee2765
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/mac_dummy.py
@@ -0,0 +1,40 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Add dependencies to dummy macosx64 tasks.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def add_dependencies(config, jobs):
+ for job in jobs:
+ dependencies = {}
+
+ platform = job.get("attributes", {}).get("build_platform")
+ if not platform:
+ continue
+ arm = platform.replace("macosx64", "macosx64-aarch64")
+ intel = platform.replace("macosx64", "macosx64-x64")
+ for dep_task in config.kind_dependencies_tasks.values():
+ # Weed out unwanted tasks.
+ if dep_task.attributes.get("build_platform"):
+ if dep_task.attributes["build_platform"] not in (platform, arm, intel):
+ continue
+ # Add matching tasks to deps
+ dependencies[dep_task.label] = dep_task.label
+ # Pick one task to copy run-on-projects from
+ if (
+ dep_task.kind == "build"
+ and dep_task.attributes["build_platform"] == platform
+ ):
+ job["run-on-projects"] = dep_task.attributes.get("run_on_projects")
+
+ job.setdefault("dependencies", {}).update(dependencies)
+ job["if-dependencies"] = list(dependencies)
+
+ yield job
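
The dependency matching above rests on simple platform-name rewriting; a sketch with a hypothetical build_platform value:

    platform = "macosx64-shippable"  # hypothetical build_platform attribute
    arm = platform.replace("macosx64", "macosx64-aarch64")
    intel = platform.replace("macosx64", "macosx64-x64")
    assert arm == "macosx64-aarch64-shippable"
    assert intel == "macosx64-x64-shippable"
    # Tasks whose build_platform matches any of the three become dependencies.
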
diff --git a/taskcluster/gecko_taskgraph/transforms/mac_notarization.py b/taskcluster/gecko_taskgraph/transforms/mac_notarization.py
new file mode 100644
index 0000000000..5591022e1b
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/mac_notarization.py
@@ -0,0 +1,19 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform mac notarization tasks
+"""
+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def only_level_3_notarization(config, jobs):
+ """Filter out any notarization jobs that are not level 3"""
+ for job in jobs:
+ if "notarization" in config.kind and int(config.params["level"]) != 3:
+ continue
+ yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/mar_signing.py b/taskcluster/gecko_taskgraph/transforms/mar_signing.py
new file mode 100644
index 0000000000..56ac72c24c
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/mar_signing.py
@@ -0,0 +1,143 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the {partials,mar}-signing task into an actual task description.
+"""
+
+import logging
+import os
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.dependencies import get_primary_dependency
+from taskgraph.util.taskcluster import get_artifact_prefix
+from taskgraph.util.treeherder import inherit_treeherder_from_dep, join_symbol
+
+from gecko_taskgraph.util.attributes import (
+ copy_attributes_from_dependent_job,
+ sorted_unique_list,
+)
+from gecko_taskgraph.util.partials import get_partials_artifacts_from_params
+from gecko_taskgraph.util.scriptworker import get_signing_cert_scope_per_platform
+
+logger = logging.getLogger(__name__)
+
+SIGNING_FORMATS = {
+ "mar-signing-autograph-stage": {
+ "target.complete.mar": ["autograph_stage_mar384"],
+ },
+ "default": {
+ "target.complete.mar": ["autograph_hash_only_mar384"],
+ },
+}
+
+transforms = TransformSequence()
+
+
+def generate_partials_artifacts(job, release_history, platform, locale=None):
+ artifact_prefix = get_artifact_prefix(job)
+ if locale:
+ artifact_prefix = f"{artifact_prefix}/{locale}"
+ else:
+ locale = "en-US"
+
+ artifacts = get_partials_artifacts_from_params(release_history, platform, locale)
+
+ upstream_artifacts = [
+ {
+ "taskId": {"task-reference": "<partials>"},
+ "taskType": "partials",
+ "paths": [f"{artifact_prefix}/{path}" for path, version in artifacts],
+ "formats": ["autograph_hash_only_mar384"],
+ }
+ ]
+
+ return upstream_artifacts
+
+
+def generate_complete_artifacts(job, kind):
+ upstream_artifacts = []
+ if kind not in SIGNING_FORMATS:
+ kind = "default"
+ for artifact in job.attributes["release_artifacts"]:
+ basename = os.path.basename(artifact)
+ if basename in SIGNING_FORMATS[kind]:
+ upstream_artifacts.append(
+ {
+ "taskId": {"task-reference": f"<{job.kind}>"},
+ "taskType": "build",
+ "paths": [artifact],
+ "formats": SIGNING_FORMATS[kind][basename],
+ }
+ )
+
+ return upstream_artifacts
+
+
+@transforms.add
+def make_task_description(config, jobs):
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ assert dep_job
+
+ locale = dep_job.attributes.get("locale")
+
+ treeherder = inherit_treeherder_from_dep(job, dep_job)
+ treeherder.setdefault(
+ "symbol", join_symbol(job.get("treeherder-group", "ms"), locale or "N")
+ )
+
+ label = job.get("label", f"{config.kind}-{dep_job.label}")
+
+ dependencies = {dep_job.kind: dep_job.label}
+ signing_dependencies = dep_job.dependencies
+ # This is so we get the build task etc in our dependencies to
+ # have better beetmover support.
+ dependencies.update(signing_dependencies)
+
+ attributes = copy_attributes_from_dependent_job(dep_job)
+ attributes["required_signoffs"] = sorted_unique_list(
+ attributes.get("required_signoffs", []), job.pop("required_signoffs")
+ )
+ attributes["shipping_phase"] = job["shipping-phase"]
+ if locale:
+ attributes["locale"] = locale
+
+ build_platform = attributes.get("build_platform")
+ if config.kind == "partials-signing":
+ upstream_artifacts = generate_partials_artifacts(
+ dep_job, config.params["release_history"], build_platform, locale
+ )
+ else:
+ upstream_artifacts = generate_complete_artifacts(dep_job, config.kind)
+
+        # Check the current job for 'shippable' first, then fall back to the dep job
+        is_shippable = job.get("shippable", dep_job.attributes.get("shippable"))
+ signing_cert_scope = get_signing_cert_scope_per_platform(
+ build_platform, is_shippable, config
+ )
+
+ scopes = [signing_cert_scope]
+
+ task = {
+ "label": label,
+ "description": "{} {}".format(
+ dep_job.description, job["description-suffix"]
+ ),
+ "worker-type": job.get("worker-type", "linux-signing"),
+ "worker": {
+ "implementation": "scriptworker-signing",
+ "upstream-artifacts": upstream_artifacts,
+ "max-run-time": 3600,
+ },
+ "dependencies": dependencies,
+ "attributes": attributes,
+ "scopes": scopes,
+ "run-on-projects": job.get(
+ "run-on-projects", dep_job.attributes.get("run_on_projects")
+ ),
+ "treeherder": treeherder,
+ }
+
+ yield task
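
For reference, one upstream-artifacts entry as generate_partials_artifacts builds it; the path shown is a hypothetical example under a public/build artifact prefix:

    upstream_artifacts = [
        {
            "taskId": {"task-reference": "<partials>"},
            "taskType": "partials",
            "paths": ["public/build/ach/firefox-123.0-124.0.partial.mar"],
            "formats": ["autograph_hash_only_mar384"],
        }
    ]
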
diff --git a/taskcluster/gecko_taskgraph/transforms/maybe_release.py b/taskcluster/gecko_taskgraph/transforms/maybe_release.py
new file mode 100644
index 0000000000..08a066001a
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/maybe_release.py
@@ -0,0 +1,23 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+
+from gecko_taskgraph.util.attributes import release_level
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def make_task_description(config, jobs):
+ for job in jobs:
+ for key in ["worker-type", "scopes"]:
+ resolve_keyed_by(
+ job,
+ key,
+ item_name=job["name"],
+ **{"release-level": release_level(config.params["project"])}
+ )
+ yield job
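
resolve_keyed_by collapses a by-release-level mapping in place. A minimal sketch of what the transform above does to a task, with hypothetical worker-type names:

    from taskgraph.util.schema import resolve_keyed_by

    task = {
        "name": "maybe-release",
        "worker-type": {
            "by-release-level": {
                "production": "prod-worker",
                "staging": "dev-worker",
            }
        },
    }
    resolve_keyed_by(
        task, "worker-type", item_name=task["name"], **{"release-level": "staging"}
    )
    assert task["worker-type"] == "dev-worker"
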
diff --git a/taskcluster/gecko_taskgraph/transforms/merge_automation.py b/taskcluster/gecko_taskgraph/transforms/merge_automation.py
new file mode 100644
index 0000000000..ca5f3b6bde
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/merge_automation.py
@@ -0,0 +1,81 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the update generation task into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def handle_keyed_by(config, tasks):
+ """Resolve fields that can be keyed by platform, etc."""
+ if "merge_config" not in config.params:
+ return
+ merge_config = config.params["merge_config"]
+ fields = [
+ "worker.push",
+ "worker-type",
+ "worker.l10n-bump-info",
+ "worker.source-repo",
+ ]
+ for task in tasks:
+ for field in fields:
+ resolve_keyed_by(
+ task,
+ field,
+ item_name=task["name"],
+ **{
+ "project": config.params["project"],
+ "release-type": config.params["release_type"],
+ "behavior": merge_config["behavior"],
+ }
+ )
+ yield task
+
+
+@transforms.add
+def update_labels(config, tasks):
+ for task in tasks:
+ merge_config = config.params["merge_config"]
+ task["label"] = "merge-{}".format(merge_config["behavior"])
+ treeherder = task.get("treeherder", {})
+ treeherder["symbol"] = "Rel({})".format(merge_config["behavior"])
+ task["treeherder"] = treeherder
+ yield task
+
+
+@transforms.add
+def add_payload_config(config, tasks):
+ for task in tasks:
+ if "merge_config" not in config.params:
+ break
+ merge_config = config.params["merge_config"]
+ worker = task["worker"]
+ worker["merge-info"] = config.graph_config["merge-automation"]["behaviors"][
+ merge_config["behavior"]
+ ]
+
+ if "l10n-bump-info" in worker and worker["l10n-bump-info"] is None:
+ del worker["l10n-bump-info"]
+
+ # Override defaults, useful for testing.
+ for field in [
+ "from-repo",
+ "from-branch",
+ "to-repo",
+ "to-branch",
+ "fetch-version-from",
+ ]:
+ if merge_config.get(field):
+ worker["merge-info"][field] = merge_config[field]
+
+ worker["force-dry-run"] = merge_config["force-dry-run"]
+ worker["ssh-user"] = merge_config.get("ssh-user-alias", "merge_user")
+ if merge_config.get("push"):
+ worker["push"] = merge_config["push"]
+ yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/name_sanity.py b/taskcluster/gecko_taskgraph/transforms/name_sanity.py
new file mode 100644
index 0000000000..247c7bb3b8
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/name_sanity.py
@@ -0,0 +1,48 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Generate labels for tasks without names, consistently.
+Uses attributes from primary dependency.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.dependencies import get_primary_dependency
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def make_label(config, jobs):
+ """Generate a sane label for a new task constructed from a dependency
+ Using attributes from the dependent job and the current task kind"""
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ assert dep_job
+
+ attr = dep_job.attributes.get
+
+ if attr("locale", job.get("locale")):
+ template = "{kind}-{locale}-{build_platform}/{build_type}"
+ elif attr("l10n_chunk"):
+ template = "{kind}-{build_platform}-{l10n_chunk}/{build_type}"
+ elif config.kind.startswith("release-eme-free") or config.kind.startswith(
+ "release-partner-repack"
+ ):
+ suffix = job.get("extra", {}).get("repack_suffix", None) or job.get(
+ "extra", {}
+ ).get("repack_id", None)
+ template = "{kind}-{build_platform}"
+ if suffix:
+ template += "-{}".format(suffix.replace("/", "-"))
+ else:
+ template = "{kind}-{build_platform}/{build_type}"
+ job["label"] = template.format(
+ kind=config.kind,
+ build_platform=attr("build_platform"),
+ build_type=attr("build_type"),
+ locale=attr("locale", job.get("locale", "")), # Locale can be absent
+ l10n_chunk=attr("l10n_chunk", ""), # Can be empty
+ )
+
+ yield job
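
The resulting labels are plain str.format renderings of the templates above; for instance, with hypothetical attribute values:

    template = "{kind}-{build_platform}/{build_type}"
    label = template.format(
        kind="repackage",
        build_platform="win64-shippable",
        build_type="opt",
        locale="",       # unused by this template
        l10n_chunk="",   # unused by this template
    )
    assert label == "repackage-win64-shippable/opt"
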
diff --git a/taskcluster/gecko_taskgraph/transforms/openh264.py b/taskcluster/gecko_taskgraph/transforms/openh264.py
new file mode 100644
index 0000000000..f41215d20b
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/openh264.py
@@ -0,0 +1,26 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+This transform is used to help populate mozharness options for openh264 jobs
+"""
+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def set_mh_options(config, jobs):
+ """
+    Set the 'openh264_rev' attribute and pass the repo and revision to the
+    mozharness options.
+ """
+ for job in jobs:
+ repo = job.pop("repo")
+ rev = job.pop("revision")
+ attributes = job.setdefault("attributes", {})
+ attributes["openh264_rev"] = rev
+ run = job.setdefault("run", {})
+ options = run.setdefault("options", [])
+ options.extend([f"repo={repo}", f"rev={rev}"])
+ yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/openh264_signing.py b/taskcluster/gecko_taskgraph/transforms/openh264_signing.py
new file mode 100644
index 0000000000..00a55dad41
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/openh264_signing.py
@@ -0,0 +1,123 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the repackage signing task into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.dependencies import get_primary_dependency
+from taskgraph.util.schema import Schema
+from taskgraph.util.treeherder import inherit_treeherder_from_dep
+from voluptuous import Optional
+
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
+from gecko_taskgraph.util.scriptworker import get_signing_cert_scope_per_platform
+
+transforms = TransformSequence()
+
+signing_description_schema = Schema(
+ {
+ Optional("label"): str,
+ Optional("extra"): object,
+ Optional("shipping-product"): task_description_schema["shipping-product"],
+ Optional("shipping-phase"): task_description_schema["shipping-phase"],
+ Optional("attributes"): task_description_schema["attributes"],
+ Optional("dependencies"): task_description_schema["dependencies"],
+ Optional("job-from"): task_description_schema["job-from"],
+ }
+)
+
+
+@transforms.add
+def remove_name(config, jobs):
+ for job in jobs:
+ if "name" in job:
+ del job["name"]
+ yield job
+
+
+transforms.add_validate(signing_description_schema)
+
+
+@transforms.add
+def make_signing_description(config, jobs):
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ assert dep_job
+
+ attributes = dep_job.attributes
+ build_platform = dep_job.attributes.get("build_platform")
+ is_nightly = True # cert_scope_per_platform uses this to choose the right cert
+
+ description = (
+ "Signing of OpenH264 Binaries for '"
+ "{build_platform}/{build_type}'".format(
+ build_platform=attributes.get("build_platform"),
+ build_type=attributes.get("build_type"),
+ )
+ )
+
+ # we have a genuine repackage job as our parent
+ dependencies = {"openh264": dep_job.label}
+
+ my_attributes = copy_attributes_from_dependent_job(dep_job)
+
+ signing_cert_scope = get_signing_cert_scope_per_platform(
+ build_platform, is_nightly, config
+ )
+
+ scopes = [signing_cert_scope]
+ worker_type = "linux-signing"
+ worker = {
+ "implementation": "scriptworker-signing",
+ "max-run-time": 3600,
+ }
+ rev = attributes["openh264_rev"]
+ upstream_artifact = {
+ "taskId": {"task-reference": "<openh264>"},
+ "taskType": "build",
+ }
+
+ if "win" in build_platform:
+ upstream_artifact["formats"] = ["autograph_authenticode_sha2"]
+ elif "mac" in build_platform:
+ upstream_artifact["formats"] = ["mac_single_file"]
+ upstream_artifact["singleFileGlobs"] = ["libgmpopenh264.dylib"]
+ worker_type = "mac-signing"
+ worker["mac-behavior"] = "mac_notarize_single_file"
+ else:
+ upstream_artifact["formats"] = ["autograph_gpg"]
+
+ upstream_artifact["paths"] = [
+ f"private/openh264/openh264-{build_platform}-{rev}.zip",
+ ]
+ worker["upstream-artifacts"] = [upstream_artifact]
+
+ treeherder = inherit_treeherder_from_dep(job, dep_job)
+ treeherder.setdefault(
+ "symbol",
+ _generate_treeherder_symbol(
+ dep_job.task.get("extra", {}).get("treeherder", {}).get("symbol")
+ ),
+ )
+
+ task = {
+ "label": job["label"],
+ "description": description,
+ "worker-type": worker_type,
+ "worker": worker,
+ "scopes": scopes,
+ "dependencies": dependencies,
+ "attributes": my_attributes,
+ "run-on-projects": dep_job.attributes.get("run_on_projects"),
+ "treeherder": treeherder,
+ }
+
+ yield task
+
+
+def _generate_treeherder_symbol(build_symbol):
+ symbol = build_symbol + "s"
+ return symbol
diff --git a/taskcluster/gecko_taskgraph/transforms/partials.py b/taskcluster/gecko_taskgraph/transforms/partials.py
new file mode 100644
index 0000000000..65139811e6
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/partials.py
@@ -0,0 +1,174 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the partials task into an actual task description.
+"""
+
+import logging
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.dependencies import get_primary_dependency
+from taskgraph.util.taskcluster import get_artifact_prefix
+from taskgraph.util.treeherder import inherit_treeherder_from_dep
+
+from gecko_taskgraph.util.attributes import (
+ copy_attributes_from_dependent_job,
+ release_level,
+)
+from gecko_taskgraph.util.partials import get_builds
+from gecko_taskgraph.util.platforms import architecture
+
+logger = logging.getLogger(__name__)
+
+transforms = TransformSequence()
+
+
+def _generate_task_output_files(job, filenames, locale=None):
+ locale_output_path = f"{locale}/" if locale else ""
+ artifact_prefix = get_artifact_prefix(job)
+
+ data = list()
+ for filename in filenames:
+ data.append(
+ {
+ "type": "file",
+ "path": f"/home/worker/artifacts/{filename}",
+ "name": f"{artifact_prefix}/{locale_output_path}{filename}",
+ }
+ )
+ data.append(
+ {
+ "type": "file",
+ "path": "/home/worker/artifacts/manifest.json",
+ "name": f"{artifact_prefix}/{locale_output_path}manifest.json",
+ }
+ )
+ return data
+
+
+def identify_desired_signing_keys(project, product):
+ if project in ["mozilla-central", "comm-central", "larch", "pine"]:
+ return "nightly"
+ if project == "mozilla-beta":
+ if product == "devedition":
+ return "nightly"
+ return "release"
+ if (
+ project in ["mozilla-release", "comm-release", "comm-beta"]
+ or project.startswith("mozilla-esr")
+ or project.startswith("comm-esr")
+ ):
+ return "release"
+ return "dep1"
+
+
+@transforms.add
+def make_task_description(config, jobs):
+ # If no balrog release history, then don't generate partials
+ if not config.params.get("release_history"):
+ return
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ assert dep_job
+
+ treeherder = inherit_treeherder_from_dep(job, dep_job)
+ treeherder.setdefault("symbol", "p(N)")
+
+ label = job.get("label", f"partials-{dep_job.label}")
+
+ dependencies = {dep_job.kind: dep_job.label}
+
+ attributes = copy_attributes_from_dependent_job(dep_job)
+ locale = dep_job.attributes.get("locale")
+ if locale:
+ attributes["locale"] = locale
+ treeherder["symbol"] = f"p({locale})"
+ attributes["shipping_phase"] = job["shipping-phase"]
+
+ build_locale = locale or "en-US"
+
+ build_platform = attributes["build_platform"]
+ builds = get_builds(
+ config.params["release_history"], build_platform, build_locale
+ )
+
+ # If the list is empty there's no available history for this platform
+ # and locale combination, so we can't build any partials.
+ if not builds:
+ continue
+
+ extra = {"funsize": {"partials": list()}}
+ update_number = 1
+
+ locale_suffix = ""
+ if locale:
+ locale_suffix = f"{locale}/"
+ artifact_path = "<{}/{}/{}target.complete.mar>".format(
+ dep_job.kind,
+ get_artifact_prefix(dep_job),
+ locale_suffix,
+ )
+ for build in sorted(builds):
+ partial_info = {
+ "locale": build_locale,
+ "from_mar": builds[build]["mar_url"],
+ "to_mar": {"artifact-reference": artifact_path},
+ "branch": config.params["project"],
+ "update_number": update_number,
+ "dest_mar": build,
+ }
+ if "product" in builds[build]:
+ partial_info["product"] = builds[build]["product"]
+ if "previousVersion" in builds[build]:
+ partial_info["previousVersion"] = builds[build]["previousVersion"]
+ if "previousBuildNumber" in builds[build]:
+ partial_info["previousBuildNumber"] = builds[build][
+ "previousBuildNumber"
+ ]
+ extra["funsize"]["partials"].append(partial_info)
+ update_number += 1
+
+ level = config.params["level"]
+
+ worker = {
+ "artifacts": _generate_task_output_files(dep_job, builds.keys(), locale),
+ "implementation": "docker-worker",
+ "docker-image": {"in-tree": "funsize-update-generator"},
+ "os": "linux",
+ "max-run-time": 3600 if "asan" in dep_job.label else 1800,
+ "chain-of-trust": True,
+ "taskcluster-proxy": True,
+ "env": {
+ "SIGNING_CERT": identify_desired_signing_keys(
+ config.params["project"], config.params["release_product"]
+ ),
+ "EXTRA_PARAMS": f"--arch={architecture(build_platform)}",
+ "MAR_CHANNEL_ID": attributes["mar-channel-id"],
+ },
+ }
+ if release_level(config.params["project"]) == "staging":
+ worker["env"]["FUNSIZE_ALLOW_STAGING_PREFIXES"] = "true"
+
+ task = {
+ "label": label,
+ "description": f"{dep_job.description} Partials",
+ "worker-type": "b-linux-gcp",
+ "dependencies": dependencies,
+ "scopes": [],
+ "attributes": attributes,
+ "run-on-projects": dep_job.attributes.get("run_on_projects"),
+ "treeherder": treeherder,
+ "extra": extra,
+ "worker": worker,
+ }
+
+ # We only want caching on linux/windows due to bug 1436977
+ if int(level) == 3 and any(
+ [build_platform.startswith(prefix) for prefix in ["linux", "win"]]
+ ):
+ task["scopes"].append(
+ "auth:aws-s3:read-write:tc-gp-private-1d-us-east-1/releng/mbsdiff-cache/"
+ )
+
+ yield task
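
Each entry appended to extra["funsize"]["partials"] above has the following shape; the URL and MAR names here are hypothetical:

    partial_info = {
        "locale": "en-US",
        "from_mar": "https://archive.example.com/firefox-123.0.complete.mar",
        "to_mar": {"artifact-reference": "<build/public/build/target.complete.mar>"},
        "branch": "mozilla-central",
        "update_number": 1,
        "dest_mar": "firefox-123.0-124.0.partial.mar",
    }
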
diff --git a/taskcluster/gecko_taskgraph/transforms/partner_attribution.py b/taskcluster/gecko_taskgraph/transforms/partner_attribution.py
new file mode 100644
index 0000000000..0bd5e0d141
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/partner_attribution.py
@@ -0,0 +1,129 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the partner attribution task into an actual task description.
+"""
+
+
+import json
+import logging
+from collections import defaultdict
+
+from taskgraph.transforms.base import TransformSequence
+
+from gecko_taskgraph.util.partners import (
+ apply_partner_priority,
+ check_if_partners_enabled,
+ generate_attribution_code,
+ get_partner_config_by_kind,
+)
+
+log = logging.getLogger(__name__)
+
+transforms = TransformSequence()
+transforms.add(check_if_partners_enabled)
+transforms.add(apply_partner_priority)
+
+
+@transforms.add
+def add_command_arguments(config, tasks):
+ enabled_partners = config.params.get("release_partners")
+ dependencies = {}
+ fetches = defaultdict(set)
+ attributions = []
+ release_artifacts = []
+ attribution_config = get_partner_config_by_kind(config, config.kind)
+
+ for partner_config in attribution_config.get("configs", []):
+ # we might only be interested in a subset of all partners, eg for a respin
+ if enabled_partners and partner_config["campaign"] not in enabled_partners:
+ continue
+ attribution_code = generate_attribution_code(
+ attribution_config["defaults"], partner_config
+ )
+ for platform in partner_config["platforms"]:
+ stage_platform = platform.replace("-shippable", "")
+ for locale in partner_config["locales"]:
+                # find the upstream repackage-signing task for this platform and locale
+ if locale == "en-US":
+ upstream_label = "repackage-signing-{platform}/opt".format(
+ platform=platform
+ )
+ upstream_artifact = "target.installer.exe"
+ else:
+ upstream_label = (
+ "repackage-signing-l10n-{locale}-{platform}/opt".format(
+ locale=locale, platform=platform
+ )
+ )
+ upstream_artifact = "{locale}/target.installer.exe".format(
+ locale=locale
+ )
+ if upstream_label not in config.kind_dependencies_tasks:
+ raise Exception(f"Can't find upstream task for {platform} {locale}")
+ upstream = config.kind_dependencies_tasks[upstream_label]
+
+ # set the dependencies to just what we need rather than all of l10n
+ dependencies.update({upstream.label: upstream.label})
+
+ fetches[upstream_label].add((upstream_artifact, stage_platform, locale))
+
+ artifact_part = "{platform}/{locale}/target.installer.exe".format(
+ platform=stage_platform, locale=locale
+ )
+ artifact = (
+ "releng/partner/{partner}/{sub_partner}/{artifact_part}".format(
+ partner=partner_config["campaign"],
+ sub_partner=partner_config["content"],
+ artifact_part=artifact_part,
+ )
+ )
+                # config for the attribution script
+                # TODO - generalise input & output?
+                # TODO - add the releng/partner prefix via get_artifact_prefix()
+ attributions.append(
+ {
+ "input": f"/builds/worker/fetches/{artifact_part}",
+ "output": f"/builds/worker/artifacts/{artifact}",
+ "attribution": attribution_code,
+ }
+ )
+ release_artifacts.append(artifact)
+
+ # bail-out early if we don't have any attributions to do
+ if not attributions:
+ return
+
+ for task in tasks:
+ worker = task.get("worker", {})
+ worker["chain-of-trust"] = True
+
+ task.setdefault("dependencies", {}).update(dependencies)
+ task.setdefault("fetches", {})
+ for upstream_label, upstream_artifacts in fetches.items():
+ task["fetches"][upstream_label] = [
+ {
+ "artifact": upstream_artifact,
+ "dest": "{platform}/{locale}".format(
+ platform=platform, locale=locale
+ ),
+ "extract": False,
+ "verify-hash": True,
+ }
+ for upstream_artifact, platform, locale in upstream_artifacts
+ ]
+ worker.setdefault("env", {})["ATTRIBUTION_CONFIG"] = json.dumps(
+ attributions, sort_keys=True
+ )
+ worker["artifacts"] = [
+ {
+ "name": "releng/partner",
+ "path": "/builds/worker/artifacts/releng/partner",
+ "type": "directory",
+ }
+ ]
+ task.setdefault("attributes", {})["release_artifacts"] = release_artifacts
+ task["label"] = config.kind
+
+ yield task
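
The ATTRIBUTION_CONFIG environment variable ends up holding a JSON list like the following; the partner names and attribution code here are hypothetical:

    import json

    attributions = [
        {
            "attribution": "campaign%3Dacme%26content%3Dacme-001",
            "input": "/builds/worker/fetches/win64/en-US/target.installer.exe",
            "output": "/builds/worker/artifacts/releng/partner/acme/acme-001/"
            "win64/en-US/target.installer.exe",
        }
    ]
    print(json.dumps(attributions, sort_keys=True))
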
diff --git a/taskcluster/gecko_taskgraph/transforms/partner_attribution_beetmover.py b/taskcluster/gecko_taskgraph/transforms/partner_attribution_beetmover.py
new file mode 100644
index 0000000000..3ac34da85d
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/partner_attribution_beetmover.py
@@ -0,0 +1,157 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the beetmover task into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.dependencies import get_primary_dependency
+from taskgraph.util.schema import Schema
+from taskgraph.util.taskcluster import get_artifact_prefix
+from voluptuous import Optional, Required
+
+from gecko_taskgraph.transforms.beetmover import craft_release_properties
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import (
+ copy_attributes_from_dependent_job,
+)
+from gecko_taskgraph.util.partners import (
+ apply_partner_priority,
+)
+from gecko_taskgraph.util.scriptworker import (
+ add_scope_prefix,
+ get_beetmover_bucket_scope,
+)
+
+beetmover_description_schema = Schema(
+ {
+ # from the loader:
+ Optional("job-from"): str,
+ Optional("name"): str,
+ # from the from_deps transforms:
+ Optional("attributes"): task_description_schema["attributes"],
+ Optional("dependencies"): task_description_schema["dependencies"],
+ # depname is used in taskref's to identify the taskID of the unsigned things
+ Required("depname", default="build"): str,
+ # unique label to describe this beetmover task, defaults to {dep.label}-beetmover
+ Optional("label"): str,
+ Required("partner-path"): str,
+ Optional("extra"): object,
+ Required("shipping-phase"): task_description_schema["shipping-phase"],
+ Optional("shipping-product"): task_description_schema["shipping-product"],
+ Optional("priority"): task_description_schema["priority"],
+ }
+)
+
+transforms = TransformSequence()
+transforms.add_validate(beetmover_description_schema)
+transforms.add(apply_partner_priority)
+
+
+@transforms.add
+def populate_scopes_and_upstream_artifacts(config, jobs):
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ assert dep_job
+
+ upstream_artifacts = dep_job.attributes["release_artifacts"]
+ attribution_task_ref = "<{}>".format(dep_job.label)
+ prefix = get_artifact_prefix(dep_job)
+ artifacts = []
+ for artifact in upstream_artifacts:
+ partner, sub_partner, platform, locale, _ = artifact.replace(
+ prefix + "/", ""
+ ).split("/", 4)
+ artifacts.append((artifact, partner, sub_partner, platform, locale))
+
+ action_scope = add_scope_prefix(config, "beetmover:action:push-to-partner")
+ bucket_scope = get_beetmover_bucket_scope(config)
+ repl_dict = {
+ "build_number": config.params["build_number"],
+ "release_partner_build_number": config.params[
+ "release_partner_build_number"
+ ],
+ "version": config.params["version"],
+ "partner": "{partner}", # we'll replace these later, per artifact
+ "subpartner": "{subpartner}",
+ "platform": "{platform}",
+ "locale": "{locale}",
+ }
+ job["scopes"] = [bucket_scope, action_scope]
+
+ partner_path = job["partner-path"].format(**repl_dict)
+ job.setdefault("worker", {})[
+ "upstream-artifacts"
+ ] = generate_upstream_artifacts(attribution_task_ref, artifacts, partner_path)
+
+ yield job
+
+
+@transforms.add
+def make_task_description(config, jobs):
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ assert dep_job
+
+ attributes = dep_job.attributes
+ build_platform = attributes.get("build_platform")
+ if not build_platform:
+ raise Exception("Cannot find build platform!")
+
+ label = config.kind
+ description = "Beetmover for partner attribution"
+ attributes = copy_attributes_from_dependent_job(dep_job)
+
+ task = {
+ "label": label,
+ "description": description,
+ "dependencies": {dep_job.kind: dep_job.label},
+ "attributes": attributes,
+ "run-on-projects": dep_job.attributes.get("run_on_projects"),
+ "shipping-phase": job["shipping-phase"],
+ "shipping-product": job.get("shipping-product"),
+ "worker": job["worker"],
+ "scopes": job["scopes"],
+ }
+ # we may have reduced the priority for partner jobs, otherwise task.py will set it
+ if job.get("priority"):
+ task["priority"] = job["priority"]
+
+ yield task
+
+
+def generate_upstream_artifacts(attribution_task, artifacts, partner_path):
+ upstream_artifacts = []
+ for artifact, partner, subpartner, platform, locale in artifacts:
+ upstream_artifacts.append(
+ {
+ "taskId": {"task-reference": attribution_task},
+ "taskType": "repackage",
+ "paths": [artifact],
+ "locale": partner_path.format(
+ partner=partner,
+ subpartner=subpartner,
+ platform=platform,
+ locale=locale,
+ ),
+ }
+ )
+
+ if not upstream_artifacts:
+ raise Exception("Couldn't find any upstream artifacts.")
+
+ return upstream_artifacts
+
+
+@transforms.add
+def make_task_worker(config, jobs):
+ for job in jobs:
+ job["worker-type"] = "beetmover"
+ worker = {
+ "implementation": "beetmover",
+ "release-properties": craft_release_properties(config, job),
+ }
+ job["worker"].update(worker)
+
+ yield job
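
The partner-path handling above relies on two-stage str.format: the first pass fills release-wide values while deliberately re-emitting the per-artifact placeholders, which generate_upstream_artifacts fills later. A standalone sketch with a hypothetical template:

    template = "v{version}-{build_number}/{partner}/{subpartner}/{platform}/{locale}"
    first_pass = template.format(
        version="124.0",
        build_number=1,
        partner="{partner}",        # survives as a literal placeholder
        subpartner="{subpartner}",
        platform="{platform}",
        locale="{locale}",
    )
    final = first_pass.format(
        partner="acme", subpartner="acme-001", platform="win64", locale="en-US"
    )
    assert final == "v124.0-1/acme/acme-001/win64/en-US"
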
diff --git a/taskcluster/gecko_taskgraph/transforms/partner_repack.py b/taskcluster/gecko_taskgraph/transforms/partner_repack.py
new file mode 100644
index 0000000000..d164c10a59
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/partner_repack.py
@@ -0,0 +1,136 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the partner repack task into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+
+from gecko_taskgraph.util.attributes import release_level
+from gecko_taskgraph.util.partners import (
+ apply_partner_priority,
+ check_if_partners_enabled,
+ get_partner_config_by_kind,
+ get_partner_url_config,
+ get_repack_ids_by_platform,
+)
+from gecko_taskgraph.util.scriptworker import get_release_config
+
+transforms = TransformSequence()
+transforms.add(check_if_partners_enabled)
+transforms.add(apply_partner_priority)
+
+
+@transforms.add
+def skip_unnecessary_platforms(config, tasks):
+ for task in tasks:
+ if config.kind == "release-partner-repack":
+ platform = task["attributes"]["build_platform"]
+ repack_ids = get_repack_ids_by_platform(config, platform)
+ if not repack_ids:
+ continue
+ yield task
+
+
+@transforms.add
+def remove_mac_dependency(config, tasks):
+ """Remove mac dependency depending on current level
+ to accomodate for mac notarization not running on level 1
+ """
+ level = int(config.params.get("level", 0))
+ for task in tasks:
+ if "macosx" not in task["attributes"]["build_platform"]:
+ yield task
+ continue
+ skipped_kind = "mac-signing" if level == 3 else "mac-notarization"
+ for dep_label in list(task["dependencies"].keys()):
+ if skipped_kind in dep_label:
+ del task["dependencies"][dep_label]
+ yield task
+
+
+@transforms.add
+def populate_repack_manifests_url(config, tasks):
+ for task in tasks:
+ partner_url_config = get_partner_url_config(config.params, config.graph_config)
+
+ for k in partner_url_config:
+ if config.kind.startswith(k):
+ task["worker"].setdefault("env", {})[
+ "REPACK_MANIFESTS_URL"
+ ] = partner_url_config[k]
+ break
+ else:
+ raise Exception("Can't find partner REPACK_MANIFESTS_URL")
+
+ for property in ("limit-locales",):
+ property = f"extra.{property}"
+ resolve_keyed_by(
+ task,
+ property,
+ property,
+ **{"release-level": release_level(config.params["project"])},
+ )
+
+ if task["worker"]["env"]["REPACK_MANIFESTS_URL"].startswith("git@"):
+ task.setdefault("scopes", []).append(
+ "secrets:get:project/releng/gecko/build/level-{level}/partner-github-ssh".format(
+ **config.params
+ )
+ )
+
+ yield task
+
+
+@transforms.add
+def make_label(config, tasks):
+ for task in tasks:
+ task["label"] = "{}-{}".format(config.kind, task["name"])
+ yield task
+
+
+@transforms.add
+def add_command_arguments(config, tasks):
+ release_config = get_release_config(config)
+
+ # staging releases - pass reduced set of locales to the repacking script
+ all_locales = set()
+ partner_config = get_partner_config_by_kind(config, config.kind)
+ for partner in partner_config.values():
+ for sub_partner in partner.values():
+ all_locales.update(sub_partner.get("locales", []))
+
+ for task in tasks:
+        # add the MOZHARNESS_OPTIONS, e.g. version=61.0, build-number=1, platform=win64
+ if not task["attributes"]["build_platform"].endswith("-shippable"):
+ raise Exception(
+ "Unexpected partner repack platform: {}".format(
+ task["attributes"]["build_platform"],
+ ),
+ )
+ platform = task["attributes"]["build_platform"].partition("-shippable")[0]
+ task["run"]["options"] = [
+ "version={}".format(release_config["version"]),
+ "build-number={}".format(release_config["build_number"]),
+ f"platform={platform}",
+ ]
+ if task["extra"]["limit-locales"]:
+ for locale in all_locales:
+ task["run"]["options"].append(f"limit-locale={locale}")
+ if "partner" in config.kind and config.params["release_partners"]:
+ for partner in config.params["release_partners"]:
+ task["run"]["options"].append(f"partner={partner}")
+
+        # The upstream taskIds are stored in a special environment variable, because we want
+        # to use task-references to resolve dependencies, but the string handling of
+        # MOZHARNESS_OPTIONS blocks that. It ends up as a space-separated string of ids.
+ task["worker"]["env"]["UPSTREAM_TASKIDS"] = {
+ "task-reference": " ".join([f"<{dep}>" for dep in task["dependencies"]])
+ }
+
+ # Forward the release type for bouncer product construction
+ task["worker"]["env"]["RELEASE_TYPE"] = config.params["release_type"]
+
+ yield task
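
The resulting UPSTREAM_TASKIDS value is a task-reference string that Taskcluster rewrites at task-creation time, substituting each <label> with the resolved taskId. A sketch with hypothetical dependency labels:

    dependencies = ["build-win64-shippable/opt", "repackage-win64-shippable/opt"]
    env_value = {"task-reference": " ".join(f"<{dep}>" for dep in dependencies)}
    assert env_value["task-reference"] == (
        "<build-win64-shippable/opt> <repackage-win64-shippable/opt>"
    )
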
diff --git a/taskcluster/gecko_taskgraph/transforms/partner_signing.py b/taskcluster/gecko_taskgraph/transforms/partner_signing.py
new file mode 100644
index 0000000000..2ae9c62bc8
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/partner_signing.py
@@ -0,0 +1,68 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the signing task into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.dependencies import get_primary_dependency
+
+from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
+from gecko_taskgraph.util.partners import get_partner_config_by_kind
+from gecko_taskgraph.util.signed_artifacts import (
+ generate_specifications_of_artifacts_to_sign,
+)
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def set_mac_label(config, jobs):
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ if "mac-notarization" in config.kind:
+ default_label = dep_job.label.replace("mac-signing", "mac-notarization")
+ job.setdefault("label", default_label)
+ assert job["label"] != dep_job.label, "Unable to determine label for {}".format(
+ config.kind
+ )
+ yield job
+
+
+@transforms.add
+def define_upstream_artifacts(config, jobs):
+ partner_configs = get_partner_config_by_kind(config, config.kind)
+ if not partner_configs:
+ return
+
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ job.setdefault("attributes", {}).update(
+ copy_attributes_from_dependent_job(dep_job)
+ )
+
+ repack_ids = job["extra"]["repack_ids"]
+ artifacts_specifications = generate_specifications_of_artifacts_to_sign(
+ config,
+ job,
+ keep_locale_template=True,
+ kind=config.kind,
+ )
+ task_type = "build"
+ if "notarization" in dep_job.label or "mac-signing" in dep_job.label:
+ task_type = "scriptworker"
+ job["upstream-artifacts"] = [
+ {
+ "taskId": {"task-reference": f"<{dep_job.kind}>"},
+ "taskType": task_type,
+ "paths": [
+ path_template.format(locale=repack_id)
+ for path_template in spec["artifacts"]
+ for repack_id in repack_ids
+ ],
+ "formats": spec["formats"],
+ }
+ for spec in artifacts_specifications
+ ]
+ yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/per_platform_dummy.py b/taskcluster/gecko_taskgraph/transforms/per_platform_dummy.py
new file mode 100644
index 0000000000..8ec5155a73
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/per_platform_dummy.py
@@ -0,0 +1,36 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the repackage task into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.dependencies import get_primary_dependency
+
+from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def one_task_per_product_and_platform(config, jobs):
+ unique_products_and_platforms = set()
+ for job in jobs:
+ dep_task = get_primary_dependency(config, job)
+ assert dep_task
+
+ product = dep_task.attributes.get("shipping_product")
+ platform = dep_task.attributes.get("build_platform")
+ if (product, platform) not in unique_products_and_platforms:
+ attr_denylist = ("l10n_chunk", "locale", "artifact_map", "artifact_prefix")
+ attributes = copy_attributes_from_dependent_job(
+ dep_task, denylist=attr_denylist
+ )
+ attributes.update(job.get("attributes", {}))
+ job["attributes"] = attributes
+ job["name"] = f"{product}-{platform}"
+ job["shipping-product"] = product
+ del job["dependencies"]
+ yield job
+ unique_products_and_platforms.add((product, platform))
diff --git a/taskcluster/gecko_taskgraph/transforms/perftest.py b/taskcluster/gecko_taskgraph/transforms/perftest.py
new file mode 100644
index 0000000000..47baafdad7
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/perftest.py
@@ -0,0 +1,358 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+This transform passes options from `mach perftest` to the corresponding task.
+"""
+
+
+import json
+from copy import deepcopy
+from datetime import date, timedelta
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import Schema, optionally_keyed_by, resolve_keyed_by
+from taskgraph.util.treeherder import join_symbol, split_symbol
+from voluptuous import Any, Extra, Optional
+
+transforms = TransformSequence()
+
+
+perftest_description_schema = Schema(
+ {
+ # The test names and the symbols to use for them: [test-symbol, test-path]
+ Optional("perftest"): [[str]],
+ # Metrics to gather for the test. These will be merged
+ # with options specified through perftest-perfherder-global
+ Optional("perftest-metrics"): optionally_keyed_by(
+ "perftest",
+ Any(
+ [str],
+ {str: Any(None, {str: Any(None, str, [str])})},
+ ),
+ ),
+ # Perfherder data options that will be applied to
+ # all metrics gathered.
+ Optional("perftest-perfherder-global"): optionally_keyed_by(
+ "perftest", {str: Any(None, str, [str])}
+ ),
+ # Extra options to add to the test's command
+ Optional("perftest-extra-options"): optionally_keyed_by("perftest", [str]),
+ # Variants of the test to make based on extra browsertime
+ # arguments. Expecting:
+ # [variant-suffix, options-to-use]
+ # If variant-suffix is `null` then the options will be added
+ # to the existing task. Otherwise, a new variant is created
+ # with the given suffix and with its options replaced.
+ Optional("perftest-btime-variants"): optionally_keyed_by(
+ "perftest", [[Any(None, str)]]
+ ),
+ # These options will be parsed in the next schemas
+ Extra: object,
+ }
+)
+
+
+transforms.add_validate(perftest_description_schema)
+
+
+@transforms.add
+def split_tests(config, jobs):
+ for job in jobs:
+ if job.get("perftest") is None:
+ yield job
+ continue
+
+ for test_symbol, test_name in job.pop("perftest"):
+ job_new = deepcopy(job)
+
+ job_new["perftest"] = test_symbol
+ job_new["name"] += "-" + test_symbol
+ job_new["treeherder"]["symbol"] = job["treeherder"]["symbol"].format(
+ symbol=test_symbol
+ )
+ job_new["run"]["command"] = job["run"]["command"].replace(
+ "{perftest_testname}", test_name
+ )
+
+ yield job_new
+
+
+@transforms.add
+def handle_keyed_by_perftest(config, jobs):
+ fields = ["perftest-metrics", "perftest-extra-options", "perftest-btime-variants"]
+ for job in jobs:
+ if job.get("perftest") is None:
+ yield job
+ continue
+
+ for field in fields:
+ resolve_keyed_by(job, field, item_name=job["name"])
+
+ job.pop("perftest")
+ yield job
+
+
+@transforms.add
+def parse_perftest_metrics(config, jobs):
+ """Parse the metrics into a dictionary immediately.
+
+ This way we can modify the extraOptions field (and others) entry through the
+ transforms that come later. The metrics aren't formatted until the end of the
+ transforms.
+ """
+ for job in jobs:
+ if job.get("perftest-metrics") is None:
+ yield job
+ continue
+ perftest_metrics = job.pop("perftest-metrics")
+
+        # If perftest-metrics is a plain list of names, wrap each one in a dict
+ if isinstance(perftest_metrics, list):
+ new_metrics_info = [{"name": metric} for metric in perftest_metrics]
+ else:
+ new_metrics_info = []
+ for metric, options in perftest_metrics.items():
+ entry = {"name": metric}
+ entry.update(options)
+ new_metrics_info.append(entry)
+
+ job["perftest-metrics"] = new_metrics_info
+ yield job
+
+
+@transforms.add
+def split_perftest_variants(config, jobs):
+ for job in jobs:
+ if job.get("variants") is None:
+ yield job
+ continue
+
+ for variant in job.pop("variants"):
+ job_new = deepcopy(job)
+
+ group, symbol = split_symbol(job_new["treeherder"]["symbol"])
+ group += "-" + variant
+ job_new["treeherder"]["symbol"] = join_symbol(group, symbol)
+ job_new["name"] += "-" + variant
+ job_new.setdefault("perftest-perfherder-global", {}).setdefault(
+ "extraOptions", []
+ ).append(variant)
+ job_new[variant] = True
+
+ yield job_new
+
+ yield job
+
+
+@transforms.add
+def split_btime_variants(config, jobs):
+ for job in jobs:
+ if job.get("perftest-btime-variants") is None:
+ yield job
+ continue
+
+ variants = job.pop("perftest-btime-variants")
+ if not variants:
+ yield job
+ continue
+
+ yield_existing = False
+ for suffix, options in variants:
+ if suffix is None:
+ # Append options to the existing job
+ job.setdefault("perftest-btime-variants", []).append(options)
+ yield_existing = True
+ else:
+ job_new = deepcopy(job)
+ group, symbol = split_symbol(job_new["treeherder"]["symbol"])
+ symbol += "-" + suffix
+ job_new["treeherder"]["symbol"] = join_symbol(group, symbol)
+ job_new["name"] += "-" + suffix
+ job_new.setdefault("perftest-perfherder-global", {}).setdefault(
+ "extraOptions", []
+ ).append(suffix)
+ # Replace the existing options with the new ones
+ job_new["perftest-btime-variants"] = [options]
+ yield job_new
+
+ # The existing job has been modified so we should also return it
+ if yield_existing:
+ yield job
+
+
+@transforms.add
+def setup_http3_tests(config, jobs):
+ for job in jobs:
+ if job.get("http3") is None or not job.pop("http3"):
+ yield job
+ continue
+ job.setdefault("perftest-btime-variants", []).append(
+ "firefox.preference=network.http.http3.enable:true"
+ )
+ yield job
+
+
+@transforms.add
+def setup_perftest_metrics(config, jobs):
+ for job in jobs:
+ if job.get("perftest-metrics") is None:
+ yield job
+ continue
+ perftest_metrics = job.pop("perftest-metrics")
+
+ # Options to apply to each metric
+ global_options = job.pop("perftest-perfherder-global", {})
+ for metric_info in perftest_metrics:
+ for opt, val in global_options.items():
+ if isinstance(val, list) and opt in metric_info:
+ metric_info[opt].extend(val)
+ elif not (isinstance(val, list) and len(val) == 0):
+ metric_info[opt] = val
+
+ quote_escape = '\\"'
+ if "win" in job.get("platform", ""):
+ # Escaping is a bit different on windows platforms
+ quote_escape = '\\\\\\"'
+
+ job["run"]["command"] = job["run"]["command"].replace(
+ "{perftest_metrics}",
+ " ".join(
+ [
+ ",".join(
+ [
+ ":".join(
+ [
+ option,
+ str(value)
+ .replace(" ", "")
+ .replace("'", quote_escape),
+ ]
+ )
+ for option, value in metric_info.items()
+ ]
+ )
+ for metric_info in perftest_metrics
+ ]
+ ),
+ )
+
+ yield job
+
+
+@transforms.add
+def setup_perftest_browsertime_variants(config, jobs):
+ for job in jobs:
+ if job.get("perftest-btime-variants") is None:
+ yield job
+ continue
+
+ job["run"]["command"] += " --browsertime-extra-options %s" % ",".join(
+ [opt.strip() for opt in job.pop("perftest-btime-variants")]
+ )
+
+ yield job
+
+
+@transforms.add
+def setup_perftest_extra_options(config, jobs):
+ for job in jobs:
+ if job.get("perftest-extra-options") is None:
+ yield job
+ continue
+ job["run"]["command"] += " " + " ".join(job.pop("perftest-extra-options"))
+ yield job
+
+
+@transforms.add
+def pass_perftest_options(config, jobs):
+ for job in jobs:
+ env = job.setdefault("worker", {}).setdefault("env", {})
+ env["PERFTEST_OPTIONS"] = json.dumps(
+ config.params["try_task_config"].get("perftest-options")
+ )
+ yield job
+
+
+@transforms.add
+def setup_perftest_test_date(config, jobs):
+ for job in jobs:
+ if (
+ job.get("attributes", {}).get("batch", False)
+ and "--test-date" not in job["run"]["command"]
+ ):
+ yesterday = (date.today() - timedelta(1)).strftime("%Y.%m.%d")
+ job["run"]["command"] += " --test-date %s" % yesterday
+ yield job
+
+
+@transforms.add
+def setup_regression_detector(config, jobs):
+ for job in jobs:
+ if "change-detector" in job.get("name"):
+ tasks_to_analyze = []
+ for task in config.params["try_task_config"].get("tasks", []):
+ # Explicitly skip these tasks since they're
+ # part of the mozperftest tasks
+ if "side-by-side" in task:
+ continue
+ if "change-detector" in task:
+ continue
+
+ # Select these tasks
+ if "browsertime" in task:
+ tasks_to_analyze.append(task)
+ elif "talos" in task:
+ tasks_to_analyze.append(task)
+ elif "awsy" in task:
+ tasks_to_analyze.append(task)
+ elif "perftest" in task:
+ tasks_to_analyze.append(task)
+
+ if len(tasks_to_analyze) == 0:
+ yield job
+ continue
+
+ # Make the change detector task depend on the tasks to analyze.
+ # This prevents the task from running until all data is available
+ # within the current push.
+ job["soft-dependencies"] = tasks_to_analyze
+ job["requires"] = "all-completed"
+
+ new_project = config.params["project"]
+ if (
+ "try" in config.params["project"]
+ or config.params["try_mode"] == "try_select"
+ ):
+ new_project = "try"
+
+ base_project = None
+ if (
+ config.params.get("try_task_config", {})
+ .get("env", {})
+ .get("PERF_BASE_REVISION", None)
+ is not None
+ ):
+ task_names = " --task-name ".join(tasks_to_analyze)
+ base_revision = config.params["try_task_config"]["env"][
+ "PERF_BASE_REVISION"
+ ]
+ base_project = new_project
+
+ # Add all the required information to the task
+ job["run"]["command"] = job["run"]["command"].format(
+ task_name=task_names,
+ base_revision=base_revision,
+ base_branch=base_project,
+ new_branch=new_project,
+ new_revision=config.params["head_rev"],
+ )
+
+ yield job
+
+
+@transforms.add
+def apply_perftest_tier_optimization(config, jobs):
+ for job in jobs:
+ job["optimization"] = {"skip-unless-backstop": None}
+ job["treeherder"]["tier"] = max(job["treeherder"]["tier"], 2)
+ yield job
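
The nested joins in setup_perftest_metrics serialize each metric dict into option:value pairs. A standalone sketch with a hypothetical metric:

    quote_escape = '\\"'  # the non-Windows escaping branch
    perftest_metrics = [{"name": "SpeedIndex", "unit": "ms"}]
    formatted = " ".join(
        ",".join(
            ":".join([opt, str(val).replace(" ", "").replace("'", quote_escape)])
            for opt, val in metric_info.items()
        )
        for metric_info in perftest_metrics
    )
    assert formatted == "name:SpeedIndex,unit:ms"
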
diff --git a/taskcluster/gecko_taskgraph/transforms/python_update.py b/taskcluster/gecko_taskgraph/transforms/python_update.py
new file mode 100644
index 0000000000..f4f135b585
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/python_update.py
@@ -0,0 +1,25 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the repo-update task into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def resolve_keys(config, tasks):
+ for task in tasks:
+ env = task["worker"].setdefault("env", {})
+ env["BRANCH"] = config.params["project"]
+ for envvar in env:
+ resolve_keyed_by(env, envvar, envvar, **config.params)
+
+ for envvar in list(env.keys()):
+ if not env.get(envvar):
+ del env[envvar]
+ yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/release.py b/taskcluster/gecko_taskgraph/transforms/release.py
new file mode 100644
index 0000000000..1158252fe7
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/release.py
@@ -0,0 +1,20 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+Transforms for release tasks
+"""
+
+
+def run_on_releases(config, jobs):
+ """
+    Filter out jobs whose `run-on-releases` is set but does not include the
+    `release_type` parameter.
+ """
+ for job in jobs:
+ release_type = config.params["release_type"]
+ run_on_release_types = job.pop("run-on-releases", None)
+
+ if run_on_release_types is None or release_type in run_on_release_types:
+ yield job
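
A quick standalone check of the filter's behavior, with job shapes reduced to the relevant key:

    def keep(jobs, release_type):
        # mirrors run_on_releases above, minus the taskgraph config object
        for job in jobs:
            allowed = job.pop("run-on-releases", None)
            if allowed is None or release_type in allowed:
                yield job

    jobs = [{"name": "push-beta", "run-on-releases": ["beta"]}, {"name": "always"}]
    assert [j["name"] for j in keep(jobs, "nightly")] == ["always"]
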
diff --git a/taskcluster/gecko_taskgraph/transforms/release_beetmover_signed_addons.py b/taskcluster/gecko_taskgraph/transforms/release_beetmover_signed_addons.py
new file mode 100644
index 0000000000..4c419659a4
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/release_beetmover_signed_addons.py
@@ -0,0 +1,246 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the beetmover task into an actual task description.
+"""
+
+import copy
+import logging
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.dependencies import get_primary_dependency
+from taskgraph.util.schema import Schema, optionally_keyed_by, resolve_keyed_by
+from taskgraph.util.treeherder import inherit_treeherder_from_dep
+from voluptuous import Optional, Required
+
+from gecko_taskgraph.transforms.beetmover import craft_release_properties
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import (
+ copy_attributes_from_dependent_job,
+ release_level,
+)
+from gecko_taskgraph.util.scriptworker import (
+ generate_beetmover_artifact_map,
+ generate_beetmover_upstream_artifacts,
+ get_beetmover_action_scope,
+ get_beetmover_bucket_scope,
+)
+
+logger = logging.getLogger(__name__)
+
+
+transforms = TransformSequence()
+
+
+beetmover_description_schema = Schema(
+ {
+ # attributes is used for enabling artifact-map by declarative artifacts
+ Required("attributes"): {str: object},
+ # unique label to describe this beetmover task, defaults to {dep.label}-beetmover
+ Optional("label"): str,
+ # treeherder is allowed here to override any defaults we use for beetmover. See
+ # taskcluster/gecko_taskgraph/transforms/task.py for the schema details, and the
+ # below transforms for defaults of various values.
+ Optional("treeherder"): task_description_schema["treeherder"],
+ Required("description"): str,
+ Required("worker-type"): optionally_keyed_by("release-level", str),
+ Required("run-on-projects"): [],
+ # locale is passed only for l10n beetmoving
+ Optional("locale"): str,
+ Optional("shipping-phase"): task_description_schema["shipping-phase"],
+ Optional("job-from"): task_description_schema["job-from"],
+ Optional("dependencies"): task_description_schema["dependencies"],
+ }
+)
+
+
+@transforms.add
+def remove_name(config, jobs):
+ for job in jobs:
+ if "name" in job:
+ del job["name"]
+ yield job
+
+
+transforms.add_validate(beetmover_description_schema)
+
+
+@transforms.add
+def resolve_keys(config, jobs):
+ for job in jobs:
+ for field in ("worker-type", "attributes.artifact_map"):
+ resolve_keyed_by(
+ job,
+ field,
+ item_name=job["label"],
+ **{
+ "release-level": release_level(config.params["project"]),
+ "release-type": config.params["release_type"],
+ "project": config.params["project"],
+ },
+ )
+ yield job
+
+
+@transforms.add
+def make_task_description(config, jobs):
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ assert dep_job
+
+ attributes = dep_job.attributes
+
+ treeherder = inherit_treeherder_from_dep(job, dep_job)
+ treeherder.setdefault(
+ "symbol", "langpack(BM{})".format(attributes.get("l10n_chunk", ""))
+ )
+
+ job["attributes"].update(copy_attributes_from_dependent_job(dep_job))
+ job["attributes"]["chunk_locales"] = dep_job.attributes.get(
+ "chunk_locales", ["en-US"]
+ )
+
+ job["description"] = job["description"].format(
+ locales="/".join(job["attributes"]["chunk_locales"]),
+ platform=job["attributes"]["build_platform"],
+ )
+
+ job["scopes"] = [
+ get_beetmover_bucket_scope(config),
+ get_beetmover_action_scope(config),
+ ]
+
+ job["dependencies"] = {"langpack-copy": dep_job.label}
+
+ job["run-on-projects"] = job.get(
+ "run_on_projects", dep_job.attributes["run_on_projects"]
+ )
+ job["treeherder"] = treeherder
+ job["shipping-phase"] = job.get(
+ "shipping-phase", dep_job.attributes["shipping_phase"]
+ )
+ job["shipping-product"] = dep_job.attributes["shipping_product"]
+
+ yield job
+
+
+@transforms.add
+def make_task_worker(config, jobs):
+ for job in jobs:
+ platform = job["attributes"]["build_platform"]
+        locales = job["attributes"]["chunk_locales"]
+
+ job["worker"] = {
+ "implementation": "beetmover",
+ "release-properties": craft_release_properties(config, job),
+ "upstream-artifacts": generate_beetmover_upstream_artifacts(
+ config,
+ job,
+ platform,
+                locales,
+ ),
+ "artifact-map": generate_beetmover_artifact_map(
+                config, job, platform=platform, locale=locales
+ ),
+ }
+
+ yield job
+
+
+@transforms.add
+def yield_all_platform_jobs(config, jobs):
+    # Even though langpacks are now platform-independent, we keep beetmoving them to
+    # the old platform-specific locations; that's why this transform exists.
+    # The linux64 langpacks and the mac-specific ja-JP-mac locale are beetmoved along
+    # with the signing beetmover, so while the dependent jobs here are linux, we only
+    # yield jobs for the other platforms.
+ for job in jobs:
+ platforms = ("linux", "macosx64", "win32", "win64")
+ if "devedition" in job["attributes"]["build_platform"]:
+ platforms = (f"{plat}-devedition" for plat in platforms)
+ for platform in platforms:
+ platform_job = copy.deepcopy(job)
+ if "ja" in platform_job["attributes"]["chunk_locales"] and platform in (
+ "macosx64",
+ "macosx64-devedition",
+ ):
+ platform_job = _strip_ja_data_from_linux_job(platform_job)
+
+ platform_job = _change_platform_data(config, platform_job, platform)
+
+ yield platform_job
+
+
+def _strip_ja_data_from_linux_job(platform_job):
+ # Let's take "ja" out the description. This locale is in a substring like "aa/bb/cc/dd", where
+ # "ja" could be any of "aa", "bb", "cc", "dd"
+ platform_job["description"] = platform_job["description"].replace("ja/", "")
+ platform_job["description"] = platform_job["description"].replace("/ja", "")
+
+ platform_job["worker"]["upstream-artifacts"] = [
+ artifact
+ for artifact in platform_job["worker"]["upstream-artifacts"]
+ if artifact["locale"] != "ja"
+ ]
+
+ return platform_job
+
+
+def _change_platform_in_artifact_map_paths(paths, orig_platform, new_platform):
+ amended_paths = {}
+ for artifact, artifact_info in paths.items():
+ amended_artifact_info = {
+ "checksums_path": artifact_info["checksums_path"].replace(
+ orig_platform, new_platform
+ ),
+ "destinations": [
+ d.replace(orig_platform, new_platform)
+ for d in artifact_info["destinations"]
+ ],
+ }
+ amended_paths[artifact] = amended_artifact_info
+
+ return amended_paths
+
+
+def _change_platform_data(config, platform_job, platform):
+ orig_platform = "linux64"
+ if "devedition" in platform:
+ orig_platform = "linux64-devedition"
+ platform_job["attributes"]["build_platform"] = platform
+ platform_job["label"] = platform_job["label"].replace(orig_platform, platform)
+ platform_job["description"] = platform_job["description"].replace(
+ orig_platform, platform
+ )
+ platform_job["treeherder"]["platform"] = platform_job["treeherder"][
+ "platform"
+ ].replace(orig_platform, platform)
+ platform_job["worker"]["release-properties"]["platform"] = platform
+
+ # amend artifactMap entries as well
+ platform_mapping = {
+ "linux64": "linux-x86_64",
+ "linux": "linux-i686",
+ "macosx64": "mac",
+ "win32": "win32",
+ "win64": "win64",
+ "linux64-devedition": "linux-x86_64",
+ "linux-devedition": "linux-i686",
+ "macosx64-devedition": "mac",
+ "win32-devedition": "win32",
+ "win64-devedition": "win64",
+ }
+ orig_platform = platform_mapping.get(orig_platform, orig_platform)
+ platform = platform_mapping.get(platform, platform)
+ platform_job["worker"]["artifact-map"] = [
+ {
+ "locale": entry["locale"],
+ "taskId": entry["taskId"],
+ "paths": _change_platform_in_artifact_map_paths(
+ entry["paths"], orig_platform, platform
+ ),
+ }
+ for entry in platform_job["worker"]["artifact-map"]
+ ]
+
+ return platform_job
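+
+# Illustrative sketch (hypothetical artifact map): for a linux64 job cloned to
+# macosx64, _change_platform_in_artifact_map_paths rewrites an entry like
+#     {"target.langpack.xpi": {
+#         "checksums_path": "linux-x86_64/de/target.langpack.xpi",
+#         "destinations": [".../linux-x86_64/xpi/de"]}}
+# into the same structure with "linux-x86_64" replaced by "mac", per the
+# platform_mapping table above.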
diff --git a/taskcluster/gecko_taskgraph/transforms/release_deps.py b/taskcluster/gecko_taskgraph/transforms/release_deps.py
new file mode 100644
index 0000000000..e44af576eb
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/release_deps.py
@@ -0,0 +1,61 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Add dependencies to release tasks.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+
+PHASES = ["build", "promote", "push", "ship"]
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def add_dependencies(config, jobs):
+ for job in jobs:
+ dependencies = {}
+ # Add any kind_dependencies_tasks with matching product as dependencies
+ product = job.get("shipping-product")
+ phase = job.get("shipping-phase")
+ if product is None:
+ continue
+
+ required_signoffs = set(
+ job.setdefault("attributes", {}).get("required_signoffs", [])
+ )
+ for dep_task in config.kind_dependencies_tasks.values():
+ # Weed out unwanted tasks.
+ # XXX we have run-on-projects which specifies the on-push behavior;
+ # we need another attribute that specifies release promotion,
+ # possibly which action(s) each task belongs in.
+
+ # We can only depend on tasks in the current or previous phases
+ dep_phase = dep_task.attributes.get("shipping_phase")
+ if dep_phase and PHASES.index(dep_phase) > PHASES.index(phase):
+ continue
+
+ if dep_task.attributes.get("build_platform") and job.get(
+ "attributes", {}
+ ).get("build_platform"):
+ if (
+ dep_task.attributes["build_platform"]
+ != job["attributes"]["build_platform"]
+ ):
+ continue
+ # Add matching product tasks to deps
+ if (
+ dep_task.task.get("shipping-product") == product
+ or dep_task.attributes.get("shipping_product") == product
+ ):
+ dependencies[dep_task.label] = dep_task.label
+ required_signoffs.update(
+ dep_task.attributes.get("required_signoffs", [])
+ )
+
+ job.setdefault("dependencies", {}).update(dependencies)
+ if required_signoffs:
+ job["attributes"]["required_signoffs"] = sorted(required_signoffs)
+
+ yield job
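+
+# Illustrative phase ordering (hypothetical tasks): a job in the "push" phase may
+# depend on kind dependencies whose shipping_phase is "build", "promote" or
+# "push", but a "ship"-phase dependency is skipped because
+# PHASES.index("ship") > PHASES.index("push").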
diff --git a/taskcluster/gecko_taskgraph/transforms/release_flatpak_push.py b/taskcluster/gecko_taskgraph/transforms/release_flatpak_push.py
new file mode 100644
index 0000000000..9242018755
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/release_flatpak_push.py
@@ -0,0 +1,81 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the release-flatpak-push kind into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import Schema, optionally_keyed_by, resolve_keyed_by
+from voluptuous import Optional, Required
+
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import release_level
+from gecko_taskgraph.util.scriptworker import add_scope_prefix
+
+push_flatpak_description_schema = Schema(
+ {
+ Required("name"): str,
+ Required("job-from"): task_description_schema["job-from"],
+ Required("dependencies"): task_description_schema["dependencies"],
+ Required("description"): task_description_schema["description"],
+ Required("treeherder"): task_description_schema["treeherder"],
+ Required("run-on-projects"): task_description_schema["run-on-projects"],
+ Required("worker-type"): optionally_keyed_by("release-level", str),
+ Required("worker"): object,
+ Optional("scopes"): [str],
+ Required("shipping-phase"): task_description_schema["shipping-phase"],
+ Required("shipping-product"): task_description_schema["shipping-product"],
+ Required("flathub-scope"): str,
+ Optional("extra"): task_description_schema["extra"],
+ Optional("attributes"): task_description_schema["attributes"],
+ }
+)
+
+transforms = TransformSequence()
+transforms.add_validate(push_flatpak_description_schema)
+
+
+@transforms.add
+def make_task_description(config, jobs):
+ for job in jobs:
+ if len(job["dependencies"]) != 1:
+ raise Exception("Exactly 1 dependency is required")
+
+ job["worker"]["upstream-artifacts"] = generate_upstream_artifacts(
+ job["dependencies"]
+ )
+
+ resolve_keyed_by(
+ job,
+ "worker.channel",
+ item_name=job["name"],
+ **{"release-type": config.params["release_type"]},
+ )
+ resolve_keyed_by(
+ job,
+ "worker-type",
+ item_name=job["name"],
+ **{"release-level": release_level(config.params["project"])},
+ )
+ if release_level(config.params["project"]) == "production":
+ job.setdefault("scopes", []).append(
+ add_scope_prefix(
+ config,
+ "{}:{}".format(job["flathub-scope"], job["worker"]["channel"]),
+ )
+ )
+ del job["flathub-scope"]
+
+ yield job
+
+
+def generate_upstream_artifacts(dependencies):
+ return [
+ {
+ "taskId": {"task-reference": f"<{task_kind}>"},
+ "taskType": "build",
+ "paths": ["public/build/target.flatpak.tar.xz"],
+ }
+ for task_kind in dependencies.keys()
+ ]
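+
+# Illustrative output (hypothetical dependency kind): for dependencies
+# {"release-flatpak-repackage": "..."} this returns
+#     [{"taskId": {"task-reference": "<release-flatpak-repackage>"},
+#       "taskType": "build",
+#       "paths": ["public/build/target.flatpak.tar.xz"]}]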
diff --git a/taskcluster/gecko_taskgraph/transforms/release_flatpak_repackage.py b/taskcluster/gecko_taskgraph/transforms/release_flatpak_repackage.py
new file mode 100644
index 0000000000..7af1134c3a
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/release_flatpak_repackage.py
@@ -0,0 +1,42 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+
+from gecko_taskgraph.util.attributes import release_level
+from gecko_taskgraph.util.scriptworker import get_release_config
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def format(config, tasks):
+ """Apply format substitution to worker.env and worker.command."""
+
+ format_params = {
+ "release_config": get_release_config(config),
+ "config_params": config.params,
+ }
+
+ for task in tasks:
+ format_params["task"] = task
+
+ command = task.get("worker", {}).get("command", [])
+ task["worker"]["command"] = [x.format(**format_params) for x in command]
+
+ env = task.get("worker", {}).get("env", {})
+ for k in env.keys():
+ resolve_keyed_by(
+ env,
+ k,
+ "flatpak envs",
+ **{
+ "release-level": release_level(config.params["project"]),
+ "project": config.params["project"],
+ }
+ )
+ task["worker"]["env"][k] = env[k].format(**format_params)
+
+ yield task
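+
+# Illustrative substitution (hypothetical command entry): an element such as
+#     "flatpak-build {release_config[version]} {config_params[build_number]}"
+# is formatted with release_config, config_params and the task itself, yielding
+# e.g. "flatpak-build 124.0.1 1".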
diff --git a/taskcluster/gecko_taskgraph/transforms/release_generate_checksums.py b/taskcluster/gecko_taskgraph/transforms/release_generate_checksums.py
new file mode 100644
index 0000000000..0024b88726
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/release_generate_checksums.py
@@ -0,0 +1,53 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the checksums task into an actual task description.
+"""
+
+import copy
+import logging
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+
+from gecko_taskgraph.util.attributes import release_level
+from gecko_taskgraph.util.scriptworker import get_release_config
+
+logger = logging.getLogger(__name__)
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def handle_keyed_by(config, jobs):
+ """Resolve fields that can be keyed by project, etc."""
+ fields = [
+ "run.config",
+ "run.extra-config",
+ ]
+ for job in jobs:
+ job = copy.deepcopy(job)
+ for field in fields:
+ resolve_keyed_by(
+ item=job,
+ field=field,
+ item_name=job["name"],
+ **{"release-level": release_level(config.params["project"])}
+ )
+ yield job
+
+
+@transforms.add
+def interpolate(config, jobs):
+ release_config = get_release_config(config)
+ for job in jobs:
+ mh_options = list(job["run"]["options"])
+ job["run"]["options"] = [
+ option.format(
+ version=release_config["version"],
+ build_number=release_config["build_number"],
+ )
+ for option in mh_options
+ ]
+ yield job
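+
+# Illustrative substitution (hypothetical options): run.options entries such as
+#     ["--version={version}", "--build-number={build_number}"]
+# become ["--version=124.0.1", "--build-number=1"] for a 124.0.1 build1 release.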
diff --git a/taskcluster/gecko_taskgraph/transforms/release_generate_checksums_beetmover.py b/taskcluster/gecko_taskgraph/transforms/release_generate_checksums_beetmover.py
new file mode 100644
index 0000000000..28043debe9
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/release_generate_checksums_beetmover.py
@@ -0,0 +1,133 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the `release-generate-checksums-beetmover` task to also append `build` as a dependency.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.dependencies import get_primary_dependency
+from taskgraph.util.schema import Schema
+from voluptuous import Optional
+
+from gecko_taskgraph.transforms.beetmover import craft_release_properties
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
+from gecko_taskgraph.util.scriptworker import (
+ generate_beetmover_artifact_map,
+ generate_beetmover_upstream_artifacts,
+ get_beetmover_action_scope,
+ get_beetmover_bucket_scope,
+)
+
+transforms = TransformSequence()
+
+
+release_generate_checksums_beetmover_schema = Schema(
+ {
+ # unique label to describe this beetmover task, defaults to {dep.label}-beetmover
+ Optional("label"): str,
+ # treeherder is allowed here to override any defaults we use for beetmover. See
+ # taskcluster/gecko_taskgraph/transforms/task.py for the schema details, and the
+ # below transforms for defaults of various values.
+ Optional("treeherder"): task_description_schema["treeherder"],
+ Optional("shipping-phase"): task_description_schema["shipping-phase"],
+ Optional("shipping-product"): task_description_schema["shipping-product"],
+ Optional("attributes"): task_description_schema["attributes"],
+ Optional("job-from"): task_description_schema["job-from"],
+ Optional("dependencies"): task_description_schema["dependencies"],
+ }
+)
+
+
+@transforms.add
+def remove_name(config, jobs):
+ for job in jobs:
+ if "name" in job:
+ del job["name"]
+ yield job
+
+
+transforms.add_validate(release_generate_checksums_beetmover_schema)
+
+
+@transforms.add
+def make_task_description(config, jobs):
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ assert dep_job
+
+ attributes = copy_attributes_from_dependent_job(dep_job)
+ attributes.update(job.get("attributes", {}))
+
+ treeherder = job.get("treeherder", {})
+ treeherder.setdefault("symbol", "BM-SGenChcks")
+ dep_th_platform = (
+ dep_job.task.get("extra", {})
+ .get("treeherder", {})
+ .get("machine", {})
+ .get("platform", "")
+ )
+ treeherder.setdefault("platform", f"{dep_th_platform}/opt")
+ treeherder.setdefault("tier", 1)
+ treeherder.setdefault("kind", "build")
+
+ job_template = f"{dep_job.label}"
+ label = job_template.replace("signing", "beetmover")
+
+ description = "Transfer *SUMS and *SUMMARY checksums file to S3."
+
+ # first dependency is the signing task for the *SUMS files
+ dependencies = {dep_job.kind: dep_job.label}
+
+ if len(dep_job.dependencies) > 1:
+ raise NotImplementedError(
+ "Can't beetmove a signing task with multiple dependencies"
+ )
+ # update the dependencies with the dependencies of the signing task
+ dependencies.update(dep_job.dependencies)
+
+ bucket_scope = get_beetmover_bucket_scope(config)
+ action_scope = get_beetmover_action_scope(config)
+
+ task = {
+ "label": label,
+ "description": description,
+ "worker-type": "beetmover",
+ "scopes": [bucket_scope, action_scope],
+ "dependencies": dependencies,
+ "attributes": attributes,
+ "run-on-projects": dep_job.attributes.get("run_on_projects"),
+ "treeherder": treeherder,
+ "shipping-phase": "promote",
+ }
+
+ yield task
+
+
+@transforms.add
+def make_task_worker(config, jobs):
+ for job in jobs:
+        valid_beetmover_job = len(job["dependencies"]) == 2 and any(
+            "signing" in j for j in job["dependencies"]
+        )
+        if not valid_beetmover_job:
+            raise NotImplementedError(
+                "Beetmover must have exactly two dependencies, one of them a signing task."
+            )
+
+ platform = job["attributes"]["build_platform"]
+ worker = {
+ "implementation": "beetmover",
+ "release-properties": craft_release_properties(config, job),
+ "upstream-artifacts": generate_beetmover_upstream_artifacts(
+ config, job, platform=None, locale=None
+ ),
+ "artifact-map": generate_beetmover_artifact_map(
+ config, job, platform=platform
+ ),
+ }
+
+ job["worker"] = worker
+
+ yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/release_generate_checksums_signing.py b/taskcluster/gecko_taskgraph/transforms/release_generate_checksums_signing.py
new file mode 100644
index 0000000000..12cbcdc721
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/release_generate_checksums_signing.py
@@ -0,0 +1,102 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the release-generate-checksums-signing task into task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.dependencies import get_primary_dependency
+from taskgraph.util.schema import Schema
+from taskgraph.util.taskcluster import get_artifact_path
+from voluptuous import Optional
+
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
+from gecko_taskgraph.util.scriptworker import get_signing_cert_scope
+
+release_generate_checksums_signing_schema = Schema(
+ {
+ Optional("label"): str,
+ Optional("dependencies"): task_description_schema["dependencies"],
+ Optional("attributes"): task_description_schema["attributes"],
+ Optional("treeherder"): task_description_schema["treeherder"],
+ Optional("shipping-product"): task_description_schema["shipping-product"],
+ Optional("shipping-phase"): task_description_schema["shipping-phase"],
+ Optional("job-from"): task_description_schema["job-from"],
+ }
+)
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def remove_name(config, jobs):
+ for job in jobs:
+ if "name" in job:
+ del job["name"]
+ yield job
+
+
+transforms.add_validate(release_generate_checksums_signing_schema)
+
+
+@transforms.add
+def make_release_generate_checksums_signing_description(config, jobs):
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ assert dep_job
+
+ attributes = copy_attributes_from_dependent_job(dep_job)
+
+ treeherder = job.get("treeherder", {})
+ treeherder.setdefault("symbol", "SGenChcks")
+ dep_th_platform = (
+ dep_job.task.get("extra", {})
+ .get("treeherder", {})
+ .get("machine", {})
+ .get("platform", "")
+ )
+ treeherder.setdefault("platform", f"{dep_th_platform}/opt")
+ treeherder.setdefault("tier", 1)
+ treeherder.setdefault("kind", "build")
+
+ job_template = "{}-{}".format(dep_job.label, "signing")
+ label = job.get("label", job_template)
+ description = "Signing of the overall release-related checksums"
+
+ dependencies = {dep_job.kind: dep_job.label}
+
+ upstream_artifacts = [
+ {
+ "taskId": {"task-reference": f"<{str(dep_job.kind)}>"},
+ "taskType": "build",
+ "paths": [
+ get_artifact_path(dep_job, "SHA256SUMS"),
+ get_artifact_path(dep_job, "SHA512SUMS"),
+ ],
+ "formats": ["autograph_gpg"],
+ }
+ ]
+
+ signing_cert_scope = get_signing_cert_scope(config)
+
+ task = {
+ "label": label,
+ "description": description,
+ "worker-type": "linux-signing",
+ "worker": {
+ "implementation": "scriptworker-signing",
+ "upstream-artifacts": upstream_artifacts,
+ "max-run-time": 3600,
+ },
+ "scopes": [
+ signing_cert_scope,
+ ],
+ "dependencies": dependencies,
+ "attributes": attributes,
+ "run-on-projects": dep_job.attributes.get("run_on_projects"),
+ "treeherder": treeherder,
+ }
+
+ yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/release_mark_as_shipped.py b/taskcluster/gecko_taskgraph/transforms/release_mark_as_shipped.py
new file mode 100644
index 0000000000..f2ce148320
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/release_mark_as_shipped.py
@@ -0,0 +1,39 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+
+from gecko_taskgraph.util.attributes import release_level
+from gecko_taskgraph.util.scriptworker import get_release_config
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def make_task_description(config, jobs):
+ release_config = get_release_config(config)
+ for job in jobs:
+ resolve_keyed_by(
+ job,
+ "worker-type",
+ item_name=job["name"],
+ **{"release-level": release_level(config.params["project"])}
+ )
+ resolve_keyed_by(
+ job,
+ "scopes",
+ item_name=job["name"],
+ **{"release-level": release_level(config.params["project"])}
+ )
+
+ job["worker"][
+ "release-name"
+ ] = "{product}-{version}-build{build_number}".format(
+ product=job["shipping-product"].capitalize(),
+ version=release_config["version"],
+ build_number=release_config["build_number"],
+ )
+
+ yield job
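+
+# Illustrative result (hypothetical release): for shipping-product "firefox",
+# version "124.0.1" and build number 1, the worker release-name becomes
+# "Firefox-124.0.1-build1".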
diff --git a/taskcluster/gecko_taskgraph/transforms/release_msix_push.py b/taskcluster/gecko_taskgraph/transforms/release_msix_push.py
new file mode 100644
index 0000000000..22b356d5a2
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/release_msix_push.py
@@ -0,0 +1,87 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the release-msix-push kind into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import Schema, optionally_keyed_by, resolve_keyed_by
+from voluptuous import Optional, Required
+
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import release_level
+from gecko_taskgraph.util.scriptworker import add_scope_prefix
+
+push_msix_description_schema = Schema(
+ {
+ Required("name"): str,
+ Required("job-from"): task_description_schema["job-from"],
+ Required("dependencies"): task_description_schema["dependencies"],
+ Required("description"): task_description_schema["description"],
+ Required("treeherder"): task_description_schema["treeherder"],
+ Required("run-on-projects"): task_description_schema["run-on-projects"],
+ Required("worker-type"): optionally_keyed_by("release-level", str),
+ Required("worker"): object,
+ Optional("scopes"): [str],
+ Required("shipping-phase"): task_description_schema["shipping-phase"],
+ Required("shipping-product"): task_description_schema["shipping-product"],
+ Optional("extra"): task_description_schema["extra"],
+ Optional("attributes"): task_description_schema["attributes"],
+ }
+)
+
+transforms = TransformSequence()
+transforms.add_validate(push_msix_description_schema)
+
+
+@transforms.add
+def make_task_description(config, jobs):
+ for job in jobs:
+ job["worker"]["upstream-artifacts"] = generate_upstream_artifacts(
+ job["dependencies"]
+ )
+
+ resolve_keyed_by(
+ job,
+ "worker.channel",
+ item_name=job["name"],
+ **{"release-type": config.params["release_type"]},
+ )
+ resolve_keyed_by(
+ job,
+ "worker.publish-mode",
+ item_name=job["name"],
+ **{"release-type": config.params["release_type"]},
+ )
+ resolve_keyed_by(
+ job,
+ "worker-type",
+ item_name=job["name"],
+ **{"release-level": release_level(config.params["project"])},
+ )
+ if release_level(config.params["project"]) == "production":
+ job.setdefault("scopes", []).append(
+ add_scope_prefix(
+ config,
+ "microsoftstore:{}".format(job["worker"]["channel"]),
+ )
+ )
+
+ # Override shipping-phase for release: push to the Store early to
+ # allow time for certification.
+ if job["worker"]["publish-mode"] == "Manual":
+ job["shipping-phase"] = "promote"
+
+ yield job
+
+
+def generate_upstream_artifacts(dependencies):
+ return [
+ {
+ "taskId": {"task-reference": f"<{task_kind}>"},
+ "taskType": "build",
+ "paths": ["public/build/target.store.msix"],
+ }
+ for task_kind in dependencies.keys()
+ ]
diff --git a/taskcluster/gecko_taskgraph/transforms/release_notifications.py b/taskcluster/gecko_taskgraph/transforms/release_notifications.py
new file mode 100644
index 0000000000..86109ec5ed
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/release_notifications.py
@@ -0,0 +1,73 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Add notifications via taskcluster-notify for release tasks
+"""
+from string import Formatter
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+
+from gecko_taskgraph.util.scriptworker import get_release_config
+
+transforms = TransformSequence()
+
+
+class TitleCaseFormatter(Formatter):
+    """Formatter adding a "t" conversion that title-cases a field, e.g. "{product!t}"."""
+
+    def convert_field(self, value, conversion):
+        if conversion == "t":
+            return str(value).title()
+        return super().convert_field(value, conversion)
+
+
+titleformatter = TitleCaseFormatter()
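+
+# Illustrative usage (hypothetical fields): only the field carrying the "t"
+# conversion is title-cased, e.g.
+#     titleformatter.format("{product!t} {version} ready", product="firefox", version="124.0")
+# renders "Firefox 124.0 ready".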
+
+
+@transforms.add
+def add_notifications(config, jobs):
+ release_config = get_release_config(config)
+
+ for job in jobs:
+ label = "{}-{}".format(config.kind, job["name"])
+
+ notifications = job.pop("notifications", None)
+ if notifications:
+ resolve_keyed_by(
+ notifications, "emails", label, project=config.params["project"]
+ )
+ emails = notifications["emails"]
+ format_kwargs = dict(
+ task=job,
+ config=config.__dict__,
+ release_config=release_config,
+ )
+ subject = titleformatter.format(notifications["subject"], **format_kwargs)
+ message = titleformatter.format(notifications["message"], **format_kwargs)
+ emails = [email.format(**format_kwargs) for email in emails]
+
+ # By default, we only send mail on success to avoid messages like 'blah is in the
+ # candidates dir' when cancelling graphs, dummy job failure, etc
+ status_types = notifications.get("status-types", ["on-completed"])
+ for s in status_types:
+ job.setdefault("routes", []).extend(
+ [f"notify.email.{email}.{s}" for email in emails]
+ )
+
+ # Customize the email subject to include release name and build number
+ job.setdefault("extra", {}).update(
+ {
+ "notify": {
+ "email": {
+ "subject": subject,
+ }
+ }
+ }
+ )
+ if message:
+ job["extra"]["notify"]["email"]["content"] = message
+
+ yield job
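+
+# Illustrative routes (hypothetical address): with emails resolved to
+# ["release-signoff@example.com"] and the default status-types, the job gains
+# the route "notify.email.release-signoff@example.com.on-completed".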
diff --git a/taskcluster/gecko_taskgraph/transforms/release_sign_and_push_langpacks.py b/taskcluster/gecko_taskgraph/transforms/release_sign_and_push_langpacks.py
new file mode 100644
index 0000000000..6761c76416
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/release_sign_and_push_langpacks.py
@@ -0,0 +1,190 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the release-sign-and-push task into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.dependencies import get_primary_dependency
+from taskgraph.util.schema import Schema, optionally_keyed_by, resolve_keyed_by
+from taskgraph.util.treeherder import inherit_treeherder_from_dep
+from voluptuous import Any, Optional, Required
+
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import (
+ copy_attributes_from_dependent_job,
+ release_level,
+)
+
+transforms = TransformSequence()
+
+langpack_sign_push_description_schema = Schema(
+ {
+ Required("label"): str,
+ Required("description"): str,
+ Required("worker-type"): optionally_keyed_by("release-level", str),
+ Required("worker"): {
+ Required("channel"): optionally_keyed_by(
+ "project", "platform", Any("listed", "unlisted")
+ ),
+ Required("upstream-artifacts"): None, # Processed here below
+ },
+ Required("run-on-projects"): [],
+ Required("scopes"): optionally_keyed_by("release-level", [str]),
+ Required("shipping-phase"): task_description_schema["shipping-phase"],
+ Optional("job-from"): task_description_schema["job-from"],
+ Optional("attributes"): task_description_schema["attributes"],
+ Optional("dependencies"): task_description_schema["dependencies"],
+ }
+)
+
+
+@transforms.add
+def set_label(config, jobs):
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ assert dep_job
+
+ job["label"] = f"push-langpacks-{dep_job.label}"
+
+ if "name" in job:
+ del job["name"]
+
+ yield job
+
+
+transforms.add_validate(langpack_sign_push_description_schema)
+
+
+@transforms.add
+def resolve_keys(config, jobs):
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ assert dep_job
+
+ resolve_keyed_by(
+ job,
+ "worker-type",
+ item_name=job["label"],
+ **{"release-level": release_level(config.params["project"])},
+ )
+ resolve_keyed_by(
+ job,
+ "scopes",
+ item_name=job["label"],
+ **{"release-level": release_level(config.params["project"])},
+ )
+ resolve_keyed_by(
+ job,
+ "worker.channel",
+ item_name=job["label"],
+ project=config.params["project"],
+ platform=dep_job.attributes["build_platform"],
+ )
+
+ yield job
+
+
+@transforms.add
+def copy_attributes(config, jobs):
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ assert dep_job
+
+ job.setdefault("attributes", {}).update(
+ copy_attributes_from_dependent_job(dep_job)
+ )
+ job["attributes"]["chunk_locales"] = dep_job.attributes.get(
+ "chunk_locales", ["en-US"]
+ )
+
+ yield job
+
+
+@transforms.add
+def filter_out_macos_jobs_but_mac_only_locales(config, jobs):
+    """Keep linux jobs as-is; keep macosx jobs only to ship the mac-only ja-JP-mac locale."""
+    for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ assert dep_job
+
+ build_platform = dep_job.attributes.get("build_platform")
+
+ if build_platform in ("linux64-devedition", "linux64-shippable"):
+ yield job
+ elif (
+ build_platform in ("macosx64-devedition", "macosx64-shippable")
+ and "ja-JP-mac" in job["attributes"]["chunk_locales"]
+ ):
+ # Other locales of the same job shouldn't be processed
+ job["attributes"]["chunk_locales"] = ["ja-JP-mac"]
+ job["label"] = job["label"].replace(
+                # The trailing slash keeps chunk 1 (seen on try) from also matching chunk 10
+ "-{}/".format(job["attributes"]["l10n_chunk"]),
+ "-ja-JP-mac/",
+ )
+ yield job
+
+
+@transforms.add
+def make_task_description(config, jobs):
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ assert dep_job
+
+ treeherder = inherit_treeherder_from_dep(job, dep_job)
+ treeherder.setdefault(
+ "symbol", "langpack(SnP{})".format(job["attributes"].get("l10n_chunk", ""))
+ )
+
+ job["description"] = job["description"].format(
+ locales="/".join(job["attributes"]["chunk_locales"]),
+ )
+
+ job["dependencies"] = {dep_job.kind: dep_job.label}
+ job["treeherder"] = treeherder
+
+ yield job
+
+
+def generate_upstream_artifacts(upstream_task_ref, locales):
+ return [
+ {
+ "taskId": {"task-reference": upstream_task_ref},
+ "taskType": "build",
+ "paths": [
+ "public/build{locale}/target.langpack.xpi".format(
+ locale="" if locale == "en-US" else "/" + locale
+ )
+ for locale in locales
+ ],
+ }
+ ]
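+
+# Illustrative output (hypothetical locales): generate_upstream_artifacts(
+#     "<shippable-l10n>", ["en-US", "de"]) returns a single entry whose paths are
+#     ["public/build/target.langpack.xpi", "public/build/de/target.langpack.xpi"],
+# i.e. en-US has no locale sub-directory.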
+
+
+@transforms.add
+def make_task_worker(config, jobs):
+ for job in jobs:
+ upstream_task_ref = get_upstream_task_ref(
+ job, expected_kinds=("build", "shippable-l10n")
+ )
+
+ job["worker"]["implementation"] = "push-addons"
+ job["worker"]["upstream-artifacts"] = generate_upstream_artifacts(
+ upstream_task_ref, job["attributes"]["chunk_locales"]
+ )
+
+ yield job
+
+
+def get_upstream_task_ref(job, expected_kinds):
+ upstream_tasks = [
+ job_kind
+ for job_kind in job["dependencies"].keys()
+ if job_kind in expected_kinds
+ ]
+
+    if len(upstream_tasks) != 1:
+        raise Exception("Exactly one upstream dependency expected")
+
+ return f"<{upstream_tasks[0]}>"
diff --git a/taskcluster/gecko_taskgraph/transforms/release_snap_repackage.py b/taskcluster/gecko_taskgraph/transforms/release_snap_repackage.py
new file mode 100644
index 0000000000..659a203971
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/release_snap_repackage.py
@@ -0,0 +1,39 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+
+from gecko_taskgraph.util.attributes import release_level
+from gecko_taskgraph.util.scriptworker import get_release_config
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def format(config, tasks):
+ """Apply format substitution to worker.env and worker.command."""
+
+ format_params = {
+ "release_config": get_release_config(config),
+ "config_params": config.params,
+ }
+
+ for task in tasks:
+ format_params["task"] = task
+
+ command = task.get("worker", {}).get("command", [])
+ task["worker"]["command"] = [x.format(**format_params) for x in command]
+
+ env = task.get("worker", {}).get("env", {})
+ for k in env.keys():
+ resolve_keyed_by(
+ env,
+ k,
+ "snap envs",
+ **{"release-level": release_level(config.params["project"])}
+ )
+ task["worker"]["env"][k] = env[k].format(**format_params)
+
+ yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/release_started.py b/taskcluster/gecko_taskgraph/transforms/release_started.py
new file mode 100644
index 0000000000..67e27332c3
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/release_started.py
@@ -0,0 +1,52 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Add buglist-email notifications via taskcluster-notify for release-started tasks
+"""
+from shlex import quote as shell_quote
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def add_notifications(config, jobs):
+ for job in jobs:
+ label = "{}-{}".format(config.kind, job["name"])
+
+ resolve_keyed_by(job, "emails", label, project=config.params["project"])
+ emails = [email.format(config=config.__dict__) for email in job.pop("emails")]
+
+ command = [
+ "release",
+ "send-buglist-email",
+ "--version",
+ config.params["version"],
+ "--product",
+ job["shipping-product"],
+ "--revision",
+ config.params["head_rev"],
+ "--build-number",
+ str(config.params["build_number"]),
+ "--repo",
+ config.params["head_repository"],
+ ]
+ for address in emails:
+ command += ["--address", address]
+ command += [
+ # We wrap this in `{'task-reference': ...}` below
+ "--task-group-id",
+ "<decision>",
+ ]
+
+ job["scopes"] = [f"notify:email:{address}" for address in emails]
+ job["run"] = {
+ "using": "mach",
+ "sparse-profile": "mach",
+ "mach": {"task-reference": " ".join(map(shell_quote, command))},
+ }
+
+ yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/release_version_bump.py b/taskcluster/gecko_taskgraph/transforms/release_version_bump.py
new file mode 100644
index 0000000000..a0f3f69d05
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/release_version_bump.py
@@ -0,0 +1,42 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the update generation task into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def handle_keyed_by(config, tasks):
+ """Resolve fields that can be keyed by platform, etc."""
+ default_fields = [
+ "worker.push",
+ "worker.bump-files",
+ "worker-type",
+ ]
+ for task in tasks:
+ fields = default_fields[:]
+ for additional_field in (
+ "l10n-bump-info",
+ "source-repo",
+ "dontbuild",
+ "ignore-closed-tree",
+ ):
+ if additional_field in task["worker"]:
+ fields.append(f"worker.{additional_field}")
+ for field in fields:
+ resolve_keyed_by(
+ task,
+ field,
+ item_name=task["name"],
+ **{
+ "project": config.params["project"],
+ "release-type": config.params["release_type"],
+ },
+ )
+ yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/repackage.py b/taskcluster/gecko_taskgraph/transforms/repackage.py
new file mode 100644
index 0000000000..ea684d71c5
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/repackage.py
@@ -0,0 +1,716 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the repackage task into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.dependencies import get_primary_dependency
+from taskgraph.util.schema import Schema, optionally_keyed_by, resolve_keyed_by
+from taskgraph.util.taskcluster import get_artifact_prefix
+from voluptuous import Extra, Optional, Required
+
+from gecko_taskgraph.transforms.job import job_description_schema
+from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
+from gecko_taskgraph.util.copy_task import copy_task
+from gecko_taskgraph.util.platforms import architecture, archive_format
+from gecko_taskgraph.util.workertypes import worker_type_implementation
+
+packaging_description_schema = Schema(
+ {
+ # unique label to describe this repackaging task
+ Optional("label"): str,
+ Optional("worker-type"): str,
+ Optional("worker"): object,
+ Optional("attributes"): job_description_schema["attributes"],
+ Optional("dependencies"): job_description_schema["dependencies"],
+ # treeherder is allowed here to override any defaults we use for repackaging. See
+ # taskcluster/gecko_taskgraph/transforms/task.py for the schema details, and the
+ # below transforms for defaults of various values.
+ Optional("treeherder"): job_description_schema["treeherder"],
+ # If a l10n task, the corresponding locale
+ Optional("locale"): str,
+ # Routes specific to this task, if defined
+ Optional("routes"): [str],
+ # passed through directly to the job description
+ Optional("extra"): job_description_schema["extra"],
+ # passed through to job description
+ Optional("fetches"): job_description_schema["fetches"],
+ Optional("run-on-projects"): job_description_schema["run-on-projects"],
+ # Shipping product and phase
+ Optional("shipping-product"): job_description_schema["shipping-product"],
+ Optional("shipping-phase"): job_description_schema["shipping-phase"],
+ Required("package-formats"): optionally_keyed_by(
+ "build-platform", "release-type", "build-type", [str]
+ ),
+ Optional("msix"): {
+ Optional("channel"): optionally_keyed_by(
+ "package-format",
+ "level",
+ "build-platform",
+ "release-type",
+ "shipping-product",
+ str,
+ ),
+ Optional("identity-name"): optionally_keyed_by(
+ "package-format",
+ "level",
+ "build-platform",
+ "release-type",
+ "shipping-product",
+ str,
+ ),
+ Optional("publisher"): optionally_keyed_by(
+ "package-format",
+ "level",
+ "build-platform",
+ "release-type",
+ "shipping-product",
+ str,
+ ),
+ Optional("publisher-display-name"): optionally_keyed_by(
+ "package-format",
+ "level",
+ "build-platform",
+ "release-type",
+ "shipping-product",
+ str,
+ ),
+ Optional("vendor"): str,
+ },
+ # All l10n jobs use mozharness
+ Required("mozharness"): {
+ Extra: object,
+ # Config files passed to the mozharness script
+ Required("config"): optionally_keyed_by("build-platform", [str]),
+ # Additional paths to look for mozharness configs in. These should be
+ # relative to the base of the source checkout
+ Optional("config-paths"): [str],
+ # if true, perform a checkout of a comm-central based branch inside the
+ # gecko checkout
+ Optional("comm-checkout"): bool,
+ Optional("run-as-root"): bool,
+ Optional("use-caches"): bool,
+ },
+ Optional("job-from"): job_description_schema["job-from"],
+ }
+)
+
+# The configuration passed to the mozharness repackage script. This defines the
+# arguments passed to `mach repackage`
+# - `args` is interpolated by mozharness (`{package-name}`, `{installer-tag}`,
+# `{stub-installer-tag}`, `{sfx-stub}`, `{wsx-stub}`, `{fetch-dir}`), with values
+# from mozharness.
+# - `inputs` are passed as long-options, with the filename prefixed by
+# `MOZ_FETCH_DIR`. The filename is interpolated by taskgraph
+# (`{archive_format}`).
+# - `output` is passed to `--output`, with the filename prefixed by the output
+# directory.
+PACKAGE_FORMATS = {
+ "mar": {
+ "args": [
+ "mar",
+ "--arch",
+ "{architecture}",
+ "--mar-channel-id",
+ "{mar-channel-id}",
+ ],
+ "inputs": {
+ "input": "target{archive_format}",
+ "mar": "mar-tools/mar",
+ },
+ "output": "target.complete.mar",
+ },
+ "msi": {
+ "args": [
+ "msi",
+ "--wsx",
+ "{wsx-stub}",
+ "--version",
+ "{version_display}",
+ "--locale",
+ "{_locale}",
+ "--arch",
+ "{architecture}",
+ "--candle",
+ "{fetch-dir}/candle.exe",
+ "--light",
+ "{fetch-dir}/light.exe",
+ ],
+ "inputs": {
+ "setupexe": "target.installer.exe",
+ },
+ "output": "target.installer.msi",
+ },
+ "msix": {
+ "args": [
+ "msix",
+ "--channel",
+ "{msix-channel}",
+ "--publisher",
+ "{msix-publisher}",
+ "--publisher-display-name",
+ "{msix-publisher-display-name}",
+ "--identity-name",
+ "{msix-identity-name}",
+ "--vendor",
+ "{msix-vendor}",
+ "--arch",
+ "{architecture}",
+ # For langpacks. Ignored if directory does not exist.
+ "--distribution-dir",
+ "{fetch-dir}/distribution",
+ "--verbose",
+ "--makeappx",
+ "{fetch-dir}/msix-packaging/makemsix",
+ ],
+ "inputs": {
+ "input": "target{archive_format}",
+ },
+ "output": "target.installer.msix",
+ },
+ "msix-store": {
+ "args": [
+ "msix",
+ "--channel",
+ "{msix-channel}",
+ "--publisher",
+ "{msix-publisher}",
+ "--publisher-display-name",
+ "{msix-publisher-display-name}",
+ "--identity-name",
+ "{msix-identity-name}",
+ "--vendor",
+ "{msix-vendor}",
+ "--arch",
+ "{architecture}",
+ # For langpacks. Ignored if directory does not exist.
+ "--distribution-dir",
+ "{fetch-dir}/distribution",
+ "--verbose",
+ "--makeappx",
+ "{fetch-dir}/msix-packaging/makemsix",
+ ],
+ "inputs": {
+ "input": "target{archive_format}",
+ },
+ "output": "target.store.msix",
+ },
+ "dmg": {
+ "args": ["dmg"],
+ "inputs": {
+ "input": "target{archive_format}",
+ },
+ "output": "target.dmg",
+ },
+ "dmg-attrib": {
+ "args": [
+ "dmg",
+ "--attribution_sentinel",
+ "__MOZCUSTOM__",
+ ],
+ "inputs": {
+ "input": "target{archive_format}",
+ },
+ "output": "target.dmg",
+ },
+ "pkg": {
+ "args": ["pkg"],
+ "inputs": {
+ "input": "target{archive_format}",
+ },
+ "output": "target.pkg",
+ },
+ "installer": {
+ "args": [
+ "installer",
+ "--package-name",
+ "{package-name}",
+ "--tag",
+ "{installer-tag}",
+ "--sfx-stub",
+ "{sfx-stub}",
+ ],
+ "inputs": {
+ "package": "target{archive_format}",
+ "setupexe": "setup.exe",
+ },
+ "output": "target.installer.exe",
+ },
+ "installer-stub": {
+ "args": [
+ "installer",
+ "--tag",
+ "{stub-installer-tag}",
+ "--sfx-stub",
+ "{sfx-stub}",
+ ],
+ "inputs": {
+ "setupexe": "setup-stub.exe",
+ },
+ "output": "target.stub-installer.exe",
+ },
+ "deb": {
+ "args": [
+ "deb",
+ "--arch",
+ "{architecture}",
+ "--templates",
+ "browser/installer/linux/app/debian",
+ "--version",
+ "{version_display}",
+ "--build-number",
+ "{build_number}",
+ "--release-product",
+ "{release_product}",
+ "--release-type",
+ "{release_type}",
+ ],
+ "inputs": {
+ "input": "target{archive_format}",
+ },
+ "output": "target.deb",
+ },
+ "deb-l10n": {
+ "args": [
+ "deb-l10n",
+ "--version",
+ "{version_display}",
+ "--build-number",
+ "{build_number}",
+ "--templates",
+ "browser/installer/linux/langpack/debian",
+ "--release-product",
+ "{release_product}",
+ ],
+ "inputs": {
+ "input-xpi-file": "target.langpack.xpi",
+ "input-tar-file": "target{archive_format}",
+ },
+ "output": "target.langpack.deb",
+ },
+}
+MOZHARNESS_EXPANSIONS = [
+ "package-name",
+ "installer-tag",
+ "fetch-dir",
+ "stub-installer-tag",
+ "sfx-stub",
+ "wsx-stub",
+]
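+
+# Illustrative substitution (hypothetical win64 build): for the "mar" format,
+# substs such as {"architecture": "x86_64", "archive_format": ".zip",
+# "mar-channel-id": "firefox-mozilla-release"} turn the args into
+#     ["mar", "--arch", "x86_64", "--mar-channel-id", "firefox-mozilla-release"]
+# and the input into "target.zip", while names listed in MOZHARNESS_EXPANSIONS
+# (e.g. "{package-name}") are left intact for mozharness to fill in later.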
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def remove_name(config, jobs):
+ for job in jobs:
+ if "name" in job:
+ del job["name"]
+ yield job
+
+
+transforms.add_validate(packaging_description_schema)
+
+
+@transforms.add
+def copy_in_useful_magic(config, jobs):
+ """Copy attributes from upstream task to be used for keyed configuration."""
+ for job in jobs:
+ dep = get_primary_dependency(config, job)
+ assert dep
+
+ job["build-platform"] = dep.attributes.get("build_platform")
+ job["shipping-product"] = dep.attributes.get("shipping_product")
+ job["build-type"] = dep.attributes.get("build_type")
+ yield job
+
+
+@transforms.add
+def handle_keyed_by(config, jobs):
+ """Resolve fields that can be keyed by platform, etc, but not `msix.*` fields
+ that can be keyed by `package-format`. Such fields are handled specially below.
+ """
+ fields = [
+ "mozharness.config",
+ "package-formats",
+ "worker.max-run-time",
+ ]
+ for job in jobs:
+ job = copy_task(job) # don't overwrite dict values here
+ for field in fields:
+ resolve_keyed_by(
+ item=job,
+ field=field,
+ item_name="?",
+ **{
+ "release-type": config.params["release_type"],
+ "level": config.params["level"],
+ },
+ )
+ yield job
+
+
+@transforms.add
+def make_repackage_description(config, jobs):
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ assert dep_job
+
+ label = job.get("label", dep_job.label.replace("signing-", "repackage-"))
+ job["label"] = label
+
+ yield job
+
+
+@transforms.add
+def make_job_description(config, jobs):
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ assert dep_job
+
+ dependencies = {dep_job.kind: dep_job.label}
+
+ attributes = copy_attributes_from_dependent_job(dep_job)
+ attributes["repackage_type"] = "repackage"
+
+ locale = attributes.get("locale", job.get("locale"))
+ if locale:
+ attributes["locale"] = locale
+
+ description = (
+ "Repackaging for locale '{locale}' for build '"
+ "{build_platform}/{build_type}'".format(
+ locale=attributes.get("locale", "en-US"),
+ build_platform=attributes.get("build_platform"),
+ build_type=attributes.get("build_type"),
+ )
+ )
+
+ treeherder = job.get("treeherder", {})
+ treeherder.setdefault("symbol", "Rpk")
+ dep_th_platform = dep_job.task.get("extra", {}).get("treeherder-platform")
+ treeherder.setdefault("platform", dep_th_platform)
+ treeherder.setdefault("tier", 1)
+ treeherder.setdefault("kind", "build")
+
+ # Search dependencies before adding langpack dependencies.
+ signing_task = None
+ repackage_signing_task = None
+ for dependency in dependencies.keys():
+ if "repackage-signing" in dependency:
+ repackage_signing_task = dependency
+ elif "signing" in dependency or "notarization" in dependency:
+ signing_task = dependency
+
+ if config.kind == "repackage-msi":
+ treeherder["symbol"] = "MSI({})".format(locale or "N")
+
+ elif config.kind == "repackage-msix":
+ assert not locale
+
+ # Like "MSIXs(Bs)".
+ treeherder["symbol"] = "MSIX({})".format(
+ dep_job.task.get("extra", {}).get("treeherder", {}).get("symbol", "B")
+ )
+
+ elif config.kind == "repackage-shippable-l10n-msix":
+ assert not locale
+
+ if attributes.get("l10n_chunk") or attributes.get("chunk_locales"):
+ # We don't want to produce MSIXes for single-locale repack builds.
+ continue
+
+ description = (
+ "Repackaging with multiple locales for build '"
+ "{build_platform}/{build_type}'".format(
+ build_platform=attributes.get("build_platform"),
+ build_type=attributes.get("build_type"),
+ )
+ )
+
+ # Like "MSIXs(Bs-multi)".
+ treeherder["symbol"] = "MSIX({}-multi)".format(
+ dep_job.task.get("extra", {}).get("treeherder", {}).get("symbol", "B")
+ )
+
+ fetches = job.setdefault("fetches", {})
+
+            # The keys are unique, like `shippable-l10n-signing-linux64-shippable-1/opt`,
+            # so we can't look the tasks up directly; we must filter for them.
+ for t in config.kind_dependencies_tasks.values():
+ if t.kind != "shippable-l10n-signing":
+ continue
+ if t.attributes["build_platform"] != "linux64-shippable":
+ continue
+ if t.attributes["build_type"] != "opt":
+ continue
+
+ dependencies.update({t.label: t.label})
+
+ fetches.update(
+ {
+ t.label: [
+ {
+ "artifact": f"{loc}/target.langpack.xpi",
+ "extract": False,
+ # Otherwise we can't disambiguate locales!
+ "dest": f"distribution/extensions/{loc}",
+ }
+ for loc in t.attributes["chunk_locales"]
+ ]
+ }
+ )
+
+ elif config.kind == "repackage-deb":
+ attributes["repackage_type"] = "repackage-deb"
+ description = (
+ "Repackaging the '{build_platform}/{build_type}' "
+ "{version} build into a '.deb' package"
+ ).format(
+ build_platform=attributes.get("build_platform"),
+ build_type=attributes.get("build_type"),
+ version=config.params["version"],
+ )
+
+ _fetch_subst_locale = "en-US"
+ if locale:
+ _fetch_subst_locale = locale
+
+ worker_type = job["worker-type"]
+ build_platform = attributes["build_platform"]
+
+ use_stub = attributes.get("stub-installer")
+
+ repackage_config = []
+ package_formats = job.get("package-formats")
+ if use_stub and not repackage_signing_task and "msix" not in package_formats:
+            # if the repackage signing task doesn't exist, generate the stub installer
+ package_formats += ["installer-stub"]
+ for format in package_formats:
+ command = copy_task(PACKAGE_FORMATS[format])
+ substs = {
+ "archive_format": archive_format(build_platform),
+ "_locale": _fetch_subst_locale,
+ "architecture": architecture(build_platform),
+ "version_display": config.params["version"],
+ "mar-channel-id": attributes["mar-channel-id"],
+ "build_number": config.params["build_number"],
+ "release_product": config.params["release_product"],
+ "release_type": config.params["release_type"],
+ }
+ # Allow us to replace `args` as well, but specifying things expanded in mozharness
+ # without breaking .format and without allowing unknown through.
+ substs.update({name: f"{{{name}}}" for name in MOZHARNESS_EXPANSIONS})
+
+ # We need to resolve `msix.*` values keyed by `package-format` for each format, not
+ # just once, so we update a temporary copy just for extracting these values.
+ temp_job = copy_task(job)
+ for msix_key in (
+ "channel",
+ "identity-name",
+ "publisher",
+ "publisher-display-name",
+ "vendor",
+ ):
+ resolve_keyed_by(
+ item=temp_job,
+ field=f"msix.{msix_key}",
+ item_name="?",
+ **{
+ "package-format": format,
+ "release-type": config.params["release_type"],
+ "level": config.params["level"],
+ },
+ )
+
+ # Turn `msix.channel` into `msix-channel`, etc.
+ value = temp_job.get("msix", {}).get(msix_key)
+ if value:
+ substs.update(
+ {f"msix-{msix_key}": value},
+ )
+
+ command["inputs"] = {
+ name: filename.format(**substs)
+ for name, filename in command["inputs"].items()
+ }
+ command["args"] = [arg.format(**substs) for arg in command["args"]]
+ if "installer" in format and "aarch64" not in build_platform:
+ command["args"].append("--use-upx")
+
+ repackage_config.append(command)
+
+ run = job.get("mozharness", {})
+ run.update(
+ {
+ "using": "mozharness",
+ "script": "mozharness/scripts/repackage.py",
+ "job-script": "taskcluster/scripts/builder/repackage.sh",
+ "actions": ["setup", "repackage"],
+ "extra-config": {
+ "repackage_config": repackage_config,
+ },
+ "run-as-root": run.get("run-as-root", False),
+ "use-caches": run.get("use-caches", True),
+ }
+ )
+
+ worker = job.get("worker", {})
+ worker.update(
+ {
+ "chain-of-trust": True,
+ # Don't add generic artifact directory.
+ "skip-artifacts": True,
+ }
+ )
+ worker.setdefault("max-run-time", 3600)
+
+ if locale:
+ # Make sure we specify the locale-specific upload dir
+ worker.setdefault("env", {})["LOCALE"] = locale
+
+ worker["artifacts"] = _generate_task_output_files(
+ dep_job,
+ worker_type_implementation(config.graph_config, config.params, worker_type),
+ repackage_config=repackage_config,
+ locale=locale,
+ )
+ attributes["release_artifacts"] = [
+ artifact["name"] for artifact in worker["artifacts"]
+ ]
+
+ task = {
+ "label": job["label"],
+ "description": description,
+ "worker-type": worker_type,
+ "dependencies": dependencies,
+ "if-dependencies": [dep_job.kind],
+ "attributes": attributes,
+ "run-on-projects": job.get(
+ "run-on-projects", dep_job.attributes.get("run_on_projects")
+ ),
+ "optimization": dep_job.optimization,
+ "treeherder": treeherder,
+ "routes": job.get("routes", []),
+ "extra": job.get("extra", {}),
+ "worker": worker,
+ "run": run,
+ "fetches": _generate_download_config(
+ config,
+ dep_job,
+ build_platform,
+ signing_task,
+ repackage_signing_task,
+ locale=locale,
+ existing_fetch=job.get("fetches"),
+ ),
+ }
+
+ if build_platform.startswith("macosx"):
+ task.setdefault("fetches", {}).setdefault("toolchain", []).extend(
+ [
+ "linux64-libdmg",
+ "linux64-hfsplus",
+ "linux64-node",
+ "linux64-xar",
+ "linux64-mkbom",
+ ]
+ )
+
+ if "shipping-phase" in job:
+ task["shipping-phase"] = job["shipping-phase"]
+
+ yield task
+
+
+def _generate_download_config(
+ config,
+ task,
+ build_platform,
+ signing_task,
+ repackage_signing_task,
+ locale=None,
+ existing_fetch=None,
+):
+ locale_path = f"{locale}/" if locale else ""
+ fetch = {}
+ if existing_fetch:
+ fetch.update(existing_fetch)
+
+ if repackage_signing_task and build_platform.startswith("win"):
+ fetch.update(
+ {
+ repackage_signing_task: [f"{locale_path}target.installer.exe"],
+ }
+ )
+ elif build_platform.startswith("linux") or build_platform.startswith("macosx"):
+ signing_fetch = [
+ {
+ "artifact": "{}target{}".format(
+ locale_path, archive_format(build_platform)
+ ),
+ "extract": False,
+ },
+ ]
+ if config.kind == "repackage-deb-l10n":
+ signing_fetch.append(
+ {
+ "artifact": f"{locale_path}target.langpack.xpi",
+ "extract": False,
+ }
+ )
+ fetch.update({signing_task: signing_fetch})
+ elif build_platform.startswith("win"):
+ fetch.update(
+ {
+ signing_task: [
+ {
+ "artifact": f"{locale_path}target.zip",
+ "extract": False,
+ },
+ f"{locale_path}setup.exe",
+ ],
+ }
+ )
+
+ use_stub = task.attributes.get("stub-installer")
+ if use_stub:
+ fetch[signing_task].append(f"{locale_path}setup-stub.exe")
+
+ if fetch:
+ return fetch
+
+ raise NotImplementedError(f'Unsupported build_platform: "{build_platform}"')
+
+
+def _generate_task_output_files(
+ task, worker_implementation, repackage_config, locale=None
+):
+ locale_output_path = f"{locale}/" if locale else ""
+ artifact_prefix = get_artifact_prefix(task)
+
+ if worker_implementation == ("docker-worker", "linux"):
+ local_prefix = "/builds/worker/workspace/"
+ elif worker_implementation == ("generic-worker", "windows"):
+ local_prefix = "workspace/"
+ else:
+ raise NotImplementedError(
+ f'Unsupported worker implementation: "{worker_implementation}"'
+ )
+
+ output_files = []
+ for config in repackage_config:
+ output_files.append(
+ {
+ "type": "file",
+ "path": "{}outputs/{}{}".format(
+ local_prefix, locale_output_path, config["output"]
+ ),
+ "name": "{}/{}{}".format(
+ artifact_prefix, locale_output_path, config["output"]
+ ),
+ }
+ )
+ return output_files
diff --git a/taskcluster/gecko_taskgraph/transforms/repackage_l10n.py b/taskcluster/gecko_taskgraph/transforms/repackage_l10n.py
new file mode 100644
index 0000000000..013f331e1f
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/repackage_l10n.py
@@ -0,0 +1,29 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Split the repackage task into per-locale repackage tasks.
+"""
+
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.dependencies import get_primary_dependency
+
+from gecko_taskgraph.util.copy_task import copy_task
+
+transforms = TransformSequence()
+
+
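+# Hypothetical example: a dependency with chunk_locales ["de", "fr"] and a job
+# whose treeherder-group is "L10n" is split into two jobs with treeherder
+# symbols "L10n(de)" and "L10n(fr)", each with job["locale"] set accordingly.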
+@transforms.add
+def split_locales(config, jobs):
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ assert dep_job
+
+ for locale in dep_job.attributes.get("chunk_locales", []):
+ locale_job = copy_task(job) # don't overwrite dict values here
+ treeherder = locale_job.setdefault("treeherder", {})
+ treeherder_group = locale_job.pop("treeherder-group")
+ treeherder["symbol"] = f"{treeherder_group}({locale})"
+ locale_job["locale"] = locale
+ yield locale_job
diff --git a/taskcluster/gecko_taskgraph/transforms/repackage_partner.py b/taskcluster/gecko_taskgraph/transforms/repackage_partner.py
new file mode 100644
index 0000000000..ad91d101da
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/repackage_partner.py
@@ -0,0 +1,316 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the repackage task into an actual task description.
+"""
+
+
+import copy
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.dependencies import get_primary_dependency
+from taskgraph.util.schema import Schema, optionally_keyed_by, resolve_keyed_by
+from taskgraph.util.taskcluster import get_artifact_prefix
+from voluptuous import Optional, Required
+
+from gecko_taskgraph.transforms.repackage import (
+ PACKAGE_FORMATS as PACKAGE_FORMATS_VANILLA,
+)
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
+from gecko_taskgraph.util.partners import get_partner_config_by_kind
+from gecko_taskgraph.util.platforms import archive_format, executable_extension
+from gecko_taskgraph.util.workertypes import worker_type_implementation
+
+# When repacking the stub installer we need to pass a zip file and package name to
+# the repackage task. The vanilla stub installer does not need this; it keeps the
+# partner stub repack analogous to the full installer.
+PACKAGE_FORMATS = copy.deepcopy(PACKAGE_FORMATS_VANILLA)
+PACKAGE_FORMATS["installer-stub"]["inputs"]["package"] = "target-stub{archive_format}"
+PACKAGE_FORMATS["installer-stub"]["args"].extend(["--package-name", "{package-name}"])
+
+packaging_description_schema = Schema(
+ {
+ # unique label to describe this repackaging task
+ Optional("label"): str,
+ # Routes specific to this task, if defined
+ Optional("routes"): [str],
+ # passed through directly to the job description
+ Optional("extra"): task_description_schema["extra"],
+ # Shipping product and phase
+ Optional("shipping-product"): task_description_schema["shipping-product"],
+ Optional("shipping-phase"): task_description_schema["shipping-phase"],
+ Required("package-formats"): optionally_keyed_by(
+ "build-platform", "build-type", [str]
+ ),
+ # All l10n jobs use mozharness
+ Required("mozharness"): {
+ # Config files passed to the mozharness script
+ Required("config"): optionally_keyed_by("build-platform", [str]),
+ # Additional paths to look for mozharness configs in. These should be
+ # relative to the base of the source checkout
+ Optional("config-paths"): [str],
+ # if true, perform a checkout of a comm-central based branch inside the
+ # gecko checkout
+ Optional("comm-checkout"): bool,
+ },
+ # Override the default priority for the project
+ Optional("priority"): task_description_schema["priority"],
+ Optional("job-from"): task_description_schema["job-from"],
+ Optional("attributes"): task_description_schema["attributes"],
+ Optional("dependencies"): task_description_schema["dependencies"],
+ }
+)
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def remove_name(config, jobs):
+ for job in jobs:
+ if "name" in job:
+ del job["name"]
+ yield job
+
+
+transforms.add_validate(packaging_description_schema)
+
+
+@transforms.add
+def copy_in_useful_magic(config, jobs):
+ """Copy attributes from upstream task to be used for keyed configuration."""
+ for job in jobs:
+ dep = get_primary_dependency(config, job)
+ assert dep
+
+ job["build-platform"] = dep.attributes.get("build_platform")
+ yield job
+
+
+@transforms.add
+def handle_keyed_by(config, jobs):
+ """Resolve fields that can be keyed by platform, etc."""
+ fields = [
+ "mozharness.config",
+ "package-formats",
+ ]
+ for job in jobs:
+ job = copy.deepcopy(job) # don't overwrite dict values here
+ for field in fields:
+ resolve_keyed_by(item=job, field=field, item_name="?")
+ yield job
+
+
+@transforms.add
+def make_repackage_description(config, jobs):
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ assert dep_job
+
+ label = job.get("label", dep_job.label.replace("signing-", "repackage-"))
+ job["label"] = label
+
+ yield job
+
+
+@transforms.add
+def make_job_description(config, jobs):
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ assert dep_job
+
+ attributes = copy_attributes_from_dependent_job(dep_job)
+ build_platform = attributes["build_platform"]
+
+ if job["build-platform"].startswith("win"):
+ if dep_job.kind.endswith("signing"):
+ continue
+ if job["build-platform"].startswith("macosx"):
+ if dep_job.kind.endswith("repack"):
+ continue
+ dependencies = {dep_job.attributes.get("kind"): dep_job.label}
+ dependencies.update(dep_job.dependencies)
+
+ signing_task = None
+ for dependency in dependencies.keys():
+ if build_platform.startswith("macosx") and dependency.endswith("signing"):
+ signing_task = dependency
+ elif build_platform.startswith("win") and dependency.endswith("repack"):
+ signing_task = dependency
+
+ attributes["repackage_type"] = "repackage"
+
+ repack_id = job["extra"]["repack_id"]
+
+ partner_config = get_partner_config_by_kind(config, config.kind)
+ partner, subpartner, _ = repack_id.split("/")
+ repack_stub_installer = partner_config[partner][subpartner].get(
+ "repack_stub_installer"
+ )
+ if build_platform.startswith("win32") and repack_stub_installer:
+ job["package-formats"].append("installer-stub")
+
+ repackage_config = []
+ for format in job.get("package-formats"):
+ command = copy.deepcopy(PACKAGE_FORMATS[format])
+ substs = {
+ "archive_format": archive_format(build_platform),
+ "executable_extension": executable_extension(build_platform),
+ }
+ command["inputs"] = {
+ name: filename.format(**substs)
+ for name, filename in command["inputs"].items()
+ }
+ repackage_config.append(command)
+
+ run = job.get("mozharness", {})
+ run.update(
+ {
+ "using": "mozharness",
+ "script": "mozharness/scripts/repackage.py",
+ "job-script": "taskcluster/scripts/builder/repackage.sh",
+ "actions": ["setup", "repackage"],
+ "extra-config": {
+ "repackage_config": repackage_config,
+ },
+ }
+ )
+
+ worker = {
+ "chain-of-trust": True,
+ "max-run-time": 3600,
+ "taskcluster-proxy": True if get_artifact_prefix(dep_job) else False,
+ "env": {
+ "REPACK_ID": repack_id,
+ },
+ # Don't add generic artifact directory.
+ "skip-artifacts": True,
+ }
+
+ worker_type = "b-linux-gcp"
+
+ worker["artifacts"] = _generate_task_output_files(
+ dep_job,
+ worker_type_implementation(config.graph_config, config.params, worker_type),
+ repackage_config,
+ partner=repack_id,
+ )
+
+ description = (
+ "Repackaging for repack_id '{repack_id}' for build '"
+ "{build_platform}/{build_type}'".format(
+ repack_id=job["extra"]["repack_id"],
+ build_platform=attributes.get("build_platform"),
+ build_type=attributes.get("build_type"),
+ )
+ )
+
+ task = {
+ "label": job["label"],
+ "description": description,
+ "worker-type": worker_type,
+ "dependencies": dependencies,
+ "attributes": attributes,
+ "scopes": ["queue:get-artifact:releng/partner/*"],
+ "run-on-projects": dep_job.attributes.get("run_on_projects"),
+ "routes": job.get("routes", []),
+ "extra": job.get("extra", {}),
+ "worker": worker,
+ "run": run,
+ "fetches": _generate_download_config(
+ dep_job,
+ build_platform,
+ signing_task,
+ partner=repack_id,
+ project=config.params["project"],
+ repack_stub_installer=repack_stub_installer,
+ ),
+ }
+
+ # we may have reduced the priority for partner jobs, otherwise task.py will set it
+ if job.get("priority"):
+ task["priority"] = job["priority"]
+ if build_platform.startswith("macosx"):
+ task.setdefault("fetches", {}).setdefault("toolchain", []).extend(
+ [
+ "linux64-libdmg",
+ "linux64-hfsplus",
+ "linux64-node",
+ ]
+ )
+ yield task
+
+
+def _generate_download_config(
+ task,
+ build_platform,
+ signing_task,
+ partner=None,
+ project=None,
+ repack_stub_installer=False,
+):
+ locale_path = f"{partner}/" if partner else ""
+
+ if build_platform.startswith("macosx"):
+ return {
+ signing_task: [
+ {
+ "artifact": f"{locale_path}target.tar.gz",
+ "extract": False,
+ },
+ ],
+ }
+ if build_platform.startswith("win"):
+ download_config = [
+ {
+ "artifact": f"{locale_path}target.zip",
+ "extract": False,
+ },
+ f"{locale_path}setup.exe",
+ ]
+ if build_platform.startswith("win32") and repack_stub_installer:
+ download_config.extend(
+ [
+ {
+ "artifact": f"{locale_path}target-stub.zip",
+ "extract": False,
+ },
+ f"{locale_path}setup-stub.exe",
+ ]
+ )
+ return {signing_task: download_config}
+
+ raise NotImplementedError(f'Unsupported build_platform: "{build_platform}"')
+
+
+def _generate_task_output_files(task, worker_implementation, repackage_config, partner):
+ """We carefully generate an explicit list here, but there's an artifacts directory
+ too, courtesy of generic_worker_add_artifacts() (windows) or docker_worker_add_artifacts().
+ Any errors here are likely masked by that.
+ """
+ partner_output_path = f"{partner}/"
+ artifact_prefix = get_artifact_prefix(task)
+
+ if worker_implementation == ("docker-worker", "linux"):
+ local_prefix = "/builds/worker/workspace/"
+ elif worker_implementation == ("generic-worker", "windows"):
+ local_prefix = "workspace/"
+ else:
+ raise NotImplementedError(
+ f'Unsupported worker implementation: "{worker_implementation}"'
+ )
+
+ output_files = []
+ for config in repackage_config:
+ output_files.append(
+ {
+ "type": "file",
+ "path": "{}outputs/{}{}".format(
+ local_prefix, partner_output_path, config["output"]
+ ),
+ "name": "{}/{}{}".format(
+ artifact_prefix, partner_output_path, config["output"]
+ ),
+ }
+ )
+ return output_files
diff --git a/taskcluster/gecko_taskgraph/transforms/repackage_routes.py b/taskcluster/gecko_taskgraph/transforms/repackage_routes.py
new file mode 100644
index 0000000000..2973ee35bd
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/repackage_routes.py
@@ -0,0 +1,34 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Add indexes to repackage kinds
+"""
+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
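+# Hypothetical example: a shippable repackage job with locale "de" on
+# build_platform "win64" gets
+#   {"job-name": "win64-repackage", "product": "firefox", "type": "shippable-l10n"}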
+@transforms.add
+def add_indexes(config, jobs):
+ for job in jobs:
+ repackage_type = job["attributes"].get("repackage_type")
+ if repackage_type and job["attributes"]["build_type"] != "debug":
+ build_platform = job["attributes"]["build_platform"]
+ job_name = f"{build_platform}-{repackage_type}"
+ product = job.get("index", {}).get("product", "firefox")
+ index_type = "generic"
+ if job["attributes"].get("shippable") and job["attributes"].get("locale"):
+ index_type = "shippable-l10n"
+ if job["attributes"].get("shippable"):
+ index_type = "shippable"
+ if job["attributes"].get("locale"):
+ index_type = "l10n"
+ job["index"] = {
+ "job-name": job_name,
+ "product": product,
+ "type": index_type,
+ }
+
+ yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/repackage_set_upstream_mac_kind.py b/taskcluster/gecko_taskgraph/transforms/repackage_set_upstream_mac_kind.py
new file mode 100644
index 0000000000..1cf98ab3c9
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/repackage_set_upstream_mac_kind.py
@@ -0,0 +1,43 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform mac notarization tasks
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.dependencies import get_primary_dependency
+from taskgraph.util.schema import resolve_keyed_by
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def repackage_set_upstream_mac_kind(config, tasks):
+ """
+    Notarization only runs on level 3.
+    If level < 3, repackage the mac-signing task artifact instead.
+    Exception: debug builds use the signed build even on level 3.
+ """
+ for task in tasks:
+ primary_dep = get_primary_dependency(config, task)
+ assert primary_dep
+
+ if "macosx64" not in primary_dep.attributes["build_platform"]:
+ task.pop("upstream-mac-kind")
+ yield task
+ continue
+ resolve_keyed_by(
+ task,
+ "upstream-mac-kind",
+ item_name=config.kind,
+ **{
+ "build-type": primary_dep.attributes["build_type"],
+ "project": config.params.get("project"),
+ }
+ )
+ upstream_mac_kind = task.pop("upstream-mac-kind")
+
+ if primary_dep.kind != upstream_mac_kind:
+ continue
+ yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/repackage_signing.py b/taskcluster/gecko_taskgraph/transforms/repackage_signing.py
new file mode 100644
index 0000000000..66c1f87d70
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/repackage_signing.py
@@ -0,0 +1,153 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the repackage signing task into an actual task description.
+"""
+
+import os
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.dependencies import get_primary_dependency
+from taskgraph.util.schema import Schema
+from voluptuous import Optional
+
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
+from gecko_taskgraph.util.scriptworker import get_signing_cert_scope_per_platform
+
+repackage_signing_description_schema = Schema(
+ {
+ Optional("label"): str,
+ Optional("attributes"): task_description_schema["attributes"],
+ Optional("dependencies"): task_description_schema["dependencies"],
+ Optional("job-from"): task_description_schema["job-from"],
+ Optional("treeherder"): task_description_schema["treeherder"],
+ Optional("shipping-product"): task_description_schema["shipping-product"],
+ Optional("shipping-phase"): task_description_schema["shipping-phase"],
+ }
+)
+
+SIGNING_FORMATS = {
+ "target.installer.exe": ["autograph_authenticode_sha2_stub"],
+ "target.stub-installer.exe": ["autograph_authenticode_sha2_stub"],
+ "target.installer.msi": ["autograph_authenticode_sha2"],
+ "target.installer.msix": ["autograph_authenticode_sha2"],
+}
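+# The lookup below keys on the artifact basename, e.g.
+# os.path.basename("de/target.installer.msi") == "target.installer.msi"
+# selects ["autograph_authenticode_sha2"].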
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def remove_name(config, jobs):
+ for job in jobs:
+ if "name" in job:
+ del job["name"]
+ yield job
+
+
+transforms.add_validate(repackage_signing_description_schema)
+
+
+@transforms.add
+def make_repackage_signing_description(config, jobs):
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ assert dep_job
+
+ attributes = copy_attributes_from_dependent_job(dep_job)
+ locale = attributes.get("locale", dep_job.attributes.get("locale"))
+ attributes["repackage_type"] = "repackage-signing"
+
+ treeherder = job.get("treeherder", {})
+ treeherder.setdefault("symbol", "rs(B)")
+ dep_th_platform = dep_job.task.get("extra", {}).get("treeherder-platform")
+ treeherder.setdefault("platform", dep_th_platform)
+ treeherder.setdefault(
+ "tier", dep_job.task.get("extra", {}).get("treeherder", {}).get("tier", 1)
+ )
+ treeherder.setdefault("kind", "build")
+
+ if locale:
+ treeherder["symbol"] = f"rs({locale})"
+
+ if config.kind == "repackage-signing-msi":
+ treeherder["symbol"] = "MSIs({})".format(locale or "N")
+
+ elif config.kind in (
+ "repackage-signing-msix",
+ "repackage-signing-shippable-l10n-msix",
+ ):
+ # Like "MSIXs(Bs-multi)".
+ treeherder["symbol"] = "MSIXs({})".format(
+ dep_job.task.get("extra", {}).get("treeherder", {}).get("symbol", "B")
+ )
+
+ label = job["label"]
+
+ dep_kind = dep_job.kind
+ if "l10n" in dep_kind:
+ dep_kind = "repackage"
+
+ dependencies = {dep_kind: dep_job.label}
+
+ signing_dependencies = dep_job.dependencies
+        # This is so we get the build task, etc., in our dependencies, for better
+        # beetmover support. But for multi-locale MSIX packages, we don't want the
+        # signing task to depend directly on the langpack tasks.
+ dependencies.update(
+ {
+ k: v
+ for k, v in signing_dependencies.items()
+ if k != "docker-image"
+ and not k.startswith("shippable-l10n-signing-linux64")
+ }
+ )
+
+ description = (
+ "Signing of repackaged artifacts for locale '{locale}' for build '"
+ "{build_platform}/{build_type}'".format(
+ locale=attributes.get("locale", "en-US"),
+ build_platform=attributes.get("build_platform"),
+ build_type=attributes.get("build_type"),
+ )
+ )
+
+ build_platform = dep_job.attributes.get("build_platform")
+ is_shippable = dep_job.attributes.get("shippable")
+ signing_cert_scope = get_signing_cert_scope_per_platform(
+ build_platform, is_shippable, config
+ )
+ scopes = [signing_cert_scope]
+
+ upstream_artifacts = []
+ for artifact in sorted(dep_job.attributes.get("release_artifacts")):
+ basename = os.path.basename(artifact)
+ if basename in SIGNING_FORMATS:
+ upstream_artifacts.append(
+ {
+ "taskId": {"task-reference": f"<{dep_kind}>"},
+ "taskType": "repackage",
+ "paths": [artifact],
+ "formats": SIGNING_FORMATS[os.path.basename(artifact)],
+ }
+ )
+
+ task = {
+ "label": label,
+ "description": description,
+ "worker-type": "linux-signing" if is_shippable else "linux-depsigning",
+ "worker": {
+ "implementation": "scriptworker-signing",
+ "upstream-artifacts": upstream_artifacts,
+ "max-run-time": 3600,
+ },
+ "scopes": scopes,
+ "dependencies": dependencies,
+ "attributes": attributes,
+ "run-on-projects": dep_job.attributes.get("run_on_projects"),
+ "optimization": dep_job.optimization,
+ "treeherder": treeherder,
+ }
+
+ yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/repackage_signing_partner.py b/taskcluster/gecko_taskgraph/transforms/repackage_signing_partner.py
new file mode 100644
index 0000000000..e3940fd846
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/repackage_signing_partner.py
@@ -0,0 +1,163 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the repackage signing task into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.dependencies import get_primary_dependency
+from taskgraph.util.schema import Schema
+from taskgraph.util.taskcluster import get_artifact_path
+from voluptuous import Optional
+
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
+from gecko_taskgraph.util.partners import get_partner_config_by_kind
+from gecko_taskgraph.util.scriptworker import get_signing_cert_scope_per_platform
+
+transforms = TransformSequence()
+
+repackage_signing_description_schema = Schema(
+ {
+ Optional("label"): str,
+ Optional("extra"): object,
+ Optional("attributes"): task_description_schema["attributes"],
+ Optional("dependencies"): task_description_schema["dependencies"],
+ Optional("shipping-product"): task_description_schema["shipping-product"],
+ Optional("shipping-phase"): task_description_schema["shipping-phase"],
+ Optional("priority"): task_description_schema["priority"],
+ Optional("job-from"): task_description_schema["job-from"],
+ }
+)
+
+
+@transforms.add
+def remove_name(config, jobs):
+ for job in jobs:
+ if "name" in job:
+ del job["name"]
+ yield job
+
+
+transforms.add_validate(repackage_signing_description_schema)
+
+
+@transforms.add
+def make_repackage_signing_description(config, jobs):
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ assert dep_job
+
+ repack_id = dep_job.task["extra"]["repack_id"]
+ attributes = dep_job.attributes
+ build_platform = dep_job.attributes.get("build_platform")
+ is_shippable = dep_job.attributes.get("shippable")
+
+ # Mac & windows
+ label = dep_job.label.replace("repackage-", "repackage-signing-")
+ # Linux
+ label = label.replace("chunking-dummy-", "repackage-signing-")
+ description = (
+ "Signing of repackaged artifacts for partner repack id '{repack_id}' for build '"
+ "{build_platform}/{build_type}'".format( # NOQA: E501
+ repack_id=repack_id,
+ build_platform=attributes.get("build_platform"),
+ build_type=attributes.get("build_type"),
+ )
+ )
+
+ if "linux" in build_platform:
+            # we want the repack job, via the dependencies of the chunking-dummy dep_job
+ for dep in dep_job.dependencies.values():
+ if dep.startswith("release-partner-repack"):
+ dependencies = {"repack": dep}
+ break
+ else:
+ # we have a genuine repackage job as our parent
+ dependencies = {"repackage": dep_job.label}
+
+ attributes = copy_attributes_from_dependent_job(dep_job)
+ attributes["repackage_type"] = "repackage-signing"
+
+ signing_cert_scope = get_signing_cert_scope_per_platform(
+ build_platform, is_shippable, config
+ )
+ scopes = [signing_cert_scope]
+
+ if "win" in build_platform:
+ upstream_artifacts = [
+ {
+ "taskId": {"task-reference": "<repackage>"},
+ "taskType": "repackage",
+ "paths": [
+ get_artifact_path(dep_job, f"{repack_id}/target.installer.exe"),
+ ],
+ "formats": ["autograph_authenticode_sha2", "autograph_gpg"],
+ }
+ ]
+
+ partner_config = get_partner_config_by_kind(config, config.kind)
+ partner, subpartner, _ = repack_id.split("/")
+ repack_stub_installer = partner_config[partner][subpartner].get(
+ "repack_stub_installer"
+ )
+ if build_platform.startswith("win32") and repack_stub_installer:
+ upstream_artifacts.append(
+ {
+ "taskId": {"task-reference": "<repackage>"},
+ "taskType": "repackage",
+ "paths": [
+ get_artifact_path(
+ dep_job,
+ f"{repack_id}/target.stub-installer.exe",
+ ),
+ ],
+ "formats": ["autograph_authenticode_sha2", "autograph_gpg"],
+ }
+ )
+ elif "mac" in build_platform:
+ upstream_artifacts = [
+ {
+ "taskId": {"task-reference": "<repackage>"},
+ "taskType": "repackage",
+ "paths": [
+ get_artifact_path(dep_job, f"{repack_id}/target.dmg"),
+ ],
+ "formats": ["autograph_gpg"],
+ }
+ ]
+ elif "linux" in build_platform:
+ upstream_artifacts = [
+ {
+ "taskId": {"task-reference": "<repack>"},
+ "taskType": "repackage",
+ "paths": [
+ get_artifact_path(dep_job, f"{repack_id}/target.tar.bz2"),
+ ],
+ "formats": ["autograph_gpg"],
+ }
+ ]
+
+ task = {
+ "label": label,
+ "description": description,
+ "worker-type": "linux-signing",
+ "worker": {
+ "implementation": "scriptworker-signing",
+ "upstream-artifacts": upstream_artifacts,
+ "max-run-time": 3600,
+ },
+ "scopes": scopes,
+ "dependencies": dependencies,
+ "attributes": attributes,
+ "run-on-projects": dep_job.attributes.get("run_on_projects"),
+ "extra": {
+ "repack_id": repack_id,
+ },
+ }
+ # we may have reduced the priority for partner jobs, otherwise task.py will set it
+ if job.get("priority"):
+ task["priority"] = job["priority"]
+
+ yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/repo_update.py b/taskcluster/gecko_taskgraph/transforms/repo_update.py
new file mode 100644
index 0000000000..f4f135b585
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/repo_update.py
@@ -0,0 +1,25 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the repo-update task into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def resolve_keys(config, tasks):
+ for task in tasks:
+ env = task["worker"].setdefault("env", {})
+ env["BRANCH"] = config.params["project"]
+ for envvar in env:
+ resolve_keyed_by(env, envvar, envvar, **config.params)
+
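+        # Hedged sketch: an env value such as
+        # {"by-project": {"mozilla-central": "1", "default": ""}} (hypothetical)
+        # is resolved per project above; anything left empty is pruned below.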
+ for envvar in list(env.keys()):
+ if not env.get(envvar):
+ del env[envvar]
+ yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/reprocess_symbols.py b/taskcluster/gecko_taskgraph/transforms/reprocess_symbols.py
new file mode 100644
index 0000000000..ea2cac3a68
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/reprocess_symbols.py
@@ -0,0 +1,72 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the reprocess-symbols task description template,
+taskcluster/ci/reprocess-symbols/job-template.yml, into an actual task description.
+"""
+
+
+import logging
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.dependencies import get_dependencies, get_primary_dependency
+from taskgraph.util.treeherder import inherit_treeherder_from_dep, join_symbol
+
+from gecko_taskgraph.util.attributes import (
+ copy_attributes_from_dependent_job,
+ sorted_unique_list,
+)
+
+logger = logging.getLogger(__name__)
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def gather_required_signoffs(config, jobs):
+ for job in jobs:
+ job.setdefault("attributes", {})["required_signoffs"] = sorted_unique_list(
+ *(
+ dep.attributes.get("required_signoffs", [])
+ for dep in get_dependencies(config, job)
+ )
+ )
+ yield job
+
+
+@transforms.add
+def fill_template(config, tasks):
+ for task in tasks:
+ dep = get_primary_dependency(config, task)
+ assert dep
+
+ # Fill out the dynamic fields in the task description
+ task["label"] = dep.label + "-reprocess-symbols"
+ task["worker"]["env"]["GECKO_HEAD_REPOSITORY"] = config.params[
+ "head_repository"
+ ]
+ task["worker"]["env"]["GECKO_HEAD_REV"] = config.params["head_rev"]
+ task["worker"]["env"]["CRASHSTATS_SECRET"] = task["worker"]["env"][
+ "CRASHSTATS_SECRET"
+ ].format(level=config.params["level"])
+
+ attributes = copy_attributes_from_dependent_job(dep)
+ attributes.update(task.get("attributes", {}))
+ task["attributes"] = attributes
+
+ treeherder = inherit_treeherder_from_dep(task, dep)
+ th = dep.task["extra"]["treeherder"]
+ th_symbol = th.get("symbol")
+ th_groupsymbol = th.get("groupSymbol", "?")
+
+ # Disambiguate the treeherder symbol.
+ sym = "Rep" + (th_symbol[1:] if th_symbol.startswith("B") else th_symbol)
+ treeherder.setdefault("symbol", join_symbol(th_groupsymbol, sym))
+ task["treeherder"] = treeherder
+
+ task["run-on-projects"] = dep.attributes.get("run_on_projects")
+ task["optimization"] = {"reprocess-symbols": None}
+ task["if-dependencies"] = [task["attributes"]["primary-kind-dependency"]]
+
+ yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/reverse_chunk_deps.py b/taskcluster/gecko_taskgraph/transforms/reverse_chunk_deps.py
new file mode 100644
index 0000000000..1eb0e39c42
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/reverse_chunk_deps.py
@@ -0,0 +1,45 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Adjust dependencies to not exceed MAX_DEPENDENCIES
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.treeherder import add_suffix
+
+from gecko_taskgraph import MAX_DEPENDENCIES
+from gecko_taskgraph.transforms import release_deps
+from gecko_taskgraph.util.copy_task import copy_task
+
+transforms = TransformSequence()
+
+
+def yield_job(orig_job, deps, count):
+ job = copy_task(orig_job)
+ job["dependencies"] = deps
+ job["name"] = "{}-{}".format(orig_job["name"], count)
+ if "treeherder" in job:
+ job["treeherder"]["symbol"] = add_suffix(
+ job["treeherder"]["symbol"], f"-{count}"
+ )
+
+ return job
+
+
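+# Worked example (numbers hypothetical): with MAX_DEPENDENCIES = 99, a job with
+# 150 dependencies yields "name-1" holding the first 99 labels and "name-2"
+# holding the remaining 51; treeherder symbols get "-1"/"-2" suffixes.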
+@transforms.add
+def add_dependencies(config, jobs):
+ for job in release_deps.add_dependencies(config, jobs):
+ count = 1
+ deps = {}
+
+ # sort for deterministic chunking
+ for dep_label in sorted(job["dependencies"].keys()):
+ deps[dep_label] = dep_label
+ if len(deps) == MAX_DEPENDENCIES:
+ yield yield_job(job, deps, count)
+ deps = {}
+ count += 1
+ if deps:
+ yield yield_job(job, deps, count)
+ count += 1
diff --git a/taskcluster/gecko_taskgraph/transforms/run_pgo_profile.py b/taskcluster/gecko_taskgraph/transforms/run_pgo_profile.py
new file mode 100644
index 0000000000..942c6a2110
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/run_pgo_profile.py
@@ -0,0 +1,34 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Apply some defaults and minor modifications to the pgo jobs.
+"""
+
+import logging
+
+from taskgraph.transforms.base import TransformSequence
+
+logger = logging.getLogger(__name__)
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def run_profile_data(config, jobs):
+ for job in jobs:
+ build_platform = job["attributes"].get("build_platform")
+ instr = "instrumented-build-{}".format(job["name"])
+ if "android" in build_platform:
+ artifact = "geckoview-test_runner.apk"
+ elif "macosx64" in build_platform:
+ artifact = "target.dmg"
+ elif "win" in build_platform:
+ artifact = "target.zip"
+ else:
+ artifact = "target.tar.bz2"
+ job.setdefault("fetches", {})[instr] = [
+ {"artifact": artifact, "extract": not artifact.endswith((".dmg", ".apk"))},
+ "target.crashreporter-symbols.zip",
+ ]
+ yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/scriptworker.py b/taskcluster/gecko_taskgraph/transforms/scriptworker.py
new file mode 100644
index 0000000000..5d382702af
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/scriptworker.py
@@ -0,0 +1,18 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+Transforms for adding appropriate scopes to scriptworker tasks.
+"""
+
+
+from gecko_taskgraph.util.scriptworker import get_balrog_server_scope
+
+
+def add_balrog_scopes(config, jobs):
+ for job in jobs:
+ server_scope = get_balrog_server_scope(config)
+ job["scopes"] = [server_scope]
+
+ yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/scriptworker_canary.py b/taskcluster/gecko_taskgraph/transforms/scriptworker_canary.py
new file mode 100644
index 0000000000..c8b77b85bc
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/scriptworker_canary.py
@@ -0,0 +1,45 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Build a command to run `mach release push-scriptworker-canaries`.
+"""
+
+
+from shlex import quote as shell_quote
+
+from mozrelease.scriptworker_canary import TASK_TYPES
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def build_command(config, jobs):
+ scriptworkers = config.params["try_task_config"].get(
+ "scriptworker-canary-workers", []
+ )
+ # Filter the list of workers to those we have configured a set of canary
+ # tasks for.
+ scriptworkers = [
+ scriptworker for scriptworker in scriptworkers if scriptworker in TASK_TYPES
+ ]
+
+ if not scriptworkers:
+ return
+
+ for job in jobs:
+ command = ["release", "push-scriptworker-canary"]
+ for scriptworker in scriptworkers:
+ command.extend(["--scriptworker", scriptworker])
+ for address in job.pop("addresses"):
+ command.extend(["--address", address])
+ if "ssh-key-secret" in job:
+ ssh_key_secret = job.pop("ssh-key-secret")
+ command.extend(["--ssh-key-secret", ssh_key_secret])
+ job.setdefault("scopes", []).append(f"secrets:get:{ssh_key_secret}")
+
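+        # A hedged sketch of the resulting mach subcommand (worker name,
+        # address and secret path hypothetical):
+        #   release push-scriptworker-canary --scriptworker signing \
+        #     --address canary@example.com --ssh-key-secret project/releng/ssh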
+ job.setdefault("run", {}).update(
+ {"using": "mach", "mach": " ".join(map(shell_quote, command))}
+ )
+ yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/sentry.py b/taskcluster/gecko_taskgraph/transforms/sentry.py
new file mode 100644
index 0000000000..2e43a15518
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/sentry.py
@@ -0,0 +1,30 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def sentry(config, tasks):
+ """Do transforms specific to github-sync tasks."""
+ if config.params["project"] not in ["mozilla-central", "try"]:
+ return
+ for task in tasks:
+ scopes = [
+ scope.format(level=config.params["level"]) for scope in task["scopes"]
+ ]
+ task["scopes"] = scopes
+
+ env = {
+ key: value.format(
+ level=config.params["level"],
+ head_repository=config.params["head_repository"],
+ head_rev=config.params["head_rev"],
+ )
+ for (key, value) in task["worker"]["env"].items()
+ }
+ task["worker"]["env"] = env
+ yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/shippable_l10n_signing.py b/taskcluster/gecko_taskgraph/transforms/shippable_l10n_signing.py
new file mode 100644
index 0000000000..45725f33a5
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/shippable_l10n_signing.py
@@ -0,0 +1,88 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the signing task into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.dependencies import get_primary_dependency
+from taskgraph.util.treeherder import join_symbol
+
+from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
+from gecko_taskgraph.util.signed_artifacts import (
+ generate_specifications_of_artifacts_to_sign,
+)
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def make_signing_description(config, jobs):
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+
+ # add the chunk number to the TH symbol
+ symbol = job.get("treeherder", {}).get("symbol", "Bs")
+ symbol = "{}{}".format(symbol, dep_job.attributes.get("l10n_chunk"))
+ group = "L10n"
+
+ job["treeherder"] = {
+ "symbol": join_symbol(group, symbol),
+ }
+
+ yield job
+
+
+@transforms.add
+def define_upstream_artifacts(config, jobs):
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ upstream_artifact_task = job.pop("upstream-artifact-task", dep_job)
+
+ job.setdefault("attributes", {}).update(
+ copy_attributes_from_dependent_job(dep_job)
+ )
+ if dep_job.attributes.get("chunk_locales"):
+ # Used for l10n attribute passthrough
+ job["attributes"]["chunk_locales"] = dep_job.attributes.get("chunk_locales")
+
+ locale_specifications = generate_specifications_of_artifacts_to_sign(
+ config,
+ job,
+ keep_locale_template=True,
+ dep_kind=upstream_artifact_task.kind,
+ )
+
+ upstream_artifacts = []
+ for spec in locale_specifications:
+ upstream_task_type = "l10n"
+ if upstream_artifact_task.kind.endswith(
+ ("-mac-notarization", "-mac-signing")
+ ):
+ # Upstream is mac signing or notarization
+ upstream_task_type = "scriptworker"
+ upstream_artifacts.append(
+ {
+ "taskId": {"task-reference": f"<{upstream_artifact_task.kind}>"},
+ "taskType": upstream_task_type,
+                    # Set paths based on the artifacts in the specs (above),
+                    # one per locale present in the chunk this job is signing.
+                    # Pass the paths through set() and sorted() to get back a
+                    # list with duplicates removed (e.g. the hardcoded
+                    # ja-JP-mac langpack).
+ "paths": sorted(
+ {
+ path_template.format(locale=locale)
+ for locale in upstream_artifact_task.attributes.get(
+ "chunk_locales", []
+ )
+ for path_template in spec["artifacts"]
+ }
+ ),
+ "formats": spec["formats"],
+ }
+ )
+
+ job["upstream-artifacts"] = upstream_artifacts
+
+ yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/signing.py b/taskcluster/gecko_taskgraph/transforms/signing.py
new file mode 100644
index 0000000000..e55ad47f42
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/signing.py
@@ -0,0 +1,258 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the signing task into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.dependencies import get_primary_dependency
+from taskgraph.util.keyed_by import evaluate_keyed_by
+from taskgraph.util.schema import Schema, taskref_or_string
+from voluptuous import Optional, Required
+
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import (
+ copy_attributes_from_dependent_job,
+ release_level,
+)
+from gecko_taskgraph.util.scriptworker import (
+ add_scope_prefix,
+ get_signing_cert_scope_per_platform,
+)
+
+transforms = TransformSequence()
+
+signing_description_schema = Schema(
+ {
+ # Artifacts from dep task to sign - Sync with taskgraph/transforms/task.py
+ # because this is passed directly into the signingscript worker
+ Required("upstream-artifacts"): [
+ {
+ # taskId of the task with the artifact
+ Required("taskId"): taskref_or_string,
+ # type of signing task (for CoT)
+ Required("taskType"): str,
+ # Paths to the artifacts to sign
+ Required("paths"): [str],
+ # Signing formats to use on each of the paths
+ Required("formats"): [str],
+ }
+ ],
+ # attributes for this task
+ Optional("attributes"): {str: object},
+ # unique label to describe this signing task, defaults to {dep.label}-signing
+ Optional("label"): str,
+ # treeherder is allowed here to override any defaults we use for signing. See
+ # taskcluster/gecko_taskgraph/transforms/task.py for the schema details, and the
+ # below transforms for defaults of various values.
+ Optional("treeherder"): task_description_schema["treeherder"],
+ # Routes specific to this task, if defined
+ Optional("routes"): [str],
+ Optional("shipping-phase"): task_description_schema["shipping-phase"],
+ Optional("shipping-product"): task_description_schema["shipping-product"],
+ Required("dependencies"): task_description_schema["dependencies"],
+ # Optional control for how long a task may run (aka maxRunTime)
+ Optional("max-run-time"): int,
+ Optional("extra"): {str: object},
+ # Max number of partner repacks per chunk
+ Optional("repacks-per-chunk"): int,
+ # Override the default priority for the project
+ Optional("priority"): task_description_schema["priority"],
+ Optional("job-from"): task_description_schema["job-from"],
+ }
+)
+
+
+@transforms.add
+def delete_name(config, jobs):
+ """Delete the 'name' key if it exists, we don't use it."""
+ for job in jobs:
+ if "name" in job:
+ del job["name"]
+ yield job
+
+
+transforms.add_validate(signing_description_schema)
+
+
+@transforms.add
+def add_entitlements_link(config, jobs):
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ entitlements_path = evaluate_keyed_by(
+ config.graph_config["mac-notarization"]["mac-entitlements"],
+ "mac entitlements",
+ {
+ "platform": dep_job.attributes.get("build_platform"),
+ "release-level": release_level(config.params["project"]),
+ },
+ )
+ if entitlements_path:
+ job["entitlements-url"] = config.params.file_url(
+ entitlements_path,
+ )
+ yield job
+
+
+@transforms.add
+def add_requirements_link(config, jobs):
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ requirements_path = evaluate_keyed_by(
+ config.graph_config["mac-notarization"]["mac-requirements"],
+ "mac requirements",
+ {
+ "platform": dep_job.attributes.get("build_platform"),
+ },
+ )
+ if requirements_path:
+ job["requirements-plist-url"] = config.params.file_url(
+ requirements_path,
+ )
+ yield job
+
+
+@transforms.add
+def make_task_description(config, jobs):
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ attributes = dep_job.attributes
+
+ signing_format_scopes = []
+ formats = set()
+ for artifacts in job["upstream-artifacts"]:
+ for f in artifacts["formats"]:
+ formats.add(f) # Add each format only once
+
+ is_shippable = dep_job.attributes.get("shippable", False)
+ build_platform = dep_job.attributes.get("build_platform")
+ treeherder = None
+ if "partner" not in config.kind and "eme-free" not in config.kind:
+ treeherder = job.get("treeherder", {})
+
+ dep_th_platform = (
+ dep_job.task.get("extra", {})
+ .get("treeherder", {})
+ .get("machine", {})
+ .get("platform", "")
+ )
+ build_type = dep_job.attributes.get("build_type")
+ treeherder.setdefault(
+ "platform",
+ _generate_treeherder_platform(
+ dep_th_platform, build_platform, build_type
+ ),
+ )
+
+ # ccov builds are tier 2, so they cannot have tier 1 tasks
+ # depending on them.
+ treeherder.setdefault(
+ "tier",
+ dep_job.task.get("extra", {}).get("treeherder", {}).get("tier", 1),
+ )
+ treeherder.setdefault(
+ "symbol",
+ _generate_treeherder_symbol(
+ dep_job.task.get("extra", {}).get("treeherder", {}).get("symbol")
+ ),
+ )
+ treeherder.setdefault("kind", "build")
+
+ label = job["label"]
+ description = (
+ "Initial Signing for locale '{locale}' for build '"
+ "{build_platform}/{build_type}'".format(
+ locale=attributes.get("locale", "en-US"),
+ build_platform=build_platform,
+ build_type=attributes.get("build_type"),
+ )
+ )
+
+ attributes = (
+ job["attributes"]
+ if job.get("attributes")
+ else copy_attributes_from_dependent_job(dep_job)
+ )
+ attributes["signed"] = True
+
+ if "linux" in build_platform:
+ attributes["release_artifacts"] = ["public/build/KEY"]
+
+ if dep_job.attributes.get("chunk_locales"):
+ # Used for l10n attribute passthrough
+ attributes["chunk_locales"] = dep_job.attributes.get("chunk_locales")
+
+ signing_cert_scope = get_signing_cert_scope_per_platform(
+ build_platform, is_shippable, config
+ )
+ worker_type_alias = "linux-signing" if is_shippable else "linux-depsigning"
+ task = {
+ "label": label,
+ "description": description,
+ "worker": {
+ "implementation": "scriptworker-signing",
+ "upstream-artifacts": job["upstream-artifacts"],
+ "max-run-time": job.get("max-run-time", 3600),
+ },
+ "scopes": [signing_cert_scope] + signing_format_scopes,
+ "dependencies": job["dependencies"],
+ "attributes": attributes,
+ "run-on-projects": dep_job.attributes.get("run_on_projects"),
+ "optimization": dep_job.optimization,
+ "routes": job.get("routes", []),
+ "shipping-product": job.get("shipping-product"),
+ "shipping-phase": job.get("shipping-phase"),
+ }
+ if dep_job.kind in task["dependencies"]:
+ task["if-dependencies"] = [dep_job.kind]
+
+ # build-mac-{signing,notarization} uses signingscript instead of iscript
+ if "macosx" in build_platform and config.kind.endswith("-mac-notarization"):
+ task["worker"]["mac-behavior"] = "apple_notarization"
+ task["scopes"] = [
+ add_scope_prefix(config, "signing:cert:release-apple-notarization")
+ ]
+ elif "macosx" in build_platform:
+ # iscript overrides
+ task["worker"]["mac-behavior"] = "mac_sign_and_pkg"
+
+ worker_type_alias_map = {
+ "linux-depsigning": "mac-depsigning",
+ "linux-signing": "mac-signing",
+ }
+ assert worker_type_alias in worker_type_alias_map, (
+ "Make sure to adjust the below worker_type_alias logic for "
+ "mac if you change the signing workerType aliases!"
+ " ({} not found in mapping)".format(worker_type_alias)
+ )
+ worker_type_alias = worker_type_alias_map[worker_type_alias]
+ for attr in ("entitlements-url", "requirements-plist-url"):
+ if job.get(attr):
+ task["worker"][attr] = job[attr]
+
+ task["worker-type"] = worker_type_alias
+ if treeherder:
+ task["treeherder"] = treeherder
+ if job.get("extra"):
+ task["extra"] = job["extra"]
+ # we may have reduced the priority for partner jobs, otherwise task.py will set it
+ if job.get("priority"):
+ task["priority"] = job["priority"]
+
+ yield task
+
+
+def _generate_treeherder_platform(dep_th_platform, build_platform, build_type):
+ if "-pgo" in build_platform:
+ actual_build_type = "pgo"
+ elif "-ccov" in build_platform:
+ actual_build_type = "ccov"
+ else:
+ actual_build_type = build_type
+ return f"{dep_th_platform}/{actual_build_type}"
+
+
+def _generate_treeherder_symbol(build_symbol):
+ symbol = build_symbol + "s"
+ return symbol
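+# Examples: _generate_treeherder_platform("windows2012-64", "win64-pgo", "opt")
+# returns "windows2012-64/pgo"; _generate_treeherder_symbol("B") returns "Bs".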
diff --git a/taskcluster/gecko_taskgraph/transforms/snap_test.py b/taskcluster/gecko_taskgraph/transforms/snap_test.py
new file mode 100644
index 0000000000..e6d879f225
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/snap_test.py
@@ -0,0 +1,48 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+"""
+
+
+import logging
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.dependencies import get_primary_dependency
+from taskgraph.util.treeherder import inherit_treeherder_from_dep
+
+logger = logging.getLogger(__name__)
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def fill_template(config, tasks):
+ for task in tasks:
+ test_type = task.get("attributes")["snap_test_type"]
+
+ assert "-test-" in task.get("label")
+ task["label"] = task.get("label").replace("-test-", "-test-" + test_type + "-")
+
+ dep = get_primary_dependency(config, task)
+ assert dep
+
+ inherit_treeherder_from_dep(task, dep)
+ task_platform = task["task"]["extra"]["treeherder"]["machine"]["platform"]
+
+ # Disambiguate the treeherder symbol.
+ full_platform_collection = (
+ task_platform + "-snap-" + task.get("label").split("-")[-1]
+ )
+ (platform, collection) = full_platform_collection.split("/")
+ task["task"]["extra"]["treeherder"]["collection"] = {collection: True}
+ task["task"]["extra"]["treeherder"]["machine"]["platform"] = platform
+ task["task"]["extra"]["treeherder-platform"] = full_platform_collection
+ task["task"]["metadata"]["name"] = task["label"]
+
+ timeout = 10
+ if collection != "opt":
+ timeout = 60
+ task["task"]["payload"]["env"]["TEST_TIMEOUT"] = "{}".format(timeout)
+
+ yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/source_checksums_signing.py b/taskcluster/gecko_taskgraph/transforms/source_checksums_signing.py
new file mode 100644
index 0000000000..0c31f48cc0
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/source_checksums_signing.py
@@ -0,0 +1,99 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the checksums signing task into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.dependencies import get_primary_dependency
+from taskgraph.util.schema import Schema
+from voluptuous import Optional
+
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
+from gecko_taskgraph.util.scriptworker import get_signing_cert_scope
+
+checksums_signing_description_schema = Schema(
+ {
+ Optional("label"): str,
+ Optional("treeherder"): task_description_schema["treeherder"],
+ Optional("shipping-product"): task_description_schema["shipping-product"],
+ Optional("shipping-phase"): task_description_schema["shipping-phase"],
+ Optional("job-from"): task_description_schema["job-from"],
+ Optional("attributes"): task_description_schema["attributes"],
+ Optional("dependencies"): task_description_schema["dependencies"],
+ }
+)
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def remove_name(config, jobs):
+ for job in jobs:
+ if "name" in job:
+ del job["name"]
+ yield job
+
+
+transforms.add_validate(checksums_signing_description_schema)
+
+
+@transforms.add
+def make_checksums_signing_description(config, jobs):
+ for job in jobs:
+ dep_job = get_primary_dependency(config, job)
+ assert dep_job
+
+ attributes = dep_job.attributes
+
+ treeherder = job.get("treeherder", {})
+ treeherder.setdefault("symbol", "css(N)")
+ dep_th_platform = (
+ dep_job.task.get("extra", {})
+ .get("treeherder", {})
+ .get("machine", {})
+ .get("platform", "")
+ )
+ treeherder.setdefault("platform", f"{dep_th_platform}/opt")
+ treeherder.setdefault("tier", 1)
+ treeherder.setdefault("kind", "build")
+
+ label = job["label"]
+ description = "Signing of release-source checksums file"
+ dependencies = {"beetmover": dep_job.label}
+
+ attributes = copy_attributes_from_dependent_job(dep_job)
+
+ upstream_artifacts = [
+ {
+ "taskId": {"task-reference": "<beetmover>"},
+ "taskType": "beetmover",
+ "paths": [
+ "public/target-source.checksums",
+ ],
+ "formats": ["autograph_gpg"],
+ }
+ ]
+
+ signing_cert_scope = get_signing_cert_scope(config)
+
+ task = {
+ "label": label,
+ "description": description,
+ "worker-type": "linux-signing",
+ "worker": {
+ "implementation": "scriptworker-signing",
+ "upstream-artifacts": upstream_artifacts,
+ "max-run-time": 3600,
+ },
+ "scopes": [
+ signing_cert_scope,
+ ],
+ "dependencies": dependencies,
+ "attributes": attributes,
+ "run-on-projects": dep_job.attributes.get("run_on_projects"),
+ "treeherder": treeherder,
+ }
+
+ yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/source_test.py b/taskcluster/gecko_taskgraph/transforms/source_test.py
new file mode 100644
index 0000000000..5c561e8114
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/source_test.py
@@ -0,0 +1,300 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Source-test jobs can run on multiple platforms. These transforms allow jobs
+with either a single `platform` or a list of platforms, and set the appropriate
+treeherder configuration and attributes for each platform.
+"""
+
+
+import copy
+import os
+
+import taskgraph
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.attributes import keymatch
+from taskgraph.util.schema import Schema, optionally_keyed_by, resolve_keyed_by
+from taskgraph.util.treeherder import join_symbol, split_symbol
+from voluptuous import Any, Extra, Optional, Required
+
+from gecko_taskgraph.transforms.job import job_description_schema
+from gecko_taskgraph.util.hg import get_json_automationrelevance
+
+source_test_description_schema = Schema(
+ {
+ # most fields are passed directly through as job fields, and are not
+ # repeated here
+ Extra: object,
+ # The platform on which this task runs. This will be used to set up attributes
+ # (for try selection) and treeherder metadata (for display). If given as a list,
+ # the job will be "split" into multiple tasks, one with each platform.
+ Required("platform"): Any(str, [str]),
+ # Build labels required for the task. If this key is provided it must
+ # contain a build label for the task platform.
+ # The task will then depend on a build task, and the installer url will be
+ # saved to the GECKO_INSTALLER_URL environment variable.
+ Optional("require-build"): optionally_keyed_by("project", {str: str}),
+ # These fields can be keyed by "platform", and are otherwise identical to
+ # job descriptions.
+ Required("worker-type"): optionally_keyed_by(
+ "platform", job_description_schema["worker-type"]
+ ),
+ Required("worker"): optionally_keyed_by(
+ "platform", job_description_schema["worker"]
+ ),
+ Optional("python-version"): [int],
+ Optional("dependencies"): {
+ k: optionally_keyed_by("platform", v)
+ for k, v in job_description_schema["dependencies"].items()
+ },
+ # A list of artifacts to install from 'fetch' tasks.
+ Optional("fetches"): {
+ str: optionally_keyed_by(
+ "platform", job_description_schema["fetches"][str]
+ ),
+ },
+ }
+)
+
+transforms = TransformSequence()
+
+transforms.add_validate(source_test_description_schema)
+
+
+@transforms.add
+def set_job_name(config, jobs):
+ for job in jobs:
+ if "job-from" in job and job["job-from"] != "kind.yml":
+ from_name = os.path.splitext(job["job-from"])[0]
+ job["name"] = "{}-{}".format(from_name, job["name"])
+ yield job
+
+
+@transforms.add
+def expand_platforms(config, jobs):
+ for job in jobs:
+ if isinstance(job["platform"], str):
+ yield job
+ continue
+
+ for platform in job["platform"]:
+ pjob = copy.deepcopy(job)
+ pjob["platform"] = platform
+
+ if "name" in pjob:
+ pjob["name"] = "{}-{}".format(pjob["name"], platform)
+ else:
+ pjob["label"] = "{}-{}".format(pjob["label"], platform)
+ yield pjob
+
+
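+# Hypothetical example: python-version [3] on a job with treeherder symbol
+# "mozlint(flake8)" yields one job whose symbol becomes "py3(flake8)" and whose
+# run config gains "python-version": 3.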
+@transforms.add
+def split_python(config, jobs):
+ for job in jobs:
+ key = "python-version"
+ versions = job.pop(key, [])
+ if not versions:
+ yield job
+ continue
+ for version in versions:
+ group = f"py{version}"
+ pyjob = copy.deepcopy(job)
+ if "name" in pyjob:
+ pyjob["name"] += f"-{group}"
+ else:
+ pyjob["label"] += f"-{group}"
+ symbol = split_symbol(pyjob["treeherder"]["symbol"])[1]
+ pyjob["treeherder"]["symbol"] = join_symbol(group, symbol)
+ pyjob["run"][key] = version
+ yield pyjob
+
+
+@transforms.add
+def split_jsshell(config, jobs):
+ all_shells = {"sm": "Spidermonkey", "v8": "Google V8"}
+
+ for job in jobs:
+ if not job["name"].startswith("jsshell"):
+ yield job
+ continue
+
+ test = job.pop("test")
+ for shell in job.get("shell", all_shells.keys()):
+ assert shell in all_shells
+
+ new_job = copy.deepcopy(job)
+ new_job["name"] = "{}-{}".format(new_job["name"], shell)
+ new_job["description"] = "{} on {}".format(
+ new_job["description"], all_shells[shell]
+ )
+ new_job["shell"] = shell
+
+ group = f"js-bench-{shell}"
+ symbol = split_symbol(new_job["treeherder"]["symbol"])[1]
+ new_job["treeherder"]["symbol"] = join_symbol(group, symbol)
+
+ run = new_job["run"]
+ run["mach"] = run["mach"].format(
+ shell=shell, SHELL=shell.upper(), test=test
+ )
+ yield new_job
+
+
+def add_build_dependency(config, job):
+ """
+    Add a build dependency to the job (the installer url is then exposed via
+    GECKO_INSTALLER_URL, per the schema comment above).
+ """
+ key = job["platform"]
+ build_labels = job.pop("require-build", {})
+ matches = keymatch(build_labels, key)
+ if not matches:
+ raise Exception(
+ "No build platform found. "
+ "Define 'require-build' for {} in the task config.".format(key)
+ )
+
+ if len(matches) > 1:
+ raise Exception(f"More than one build platform found for '{key}'.")
+
+ label = matches[0]
+ deps = job.setdefault("dependencies", {})
+ deps.update({"build": label})
+
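+ # A hypothetical illustration: with
+ # require-build: {"linux.*": "build-linux64/opt"}
+ # and platform "linux64/opt", keymatch returns ["build-linux64/opt"],
+ # which becomes the job's "build" dependency.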
+
+@transforms.add
+def handle_platform(config, jobs):
+ """
+ Handle the 'platform' property, setting up treeherder context as well as
+ try-related attributes.
+ """
+ fields = [
+ "always-target",
+ "fetches.toolchain",
+ "require-build",
+ "worker-type",
+ "worker",
+ ]
+
+ for job in jobs:
+ platform = job["platform"]
+
+ for field in fields:
+ resolve_keyed_by(
+ job, field, item_name=job["name"], project=config.params["project"]
+ )
+ for field in job.get("dependencies", {}):
+ resolve_keyed_by(
+ job,
+ f"dependencies.{field}",
+ item_name=job["name"],
+ project=config.params["project"],
+ )
+
+ if "treeherder" in job:
+ job["treeherder"].setdefault("platform", platform)
+
+ if "require-build" in job:
+ add_build_dependency(config, job)
+
+ del job["platform"]
+ yield job
+
+
+@transforms.add
+def handle_shell(config, jobs):
+ """
+ Handle the 'shell' property.
+ """
+ fields = [
+ "run-on-projects",
+ "worker.env",
+ ]
+
+ for job in jobs:
+ if not job.get("shell"):
+ yield job
+ continue
+
+ for field in fields:
+ resolve_keyed_by(job, field, item_name=job["name"])
+
+ del job["shell"]
+ yield job
+
+
+@transforms.add
+def set_code_review_env(config, jobs):
+ """
+ Add a CODE_REVIEW environment variable when running in code-review bot mode
+ """
+ is_code_review = config.params["target_tasks_method"] == "codereview"
+
+ for job in jobs:
+ attrs = job.get("attributes", {})
+ if is_code_review and attrs.get("code-review") is True:
+ env = job["worker"].setdefault("env", {})
+ env["CODE_REVIEW"] = "1"
+
+ yield job
+
+
+@transforms.add
+def set_base_revision_in_tgdiff(config, jobs):
+ # Don't attempt to download 'json-automation' locally as the revision may
+ # not exist in the repository.
+ if not os.environ.get("MOZ_AUTOMATION") or taskgraph.fast:
+ yield from jobs
+ return
+
+ data = get_json_automationrelevance(
+ config.params["head_repository"], config.params["head_rev"]
+ )
+ for job in jobs:
+ if job["name"] != "taskgraph-diff":
+ yield job
+ continue
+
+ job["task-context"] = {
+ "from-object": {
+ "base_rev": data["changesets"][0]["parents"][0],
+ },
+ "substitution-fields": [
+ "run.command",
+ ],
+ }
+ yield job
+
+
+@transforms.add
+def set_worker_exit_code(config, jobs):
+ for job in jobs:
+ worker = job["worker"]
+ worker.setdefault("retry-exit-status", [])
+ if 137 not in worker["retry-exit-status"]:
+ worker["retry-exit-status"].append(137)
+ yield job
+
+
+@transforms.add
+def remove_optimization_on_central(config, jobs):
+ """
+ For pushes to mozilla-central, run all source-test tasks that are enabled
+ for code-review so that the code-review bot can populate its DB with
+ results for the push hash.
+ """
+ if (
+ config.params["project"] != "mozilla-central"
+ or config.params["tasks_for"] != "hg-push"
+ ):
+ yield from jobs
+ return
+
+ for job in jobs:
+ if not job.get("attributes", {}).get("code-review", False):
+ yield job
+ continue
+ if "when" not in job:
+ yield job
+ continue
+ del job["when"]
+ yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/spidermonkey.py b/taskcluster/gecko_taskgraph/transforms/spidermonkey.py
new file mode 100644
index 0000000000..8e652f1668
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/spidermonkey.py
@@ -0,0 +1,21 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import copy
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def handle_keyed_by(config, jobs):
+ """Resolve fields that can be keyed by platform, etc."""
+ fields = ["fetches.toolchain"]
+ for job in jobs:
+ job = copy.deepcopy(job) # don't overwrite dict values here
+ for field in fields:
+ resolve_keyed_by(item=job, field=field, item_name=job["name"])
+
+ yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/split_by_locale.py b/taskcluster/gecko_taskgraph/transforms/split_by_locale.py
new file mode 100644
index 0000000000..ae68ab5051
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/split_by_locale.py
@@ -0,0 +1,79 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+This transform splits the jobs it receives into per-locale tasks. Locales are
+provided by the `locales-file`.
+"""
+
+from copy import deepcopy
+from pprint import pformat
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import Schema
+from voluptuous import Extra, Optional, Required
+
+from gecko_taskgraph.transforms.l10n import parse_locales_file
+
+transforms = TransformSequence()
+
+split_by_locale_schema = Schema(
+ {
+ # The file to pull locale information from. This should be a json file
+ # such as browser/locales/l10n-changesets.json.
+ Required("locales-file"): str,
+ # The platform name in the form used by the locales files. Defaults to
+ # attributes.build_platform if not provided.
+ Optional("locale-file-platform"): str,
+ # A list of properties elsewhere in the job that need to have the locale
+ # name substituted into them. The referenced properties may be strings
+ # or lists. In the case of the latter, all list values will have
+ # substitutions performed.
+ Optional("properties-with-locale"): [str],
+ Extra: object,
+ }
+)
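+
+ # A hypothetical illustration: with
+ # properties-with-locale: ["worker.env.LOCALE"]
+ # and worker.env.LOCALE set to "installer-{locale}", each per-locale task
+ # yielded by add_command below gets "installer-de", "installer-fr", etc.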
+
+
+transforms.add_validate(split_by_locale_schema)
+
+
+@transforms.add
+def add_command(config, jobs):
+ for job in jobs:
+ locales_file = job.pop("locales-file")
+ properties_with_locale = job.pop("properties-with-locale", [])
+ build_platform = job.pop(
+ "locale-file-platform", job["attributes"]["build_platform"]
+ )
+
+ for locale in parse_locales_file(locales_file, build_platform):
+ locale_job = deepcopy(job)
+ locale_job["attributes"]["locale"] = locale
+ for prop in properties_with_locale:
+ container, subfield = locale_job, prop
+ while "." in subfield:
+ f, subfield = subfield.split(".", 1)
+ if f not in container:
+ raise Exception(
+ f"Unable to find property {prop} to perform locale substitution on. Job is:\n{pprint(job)}"
+ )
+ container = container[f]
+ if not isinstance(container, dict):
+ raise Exception(
+ f"{container} is not a dict, cannot perform locale substitution. Job is:\n{pprint(job)}"
+ )
+
+ if isinstance(container[subfield], str):
+ container[subfield] = container[subfield].format(locale=locale)
+ elif isinstance(container[subfield], list):
+ for i in range(len(container[subfield])):
+ container[subfield][i] = container[subfield][i].format(
+ locale=locale
+ )
+ else:
+ raise Exception(
+ f"Don't know how to subtitute locale for value of type: {type(container[subfield])}; value is: {container[subfield]}"
+ )
+
+ yield locale_job
diff --git a/taskcluster/gecko_taskgraph/transforms/startup_test.py b/taskcluster/gecko_taskgraph/transforms/startup_test.py
new file mode 100644
index 0000000000..2660ef6e93
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/startup_test.py
@@ -0,0 +1,40 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def add_command(config, jobs):
+ for job in jobs:
+ extra_config = job.pop("extra-config")
+ upstream_kind = extra_config["upstream_kind"]
+ upstream_artifact = extra_config["upstream_artifact"]
+ binary = extra_config["binary"]
+ package_to_test = "<{}/public/build/{}>".format(
+ upstream_kind, upstream_artifact
+ )
+
+ if job["attributes"]["build_platform"].startswith("linux"):
+ job["run"]["command"] = {
+ "artifact-reference": ". $HOME/scripts/xvfb.sh && start_xvfb '1600x1200x24' 0 && "
+ + "python3 ./mach python testing/mozharness/scripts/does_it_crash.py "
+ + "--run-for 30 --thing-url "
+ + package_to_test
+ + " --thing-to-run "
+ + binary
+ }
+ else:
+ job["run"]["mach"] = {
+ "artifact-reference": "python testing/mozharness/scripts/does_it_crash.py "
+ + "--run-for 30 --thing-url "
+ + package_to_test
+ + " --thing-to-run "
+ + binary
+ }
+
+ yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/task.py b/taskcluster/gecko_taskgraph/transforms/task.py
new file mode 100644
index 0000000000..3129742ea9
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/task.py
@@ -0,0 +1,2296 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+These transformations take a task description and turn it into a TaskCluster
+task definition (along with attributes, label, etc.). The input to these
+transformations is generic to any kind of task, but abstracts away some of the
+complexities of worker implementations, scopes, and treeherder annotations.
+"""
+
+
+import datetime
+import hashlib
+import os
+import re
+import time
+
+import attr
+from mozbuild.util import memoize
+from taskcluster.utils import fromNow
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.keyed_by import evaluate_keyed_by
+from taskgraph.util.schema import (
+ Schema,
+ optionally_keyed_by,
+ resolve_keyed_by,
+ taskref_or_string,
+ validate_schema,
+)
+from taskgraph.util.treeherder import split_symbol
+from voluptuous import All, Any, Extra, Match, NotIn, Optional, Required
+
+from gecko_taskgraph import GECKO, MAX_DEPENDENCIES
+from gecko_taskgraph.optimize.schema import OptimizationSchema
+from gecko_taskgraph.transforms.job.common import get_expiration
+from gecko_taskgraph.util import docker as dockerutil
+from gecko_taskgraph.util.attributes import TRUNK_PROJECTS, is_try, release_level
+from gecko_taskgraph.util.copy_task import copy_task
+from gecko_taskgraph.util.hash import hash_path
+from gecko_taskgraph.util.partners import get_partners_to_be_published
+from gecko_taskgraph.util.scriptworker import BALROG_ACTIONS, get_release_config
+from gecko_taskgraph.util.signed_artifacts import get_signed_artifacts
+from gecko_taskgraph.util.workertypes import get_worker_type, worker_type_implementation
+
+RUN_TASK = os.path.join(GECKO, "taskcluster", "scripts", "run-task")
+
+SCCACHE_GCS_PROJECT = "sccache-3"
+
+
+@memoize
+def _run_task_suffix():
+ """String to append to cache names under control of run-task."""
+ return hash_path(RUN_TASK)[0:20]
+
+
+def _compute_geckoview_version(app_version, moz_build_date):
+ """Geckoview version string that matches geckoview gradle configuration"""
+ # Must be synchronized with /mobile/android/geckoview/build.gradle computeVersionCode(...)
+ version_without_milestone = re.sub(r"a[0-9]", "", app_version, 1)
+ parts = version_without_milestone.split(".")
+ return f"{parts[0]}.{parts[1]}.{moz_build_date}"
+
+
+# A task description is a general description of a TaskCluster task
+task_description_schema = Schema(
+ {
+ # the label for this task
+ Required("label"): str,
+ # description of the task (for metadata)
+ Required("description"): str,
+ # attributes for this task
+ Optional("attributes"): {str: object},
+ # relative path (from config.path) to the file task was defined in
+ Optional("job-from"): str,
+ # dependencies of this task, keyed by name; these are passed through
+ # verbatim and subject to the interpretation of the Task's get_dependencies
+ # method.
+ Optional("dependencies"): {
+ All(
+ str,
+ NotIn(
+ ["self", "decision"],
+ "Can't use 'self` or 'decision' as depdency names.",
+ ),
+ ): object,
+ },
+ # Soft dependencies of this task, as a list of tasks labels
+ Optional("soft-dependencies"): [str],
+ # Dependencies that must be scheduled in order for this task to run.
+ Optional("if-dependencies"): [str],
+ Optional("requires"): Any("all-completed", "all-resolved"),
+ # expiration and deadline times, relative to task creation, with units
+ # (e.g., "14 days"). Defaults are set based on the project.
+ Optional("expires-after"): str,
+ Optional("deadline-after"): str,
+ Optional("expiration-policy"): str,
+ # custom routes for this task; the default treeherder routes will be added
+ # automatically
+ Optional("routes"): [str],
+ # custom scopes for this task; any scopes required for the worker will be
+ # added automatically. The following parameters will be substituted in each
+ # scope:
+ # {level} -- the scm level of this push
+ # {project} -- the project of this push
+ Optional("scopes"): [str],
+ # Tags
+ Optional("tags"): {str: str},
+ # custom "task.extra" content
+ Optional("extra"): {str: object},
+ # treeherder-related information; see
+ # https://firefox-ci-tc.services.mozilla.com/schemas/taskcluster-treeherder/v1/task-treeherder-config.json
+ # If not specified, no treeherder extra information or routes will be
+ # added to the task
+ Optional("treeherder"): {
+ # either a bare symbol, or "grp(sym)".
+ "symbol": str,
+ # the job kind
+ "kind": Any("build", "test", "other"),
+ # tier for this task
+ "tier": int,
+ # task platform, in the form platform/collection, used to set
+ # treeherder.machine.platform and treeherder.collection or
+ # treeherder.labels
+ "platform": Match("^[A-Za-z0-9_-]{1,50}/[A-Za-z0-9_-]{1,50}$"),
+ },
+ # information for indexing this build so its artifacts can be discovered;
+ # if omitted, the build will not be indexed.
+ Optional("index"): {
+ # the name of the product this build produces
+ "product": str,
+ # the names to use for this job in the TaskCluster index
+ "job-name": str,
+ # Type of gecko v2 index to use
+ "type": Any(
+ "generic",
+ "l10n",
+ "shippable",
+ "shippable-l10n",
+ "android-shippable",
+ "android-shippable-with-multi-l10n",
+ "shippable-with-multi-l10n",
+ ),
+ # The rank that the task will receive in the TaskCluster
+ # index. A newly completed task supersedes the currently
+ # indexed task iff it has a higher rank. If unspecified,
+ # 'by-tier' behavior will be used.
+ "rank": Any(
+ # Rank is equal the timestamp of the build_date for tier-1
+ # tasks, and zero for non-tier-1. This sorts tier-{2,3}
+ # builds below tier-1 in the index.
+ "by-tier",
+ # Rank is given as an integer constant (e.g. zero to make
+ # sure a task is last in the index).
+ int,
+ # Rank is equal to the timestamp of the build_date. This
+ # option can be used to override the 'by-tier' behavior
+ # for non-tier-1 tasks.
+ "build_date",
+ ),
+ },
+ # The `run_on_projects` attribute, defaulting to "all". This dictates the
+ # projects on which this task should be included in the target task set.
+ # See the attributes documentation for details.
+ Optional("run-on-projects"): optionally_keyed_by("build-platform", [str]),
+ # Like `run_on_projects`, `run-on-hg-branches` defaults to "all".
+ Optional("run-on-hg-branches"): optionally_keyed_by("project", [str]),
+ # The `shipping_phase` attribute, defaulting to None. This specifies the
+ # release promotion phase that this task belongs to.
+ Required("shipping-phase"): Any(
+ None,
+ "build",
+ "promote",
+ "push",
+ "ship",
+ ),
+ # The `shipping_product` attribute, defaulting to None. This specifies the
+ # release promotion product that this task belongs to.
+ Required("shipping-product"): Any(None, str),
+ # The `always-target` attribute will cause the task to be included in the
+ # target_task_graph regardless of filtering. Tasks included in this manner
+ # will be candidates for optimization even when `optimize_target_tasks` is
+ # False, unless the task was also explicitly chosen by the target_tasks
+ # method.
+ Required("always-target"): bool,
+ # Optimization to perform on this task during the optimization phase.
+ # Optimizations are defined in taskcluster/gecko_taskgraph/optimize.py.
+ Required("optimization"): OptimizationSchema,
+ # the provisioner-id/worker-type for the task. The following parameters will
+ # be substituted in this string:
+ # {level} -- the scm level of this push
+ "worker-type": str,
+ # Whether the job should use sccache compiler caching.
+ Required("use-sccache"): bool,
+ # information specific to the worker implementation that will run this task
+ Optional("worker"): {
+ Required("implementation"): str,
+ Extra: object,
+ },
+ # Override the default priority for the project
+ Optional("priority"): str,
+ }
+)
+
+TC_TREEHERDER_SCHEMA_URL = (
+ "https://github.com/taskcluster/taskcluster-treeherder/"
+ "blob/master/schemas/task-treeherder-config.yml"
+)
+
+
+UNKNOWN_GROUP_NAME = (
+ "Treeherder group {} (from {}) has no name; add it to taskcluster/ci/config.yml"
+)
+
+V2_ROUTE_TEMPLATES = [
+ "index.{trust-domain}.v2.{project}.latest.{product}.{job-name}",
+ "index.{trust-domain}.v2.{project}.pushdate.{build_date_long}.{product}.{job-name}",
+ "index.{trust-domain}.v2.{project}.pushdate.{build_date}.latest.{product}.{job-name}",
+ "index.{trust-domain}.v2.{project}.pushlog-id.{pushlog_id}.{product}.{job-name}",
+ "index.{trust-domain}.v2.{project}.revision.{branch_rev}.{product}.{job-name}",
+]
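+
+ # For example (hypothetical values): with trust-domain "gecko", project
+ # "mozilla-central", product "firefox" and job-name "linux64-opt", the first
+ # template above expands to
+ # "index.gecko.v2.mozilla-central.latest.firefox.linux64-opt".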
+
+# {central, inbound, autoland} write to a "trunk" index prefix. This facilitates
+# walking of tasks with similar configurations.
+V2_TRUNK_ROUTE_TEMPLATES = [
+ "index.{trust-domain}.v2.trunk.revision.{branch_rev}.{product}.{job-name}",
+]
+
+V2_SHIPPABLE_TEMPLATES = [
+ "index.{trust-domain}.v2.{project}.shippable.latest.{product}.{job-name}",
+ "index.{trust-domain}.v2.{project}.shippable.{build_date}.revision.{branch_rev}.{product}.{job-name}", # noqa - too long
+ "index.{trust-domain}.v2.{project}.shippable.{build_date}.latest.{product}.{job-name}",
+ "index.{trust-domain}.v2.{project}.shippable.revision.{branch_rev}.{product}.{job-name}",
+]
+
+V2_SHIPPABLE_L10N_TEMPLATES = [
+ "index.{trust-domain}.v2.{project}.shippable.latest.{product}-l10n.{job-name}.{locale}",
+ "index.{trust-domain}.v2.{project}.shippable.{build_date}.revision.{branch_rev}.{product}-l10n.{job-name}.{locale}", # noqa - too long
+ "index.{trust-domain}.v2.{project}.shippable.{build_date}.latest.{product}-l10n.{job-name}.{locale}", # noqa - too long
+ "index.{trust-domain}.v2.{project}.shippable.revision.{branch_rev}.{product}-l10n.{job-name}.{locale}", # noqa - too long
+]
+
+V2_L10N_TEMPLATES = [
+ "index.{trust-domain}.v2.{project}.revision.{branch_rev}.{product}-l10n.{job-name}.{locale}",
+ "index.{trust-domain}.v2.{project}.pushdate.{build_date_long}.{product}-l10n.{job-name}.{locale}", # noqa - too long
+ "index.{trust-domain}.v2.{project}.pushlog-id.{pushlog_id}.{product}-l10n.{job-name}.{locale}",
+ "index.{trust-domain}.v2.{project}.latest.{product}-l10n.{job-name}.{locale}",
+]
+
+# This index is specifically for builds that include geckoview releases,
+# so we can hard-code the project to "geckoview"
+V2_GECKOVIEW_RELEASE = "index.{trust-domain}.v2.{project}.geckoview-version.{geckoview-version}.{product}.{job-name}" # noqa - too long
+
+# the roots of the treeherder routes
+TREEHERDER_ROUTE_ROOT = "tc-treeherder"
+
+
+def get_branch_rev(config):
+ return config.params[
+ "{}head_rev".format(config.graph_config["project-repo-param-prefix"])
+ ]
+
+
+def get_branch_repo(config):
+ return config.params[
+ "{}head_repository".format(
+ config.graph_config["project-repo-param-prefix"],
+ )
+ ]
+
+
+@memoize
+def get_default_priority(graph_config, project):
+ return evaluate_keyed_by(
+ graph_config["task-priority"], "Graph Config", {"project": project}
+ )
+
+
+# define a collection of payload builders, depending on the worker implementation
+payload_builders = {}
+
+
+@attr.s(frozen=True)
+class PayloadBuilder:
+ schema = attr.ib(type=Schema)
+ builder = attr.ib()
+
+
+def payload_builder(name, schema):
+ schema = Schema({Required("implementation"): name, Optional("os"): str}).extend(
+ schema
+ )
+
+ def wrap(func):
+ assert name not in payload_builders, f"duplicate payload builder name {name}"
+ payload_builders[name] = PayloadBuilder(schema, func)
+ return func
+
+ return wrap
+
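+ # A minimal hypothetical sketch of how builders register themselves (the
+ # real builders follow below):
+ #
+ # @payload_builder("my-worker", schema={Required("max-run-time"): int})
+ # def build_my_worker_payload(config, task, task_def):
+ # task_def["payload"] = {"maxRunTime": task["worker"]["max-run-time"]}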
+
+# define a collection of index builders, depending on the type implementation
+index_builders = {}
+
+
+def index_builder(name):
+ def wrap(func):
+ assert name not in index_builders, f"duplicate index builder name {name}"
+ index_builders[name] = func
+ return func
+
+ return wrap
+
+
+UNSUPPORTED_INDEX_PRODUCT_ERROR = """\
+The gecko-v2 product {product} is not in the list of configured products in
+`taskcluster/ci/config.yml`.
+"""
+
+
+def verify_index(config, index):
+ product = index["product"]
+ if product not in config.graph_config["index"]["products"]:
+ raise Exception(UNSUPPORTED_INDEX_PRODUCT_ERROR.format(product=product))
+
+
+@payload_builder(
+ "docker-worker",
+ schema={
+ Required("os"): "linux",
+ # For tasks that will run in docker-worker, this is the
+ # name of the docker image or in-tree docker image to run the task in. If
+ # in-tree, then a dependency will be created automatically. This is
+ # generally `desktop-test`, or an image that acts an awful lot like it.
+ Required("docker-image"): Any(
+ # a raw Docker image path (repo/image:tag)
+ str,
+ # an in-tree generated docker image (from `taskcluster/docker/<name>`)
+ {"in-tree": str},
+ # an indexed docker image
+ {"indexed": str},
+ ),
+ # worker features that should be enabled
+ Required("chain-of-trust"): bool,
+ Required("taskcluster-proxy"): bool,
+ Required("allow-ptrace"): bool,
+ Required("loopback-video"): bool,
+ Required("loopback-audio"): bool,
+ Required("docker-in-docker"): bool, # (aka 'dind')
+ Required("privileged"): bool,
+ Optional("kvm"): bool,
+ # Paths to Docker volumes.
+ #
+ # For in-tree Docker images, volumes can be parsed from Dockerfile.
+ # This only works for the Dockerfile itself: if a volume is defined in
+ # a base image, it will need to be declared here. Out-of-tree Docker
+ # images will also require explicit volume annotation.
+ #
+ # Caches are often mounted to the same path as Docker volumes. In this
+ # case, they take precedence over a Docker volume. But a volume still
+ # needs to be declared for the path.
+ Optional("volumes"): [str],
+ Optional(
+ "required-volumes",
+ description=(
+ "Paths that are required to be volumes for performance reasons. "
+ "For in-tree images, these paths will be checked to verify that they "
+ "are defined as volumes."
+ ),
+ ): [str],
+ # caches to set up for the task
+ Optional("caches"): [
+ {
+ # only one type is supported by any of the workers right now
+ "type": "persistent",
+ # name of the cache, allowing re-use by subsequent tasks naming the
+ # same cache
+ "name": str,
+ # location in the task image where the cache will be mounted
+ "mount-point": str,
+ # Whether the cache should be skipped in untrusted environments
+ # (like the Try repo).
+ Optional("skip-untrusted"): bool,
+ }
+ ],
+ # artifacts to extract from the task image after completion
+ Optional("artifacts"): [
+ {
+ # type of artifact -- simple file, or recursive directory
+ "type": Any("file", "directory"),
+ # task image path from which to read artifact
+ "path": str,
+ # name of the produced artifact (root of the names for
+ # type=directory)
+ "name": str,
+ "expires-after": str,
+ }
+ ],
+ # environment variables
+ Required("env"): {str: taskref_or_string},
+ # the command to run; if not given, docker-worker will default to the
+ # command in the docker image
+ Optional("command"): [taskref_or_string],
+ # the maximum time to run, in seconds
+ Required("max-run-time"): int,
+ # the exit status code(s) that indicates the task should be retried
+ Optional("retry-exit-status"): [int],
+ # the exit status code(s) that indicates the caches used by the task
+ # should be purged
+ Optional("purge-caches-exit-status"): [int],
+ # Whether any artifacts are assigned to this worker
+ Optional("skip-artifacts"): bool,
+ },
+)
+def build_docker_worker_payload(config, task, task_def):
+ worker = task["worker"]
+ level = int(config.params["level"])
+
+ image = worker["docker-image"]
+ if isinstance(image, dict):
+ if "in-tree" in image:
+ name = image["in-tree"]
+ docker_image_task = "docker-image-" + image["in-tree"]
+ task.setdefault("dependencies", {})["docker-image"] = docker_image_task
+
+ image = {
+ "path": "public/image.tar.zst",
+ "taskId": {"task-reference": "<docker-image>"},
+ "type": "task-image",
+ }
+
+ # Find VOLUME in Dockerfile.
+ volumes = dockerutil.parse_volumes(name)
+ for v in sorted(volumes):
+ if v in worker["volumes"]:
+ raise Exception(
+ "volume %s already defined; "
+ "if it is defined in a Dockerfile, "
+ "it does not need to be specified in the "
+ "worker definition" % v
+ )
+
+ worker["volumes"].append(v)
+
+ elif "indexed" in image:
+ image = {
+ "path": "public/image.tar.zst",
+ "namespace": image["indexed"],
+ "type": "indexed-image",
+ }
+ else:
+ raise Exception("unknown docker image type")
+
+ features = {}
+
+ if worker.get("taskcluster-proxy"):
+ features["taskclusterProxy"] = True
+
+ if worker.get("allow-ptrace"):
+ features["allowPtrace"] = True
+ task_def["scopes"].append("docker-worker:feature:allowPtrace")
+
+ if worker.get("chain-of-trust"):
+ features["chainOfTrust"] = True
+
+ if worker.get("docker-in-docker"):
+ features["dind"] = True
+
+ # Never enable sccache on the toolchains repo, as there is no benefit from it
+ # because each push uses a different compiler.
+ if task.get("use-sccache") and config.params["project"] != "toolchains":
+ features["taskclusterProxy"] = True
+ task_def["scopes"].append(
+ "assume:project:taskcluster:{trust_domain}:level-{level}-sccache-buckets".format(
+ trust_domain=config.graph_config["trust-domain"],
+ level=config.params["level"],
+ )
+ )
+ worker["env"]["USE_SCCACHE"] = "1"
+ worker["env"]["SCCACHE_GCS_PROJECT"] = SCCACHE_GCS_PROJECT
+ # Disable sccache idle shutdown.
+ worker["env"]["SCCACHE_IDLE_TIMEOUT"] = "0"
+ else:
+ worker["env"]["SCCACHE_DISABLE"] = "1"
+
+ capabilities = {}
+
+ for lo in "audio", "video":
+ if worker.get("loopback-" + lo):
+ capitalized = "loopback" + lo.capitalize()
+ devices = capabilities.setdefault("devices", {})
+ devices[capitalized] = True
+ task_def["scopes"].append("docker-worker:capability:device:" + capitalized)
+
+ if worker.get("kvm"):
+ devices = capabilities.setdefault("devices", {})
+ devices["kvm"] = True
+ task_def["scopes"].append("docker-worker:capability:device:kvm")
+
+ if worker.get("privileged"):
+ capabilities["privileged"] = True
+ task_def["scopes"].append("docker-worker:capability:privileged")
+
+ task_def["payload"] = payload = {
+ "image": image,
+ "env": worker["env"],
+ }
+ if "command" in worker:
+ payload["command"] = worker["command"]
+
+ if "max-run-time" in worker:
+ payload["maxRunTime"] = worker["max-run-time"]
+
+ run_task = payload.get("command", [""])[0].endswith("run-task")
+
+ # run-task exits EXIT_PURGE_CACHES if there is a problem with caches.
+ # Automatically retry the tasks and purge caches if we see this exit
+ # code.
+ # TODO move this closer to code adding run-task once bug 1469697 is
+ # addressed.
+ if run_task:
+ worker.setdefault("retry-exit-status", []).append(72)
+ worker.setdefault("purge-caches-exit-status", []).append(72)
+
+ payload["onExitStatus"] = {}
+ if "retry-exit-status" in worker:
+ payload["onExitStatus"]["retry"] = worker["retry-exit-status"]
+ if "purge-caches-exit-status" in worker:
+ payload["onExitStatus"]["purgeCaches"] = worker["purge-caches-exit-status"]
+
+ if "artifacts" in worker:
+ artifacts = {}
+ for artifact in worker["artifacts"]:
+ artifacts[artifact["name"]] = {
+ "path": artifact["path"],
+ "type": artifact["type"],
+ "expires": {"relative-datestamp": artifact["expires-after"]},
+ }
+ payload["artifacts"] = artifacts
+
+ if isinstance(worker.get("docker-image"), str):
+ out_of_tree_image = worker["docker-image"]
+ else:
+ out_of_tree_image = None
+ image = worker.get("docker-image", {}).get("in-tree")
+
+ if "caches" in worker:
+ caches = {}
+
+ # run-task knows how to validate caches.
+ #
+ # To help ensure new run-task features and bug fixes don't interfere
+ # with existing caches, we seed the hash of run-task into cache names.
+ # So, any time run-task changes, we should get a fresh set of caches.
+ # This means run-task can make changes to cache interaction at any time
+ # without regard for backwards or future compatibility.
+ #
+ # But this mechanism only works for in-tree Docker images that are built
+ # with the current run-task! For out-of-tree Docker images, we have no
+ # way of knowing the contents of their run-task. So, in addition to varying
+ # cache names by the contents of run-task, we also take the Docker image
+ # name into consideration. This means that different Docker images will
+ # never share the same cache. This is a bit unfortunate. But it is the
+ # safest thing to do. Fortunately, most images are defined in-tree.
+ #
+ # For out-of-tree Docker images, we don't strictly need to incorporate
+ # the run-task content into the cache name. However, doing so preserves
+ # the mechanism whereby changing run-task results in new caches
+ # everywhere.
+
+ # As an additional mechanism to force the use of different caches, the
+ # string literal in the variable below can be changed. This is
+ # preferred to changing run-task because it doesn't require images
+ # to be rebuilt.
+ cache_version = "v3"
+
+ if run_task:
+ suffix = f"{cache_version}-{_run_task_suffix()}"
+
+ if out_of_tree_image:
+ name_hash = hashlib.sha256(
+ out_of_tree_image.encode("utf-8")
+ ).hexdigest()
+ suffix += name_hash[0:12]
+
+ else:
+ suffix = cache_version
+
+ skip_untrusted = is_try(config.params) or level == 1
+
+ for cache in worker["caches"]:
+ # Some caches aren't enabled in environments where we can't
+ # guarantee certain behavior. Filter those out.
+ if cache.get("skip-untrusted") and skip_untrusted:
+ continue
+
+ name = "{trust_domain}-level-{level}-{name}-{suffix}".format(
+ trust_domain=config.graph_config["trust-domain"],
+ level=config.params["level"],
+ name=cache["name"],
+ suffix=suffix,
+ )
+
+ caches[name] = cache["mount-point"]
+ task_def["scopes"].append("docker-worker:cache:%s" % name)
+
+ # Assertion: only run-task is interested in this.
+ if run_task:
+ payload["env"]["TASKCLUSTER_CACHES"] = ";".join(sorted(caches.values()))
+
+ payload["cache"] = caches
+
+ # And send down volumes information to run-task as well.
+ if run_task and worker.get("volumes"):
+ payload["env"]["TASKCLUSTER_VOLUMES"] = ";".join(sorted(worker["volumes"]))
+
+ if payload.get("cache") and skip_untrusted:
+ payload["env"]["TASKCLUSTER_UNTRUSTED_CACHES"] = "1"
+
+ if features:
+ payload["features"] = features
+ if capabilities:
+ payload["capabilities"] = capabilities
+
+ check_caches_are_volumes(task)
+ check_required_volumes(task)
+
+
+@payload_builder(
+ "generic-worker",
+ schema={
+ Required("os"): Any("windows", "macosx", "linux", "linux-bitbar"),
+ # see http://schemas.taskcluster.net/generic-worker/v1/payload.json
+ # and https://docs.taskcluster.net/reference/workers/generic-worker/payload
+ # command is a list of commands to run, sequentially
+ # on Windows, each command is a string, on OS X and Linux, each command is
+ # a string array
+ Required("command"): Any(
+ [taskref_or_string], [[taskref_or_string]] # Windows # Linux / OS X
+ ),
+ # artifacts to extract from the task image after completion; note that artifacts
+ # for the generic worker cannot have names
+ Optional("artifacts"): [
+ {
+ # type of artifact -- simple file, or recursive directory
+ "type": Any("file", "directory"),
+ # filesystem path from which to read artifact
+ "path": str,
+ # if not specified, path is used for artifact name
+ Optional("name"): str,
+ "expires-after": str,
+ }
+ ],
+ # Directories and/or files to be mounted.
+ # The actual allowed combinations are stricter than the model below,
+ # but this provides a simple starting point.
+ # See https://docs.taskcluster.net/reference/workers/generic-worker/payload
+ Optional("mounts"): [
+ {
+ # A unique name for the cache volume, implies writable cache directory
+ # (otherwise mount is a read-only file or directory).
+ Optional("cache-name"): str,
+ # Optional content for pre-loading cache, or mandatory content for
+ # read-only file or directory. Pre-loaded content can come from either
+ # a task artifact or from a URL.
+ Optional("content"): {
+ # *** Either (artifact and task-id) or url must be specified. ***
+ # Artifact name that contains the content.
+ Optional("artifact"): str,
+ # Task ID that has the artifact that contains the content.
+ Optional("task-id"): taskref_or_string,
+ # URL that supplies the content in response to an unauthenticated
+ # GET request.
+ Optional("url"): str,
+ },
+ # *** Either file or directory must be specified. ***
+ # If mounting a cache or read-only directory, the filesystem location of
+ # the directory should be specified as a relative path to the task
+ # directory here.
+ Optional("directory"): str,
+ # If mounting a file, specify the relative path within the task
+ # directory to mount the file (the file will be read only).
+ Optional("file"): str,
+ # Required if and only if `content` is specified and mounting a
+ # directory (not a file). This should be the archive format of the
+ # content (either pre-loaded cache or read-only directory).
+ Optional("format"): Any("rar", "tar.bz2", "tar.gz", "zip"),
+ }
+ ],
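+ # A hypothetical example entry: a read-only file mount populated from a
+ # dependency's artifact:
+ # {"file": "fetches/target.tar.xz",
+ # "content": {"artifact": "public/build/target.tar.xz",
+ # "task-id": {"task-reference": "<build>"}}}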
+ # environment variables
+ Required("env"): {str: taskref_or_string},
+ # the maximum time to run, in seconds
+ Required("max-run-time"): int,
+ # os user groups for test task workers
+ Optional("os-groups"): [str],
+ # feature for test tasks to run as administrator
+ Optional("run-as-administrator"): bool,
+ # optional features
+ Required("chain-of-trust"): bool,
+ Optional("taskcluster-proxy"): bool,
+ # the exit status code(s) that indicates the task should be retried
+ Optional("retry-exit-status"): [int],
+ # Whether any artifacts are assigned to this worker
+ Optional("skip-artifacts"): bool,
+ },
+)
+def build_generic_worker_payload(config, task, task_def):
+ worker = task["worker"]
+ features = {}
+
+ task_def["payload"] = {
+ "command": worker["command"],
+ "maxRunTime": worker["max-run-time"],
+ }
+
+ if worker["os"] == "windows":
+ task_def["payload"]["onExitStatus"] = {
+ "retry": [
+ # These codes (on windows) indicate a process interruption,
+ # rather than a task run failure. See bug 1544403.
+ 1073807364, # process force-killed due to system shutdown
+ 3221225786, # sigint (any interrupt)
+ ]
+ }
+ if "retry-exit-status" in worker:
+ task_def["payload"].setdefault("onExitStatus", {}).setdefault(
+ "retry", []
+ ).extend(worker["retry-exit-status"])
+ if worker["os"] == "linux-bitbar":
+ task_def["payload"].setdefault("onExitStatus", {}).setdefault("retry", [])
+ # exit code 4 is used to indicate an intermittent android device error
+ if 4 not in task_def["payload"]["onExitStatus"]["retry"]:
+ task_def["payload"]["onExitStatus"]["retry"].extend([4])
+
+ env = worker.get("env", {})
+
+ # Never enable sccache on the toolchains repo, as there is no benefit from it
+ # because each push uses a different compiler.
+ if task.get("use-sccache") and config.params["project"] != "toolchains":
+ features["taskclusterProxy"] = True
+ task_def["scopes"].append(
+ "assume:project:taskcluster:{trust_domain}:level-{level}-sccache-buckets".format(
+ trust_domain=config.graph_config["trust-domain"],
+ level=config.params["level"],
+ )
+ )
+ env["USE_SCCACHE"] = "1"
+ worker["env"]["SCCACHE_GCS_PROJECT"] = SCCACHE_GCS_PROJECT
+ # Disable sccache idle shutdown.
+ env["SCCACHE_IDLE_TIMEOUT"] = "0"
+ else:
+ env["SCCACHE_DISABLE"] = "1"
+
+ if env:
+ task_def["payload"]["env"] = env
+
+ artifacts = []
+
+ for artifact in worker.get("artifacts", []):
+ a = {
+ "path": artifact["path"],
+ "type": artifact["type"],
+ "expires": {"relative-datestamp": artifact["expires-after"]},
+ }
+ if "name" in artifact:
+ a["name"] = artifact["name"]
+ artifacts.append(a)
+
+ if artifacts:
+ task_def["payload"]["artifacts"] = artifacts
+
+ # Need to copy over mounts, but rename keys to respect naming convention
+ # * 'cache-name' -> 'cacheName'
+ # * 'task-id' -> 'taskId'
+ # All other key names are already suitable, and don't need renaming.
+ mounts = copy_task(worker.get("mounts", []))
+ for mount in mounts:
+ if "cache-name" in mount:
+ mount["cacheName"] = "{trust_domain}-level-{level}-{name}".format(
+ trust_domain=config.graph_config["trust-domain"],
+ level=config.params["level"],
+ name=mount.pop("cache-name"),
+ )
+ task_def["scopes"].append(
+ "generic-worker:cache:{}".format(mount["cacheName"])
+ )
+ if "content" in mount:
+ if "task-id" in mount["content"]:
+ mount["content"]["taskId"] = mount["content"].pop("task-id")
+ if "artifact" in mount["content"]:
+ if not mount["content"]["artifact"].startswith("public/"):
+ task_def["scopes"].append(
+ "queue:get-artifact:{}".format(mount["content"]["artifact"])
+ )
+
+ if mounts:
+ task_def["payload"]["mounts"] = mounts
+
+ if worker.get("os-groups"):
+ task_def["payload"]["osGroups"] = worker["os-groups"]
+ task_def["scopes"].extend(
+ [
+ "generic-worker:os-group:{}/{}".format(task["worker-type"], group)
+ for group in worker["os-groups"]
+ ]
+ )
+
+ if worker.get("chain-of-trust"):
+ features["chainOfTrust"] = True
+
+ if worker.get("taskcluster-proxy"):
+ features["taskclusterProxy"] = True
+
+ if worker.get("run-as-administrator", False):
+ features["runAsAdministrator"] = True
+ task_def["scopes"].append(
+ "generic-worker:run-as-administrator:{}".format(task["worker-type"]),
+ )
+
+ if features:
+ task_def["payload"]["features"] = features
+
+
+@payload_builder(
+ "scriptworker-signing",
+ schema={
+ # the maximum time to run, in seconds
+ Required("max-run-time"): int,
+ # list of artifact URLs for the artifacts that should be signed
+ Required("upstream-artifacts"): [
+ {
+ # taskId of the task with the artifact
+ Required("taskId"): taskref_or_string,
+ # type of signing task (for CoT)
+ Required("taskType"): str,
+ # Paths to the artifacts to sign
+ Required("paths"): [str],
+ # Signing formats to use on each of the paths
+ Required("formats"): [str],
+ Optional("singleFileGlobs"): [str],
+ }
+ ],
+ # behavior for mac iscript
+ Optional("mac-behavior"): Any(
+ "apple_notarization",
+ "mac_sign_and_pkg",
+ "mac_sign_and_pkg_hardened",
+ "mac_geckodriver",
+ "mac_notarize_geckodriver",
+ "mac_single_file",
+ "mac_notarize_single_file",
+ ),
+ Optional("entitlements-url"): str,
+ Optional("requirements-plist-url"): str,
+ Optional("provisioning-profile-config"): [
+ {
+ Required("profile_name"): str,
+ Required("target_path"): str,
+ }
+ ],
+ Optional("hardened-sign-config"): [
+ {
+ Optional("deep"): bool,
+ Optional("runtime"): bool,
+ Optional("force"): bool,
+ Optional("entitlements"): str,
+ Optional("requirements"): str,
+ Required("globs"): [str],
+ }
+ ],
+ },
+)
+def build_scriptworker_signing_payload(config, task, task_def):
+ worker = task["worker"]
+
+ task_def["payload"] = {
+ "maxRunTime": worker["max-run-time"],
+ "upstreamArtifacts": worker["upstream-artifacts"],
+ }
+ if worker.get("mac-behavior"):
+ task_def["payload"]["behavior"] = worker["mac-behavior"]
+ for attribute in (
+ "entitlements-url",
+ "requirements-plist-url",
+ "hardened-sign-config",
+ "provisioning-profile-config",
+ ):
+ if worker.get(attribute):
+ task_def["payload"][attribute] = worker[attribute]
+
+ artifacts = set(task.setdefault("attributes", {}).get("release_artifacts", []))
+ for upstream_artifact in worker["upstream-artifacts"]:
+ for path in upstream_artifact["paths"]:
+ artifacts.update(
+ get_signed_artifacts(
+ input=path,
+ formats=upstream_artifact["formats"],
+ behavior=worker.get("mac-behavior"),
+ )
+ )
+ task["attributes"]["release_artifacts"] = sorted(list(artifacts))
+
+
+@payload_builder(
+ "beetmover",
+ schema={
+ # the maximum time to run, in seconds
+ Required("max-run-time"): int,
+ # locale key, if this is a locale beetmover job
+ Optional("locale"): str,
+ Required("release-properties"): {
+ "app-name": str,
+ "app-version": str,
+ "branch": str,
+ "build-id": str,
+ "hash-type": str,
+ "platform": str,
+ },
+ # list of artifact URLs for the artifacts that should be beetmoved
+ Required("upstream-artifacts"): [
+ {
+ # taskId of the task with the artifact
+ Required("taskId"): taskref_or_string,
+ # type of signing task (for CoT)
+ Required("taskType"): str,
+ # Paths to the artifacts to sign
+ Required("paths"): [str],
+ # locale is used to map upload path and allow for duplicate simple names
+ Required("locale"): str,
+ }
+ ],
+ Optional("artifact-map"): object,
+ },
+)
+def build_beetmover_payload(config, task, task_def):
+ worker = task["worker"]
+ release_config = get_release_config(config)
+ release_properties = worker["release-properties"]
+
+ task_def["payload"] = {
+ "maxRunTime": worker["max-run-time"],
+ "releaseProperties": {
+ "appName": release_properties["app-name"],
+ "appVersion": release_properties["app-version"],
+ "branch": release_properties["branch"],
+ "buildid": release_properties["build-id"],
+ "hashType": release_properties["hash-type"],
+ "platform": release_properties["platform"],
+ },
+ "upload_date": config.params["build_date"],
+ "upstreamArtifacts": worker["upstream-artifacts"],
+ }
+ if worker.get("locale"):
+ task_def["payload"]["locale"] = worker["locale"]
+ if worker.get("artifact-map"):
+ task_def["payload"]["artifactMap"] = worker["artifact-map"]
+ if release_config:
+ task_def["payload"].update(release_config)
+
+
+@payload_builder(
+ "beetmover-push-to-release",
+ schema={
+ # the maximum time to run, in seconds
+ Required("max-run-time"): int,
+ Required("product"): str,
+ },
+)
+def build_beetmover_push_to_release_payload(config, task, task_def):
+ worker = task["worker"]
+ release_config = get_release_config(config)
+ partners = [f"{p}/{s}" for p, s, _ in get_partners_to_be_published(config)]
+
+ task_def["payload"] = {
+ "maxRunTime": worker["max-run-time"],
+ "product": worker["product"],
+ "version": release_config["version"],
+ "build_number": release_config["build_number"],
+ "partners": partners,
+ }
+
+
+@payload_builder(
+ "beetmover-import-from-gcs-to-artifact-registry",
+ schema={
+ Required("max-run-time"): int,
+ Required("gcs-sources"): [str],
+ Required("product"): str,
+ },
+)
+def build_import_from_gcs_to_artifact_registry_payload(config, task, task_def):
+ task_def["payload"] = {
+ "product": task["worker"]["product"],
+ "gcs_sources": task["worker"]["gcs-sources"],
+ }
+
+
+@payload_builder(
+ "beetmover-maven",
+ schema={
+ Required("max-run-time"): int,
+ Required("release-properties"): {
+ "app-name": str,
+ "app-version": str,
+ "branch": str,
+ "build-id": str,
+ "artifact-id": str,
+ "hash-type": str,
+ "platform": str,
+ },
+ Required("upstream-artifacts"): [
+ {
+ Required("taskId"): taskref_or_string,
+ Required("taskType"): str,
+ Required("paths"): [str],
+ Optional("zipExtract"): bool,
+ }
+ ],
+ Optional("artifact-map"): object,
+ },
+)
+def build_beetmover_maven_payload(config, task, task_def):
+ build_beetmover_payload(config, task, task_def)
+
+ task_def["payload"]["artifact_id"] = task["worker"]["release-properties"][
+ "artifact-id"
+ ]
+ if task["worker"].get("artifact-map"):
+ task_def["payload"]["artifactMap"] = task["worker"]["artifact-map"]
+
+ task_def["payload"]["version"] = _compute_geckoview_version(
+ task["worker"]["release-properties"]["app-version"],
+ task["worker"]["release-properties"]["build-id"],
+ )
+
+ del task_def["payload"]["releaseProperties"]["hashType"]
+ del task_def["payload"]["releaseProperties"]["platform"]
+
+
+@payload_builder(
+ "balrog",
+ schema={
+ Required("balrog-action"): Any(*BALROG_ACTIONS),
+ Optional("product"): str,
+ Optional("platforms"): [str],
+ Optional("release-eta"): str,
+ Optional("channel-names"): optionally_keyed_by("release-type", [str]),
+ Optional("require-mirrors"): bool,
+ Optional("publish-rules"): optionally_keyed_by(
+ "release-type", "release-level", [int]
+ ),
+ Optional("rules-to-update"): optionally_keyed_by(
+ "release-type", "release-level", [str]
+ ),
+ Optional("archive-domain"): optionally_keyed_by("release-level", str),
+ Optional("download-domain"): optionally_keyed_by("release-level", str),
+ Optional("blob-suffix"): str,
+ Optional("complete-mar-filename-pattern"): str,
+ Optional("complete-mar-bouncer-product-pattern"): str,
+ Optional("update-line"): object,
+ Optional("suffixes"): [str],
+ Optional("background-rate"): optionally_keyed_by(
+ "release-type", "beta-number", Any(int, None)
+ ),
+ Optional("force-fallback-mapping-update"): optionally_keyed_by(
+ "release-type", "beta-number", bool
+ ),
+ Optional("pin-channels"): optionally_keyed_by(
+ "release-type", "release-level", [str]
+ ),
+ # list of artifact URLs for the artifacts that should be beetmoved
+ Optional("upstream-artifacts"): [
+ {
+ # taskId of the task with the artifact
+ Required("taskId"): taskref_or_string,
+ # type of signing task (for CoT)
+ Required("taskType"): str,
+ # Paths to the artifacts to sign
+ Required("paths"): [str],
+ }
+ ],
+ },
+)
+def build_balrog_payload(config, task, task_def):
+ worker = task["worker"]
+ release_config = get_release_config(config)
+ beta_number = None
+ if "b" in release_config["version"]:
+ beta_number = release_config["version"].split("b")[-1]
+
+ task_def["payload"] = {
+ "behavior": worker["balrog-action"],
+ }
+
+ if worker["balrog-action"] in ("submit-locale", "v2-submit-locale"):
+ task_def["payload"].update(
+ {
+ "upstreamArtifacts": worker["upstream-artifacts"],
+ "suffixes": worker["suffixes"],
+ }
+ )
+ else:
+ for prop in (
+ "archive-domain",
+ "channel-names",
+ "download-domain",
+ "publish-rules",
+ "rules-to-update",
+ "background-rate",
+ "force-fallback-mapping-update",
+ "pin-channels",
+ ):
+ if prop in worker:
+ resolve_keyed_by(
+ worker,
+ prop,
+ task["description"],
+ **{
+ "release-type": config.params["release_type"],
+ "release-level": release_level(config.params["project"]),
+ "beta-number": beta_number,
+ },
+ )
+ task_def["payload"].update(
+ {
+ "build_number": release_config["build_number"],
+ "product": worker["product"],
+ "version": release_config["version"],
+ }
+ )
+ for prop in (
+ "blob-suffix",
+ "complete-mar-filename-pattern",
+ "complete-mar-bouncer-product-pattern",
+ "pin-channels",
+ ):
+ if prop in worker:
+ task_def["payload"][prop.replace("-", "_")] = worker[prop]
+ if worker["balrog-action"] in ("submit-toplevel", "v2-submit-toplevel"):
+ task_def["payload"].update(
+ {
+ "app_version": release_config["appVersion"],
+ "archive_domain": worker["archive-domain"],
+ "channel_names": worker["channel-names"],
+ "download_domain": worker["download-domain"],
+ "partial_versions": release_config.get("partial_versions", ""),
+ "platforms": worker["platforms"],
+ "rules_to_update": worker["rules-to-update"],
+ "require_mirrors": worker["require-mirrors"],
+ "update_line": worker["update-line"],
+ }
+ )
+ else: # schedule / ship
+ task_def["payload"].update(
+ {
+ "publish_rules": worker["publish-rules"],
+ "release_eta": worker.get(
+ "release-eta", config.params.get("release_eta")
+ )
+ or "",
+ }
+ )
+ if worker.get("force-fallback-mapping-update"):
+ task_def["payload"]["force_fallback_mapping_update"] = worker[
+ "force-fallback-mapping-update"
+ ]
+ if worker.get("background-rate"):
+ task_def["payload"]["background_rate"] = worker["background-rate"]
+
+
+@payload_builder(
+ "bouncer-aliases",
+ schema={
+ Required("entries"): object,
+ },
+)
+def build_bouncer_aliases_payload(config, task, task_def):
+ worker = task["worker"]
+
+ task_def["payload"] = {"aliases_entries": worker["entries"]}
+
+
+@payload_builder(
+ "bouncer-locations",
+ schema={
+ Required("implementation"): "bouncer-locations",
+ Required("bouncer-products"): [str],
+ },
+)
+def build_bouncer_locations_payload(config, task, task_def):
+ worker = task["worker"]
+ release_config = get_release_config(config)
+
+ task_def["payload"] = {
+ "bouncer_products": worker["bouncer-products"],
+ "version": release_config["version"],
+ "product": task["shipping-product"],
+ }
+
+
+@payload_builder(
+ "bouncer-submission",
+ schema={
+ Required("locales"): [str],
+ Required("entries"): object,
+ },
+)
+def build_bouncer_submission_payload(config, task, task_def):
+ worker = task["worker"]
+
+ task_def["payload"] = {
+ "locales": worker["locales"],
+ "submission_entries": worker["entries"],
+ }
+
+
+@payload_builder(
+ "push-flatpak",
+ schema={
+ Required("channel"): str,
+ Required("upstream-artifacts"): [
+ {
+ Required("taskId"): taskref_or_string,
+ Required("taskType"): str,
+ Required("paths"): [str],
+ }
+ ],
+ },
+)
+def build_push_flatpak_payload(config, task, task_def):
+ worker = task["worker"]
+
+ task_def["payload"] = {
+ "channel": worker["channel"],
+ "upstreamArtifacts": worker["upstream-artifacts"],
+ }
+
+
+@payload_builder(
+ "push-msix",
+ schema={
+ Required("channel"): str,
+ Optional("publish-mode"): str,
+ Required("upstream-artifacts"): [
+ {
+ Required("taskId"): taskref_or_string,
+ Required("taskType"): str,
+ Required("paths"): [str],
+ }
+ ],
+ },
+)
+def build_push_msix_payload(config, task, task_def):
+ worker = task["worker"]
+
+ task_def["payload"] = {
+ "channel": worker["channel"],
+ "upstreamArtifacts": worker["upstream-artifacts"],
+ }
+ if worker.get("publish-mode"):
+ task_def["payload"]["publishMode"] = worker["publish-mode"]
+
+
+@payload_builder(
+ "shipit-shipped",
+ schema={
+ Required("release-name"): str,
+ },
+)
+def build_ship_it_shipped_payload(config, task, task_def):
+ worker = task["worker"]
+
+ task_def["payload"] = {"release_name": worker["release-name"]}
+
+
+@payload_builder(
+ "shipit-maybe-release",
+ schema={
+ Required("phase"): str,
+ },
+)
+def build_ship_it_maybe_release_payload(config, task, task_def):
+ # expect branch name, including path
+ branch = config.params["head_repository"][len("https://hg.mozilla.org/") :]
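+ # e.g. "https://hg.mozilla.org/releases/mozilla-beta" -> "releases/mozilla-beta"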
+ # 'version' is e.g. '71.0b13' (app_version doesn't have beta number)
+ version = config.params["version"]
+
+ task_def["payload"] = {
+ "product": task["shipping-product"],
+ "branch": branch,
+ "phase": task["worker"]["phase"],
+ "version": version,
+ "cron_revision": config.params["head_rev"],
+ }
+
+
+@payload_builder(
+ "push-addons",
+ schema={
+ Required("channel"): Any("listed", "unlisted"),
+ Required("upstream-artifacts"): [
+ {
+ Required("taskId"): taskref_or_string,
+ Required("taskType"): str,
+ Required("paths"): [str],
+ }
+ ],
+ },
+)
+def build_push_addons_payload(config, task, task_def):
+ worker = task["worker"]
+
+ task_def["payload"] = {
+ "channel": worker["channel"],
+ "upstreamArtifacts": worker["upstream-artifacts"],
+ }
+
+
+@payload_builder(
+ "treescript",
+ schema={
+ Required("tags"): [Any("buildN", "release", None)],
+ Required("bump"): bool,
+ Optional("bump-files"): [str],
+ Optional("repo-param-prefix"): str,
+ Optional("dontbuild"): bool,
+ Optional("ignore-closed-tree"): bool,
+ Optional("force-dry-run"): bool,
+ Optional("push"): bool,
+ Optional("source-repo"): str,
+ Optional("ssh-user"): str,
+ Optional("l10n-bump-info"): {
+ Required("name"): str,
+ Required("path"): str,
+ Required("version-path"): str,
+ Optional("l10n-repo-url"): str,
+ Optional("ignore-config"): object,
+ Required("platform-configs"): [
+ {
+ Required("platforms"): [str],
+ Required("path"): str,
+ Optional("format"): str,
+ }
+ ],
+ },
+ Optional("merge-info"): object,
+ },
+)
+def build_treescript_payload(config, task, task_def):
+ worker = task["worker"]
+ release_config = get_release_config(config)
+
+ task_def["payload"] = {"actions": []}
+ actions = task_def["payload"]["actions"]
+ if worker["tags"]:
+ tag_names = []
+ product = task["shipping-product"].upper()
+ version = release_config["version"].replace(".", "_")
+ buildnum = release_config["build_number"]
+ if "buildN" in worker["tags"]:
+ tag_names.extend(
+ [
+ f"{product}_{version}_BUILD{buildnum}",
+ ]
+ )
+ if "release" in worker["tags"]:
+ tag_names.extend([f"{product}_{version}_RELEASE"])
+ tag_info = {
+ "tags": tag_names,
+ "revision": config.params[
+ "{}head_rev".format(worker.get("repo-param-prefix", ""))
+ ],
+ }
+ task_def["payload"]["tag_info"] = tag_info
+ actions.append("tag")
+
+ if worker["bump"]:
+ if not worker["bump-files"]:
+ raise Exception("Version Bump requested without bump-files")
+
+ bump_info = {}
+ bump_info["next_version"] = release_config["next_version"]
+ bump_info["files"] = worker["bump-files"]
+ task_def["payload"]["version_bump_info"] = bump_info
+ actions.append("version_bump")
+
+ if worker.get("l10n-bump-info"):
+ l10n_bump_info = {}
+ for k, v in worker["l10n-bump-info"].items():
+ l10n_bump_info[k.replace("-", "_")] = v
+ task_def["payload"]["l10n_bump_info"] = [l10n_bump_info]
+ actions.append("l10n_bump")
+
+ if worker.get("merge-info"):
+ merge_info = {
+ merge_param_name.replace("-", "_"): merge_param_value
+ for merge_param_name, merge_param_value in worker["merge-info"].items()
+ if merge_param_name != "version-files"
+ }
+ merge_info["version_files"] = [
+ {
+ file_param_name.replace("-", "_"): file_param_value
+ for file_param_name, file_param_value in file_entry.items()
+ }
+ for file_entry in worker["merge-info"]["version-files"]
+ ]
+ task_def["payload"]["merge_info"] = merge_info
+ actions.append("merge_day")
+
+ if worker["push"]:
+ actions.append("push")
+
+ if worker.get("force-dry-run"):
+ task_def["payload"]["dry_run"] = True
+
+ if worker.get("dontbuild"):
+ task_def["payload"]["dontbuild"] = True
+
+ if worker.get("ignore-closed-tree") is not None:
+ task_def["payload"]["ignore_closed_tree"] = worker["ignore-closed-tree"]
+
+ if worker.get("source-repo"):
+ task_def["payload"]["source_repo"] = worker["source-repo"]
+
+ if worker.get("ssh-user"):
+ task_def["payload"]["ssh_user"] = worker["ssh-user"]
+
+
+@payload_builder(
+ "invalid",
+ schema={
+ # an invalid task is one which should never actually be created; this is used in
+ # release automation on branches where the task just doesn't make sense
+ Extra: object,
+ },
+)
+def build_invalid_payload(config, task, task_def):
+ task_def["payload"] = "invalid task - should never be created"
+
+
+@payload_builder(
+ "always-optimized",
+ schema={
+ Extra: object,
+ },
+)
+@payload_builder("succeed", schema={})
+def build_dummy_payload(config, task, task_def):
+ task_def["payload"] = {}
+
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def set_implementation(config, tasks):
+ """
+ Set the worker implementation based on the worker-type alias.
+ """
+ for task in tasks:
+ worker = task.setdefault("worker", {})
+ if "implementation" in task["worker"]:
+ yield task
+ continue
+
+ impl, os = worker_type_implementation(
+ config.graph_config, config.params, task["worker-type"]
+ )
+
+ tags = task.setdefault("tags", {})
+ tags["worker-implementation"] = impl
+ if os:
+ tags["os"] = os
+
+ worker["implementation"] = impl
+ if os:
+ worker["os"] = os
+
+ yield task
+
+
+@transforms.add
+def set_defaults(config, tasks):
+ for task in tasks:
+ task.setdefault("shipping-phase", None)
+ task.setdefault("shipping-product", None)
+ task.setdefault("always-target", False)
+ task.setdefault("optimization", None)
+ task.setdefault("use-sccache", False)
+
+ worker = task["worker"]
+ if worker["implementation"] in ("docker-worker",):
+ worker.setdefault("chain-of-trust", False)
+ worker.setdefault("taskcluster-proxy", False)
+ worker.setdefault("allow-ptrace", True)
+ worker.setdefault("loopback-video", False)
+ worker.setdefault("loopback-audio", False)
+ worker.setdefault("docker-in-docker", False)
+ worker.setdefault("privileged", False)
+ worker.setdefault("volumes", [])
+ worker.setdefault("env", {})
+ if "caches" in worker:
+ for c in worker["caches"]:
+ c.setdefault("skip-untrusted", False)
+ elif worker["implementation"] == "generic-worker":
+ worker.setdefault("env", {})
+ worker.setdefault("os-groups", [])
+ if worker["os-groups"] and worker["os"] != "windows":
+ raise Exception(
+ "os-groups feature of generic-worker is only supported on "
+ "Windows, not on {}".format(worker["os"])
+ )
+ worker.setdefault("chain-of-trust", False)
+ elif worker["implementation"] in (
+ "scriptworker-signing",
+ "beetmover",
+ "beetmover-push-to-release",
+ "beetmover-maven",
+ "beetmover-import-from-gcs-to-artifact-registry",
+ ):
+ worker.setdefault("max-run-time", 600)
+ elif worker["implementation"] == "push-apk":
+ worker.setdefault("commit", False)
+
+ yield task
+
+
+@transforms.add
+def setup_raptor(config, tasks):
+ """Add options that are specific to raptor jobs (identified by suite=raptor).
+
+ This variant uses a separate set of transforms for manipulating the tests at the
+ task-level. Currently only used for setting the taskcluster proxy setting and
+ the scopes required for perftest secrets.
+ """
+ from gecko_taskgraph.transforms.test.raptor import (
+ task_transforms as raptor_transforms,
+ )
+
+ for task in tasks:
+ if task.get("extra", {}).get("suite", "") != "raptor":
+ yield task
+ continue
+
+ yield from raptor_transforms(config, [task])
+
+
+@transforms.add
+def task_name_from_label(config, tasks):
+ for task in tasks:
+ taskname = task.pop("name", None)
+ if "label" not in task:
+ if taskname is None:
+ raise Exception("task has neither a name nor a label")
+ task["label"] = "{}-{}".format(config.kind, taskname)
+ yield task
+
+
+UNSUPPORTED_SHIPPING_PRODUCT_ERROR = """\
+The shipping product {product} is not in the list of configured products in
+`taskcluster/ci/config.yml`.
+"""
+
+
+def validate_shipping_product(config, product):
+ if product not in config.graph_config["release-promotion"]["products"]:
+ raise Exception(UNSUPPORTED_SHIPPING_PRODUCT_ERROR.format(product=product))
+
+
+@transforms.add
+def validate(config, tasks):
+ for task in tasks:
+ validate_schema(
+ task_description_schema,
+ task,
+ "In task {!r}:".format(task.get("label", "?no-label?")),
+ )
+ validate_schema(
+ payload_builders[task["worker"]["implementation"]].schema,
+ task["worker"],
+ "In task.run {!r}:".format(task.get("label", "?no-label?")),
+ )
+ if task["shipping-product"] is not None:
+ validate_shipping_product(config, task["shipping-product"])
+ yield task
+
+
+@index_builder("generic")
+def add_generic_index_routes(config, task):
+ index = task.get("index")
+ routes = task.setdefault("routes", [])
+
+ verify_index(config, index)
+
+ subs = config.params.copy()
+ subs["job-name"] = index["job-name"]
+ subs["build_date_long"] = time.strftime(
+ "%Y.%m.%d.%Y%m%d%H%M%S", time.gmtime(config.params["build_date"])
+ )
+ subs["build_date"] = time.strftime(
+ "%Y.%m.%d", time.gmtime(config.params["build_date"])
+ )
+ subs["product"] = index["product"]
+ subs["trust-domain"] = config.graph_config["trust-domain"]
+ subs["branch_rev"] = get_branch_rev(config)
+
+ project = config.params.get("project")
+
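+    # Each template expands to an index route along the lines of
+    # "index.gecko.v2.<project>.latest.<product>.<job-name>" (illustrative).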
+ for tpl in V2_ROUTE_TEMPLATES:
+ routes.append(tpl.format(**subs))
+
+ # Additionally alias all tasks for "trunk" repos into a common
+ # namespace.
+ if project and project in TRUNK_PROJECTS:
+ for tpl in V2_TRUNK_ROUTE_TEMPLATES:
+ routes.append(tpl.format(**subs))
+
+ return task
+
+
+@index_builder("shippable")
+def add_shippable_index_routes(config, task):
+ index = task.get("index")
+ routes = task.setdefault("routes", [])
+
+ verify_index(config, index)
+
+ subs = config.params.copy()
+ subs["job-name"] = index["job-name"]
+ subs["build_date_long"] = time.strftime(
+ "%Y.%m.%d.%Y%m%d%H%M%S", time.gmtime(config.params["build_date"])
+ )
+ subs["build_date"] = time.strftime(
+ "%Y.%m.%d", time.gmtime(config.params["build_date"])
+ )
+ subs["product"] = index["product"]
+ subs["trust-domain"] = config.graph_config["trust-domain"]
+ subs["branch_rev"] = get_branch_rev(config)
+
+ for tpl in V2_SHIPPABLE_TEMPLATES:
+ routes.append(tpl.format(**subs))
+
+ # Also add routes for en-US
+ task = add_shippable_l10n_index_routes(config, task, force_locale="en-US")
+
+ return task
+
+
+@index_builder("shippable-with-multi-l10n")
+def add_shippable_multi_index_routes(config, task):
+ task = add_shippable_index_routes(config, task)
+ task = add_l10n_index_routes(config, task, force_locale="multi")
+ return task
+
+
+@index_builder("l10n")
+def add_l10n_index_routes(config, task, force_locale=None):
+ index = task.get("index")
+ routes = task.setdefault("routes", [])
+
+ verify_index(config, index)
+
+ subs = config.params.copy()
+ subs["job-name"] = index["job-name"]
+ subs["build_date_long"] = time.strftime(
+ "%Y.%m.%d.%Y%m%d%H%M%S", time.gmtime(config.params["build_date"])
+ )
+ subs["product"] = index["product"]
+ subs["trust-domain"] = config.graph_config["trust-domain"]
+ subs["branch_rev"] = get_branch_rev(config)
+
+ locales = task["attributes"].get(
+ "chunk_locales", task["attributes"].get("all_locales")
+ )
+    # Some tasks have only one locale set
+ if task["attributes"].get("locale"):
+ locales = [task["attributes"]["locale"]]
+
+ if force_locale:
+ # Used for en-US and multi-locale
+ locales = [force_locale]
+
+ if not locales:
+ raise Exception("Error: Unable to use l10n index for tasks without locales")
+
+ # If there are too many locales, we can't write a route for all of them
+ # See Bug 1323792
+ if len(locales) > 18: # 18 * 3 = 54, max routes = 64
+ return task
+
+ for locale in locales:
+ for tpl in V2_L10N_TEMPLATES:
+ routes.append(tpl.format(locale=locale, **subs))
+
+ return task
+
+
+@index_builder("shippable-l10n")
+def add_shippable_l10n_index_routes(config, task, force_locale=None):
+ index = task.get("index")
+ routes = task.setdefault("routes", [])
+
+ verify_index(config, index)
+
+ subs = config.params.copy()
+ subs["job-name"] = index["job-name"]
+ subs["build_date_long"] = time.strftime(
+ "%Y.%m.%d.%Y%m%d%H%M%S", time.gmtime(config.params["build_date"])
+ )
+ subs["product"] = index["product"]
+ subs["trust-domain"] = config.graph_config["trust-domain"]
+ subs["branch_rev"] = get_branch_rev(config)
+
+ locales = task["attributes"].get(
+ "chunk_locales", task["attributes"].get("all_locales")
+ )
+    # Some tasks have only one locale set
+ if task["attributes"].get("locale"):
+ locales = [task["attributes"]["locale"]]
+
+ if force_locale:
+ # Used for en-US and multi-locale
+ locales = [force_locale]
+
+ if not locales:
+ raise Exception("Error: Unable to use l10n index for tasks without locales")
+
+ # If there are too many locales, we can't write a route for all of them
+ # See Bug 1323792
+ if len(locales) > 18: # 18 * 3 = 54, max routes = 64
+ return task
+
+ for locale in locales:
+ for tpl in V2_SHIPPABLE_L10N_TEMPLATES:
+ routes.append(tpl.format(locale=locale, **subs))
+
+ return task
+
+
+def add_geckoview_index_routes(config, task):
+ index = task.get("index")
+ routes = task.setdefault("routes", [])
+ geckoview_version = _compute_geckoview_version(
+ config.params["app_version"], config.params["moz_build_date"]
+ )
+
+ subs = {
+ "geckoview-version": geckoview_version,
+ "job-name": index["job-name"],
+ "product": index["product"],
+ "project": config.params["project"],
+ "trust-domain": config.graph_config["trust-domain"],
+ }
+ routes.append(V2_GECKOVIEW_RELEASE.format(**subs))
+
+ return task
+
+
+@index_builder("android-shippable")
+def add_android_shippable_index_routes(config, task):
+ task = add_shippable_index_routes(config, task)
+ task = add_geckoview_index_routes(config, task)
+
+ return task
+
+
+@index_builder("android-shippable-with-multi-l10n")
+def add_android_shippable_multi_index_routes(config, task):
+ task = add_shippable_multi_index_routes(config, task)
+ task = add_geckoview_index_routes(config, task)
+
+ return task
+
+
+@transforms.add
+def add_index_routes(config, tasks):
+ for task in tasks:
+ index = task.get("index", {})
+
+ # The default behavior is to rank tasks according to their tier
+ extra_index = task.setdefault("extra", {}).setdefault("index", {})
+ rank = index.get("rank", "by-tier")
+
+ if rank == "by-tier":
+ # rank is zero for non-tier-1 tasks and based on pushid for others;
+ # this sorts tier-{2,3} builds below tier-1 in the index
+ tier = task.get("treeherder", {}).get("tier", 3)
+ extra_index["rank"] = 0 if tier > 1 else int(config.params["build_date"])
+ elif rank == "build_date":
+ extra_index["rank"] = int(config.params["build_date"])
+ else:
+ extra_index["rank"] = rank
+
+ if not index:
+ yield task
+ continue
+
+ index_type = index.get("type", "generic")
+ task = index_builders[index_type](config, task)
+
+ del task["index"]
+ yield task
+
+
+@transforms.add
+def try_task_config_env(config, tasks):
+ """Set environment variables in the task."""
+ env = config.params["try_task_config"].get("env")
+ if not env:
+ yield from tasks
+ return
+
+ # Find all implementations that have an 'env' key.
+ implementations = {
+ name
+ for name, builder in payload_builders.items()
+ if "env" in builder.schema.schema
+ }
+ for task in tasks:
+ if task["worker"]["implementation"] in implementations:
+ task["worker"]["env"].update(env)
+ yield task
+
+
+@transforms.add
+def try_task_config_chemspill_prio(config, tasks):
+ """Increase the priority from lowest and very-low -> low, but leave others unchanged."""
+ chemspill_prio = config.params["try_task_config"].get("chemspill-prio")
+ if not chemspill_prio:
+ yield from tasks
+ return
+
+ for task in tasks:
+ if task["priority"] in ("lowest", "very-low"):
+ task["priority"] = "low"
+ yield task
+
+
+@transforms.add
+def try_task_config_routes(config, tasks):
+ """Set routes in the task."""
+ routes = config.params["try_task_config"].get("routes")
+ for task in tasks:
+ if routes:
+ task_routes = task.setdefault("routes", [])
+ task_routes.extend(routes)
+ yield task
+
+
+@transforms.add
+def set_task_and_artifact_expiry(config, jobs):
+ """Set the default expiry for tasks and their artifacts.
+
+ These values are read from ci/config.yml
+ """
+ now = datetime.datetime.utcnow()
+ # We don't want any configuration leading to anything with an expiry longer
+ # than 28 days on try.
+ cap = "28 days" if is_try(config.params) else None
+ cap_from_now = fromNow(cap, now) if cap else None
+ if cap:
+ for policy, expires in config.graph_config["expiration-policy"]["by-project"][
+ "try"
+ ].items():
+ if fromNow(expires, now) > cap_from_now:
+ raise Exception(
+ f'expiration-policy "{policy}" is larger than {cap} '
+ f'for {config.params["project"]}'
+ )
+ for job in jobs:
+ expires = get_expiration(config, job.get("expiration-policy", "default"))
+ job_expiry = job.setdefault("expires-after", expires)
+ job_expiry_from_now = fromNow(job_expiry, now)
+ if cap and job_expiry_from_now > cap_from_now:
+ job_expiry, job_expiry_from_now = cap, cap_from_now
+ # If the task has no explicit expiration-policy, but has an expires-after,
+ # we use that as the default artifact expiry.
+ artifact_expires = expires if "expiration-policy" in job else job_expiry
+
+ for artifact in job["worker"].get("artifacts", ()):
+ artifact_expiry = artifact.setdefault("expires-after", artifact_expires)
+
+ # By using > instead of >=, there's a chance of mismatch
+ # where the artifact expires sooner than the task.
+ # There is no chance, however, of mismatch where artifacts
+ # expire _after_ the task.
+ # Currently this leads to some build tasks having logs
+ # that expire in 1 year while the task expires in 3 years.
+ if fromNow(artifact_expiry, now) > job_expiry_from_now:
+ artifact["expires-after"] = job_expiry
+
+ yield job
+
+
+@transforms.add
+def build_task(config, tasks):
+ for task in tasks:
+ level = str(config.params["level"])
+
+ task_worker_type = task["worker-type"]
+ worker_overrides = config.params["try_task_config"].get("worker-overrides", {})
+ if task_worker_type in worker_overrides:
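+            # worker-overrides values are full worker pools in
+            # "provisionerId/workerType" form.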
+ worker_pool = worker_overrides[task_worker_type]
+ provisioner_id, worker_type = worker_pool.split("/", 1)
+ else:
+ provisioner_id, worker_type = get_worker_type(
+ config.graph_config,
+ config.params,
+ task_worker_type,
+ )
+ task["worker-type"] = "/".join([provisioner_id, worker_type])
+ project = config.params["project"]
+
+ routes = task.get("routes", [])
+ scopes = [
+ s.format(level=level, project=project) for s in task.get("scopes", [])
+ ]
+
+ # set up extra
+ extra = task.get("extra", {})
+ extra["parent"] = {"task-reference": "<decision>"}
+ task_th = task.get("treeherder")
+ if task_th:
+ extra.setdefault("treeherder-platform", task_th["platform"])
+ treeherder = extra.setdefault("treeherder", {})
+
+ machine_platform, collection = task_th["platform"].split("/", 1)
+ treeherder["machine"] = {"platform": machine_platform}
+ treeherder["collection"] = {collection: True}
+
+ group_names = config.graph_config["treeherder"]["group-names"]
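+            # Symbols may be grouped, e.g. "M(1)" splits into group "M" and
+            # symbol "1"; ungrouped symbols split to the placeholder group "?".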
+ groupSymbol, symbol = split_symbol(task_th["symbol"])
+ if groupSymbol != "?":
+ treeherder["groupSymbol"] = groupSymbol
+ if groupSymbol not in group_names:
+ path = os.path.join(config.path, task.get("job-from", ""))
+ raise Exception(UNKNOWN_GROUP_NAME.format(groupSymbol, path))
+ treeherder["groupName"] = group_names[groupSymbol]
+ treeherder["symbol"] = symbol
+ if len(symbol) > 25 or len(groupSymbol) > 25:
+ raise RuntimeError(
+ "Treeherder group and symbol names must not be longer than "
+ "25 characters: {} (see {})".format(
+ task_th["symbol"],
+ TC_TREEHERDER_SCHEMA_URL,
+ )
+ )
+ treeherder["jobKind"] = task_th["kind"]
+ treeherder["tier"] = task_th["tier"]
+
+ branch_rev = get_branch_rev(config)
+
+ routes.append(
+ "{}.v2.{}.{}".format(
+ TREEHERDER_ROUTE_ROOT,
+ config.params["project"],
+ branch_rev,
+ )
+ )
+
+ if "deadline-after" not in task:
+ task["deadline-after"] = "1 day"
+
+ if "priority" not in task:
+ task["priority"] = get_default_priority(
+ config.graph_config, config.params["project"]
+ )
+
+ tags = task.get("tags", {})
+ attributes = task.get("attributes", {})
+
+ tags.update(
+ {
+ "createdForUser": config.params["owner"],
+ "kind": config.kind,
+ "label": task["label"],
+ "retrigger": "true" if attributes.get("retrigger", False) else "false",
+ }
+ )
+
+ task_def = {
+ "provisionerId": provisioner_id,
+ "workerType": worker_type,
+ "routes": routes,
+ "created": {"relative-datestamp": "0 seconds"},
+ "deadline": {"relative-datestamp": task["deadline-after"]},
+ "expires": {"relative-datestamp": task["expires-after"]},
+ "scopes": scopes,
+ "metadata": {
+ "description": task["description"],
+ "name": task["label"],
+ "owner": config.params["owner"],
+ "source": config.params.file_url(config.path, pretty=True),
+ },
+ "extra": extra,
+ "tags": tags,
+ "priority": task["priority"],
+ }
+
+ if task.get("requires", None):
+ task_def["requires"] = task["requires"]
+
+ if task_th:
+ # link back to treeherder in description
+ th_job_link = (
+ "https://treeherder.mozilla.org/#/jobs?repo={}&revision={}&selectedTaskRun=<self>"
+ ).format(config.params["project"], branch_rev)
+ task_def["metadata"]["description"] = {
+ "task-reference": "{description} ([Treeherder job]({th_job_link}))".format(
+ description=task_def["metadata"]["description"],
+ th_job_link=th_job_link,
+ )
+ }
+
+ # add the payload and adjust anything else as required (e.g., scopes)
+ payload_builders[task["worker"]["implementation"]].builder(
+ config, task, task_def
+ )
+
+ # Resolve run-on-projects
+ build_platform = attributes.get("build_platform")
+ resolve_keyed_by(
+ task,
+ "run-on-projects",
+ item_name=task["label"],
+ **{"build-platform": build_platform},
+ )
+ attributes["run_on_projects"] = task.get("run-on-projects", ["all"])
+ attributes["always_target"] = task["always-target"]
+ # This logic is here since downstream tasks don't always match their
+ # upstream dependency's shipping_phase.
+ # A text_type task['shipping-phase'] takes precedence, then
+ # an existing attributes['shipping_phase'], then fall back to None.
+ if task.get("shipping-phase") is not None:
+ attributes["shipping_phase"] = task["shipping-phase"]
+ else:
+ attributes.setdefault("shipping_phase", None)
+ # shipping_product will always match the upstream task's
+ # shipping_product, so a pre-set existing attributes['shipping_product']
+ # takes precedence over task['shipping-product']. However, make sure
+ # we don't have conflicting values.
+ if task.get("shipping-product") and attributes.get("shipping_product") not in (
+ None,
+ task["shipping-product"],
+ ):
+ raise Exception(
+ "{} shipping_product {} doesn't match task shipping-product {}!".format(
+ task["label"],
+ attributes["shipping_product"],
+ task["shipping-product"],
+ )
+ )
+ attributes.setdefault("shipping_product", task["shipping-product"])
+
+ # Set MOZ_AUTOMATION on all jobs.
+ if task["worker"]["implementation"] in (
+ "generic-worker",
+ "docker-worker",
+ ):
+ payload = task_def.get("payload")
+ if payload:
+ env = payload.setdefault("env", {})
+ env["MOZ_AUTOMATION"] = "1"
+
+ dependencies = task.get("dependencies", {})
+ if_dependencies = task.get("if-dependencies", [])
+ if if_dependencies:
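+            # Replace dependency aliases (keys of 'dependencies') with the
+            # concrete task labels they point at.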
+ for i, dep in enumerate(if_dependencies):
+ if dep in dependencies:
+ if_dependencies[i] = dependencies[dep]
+ continue
+
+ raise Exception(
+ "{label} specifies '{dep}' in if-dependencies, "
+ "but {dep} is not a dependency!".format(
+ label=task["label"], dep=dep
+ )
+ )
+
+ yield {
+ "label": task["label"],
+ "description": task["description"],
+ "task": task_def,
+ "dependencies": dependencies,
+ "if-dependencies": if_dependencies,
+ "soft-dependencies": task.get("soft-dependencies", []),
+ "attributes": attributes,
+ "optimization": task.get("optimization", None),
+ }
+
+
+@transforms.add
+def chain_of_trust(config, tasks):
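+    """Record the in-tree docker image a task runs in under its
+    chainOfTrust.inputs, so chain-of-trust verification can tie the task
+    back to the image that produced its environment."""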
+ for task in tasks:
+ if task["task"].get("payload", {}).get("features", {}).get("chainOfTrust"):
+ image = task.get("dependencies", {}).get("docker-image")
+ if image:
+ cot = (
+ task["task"].setdefault("extra", {}).setdefault("chainOfTrust", {})
+ )
+ cot.setdefault("inputs", {})["docker-image"] = {
+ "task-reference": "<docker-image>"
+ }
+ yield task
+
+
+@transforms.add
+def check_task_identifiers(config, tasks):
+ """Ensures that all tasks have well defined identifiers:
+ ``^[a-zA-Z0-9_-]{1,38}$``
+ """
+ e = re.compile("^[a-zA-Z0-9_-]{1,38}$")
+ for task in tasks:
+ for attrib in ("workerType", "provisionerId"):
+ if not e.match(task["task"][attrib]):
+ raise Exception(
+ "task {}.{} is not a valid identifier: {}".format(
+ task["label"], attrib, task["task"][attrib]
+ )
+ )
+ yield task
+
+
+@transforms.add
+def check_task_dependencies(config, tasks):
+ """Ensures that tasks don't have more than 100 dependencies."""
+ for task in tasks:
+ if len(task["dependencies"]) > MAX_DEPENDENCIES:
+ raise Exception(
+ "task {}/{} has too many dependencies ({} > {})".format(
+ config.kind,
+ task["label"],
+ len(task["dependencies"]),
+ MAX_DEPENDENCIES,
+ )
+ )
+ yield task
+
+
+def check_caches_are_volumes(task):
+ """Ensures that all cache paths are defined as volumes.
+
+ Caches and volumes are the only filesystem locations whose content
+ isn't defined by the Docker image itself. Some caches are optional
+    depending on the job environment. We want paths that are potentially
+    caches to behave the same regardless of whether a cache is used. To
+    help enforce this, we require that all paths used as caches be
+    declared as Docker volumes. This check won't catch all offenders,
+    but it is better than nothing.
+ """
+ volumes = {s for s in task["worker"]["volumes"]}
+ paths = {c["mount-point"] for c in task["worker"].get("caches", [])}
+ missing = paths - volumes
+
+ if not missing:
+ return
+
+ raise Exception(
+ "task %s (image %s) has caches that are not declared as "
+ "Docker volumes: %s "
+ "(have you added them as VOLUMEs in the Dockerfile?)"
+ % (task["label"], task["worker"]["docker-image"], ", ".join(sorted(missing)))
+ )
+
+
+def check_required_volumes(task):
+ """
+ Ensures that all paths that are required to be volumes are defined as volumes.
+
+    In Docker, performance when writing to files is poor in directories not
+    marked as volumes. Ensure that paths that are often written to are marked
+    as volumes.
+ """
+ volumes = set(task["worker"]["volumes"])
+ paths = set(task["worker"].get("required-volumes", []))
+ missing = paths - volumes
+
+ if not missing:
+ return
+
+ raise Exception(
+ "task %s (image %s) has paths that should be volumes for peformance "
+ "that are not declared as Docker volumes: %s "
+ "(have you added them as VOLUMEs in the Dockerfile?)"
+ % (task["label"], task["worker"]["docker-image"], ", ".join(sorted(missing)))
+ )
+
+
+@transforms.add
+def check_run_task_caches(config, tasks):
+ """Audit for caches requiring run-task.
+
+ run-task manages caches in certain ways. If a cache managed by run-task
+ is used by a non run-task task, it could cause problems. So we audit for
+ that and make sure certain cache names are exclusive to run-task.
+
+ IF YOU ARE TEMPTED TO MAKE EXCLUSIONS TO THIS POLICY, YOU ARE LIKELY
+ CONTRIBUTING TECHNICAL DEBT AND WILL HAVE TO SOLVE MANY OF THE PROBLEMS
+ THAT RUN-TASK ALREADY SOLVES. THINK LONG AND HARD BEFORE DOING THAT.
+ """
+ re_reserved_caches = re.compile(
+ """^
+ (checkouts|tooltool-cache)
+ """,
+ re.VERBOSE,
+ )
+
+ re_sparse_checkout_cache = re.compile("^checkouts-sparse")
+
+ cache_prefix = "{trust_domain}-level-{level}-".format(
+ trust_domain=config.graph_config["trust-domain"],
+ level=config.params["level"],
+ )
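+    # e.g. "gecko-level-3-" for the "gecko" trust domain at level 3.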
+
+ suffix = _run_task_suffix()
+
+ for task in tasks:
+ payload = task["task"].get("payload", {})
+ command = payload.get("command") or [""]
+
+ main_command = command[0] if isinstance(command[0], str) else ""
+ run_task = main_command.endswith("run-task")
+
+ require_sparse_cache = False
+ have_sparse_cache = False
+
+ if run_task:
+ for arg in command[1:]:
+ if not isinstance(arg, str):
+ continue
+
+ if arg == "--":
+ break
+
+ if arg.startswith("--gecko-sparse-profile"):
+ if "=" not in arg:
+ raise Exception(
+ "{} is specifying `--gecko-sparse-profile` to run-task "
+ "as two arguments. Unable to determine if the sparse "
+ "profile exists.".format(task["label"])
+ )
+ _, sparse_profile = arg.split("=", 1)
+ if not os.path.exists(os.path.join(GECKO, sparse_profile)):
+ raise Exception(
+ "{} is using non-existant sparse profile {}.".format(
+ task["label"], sparse_profile
+ )
+ )
+ require_sparse_cache = True
+ break
+
+ for cache in payload.get("cache", {}):
+ if not cache.startswith(cache_prefix):
+ raise Exception(
+ "{} is using a cache ({}) which is not appropriate "
+ "for its trust-domain and level. It should start with {}.".format(
+ task["label"], cache, cache_prefix
+ )
+ )
+
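+            # Strip the prefix so the reserved-name checks below run against
+            # the bare cache name.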
+ cache = cache[len(cache_prefix) :]
+
+ if re_sparse_checkout_cache.match(cache):
+ have_sparse_cache = True
+
+ if not re_reserved_caches.match(cache):
+ continue
+
+ if not run_task:
+ raise Exception(
+ "%s is using a cache (%s) reserved for run-task "
+ "change the task to use run-task or use a different "
+ "cache name" % (task["label"], cache)
+ )
+
+ if not cache.endswith(suffix):
+ raise Exception(
+ "%s is using a cache (%s) reserved for run-task "
+ "but the cache name is not dependent on the contents "
+ "of run-task; change the cache name to conform to the "
+ "naming requirements" % (task["label"], cache)
+ )
+
+ if require_sparse_cache and not have_sparse_cache:
+ raise Exception(
+ "%s is using a sparse checkout but not using "
+ "a sparse checkout cache; change the checkout "
+ "cache name so it is sparse aware" % task["label"]
+ )
+
+ yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/test/__init__.py b/taskcluster/gecko_taskgraph/transforms/test/__init__.py
new file mode 100644
index 0000000000..92704bf18c
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/test/__init__.py
@@ -0,0 +1,544 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+These transforms construct a task description to run the given test, based on a
+test description. The implementation here is shared among all test kinds, but
+contains specific support for how we run tests in Gecko (via mozharness,
+invoked in particular ways).
+
+This is a good place to translate a test-description option such as
+`single-core: true` to the implementation of that option in a task description
+(worker options, mozharness commandline, environment variables, etc.)
+
+The test description should be fully formed by the time it reaches these
+transforms, and these transforms should not embody any specific knowledge about
+what should run where. This is the wrong place for special-casing platforms,
+for example; use `all_tests.py` instead.
+"""
+
+
+import logging
+from importlib import import_module
+
+from mozbuild.schedules import INCLUSIVE_COMPONENTS
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import Schema, optionally_keyed_by, resolve_keyed_by
+from voluptuous import Any, Exclusive, Optional, Required
+
+from gecko_taskgraph.optimize.schema import OptimizationSchema
+from gecko_taskgraph.transforms.test.other import get_mobile_project
+from gecko_taskgraph.util.chunking import manifest_loaders
+
+logger = logging.getLogger(__name__)
+transforms = TransformSequence()
+
+
+# Schema for a test description
+#
+# *****WARNING*****
+#
+# This is a great place for baffling cruft to accumulate, and that makes
+# everyone move more slowly. Be considerate of your fellow hackers!
+# See the warnings in taskcluster/docs/how-tos.rst
+#
+# *****WARNING*****
+test_description_schema = Schema(
+ {
+ # description of the suite, for the task metadata
+ Required("description"): str,
+ # test suite category and name
+ Optional("suite"): Any(
+ optionally_keyed_by("variant", str),
+ {
+ Optional("category"): str,
+ Optional("name"): optionally_keyed_by("variant", str),
+ },
+ ),
+ # base work directory used to set up the task.
+ Optional("workdir"): optionally_keyed_by("test-platform", Any(str, "default")),
+ # the name by which this test suite is addressed in try syntax; defaults to
+ # the test-name. This will translate to the `unittest_try_name` or
+ # `talos_try_name` attribute.
+ Optional("try-name"): str,
+ # additional tags to mark up this type of test
+ Optional("tags"): {str: object},
+ # the symbol, or group(symbol), under which this task should appear in
+ # treeherder.
+ Required("treeherder-symbol"): str,
+ # the value to place in task.extra.treeherder.machine.platform; ideally
+ # this is the same as build-platform, and that is the default, but in
+ # practice it's not always a match.
+ Optional("treeherder-machine-platform"): str,
+ # attributes to appear in the resulting task (later transforms will add the
+ # common attributes)
+ Optional("attributes"): {str: object},
+ # relative path (from config.path) to the file task was defined in
+ Optional("job-from"): str,
+ # The `run_on_projects` attribute, defaulting to "all". This dictates the
+ # projects on which this task should be included in the target task set.
+ # See the attributes documentation for details.
+ #
+ # Note that the special case 'built-projects', the default, uses the parent
+ # build task's run-on-projects, meaning that tests run only on platforms
+ # that are built.
+ Optional("run-on-projects"): optionally_keyed_by(
+ "app",
+ "subtest",
+ "test-platform",
+ "test-name",
+ "variant",
+ Any([str], "built-projects"),
+ ),
+ # When set only run on projects where the build would already be running.
+ # This ensures tasks where this is True won't be the cause of the build
+ # running on a project it otherwise wouldn't have.
+ Optional("built-projects-only"): bool,
+ # the sheriffing tier for this task (default: set based on test platform)
+ Optional("tier"): optionally_keyed_by(
+ "test-platform", "variant", "app", "subtest", Any(int, "default")
+ ),
+ # number of chunks to create for this task. This can be keyed by test
+ # platform by passing a dictionary in the `by-test-platform` key. If the
+ # test platform is not found, the key 'default' will be tried.
+ Required("chunks"): optionally_keyed_by(
+ "test-platform", "variant", Any(int, "dynamic")
+ ),
+ # Custom 'test_manifest_loader' to use, overriding the one configured in the
+ # parameters. When 'null', no test chunking will be performed. Can also
+ # be used to disable "manifest scheduling".
+ Optional("test-manifest-loader"): Any(None, *list(manifest_loaders)),
+ # the time (with unit) after which this task is deleted; default depends on
+ # the branch (see below)
+ Optional("expires-after"): str,
+ # The different configurations that should be run against this task, defined
+ # in the TEST_VARIANTS object in the variant.py transforms.
+ Optional("variants"): [str],
+ # Whether to run this task without any variants applied.
+ Required("run-without-variant"): optionally_keyed_by("test-platform", bool),
+ # The EC2 instance size to run these tests on.
+ Required("instance-size"): optionally_keyed_by(
+ "test-platform", Any("default", "large", "xlarge")
+ ),
+ # type of virtualization or hardware required by test.
+ Required("virtualization"): optionally_keyed_by(
+ "test-platform", Any("virtual", "virtual-with-gpu", "hardware")
+ ),
+ # Whether the task requires loopback audio or video (whatever that may mean
+ # on the platform)
+ Required("loopback-audio"): bool,
+ Required("loopback-video"): bool,
+ # Whether the test can run using a software GL implementation on Linux
+ # using the GL compositor. May not be used with "legacy" sized instances
+ # due to poor LLVMPipe performance (bug 1296086). Defaults to true for
+ # unit tests on linux platforms and false otherwise
+ Optional("allow-software-gl-layers"): bool,
+ # For tasks that will run in docker-worker, this is the
+ # name of the docker image or in-tree docker image to run the task in. If
+ # in-tree, then a dependency will be created automatically. This is
+ # generally `desktop-test`, or an image that acts an awful lot like it.
+ Required("docker-image"): optionally_keyed_by(
+ "test-platform",
+ Any(
+ # a raw Docker image path (repo/image:tag)
+ str,
+ # an in-tree generated docker image (from `taskcluster/docker/<name>`)
+ {"in-tree": str},
+ # an indexed docker image
+ {"indexed": str},
+ ),
+ ),
+ # seconds of runtime after which the task will be killed. Like 'chunks',
+ # this can be keyed by test platform, but also variant.
+ Required("max-run-time"): optionally_keyed_by(
+ "test-platform", "subtest", "variant", "app", int
+ ),
+ # the exit status code that indicates the task should be retried
+ Optional("retry-exit-status"): [int],
+ # Whether to perform a gecko checkout.
+ Required("checkout"): bool,
+        # Whether to perform a machine reboot after the test is done
+ Optional("reboot"): Any(False, "always", "on-exception", "on-failure"),
+ # What to run
+ Required("mozharness"): {
+ # the mozharness script used to run this task
+ Required("script"): optionally_keyed_by("test-platform", str),
+ # the config files required for the task
+ Required("config"): optionally_keyed_by("test-platform", [str]),
+ # mochitest flavor for mochitest runs
+ Optional("mochitest-flavor"): str,
+ # any additional actions to pass to the mozharness command
+ Optional("actions"): [str],
+ # additional command-line options for mozharness, beyond those
+ # automatically added
+ Required("extra-options"): optionally_keyed_by("test-platform", [str]),
+ # the artifact name (including path) to test on the build task; this is
+ # generally set in a per-kind transformation
+ Optional("build-artifact-name"): str,
+ Optional("installer-url"): str,
+ # If not false, tooltool downloads will be enabled via relengAPIProxy
+ # for either just public files, or all files. Not supported on Windows
+ Required("tooltool-downloads"): Any(
+ False,
+ "public",
+ "internal",
+ ),
+ # Add --blob-upload-branch=<project> mozharness parameter
+ Optional("include-blob-upload-branch"): bool,
+ # The setting for --download-symbols (if omitted, the option will not
+ # be passed to mozharness)
+ Optional("download-symbols"): Any(True, "ondemand"),
+ # If set, then MOZ_NODE_PATH=/usr/local/bin/node is included in the
+ # environment. This is more than just a helpful path setting -- it
+ # causes xpcshell tests to start additional servers, and runs
+ # additional tests.
+ Required("set-moz-node-path"): bool,
+ # If true, include chunking information in the command even if the number
+ # of chunks is 1
+ Required("chunked"): optionally_keyed_by("test-platform", bool),
+ Required("requires-signed-builds"): optionally_keyed_by(
+ "test-platform", "variant", bool
+ ),
+ },
+ # The set of test manifests to run.
+ Optional("test-manifests"): Any(
+ [str],
+ {"active": [str], "skipped": [str]},
+ ),
+ # flag to determine if this is a confirm failure task
+ Optional("confirm-failure"): bool,
+ # The current chunk (if chunking is enabled).
+ Optional("this-chunk"): int,
+        # os user groups for test task workers; required scopes will be
+ # added automatically
+ Optional("os-groups"): optionally_keyed_by("test-platform", [str]),
+ Optional("run-as-administrator"): optionally_keyed_by("test-platform", bool),
+ # -- values supplied by the task-generation infrastructure
+ # the platform of the build this task is testing
+ Required("build-platform"): str,
+ # the label of the build task generating the materials to test
+ Required("build-label"): str,
+ # the label of the signing task generating the materials to test.
+ # Signed builds are used in xpcshell tests on Windows, for instance.
+ Optional("build-signing-label"): optionally_keyed_by("variant", str),
+ # the build's attributes
+ Required("build-attributes"): {str: object},
+ # the platform on which the tests will run
+ Required("test-platform"): str,
+ # limit the test-platforms (as defined in test-platforms.yml)
+ # that the test will run on
+ Optional("limit-platforms"): optionally_keyed_by("app", "subtest", [str]),
+ # the name of the test (the key in tests.yml)
+ Required("test-name"): str,
+ # the product name, defaults to firefox
+ Optional("product"): str,
+ # conditional files to determine when these tests should be run
+ Exclusive("when", "optimization"): {
+ Optional("files-changed"): [str],
+ },
+ # Optimization to perform on this task during the optimization phase.
+ # Optimizations are defined in taskcluster/gecko_taskgraph/optimize.py.
+ Exclusive("optimization", "optimization"): OptimizationSchema,
+ # The SCHEDULES component for this task; this defaults to the suite
+ # (not including the flavor) but can be overridden here.
+ Exclusive("schedules-component", "optimization"): Any(
+ str,
+ [str],
+ ),
+ Optional("worker-type"): optionally_keyed_by(
+ "test-platform",
+ Any(str, None),
+ ),
+ Optional(
+ "require-signed-extensions",
+ description="Whether the build being tested requires extensions be signed.",
+ ): optionally_keyed_by("release-type", "test-platform", bool),
+ # The target name, specifying the build artifact to be tested.
+ # If None or not specified, a transform sets the target based on OS:
+ # target.dmg (Mac), target.apk (Android), target.tar.bz2 (Linux),
+ # or target.zip (Windows).
+ Optional("target"): optionally_keyed_by(
+ "app",
+ "test-platform",
+ "variant",
+ Any(
+ str,
+ None,
+ {Required("index"): str, Required("name"): str},
+ ),
+ ),
+ # A list of artifacts to install from 'fetch' tasks. Validation deferred
+ # to 'job' transforms.
+ Optional("fetches"): object,
+ # Raptor / browsertime specific keys, defer validation to 'raptor.py'
+ # transform.
+ Optional("raptor"): object,
+ # Raptor / browsertime specific keys that need to be here since 'raptor' schema
+        # is evaluated *before* test_description_schema
+ Optional("app"): str,
+ Optional("subtest"): str,
+        # Define whether a given task supports artifact builds; see bug 1695325.
+ Optional("supports-artifact-builds"): bool,
+ }
+)
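+# A minimal, purely illustrative test description that would pass the schema
+# above once `set_defaults` (below) has filled in the remaining required keys;
+# the concrete names here are hypothetical, not taken from tests.yml:
+#
+#     description: Run an example suite
+#     treeherder-symbol: X(1)
+#     test-name: example-suite
+#     test-platform: linux1804-64/opt
+#     build-platform: linux64/opt
+#     build-label: build-linux64/opt
+#     build-attributes: {}
+#     mozharness:
+#         script: desktop_unittest.py
+#         config: [unittests/linux_unittest.py]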
+
+
+@transforms.add
+def handle_keyed_by_mozharness(config, tasks):
+ """Resolve a mozharness field if it is keyed by something"""
+ fields = [
+ "mozharness",
+ "mozharness.chunked",
+ "mozharness.config",
+ "mozharness.extra-options",
+ "mozharness.script",
+ ]
+ for task in tasks:
+ for field in fields:
+ resolve_keyed_by(
+ task,
+ field,
+ item_name=task["test-name"],
+ enforce_single_match=False,
+ )
+ yield task
+
+
+@transforms.add
+def set_defaults(config, tasks):
+ for task in tasks:
+ build_platform = task["build-platform"]
+ if build_platform.startswith("android"):
+ # all Android test tasks download internal objects from tooltool
+ task["mozharness"]["tooltool-downloads"] = "internal"
+ task["mozharness"]["actions"] = ["get-secrets"]
+
+ # loopback-video is always true for Android, but false for other
+ # platform phyla
+ task["loopback-video"] = True
+ task["mozharness"]["set-moz-node-path"] = True
+
+ # software-gl-layers is only meaningful on linux unittests, where it defaults to True
+ if task["test-platform"].startswith("linux") and task["suite"] not in [
+ "talos",
+ "raptor",
+ ]:
+ task.setdefault("allow-software-gl-layers", True)
+ else:
+ task["allow-software-gl-layers"] = False
+
+ task.setdefault("try-name", task["test-name"])
+ task.setdefault("os-groups", [])
+ task.setdefault("run-as-administrator", False)
+ task.setdefault("chunks", 1)
+ task.setdefault("run-on-projects", "built-projects")
+ task.setdefault("built-projects-only", False)
+ task.setdefault("instance-size", "default")
+ task.setdefault("max-run-time", 3600)
+ task.setdefault("reboot", False)
+ task.setdefault("virtualization", "virtual")
+ task.setdefault("loopback-audio", False)
+ task.setdefault("loopback-video", False)
+ task.setdefault("limit-platforms", [])
+ task.setdefault("docker-image", {"in-tree": "ubuntu1804-test"})
+ task.setdefault("checkout", False)
+ task.setdefault("require-signed-extensions", False)
+ task.setdefault("run-without-variant", True)
+ task.setdefault("variants", [])
+ task.setdefault("supports-artifact-builds", True)
+
+ task["mozharness"].setdefault("extra-options", [])
+ task["mozharness"].setdefault("requires-signed-builds", False)
+ task["mozharness"].setdefault("tooltool-downloads", "public")
+ task["mozharness"].setdefault("set-moz-node-path", False)
+ task["mozharness"].setdefault("chunked", False)
+ yield task
+
+
+transforms.add_validate(test_description_schema)
+
+
+@transforms.add
+def run_variant_transforms(config, tasks):
+ """Variant transforms are run as soon as possible to allow other transforms
+ to key by variant."""
+ for task in tasks:
+ xforms = TransformSequence()
+ mod = import_module("gecko_taskgraph.transforms.test.variant")
+ xforms.add(mod.transforms)
+
+ yield from xforms(config, [task])
+
+
+@transforms.add
+def resolve_keys(config, tasks):
+ keys = ("require-signed-extensions", "run-without-variant", "suite", "suite.name")
+ for task in tasks:
+ for key in keys:
+ resolve_keyed_by(
+ task,
+ key,
+ item_name=task["test-name"],
+ enforce_single_match=False,
+ **{
+ "release-type": config.params["release_type"],
+ "variant": task["attributes"].get("unittest_variant"),
+ },
+ )
+ yield task
+
+
+@transforms.add
+def run_remaining_transforms(config, tasks):
+ """Runs other transform files next to this module."""
+ # List of modules to load transforms from in order.
+ transform_modules = (
+ ("raptor", lambda t: t["suite"] == "raptor"),
+ ("other", None),
+ ("worker", None),
+ ("confirm_failure", None),
+ # These transforms should always run last as there is never any
+ # difference in configuration from one chunk to another (other than
+ # chunk number).
+ ("chunk", None),
+ )
+
+ for task in tasks:
+ xforms = TransformSequence()
+ for name, filterfn in transform_modules:
+ if filterfn and not filterfn(task):
+ continue
+
+ mod = import_module(f"gecko_taskgraph.transforms.test.{name}")
+ xforms.add(mod.transforms)
+
+ yield from xforms(config, [task])
+
+
+@transforms.add
+def make_job_description(config, tasks):
+ """Convert *test* descriptions to *job* descriptions (input to
+ gecko_taskgraph.transforms.job)"""
+
+ for task in tasks:
+ attributes = task.get("attributes", {})
+
+ mobile = get_mobile_project(task)
+ if mobile and (mobile not in task["test-name"]):
+ label = "{}-{}-{}-{}".format(
+ config.kind, task["test-platform"], mobile, task["test-name"]
+ )
+ else:
+ label = "{}-{}-{}".format(
+ config.kind, task["test-platform"], task["test-name"]
+ )
+
+ try_name = task["try-name"]
+ if attributes.get("unittest_variant"):
+ suffix = task.pop("variant-suffix")
+ label += suffix
+ try_name += suffix
+
+ if task["chunks"] > 1:
+ label += "-{}".format(task["this-chunk"])
+
+ if task.get("confirm-failure", False):
+ label += "-cf"
+
+ build_label = task["build-label"]
+
+ if task["suite"] == "talos":
+ attr_try_name = "talos_try_name"
+ elif task["suite"] == "raptor":
+ attr_try_name = "raptor_try_name"
+ else:
+ attr_try_name = "unittest_try_name"
+
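+        # build-platform strings look like "linux64/opt": platform plus build type.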
+ attr_build_platform, attr_build_type = task["build-platform"].split("/", 1)
+ attributes.update(
+ {
+ "build_platform": attr_build_platform,
+ "build_type": attr_build_type,
+ "test_platform": task["test-platform"],
+ "test_chunk": str(task["this-chunk"]),
+ "supports-artifact-builds": task["supports-artifact-builds"],
+ attr_try_name: try_name,
+ }
+ )
+
+ if "test-manifests" in task:
+ attributes["test_manifests"] = task["test-manifests"]
+
+ jobdesc = {}
+ name = "{}-{}".format(task["test-platform"], task["test-name"])
+ jobdesc["name"] = name
+ jobdesc["label"] = label
+ jobdesc["description"] = task["description"]
+ jobdesc["attributes"] = attributes
+ jobdesc["dependencies"] = {"build": build_label}
+ jobdesc["job-from"] = task["job-from"]
+
+ if task.get("fetches"):
+ jobdesc["fetches"] = task["fetches"]
+
+ if task["mozharness"]["requires-signed-builds"] is True:
+ jobdesc["dependencies"]["build-signing"] = task["build-signing-label"]
+
+ if "expires-after" in task:
+ jobdesc["expires-after"] = task["expires-after"]
+
+ jobdesc["routes"] = []
+ jobdesc["run-on-projects"] = sorted(task["run-on-projects"])
+ jobdesc["scopes"] = []
+ jobdesc["tags"] = task.get("tags", {})
+ jobdesc["extra"] = {
+ "chunks": {
+ "current": task["this-chunk"],
+ "total": task["chunks"],
+ },
+ "suite": attributes["unittest_suite"],
+ "test-setting": task.pop("test-setting"),
+ }
+ jobdesc["treeherder"] = {
+ "symbol": task["treeherder-symbol"],
+ "kind": "test",
+ "tier": task["tier"],
+ "platform": task.get("treeherder-machine-platform", task["build-platform"]),
+ }
+
+ schedules = task.get("schedules-component", [])
+ if task.get("when"):
+ # This may still be used by comm-central.
+ jobdesc["when"] = task["when"]
+ elif "optimization" in task:
+ jobdesc["optimization"] = task["optimization"]
+ elif set(schedules) & set(INCLUSIVE_COMPONENTS):
+ jobdesc["optimization"] = {"test-inclusive": schedules}
+ else:
+ jobdesc["optimization"] = {"test": schedules}
+
+ run = jobdesc["run"] = {}
+ run["using"] = "mozharness-test"
+ run["test"] = task
+
+ if "workdir" in task:
+ run["workdir"] = task.pop("workdir")
+
+ jobdesc["worker-type"] = task.pop("worker-type")
+
+ if "worker" in task:
+ jobdesc["worker"] = task.pop("worker")
+
+ if task.get("fetches"):
+ jobdesc["fetches"] = task.pop("fetches")
+
+ yield jobdesc
+
+
+def normpath(path):
+ return path.replace("/", "\\")
+
+
+def get_firefox_version():
+ with open("browser/config/version.txt") as f:
+ return f.readline().strip()
diff --git a/taskcluster/gecko_taskgraph/transforms/test/chunk.py b/taskcluster/gecko_taskgraph/transforms/test/chunk.py
new file mode 100644
index 0000000000..7f832c57df
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/test/chunk.py
@@ -0,0 +1,269 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import json
+
+import taskgraph
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.attributes import keymatch
+from taskgraph.util.treeherder import join_symbol, split_symbol
+
+from gecko_taskgraph.util.attributes import is_try
+from gecko_taskgraph.util.chunking import (
+ DefaultLoader,
+ chunk_manifests,
+ get_manifest_loader,
+ get_runtimes,
+ guess_mozinfo_from_task,
+)
+from gecko_taskgraph.util.copy_task import copy_task
+from gecko_taskgraph.util.perfile import perfile_number_of_chunks
+
+DYNAMIC_CHUNK_DURATION = 20 * 60 # seconds
+"""The approximate time each test chunk should take to run."""
+
+
+DYNAMIC_CHUNK_MULTIPLIER = {
+ # Desktop xpcshell tests run in parallel. Reduce the total runtime to
+ # compensate.
+ "^(?!android).*-xpcshell.*": 0.2,
+}
+"""A multiplication factor to tweak the total duration per platform / suite."""
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def set_test_verify_chunks(config, tasks):
+ """Set the number of chunks we use for test-verify."""
+ for task in tasks:
+ if any(task["suite"].startswith(s) for s in ("test-verify", "test-coverage")):
+ env = config.params.get("try_task_config", {}) or {}
+ env = env.get("templates", {}).get("env", {})
+ task["chunks"] = perfile_number_of_chunks(
+ is_try(config.params),
+ env.get("MOZHARNESS_TEST_PATHS", ""),
+ config.params.get("head_repository", ""),
+ config.params.get("head_rev", ""),
+ task["test-name"],
+ )
+
+ # limit the number of chunks we run for test-verify mode because
+ # test-verify is comprehensive and takes a lot of time, if we have
+ # >30 tests changed, this is probably an import of external tests,
+ # or a patch renaming/moving files in bulk
+ maximum_number_verify_chunks = 3
+ if task["chunks"] > maximum_number_verify_chunks:
+ task["chunks"] = maximum_number_verify_chunks
+
+ yield task
+
+
+@transforms.add
+def set_test_manifests(config, tasks):
+ """Determine the set of test manifests that should run in this task."""
+
+ for task in tasks:
+ # When a task explicitly requests no 'test_manifest_loader', test
+ # resolving will happen at test runtime rather than in the taskgraph.
+ if "test-manifest-loader" in task and task["test-manifest-loader"] is None:
+ yield task
+ continue
+
+ # Set 'tests_grouped' to "1", so we can differentiate between suites that are
+ # chunked at the test runtime and those that are chunked in the taskgraph.
+ task.setdefault("tags", {})["tests_grouped"] = "1"
+
+ if taskgraph.fast:
+ # We want to avoid evaluating manifests when taskgraph.fast is set. But
+ # manifests are required for dynamic chunking. Just set the number of
+ # chunks to one in this case.
+ if task["chunks"] == "dynamic":
+ task["chunks"] = 1
+ yield task
+ continue
+
+ manifests = task.get("test-manifests")
+ if manifests:
+ if isinstance(manifests, list):
+ task["test-manifests"] = {"active": manifests, "skipped": []}
+ yield task
+ continue
+
+ mozinfo = guess_mozinfo_from_task(
+ task, config.params.get("head_repository", "")
+ )
+
+ loader_name = task.pop(
+ "test-manifest-loader", config.params["test_manifest_loader"]
+ )
+ loader = get_manifest_loader(loader_name, config.params)
+
+ task["test-manifests"] = loader.get_manifests(
+ task["suite"],
+ frozenset(mozinfo.items()),
+ )
+
+ # When scheduling with test paths, we often find manifests scheduled but all tests
+ # are skipped on a given config. This will remove the task from the task set if
+ # no manifests have active tests for the given task/config
+ mh_test_paths = {}
+ if "MOZHARNESS_TEST_PATHS" in config.params.get("try_task_config", {}).get(
+ "env", {}
+ ):
+ mh_test_paths = json.loads(
+ config.params["try_task_config"]["env"]["MOZHARNESS_TEST_PATHS"]
+ )
+
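+        # MOZHARNESS_TEST_PATHS maps a suite name to the list of test paths
+        # requested on try, e.g. {"mochitest-plain": ["dom/tests"]} (illustrative).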
+ if task["attributes"]["unittest_suite"] in mh_test_paths.keys():
+ input_paths = mh_test_paths[task["attributes"]["unittest_suite"]]
+ remaining_manifests = []
+
+ # if we have web-platform tests incoming, just yield task
+ for m in input_paths:
+ if m.startswith("testing/web-platform/tests/"):
+ if not isinstance(loader, DefaultLoader):
+ task["chunks"] = "dynamic"
+ yield task
+ break
+
+            # input paths can exist in other directories (e.g. [../../dir/test.js])
+ # we need to look for all [active] manifests that include tests in the path
+ for m in input_paths:
+ if [tm for tm in task["test-manifests"]["active"] if tm.startswith(m)]:
+ remaining_manifests.append(m)
+
+ # look in the 'other' manifests
+ for m in input_paths:
+ man = m
+ for tm in task["test-manifests"]["other_dirs"]:
+ matched_dirs = [
+ dp
+ for dp in task["test-manifests"]["other_dirs"].get(tm)
+ if dp.startswith(man)
+ ]
+ if matched_dirs:
+ if tm not in task["test-manifests"]["active"]:
+ continue
+ if m not in remaining_manifests:
+ remaining_manifests.append(m)
+
+ if remaining_manifests == []:
+ continue
+
+ # The default loader loads all manifests. If we use a non-default
+ # loader, we'll only run some subset of manifests and the hardcoded
+ # chunk numbers will no longer be valid. Dynamic chunking should yield
+ # better results.
+ if not isinstance(loader, DefaultLoader):
+ task["chunks"] = "dynamic"
+
+ yield task
+
+
+@transforms.add
+def resolve_dynamic_chunks(config, tasks):
+ """Determine how many chunks are needed to handle the given set of manifests."""
+
+ for task in tasks:
+ if task["chunks"] != "dynamic":
+ yield task
+ continue
+
+ if not task.get("test-manifests"):
+ raise Exception(
+ "{} must define 'test-manifests' to use dynamic chunking!".format(
+ task["test-name"]
+ )
+ )
+
+ runtimes = {
+ m: r
+ for m, r in get_runtimes(task["test-platform"], task["suite"]).items()
+ if m in task["test-manifests"]["active"]
+ }
+
+ # Truncate runtimes that are above the desired chunk duration. They
+ # will be assigned to a chunk on their own and the excess duration
+ # shouldn't cause additional chunks to be needed.
+ times = [min(DYNAMIC_CHUNK_DURATION, r) for r in runtimes.values()]
+ avg = round(sum(times) / len(times), 2) if times else 0
+ total = sum(times)
+
+ # If there are manifests missing from the runtimes data, fill them in
+ # with the average of all present manifests.
+ missing = [m for m in task["test-manifests"]["active"] if m not in runtimes]
+ total += avg * len(missing)
+
+ # Apply any chunk multipliers if found.
+ key = "{}-{}".format(task["test-platform"], task["test-name"])
+ matches = keymatch(DYNAMIC_CHUNK_MULTIPLIER, key)
+ if len(matches) > 1:
+ raise Exception(
+ "Multiple matching values for {} found while "
+ "determining dynamic chunk multiplier!".format(key)
+ )
+ elif matches:
+ total = total * matches[0]
+
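+        # e.g. an estimated total runtime of 3600s at the 1200s target
+        # (DYNAMIC_CHUNK_DURATION) yields 3 chunks.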
+ chunks = int(round(total / DYNAMIC_CHUNK_DURATION))
+
+ # Make sure we never exceed the number of manifests, nor have a chunk
+ # length of 0.
+ task["chunks"] = min(chunks, len(task["test-manifests"]["active"])) or 1
+ yield task
+
+
+@transforms.add
+def split_chunks(config, tasks):
+ """Based on the 'chunks' key, split tests up into chunks by duplicating
+ them and assigning 'this-chunk' appropriately and updating the treeherder
+ symbol.
+ """
+
+ for task in tasks:
+ # If test-manifests are set, chunk them ahead of time to avoid running
+ # the algorithm more than once.
+ chunked_manifests = None
+ if "test-manifests" in task:
+ # TODO: hardcoded to "2", ideally this should be centralized somewhere
+ if (
+ config.params["try_task_config"].get("new-test-config", False)
+ and task["chunks"] > 1
+ ):
+ task["chunks"] *= 2
+ task["max-run-time"] = int(task["max-run-time"] * 2)
+
+ manifests = task["test-manifests"]
+ chunked_manifests = chunk_manifests(
+ task["suite"],
+ task["test-platform"],
+ task["chunks"],
+ manifests["active"],
+ )
+
+ # Add all skipped manifests to the first chunk of backstop pushes
+ # so they still show up in the logs. They won't impact runtime much
+ # and this way tools like ActiveData are still aware that they
+ # exist.
+ if config.params["backstop"] and manifests["active"]:
+ chunked_manifests[0].extend(manifests["skipped"])
+
+ for i in range(task["chunks"]):
+ this_chunk = i + 1
+
+ # copy the test and update with the chunk number
+ chunked = copy_task(task)
+ chunked["this-chunk"] = this_chunk
+
+ if chunked_manifests is not None:
+ chunked["test-manifests"] = sorted(chunked_manifests[i])
+
+ group, symbol = split_symbol(chunked["treeherder-symbol"])
+ if task["chunks"] > 1 or not symbol:
+ # add the chunk number to the TH symbol
+ symbol += str(this_chunk)
+ chunked["treeherder-symbol"] = join_symbol(group, symbol)
+
+ yield chunked
diff --git a/taskcluster/gecko_taskgraph/transforms/test/confirm_failure.py b/taskcluster/gecko_taskgraph/transforms/test/confirm_failure.py
new file mode 100644
index 0000000000..d6a640c6d5
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/test/confirm_failure.py
@@ -0,0 +1,46 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.treeherder import join_symbol, split_symbol
+
+from gecko_taskgraph.util.copy_task import copy_task
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def test_confirm_failure_tasks(config, tasks):
+ """Copy test-* tasks to have -cf copy."""
+
+ for task in tasks:
+ if config.params["try_task_config"].get("new-test-config", False):
+ yield task
+ continue
+
+ if "backlog" in task["suite"] or "failure" in task["suite"]:
+ yield task
+ continue
+
+ # support mochitest, xpcshell, reftest, wpt*
+ if any(
+ task["suite"].startswith(s)
+ for s in ("mochitest", "reftest", "xpcshell", "web-platform")
+ ):
+ cftask = copy_task(task)
+
+            # additional settings are applied when the task is actually scheduled
+ cftask["tier"] = 2
+ cftask["confirm-failure"] = True
+ group, symbol = split_symbol(cftask["treeherder-symbol"])
+ group += "-cf"
+ cftask["treeherder-symbol"] = join_symbol(group, symbol)
+ cftask["run-on-projects"] = []
+ cftask["optimization"] = {"always": None}
+ yield cftask
+
+ yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/test/other.py b/taskcluster/gecko_taskgraph/transforms/test/other.py
new file mode 100644
index 0000000000..dc258ef97a
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/test/other.py
@@ -0,0 +1,1107 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import hashlib
+import json
+import re
+
+from mozbuild.schedules import INCLUSIVE_COMPONENTS
+from mozbuild.util import ReadOnlyDict
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.attributes import keymatch
+from taskgraph.util.keyed_by import evaluate_keyed_by
+from taskgraph.util.schema import Schema, resolve_keyed_by
+from taskgraph.util.taskcluster import get_artifact_path, get_index_url
+from voluptuous import Any, Optional, Required
+
+from gecko_taskgraph.transforms.test.variant import TEST_VARIANTS
+from gecko_taskgraph.util.platforms import platform_family
+from gecko_taskgraph.util.templates import merge
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def limit_platforms(config, tasks):
+ for task in tasks:
+ if not task["limit-platforms"]:
+ yield task
+ continue
+
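+        # keymatch accepts exact strings as well as regular expressions, so
+        # limit-platforms entries may be patterns.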
+ limited_platforms = {key: key for key in task["limit-platforms"]}
+ if keymatch(limited_platforms, task["test-platform"]):
+ yield task
+
+
+@transforms.add
+def handle_suite_category(config, tasks):
+ for task in tasks:
+ task.setdefault("suite", {})
+
+ if isinstance(task["suite"], str):
+ task["suite"] = {"name": task["suite"]}
+
+ suite = task["suite"].setdefault("name", task["test-name"])
+ category = task["suite"].setdefault("category", suite)
+
+ task.setdefault("attributes", {})
+ task["attributes"]["unittest_suite"] = suite
+ task["attributes"]["unittest_category"] = category
+
+ script = task["mozharness"]["script"]
+ category_arg = None
+ if suite.startswith("test-verify") or suite.startswith("test-coverage"):
+ pass
+ elif script in ("android_emulator_unittest.py", "android_hardware_unittest.py"):
+ category_arg = "--test-suite"
+ elif script == "desktop_unittest.py":
+ category_arg = f"--{category}-suite"
+
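+        # e.g. desktop_unittest.py with category "mochitest" and suite
+        # "mochitest-plain" gets "--mochitest-suite=mochitest-plain".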
+ if category_arg:
+ task["mozharness"].setdefault("extra-options", [])
+ extra = task["mozharness"]["extra-options"]
+ if not any(arg.startswith(category_arg) for arg in extra):
+ extra.append(f"{category_arg}={suite}")
+
+ # From here on out we only use the suite name.
+ task["suite"] = suite
+
+ # in the future we might need to refactor new-test-config to be suite specific
+ if "mochitest" in task["suite"] and config.params["try_task_config"].get(
+ "new-test-config", False
+ ):
+ task = merge(
+ task, {"mozharness": {"extra-options": ["--restartAfterFailure"]}}
+ )
+ yield task
+
+
+@transforms.add
+def setup_talos(config, tasks):
+ """Add options that are specific to talos jobs (identified by suite=talos)"""
+ for task in tasks:
+ if task["suite"] != "talos":
+ yield task
+ continue
+
+ extra_options = task.setdefault("mozharness", {}).setdefault(
+ "extra-options", []
+ )
+ extra_options.append("--use-talos-json")
+
+ # win32 builds (win7) need Direct2D disabled for these tests
+ if task["build-platform"].startswith("win32"):
+ extra_options.append("--add-option")
+ extra_options.append("--setpref,gfx.direct2d.disabled=true")
+
+ if config.params.get("project", None):
+ extra_options.append("--project=%s" % config.params["project"])
+
+ yield task
+
+
+@transforms.add
+def setup_browsertime_flag(config, tasks):
+ """Optionally add `--browsertime` flag to Raptor pageload tests."""
+
+ browsertime_flag = config.params["try_task_config"].get("browsertime", False)
+
+ for task in tasks:
+ if not browsertime_flag or task["suite"] != "raptor":
+ yield task
+ continue
+
+ if task["treeherder-symbol"].startswith("Rap"):
+ # The Rap group is subdivided as Rap{-fenix,-refbrow,...},
+ # so `taskgraph.util.treeherder.replace_group` isn't appropriate.
+ task["treeherder-symbol"] = task["treeherder-symbol"].replace(
+ "Rap", "Btime", 1
+ )
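+ # e.g. "Rap-fenix(tp6m-1)" becomes "Btime-fenix(tp6m-1)"
+ # (illustrative symbol).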
+
+ extra_options = task.setdefault("mozharness", {}).setdefault(
+ "extra-options", []
+ )
+ extra_options.append("--browsertime")
+
+ yield task
+
+
+@transforms.add
+def handle_artifact_prefix(config, tasks):
+ """Handle translating `artifact_prefix` appropriately"""
+ for task in tasks:
+ if task["build-attributes"].get("artifact_prefix"):
+ task.setdefault("attributes", {}).setdefault(
+ "artifact_prefix", task["build-attributes"]["artifact_prefix"]
+ )
+ yield task
+
+
+@transforms.add
+def set_treeherder_machine_platform(config, tasks):
+ """Set the appropriate task.extra.treeherder.machine.platform"""
+ translation = {
+ # Build platforms whose treeherder machine platform is spelled
+ # differently.
+ "macosx1100-64/opt": "osx-1100/opt",
+ "macosx1100-64-shippable/opt": "osx-1100-shippable/opt",
+ "macosx1400-64/opt": "osx-1300/opt",
+ "macosx1400-64-shippable/opt": "osx-1400-shippable/opt",
+ "win64-asan/opt": "windows10-64/asan",
+ "win64-aarch64/opt": "windows10-aarch64/opt",
+ }
+ for task in tasks:
+ # For most desktop platforms, the above table is not used for "regular"
+ # builds, so we'll always pick the test platform here.
+ # On macOS though, the regular builds are in the table. This causes a
+ # conflict in `verify_task_graph_symbol` once you add a new test
+ # platform based on regular macOS builds, such as for QR.
+ # Since it's unclear if the regular macOS builds can be removed from
+ # the table, work around the issue for QR.
+ if "android" in task["test-platform"] and "pgo/opt" in task["test-platform"]:
+ platform_new = task["test-platform"].replace("-pgo/opt", "/pgo")
+ task["treeherder-machine-platform"] = platform_new
+ elif "android-em-7.0-x86_64-qr" in task["test-platform"]:
+ task["treeherder-machine-platform"] = task["test-platform"].replace(
+ ".", "-"
+ )
+ elif "android-em-7.0-x86_64-shippable-qr" in task["test-platform"]:
+ task["treeherder-machine-platform"] = task["test-platform"].replace(
+ ".", "-"
+ )
+ elif "android-em-7.0-x86_64-lite-qr" in task["test-platform"]:
+ task["treeherder-machine-platform"] = task["test-platform"].replace(
+ ".", "-"
+ )
+ elif "android-em-7.0-x86_64-shippable-lite-qr" in task["test-platform"]:
+ task["treeherder-machine-platform"] = task["test-platform"].replace(
+ ".", "-"
+ )
+ elif "android-em-7.0-x86-qr" in task["test-platform"]:
+ task["treeherder-machine-platform"] = task["test-platform"].replace(
+ ".", "-"
+ )
+ elif "-qr" in task["test-platform"]:
+ task["treeherder-machine-platform"] = task["test-platform"]
+ elif "android-hw" in task["test-platform"]:
+ task["treeherder-machine-platform"] = task["test-platform"]
+ elif "android-em-7.0-x86_64" in task["test-platform"]:
+ task["treeherder-machine-platform"] = task["test-platform"].replace(
+ ".", "-"
+ )
+ elif "android-em-7.0-x86" in task["test-platform"]:
+ task["treeherder-machine-platform"] = task["test-platform"].replace(
+ ".", "-"
+ )
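+ # For the android-em branches above, e.g. "android-em-7.0-x86_64-qr/debug"
+ # becomes "android-em-7-0-x86_64-qr/debug": the dots are normalized to
+ # dashes for the treeherder platform name.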
+ # Bug 1602863 - must separately define linux64/asan and linux1804-64/asan
+ # otherwise causes an exception during taskgraph generation about
+ # duplicate treeherder platform/symbol.
+ elif "linux64-asan/opt" in task["test-platform"]:
+ task["treeherder-machine-platform"] = "linux64/asan"
+ elif "linux1804-asan/opt" in task["test-platform"]:
+ task["treeherder-machine-platform"] = "linux1804-64/asan"
+ else:
+ task["treeherder-machine-platform"] = translation.get(
+ task["build-platform"], task["test-platform"]
+ )
+ yield task
+
+
+@transforms.add
+def set_download_symbols(config, tasks):
+ """In general, we download symbols immediately for debug builds, but only
+ on demand for everything else. ASAN builds shouldn't download
+ symbols since they don't product symbol zips see bug 1283879"""
+ for task in tasks:
+ if task["test-platform"].split("/")[-1] == "debug":
+ task["mozharness"]["download-symbols"] = True
+ elif "asan" in task["build-platform"] or "tsan" in task["build-platform"]:
+ if "download-symbols" in task["mozharness"]:
+ del task["mozharness"]["download-symbols"]
+ else:
+ task["mozharness"]["download-symbols"] = "ondemand"
+ yield task
+
+
+@transforms.add
+def handle_keyed_by(config, tasks):
+ """Resolve fields that can be keyed by platform, etc."""
+ fields = [
+ "instance-size",
+ "docker-image",
+ "max-run-time",
+ "chunks",
+ "suite",
+ "run-on-projects",
+ "os-groups",
+ "run-as-administrator",
+ "workdir",
+ "worker-type",
+ "virtualization",
+ "fetches.fetch",
+ "fetches.toolchain",
+ "target",
+ "webrender-run-on-projects",
+ "mozharness.requires-signed-builds",
+ "build-signing-label",
+ ]
+ for task in tasks:
+ for field in fields:
+ resolve_keyed_by(
+ task,
+ field,
+ item_name=task["test-name"],
+ enforce_single_match=False,
+ project=config.params["project"],
+ variant=task["attributes"].get("unittest_variant"),
+ )
+ yield task
+
+
+@transforms.add
+def set_target(config, tasks):
+ for task in tasks:
+ build_platform = task["build-platform"]
+ target = None
+ if "target" in task:
+ target = task["target"]
+ if not target:
+ if build_platform.startswith("macosx"):
+ target = "target.dmg"
+ elif build_platform.startswith("android"):
+ target = "target.apk"
+ elif build_platform.startswith("win"):
+ target = "target.zip"
+ else:
+ target = "target.tar.bz2"
+
+ if isinstance(target, dict):
+ # TODO Remove hardcoded mobile artifact prefix
+ index_url = get_index_url(target["index"])
+ installer_url = "{}/artifacts/public/{}".format(index_url, target["name"])
+ task["mozharness"]["installer-url"] = installer_url
+ else:
+ task["mozharness"]["build-artifact-name"] = get_artifact_path(task, target)
+
+ yield task
+
+
+@transforms.add
+def setup_browsertime(config, tasks):
+ """Configure browsertime dependencies for Raptor pageload tests that have
+ `--browsertime` extra option."""
+
+ for task in tasks:
+ # We need to make non-trivial changes to various fetches, and our
+ # `by-test-platform` may not be "compatible" with existing
+ # `by-test-platform` filters. Therefore we do everything after
+ # `handle_keyed_by` so that existing fields have been resolved down to
+ # simple lists. But we use the `by-test-platform` machinery to express
+ # filters so that when the time comes to move browsertime into YAML
+ # files, the transition is straightforward.
+ extra_options = task.get("mozharness", {}).get("extra-options", [])
+
+ if task["suite"] != "raptor":
+ yield task
+ continue
+
+ ts = {
+ "by-test-platform": {
+ "android.*": ["browsertime", "linux64-geckodriver", "linux64-node-16"],
+ "linux.*": ["browsertime", "linux64-geckodriver", "linux64-node-16"],
+ "macosx1015.*": [
+ "browsertime",
+ "macosx64-geckodriver",
+ "macosx64-node-16",
+ ],
+ "macosx1400.*": [
+ "browsertime",
+ "macosx64-aarch64-geckodriver",
+ "macosx64-aarch64-node-16",
+ ],
+ "windows.*aarch64.*": [
+ "browsertime",
+ "win32-geckodriver",
+ "win32-node-16",
+ ],
+ "windows.*-32.*": ["browsertime", "win32-geckodriver", "win32-node-16"],
+ "windows.*-64.*": ["browsertime", "win64-geckodriver", "win64-node-16"],
+ },
+ }
+
+ task.setdefault("fetches", {}).setdefault("toolchain", []).extend(
+ evaluate_keyed_by(ts, "fetches.toolchain", task)
+ )
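+ # e.g. on a linux test platform this adds the "browsertime",
+ # "linux64-geckodriver" and "linux64-node-16" toolchain fetches from
+ # the mapping above.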
+
+ fs = {
+ "by-test-platform": {
+ "android.*": ["linux64-ffmpeg-4.4.1"],
+ "linux.*": ["linux64-ffmpeg-4.4.1"],
+ "macosx1015.*": ["mac64-ffmpeg-4.4.1"],
+ "macosx1400.*": ["mac64-ffmpeg-4.4.1"],
+ "windows.*aarch64.*": ["win64-ffmpeg-4.4.1"],
+ "windows.*-32.*": ["win64-ffmpeg-4.4.1"],
+ "windows.*-64.*": ["win64-ffmpeg-4.4.1"],
+ },
+ }
+
+ cd_fetches = {
+ "android.*": [
+ "linux64-chromedriver-120",
+ "linux64-chromedriver-121",
+ "linux64-chromedriver-122",
+ ],
+ "linux.*": [
+ "linux64-chromedriver-120",
+ "linux64-chromedriver-121",
+ "linux64-chromedriver-122",
+ ],
+ "macosx1015.*": [
+ "mac64-chromedriver-120",
+ "mac64-chromedriver-121",
+ "mac64-chromedriver-122",
+ ],
+ "macosx1400.*": [
+ "mac-arm-chromedriver-120",
+ "mac-arm-chromedriver-121",
+ "mac-arm-chromedriver-122",
+ ],
+ "windows.*aarch64.*": [
+ "win32-chromedriver-120",
+ "win32-chromedriver-121",
+ "win32-chromedriver-122",
+ ],
+ "windows.*-32.*": [
+ "win32-chromedriver-120",
+ "win32-chromedriver-121",
+ "win32-chromedriver-122",
+ ],
+ "windows.*-64.*": [
+ "win32-chromedriver-120",
+ "win32-chromedriver-121",
+ "win32-chromedriver-122",
+ ],
+ }
+
+ chromium_fetches = {
+ "linux.*": ["linux64-chromium"],
+ "macosx1015.*": ["mac-chromium"],
+ "macosx1400.*": ["mac-chromium-arm"],
+ "windows.*aarch64.*": ["win32-chromium"],
+ "windows.*-32.*": ["win32-chromium"],
+ "windows.*-64.*": ["win64-chromium"],
+ "android.*": ["linux64-chromium"],
+ }
+
+ cd_extracted_name = {
+ "windows": "{}chromedriver.exe",
+ "mac": "{}chromedriver",
+ "default": "{}chromedriver",
+ }
+
+ if "--app=chrome" in extra_options or "--app=chrome-m" in extra_options:
+ # Only add the chromedriver fetches when chrome is running
+ for platform in cd_fetches:
+ fs["by-test-platform"][platform].extend(cd_fetches[platform])
+ if (
+ "--app=chromium" in extra_options
+ or "--app=custom-car" in extra_options
+ or "--app=cstm-car-m" in extra_options
+ ):
+ for platform in chromium_fetches:
+ fs["by-test-platform"][platform].extend(chromium_fetches[platform])
+
+ # The chromedrivers for chromium are repackaged into the archives
+ # that we get the chromium binary from so we always have a compatible
+ # version.
+ cd_extracted_name = {
+ "windows": "chrome-win/chromedriver.exe",
+ "mac": "chrome-mac/chromedriver",
+ "default": "chrome-linux/chromedriver",
+ }
+
+ # Disable the Raptor install step
+ if "--app=chrome-m" in extra_options or "--app=cstm-car-m" in extra_options:
+ extra_options.append("--noinstall")
+
+ task.setdefault("fetches", {}).setdefault("fetch", []).extend(
+ evaluate_keyed_by(fs, "fetches.fetch", task)
+ )
+
+ extra_options.extend(
+ (
+ "--browsertime-browsertimejs",
+ "$MOZ_FETCHES_DIR/browsertime/node_modules/browsertime/bin/browsertime.js",
+ )
+ ) # noqa: E501
+
+ eos = {
+ "by-test-platform": {
+ "windows.*": [
+ "--browsertime-node",
+ "$MOZ_FETCHES_DIR/node/node.exe",
+ "--browsertime-geckodriver",
+ "$MOZ_FETCHES_DIR/geckodriver.exe",
+ "--browsertime-chromedriver",
+ "$MOZ_FETCHES_DIR/" + cd_extracted_name["windows"],
+ "--browsertime-ffmpeg",
+ "$MOZ_FETCHES_DIR/ffmpeg-4.4.1-full_build/bin/ffmpeg.exe",
+ ],
+ "macosx.*": [
+ "--browsertime-node",
+ "$MOZ_FETCHES_DIR/node/bin/node",
+ "--browsertime-geckodriver",
+ "$MOZ_FETCHES_DIR/geckodriver",
+ "--browsertime-chromedriver",
+ "$MOZ_FETCHES_DIR/" + cd_extracted_name["mac"],
+ "--browsertime-ffmpeg",
+ "$MOZ_FETCHES_DIR/ffmpeg-macos/ffmpeg",
+ ],
+ "default": [
+ "--browsertime-node",
+ "$MOZ_FETCHES_DIR/node/bin/node",
+ "--browsertime-geckodriver",
+ "$MOZ_FETCHES_DIR/geckodriver",
+ "--browsertime-chromedriver",
+ "$MOZ_FETCHES_DIR/" + cd_extracted_name["default"],
+ "--browsertime-ffmpeg",
+ "$MOZ_FETCHES_DIR/ffmpeg-4.4.1-i686-static/ffmpeg",
+ ],
+ }
+ }
+
+ extra_options.extend(evaluate_keyed_by(eos, "mozharness.extra-options", task))
+
+ yield task
+
+
+def get_mobile_project(task):
+ """Returns the mobile project of the specified task or None."""
+
+ if not task["build-platform"].startswith("android"):
+ return
+
+ mobile_projects = ("fenix", "geckoview", "refbrow", "chrome-m", "cstm-car-m")
+
+ for name in mobile_projects:
+ if name in task["test-name"]:
+ return name
+
+ target = None
+ if "target" in task:
+ resolve_keyed_by(
+ task, "target", item_name=task["test-name"], enforce_single_match=False
+ )
+ target = task["target"]
+ if target:
+ if isinstance(target, dict):
+ target = target["name"]
+
+ for name in mobile_projects:
+ if name in target:
+ return name
+
+ return None
+
+
+@transforms.add
+def disable_wpt_timeouts_on_autoland(config, tasks):
+ """do not run web-platform-tests that are expected TIMEOUT on autoland"""
+ for task in tasks:
+ if (
+ "web-platform-tests" in task["test-name"]
+ and config.params["project"] == "autoland"
+ ):
+ task["mozharness"].setdefault("extra-options", []).append("--skip-timeout")
+ yield task
+
+
+@transforms.add
+def enable_code_coverage(config, tasks):
+ """Enable code coverage for the ccov build-platforms"""
+ for task in tasks:
+ if "ccov" in task["build-platform"]:
+ # Do not run tests on fuzzing builds
+ if "fuzzing" in task["build-platform"]:
+ task["run-on-projects"] = []
+ continue
+
+ # Android coverage builds use Java code coverage and skip the
+ # rest of this transform.
+ if "android" in task["build-platform"]:
+ task.setdefault("fetches", {}).setdefault("toolchain", []).append(
+ "linux64-grcov"
+ )
+ task["mozharness"].setdefault("extra-options", []).append(
+ "--java-code-coverage"
+ )
+ yield task
+ continue
+ task["mozharness"].setdefault("extra-options", []).append("--code-coverage")
+ task["instance-size"] = "xlarge"
+
+ # Temporarily disable Mac tests on mozilla-central
+ if "mac" in task["build-platform"]:
+ task["run-on-projects"] = []
+
+ # Ensure we always run on the projects defined by the build, unless the test
+ # is try only or shouldn't run at all.
+ if task["run-on-projects"] not in [[]]:
+ task["run-on-projects"] = "built-projects"
+
+ # Ensure we don't optimize test suites out.
+ # We always want to run all test suites for coverage purposes.
+ task.pop("schedules-component", None)
+ task.pop("when", None)
+ task["optimization"] = None
+
+ # Add a toolchain and a fetch task for the grcov binary.
+ if any(p in task["build-platform"] for p in ("linux", "osx", "win")):
+ task.setdefault("fetches", {})
+ task["fetches"].setdefault("fetch", [])
+ task["fetches"].setdefault("toolchain", [])
+ task["fetches"].setdefault("build", [])
+
+ if "linux" in task["build-platform"]:
+ task["fetches"]["toolchain"].append("linux64-grcov")
+ elif "osx" in task["build-platform"]:
+ task["fetches"]["toolchain"].append("macosx64-grcov")
+ elif "win" in task["build-platform"]:
+ task["fetches"]["toolchain"].append("win64-grcov")
+
+ task["fetches"]["build"].append({"artifact": "target.mozinfo.json"})
+
+ if "talos" in task["test-name"]:
+ task["max-run-time"] = 7200
+ if "linux" in task["build-platform"]:
+ task["docker-image"] = {"in-tree": "ubuntu1804-test"}
+ task["mozharness"]["extra-options"].append("--add-option")
+ task["mozharness"]["extra-options"].append("--cycles,1")
+ task["mozharness"]["extra-options"].append("--add-option")
+ task["mozharness"]["extra-options"].append("--tppagecycles,1")
+ task["mozharness"]["extra-options"].append("--add-option")
+ task["mozharness"]["extra-options"].append("--no-upload-results")
+ task["mozharness"]["extra-options"].append("--add-option")
+ task["mozharness"]["extra-options"].append("--tptimeout,15000")
+ if "raptor" in task["test-name"]:
+ task["max-run-time"] = 1800
+ yield task
+
+
+@transforms.add
+def handle_run_on_projects(config, tasks):
+ """Handle translating `built-projects` appropriately"""
+ for task in tasks:
+ if task["run-on-projects"] == "built-projects":
+ task["run-on-projects"] = task["build-attributes"].get(
+ "run_on_projects", ["all"]
+ )
+
+ if task.pop("built-projects-only", False):
+ built_projects = set(
+ task["build-attributes"].get("run_on_projects", {"all"})
+ )
+ run_on_projects = set(task.get("run-on-projects", set()))
+
+ # If 'all' exists in run-on-projects, then the intersection of both
+ # is built-projects. Similarly if 'all' exists in built-projects,
+ # the intersection is run-on-projects (so do nothing). When neither
+ # contains 'all', take the actual set intersection.
+ if "all" in run_on_projects:
+ task["run-on-projects"] = sorted(built_projects)
+ elif "all" not in built_projects:
+ task["run-on-projects"] = sorted(run_on_projects & built_projects)
+ yield task
+
+
+@transforms.add
+def handle_tier(config, tasks):
+ """Set the tier based on policy for all test descriptions that do not
+ specify a tier otherwise."""
+ for task in tasks:
+ if "tier" in task:
+ resolve_keyed_by(
+ task,
+ "tier",
+ item_name=task["test-name"],
+ variant=task["attributes"].get("unittest_variant"),
+ enforce_single_match=False,
+ )
+
+ # only override if not set for the test
+ if "tier" not in task or task["tier"] == "default":
+ if task["test-platform"] in [
+ "linux64/opt",
+ "linux64/debug",
+ "linux64-shippable/opt",
+ "linux64-devedition/opt",
+ "linux64-asan/opt",
+ "linux64-qr/opt",
+ "linux64-qr/debug",
+ "linux64-shippable-qr/opt",
+ "linux1804-64/opt",
+ "linux1804-64/debug",
+ "linux1804-64-shippable/opt",
+ "linux1804-64-devedition/opt",
+ "linux1804-64-qr/opt",
+ "linux1804-64-qr/debug",
+ "linux1804-64-shippable-qr/opt",
+ "linux1804-64-asan-qr/opt",
+ "linux1804-64-tsan-qr/opt",
+ "windows10-32-qr/debug",
+ "windows10-32-qr/opt",
+ "windows10-32-shippable-qr/opt",
+ "windows10-32-2004-qr/debug",
+ "windows10-32-2004-qr/opt",
+ "windows10-32-2004-shippable-qr/opt",
+ "windows10-aarch64-qr/opt",
+ "windows10-64/debug",
+ "windows10-64/opt",
+ "windows10-64-shippable/opt",
+ "windows10-64-devedition/opt",
+ "windows10-64-qr/opt",
+ "windows10-64-qr/debug",
+ "windows10-64-shippable-qr/opt",
+ "windows10-64-devedition-qr/opt",
+ "windows10-64-asan-qr/opt",
+ "windows10-64-2004-qr/opt",
+ "windows10-64-2004-qr/debug",
+ "windows10-64-2004-shippable-qr/opt",
+ "windows10-64-2004-devedition-qr/opt",
+ "windows10-64-2004-asan-qr/opt",
+ "windows11-32-2009-qr/debug",
+ "windows11-32-2009-qr/opt",
+ "windows11-32-2009-shippable-qr/opt",
+ "windows11-64-2009-qr/opt",
+ "windows11-64-2009-qr/debug",
+ "windows11-64-2009-shippable-qr/opt",
+ "windows11-64-2009-devedition-qr/opt",
+ "windows11-64-2009-asan-qr/opt",
+ "macosx1015-64/opt",
+ "macosx1015-64/debug",
+ "macosx1015-64-shippable/opt",
+ "macosx1015-64-devedition/opt",
+ "macosx1015-64-devedition-qr/opt",
+ "macosx1015-64-qr/opt",
+ "macosx1015-64-shippable-qr/opt",
+ "macosx1015-64-qr/debug",
+ "macosx1100-64-shippable-qr/opt",
+ "macosx1100-64-qr/debug",
+ "macosx1400-64-shippable-qr/opt",
+ "macosx1400-64-qr/debug",
+ "android-em-7.0-x86_64-shippable/opt",
+ "android-em-7.0-x86_64-shippable-lite/opt",
+ "android-em-7.0-x86_64/debug",
+ "android-em-7.0-x86_64/debug-isolated-process",
+ "android-em-7.0-x86_64/opt",
+ "android-em-7.0-x86_64-lite/opt",
+ "android-em-7.0-x86-shippable/opt",
+ "android-em-7.0-x86-shippable-lite/opt",
+ "android-em-7.0-x86_64-shippable-qr/opt",
+ "android-em-7.0-x86_64-qr/debug",
+ "android-em-7.0-x86_64-qr/debug-isolated-process",
+ "android-em-7.0-x86_64-qr/opt",
+ "android-em-7.0-x86_64-shippable-lite-qr/opt",
+ "android-em-7.0-x86_64-lite-qr/debug",
+ "android-em-7.0-x86_64-lite-qr/opt",
+ ]:
+ task["tier"] = 1
+ else:
+ task["tier"] = 2
+
+ yield task
+
+
+@transforms.add
+def apply_raptor_tier_optimization(config, tasks):
+ for task in tasks:
+ if task["suite"] != "raptor":
+ yield task
+ continue
+
+ if "regression-tests" in task["test-name"]:
+ # Don't optimize the regression tests
+ yield task
+ continue
+
+ if not task["test-platform"].startswith("android-hw"):
+ task["optimization"] = {"skip-unless-expanded": None}
+ if task["tier"] > 1:
+ task["optimization"] = {"skip-unless-backstop": None}
+
+ if task["attributes"].get("unittest_variant"):
+ task["tier"] = max(task["tier"], 2)
+ yield task
+
+
+@transforms.add
+def disable_try_only_platforms(config, tasks):
+ """Turns off platforms that should only run on try."""
+ try_only_platforms = ()
+ for task in tasks:
+ if any(re.match(k + "$", task["test-platform"]) for k in try_only_platforms):
+ task["run-on-projects"] = []
+ yield task
+
+
+@transforms.add
+def ensure_spi_disabled_on_all_but_spi(config, tasks):
+ for task in tasks:
+ variant = task["attributes"].get("unittest_variant", "")
+ has_no_setpref = ("gtest", "cppunit", "jittest", "junit", "raptor")
+
+ if (
+ all(s not in task["suite"] for s in has_no_setpref)
+ and "socketprocess" not in variant
+ ):
+ task["mozharness"]["extra-options"].append(
+ "--setpref=media.peerconnection.mtransport_process=false"
+ )
+ task["mozharness"]["extra-options"].append(
+ "--setpref=network.process.enabled=false"
+ )
+
+ yield task
+
+
+test_setting_description_schema = Schema(
+ {
+ Required("_hash"): str,
+ "platform": {
+ Required("arch"): Any("32", "64", "aarch64", "arm7", "x86", "x86_64"),
+ Required("os"): {
+ Required("name"): Any("android", "linux", "macosx", "windows"),
+ Required("version"): str,
+ Optional("build"): str,
+ },
+ Optional("device"): str,
+ Optional("display"): "wayland",
+ Optional("machine"): Any("ref-hw-2017", "hw-ref"),
+ },
+ "build": {
+ Required("type"): Any("opt", "debug", "debug-isolated-process"),
+ Any(
+ "asan",
+ "ccov",
+ "clang-trunk",
+ "devedition",
+ "domstreams",
+ "lite",
+ "mingwclang",
+ "nightlyasrelease",
+ "shippable",
+ "tsan",
+ ): bool,
+ },
+ "runtime": {Any(*list(TEST_VARIANTS.keys()) + ["1proc"]): bool},
+ },
+ check=False,
+)
+"""Schema test settings must conform to. Validated by
+:py:func:`~test.test_mozilla_central.test_test_setting`"""
+
+
+@transforms.add
+def set_test_setting(config, tasks):
+ """A test ``setting`` is the set of configuration that uniquely
+ distinguishes a test task from other tasks that run the same suite
+ (ignoring chunks).
+
+ There are three different types of information that make up a setting:
+
+ 1. Platform - Information describing the underlying platform tests run on,
+ e.g, OS, CPU architecture, etc.
+
+ 2. Build - Information describing the build being tested, e.g build type,
+ ccov, asan/tsan, etc.
+
+ 3. Runtime - Information describing which runtime parameters are enabled,
+ e.g, prefs, environment variables, etc.
+
+ This transform adds a ``test-setting`` object to the ``extra`` portion of
+ all test tasks, of the form:
+
+ .. code-block::
+
+ {
+ "platform": { ... },
+ "build": { ... },
+ "runtime": { ... }
+ }
+
+ This information could be derived from the label, but consuming this
+ object is less brittle.
+ """
+ # Some attributes have a dash in them which complicates parsing. Ensure we
+ # don't split them up.
+ # TODO Rename these so they don't have a dash.
+ dash_attrs = [
+ "clang-trunk",
+ "ref-hw-2017",
+ "hw-ref",
+ ]
+ dash_token = "%D%"
+ platform_re = re.compile(r"(\D+)(\d*)")
+
+ for task in tasks:
+ setting = {
+ "platform": {
+ "os": {},
+ },
+ "build": {},
+ "runtime": {},
+ }
+
+ # parse platform and build information out of 'test-platform'
+ platform, build_type = task["test-platform"].split("/", 1)
+
+ # ensure dashed attributes don't get split up
+ for attr in dash_attrs:
+ if attr in platform:
+ platform = platform.replace(attr, attr.replace("-", dash_token))
+
+ parts = platform.split("-")
+
+ # restore dashes now that split is finished
+ for i, part in enumerate(parts):
+ if dash_token in part:
+ parts[i] = part.replace(dash_token, "-")
+
+ match = platform_re.match(parts.pop(0))
+ assert match
+ os_name, os_version = match.groups()
+
+ device = machine = os_build = display = None
+ if os_name == "android":
+ device = parts.pop(0)
+ if device == "hw":
+ device = parts.pop(0)
+ else:
+ device = "emulator"
+
+ os_version = parts.pop(0)
+ if parts[0].isdigit():
+ os_version = f"{os_version}.{parts.pop(0)}"
+
+ if parts[0] == "android":
+ parts.pop(0)
+
+ arch = parts.pop(0)
+
+ else:
+ arch = parts.pop(0)
+ if parts[0].isdigit():
+ os_build = parts.pop(0)
+
+ if parts and parts[0] == "ref-hw-2017":
+ machine = parts.pop(0)
+
+ if parts and parts[0] == "hw-ref":
+ machine = parts.pop(0)
+
+ if parts and parts[0] == "wayland":
+ display = parts.pop(0)
+
+ if parts and parts[0] == "aarch64":
+ arch = parts.pop(0)
+
+ # It's not always possible to glean the exact architecture used from
+ # the task, so sometimes this will just be set to "32" or "64".
+ setting["platform"]["arch"] = arch
+ setting["platform"]["os"] = {
+ "name": os_name,
+ "version": os_version,
+ }
+
+ if os_build:
+ setting["platform"]["os"]["build"] = os_build
+
+ if device:
+ setting["platform"]["device"] = device
+
+ if machine:
+ setting["platform"]["machine"] = machine
+
+ if display:
+ setting["platform"]["display"] = display
+
+ # parse remaining parts as build attributes
+ setting["build"]["type"] = build_type
+ while parts:
+ attr = parts.pop(0)
+ if attr == "qr":
+ # all tasks are webrender now, no need to store it
+ continue
+
+ setting["build"][attr] = True
+
+ unittest_variant = task["attributes"].get("unittest_variant")
+ if unittest_variant:
+ for variant in unittest_variant.split("+"):
+ setting["runtime"][variant] = True
+
+ # add a hash of the setting object for easy comparisons
+ setting["_hash"] = hashlib.sha256(
+ json.dumps(setting, sort_keys=True).encode("utf-8")
+ ).hexdigest()[:12]
+
+ task["test-setting"] = ReadOnlyDict(**setting)
+ yield task
+
+
+@transforms.add
+def allow_software_gl_layers(config, tasks):
+ """
+ Handle the "allow-software-gl-layers" property for platforms where it
+ applies.
+ """
+ for task in tasks:
+ if task.get("allow-software-gl-layers"):
+ # This should always be set once bug 1296086 is resolved.
+ task["mozharness"].setdefault("extra-options", []).append(
+ "--allow-software-gl-layers"
+ )
+
+ yield task
+
+
+@transforms.add
+def enable_webrender(config, tasks):
+ """
+ Set the prefs that used to be conditional on the "webrender" property;
+ all tasks run with webrender now.
+ """
+ for task in tasks:
+ # TODO: this was all conditional in enable_webrender; do we still need this?
+ extra_options = task["mozharness"].setdefault("extra-options", [])
+ # We only want to 'setpref' on tests that have a profile
+ if not task["attributes"]["unittest_category"] in [
+ "cppunittest",
+ "geckoview-junit",
+ "gtest",
+ "jittest",
+ "raptor",
+ ]:
+ extra_options.append("--setpref=layers.d3d11.enable-blacklist=false")
+
+ yield task
+
+
+@transforms.add
+def set_schedules_for_webrender_android(config, tasks):
+ """android-hw has limited resources, we need webrender on phones"""
+ for task in tasks:
+ if task["suite"] in ["crashtest", "reftest"] and task[
+ "test-platform"
+ ].startswith("android-hw"):
+ task["schedules-component"] = "android-hw-gfx"
+ yield task
+
+
+@transforms.add
+def set_retry_exit_status(config, tasks):
+ """Set the retry exit status to TBPL_RETRY, the value returned by mozharness
+ scripts to indicate a transient failure that should be retried."""
+ for task in tasks:
+ # also retry on exit status 137, which GCP workers can return on transient errors
+ task["retry-exit-status"] = [4, 137]
+ yield task
+
+
+@transforms.add
+def set_profile(config, tasks):
+ """Set profiling mode for tests."""
+ ttconfig = config.params["try_task_config"]
+ profile = ttconfig.get("gecko-profile", False)
+ settings = (
+ "gecko-profile-interval",
+ "gecko-profile-entries",
+ "gecko-profile-threads",
+ "gecko-profile-features",
+ )
+
+ for task in tasks:
+ if profile and task["suite"] in ["talos", "raptor"]:
+ extras = task["mozharness"]["extra-options"]
+ extras.append("--gecko-profile")
+ for setting in settings:
+ value = ttconfig.get(setting)
+ if value is not None:
+ # These values can contain spaces (eg the "DOM Worker"
+ # thread) and the command is constructed in different,
+ # incompatible ways on different platforms.
+
+ if task["test-platform"].startswith("win"):
+ # Double quotes for Windows (single won't work).
+ extras.append("--" + setting + '="' + str(value) + '"')
+ else:
+ # Other platforms keep things as separate values,
+ # rather than joining with spaces.
+ extras.append("--" + setting + "=" + str(value))
+
+ yield task
+
+
+@transforms.add
+def set_tag(config, tasks):
+ """Set test for a specific tag."""
+ tag = None
+ if config.params["try_mode"] == "try_option_syntax":
+ tag = config.params["try_options"]["tag"]
+ for task in tasks:
+ if tag:
+ task["mozharness"]["extra-options"].extend(["--tag", tag])
+ yield task
+
+
+@transforms.add
+def set_test_type(config, tasks):
+ types = ["mochitest", "reftest", "talos", "raptor", "geckoview-junit", "gtest"]
+ for task in tasks:
+ for test_type in types:
+ if test_type in task["suite"] and "web-platform" not in task["suite"]:
+ task.setdefault("tags", {})["test-type"] = test_type
+ yield task
+
+
+@transforms.add
+def set_schedules_components(config, tasks):
+ for task in tasks:
+ if "optimization" in task or "when" in task:
+ yield task
+ continue
+
+ category = task["attributes"]["unittest_category"]
+ schedules = task.get("schedules-component", category)
+ if isinstance(schedules, str):
+ schedules = [schedules]
+
+ schedules = set(schedules)
+ if schedules & set(INCLUSIVE_COMPONENTS):
+ # if this is an "inclusive" test, then all files which might
+ # cause it to run are annotated with SCHEDULES in moz.build,
+ # so do not include the platform or any other components here
+ task["schedules-component"] = sorted(schedules)
+ yield task
+ continue
+
+ schedules.add(category)
+ schedules.add(platform_family(task["build-platform"]))
+
+ task["schedules-component"] = sorted(schedules)
+ yield task
+
+
+@transforms.add
+def enable_parallel_marking_in_tsan_tests(config, tasks):
+ """Enable parallel marking in TSAN tests"""
+ skip_list = ["cppunittest", "gtest"]
+ for task in tasks:
+ if "-tsan-" in task["test-platform"]:
+ if task["suite"] not in skip_list:
+ extra_options = task["mozharness"].setdefault("extra-options", [])
+ extra_options.append(
+ "--setpref=javascript.options.mem.gc_parallel_marking=true"
+ )
+
+ yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/test/raptor.py b/taskcluster/gecko_taskgraph/transforms/test/raptor.py
new file mode 100644
index 0000000000..0667d22bb2
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/test/raptor.py
@@ -0,0 +1,326 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import Schema, optionally_keyed_by, resolve_keyed_by
+from taskgraph.util.treeherder import join_symbol, split_symbol
+from voluptuous import Extra, Optional, Required
+
+from gecko_taskgraph.transforms.test import test_description_schema
+from gecko_taskgraph.util.copy_task import copy_task
+
+transforms = TransformSequence()
+task_transforms = TransformSequence()
+
+raptor_description_schema = Schema(
+ {
+ # Raptor specific configs.
+ Optional("raptor"): {
+ Optional("activity"): optionally_keyed_by("app", str),
+ Optional("apps"): optionally_keyed_by("test-platform", "subtest", [str]),
+ Optional("binary-path"): optionally_keyed_by("app", str),
+ Optional("run-visual-metrics"): optionally_keyed_by(
+ "app", "test-platform", bool
+ ),
+ Optional("subtests"): optionally_keyed_by("app", "test-platform", list),
+ Optional("test"): str,
+ Optional("test-url-param"): optionally_keyed_by(
+ "subtest", "test-platform", str
+ ),
+ },
+ # Configs defined in the 'test_description_schema'.
+ Optional("max-run-time"): optionally_keyed_by(
+ "app", "subtest", "test-platform", test_description_schema["max-run-time"]
+ ),
+ Optional("run-on-projects"): optionally_keyed_by(
+ "app",
+ "test-name",
+ "raptor.test",
+ "subtest",
+ "variant",
+ test_description_schema["run-on-projects"],
+ ),
+ Optional("variants"): test_description_schema["variants"],
+ Optional("target"): optionally_keyed_by(
+ "app", test_description_schema["target"]
+ ),
+ Optional("tier"): optionally_keyed_by(
+ "app", "raptor.test", "subtest", "variant", test_description_schema["tier"]
+ ),
+ Required("test-name"): test_description_schema["test-name"],
+ Required("test-platform"): test_description_schema["test-platform"],
+ Required("require-signed-extensions"): test_description_schema[
+ "require-signed-extensions"
+ ],
+ Required("treeherder-symbol"): test_description_schema["treeherder-symbol"],
+ # Any unrecognized keys will be validated against the test_description_schema.
+ Extra: object,
+ }
+)
+
+transforms.add_validate(raptor_description_schema)
+
+
+@transforms.add
+def set_defaults(config, tests):
+ for test in tests:
+ test.setdefault("raptor", {}).setdefault("run-visual-metrics", False)
+ yield test
+
+
+@transforms.add
+def split_apps(config, tests):
+ app_symbols = {
+ "chrome": "ChR",
+ "chrome-m": "ChR",
+ "chromium": "Cr",
+ "fenix": "fenix",
+ "refbrow": "refbrow",
+ "safari": "Saf",
+ "custom-car": "CaR",
+ "cstm-car-m": "CaR",
+ }
+
+ for test in tests:
+ apps = test["raptor"].pop("apps", None)
+ if not apps:
+ yield test
+ continue
+
+ for app in apps:
+ # Ignore variants for non-Firefox or non-mobile applications.
+ if app not in [
+ "firefox",
+ "geckoview",
+ "fenix",
+ "chrome-m",
+ "cstm-car-m",
+ ] and test["attributes"].get("unittest_variant"):
+ continue
+
+ atest = copy_task(test)
+ suffix = f"-{app}"
+ atest["app"] = app
+ atest["description"] += f" on {app.capitalize()}"
+
+ name = atest["test-name"] + suffix
+ atest["test-name"] = name
+ atest["try-name"] = name
+
+ if app in app_symbols:
+ group, symbol = split_symbol(atest["treeherder-symbol"])
+ group += f"-{app_symbols[app]}"
+ atest["treeherder-symbol"] = join_symbol(group, symbol)
+
+ yield atest
+
+
+@transforms.add
+def handle_keyed_by_prereqs(config, tests):
+ """
+ Only resolve keys for prerequisite fields here since the
+ these keyed-by options might have keyed-by fields
+ as well.
+ """
+ for test in tests:
+ resolve_keyed_by(test, "raptor.subtests", item_name=test["test-name"])
+ yield test
+
+
+@transforms.add
+def split_raptor_subtests(config, tests):
+ for test in tests:
+ # For tests that have 'subtests' listed, we want to create a separate
+ # test job for every subtest (i.e. split out each page-load URL into its own job)
+ subtests = test["raptor"].pop("subtests", None)
+ if not subtests:
+ if "macosx1400" not in test["test-platform"]:
+ yield test
+ continue
+
+ for chunk_number, subtest in enumerate(subtests):
+ # Create new test job
+ chunked = copy_task(test)
+ chunked["chunk-number"] = 1 + chunk_number
+ chunked["subtest"] = subtest
+ chunked["subtest-symbol"] = subtest
+ if isinstance(chunked["subtest"], list):
+ chunked["subtest"] = subtest[0]
+ chunked["subtest-symbol"] = subtest[1]
+ chunked = resolve_keyed_by(
+ chunked, "tier", chunked["subtest"], defer=["variant"]
+ )
+ yield chunked
+
+
+@transforms.add
+def handle_keyed_by(config, tests):
+ fields = [
+ "raptor.test-url-param",
+ "raptor.run-visual-metrics",
+ "raptor.activity",
+ "raptor.binary-path",
+ "limit-platforms",
+ "fetches.fetch",
+ "max-run-time",
+ "run-on-projects",
+ "target",
+ "tier",
+ ]
+ for test in tests:
+ for field in fields:
+ resolve_keyed_by(
+ test, field, item_name=test["test-name"], defer=["variant"]
+ )
+ yield test
+
+
+@transforms.add
+def split_page_load_by_url(config, tests):
+ for test in tests:
+ # `chunk-number` and 'subtest' only exists when the task had a
+ # definition for `subtests`
+ chunk_number = test.pop("chunk-number", None)
+ subtest = test.get(
+ "subtest"
+ ) # don't pop as some tasks need this value after splitting variants
+ subtest_symbol = test.pop("subtest-symbol", None)
+
+ if not chunk_number or not subtest:
+ yield test
+ continue
+
+ if len(subtest_symbol) > 10 and "ytp" not in subtest_symbol:
+ raise Exception(
+ "Treeherder symbol %s is larger than 10 char! Please use a different symbol."
+ % subtest_symbol
+ )
+
+ if test["test-name"].startswith("browsertime-"):
+ test["raptor"]["test"] = subtest
+
+ # Remove youtube-playback from the test name to avoid duplication
+ test["test-name"] = test["test-name"].replace("youtube-playback-", "")
+ else:
+ # Use full test name if running on webextension
+ test["raptor"]["test"] = "raptor-tp6-" + subtest + "-{}".format(test["app"])
+
+ # Only run the subtest/single URL
+ test["test-name"] += f"-{subtest}"
+ test["try-name"] += f"-{subtest}"
+
+ # Set treeherder symbol and description
+ group, _ = split_symbol(test["treeherder-symbol"])
+ test["treeherder-symbol"] = join_symbol(group, subtest_symbol)
+ test["description"] += f" on {subtest}"
+
+ yield test
+
+
+@transforms.add
+def modify_extra_options(config, tests):
+ for test in tests:
+ test_name = test.get("test-name", None)
+
+ if "first-install" in test_name:
+ # First-install tests should never use conditioned profiles
+ extra_options = test.setdefault("mozharness", {}).setdefault(
+ "extra-options", []
+ )
+
+ for i, opt in enumerate(extra_options):
+ if "conditioned-profile" in opt:
+ if i:
+ extra_options.pop(i)
+ break
+
+ if "-widevine" in test_name:
+ extra_options = test.setdefault("mozharness", {}).setdefault(
+ "extra-options", []
+ )
+ for i, opt in enumerate(extra_options):
+ if "--conditioned-profile=settled" in opt:
+ if i:
+ extra_options[i] += "-youtube"
+ break
+
+ if "unity-webgl" in test_name:
+ # Disable the extra-profiler-run for unity-webgl tests.
+ extra_options = test.setdefault("mozharness", {}).setdefault(
+ "extra-options", []
+ )
+ for i, opt in enumerate(extra_options):
+ if "extra-profiler-run" in opt:
+ if i:
+ extra_options.pop(i)
+ break
+
+ yield test
+
+
+@transforms.add
+def add_extra_options(config, tests):
+ for test in tests:
+ mozharness = test.setdefault("mozharness", {})
+ if test.get("app", "") == "chrome-m":
+ mozharness["tooltool-downloads"] = "internal"
+
+ extra_options = mozharness.setdefault("extra-options", [])
+
+ # Add the device name if we're on android
+ test_platform = test["test-platform"]
+ if test_platform.startswith("android-hw-a51"):
+ extra_options.append("--device-name=a51")
+ elif test_platform.startswith("android-hw-p5"):
+ extra_options.append("--device-name=p5_aarch64")
+ elif test_platform.startswith("android-hw-p6"):
+ extra_options.append("--device-name=p6_aarch64")
+ elif test_platform.startswith("android-hw-s21"):
+ extra_options.append("--device-name=s21_aarch64")
+
+ if test["raptor"].pop("run-visual-metrics", False):
+ extra_options.append("--browsertime-video")
+ extra_options.append("--browsertime-visualmetrics")
+ test["attributes"]["run-visual-metrics"] = True
+
+ if "app" in test:
+ extra_options.append(
+ "--app={}".format(test["app"])
+ ) # don't pop as some tasks need this value after splitting variants
+
+ if "activity" in test["raptor"]:
+ extra_options.append("--activity={}".format(test["raptor"].pop("activity")))
+
+ if "binary-path" in test["raptor"]:
+ extra_options.append(
+ "--binary-path={}".format(test["raptor"].pop("binary-path"))
+ )
+
+ if "test" in test["raptor"]:
+ extra_options.append("--test={}".format(test["raptor"].pop("test")))
+
+ if test["require-signed-extensions"]:
+ extra_options.append("--is-release-build")
+
+ if "test-url-param" in test["raptor"]:
+ param = test["raptor"].pop("test-url-param")
+ if param != []:
+ extra_options.append(
+ "--test-url-params={}".format(param.replace(" ", ""))
+ )
+
+ extra_options.append("--project={}".format(config.params.get("project")))
+
+ yield test
+
+
+@task_transforms.add
+def add_scopes_and_proxy(config, tasks):
+ for task in tasks:
+ task.setdefault("worker", {})["taskcluster-proxy"] = True
+ task.setdefault("scopes", []).append(
+ "secrets:get:project/perftest/gecko/level-{level}/perftest-login"
+ )
+ yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/test/variant.py b/taskcluster/gecko_taskgraph/transforms/test/variant.py
new file mode 100644
index 0000000000..bda91b2f25
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/test/variant.py
@@ -0,0 +1,124 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import datetime
+
+import jsone
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import Schema, validate_schema
+from taskgraph.util.treeherder import join_symbol, split_symbol
+from voluptuous import Any, Optional, Required
+
+from gecko_taskgraph.util.chunking import TEST_VARIANTS
+from gecko_taskgraph.util.copy_task import copy_task
+from gecko_taskgraph.util.templates import merge
+
+transforms = TransformSequence()
+
+"""List of available test variants defined."""
+
+
+variant_description_schema = Schema(
+ {
+ str: {
+ Required("description"): str,
+ Required("suffix"): str,
+ Optional("mozinfo"): str,
+ Required("component"): str,
+ Required("expiration"): str,
+ Optional("when"): {Any("$eval", "$if"): str},
+ Optional("replace"): {str: object},
+ Optional("merge"): {str: object},
+ }
+ }
+)
+"""variant description schema"""
+
+
+@transforms.add
+def split_variants(config, tasks):
+ """Splits test definitions into multiple tasks based on the `variants` key.
+
+ If `variants` are defined, a copy of the task is yielded for each
+ variant in the list, with the 'unittest_variant' attribute set; the
+ unmodified task is also yielded when `run-without-variant` is true.
+ """
+ validate_schema(variant_description_schema, TEST_VARIANTS, "In variants.yml:")
+
+ def find_expired_variants(variants):
+ expired = []
+
+ # do not expire on esr/beta/release
+ if config.params.get("release_type", "") in [
+ "release",
+ "beta",
+ ]:
+ return []
+
+ if "esr" in config.params.get("release_type", ""):
+ return []
+
+ today = datetime.datetime.today()
+ for variant in variants:
+ expiration = variants[variant]["expiration"]
+ if len(expiration.split("-")) == 1:
+ continue
+ expires_at = datetime.datetime.strptime(expiration, "%Y-%m-%d")
+ if expires_at < today:
+ expired.append(variant)
+ return expired
+
+ def remove_expired(variants, expired):
+ remaining_variants = []
+ for name in variants:
+ parts = [p for p in name.split("+") if p in expired]
+ if len(parts) > 0:
+ continue
+
+ remaining_variants.append(name)
+ return remaining_variants
+
+ def apply_variant(variant, task):
+ task["description"] = variant["description"].format(**task)
+
+ suffix = f"-{variant['suffix']}"
+ group, symbol = split_symbol(task["treeherder-symbol"])
+ if group != "?":
+ group += suffix
+ else:
+ symbol += suffix
+ task["treeherder-symbol"] = join_symbol(group, symbol)
+
+ # This will be used to set the label and try-name in 'make_job_description'.
+ task.setdefault("variant-suffix", "")
+ task["variant-suffix"] += suffix
+
+ # Replace and/or merge the configuration.
+ task.update(variant.get("replace", {}))
+ return merge(task, variant.get("merge", {}))
+
+ expired_variants = find_expired_variants(TEST_VARIANTS)
+ for task in tasks:
+ variants = task.pop("variants", [])
+ variants = remove_expired(variants, expired_variants)
+
+ if task.pop("run-without-variant"):
+ yield copy_task(task)
+
+ for name in variants:
+ # Apply composite variants (joined by '+') in order.
+ parts = name.split("+")
+ taskv = copy_task(task)
+ for part in parts:
+ variant = TEST_VARIANTS[part]
+
+ # If any variant in a composite fails this check we skip it.
+ if "when" in variant:
+ context = {"task": task}
+ if not jsone.render(variant["when"], context):
+ break
+
+ taskv = apply_variant(variant, taskv)
+ else:
+ taskv["attributes"]["unittest_variant"] = name
+ yield taskv
diff --git a/taskcluster/gecko_taskgraph/transforms/test/worker.py b/taskcluster/gecko_taskgraph/transforms/test/worker.py
new file mode 100644
index 0000000000..873347459c
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/test/worker.py
@@ -0,0 +1,204 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from taskgraph.transforms.base import TransformSequence
+
+# default worker types keyed by instance-size
+LINUX_WORKER_TYPES = {
+ "large": "t-linux-large",
+ "xlarge": "t-linux-xlarge",
+ "default": "t-linux-large",
+}
+
+# windows worker types keyed by test-platform and virtualization
+WINDOWS_WORKER_TYPES = {
+ "windows10-64": { # source-test
+ "virtual": "t-win10-64",
+ "virtual-with-gpu": "t-win10-64-gpu-s",
+ "hardware": "t-win10-64-1803-hw",
+ },
+ "windows10-64-shippable-qr": {
+ "virtual": "t-win10-64",
+ "virtual-with-gpu": "t-win10-64-gpu-s",
+ "hardware": "t-win10-64-1803-hw",
+ },
+ "windows10-64-ref-hw-2017": {
+ "virtual": "t-win10-64",
+ "virtual-with-gpu": "t-win10-64-gpu-s",
+ "hardware": "t-win10-64-ref-hw",
+ },
+ "windows11-64-2009-hw-ref-shippable": {
+ "virtual": "win11-64-2009-hw-ref",
+ "virtual-with-gpu": "win11-64-2009-hw-ref",
+ "hardware": "win11-64-2009-hw-ref",
+ },
+ "windows11-64-2009-hw-ref": {
+ "virtual": "win11-64-2009-hw-ref",
+ "virtual-with-gpu": "win11-64-2009-hw-ref",
+ "hardware": "win11-64-2009-hw-ref",
+ },
+ "windows10-64-2009-qr": {
+ "virtual": "win10-64-2009",
+ "virtual-with-gpu": "win10-64-2009-gpu",
+ },
+ "windows10-64-2009-shippable-qr": {
+ "virtual": "win10-64-2009",
+ "virtual-with-gpu": "win10-64-2009-gpu",
+ },
+ "windows11-32-2009-mingwclang-qr": {
+ "virtual": "win11-64-2009",
+ "virtual-with-gpu": "win11-64-2009-gpu",
+ },
+ "windows11-32-2009-qr": {
+ "virtual": "win11-64-2009",
+ "virtual-with-gpu": "win11-64-2009-gpu",
+ },
+ "windows11-32-2009-shippable-qr": {
+ "virtual": "win11-64-2009",
+ "virtual-with-gpu": "win11-64-2009-gpu",
+ },
+ "windows11-64-2009": {
+ "virtual": "win11-64-2009",
+ "virtual-with-gpu": "win11-64-2009-gpu",
+ },
+ "windows11-64-2009-ccov": {
+ "virtual": "win11-64-2009-ssd",
+ "virtual-with-gpu": "win11-64-2009-ssd-gpu",
+ },
+ "windows11-64-2009-ccov-qr": {
+ "virtual": "win11-64-2009-ssd",
+ "virtual-with-gpu": "win11-64-2009-ssd-gpu",
+ },
+ "windows11-64-2009-devedition": {
+ "virtual": "win11-64-2009",
+ "virtual-with-gpu": "win11-64-2009-gpu",
+ },
+ "windows11-64-2009-shippable": {
+ "virtual": "win11-64-2009",
+ "virtual-with-gpu": "win11-64-2009-gpu",
+ },
+ "windows11-64-2009-qr": {
+ "virtual": "win11-64-2009",
+ "virtual-with-gpu": "win11-64-2009-gpu",
+ },
+ "windows11-64-2009-shippable-qr": {
+ "virtual": "win11-64-2009",
+ "virtual-with-gpu": "win11-64-2009-gpu",
+ },
+ "windows11-64-2009-devedition-qr": {
+ "virtual": "win11-64-2009",
+ "virtual-with-gpu": "win11-64-2009-gpu",
+ },
+ "windows11-64-2009-asan-qr": {
+ "virtual": "win11-64-2009",
+ "virtual-with-gpu": "win11-64-2009-gpu",
+ },
+ "windows11-64-2009-mingwclang-qr": {
+ "virtual": "win11-64-2009",
+ "virtual-with-gpu": "win11-64-2009-gpu",
+ },
+}
+
+# os x worker types keyed by test-platform
+MACOSX_WORKER_TYPES = {
+ "macosx1015-64": "t-osx-1015-r8",
+ "macosx1100-64": "t-osx-1100-m1",
+ "macosx1400-64": "t-osx-1400-m2",
+}
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def set_worker_type(config, tasks):
+ """Set the worker type based on the test platform."""
+ for task in tasks:
+ # during the taskcluster migration, this is a bit tortured, but it
+ # will get simpler eventually!
+ test_platform = task["test-platform"]
+ if task.get("worker-type", "default") != "default":
+ # This test already has its worker type defined, so just use that
+ # (yielded below), unless the value is "default", in which case we
+ # ignore it.
+ pass
+ elif test_platform.startswith("macosx1015-64"):
+ task["worker-type"] = MACOSX_WORKER_TYPES["macosx1015-64"]
+ elif test_platform.startswith("macosx1100-64"):
+ task["worker-type"] = MACOSX_WORKER_TYPES["macosx1100-64"]
+ elif test_platform.startswith("macosx1400-64"):
+ task["worker-type"] = MACOSX_WORKER_TYPES["macosx1400-64"]
+ elif test_platform.startswith("win"):
+ # figure out what platform the job needs to run on
+ if task["virtualization"] == "hardware":
+ # some jobs like talos and reftest run on real h/w - those are all win10
+ if test_platform.startswith("windows10-64-ref-hw-2017"):
+ win_worker_type_platform = WINDOWS_WORKER_TYPES[
+ "windows10-64-ref-hw-2017"
+ ]
+ elif test_platform.startswith("windows11-64-2009-hw-ref"):
+ win_worker_type_platform = WINDOWS_WORKER_TYPES[
+ "windows11-64-2009-hw-ref"
+ ]
+ else:
+ win_worker_type_platform = WINDOWS_WORKER_TYPES["windows10-64"]
+ else:
+ # the other jobs run on a vm which may or may not be a win10 vm
+ win_worker_type_platform = WINDOWS_WORKER_TYPES[
+ test_platform.split("/")[0]
+ ]
+ if task[
+ "virtualization"
+ ] == "virtual-with-gpu" and test_platform.startswith("windows1"):
+ # add in `--requires-gpu` to the mozharness options
+ task["mozharness"]["extra-options"].append("--requires-gpu")
+
+ # now we have the right platform set the worker type accordingly
+ task["worker-type"] = win_worker_type_platform[task["virtualization"]]
+ elif test_platform.startswith("android-hw-p5"):
+ if task["suite"] != "raptor":
+ task["worker-type"] = "t-bitbar-gw-unit-p5"
+ else:
+ task["worker-type"] = "t-bitbar-gw-perf-p5"
+ elif test_platform.startswith("android-hw-p6"):
+ if task["suite"] != "raptor":
+ task["worker-type"] = "t-bitbar-gw-unit-p6"
+ else:
+ task["worker-type"] = "t-bitbar-gw-perf-p6"
+ elif test_platform.startswith("android-hw-s21"):
+ if task["suite"] != "raptor":
+ task["worker-type"] = "t-bitbar-gw-unit-s21"
+ else:
+ task["worker-type"] = "t-bitbar-gw-perf-s21"
+ elif test_platform.startswith("android-hw-a51"):
+ if task["suite"] != "raptor":
+ task["worker-type"] = "t-bitbar-gw-unit-a51"
+ else:
+ task["worker-type"] = "t-bitbar-gw-perf-a51"
+ elif test_platform.startswith("android-em-7.0-x86"):
+ task["worker-type"] = "t-linux-kvm"
+ elif test_platform.startswith("linux") or test_platform.startswith("android"):
+ if "wayland" in test_platform:
+ task["worker-type"] = "t-linux-wayland"
+ elif task.get("suite", "") in ["talos", "raptor"] and not task[
+ "build-platform"
+ ].startswith("linux64-ccov"):
+ task["worker-type"] = "t-linux-talos-1804"
+ else:
+ task["worker-type"] = LINUX_WORKER_TYPES[task["instance-size"]]
+ else:
+ raise Exception(f"unknown test_platform {test_platform}")
+
+ yield task
+
+
+@transforms.add
+def set_wayland_env(config, tasks):
+ for task in tasks:
+ if task["worker-type"] != "t-linux-wayland":
+ yield task
+ continue
+
+ env = task.setdefault("worker", {}).setdefault("env", {})
+ env["MOZ_ENABLE_WAYLAND"] = "1"
+ env["WAYLAND_DISPLAY"] = "wayland-0"
+ yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/trigger_comm_central.py b/taskcluster/gecko_taskgraph/transforms/trigger_comm_central.py
new file mode 100644
index 0000000000..a3909e37a7
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/trigger_comm_central.py
@@ -0,0 +1,24 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at https://mozilla.org/MPL/2.0/.
+
+"""
+Resolve keys for the jobs defined in the trigger-comm-central kind.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def resolve_keys(config, jobs):
+ for job in jobs:
+ resolve_keyed_by(
+ job,
+ "scopes",
+ item_name=job["name"],
+ level=config.params["level"],
+ )
+ yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/try_job.py b/taskcluster/gecko_taskgraph/transforms/try_job.py
new file mode 100644
index 0000000000..4b3281f5c5
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/try_job.py
@@ -0,0 +1,18 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def set_job_try_name(config, jobs):
+ """
+ For a task which is governed by `-j` in try syntax, set the `job_try_name`
+ attribute based on the job name.
+ """
+ for job in jobs:
+ job.setdefault("attributes", {}).setdefault("job_try_name", job["name"])
+ yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/update_verify.py b/taskcluster/gecko_taskgraph/transforms/update_verify.py
new file mode 100644
index 0000000000..19c932c746
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/update_verify.py
@@ -0,0 +1,58 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the update-verify task into an actual task description.
+"""
+
+
+from copy import deepcopy
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.treeherder import add_suffix, inherit_treeherder_from_dep
+
+from gecko_taskgraph.util.attributes import task_name
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def add_command(config, tasks):
+ config_tasks = {}
+ for dep in config.kind_dependencies_tasks.values():
+ if (
+ "update-verify-config" in dep.kind
+ or "update-verify-next-config" in dep.kind
+ ):
+ config_tasks[task_name(dep)] = dep
+
+ for task in tasks:
+ config_task = config_tasks[task["name"]]
+ total_chunks = task["extra"]["chunks"]
+ task["worker"].setdefault("env", {})["CHANNEL"] = config_task.task["extra"][
+ "channel"
+ ]
+ task.setdefault("fetches", {})[config_task.label] = [
+ "update-verify.cfg",
+ ]
+ task["treeherder"] = inherit_treeherder_from_dep(task, config_task)
+
+ for this_chunk in range(1, total_chunks + 1):
+ chunked = deepcopy(task)
+ chunked["treeherder"]["symbol"] = add_suffix(
+ chunked["treeherder"]["symbol"], this_chunk
+ )
+ chunked["label"] = "release-update-verify-{}-{}/{}".format(
+ chunked["name"], this_chunk, total_chunks
+ )
+ if not chunked["worker"].get("env"):
+ chunked["worker"]["env"] = {}
+ chunked["run"] = {
+ "using": "run-task",
+ "cwd": "{checkout}",
+ "command": "tools/update-verify/scripts/chunked-verify.sh "
+ f"--total-chunks={total_chunks} --this-chunk={this_chunk}",
+ "sparse-profile": "update-verify",
+ }
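+ # e.g. total_chunks=12 yields labels "release-update-verify-<name>-1/12"
+ # through "-12/12", each invoking chunked-verify.sh with the matching
+ # --this-chunk (illustrative chunk count).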
+
+ yield chunked
diff --git a/taskcluster/gecko_taskgraph/transforms/update_verify_config.py b/taskcluster/gecko_taskgraph/transforms/update_verify_config.py
new file mode 100644
index 0000000000..2d1cd40877
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/update_verify_config.py
@@ -0,0 +1,148 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the update-verify-config task into an actual task description.
+"""
+
+from urllib.parse import urlsplit
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+
+from gecko_taskgraph.transforms.task import get_branch_repo, get_branch_rev
+from gecko_taskgraph.util.attributes import release_level
+from gecko_taskgraph.util.scriptworker import get_release_config
+
+transforms = TransformSequence()
+
+
+# The beta regexes do not match point releases.
+# In the rare event that we do ship a point
+# release to beta, we need to either:
+# 1) update these regexes to match that specific version
+# 2) pass a second include version that matches that specific version
+INCLUDE_VERSION_REGEXES = {
+ "beta": r"'^(\d+\.\d+(b\d+)?)$'",
+ "nonbeta": r"'^\d+\.\d+(\.\d+)?$'",
+ # Same as nonbeta, except for the esr suffix
+ "esr": r"'^\d+\.\d+(\.\d+)?esr$'",
+ # Previous esr versions, for update testing before we update users to esr115
+ "esr115-next": r"'^(52|60|68|78|91|102)+\.\d+(\.\d+)?esr$'",
+}
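+# e.g. the "beta" regex matches "124.0" and "124.0b3" but not "124.0.1",
+# which is why point releases need the handling described above
+# (illustrative version strings).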
+
+MAR_CHANNEL_ID_OVERRIDE_REGEXES = {
+ "beta": r"'^\d+\.\d+(\.\d+)?$$,firefox-mozilla-beta,firefox-mozilla-release'",
+}
+
+
+def ensure_wrapped_singlequote(regexes):
+ """Ensure that a regex (from INCLUDE_VERSION_REGEXES or MAR_CHANNEL_ID_OVERRIDE_REGEXES)
+ is wrapped in single quotes.
+ """
+ for name, regex in regexes.items():
+ if regex[0] != "'" or regex[-1] != "'":
+ raise Exception(
+ "Regex {} is invalid: not wrapped with single quotes.\n{}".format(
+ name, regex
+ )
+ )
+
+
+ensure_wrapped_singlequote(INCLUDE_VERSION_REGEXES)
+ensure_wrapped_singlequote(MAR_CHANNEL_ID_OVERRIDE_REGEXES)
+
+
+@transforms.add
+def add_command(config, tasks):
+ keyed_by_args = [
+ "channel",
+ "archive-prefix",
+ "previous-archive-prefix",
+ "aus-server",
+ "override-certs",
+ "include-version",
+ "mar-channel-id-override",
+ "last-watershed",
+ ]
+ optional_args = [
+ "updater-platform",
+ ]
+
+ release_config = get_release_config(config)
+
+ for task in tasks:
+ task["description"] = "generate update verify config for {}".format(
+ task["attributes"]["build_platform"]
+ )
+
+ command = [
+ "python",
+ "testing/mozharness/scripts/release/update-verify-config-creator.py",
+ "--product",
+ task["extra"]["product"],
+ "--stage-product",
+ task["shipping-product"],
+ "--app-name",
+ task["extra"]["app-name"],
+ "--branch-prefix",
+ task["extra"]["branch-prefix"],
+ "--platform",
+ task["extra"]["platform"],
+ "--to-version",
+ release_config["version"],
+ "--to-app-version",
+ release_config["appVersion"],
+ "--to-build-number",
+ str(release_config["build_number"]),
+ "--to-buildid",
+ config.params["moz_build_date"],
+ "--to-revision",
+ get_branch_rev(config),
+ "--output-file",
+ "update-verify.cfg",
+ ]
+
+ repo_path = urlsplit(get_branch_repo(config)).path.lstrip("/")
+ command.extend(["--repo-path", repo_path])
+
+ if release_config.get("partial_versions"):
+ for partial in release_config["partial_versions"].split(","):
+ command.extend(["--partial-version", partial.split("build")[0]])
+
+ for arg in optional_args:
+ if task["extra"].get(arg):
+ command.append(f"--{arg}")
+ command.append(task["extra"][arg])
+
+ for arg in keyed_by_args:
+ thing = f"extra.{arg}"
+ resolve_keyed_by(
+ task,
+ thing,
+ item_name=task["name"],
+ platform=task["attributes"]["build_platform"],
+ **{
+ "release-type": config.params["release_type"],
+ "release-level": release_level(config.params["project"]),
+ },
+ )
+ # ignore things that resolved to null
+ if not task["extra"].get(arg):
+ continue
+ if arg == "include-version":
+ task["extra"][arg] = INCLUDE_VERSION_REGEXES[task["extra"][arg]]
+ if arg == "mar-channel-id-override":
+ task["extra"][arg] = MAR_CHANNEL_ID_OVERRIDE_REGEXES[task["extra"][arg]]
+
+ command.append(f"--{arg}")
+ command.append(task["extra"][arg])
+
+ task["run"].update(
+ {
+ "using": "mach",
+ "mach": " ".join(command),
+ }
+ )
+
+ yield task
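
The loop above leans on resolve_keyed_by to collapse by-release-type /
by-release-level / by-platform structures in task["extra"] down to plain
values, which are then mapped through INCLUDE_VERSION_REGEXES and appended to
the command. A rough self-contained sketch of the keyed-by idea; this is not
taskgraph's actual resolve_keyed_by, which also supports regex keys and
nested fields:

    def resolve_keyed_by_sketch(value, context):
        # Collapse one level of {"by-<key>": {...}} using the given context.
        if not (isinstance(value, dict) and len(value) == 1):
            return value
        (key,) = value
        if not key.startswith("by-"):
            return value
        alternatives = value[key]
        lookup = context[key[len("by-"):]]
        return alternatives.get(lookup, alternatives.get("default"))

    include_version = {"by-release-type": {"beta": "beta", "default": "nonbeta"}}
    assert resolve_keyed_by_sketch(include_version, {"release-type": "beta"}) == "beta"
    assert resolve_keyed_by_sketch(include_version, {"release-type": "release"}) == "nonbeta"
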
diff --git a/taskcluster/gecko_taskgraph/transforms/upload_generated_sources.py b/taskcluster/gecko_taskgraph/transforms/upload_generated_sources.py
new file mode 100644
index 0000000000..aea948f90e
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/upload_generated_sources.py
@@ -0,0 +1,42 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the upload-generated-sources task description template,
+taskcluster/ci/upload-generated-sources/kind.yml, into an actual task description.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.dependencies import get_primary_dependency
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def add_task_info(config, jobs):
+ for job in jobs:
+ dep_task = get_primary_dependency(config, job)
+ assert dep_task
+
+ # Add a dependency on the build task.
+ job["dependencies"] = {"build": dep_task.label}
+ # Label the job to match the build task it's uploading from.
+ job["label"] = dep_task.label.replace("build-", "upload-generated-sources-")
+        # Copy over some bits of metadata from the build task.
+ dep_th = dep_task.task["extra"]["treeherder"]
+ job.setdefault("attributes", {})
+ job["attributes"]["build_platform"] = dep_task.attributes.get("build_platform")
+ if dep_task.attributes.get("shippable"):
+ job["attributes"]["shippable"] = True
+ plat = "{}/{}".format(
+ dep_th["machine"]["platform"], dep_task.attributes.get("build_type")
+ )
+ job["treeherder"]["platform"] = plat
+ job["treeherder"]["tier"] = dep_th["tier"]
+ if dep_th["symbol"] != "N":
+ job["treeherder"]["symbol"] = "Ugs{}".format(dep_th["symbol"])
+ job["run-on-projects"] = dep_task.attributes.get("run_on_projects")
+ job["optimization"] = dep_task.optimization
+ job["shipping-product"] = dep_task.attributes.get("shipping_product")
+
+ yield job
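
The label and Treeherder metadata are derived mechanically from the build
task the job uploads from. A small sketch with illustrative values:

    dep_label = "build-linux64-shippable/opt"
    dep_th = {"machine": {"platform": "linux64-shippable"}, "tier": 1, "symbol": "Bs"}

    label = dep_label.replace("build-", "upload-generated-sources-")
    platform = "{}/{}".format(dep_th["machine"]["platform"], "opt")  # build_type assumed "opt"
    symbol = "Ugs{}".format(dep_th["symbol"])

    assert label == "upload-generated-sources-linux64-shippable/opt"
    assert (platform, symbol) == ("linux64-shippable/opt", "UgsBs")
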
diff --git a/taskcluster/gecko_taskgraph/transforms/upload_symbols.py b/taskcluster/gecko_taskgraph/transforms/upload_symbols.py
new file mode 100644
index 0000000000..5c0bf18cb0
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/upload_symbols.py
@@ -0,0 +1,94 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the upload-symbols task description template,
+taskcluster/ci/upload-symbols/job-template.yml, into an actual task description.
+"""
+
+
+import logging
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.dependencies import get_primary_dependency
+from taskgraph.util.treeherder import inherit_treeherder_from_dep, join_symbol
+
+from gecko_taskgraph.util.attributes import (
+ RELEASE_PROJECTS,
+ copy_attributes_from_dependent_job,
+)
+
+logger = logging.getLogger(__name__)
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def check_nightlies(config, tasks):
+ """Ensure that we upload symbols for all shippable builds, so that crash-stats can
+ resolve any reports sent to it. Try may enable full symbols but not upload them.
+
+ Putting this check here (instead of the transforms for the build kind) lets us
+    leverage any not-for-build-platforms set in the upload-symbols kind."""
+ for task in tasks:
+ dep = get_primary_dependency(config, task)
+ assert dep
+
+ if (
+ config.params["project"] in RELEASE_PROJECTS
+ and dep.attributes.get("shippable")
+ and not dep.attributes.get("enable-full-crashsymbols")
+ and not dep.attributes.get("skip-upload-crashsymbols")
+ ):
+ raise Exception(
+ "Shippable job %s should have enable-full-crashsymbols attribute "
+ "set to true to enable symbol upload to crash-stats" % dep.label
+ )
+ yield task
+
+
+@transforms.add
+def fill_template(config, tasks):
+ for task in tasks:
+ dep = get_primary_dependency(config, task)
+ assert dep
+
+ # Fill out the dynamic fields in the task description
+ task["label"] = dep.label + "-upload-symbols"
+
+ # Skip tasks where we don't have the full crashsymbols enabled
+ if not dep.attributes.get("enable-full-crashsymbols") or dep.attributes.get(
+ "skip-upload-crashsymbols"
+ ):
+ logger.debug("Skipping upload symbols task for %s", task["label"])
+ continue
+
+ task["worker"]["env"]["GECKO_HEAD_REPOSITORY"] = config.params[
+ "head_repository"
+ ]
+ task["worker"]["env"]["GECKO_HEAD_REV"] = config.params["head_rev"]
+ task["worker"]["env"]["SYMBOL_SECRET"] = task["worker"]["env"][
+ "SYMBOL_SECRET"
+ ].format(level=config.params["level"])
+
+ attributes = copy_attributes_from_dependent_job(dep)
+ attributes.update(task.get("attributes", {}))
+ task["attributes"] = attributes
+
+ treeherder = inherit_treeherder_from_dep(task, dep)
+ th = dep.task.get("extra")["treeherder"]
+ th_symbol = th.get("symbol")
+ th_groupsymbol = th.get("groupSymbol", "?")
+
+ # Disambiguate the treeherder symbol.
+ sym = "Sym" + (th_symbol[1:] if th_symbol.startswith("B") else th_symbol)
+ treeherder.setdefault("symbol", join_symbol(th_groupsymbol, sym))
+ task["treeherder"] = treeherder
+
+ # We only want to run these tasks if the build is run.
+ # XXX Better to run this on promote phase instead?
+ task["run-on-projects"] = dep.attributes.get("run_on_projects")
+ task["optimization"] = {"upload-symbols": None}
+ task["if-dependencies"] = [task["attributes"]["primary-kind-dependency"]]
+
+ yield task
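
The symbol disambiguation above turns a build symbol such as "B" or "Bs" into
"Sym" or "Syms" and re-joins it with the group symbol. A sketch using a
simplified stand-in for taskgraph.util.treeherder.join_symbol:

    def join_symbol(group, symbol):
        # Stand-in for taskgraph.util.treeherder.join_symbol; "?" means no group.
        return symbol if group == "?" else f"{group}({symbol})"

    def upload_symbols_symbol(th_symbol, th_groupsymbol="?"):
        sym = "Sym" + (th_symbol[1:] if th_symbol.startswith("B") else th_symbol)
        return join_symbol(th_groupsymbol, sym)

    assert upload_symbols_symbol("B") == "Sym"
    assert upload_symbols_symbol("Bs") == "Syms"
    assert upload_symbols_symbol("N", "L10n") == "L10n(SymN)"
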
diff --git a/taskcluster/gecko_taskgraph/transforms/upstream_artifact_task.py b/taskcluster/gecko_taskgraph/transforms/upstream_artifact_task.py
new file mode 100644
index 0000000000..62f94a8238
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/upstream_artifact_task.py
@@ -0,0 +1,29 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Find upstream artifact task.
+"""
+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def find_upstream_artifact_task(config, jobs):
+ for job in jobs:
+ dep_job = None
+ if job.get("dependent-tasks"):
+            dep_labels = list(job["dependent-tasks"].keys())
+ for label in dep_labels:
+ if label.endswith("-mac-signing"):
+ assert (
+ dep_job is None
+ ), "Can't determine whether " "{} or {} is dep_job!".format(
+ dep_job.label, label
+ )
+ dep_job = job["dependent-tasks"][label]
+ if dep_job is not None:
+ job["upstream-artifact-task"] = dep_job
+ yield job
diff --git a/taskcluster/gecko_taskgraph/try_option_syntax.py b/taskcluster/gecko_taskgraph/try_option_syntax.py
new file mode 100644
index 0000000000..4dfac574df
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/try_option_syntax.py
@@ -0,0 +1,750 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import argparse
+import copy
+import logging
+import re
+import shlex
+from collections import defaultdict
+
+logger = logging.getLogger(__name__)
+
+# The build type aliases are very cryptic and only used in try flags. These are
+# mappings from the single-char alias to a longer, more recognizable form.
+BUILD_TYPE_ALIASES = {"o": "opt", "d": "debug"}
+
+# consider anything in this whitelist of kinds to be governed by -b/-p
+BUILD_KINDS = {
+ "build",
+ "artifact-build",
+ "hazard",
+ "l10n",
+ "valgrind",
+ "spidermonkey",
+}
+
+
+# mapping from shortcut name (usable with -u) to a boolean function identifying
+# matching test names
+def alias_prefix(prefix):
+ return lambda name: name.startswith(prefix)
+
+
+def alias_contains(infix):
+ return lambda name: infix in name
+
+
+def alias_matches(pattern):
+ pattern = re.compile(pattern)
+ return lambda name: pattern.match(name)
+
+
+UNITTEST_ALIASES = {
+ # Aliases specify shorthands that can be used in try syntax. The shorthand
+ # is the dictionary key, with the value representing a pattern for matching
+ # unittest_try_names.
+ #
+    # Note that alias expansion is performed in the absence of any chunk
+    # suffixes. For example, an alias mapping "foo" to "foobar" would also
+    # turn "foo-7" into "foobar-7". Note that a few aliases allowed chunks
+    # to be specified without a leading `-`, for example 'mochitest-dt1'.
+    # That's no longer supported.
+ "cppunit": alias_prefix("cppunit"),
+ "crashtest": alias_prefix("crashtest"),
+ "crashtest-e10s": alias_prefix("crashtest-e10s"),
+ "e10s": alias_contains("e10s"),
+ "firefox-ui-functional": alias_prefix("firefox-ui-functional"),
+ "gaia-js-integration": alias_contains("gaia-js-integration"),
+ "gtest": alias_prefix("gtest"),
+ "jittest": alias_prefix("jittest"),
+ "jittests": alias_prefix("jittest"),
+ "jsreftest": alias_prefix("jsreftest"),
+ "jsreftest-e10s": alias_prefix("jsreftest-e10s"),
+ "marionette": alias_prefix("marionette"),
+ "mochitest": alias_prefix("mochitest"),
+ "mochitests": alias_prefix("mochitest"),
+ "mochitest-e10s": alias_prefix("mochitest-e10s"),
+ "mochitests-e10s": alias_prefix("mochitest-e10s"),
+ "mochitest-debug": alias_prefix("mochitest-debug-"),
+ "mochitest-a11y": alias_contains("mochitest-a11y"),
+ "mochitest-bc": alias_prefix("mochitest-browser-chrome"),
+ "mochitest-e10s-bc": alias_prefix("mochitest-browser-chrome-e10s"),
+ "mochitest-browser-chrome": alias_prefix("mochitest-browser-chrome"),
+ "mochitest-e10s-browser-chrome": alias_prefix("mochitest-browser-chrome-e10s"),
+ "mochitest-chrome": alias_contains("mochitest-chrome"),
+ "mochitest-dt": alias_prefix("mochitest-devtools-chrome"),
+ "mochitest-e10s-dt": alias_prefix("mochitest-devtools-chrome-e10s"),
+ "mochitest-gl": alias_prefix("mochitest-webgl"),
+ "mochitest-gl-e10s": alias_prefix("mochitest-webgl-e10s"),
+ "mochitest-gpu": alias_prefix("mochitest-gpu"),
+ "mochitest-gpu-e10s": alias_prefix("mochitest-gpu-e10s"),
+ "mochitest-media": alias_prefix("mochitest-media"),
+ "mochitest-media-e10s": alias_prefix("mochitest-media-e10s"),
+ "mochitest-vg": alias_prefix("mochitest-valgrind"),
+ "reftest": alias_matches(r"^(plain-)?reftest.*$"),
+ "reftest-no-accel": alias_matches(r"^(plain-)?reftest-no-accel.*$"),
+ "reftests": alias_matches(r"^(plain-)?reftest.*$"),
+ "reftests-e10s": alias_matches(r"^(plain-)?reftest-e10s.*$"),
+ "robocop": alias_prefix("robocop"),
+ "web-platform-test": alias_prefix("web-platform-tests"),
+ "web-platform-tests": alias_prefix("web-platform-tests"),
+ "web-platform-tests-e10s": alias_prefix("web-platform-tests-e10s"),
+ "web-platform-tests-crashtests": alias_prefix("web-platform-tests-crashtest"),
+ "web-platform-tests-print-reftest": alias_prefix(
+ "web-platform-tests-print-reftest"
+ ),
+ "web-platform-tests-reftests": alias_prefix("web-platform-tests-reftest"),
+ "web-platform-tests-reftests-e10s": alias_prefix("web-platform-tests-reftest-e10s"),
+ "web-platform-tests-wdspec": alias_prefix("web-platform-tests-wdspec"),
+ "web-platform-tests-wdspec-e10s": alias_prefix("web-platform-tests-wdspec-e10s"),
+ "xpcshell": alias_prefix("xpcshell"),
+}
+
+# unittest platforms can be specified by substring of the "pretty name", which
+# is basically the old Buildbot builder name. This dict has {pretty name,
+# [test_platforms]} translations. It includes only the most commonly-used
+# substrings. It is OK to add new test platforms to various shorthands here;
+# if you add a new Linux64 test platform for instance, people will expect that
+# their previous methods of requesting "all linux64 tests" will include this
+# new platform, and they shouldn't have to explicitly spell out the new platform
+# every time for such cases.
+#
+# Note that the test platforms here are only the prefix up to the `/`.
+UNITTEST_PLATFORM_PRETTY_NAMES = {
+ "Ubuntu": [
+ "linux32",
+ "linux64",
+ "linux64-asan",
+ "linux1804-64",
+ "linux1804-64-asan",
+ ],
+ "x64": ["linux64", "linux64-asan", "linux1804-64", "linux1804-64-asan"],
+ "Android 7.0 Samsung A51 32bit": ["android-hw-a51-11.0-arm7"],
+ "Android 7.0 Samsung A51 64bit": ["android-hw-a51-11.0-aarch64"],
+ "Android 13.0 Google Pixel 5 32bit": ["android-hw-p5-13.0-arm7"],
+ "Android 13.0 Google Pixel 5 64bit": ["android-hw-p5-13.0-android-aarch64"],
+ "Android 13.0 Google Pixel 6 64bit": ["android-hw-p6-13.0-android-aarch64"],
+ "Android 13.0 Samsung S21 64bit": ["android-hw-s21-13.0-android-aarch64"],
+ "Windows 10": ["windows10-64"],
+}
+
+TEST_CHUNK_SUFFIX = re.compile("(.*)-([0-9]+)$")
+
+
+def escape_whitespace_in_brackets(input_str):
+ """
+    Tests may be restricted by platform with a bracketed list, inside of which
+    whitespace may occur. That is typically invalid shell syntax, so we escape
+    it with backslash sequences.
+ """
+ result = ""
+ in_brackets = False
+ for char in input_str:
+ if char == "[":
+ in_brackets = True
+ result += char
+ continue
+
+ if char == "]":
+ in_brackets = False
+ result += char
+ continue
+
+ if char == " " and in_brackets:
+ result += r"\ "
+ continue
+
+ result += char
+
+ return result
+
+
+def split_try_msg(message):
+ try:
+ try_idx = message.index("try:")
+ except ValueError:
+ return []
+ message = message[try_idx:].split("\n")[0]
+ # shlex used to ensure we split correctly when giving values to argparse.
+ return shlex.split(escape_whitespace_in_brackets(message))
+
+
+def parse_message(message):
+ parts = split_try_msg(message)
+
+    # Argument parser based on try flags
+ parser = argparse.ArgumentParser()
+ parser.add_argument("-b", "--build", dest="build_types")
+ parser.add_argument(
+ "-p", "--platform", nargs="?", dest="platforms", const="all", default="all"
+ )
+ parser.add_argument(
+ "-u", "--unittests", nargs="?", dest="unittests", const="all", default="all"
+ )
+ parser.add_argument(
+ "-t", "--talos", nargs="?", dest="talos", const="all", default="none"
+ )
+ parser.add_argument(
+ "-r", "--raptor", nargs="?", dest="raptor", const="all", default="none"
+ )
+ parser.add_argument(
+ "-i", "--interactive", dest="interactive", action="store_true", default=False
+ )
+ parser.add_argument(
+ "-e", "--all-emails", dest="notifications", action="store_const", const="all"
+ )
+ parser.add_argument(
+ "-f",
+ "--failure-emails",
+ dest="notifications",
+ action="store_const",
+ const="failure",
+ )
+ parser.add_argument("-j", "--job", dest="jobs", action="append")
+ parser.add_argument(
+ "--rebuild-talos",
+ dest="talos_trigger_tests",
+ action="store",
+ type=int,
+ default=1,
+ )
+ parser.add_argument(
+ "--rebuild-raptor",
+ dest="raptor_trigger_tests",
+ action="store",
+ type=int,
+ default=1,
+ )
+ parser.add_argument("--setenv", dest="env", action="append")
+ parser.add_argument("--gecko-profile", dest="profile", action="store_true")
+ parser.add_argument("--tag", dest="tag", action="store", default=None)
+ parser.add_argument("--no-retry", dest="no_retry", action="store_true")
+ parser.add_argument(
+ "--include-nightly", dest="include_nightly", action="store_true"
+ )
+ parser.add_argument("--artifact", dest="artifact", action="store_true")
+
+ # While we are transitioning from BB to TC, we want to push jobs to tc-worker
+ # machines but not overload machines with every try push. Therefore, we add
+ # this temporary option to be able to push jobs to tc-worker.
+ parser.add_argument(
+ "-w",
+ "--taskcluster-worker",
+ dest="taskcluster_worker",
+ action="store_true",
+ default=False,
+ )
+
+ # In order to run test jobs multiple times
+ parser.add_argument("--rebuild", dest="trigger_tests", type=int, default=1)
+ args, _ = parser.parse_known_args(parts)
+
+ try_options = vars(args)
+ try_task_config = {
+ "use-artifact-builds": try_options.pop("artifact"),
+ "gecko-profile": try_options.pop("profile"),
+ "env": dict(arg.split("=") for arg in try_options.pop("env") or []),
+ }
+ return {
+ "try_options": try_options,
+ "try_task_config": try_task_config,
+ }
+
+
+class TryOptionSyntax:
+ def __init__(self, parameters, full_task_graph, graph_config):
+ """
+ Apply the try options in parameters.
+
+ The resulting object has attributes:
+
+ - build_types: a list containing zero or more of 'opt' and 'debug'
+ - platforms: a list of selected platform names, or None for all
+ - unittests: a list of tests, of the form given below, or None for all
+ - jobs: a list of requested job names, or None for all
+ - trigger_tests: the number of times tests should be triggered (--rebuild)
+ - interactive: true if --interactive
+ - notifications: either None if no notifications or one of 'all' or 'failure'
+        - talos_trigger_tests: the number of times talos tests should be triggered (--rebuild-talos)
+        - raptor_trigger_tests: the number of times raptor tests should be triggered (--rebuild-raptor)
+ - tag: restrict tests to the specified tag
+ - no_retry: do not retry failed jobs
+
+ The unittests and talos lists contain dictionaries of the form:
+
+ {
+ 'test': '<suite name>',
+ 'platforms': [..platform names..], # to limit to only certain platforms
+ 'only_chunks': set([..chunk numbers..]), # to limit only to certain chunks
+ }
+ """
+ self.full_task_graph = full_task_graph
+ self.graph_config = graph_config
+ self.jobs = []
+ self.build_types = []
+ self.platforms = []
+ self.unittests = []
+ self.talos = []
+ self.raptor = []
+ self.trigger_tests = 0
+ self.interactive = False
+ self.notifications = None
+ self.talos_trigger_tests = 0
+ self.raptor_trigger_tests = 0
+ self.tag = None
+ self.no_retry = False
+
+ options = parameters["try_options"]
+ if not options:
+ return None
+ self.jobs = self.parse_jobs(options["jobs"])
+ self.build_types = self.parse_build_types(
+ options["build_types"], full_task_graph
+ )
+ self.platforms = self.parse_platforms(options, full_task_graph)
+ self.unittests = self.parse_test_option(
+ "unittest_try_name", options["unittests"], full_task_graph
+ )
+ self.talos = self.parse_test_option(
+ "talos_try_name", options["talos"], full_task_graph
+ )
+ self.raptor = self.parse_test_option(
+ "raptor_try_name", options["raptor"], full_task_graph
+ )
+ self.trigger_tests = options["trigger_tests"]
+ self.interactive = options["interactive"]
+ self.notifications = options["notifications"]
+ self.talos_trigger_tests = options["talos_trigger_tests"]
+ self.raptor_trigger_tests = options["raptor_trigger_tests"]
+ self.tag = options["tag"]
+ self.no_retry = options["no_retry"]
+ self.include_nightly = options["include_nightly"]
+
+ self.test_tiers = self.generate_test_tiers(full_task_graph)
+
+ def generate_test_tiers(self, full_task_graph):
+ retval = defaultdict(set)
+ for t in full_task_graph.tasks.values():
+ if t.attributes.get("kind") == "test":
+ try:
+ tier = t.task["extra"]["treeherder"]["tier"]
+ name = t.attributes.get("unittest_try_name")
+ retval[name].add(tier)
+ except KeyError:
+ pass
+
+ return retval
+
+ def parse_jobs(self, jobs_arg):
+ if not jobs_arg or jobs_arg == ["none"]:
+ return [] # default is `-j none`
+ if jobs_arg == ["all"]:
+ return None
+ expanded = []
+ for job in jobs_arg:
+ expanded.extend(j.strip() for j in job.split(","))
+ return expanded
+
+ def parse_build_types(self, build_types_arg, full_task_graph):
+ if build_types_arg is None:
+ build_types_arg = []
+
+ build_types = [
+ _f
+ for _f in (
+ BUILD_TYPE_ALIASES.get(build_type) for build_type in build_types_arg
+ )
+ if _f
+ ]
+
+ all_types = {
+ t.attributes["build_type"]
+ for t in full_task_graph.tasks.values()
+ if "build_type" in t.attributes
+ }
+ bad_types = set(build_types) - all_types
+ if bad_types:
+ raise Exception(
+ "Unknown build type(s) [%s] specified for try" % ",".join(bad_types)
+ )
+
+ return build_types
+
+ def parse_platforms(self, options, full_task_graph):
+ platform_arg = options["platforms"]
+ if platform_arg == "all":
+ return None
+
+ RIDEALONG_BUILDS = self.graph_config["try"]["ridealong-builds"]
+ results = []
+ for build in platform_arg.split(","):
+ if build in ("macosx64",):
+ # Regular opt builds are faster than shippable ones, but we don't run
+ # tests against them.
+ # We want to choose them (and only them) if no tests were requested.
+ if (
+ options["unittests"] == "none"
+ and options["talos"] == "none"
+ and options["raptor"] == "none"
+ ):
+ results.append("macosx64")
+ logger.info("adding macosx64 for try syntax using macosx64.")
+ # Otherwise, use _just_ the shippable builds.
+ else:
+ results.append("macosx64-shippable")
+ logger.info(
+ "adding macosx64-shippable for try syntax using macosx64."
+ )
+ else:
+ results.append(build)
+ if build in RIDEALONG_BUILDS:
+ results.extend(RIDEALONG_BUILDS[build])
+ logger.info(
+ "platform %s triggers ridealong builds %s"
+ % (build, ", ".join(RIDEALONG_BUILDS[build]))
+ )
+
+ test_platforms = {
+ t.attributes["test_platform"]
+ for t in full_task_graph.tasks.values()
+ if "test_platform" in t.attributes
+ }
+ build_platforms = {
+ t.attributes["build_platform"]
+ for t in full_task_graph.tasks.values()
+ if "build_platform" in t.attributes
+ }
+ all_platforms = test_platforms | build_platforms
+ bad_platforms = set(results) - all_platforms
+ if bad_platforms:
+ raise Exception(
+ "Unknown platform(s) [%s] specified for try" % ",".join(bad_platforms)
+ )
+
+ return results
+
+ def parse_test_option(self, attr_name, test_arg, full_task_graph):
+ """
+
+ Parse a unittest (-u) or talos (-t) option, in the context of a full
+ task graph containing available `unittest_try_name` or `talos_try_name`
+ attributes. There are three cases:
+
+ - test_arg is == 'none' (meaning an empty list)
+ - test_arg is == 'all' (meaning use the list of jobs for that job type)
+ - test_arg is comma string which needs to be parsed
+ """
+
+ # Empty job list case...
+ if test_arg is None or test_arg == "none":
+ return []
+
+ all_platforms = {
+ t.attributes["test_platform"].split("/")[0]
+ for t in full_task_graph.tasks.values()
+ if "test_platform" in t.attributes
+ }
+
+ tests = self.parse_test_opts(test_arg, all_platforms)
+
+ if not tests:
+ return []
+
+ all_tests = {
+ t.attributes[attr_name]
+ for t in full_task_graph.tasks.values()
+ if attr_name in t.attributes
+ }
+
+ # Special case where tests is 'all' and must be expanded
+ if tests[0]["test"] == "all":
+ results = []
+ all_entry = tests[0]
+ for test in all_tests:
+ entry = {"test": test}
+ # If there are platform restrictions copy them across the list.
+ if "platforms" in all_entry:
+ entry["platforms"] = list(all_entry["platforms"])
+ results.append(entry)
+ return self.parse_test_chunks(all_tests, results)
+ return self.parse_test_chunks(all_tests, tests)
+
+ def parse_test_opts(self, input_str, all_platforms):
+ """
+ Parse `testspec,testspec,..`, where each testspec is a test name
+ optionally followed by a list of test platforms or negated platforms in
+ `[]`.
+
+ No brackets indicates that tests should run on all platforms for which
+ builds are available. If testspecs are provided, then each is treated,
+ from left to right, as an instruction to include or (if negated)
+ exclude a set of test platforms. A single spec may expand to multiple
+ test platforms via UNITTEST_PLATFORM_PRETTY_NAMES. If the first test
+ spec is negated, processing begins with the full set of available test
+ platforms; otherwise, processing begins with an empty set of test
+ platforms.
+ """
+
+ # Final results which we will return.
+ tests = []
+
+ cur_test = {}
+ token = ""
+ in_platforms = False
+
+ def normalize_platforms():
+ if "platforms" not in cur_test:
+ return
+ # if the first spec is a negation, start with all platforms
+ if cur_test["platforms"][0][0] == "-":
+ platforms = all_platforms.copy()
+ else:
+ platforms = []
+ for platform in cur_test["platforms"]:
+ if platform[0] == "-":
+ platforms = [p for p in platforms if p != platform[1:]]
+ else:
+ platforms.append(platform)
+ cur_test["platforms"] = platforms
+
+ def add_test(value):
+ normalize_platforms()
+ cur_test["test"] = value.strip()
+ tests.insert(0, cur_test)
+
+ def add_platform(value):
+ platform = value.strip()
+ if platform[0] == "-":
+ negated = True
+ platform = platform[1:]
+ else:
+ negated = False
+ platforms = UNITTEST_PLATFORM_PRETTY_NAMES.get(platform, [platform])
+ if negated:
+ platforms = ["-" + p for p in platforms]
+ cur_test["platforms"] = platforms + cur_test.get("platforms", [])
+
+ # This might be somewhat confusing but we parse the string _backwards_ so
+ # there is no ambiguity over what state we are in.
+ for char in reversed(input_str):
+ # , indicates exiting a state
+ if char == ",":
+ # Exit a particular platform.
+ if in_platforms:
+ add_platform(token)
+
+ # Exit a particular test.
+ else:
+ add_test(token)
+ cur_test = {}
+
+ # Token must always be reset after we exit a state
+ token = ""
+ elif char == "[":
+ # Exiting platform state entering test state.
+ add_platform(token)
+ token = ""
+ in_platforms = False
+ elif char == "]":
+ # Entering platform state.
+ in_platforms = True
+ else:
+ # Accumulator.
+ token = char + token
+
+ # Handle any left over tokens.
+ if token:
+ add_test(token)
+
+ return tests
+
+ def handle_alias(self, test, all_tests):
+ """
+ Expand a test if its name refers to an alias, returning a list of test
+ dictionaries cloned from the first (to maintain any metadata).
+ """
+ if test["test"] not in UNITTEST_ALIASES:
+ return [test]
+
+ alias = UNITTEST_ALIASES[test["test"]]
+
+ def mktest(name):
+ newtest = copy.deepcopy(test)
+ newtest["test"] = name
+ return newtest
+
+ def exprmatch(alias):
+ return [t for t in all_tests if alias(t)]
+
+ return [mktest(t) for t in exprmatch(alias)]
+
+ def parse_test_chunks(self, all_tests, tests):
+ """
+ Test flags may include parameters to narrow down the number of chunks in a
+ given push. We don't model 1 chunk = 1 job in taskcluster so we must check
+ each test flag to see if it is actually specifying a chunk.
+ """
+ results = []
+ seen_chunks = {}
+ for test in tests:
+ matches = TEST_CHUNK_SUFFIX.match(test["test"])
+ if matches:
+ name = matches.group(1)
+ chunk = matches.group(2)
+ if name in seen_chunks:
+ seen_chunks[name].add(chunk)
+ else:
+ seen_chunks[name] = {chunk}
+ test["test"] = name
+ test["only_chunks"] = seen_chunks[name]
+ results.append(test)
+ else:
+ results.extend(self.handle_alias(test, all_tests))
+
+ # uniquify the results over the test names
+ results = sorted(
+ {test["test"]: test for test in results}.values(),
+ key=lambda test: test["test"],
+ )
+ return results
+
+ def find_all_attribute_suffixes(self, graph, prefix):
+ rv = set()
+ for t in graph.tasks.values():
+ for a in t.attributes:
+ if a.startswith(prefix):
+ rv.add(a[len(prefix) :])
+ return sorted(rv)
+
+ def task_matches(self, task):
+ attr = task.attributes.get
+
+ def check_run_on_projects():
+ return {"all"} & set(attr("run_on_projects", []))
+
+ def match_test(try_spec, attr_name):
+ run_by_default = True
+ if attr("build_type") not in self.build_types:
+ return False
+
+ if (
+ self.platforms is not None
+ and attr("build_platform") not in self.platforms
+ ):
+ return False
+ if not check_run_on_projects():
+ run_by_default = False
+
+ if try_spec is None:
+ return run_by_default
+
+ # TODO: optimize this search a bit
+ for test in try_spec:
+ if attr(attr_name) == test["test"]:
+ break
+ else:
+ return False
+
+ if "only_chunks" in test and attr("test_chunk") not in test["only_chunks"]:
+ return False
+
+ tier = task.task["extra"]["treeherder"]["tier"]
+ if "platforms" in test:
+ if "all" in test["platforms"]:
+ return True
+ platform = attr("test_platform", "").split("/")[0]
+ # Platforms can be forced by syntax like "-u xpcshell[Windows 8]"
+ return platform in test["platforms"]
+ if tier != 1:
+ # Run Tier 2/3 tests if their build task is Tier 2/3 OR if there is
+ # no tier 1 test of that name.
+ build_task = self.full_task_graph.tasks[task.dependencies["build"]]
+ build_task_tier = build_task.task["extra"]["treeherder"]["tier"]
+
+ name = attr("unittest_try_name")
+ test_tiers = self.test_tiers.get(name)
+
+ if tier <= build_task_tier:
+ logger.debug(
+ "not skipping tier {} test {} because build task {} "
+ "is tier {}".format(
+ tier, task.label, build_task.label, build_task_tier
+ )
+ )
+ return True
+ if 1 not in test_tiers:
+ logger.debug(
+ "not skipping tier {} test {} without explicit inclusion; "
+ "it is configured to run on tiers {}".format(
+ tier, task.label, test_tiers
+ )
+ )
+ return True
+ logger.debug(
+ "skipping tier {} test {} because build task {} is "
+ "tier {} and there is a higher-tier test of the same name".format(
+ tier, task.label, build_task.label, build_task_tier
+ )
+ )
+ return False
+ if run_by_default:
+ return check_run_on_projects()
+ return False
+
+ if attr("job_try_name"):
+ # Beware the subtle distinction between [] and None for self.jobs and self.platforms.
+ # They will be [] if there was no try syntax, and None if try syntax was detected but
+ # they remained unspecified.
+ if self.jobs is not None:
+ return attr("job_try_name") in self.jobs
+
+ # User specified `-j all`
+ if (
+ self.platforms is not None
+ and attr("build_platform") not in self.platforms
+ ):
+ return False # honor -p for jobs governed by a platform
+ # "all" means "everything with `try` in run_on_projects"
+ return check_run_on_projects()
+ if attr("kind") == "test":
+ return (
+ match_test(self.unittests, "unittest_try_name")
+ or match_test(self.talos, "talos_try_name")
+ or match_test(self.raptor, "raptor_try_name")
+ )
+ if attr("kind") in BUILD_KINDS:
+ if attr("build_type") not in self.build_types:
+ return False
+ if self.platforms is None:
+ # for "-p all", look for try in the 'run_on_projects' attribute
+ return check_run_on_projects()
+ if attr("build_platform") not in self.platforms:
+ return False
+ return True
+ return False
+
+ def __str__(self):
+ def none_for_all(list):
+ if list is None:
+ return "<all>"
+ return ", ".join(str(e) for e in list)
+
+ return "\n".join(
+ [
+ "build_types: " + ", ".join(self.build_types),
+ "platforms: " + none_for_all(self.platforms),
+ "unittests: " + none_for_all(self.unittests),
+ "talos: " + none_for_all(self.talos),
+ "raptor" + none_for_all(self.raptor),
+ "jobs: " + none_for_all(self.jobs),
+ "trigger_tests: " + str(self.trigger_tests),
+ "interactive: " + str(self.interactive),
+ "notifications: " + str(self.notifications),
+ "talos_trigger_tests: " + str(self.talos_trigger_tests),
+ "raptor_trigger_tests: " + str(self.raptor_trigger_tests),
+ "tag: " + str(self.tag),
+ "no_retry: " + str(self.no_retry),
+ ]
+ )
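
End to end, a try message is shlex-split after bracket whitespace is escaped,
and trailing chunk numbers are recognized by TEST_CHUNK_SUFFIX. A sketch of
those two pieces in isolation; the backslash that escape_whitespace_in_brackets
would insert is written directly in the sample message:

    import re
    import shlex

    TEST_CHUNK_SUFFIX = re.compile("(.*)-([0-9]+)$")

    # Bracketed platform restrictions survive splitting because the space is escaped.
    msg = r"try: -b do -p linux64 -u mochitest-bc-1,xpcshell[Windows\ 10]"
    parts = shlex.split(msg)
    assert parts == [
        "try:", "-b", "do", "-p", "linux64", "-u",
        "mochitest-bc-1,xpcshell[Windows 10]",
    ]

    # A trailing "-<digits>" marks a chunk of a suite, not a distinct suite name.
    match = TEST_CHUNK_SUFFIX.match("mochitest-bc-1")
    assert match.groups() == ("mochitest-bc", "1")
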
diff --git a/taskcluster/gecko_taskgraph/util/__init__.py b/taskcluster/gecko_taskgraph/util/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/util/__init__.py
diff --git a/taskcluster/gecko_taskgraph/util/attributes.py b/taskcluster/gecko_taskgraph/util/attributes.py
new file mode 100644
index 0000000000..2d01e9c5e0
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/util/attributes.py
@@ -0,0 +1,147 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import re
+
+INTEGRATION_PROJECTS = {
+ "autoland",
+}
+
+TRUNK_PROJECTS = INTEGRATION_PROJECTS | {"mozilla-central", "comm-central"}
+
+RELEASE_PROJECTS = {
+ "mozilla-central",
+ "mozilla-beta",
+ "mozilla-release",
+ "mozilla-esr115",
+ "comm-central",
+ "comm-beta",
+ "comm-release",
+ "comm-esr115",
+ # bug 1845368: pine is a permanent project branch used for testing
+ # nightly updates
+ "pine",
+ # bug 1877483: larch has similar needs for nightlies
+ "larch",
+}
+
+RELEASE_PROMOTION_PROJECTS = {
+ "jamun",
+ "maple",
+ "try",
+ "try-comm-central",
+} | RELEASE_PROJECTS
+
+TEMPORARY_PROJECTS = set(
+ {
+ # When using a "Disposable Project Branch" you can specify your branch here. e.g.:
+ "oak",
+ }
+)
+
+TRY_PROJECTS = {
+ "try",
+ "try-comm-central",
+}
+
+ALL_PROJECTS = RELEASE_PROMOTION_PROJECTS | TRUNK_PROJECTS | TEMPORARY_PROJECTS
+
+RUN_ON_PROJECT_ALIASES = {
+ # key is alias, value is lambda to test it against
+ "all": lambda project: True,
+ "integration": lambda project: (
+ project in INTEGRATION_PROJECTS or project == "toolchains"
+ ),
+ "release": lambda project: (project in RELEASE_PROJECTS or project == "toolchains"),
+ "trunk": lambda project: (project in TRUNK_PROJECTS or project == "toolchains"),
+ "trunk-only": lambda project: project in TRUNK_PROJECTS,
+ "autoland": lambda project: project in ("autoland", "toolchains"),
+ "autoland-only": lambda project: project == "autoland",
+ "mozilla-central": lambda project: project in ("mozilla-central", "toolchains"),
+ "mozilla-central-only": lambda project: project == "mozilla-central",
+}
+
+_COPYABLE_ATTRIBUTES = (
+ "accepted-mar-channel-ids",
+ "artifact_map",
+ "artifact_prefix",
+ "build_platform",
+ "build_type",
+ "l10n_chunk",
+ "locale",
+ "mar-channel-id",
+ "maven_packages",
+ "nightly",
+ "required_signoffs",
+ "shippable",
+ "shipping_phase",
+ "shipping_product",
+ "signed",
+ "stub-installer",
+ "update-channel",
+)
+
+
+def match_run_on_projects(project, run_on_projects):
+ """Determine whether the given project is included in the `run-on-projects`
+ parameter, applying expansions for things like "integration" mentioned in
+ the attribute documentation."""
+ aliases = RUN_ON_PROJECT_ALIASES.keys()
+ run_aliases = set(aliases) & set(run_on_projects)
+ if run_aliases:
+ if any(RUN_ON_PROJECT_ALIASES[alias](project) for alias in run_aliases):
+ return True
+
+ return project in run_on_projects
+
+
+def match_run_on_hg_branches(hg_branch, run_on_hg_branches):
+ """Determine whether the given project is included in the `run-on-hg-branches`
+ parameter. Allows 'all'."""
+ if "all" in run_on_hg_branches:
+ return True
+
+ for expected_hg_branch_pattern in run_on_hg_branches:
+ if re.match(expected_hg_branch_pattern, hg_branch):
+ return True
+
+ return False
+
+
+def copy_attributes_from_dependent_job(dep_job, denylist=()):
+ return {
+ attr: dep_job.attributes[attr]
+ for attr in _COPYABLE_ATTRIBUTES
+ if attr in dep_job.attributes and attr not in denylist
+ }
+
+
+def sorted_unique_list(*args):
+ """Join one or more lists, and return a sorted list of unique members"""
+ combined = set().union(*args)
+ return sorted(combined)
+
+
+def release_level(project):
+ """
+    Determine whether this project does production or staging releases.
+
+ :return str: One of "production" or "staging".
+ """
+ return "production" if project in RELEASE_PROJECTS else "staging"
+
+
+def is_try(params):
+ """
+ Determine whether this graph is being built on a try project or for
+ `mach try fuzzy`.
+ """
+ return "try" in params["project"] or params["try_mode"] == "try_select"
+
+
+def task_name(task):
+ if task.label.startswith(task.kind + "-"):
+ return task.label[len(task.kind) + 1 :]
+ raise AttributeError(f"Task {task.label} does not have a name.")
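
For example, a task with run-on-projects: ["trunk"] matches autoland and
mozilla-central but not mozilla-beta. A trimmed-down copy of
match_run_on_projects demonstrating the alias check:

    INTEGRATION_PROJECTS = {"autoland"}
    TRUNK_PROJECTS = INTEGRATION_PROJECTS | {"mozilla-central", "comm-central"}
    RUN_ON_PROJECT_ALIASES = {
        "all": lambda project: True,
        "trunk": lambda project: project in TRUNK_PROJECTS or project == "toolchains",
    }

    def match_run_on_projects(project, run_on_projects):
        run_aliases = set(RUN_ON_PROJECT_ALIASES) & set(run_on_projects)
        if any(RUN_ON_PROJECT_ALIASES[alias](project) for alias in run_aliases):
            return True
        return project in run_on_projects

    assert match_run_on_projects("autoland", ["trunk"])
    assert match_run_on_projects("mozilla-central", ["trunk"])
    assert not match_run_on_projects("mozilla-beta", ["trunk"])
    assert match_run_on_projects("mozilla-beta", ["mozilla-beta"])
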
diff --git a/taskcluster/gecko_taskgraph/util/backstop.py b/taskcluster/gecko_taskgraph/util/backstop.py
new file mode 100644
index 0000000000..26c9a4fb91
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/util/backstop.py
@@ -0,0 +1,84 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+from requests import HTTPError
+from taskgraph.util.taskcluster import find_task_id, get_artifact
+
+from gecko_taskgraph.util.attributes import INTEGRATION_PROJECTS, TRY_PROJECTS
+from gecko_taskgraph.util.taskcluster import state_task
+
+BACKSTOP_PUSH_INTERVAL = 20
+BACKSTOP_TIME_INTERVAL = 60 * 4 # minutes
+BACKSTOP_INDEX = "{trust-domain}.v2.{project}.latest.taskgraph.backstop"
+
+
+def is_backstop(
+ params,
+ push_interval=BACKSTOP_PUSH_INTERVAL,
+ time_interval=BACKSTOP_TIME_INTERVAL,
+ trust_domain="gecko",
+ integration_projects=INTEGRATION_PROJECTS,
+):
+ """Determines whether the given parameters represent a backstop push.
+
+ Args:
+        push_interval (int): Number of pushes between backstops.
+        time_interval (int): Minutes between forced schedules.
+            Use 0 to disable.
+        trust_domain (str): "gecko" for Firefox, "comm" for Thunderbird.
+        integration_projects (set): Projects that use the backstop optimization.
+ Returns:
+ bool: True if this is a backstop, otherwise False.
+ """
+ # In case this is being faked on try.
+ if params.get("backstop", False):
+ return True
+
+ project = params["project"]
+ pushid = int(params["pushlog_id"])
+ pushdate = int(params["pushdate"])
+
+ if project in TRY_PROJECTS:
+ return False
+ if project not in integration_projects:
+ return True
+
+    # On every Nth push, we want to run all tasks.
+ if pushid % push_interval == 0:
+ return True
+
+ if time_interval <= 0:
+ return False
+
+ # We also want to ensure we run all tasks at least once per N minutes.
+ subs = {"trust-domain": trust_domain, "project": project}
+ index = BACKSTOP_INDEX.format(**subs)
+
+ try:
+ last_backstop_id = find_task_id(index)
+ except KeyError:
+ # Index wasn't found, implying there hasn't been a backstop push yet.
+ return True
+
+ if state_task(last_backstop_id) in ("failed", "exception"):
+ # If the last backstop failed its decision task, make this a backstop.
+ return True
+
+ try:
+ last_pushdate = get_artifact(last_backstop_id, "public/parameters.yml")[
+ "pushdate"
+ ]
+ except HTTPError as e:
+ # If the last backstop decision task exists in the index, but
+ # parameters.yml isn't available yet, it means the decision task is
+ # still running. If that's the case, we can be pretty sure the time
+ # component will not cause a backstop, so just return False.
+ if e.response.status_code == 404:
+ return False
+ raise
+
+ if (pushdate - last_pushdate) / 60 >= time_interval:
+ return True
+ return False
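
Concretely, with the default intervals a push becomes a backstop when its
pushlog id is a multiple of 20, or when at least four hours have passed since
the last backstop. A worked sketch of just that arithmetic, with the index
and artifact lookups stubbed out:

    BACKSTOP_PUSH_INTERVAL = 20
    BACKSTOP_TIME_INTERVAL = 60 * 4  # minutes

    def is_backstop_sketch(pushid, pushdate, last_backstop_pushdate):
        if pushid % BACKSTOP_PUSH_INTERVAL == 0:
            return True
        # pushdate values are epoch seconds; the interval is in minutes.
        return (pushdate - last_backstop_pushdate) / 60 >= BACKSTOP_TIME_INTERVAL

    assert is_backstop_sketch(pushid=200, pushdate=0, last_backstop_pushdate=0)
    assert not is_backstop_sketch(pushid=201, pushdate=3600, last_backstop_pushdate=0)
    assert is_backstop_sketch(pushid=201, pushdate=4 * 3600, last_backstop_pushdate=0)
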
diff --git a/taskcluster/gecko_taskgraph/util/bugbug.py b/taskcluster/gecko_taskgraph/util/bugbug.py
new file mode 100644
index 0000000000..50e02d69c6
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/util/bugbug.py
@@ -0,0 +1,125 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import json
+import os
+import sys
+import time
+
+import requests
+from mozbuild.util import memoize
+from taskgraph import create
+from taskgraph.util.taskcluster import requests_retry_session
+
+try:
+ # TODO(py3): use time.monotonic()
+ from time import monotonic
+except ImportError:
+ from time import time as monotonic
+
+BUGBUG_BASE_URL = "https://bugbug.herokuapp.com"
+RETRY_TIMEOUT = 9 * 60 # seconds
+RETRY_INTERVAL = 10 # seconds
+
+# Preset confidence thresholds.
+CT_LOW = 0.7
+CT_MEDIUM = 0.8
+CT_HIGH = 0.9
+
+GROUP_TRANSLATIONS = {
+ "testing/web-platform/tests": "",
+ "testing/web-platform/mozilla/tests": "/_mozilla",
+}
+
+
+def translate_group(group):
+ for prefix, value in GROUP_TRANSLATIONS.items():
+ if group.startswith(prefix):
+ return group.replace(prefix, value)
+
+ return group
+
+
+class BugbugTimeoutException(Exception):
+ pass
+
+
+@memoize
+def get_session():
+ s = requests.Session()
+ s.headers.update({"X-API-KEY": "gecko-taskgraph"})
+ return requests_retry_session(retries=5, session=s)
+
+
+def _write_perfherder_data(lower_is_better):
+ if os.environ.get("MOZ_AUTOMATION", "0") == "1":
+ perfherder_data = {
+ "framework": {"name": "build_metrics"},
+ "suites": [
+ {
+ "name": suite,
+ "value": value,
+ "lowerIsBetter": True,
+ "shouldAlert": False,
+ "subtests": [],
+ }
+ for suite, value in lower_is_better.items()
+ ],
+ }
+ print(f"PERFHERDER_DATA: {json.dumps(perfherder_data)}", file=sys.stderr)
+
+
+@memoize
+def push_schedules(branch, rev):
+ # Noop if we're in test-action-callback
+ if create.testing:
+ return
+
+ url = BUGBUG_BASE_URL + "/push/{branch}/{rev}/schedules".format(
+ branch=branch, rev=rev
+ )
+ start = monotonic()
+ session = get_session()
+
+ # On try there is no fallback and pulling is slower, so we allow bugbug more
+ # time to compute the results.
+ # See https://github.com/mozilla/bugbug/issues/1673.
+ timeout = RETRY_TIMEOUT
+ if branch == "try":
+ timeout += int(timeout / 3)
+
+ attempts = timeout / RETRY_INTERVAL
+ i = 0
+ while i < attempts:
+ r = session.get(url)
+ r.raise_for_status()
+
+ if r.status_code != 202:
+ break
+
+ time.sleep(RETRY_INTERVAL)
+ i += 1
+ end = monotonic()
+
+ _write_perfherder_data(
+ lower_is_better={
+ "bugbug_push_schedules_time": end - start,
+ "bugbug_push_schedules_retries": i,
+ }
+ )
+
+ data = r.json()
+ if r.status_code == 202:
+ raise BugbugTimeoutException(f"Timed out waiting for result from '{url}'")
+
+ if "groups" in data:
+ data["groups"] = {translate_group(k): v for k, v in data["groups"].items()}
+
+ if "config_groups" in data:
+ data["config_groups"] = {
+ translate_group(k): v for k, v in data["config_groups"].items()
+ }
+
+ return data
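
translate_group rewrites the source-tree manifest paths bugbug returns into
wpt-style group names. Repeating the function here with a few worked examples:

    GROUP_TRANSLATIONS = {
        "testing/web-platform/tests": "",
        "testing/web-platform/mozilla/tests": "/_mozilla",
    }

    def translate_group(group):
        for prefix, value in GROUP_TRANSLATIONS.items():
            if group.startswith(prefix):
                return group.replace(prefix, value)
        return group

    assert translate_group("testing/web-platform/tests/html/canvas") == "/html/canvas"
    assert translate_group("testing/web-platform/mozilla/tests/webgpu") == "/_mozilla/webgpu"
    # Non-wpt groups pass through unchanged.
    assert translate_group("dom/indexedDB/test/mochitest.toml") == "dom/indexedDB/test/mochitest.toml"
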
diff --git a/taskcluster/gecko_taskgraph/util/cached_tasks.py b/taskcluster/gecko_taskgraph/util/cached_tasks.py
new file mode 100644
index 0000000000..fff9bb9844
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/util/cached_tasks.py
@@ -0,0 +1,82 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import hashlib
+import time
+
+TARGET_CACHE_INDEX = "{trust_domain}.cache.level-{level}.{type}.{name}.hash.{digest}"
+EXTRA_CACHE_INDEXES = [
+ "{trust_domain}.cache.level-{level}.{type}.{name}.latest",
+ "{trust_domain}.cache.level-{level}.{type}.{name}.pushdate.{build_date_long}",
+]
+
+
+def add_optimization(
+ config, taskdesc, cache_type, cache_name, digest=None, digest_data=None
+):
+ """
+ Allow the results of this task to be cached. This adds index routes to the
+ task so it can be looked up for future runs, and optimization hints so that
+ cached artifacts can be found. Exactly one of `digest` and `digest_data`
+ must be passed.
+
+ :param TransformConfig config: The configuration for the kind being transformed.
+ :param dict taskdesc: The description of the current task.
+ :param str cache_type: The type of task result being cached.
+ :param str cache_name: The name of the object being cached.
+    :param digest: A unique string identifying this version of the artifacts
+ being generated. Typically this will be the hash of inputs to the task.
+ :type digest: bytes or None
+ :param digest_data: A list of bytes representing the inputs of this task.
+ They will be concatenated and hashed to create the digest for this
+ task.
+ :type digest_data: list of bytes or None
+ """
+ cached_task = taskdesc.get("attributes", {}).get("cached_task")
+ if cached_task is False:
+ return
+
+ if (digest is None) == (digest_data is None):
+ raise Exception("Must pass exactly one of `digest` and `digest_data`.")
+ if digest is None:
+ digest = hashlib.sha256("\n".join(digest_data).encode("utf-8")).hexdigest()
+
+ subs = {
+ "trust_domain": config.graph_config["trust-domain"],
+ "type": cache_type,
+ "name": cache_name,
+ "digest": digest,
+ }
+
+ # We'll try to find a cached version of the toolchain at levels above
+ # and including the current level, starting at the highest level.
+ index_routes = []
+ for level in reversed(range(int(config.params["level"]), 4)):
+ subs["level"] = level
+ index_routes.append(TARGET_CACHE_INDEX.format(**subs))
+ taskdesc["optimization"] = {"index-search": index_routes}
+
+ # ... and cache at the lowest level.
+ taskdesc.setdefault("routes", []).append(
+ f"index.{TARGET_CACHE_INDEX.format(**subs)}"
+ )
+
+ # ... and add some extra routes for humans
+ subs["build_date_long"] = time.strftime(
+ "%Y.%m.%d.%Y%m%d%H%M%S", time.gmtime(config.params["build_date"])
+ )
+ taskdesc["routes"].extend(
+ [f"index.{route.format(**subs)}" for route in EXTRA_CACHE_INDEXES]
+ )
+
+ taskdesc["attributes"]["cached_task"] = {
+ "type": cache_type,
+ "name": cache_name,
+ "digest": digest,
+ }
+
+ # Allow future pushes to find this task before it completes
+ # Implementation in morphs
+ taskdesc["attributes"]["eager_indexes"] = [TARGET_CACHE_INDEX.format(**subs)]
diff --git a/taskcluster/gecko_taskgraph/util/chunking.py b/taskcluster/gecko_taskgraph/util/chunking.py
new file mode 100644
index 0000000000..a0ed56de78
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/util/chunking.py
@@ -0,0 +1,354 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+"""Utility functions to handle test chunking."""
+
+import json
+import logging
+import os
+from abc import ABCMeta, abstractmethod
+
+from manifestparser import TestManifest
+from manifestparser.filters import chunk_by_runtime, tags
+from mozbuild.util import memoize
+from moztest.resolve import TEST_SUITES, TestManifestLoader, TestResolver
+from taskgraph.util.yaml import load_yaml
+
+from gecko_taskgraph import GECKO
+from gecko_taskgraph.util.bugbug import CT_LOW, BugbugTimeoutException, push_schedules
+
+logger = logging.getLogger(__name__)
+here = os.path.abspath(os.path.dirname(__file__))
+resolver = TestResolver.from_environment(cwd=here, loader_cls=TestManifestLoader)
+
+TEST_VARIANTS = {}
+if os.path.exists(os.path.join(GECKO, "taskcluster", "ci", "test", "variants.yml")):
+ TEST_VARIANTS = load_yaml(GECKO, "taskcluster", "ci", "test", "variants.yml")
+
+WPT_SUBSUITES = {
+ "canvas": "html/canvas",
+ "webgpu": "_mozilla/webgpu",
+ "privatebrowsing": "/service-workers/cache-storage",
+}
+
+
+def guess_mozinfo_from_task(task, repo=""):
+ """Attempt to build a mozinfo dict from a task definition.
+
+ This won't be perfect and many values used in the manifests will be missing. But
+ it should cover most of the major ones and be "good enough" for chunking in the
+ taskgraph.
+
+ Args:
+        task (dict): A task definition.
+        repo (str): The project the push is against; used to guess nightly_build.
+
+ Returns:
+ A dict that can be used as a mozinfo replacement.
+ """
+ setting = task["test-setting"]
+ runtime_keys = setting["runtime"].keys()
+ arch = setting["platform"]["arch"]
+ p_os = setting["platform"]["os"]
+
+ info = {
+ "asan": setting["build"].get("asan", False),
+ "bits": 32 if "32" in arch else 64,
+ "ccov": setting["build"].get("ccov", False),
+ "debug": setting["build"]["type"] in ("debug", "debug-isolated-process"),
+ "tsan": setting["build"].get("tsan", False),
+ "nightly_build": repo in ["mozilla-central", "autoland", "try", ""], # trunk
+ }
+
+ for platform in ("android", "linux", "mac", "win"):
+ if p_os["name"].startswith(platform):
+ info["os"] = platform
+ break
+ else:
+ raise ValueError("{} is not a known platform!".format(p_os["name"]))
+
+ # crashreporter is disabled for asan / tsan builds
+ if info["asan"] or info["tsan"]:
+ info["crashreporter"] = False
+ else:
+ info["crashreporter"] = True
+
+ info["appname"] = "fennec" if info["os"] == "android" else "firefox"
+
+ # guess processor
+ if arch == "aarch64":
+ info["processor"] = "aarch64"
+ elif info["os"] == "android" and "arm" in arch:
+ info["processor"] = "arm"
+ elif info["bits"] == 32:
+ info["processor"] = "x86"
+ else:
+ info["processor"] = "x86_64"
+
+ # guess toolkit
+ if info["os"] == "android":
+ info["toolkit"] = "android"
+ elif info["os"] == "win":
+ info["toolkit"] = "windows"
+ elif info["os"] == "mac":
+ info["toolkit"] = "cocoa"
+ else:
+ info["toolkit"] = "gtk"
+
+ # guess os_version
+ os_versions = {
+ ("linux", "1804"): "18.04",
+ ("macosx", "1015"): "10.15",
+ ("macosx", "1100"): "11.00",
+ ("windows", "7"): "6.1",
+ ("windows", "10"): "10.0",
+ }
+ for (name, old_ver), new_ver in os_versions.items():
+ if p_os["name"] == name and p_os["version"] == old_ver:
+ info["os_version"] = new_ver
+ break
+
+ for variant in TEST_VARIANTS:
+ tag = TEST_VARIANTS[variant].get("mozinfo", "")
+ if tag == "":
+ continue
+
+ value = variant in runtime_keys
+
+ if variant == "1proc":
+ value = not value
+ elif "fission" in variant:
+ value = any(
+ "1proc" not in key or "no-fission" not in key for key in runtime_keys
+ )
+ if "no-fission" not in variant:
+ value = not value
+ elif tag == "xorigin":
+ value = any("xorigin" in key for key in runtime_keys)
+
+ info[tag] = value
+
+    # wpt expresses subsuites such as canvas and webgpu as tags; let's find those
+ for tag in WPT_SUBSUITES.keys():
+ if tag in task["test-name"]:
+ info[tag] = True
+ else:
+ info[tag] = False
+ return info
+
+
+@memoize
+def get_runtimes(platform, suite_name):
+ if not suite_name or not platform:
+ raise TypeError("suite_name and platform cannot be empty.")
+
+ base = os.path.join(GECKO, "testing", "runtimes", "manifest-runtimes-{}.json")
+ for key in ("android", "windows"):
+ if key in platform:
+ path = base.format(key)
+ break
+ else:
+ path = base.format("unix")
+
+ if not os.path.exists(path):
+ raise OSError(f"manifest runtime file at {path} not found.")
+
+ with open(path) as fh:
+ return json.load(fh)[suite_name]
+
+
+def chunk_manifests(suite, platform, chunks, manifests):
+ """Run the chunking algorithm.
+
+ Args:
+        suite (str): Name of the suite being chunked.
+        platform (str): Platform used to find runtime info.
+ chunks (int): Number of chunks to split manifests into.
+ manifests(list): Manifests to chunk.
+
+ Returns:
+ A list of length `chunks` where each item contains a list of manifests
+ that run in that chunk.
+ """
+    ini_manifests = {x.replace(".toml", ".ini") for x in manifests}
+
+ if "web-platform-tests" not in suite:
+ runtimes = {
+ k: v for k, v in get_runtimes(platform, suite).items() if k in ini_manifests
+ }
+ retVal = []
+ for c in chunk_by_runtime(None, chunks, runtimes).get_chunked_manifests(
+ ini_manifests
+ ):
+ retVal.append(
+ [m if m in manifests else m.replace(".ini", ".toml") for m in c[1]]
+            )
+        return retVal
+
+    # Keep track of test paths for each chunk.
+ chunked_manifests = [[] for _ in range(chunks)]
+
+ # Spread out the test manifests evenly across all chunks.
+ for index, key in enumerate(sorted(manifests)):
+ chunked_manifests[index % chunks].append(key)
+
+ # One last sort by the number of manifests. Chunk size should be more or less
+ # equal in size.
+ chunked_manifests.sort(key=lambda x: len(x))
+
+ # Return just the chunked test paths.
+ return chunked_manifests
+
+
+class BaseManifestLoader(metaclass=ABCMeta):
+ def __init__(self, params):
+ self.params = params
+
+ @abstractmethod
+ def get_manifests(self, flavor, subsuite, mozinfo):
+ """Compute which manifests should run for the given flavor, subsuite and mozinfo.
+
+ This function returns skipped manifests separately so that more balanced
+ chunks can be achieved by only considering "active" manifests in the
+ chunking algorithm.
+
+ Args:
+ flavor (str): The suite to run. Values are defined by the 'build_flavor' key
+ in `moztest.resolve.TEST_SUITES`.
+ subsuite (str): The subsuite to run or 'undefined' to denote no subsuite.
+ mozinfo (frozenset): Set of data in the form of (<key>, <value>) used
+ for filtering.
+
+ Returns:
+            A tuple of two manifest lists. The first is the set of active manifests (which
+            will run at least one test). The second is a list of skipped manifests (all
+            tests are skipped).
+ """
+
+
+class DefaultLoader(BaseManifestLoader):
+ """Load manifests using metadata from the TestResolver."""
+
+ @memoize
+ def get_tests(self, suite):
+ suite_definition = TEST_SUITES[suite]
+ return list(
+ resolver.resolve_tests(
+ flavor=suite_definition["build_flavor"],
+ subsuite=suite_definition.get("kwargs", {}).get(
+ "subsuite", "undefined"
+ ),
+ )
+ )
+
+ @memoize
+ def get_manifests(self, suite, mozinfo):
+ mozinfo = dict(mozinfo)
+ # Compute all tests for the given suite/subsuite.
+ tests = self.get_tests(suite)
+
+        # TODO: the only exception here is that we schedule webgpu, as that is a --tag
+ if "web-platform-tests" in suite:
+ manifests = set()
+ subsuite = [x for x in WPT_SUBSUITES.keys() if mozinfo[x]]
+ for t in tests:
+ if subsuite:
+ # add specific directories
+ if WPT_SUBSUITES[subsuite[0]] in t["manifest"]:
+ manifests.add(t["manifest"])
+ else:
+ if any(x in t["manifest"] for x in WPT_SUBSUITES.values()):
+ continue
+ manifests.add(t["manifest"])
+ return {
+ "active": list(manifests),
+ "skipped": [],
+ "other_dirs": dict.fromkeys(manifests, ""),
+ }
+
+ manifests = {chunk_by_runtime.get_manifest(t) for t in tests}
+
+ filters = None
+ if mozinfo["condprof"]:
+ filters = [tags(["condprof"])]
+
+ # Compute the active tests.
+ m = TestManifest()
+ m.tests = tests
+ tests = m.active_tests(disabled=False, exists=False, filters=filters, **mozinfo)
+ active = {}
+ # map manifests and 'other' directories included
+ for t in tests:
+ mp = chunk_by_runtime.get_manifest(t)
+ active.setdefault(mp, [])
+
+ if not mp.startswith(t["dir_relpath"]):
+ active[mp].append(t["dir_relpath"])
+
+ skipped = manifests - set(active.keys())
+ other = {}
+ for m in active:
+ if len(active[m]) > 0:
+ other[m] = list(set(active[m]))
+ return {
+ "active": list(active.keys()),
+ "skipped": list(skipped),
+ "other_dirs": other,
+ }
+
+
+class BugbugLoader(DefaultLoader):
+ """Load manifests using metadata from the TestResolver, and then
+ filter them based on a query to bugbug."""
+
+ CONFIDENCE_THRESHOLD = CT_LOW
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self.timedout = False
+
+ @memoize
+ def get_manifests(self, suite, mozinfo):
+ manifests = super().get_manifests(suite, mozinfo)
+
+ # Don't prune any manifests if we're on a backstop push or there was a timeout.
+ if self.params["backstop"] or self.timedout:
+ return manifests
+
+ try:
+ data = push_schedules(self.params["project"], self.params["head_rev"])
+ except BugbugTimeoutException:
+ logger.warning("Timed out waiting for bugbug, loading all test manifests.")
+ self.timedout = True
+ return self.get_manifests(suite, mozinfo)
+
+ bugbug_manifests = {
+ m
+ for m, c in data.get("groups", {}).items()
+ if c >= self.CONFIDENCE_THRESHOLD
+ }
+
+ manifests["active"] = list(set(manifests["active"]) & bugbug_manifests)
+ manifests["skipped"] = list(set(manifests["skipped"]) & bugbug_manifests)
+ return manifests
+
+
+manifest_loaders = {
+ "bugbug": BugbugLoader,
+ "default": DefaultLoader,
+}
+
+_loader_cache = {}
+
+
+def get_manifest_loader(name, params):
+ # Ensure we never create more than one instance of the same loader type for
+ # performance reasons.
+ if name in _loader_cache:
+ return _loader_cache[name]
+
+ loader = manifest_loaders[name](dict(params))
+ _loader_cache[name] = loader
+ return loader
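
For web-platform-tests, where no per-manifest runtime data is used, manifests
are spread round-robin and the resulting chunks are ordered by size. A sketch
of that fallback:

    def round_robin_chunks(manifests, chunks):
        chunked = [[] for _ in range(chunks)]
        for index, key in enumerate(sorted(manifests)):
            chunked[index % chunks].append(key)
        chunked.sort(key=len)  # smaller chunks first, sizes differ by at most one
        return chunked

    assert round_robin_chunks(["e", "a", "c", "b", "d"], 2) == [["b", "d"], ["a", "c", "e"]]
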
diff --git a/taskcluster/gecko_taskgraph/util/copy_task.py b/taskcluster/gecko_taskgraph/util/copy_task.py
new file mode 100644
index 0000000000..0aaf43361e
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/util/copy_task.py
@@ -0,0 +1,40 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from mozbuild.util import ReadOnlyDict
+from taskgraph.task import Task
+
+immutable_types = {int, float, bool, str, type(None), ReadOnlyDict}
+
+
+def copy_task(obj):
+ """
+ Perform a deep copy of a task that has a tree-like structure.
+
+    Unlike copy.deepcopy, this does *not* support copying graph-like
+    structures, but it is more efficient than deepcopy for the tree-like
+    structures it does support.
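+
+    Example (an illustrative sketch; containers are copied while immutable
+    leaves are shared)::
+
+        original = {"payload": {"env": {"FOO": "1"}}, "routes": ["index.a"]}
+        clone = copy_task(original)
+        assert clone == original and clone is not original
+        assert clone["payload"] is not original["payload"]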
+ """
+ ty = type(obj)
+ if ty in immutable_types:
+ return obj
+ if ty is dict:
+ return {k: copy_task(v) for k, v in obj.items()}
+ if ty is list:
+ return [copy_task(elt) for elt in obj]
+ if ty is Task:
+ task = Task(
+ kind=copy_task(obj.kind),
+ label=copy_task(obj.label),
+ attributes=copy_task(obj.attributes),
+ task=copy_task(obj.task),
+ description=copy_task(obj.description),
+ optimization=copy_task(obj.optimization),
+ dependencies=copy_task(obj.dependencies),
+ soft_dependencies=copy_task(obj.soft_dependencies),
+ if_dependencies=copy_task(obj.if_dependencies),
+ )
+ if obj.task_id:
+ task.task_id = obj.task_id
+ return task
+ raise NotImplementedError(f"copying '{ty}' from '{obj}'")
diff --git a/taskcluster/gecko_taskgraph/util/declarative_artifacts.py b/taskcluster/gecko_taskgraph/util/declarative_artifacts.py
new file mode 100644
index 0000000000..24689ae94c
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/util/declarative_artifacts.py
@@ -0,0 +1,92 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import re
+
+from gecko_taskgraph.util.scriptworker import (
+ generate_beetmover_artifact_map,
+ generate_beetmover_upstream_artifacts,
+)
+
+_ARTIFACT_ID_PER_PLATFORM = {
+ "android-aarch64-opt": "{package}-default-omni-arm64-v8a",
+ "android-arm-opt": "{package}-default-omni-armeabi-v7a",
+ "android-x86-opt": "{package}-default-omni-x86",
+ "android-x86_64-opt": "{package}-default-omni-x86_64",
+ "android-geckoview-fat-aar-opt": "{package}-default",
+ "android-aarch64-shippable": "{package}{update_channel}-omni-arm64-v8a",
+ "android-aarch64-shippable-lite": "{package}{update_channel}-arm64-v8a",
+ "android-arm-shippable": "{package}{update_channel}-omni-armeabi-v7a",
+ "android-arm-shippable-lite": "{package}{update_channel}-armeabi-v7a",
+ "android-x86-shippable": "{package}{update_channel}-omni-x86",
+ "android-x86-shippable-lite": "{package}{update_channel}-x86",
+ "android-x86_64-shippable": "{package}{update_channel}-omni-x86_64",
+ "android-x86_64-shippable-lite": "{package}{update_channel}-x86_64",
+ "android-geckoview-fat-aar-shippable": "{package}{update_channel}-omni",
+ "android-geckoview-fat-aar-shippable-lite": "{package}{update_channel}",
+}
+
+
+def get_geckoview_artifact_map(config, job):
+ return generate_beetmover_artifact_map(
+ config,
+ job,
+ **get_geckoview_template_vars(
+ config,
+ job["attributes"]["build_platform"],
+ job["maven-package"],
+ job["attributes"].get("update-channel"),
+ ),
+ )
+
+
+def get_geckoview_upstream_artifacts(config, job, package, platform=""):
+ if not platform:
+ platform = job["attributes"]["build_platform"]
+ upstream_artifacts = generate_beetmover_upstream_artifacts(
+ config,
+ job,
+ platform="",
+ **get_geckoview_template_vars(
+ config, platform, package, job["attributes"].get("update-channel")
+ ),
+ )
+ return [
+ {key: value for key, value in upstream_artifact.items() if key != "locale"}
+ for upstream_artifact in upstream_artifacts
+ ]
+
+
+def get_geckoview_template_vars(config, platform, package, update_channel):
+    version_groups = re.match(r"(\d+)\.(\d+).*", config.params["version"])
+    if version_groups:
+        major_version, minor_version = version_groups.groups()
+    else:
+        raise ValueError(f"Unable to parse version '{config.params['version']}'")
+
+ return {
+ "artifact_id": get_geckoview_artifact_id(
+ config,
+ platform,
+ package,
+ update_channel,
+ ),
+ "build_date": config.params["moz_build_date"],
+ "major_version": major_version,
+ "minor_version": minor_version,
+ }
+
+
+def get_geckoview_artifact_id(config, platform, package, update_channel=None):
+ if update_channel == "release":
+ update_channel = ""
+ elif update_channel is not None:
+ update_channel = f"-{update_channel}"
+ else:
+        # For shippable builds, mozharness defaults to using
+        # "nightly-{project}" for the update channel. For other builds, the
+        # update channel is not set, and their artifact id templates contain
+        # no {update_channel} placeholder to substitute.
+ update_channel = "-nightly-{}".format(config.params["project"])
+ return _ARTIFACT_ID_PER_PLATFORM[platform].format(
+ update_channel=update_channel, package=package
+ )
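+
+
+# Illustrative worked examples ("geckoview" is a hypothetical package name and
+# ``config`` is the transform config, only consulted when no update channel is
+# given):
+#
+#     get_geckoview_artifact_id(config, "android-x86-opt", "geckoview")
+#     # -> "geckoview-default-omni-x86"
+#     get_geckoview_artifact_id(config, "android-arm-shippable", "geckoview", "beta")
+#     # -> "geckoview-beta-omni-armeabi-v7a"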
diff --git a/taskcluster/gecko_taskgraph/util/dependencies.py b/taskcluster/gecko_taskgraph/util/dependencies.py
new file mode 100644
index 0000000000..bf747926d8
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/util/dependencies.py
@@ -0,0 +1,156 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from taskgraph.util.dependencies import group_by
+
+
+def skip_only_or_not(config, task):
+ """Return True if we should skip this task based on `only_` or `not_` config."""
+ only_platforms = config.get("only-for-build-platforms")
+ not_platforms = config.get("not-for-build-platforms")
+ only_attributes = config.get("only-for-attributes")
+ not_attributes = config.get("not-for-attributes")
+ task_attrs = task.attributes
+ if only_platforms or not_platforms:
+ platform = task_attrs.get("build_platform")
+ build_type = task_attrs.get("build_type")
+ if not platform or not build_type:
+ return True
+ combined_platform = f"{platform}/{build_type}"
+ if only_platforms and combined_platform not in only_platforms:
+ return True
+ if not_platforms and combined_platform in not_platforms:
+ return True
+ if only_attributes:
+ if not set(only_attributes) & set(task_attrs):
+            # skip unless the task has at least one of the attributes
+ return True
+ if not_attributes:
+ if set(not_attributes) & set(task_attrs):
+ return True
+ return False
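+
+
+# Illustrative kind config consumed above (the keys follow the schema checked
+# in skip_only_or_not; the values are hypothetical):
+#
+#     only-for-build-platforms:
+#         - linux64-shippable/opt
+#     not-for-attributes:
+#         - nightly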
+
+
+@group_by("single-with-filters")
+def single_grouping(config, tasks):
+ for task in tasks:
+ if skip_only_or_not(config.config, task):
+ continue
+ yield [task]
+
+
+@group_by("platform")
+def platform_grouping(config, tasks):
+ groups = {}
+ for task in tasks:
+ if task.kind not in config.config.get("kind-dependencies", []):
+ continue
+ if skip_only_or_not(config.config, task):
+ continue
+ platform = task.attributes.get("build_platform")
+ build_type = task.attributes.get("build_type")
+ product = task.attributes.get(
+ "shipping_product", task.task.get("shipping-product")
+ )
+
+ groups.setdefault((platform, build_type, product), []).append(task)
+ return groups.values()
+
+
+@group_by("single-locale")
+def single_locale_grouping(config, tasks):
+ """Split by a single locale (but also by platform, build-type, product)
+
+ The locale can be `None` (en-US build/signing/repackage), a single locale,
+ or multiple locales per task, e.g. for l10n chunking. In the case of a task
+ with, say, five locales, the task will show up in all five locale groupings.
+
+ This grouping is written for non-partner-repack beetmover, but might also
+ be useful elsewhere.
+
+ """
+ groups = {}
+
+ for task in tasks:
+ if task.kind not in config.config.get("kind-dependencies", []):
+ continue
+ if skip_only_or_not(config.config, task):
+ continue
+ platform = task.attributes.get("build_platform")
+ build_type = task.attributes.get("build_type")
+ product = task.attributes.get(
+ "shipping_product", task.task.get("shipping-product")
+ )
+ task_locale = task.attributes.get("locale")
+ chunk_locales = task.attributes.get("chunk_locales")
+ locales = chunk_locales or [task_locale]
+
+ for locale in locales:
+ locale_key = (platform, build_type, product, locale)
+ groups.setdefault(locale_key, [])
+ if task not in groups[locale_key]:
+ groups[locale_key].append(task)
+
+ return groups.values()
+
+
+@group_by("chunk-locales")
+def chunk_locale_grouping(config, tasks):
+ """Split by a chunk_locale (but also by platform, build-type, product)
+
+ This grouping is written for mac signing with notarization, but might also
+ be useful elsewhere.
+
+ """
+ groups = {}
+
+ for task in tasks:
+ if task.kind not in config.config.get("kind-dependencies", []):
+ continue
+ if skip_only_or_not(config.config, task):
+ continue
+ platform = task.attributes.get("build_platform")
+ build_type = task.attributes.get("build_type")
+ product = task.attributes.get(
+ "shipping_product", task.task.get("shipping-product")
+ )
+ chunk_locales = tuple(sorted(task.attributes.get("chunk_locales", [])))
+
+ chunk_locale_key = (platform, build_type, product, chunk_locales)
+ groups.setdefault(chunk_locale_key, [])
+ if task not in groups[chunk_locale_key]:
+ groups[chunk_locale_key].append(task)
+
+ return groups.values()
+
+
+@group_by("partner-repack-ids")
+def partner_repack_ids_grouping(config, tasks):
+ """Split by partner_repack_ids (but also by platform, build-type, product)
+
+ This grouping is written for release-{eme-free,partner}-repack-signing.
+
+ """
+ groups = {}
+
+ for task in tasks:
+ if task.kind not in config.config.get("kind-dependencies", []):
+ continue
+ if skip_only_or_not(config.config, task):
+ continue
+ platform = task.attributes.get("build_platform")
+ build_type = task.attributes.get("build_type")
+ product = task.attributes.get(
+ "shipping_product", task.task.get("shipping-product")
+ )
+ partner_repack_ids = tuple(
+ sorted(task.task.get("extra", {}).get("repack_ids", []))
+ )
+
+ partner_repack_ids_key = (platform, build_type, product, partner_repack_ids)
+ groups.setdefault(partner_repack_ids_key, [])
+ if task not in groups[partner_repack_ids_key]:
+ groups[partner_repack_ids_key].append(task)
+
+ return groups.values()
diff --git a/taskcluster/gecko_taskgraph/util/docker.py b/taskcluster/gecko_taskgraph/util/docker.py
new file mode 100644
index 0000000000..e8de7d1fdb
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/util/docker.py
@@ -0,0 +1,333 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import hashlib
+import json
+import os
+import re
+import sys
+from collections.abc import Mapping
+from urllib.parse import quote, urlencode, urlunparse
+
+import requests
+import requests_unixsocket
+from mozbuild.util import memoize
+from mozpack.archive import create_tar_gz_from_files
+from mozpack.files import GeneratedFile
+from taskgraph.util.yaml import load_yaml
+
+from .. import GECKO
+
+IMAGE_DIR = os.path.join(GECKO, "taskcluster", "docker")
+
+
+def docker_url(path, **kwargs):
+ docker_socket = os.environ.get("DOCKER_SOCKET", "/var/run/docker.sock")
+ return urlunparse(
+ ("http+unix", quote(docker_socket, safe=""), path, "", urlencode(kwargs), "")
+ )
+
+
+def post_to_docker(tar, api_path, **kwargs):
+ """POSTs a tar file to a given docker API path.
+
+ The tar argument can be anything that can be passed to requests.post()
+ as data (e.g. iterator or file object).
+ The extra keyword arguments are passed as arguments to the docker API.
+ """
+ # requests-unixsocket doesn't honor requests timeouts
+ # See https://github.com/msabramo/requests-unixsocket/issues/44
+ # We have some large docker images that trigger the default timeout,
+ # so we increase the requests-unixsocket timeout here.
+ session = requests.Session()
+ session.mount(
+ requests_unixsocket.DEFAULT_SCHEME,
+ requests_unixsocket.UnixAdapter(timeout=120),
+ )
+ req = session.post(
+ docker_url(api_path, **kwargs),
+ data=tar,
+ stream=True,
+ headers={"Content-Type": "application/x-tar"},
+ )
+ if req.status_code != 200:
+ message = req.json().get("message")
+ if not message:
+ message = f"docker API returned HTTP code {req.status_code}"
+ raise Exception(message)
+ status_line = {}
+
+ buf = b""
+ for content in req.iter_content(chunk_size=None):
+ if not content:
+ continue
+        # Sometimes a chunk of content is not complete JSON, so we accumulate
+        # it with leftovers from previous iterations.
+ buf += content
+ try:
+ data = json.loads(buf)
+ except Exception:
+ continue
+ buf = b""
+ # data is sometimes an empty dict.
+ if not data:
+ continue
+        # Mimic how docker itself presents the output. This code was tested
+ # with API version 1.18 and 1.26.
+ if "status" in data:
+ if "id" in data:
+ if sys.stderr.isatty():
+ total_lines = len(status_line)
+ line = status_line.setdefault(data["id"], total_lines)
+ n = total_lines - line
+ if n > 0:
+ # Move the cursor up n lines.
+ sys.stderr.write(f"\033[{n}A")
+ # Clear line and move the cursor to the beginning of it.
+ sys.stderr.write("\033[2K\r")
+ sys.stderr.write(
+ "{}: {} {}\n".format(
+ data["id"], data["status"], data.get("progress", "")
+ )
+ )
+ if n > 1:
+ # Move the cursor down n - 1 lines, which, considering
+ # the carriage return on the last write, gets us back
+ # where we started.
+ sys.stderr.write(f"\033[{n - 1}B")
+ else:
+ status = status_line.get(data["id"])
+ # Only print status changes.
+ if status != data["status"]:
+ sys.stderr.write("{}: {}\n".format(data["id"], data["status"]))
+ status_line[data["id"]] = data["status"]
+ else:
+ status_line = {}
+ sys.stderr.write("{}\n".format(data["status"]))
+ elif "stream" in data:
+ sys.stderr.write(data["stream"])
+ elif "aux" in data:
+ sys.stderr.write(repr(data["aux"]))
+ elif "error" in data:
+ sys.stderr.write("{}\n".format(data["error"]))
+ # Sadly, docker doesn't give more than a plain string for errors,
+ # so the best we can do to propagate the error code from the command
+ # that failed is to parse the error message...
+ errcode = 1
+ m = re.search(r"returned a non-zero code: (\d+)", data["error"])
+ if m:
+ errcode = int(m.group(1))
+ sys.exit(errcode)
+ else:
+ raise NotImplementedError(repr(data))
+ sys.stderr.flush()
+
+
+def docker_image(name, by_tag=False):
+ """
+ Resolve in-tree prebuilt docker image to ``<registry>/<repository>@sha256:<digest>``,
+ or ``<registry>/<repository>:<tag>`` if `by_tag` is `True`.
+ """
+ try:
+ with open(os.path.join(IMAGE_DIR, name, "REGISTRY")) as f:
+ registry = f.read().strip()
+ except OSError:
+ with open(os.path.join(IMAGE_DIR, "REGISTRY")) as f:
+ registry = f.read().strip()
+
+ if not by_tag:
+ hashfile = os.path.join(IMAGE_DIR, name, "HASH")
+ try:
+ with open(hashfile) as f:
+ return f"{registry}/{name}@{f.read().strip()}"
+ except OSError:
+ raise Exception(f"Failed to read HASH file {hashfile}")
+
+ try:
+ with open(os.path.join(IMAGE_DIR, name, "VERSION")) as f:
+ tag = f.read().strip()
+ except OSError:
+ tag = "latest"
+ return f"{registry}/{name}:{tag}"
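+
+
+# Illustrative resolution (the registry and digest values are hypothetical):
+# with taskcluster/docker/REGISTRY containing "example-registry" and
+# taskcluster/docker/foo/HASH containing "sha256:abc123...",
+# docker_image("foo") returns "example-registry/foo@sha256:abc123...",
+# while docker_image("foo", by_tag=True) falls back to
+# "example-registry/foo:latest" when no VERSION file exists.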
+
+
+class VoidWriter:
+ """A file object with write capabilities that does nothing with the written
+ data."""
+
+ def write(self, buf):
+ pass
+
+
+def generate_context_hash(topsrcdir, image_path, image_name, args):
+ """Generates a sha256 hash for context directory used to build an image."""
+
+ return stream_context_tar(
+ topsrcdir, image_path, VoidWriter(), image_name, args=args
+ )
+
+
+class HashingWriter:
+ """A file object with write capabilities that hashes the written data at
+ the same time it passes down to a real file object."""
+
+ def __init__(self, writer):
+ self._hash = hashlib.sha256()
+ self._writer = writer
+
+ def write(self, buf):
+ self._hash.update(buf)
+ self._writer.write(buf)
+
+ def hexdigest(self):
+ return self._hash.hexdigest()
+
+
+def create_context_tar(topsrcdir, context_dir, out_path, image_name, args):
+ """Create a context tarball.
+
+ A directory ``context_dir`` containing a Dockerfile will be assembled into
+ a gzipped tar file at ``out_path``.
+
+ We also scan the source Dockerfile for special syntax that influences
+ context generation.
+
+ If a line in the Dockerfile has the form ``# %include <path>``,
+ the relative path specified on that line will be matched against
+ files in the source repository and added to the context under the
+ path ``topsrcdir/``. If an entry is a directory, we add all files
+ under that directory.
+
+ Returns the SHA-256 hex digest of the created archive.
+ """
+ with open(out_path, "wb") as fh:
+ return stream_context_tar(
+ topsrcdir,
+ context_dir,
+ fh,
+ image_name=image_name,
+ args=args,
+ )
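+
+
+# Illustrative Dockerfile using the special include syntax described above
+# (the included path is an example of an in-tree file):
+#
+#     FROM ubuntu:22.04
+#     # %include taskcluster/scripts/run-task
+#     ADD topsrcdir/taskcluster/scripts/run-task /usr/local/bin/run-task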
+
+
+def stream_context_tar(topsrcdir, context_dir, out_file, image_name, args):
+ """Like create_context_tar, but streams the tar file to the `out_file` file
+ object."""
+ archive_files = {}
+ content = []
+
+ context_dir = os.path.join(topsrcdir, context_dir)
+
+ for root, dirs, files in os.walk(context_dir):
+ for f in files:
+ source_path = os.path.join(root, f)
+ archive_path = source_path[len(context_dir) + 1 :]
+ archive_files[archive_path] = source_path
+
+ # Parse Dockerfile for special syntax of extra files to include.
+ with open(os.path.join(context_dir, "Dockerfile"), "r") as fh:
+ for line in fh:
+ content.append(line)
+
+ if not line.startswith("# %include"):
+ continue
+
+ p = line[len("# %include ") :].strip()
+ if os.path.isabs(p):
+ raise Exception("extra include path cannot be absolute: %s" % p)
+
+ fs_path = os.path.normpath(os.path.join(topsrcdir, p))
+ # Check for filesystem traversal exploits.
+ if not fs_path.startswith(topsrcdir):
+ raise Exception("extra include path outside topsrcdir: %s" % p)
+
+ if not os.path.exists(fs_path):
+ raise Exception("extra include path does not exist: %s" % p)
+
+ if os.path.isdir(fs_path):
+ for root, dirs, files in os.walk(fs_path):
+ for f in files:
+ source_path = os.path.join(root, f)
+ rel = source_path[len(fs_path) + 1 :]
+ archive_path = os.path.join("topsrcdir", p, rel)
+ archive_files[archive_path] = source_path
+ else:
+ archive_path = os.path.join("topsrcdir", p)
+ archive_files[archive_path] = fs_path
+
+ archive_files["Dockerfile"] = GeneratedFile("".join(content).encode("utf-8"))
+
+ writer = HashingWriter(out_file)
+ create_tar_gz_from_files(writer, archive_files, f"{image_name}.tar")
+ return writer.hexdigest()
+
+
+class ImagePathsMap(Mapping):
+ """ImagePathsMap contains the mapping of Docker image names to their
+ context location in the filesystem. The register function allows Thunderbird
+ to define additional images under comm/taskcluster.
+ """
+
+ def __init__(self, config_path, image_dir=IMAGE_DIR):
+ config = load_yaml(GECKO, config_path)
+ self.__update_image_paths(config["jobs"], image_dir)
+
+ def __getitem__(self, key):
+ return self.__dict__[key]
+
+ def __iter__(self):
+ return iter(self.__dict__)
+
+ def __len__(self):
+ return len(self.__dict__)
+
+ def __update_image_paths(self, jobs, image_dir):
+ self.__dict__.update(
+ {
+ k: os.path.join(image_dir, v.get("definition", k))
+ for k, v in jobs.items()
+ }
+ )
+
+ def register(self, jobs_config_path, image_dir):
+ """Register additional image_paths. In this case, there is no 'jobs'
+ key in the loaded YAML as this file is loaded via jobs-from in kind.yml."""
+ jobs = load_yaml(GECKO, jobs_config_path)
+ self.__update_image_paths(jobs, image_dir)
+
+
+image_paths = ImagePathsMap("taskcluster/ci/docker-image/kind.yml")
+
+
+def image_path(name):
+ if name in image_paths:
+ return image_paths[name]
+ return os.path.join(IMAGE_DIR, name)
+
+
+@memoize
+def parse_volumes(image):
+ """Parse VOLUME entries from a Dockerfile for an image."""
+ volumes = set()
+
+ path = image_path(image)
+
+ with open(os.path.join(path, "Dockerfile"), "rb") as fh:
+ for line in fh:
+ line = line.strip()
+ # We assume VOLUME definitions don't use ARGS.
+ if not line.startswith(b"VOLUME "):
+ continue
+
+ v = line.split(None, 1)[1]
+ if v.startswith(b"["):
+ raise ValueError(
+ "cannot parse array syntax for VOLUME; "
+ "convert to multiple entries"
+ )
+
+ volumes |= {v.decode("utf-8") for v in v.split()}
+
+ return volumes
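+
+
+# Illustrative parse (the Dockerfile line is hypothetical):
+#
+#     VOLUME /builds/worker/checkouts /builds/worker/workspace
+#
+# yields {"/builds/worker/checkouts", "/builds/worker/workspace"}; the JSON
+# array form (VOLUME ["..."]) is rejected above.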
diff --git a/taskcluster/gecko_taskgraph/util/hash.py b/taskcluster/gecko_taskgraph/util/hash.py
new file mode 100644
index 0000000000..485c9a7c48
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/util/hash.py
@@ -0,0 +1,68 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import hashlib
+
+import mozpack.path as mozpath
+from mozbuild.util import memoize
+from mozversioncontrol import get_repository_object
+
+
+@memoize
+def hash_path(path):
+ """Hash a single file.
+
+ Returns the SHA-256 hash in hex form.
+ """
+ with open(path, mode="rb") as fh:
+ return hashlib.sha256(fh.read()).hexdigest()
+
+
+@memoize
+def get_file_finder(base_path):
+ from pathlib import Path
+
+ repo = get_repository_object(base_path)
+    if repo:
+        # Return the tracked-files finder if one is available, else None.
+        return repo.get_tracked_files_finder(base_path) or None
+    return get_repository_object(Path(base_path)).get_tracked_files_finder(base_path)
+
+
+def hash_paths(base_path, patterns):
+ """
+    Given a list of path patterns, return a digest of the contents of all
+ the corresponding files, similarly to git tree objects or mercurial
+ manifests.
+
+ Each file is hashed. The list of all hashes and file paths is then
+ itself hashed to produce the result.
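+
+    Example (illustrative; the base path and pattern are hypothetical, and
+    patterns use mozpack match syntax)::
+
+        digest = hash_paths("/path/to/gecko", ["taskcluster/docker/**"])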
+ """
+ finder = get_file_finder(base_path)
+ h = hashlib.sha256()
+ files = {}
+ if finder:
+ for pattern in patterns:
+ found = list(finder.find(pattern))
+ if found:
+ files.update(found)
+ else:
+ raise Exception("%s did not match anything" % pattern)
+ for path in sorted(files.keys()):
+ if path.endswith((".pyc", ".pyd", ".pyo")):
+ continue
+ h.update(
+ "{} {}\n".format(
+ hash_path(mozpath.abspath(mozpath.join(base_path, path))),
+ mozpath.normsep(path),
+ ).encode("utf-8")
+ )
+
+ return h.hexdigest()
diff --git a/taskcluster/gecko_taskgraph/util/hg.py b/taskcluster/gecko_taskgraph/util/hg.py
new file mode 100644
index 0000000000..18a92fbd0d
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/util/hg.py
@@ -0,0 +1,139 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import logging
+import subprocess
+
+import requests
+from mozbuild.util import memoize
+from redo import retry
+
+logger = logging.getLogger(__name__)
+
+PUSHLOG_CHANGESET_TMPL = (
+ "{repository}/json-pushes?version=2&changeset={revision}&tipsonly=1"
+)
+PUSHLOG_PUSHES_TMPL = (
+ "{repository}/json-pushes/?version=2&startID={push_id_start}&endID={push_id_end}"
+)
+
+
+def _query_pushlog(url):
+ response = retry(
+ requests.get,
+ attempts=5,
+ sleeptime=10,
+ args=(url,),
+ kwargs={"timeout": 60, "headers": {"User-Agent": "TaskCluster"}},
+ )
+
+ return response.json()["pushes"]
+
+
+def find_hg_revision_push_info(repository, revision):
+ """Given the parameters for this action and a revision, find the
+ pushlog_id of the revision."""
+ url = PUSHLOG_CHANGESET_TMPL.format(repository=repository, revision=revision)
+
+ pushes = _query_pushlog(url)
+
+ if len(pushes) != 1:
+ raise RuntimeError(
+ "Found {} pushlog_ids, expected 1, for {} revision {}: {}".format(
+ len(pushes), repository, revision, pushes
+ )
+ )
+
+ pushid = list(pushes.keys())[0]
+ return {
+ "pushdate": pushes[pushid]["date"],
+ "pushid": pushid,
+ "user": pushes[pushid]["user"],
+ }
+
+
+@memoize
+def get_push_data(repository, project, push_id_start, push_id_end):
+ url = PUSHLOG_PUSHES_TMPL.format(
+ repository=repository,
+ push_id_start=push_id_start - 1,
+ push_id_end=push_id_end,
+ )
+
+ try:
+ pushes = _query_pushlog(url)
+
+ return {
+ push_id: pushes[str(push_id)]
+ for push_id in range(push_id_start, push_id_end + 1)
+ }
+
+    # In the event that a request times out, requests will raise a Timeout exception.
+ except requests.exceptions.Timeout:
+ logger.warning("json-pushes timeout")
+
+ # In the event of a network problem (e.g. DNS failure, refused connection, etc),
+ # requests will raise a ConnectionError.
+ except requests.exceptions.ConnectionError:
+ logger.warning("json-pushes connection error")
+
+    # In the event of a rare invalid HTTP response (e.g. 404, 401),
+    # requests will raise an HTTPError exception.
+ except requests.exceptions.HTTPError:
+ logger.warning("Bad Http response")
+
+ # When we get invalid JSON (i.e. 500 error), it results in a ValueError (bug 1313426)
+ except ValueError as error:
+ logger.warning(f"Invalid JSON, possible server error: {error}")
+
+    # Log any other request exception as a warning if it wasn't caught above.
+ except requests.exceptions.RequestException as error:
+ logger.warning(error)
+
+ return None
+
+
+@memoize
+def get_json_automationrelevance(repository, revision):
+ url = "{}/json-automationrelevance/{}".format(repository.rstrip("/"), revision)
+ logger.debug("Querying version control for metadata: %s", url)
+
+ def get_automationrelevance():
+ response = requests.get(url, timeout=30)
+ return response.json()
+
+ return retry(get_automationrelevance, attempts=10, sleeptime=10)
+
+
+def get_hg_revision_branch(root, revision):
+ """Given the parameters for a revision, find the hg_branch (aka
+ relbranch) of the revision."""
+ return subprocess.check_output(
+ [
+ "hg",
+ "identify",
+ "-T",
+ "{branch}",
+ "--rev",
+ revision,
+ ],
+ cwd=root,
+ universal_newlines=True,
+ )
+
+
+# For these functions, we assume that run-task has correctly checked out the
+# revision indicated by GECKO_HEAD_REF, so all that remains is to see what the
+# current revision is. Mercurial refers to that as `.`.
+def get_hg_commit_message(root, rev="."):
+ return subprocess.check_output(
+ ["hg", "log", "-r", rev, "-T", "{desc}"], cwd=root, universal_newlines=True
+ )
+
+
+def calculate_head_rev(root):
+ return subprocess.check_output(
+ ["hg", "log", "-r", ".", "-T", "{node}"], cwd=root, universal_newlines=True
+ )
diff --git a/taskcluster/gecko_taskgraph/util/partials.py b/taskcluster/gecko_taskgraph/util/partials.py
new file mode 100644
index 0000000000..1a3affcc42
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/util/partials.py
@@ -0,0 +1,297 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import logging
+
+import redo
+import requests
+
+from gecko_taskgraph.util.scriptworker import (
+ BALROG_SCOPE_ALIAS_TO_PROJECT,
+ BALROG_SERVER_SCOPES,
+)
+
+logger = logging.getLogger(__name__)
+
+PLATFORM_RENAMES = {
+ "windows2012-32": "win32",
+ "windows2012-64": "win64",
+ "windows2012-aarch64": "win64-aarch64",
+ "osx-cross": "macosx64",
+ "osx": "macosx64",
+}
+
+BALROG_PLATFORM_MAP = {
+ "linux": ["Linux_x86-gcc3"],
+ "linux32": ["Linux_x86-gcc3"],
+ "linux64": ["Linux_x86_64-gcc3"],
+ "linux64-asan-reporter": ["Linux_x86_64-gcc3-asan"],
+ "macosx64": [
+ "Darwin_x86_64-gcc3-u-i386-x86_64",
+ "Darwin_x86-gcc3-u-i386-x86_64",
+ "Darwin_aarch64-gcc3",
+ "Darwin_x86-gcc3",
+ "Darwin_x86_64-gcc3",
+ ],
+ "win32": ["WINNT_x86-msvc", "WINNT_x86-msvc-x86", "WINNT_x86-msvc-x64"],
+ "win64": ["WINNT_x86_64-msvc", "WINNT_x86_64-msvc-x64"],
+ "win64-asan-reporter": ["WINNT_x86_64-msvc-x64-asan"],
+ "win64-aarch64": [
+ "WINNT_aarch64-msvc-aarch64",
+ ],
+}
+
+FTP_PLATFORM_MAP = {
+ "Darwin_x86-gcc3": "mac",
+ "Darwin_x86-gcc3-u-i386-x86_64": "mac",
+ "Darwin_x86_64-gcc3": "mac",
+ "Darwin_x86_64-gcc3-u-i386-x86_64": "mac",
+ "Darwin_aarch64-gcc3": "mac",
+ "Linux_x86-gcc3": "linux-i686",
+ "Linux_x86_64-gcc3": "linux-x86_64",
+ "Linux_x86_64-gcc3-asan": "linux-x86_64-asan-reporter",
+ "WINNT_x86_64-msvc-x64-asan": "win64-asan-reporter",
+ "WINNT_x86-msvc": "win32",
+ "WINNT_x86-msvc-x64": "win32",
+ "WINNT_x86-msvc-x86": "win32",
+ "WINNT_x86_64-msvc": "win64",
+ "WINNT_x86_64-msvc-x64": "win64",
+ "WINNT_aarch64-msvc-aarch64": "win64-aarch64",
+}
+
+
+def get_balrog_platform_name(platform):
+ """Convert build platform names into balrog platform names.
+
+    Strip known suffixes rather than enumerating every platform, so that
+    aarch64 and other platforms added later are still handled.
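+
+    Example (illustrative)::
+
+        get_balrog_platform_name("windows2012-64-shippable")  # -> "win64"
+        get_balrog_platform_name("linux64-devedition")        # -> "linux64"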
+ """
+ removals = ["-devedition", "-shippable"]
+ for remove in removals:
+ platform = platform.replace(remove, "")
+ return PLATFORM_RENAMES.get(platform, platform)
+
+
+def _sanitize_platform(platform):
+ platform = get_balrog_platform_name(platform)
+ if platform not in BALROG_PLATFORM_MAP:
+ return platform
+ return BALROG_PLATFORM_MAP[platform][0]
+
+
+def get_builds(release_history, platform, locale):
+ """Examine cached balrog release history and return the list of
+ builds we need to generate diffs from"""
+ platform = _sanitize_platform(platform)
+ return release_history.get(platform, {}).get(locale, {})
+
+
+def get_partials_artifacts_from_params(release_history, platform, locale):
+ platform = _sanitize_platform(platform)
+ return [
+ (artifact, details.get("previousVersion", None))
+ for artifact, details in release_history.get(platform, {})
+ .get(locale, {})
+ .items()
+ ]
+
+
+def get_partials_info_from_params(release_history, platform, locale):
+ platform = _sanitize_platform(platform)
+
+ artifact_map = {}
+ for k in release_history.get(platform, {}).get(locale, {}):
+ details = release_history[platform][locale][k]
+ attributes = ("buildid", "previousBuildNumber", "previousVersion")
+ artifact_map[k] = {
+ attr: details[attr] for attr in attributes if attr in details
+ }
+ return artifact_map
+
+
+def _retry_on_http_errors(url, verify, params, errors):
+ if params:
+ params_str = "&".join("=".join([k, str(v)]) for k, v in params.items())
+ else:
+ params_str = ""
+ logger.info("Connecting to %s?%s", url, params_str)
+ for _ in redo.retrier(sleeptime=5, max_sleeptime=30, attempts=10):
+ try:
+ req = requests.get(url, verify=verify, params=params, timeout=10)
+ req.raise_for_status()
+ return req
+ except requests.HTTPError as e:
+ if e.response.status_code in errors:
+ logger.exception(
+ "Got HTTP %s trying to reach %s", e.response.status_code, url
+ )
+ else:
+ raise
+ else:
+ raise Exception(f"Cannot connect to {url}!")
+
+
+def get_sorted_releases(product, branch):
+ """Returns a list of release names from Balrog.
+ :param product: product name, AKA appName
+ :param branch: branch name, e.g. mozilla-central
+ :return: a sorted list of release names, most recent first.
+ """
+ url = f"{_get_balrog_api_root(branch)}/releases"
+ params = {
+ "product": product,
+ # Adding -nightly-2 (2 stands for the beginning of build ID
+ # based on date) should filter out release and latest blobs.
+ # This should be changed to -nightly-3 in 3000 ;)
+ "name_prefix": f"{product}-{branch}-nightly-2",
+ "names_only": True,
+ }
+ req = _retry_on_http_errors(url=url, verify=True, params=params, errors=[500])
+ releases = req.json()["names"]
+ releases = sorted(releases, reverse=True)
+ return releases
+
+
+def get_release_builds(release, branch):
+ url = f"{_get_balrog_api_root(branch)}/releases/{release}"
+ req = _retry_on_http_errors(url=url, verify=True, params=None, errors=[500])
+ return req.json()
+
+
+def _get_balrog_api_root(branch):
+    # Query the scopes scriptworker uses to make sure we check against the
+    # same balrog server that our jobs would use.
+ scope = None
+ for alias, projects in BALROG_SCOPE_ALIAS_TO_PROJECT:
+ if branch in projects and alias in BALROG_SERVER_SCOPES:
+ scope = BALROG_SERVER_SCOPES[alias]
+ break
+ else:
+ scope = BALROG_SERVER_SCOPES["default"]
+
+ if scope == "balrog:server:dep":
+ return "https://stage.balrog.nonprod.cloudops.mozgcp.net/api/v1"
+ return "https://aus5.mozilla.org/api/v1"
+
+
+def find_localtest(fileUrls):
+ for channel in fileUrls:
+ if "-localtest" in channel:
+ return channel
+
+
+def populate_release_history(
+ product, branch, maxbuilds=4, maxsearch=10, partial_updates=None
+):
+    # Assume we are on a release branch when the list of previous releases is
+    # known in advance.
+ if partial_updates is not None:
+ return _populate_release_history(
+ product, branch, partial_updates=partial_updates
+ )
+ return _populate_nightly_history(
+ product, branch, maxbuilds=maxbuilds, maxsearch=maxsearch
+ )
+
+
+def _populate_nightly_history(product, branch, maxbuilds=4, maxsearch=10):
+ """Find relevant releases in Balrog
+ Not all releases have all platforms and locales, due
+ to Taskcluster migration.
+
+ Args:
+ product (str): capitalized product name, AKA appName, e.g. Firefox
+ branch (str): branch name (mozilla-central)
+ maxbuilds (int): Maximum number of historical releases to populate
+ maxsearch(int): Traverse at most this many releases, to avoid
+ working through the entire history.
+ Returns:
+ json object based on data from balrog api
+
+ results = {
+ 'platform1': {
+ 'locale1': {
+ 'buildid1': mar_url,
+ 'buildid2': mar_url,
+ 'buildid3': mar_url,
+ },
+ 'locale2': {
+ 'target.partial-1.mar': {'buildid1': 'mar_url'},
+ }
+ },
+ 'platform2': {
+ }
+ }
+ """
+ last_releases = get_sorted_releases(product, branch)
+
+ partial_mar_tmpl = "target.partial-{}.mar"
+
+ builds = dict()
+ for release in last_releases[:maxsearch]:
+ # maxbuilds in all categories, don't make any more queries
+ full = len(builds) > 0 and all(
+ len(builds[platform][locale]) >= maxbuilds
+ for platform in builds
+ for locale in builds[platform]
+ )
+ if full:
+ break
+ history = get_release_builds(release, branch)
+
+ for platform in history["platforms"]:
+ if "alias" in history["platforms"][platform]:
+ continue
+ if platform not in builds:
+ builds[platform] = dict()
+ for locale in history["platforms"][platform]["locales"]:
+ if locale not in builds[platform]:
+ builds[platform][locale] = dict()
+ if len(builds[platform][locale]) >= maxbuilds:
+ continue
+ buildid = history["platforms"][platform]["locales"][locale]["buildID"]
+ url = history["platforms"][platform]["locales"][locale]["completes"][0][
+ "fileUrl"
+ ]
+ nextkey = len(builds[platform][locale]) + 1
+ builds[platform][locale][partial_mar_tmpl.format(nextkey)] = {
+ "buildid": buildid,
+ "mar_url": url,
+ }
+ return builds
+
+
+def _populate_release_history(product, branch, partial_updates):
+ builds = dict()
+ for version, release in partial_updates.items():
+ prev_release_blob = "{product}-{version}-build{build_number}".format(
+ product=product, version=version, build_number=release["buildNumber"]
+ )
+ partial_mar_key = f"target-{version}.partial.mar"
+ history = get_release_builds(prev_release_blob, branch)
+ # use one of the localtest channels to avoid relying on bouncer
+ localtest = find_localtest(history["fileUrls"])
+ url_pattern = history["fileUrls"][localtest]["completes"]["*"]
+
+ for platform in history["platforms"]:
+ if "alias" in history["platforms"][platform]:
+ continue
+ if platform not in builds:
+ builds[platform] = dict()
+ for locale in history["platforms"][platform]["locales"]:
+ if locale not in builds[platform]:
+ builds[platform][locale] = dict()
+ buildid = history["platforms"][platform]["locales"][locale]["buildID"]
+ url = url_pattern.replace(
+ "%OS_FTP%", FTP_PLATFORM_MAP[platform]
+ ).replace("%LOCALE%", locale)
+ builds[platform][locale][partial_mar_key] = {
+ "buildid": buildid,
+ "mar_url": url,
+ "previousVersion": version,
+ "previousBuildNumber": release["buildNumber"],
+ "product": product,
+ }
+ return builds
diff --git a/taskcluster/gecko_taskgraph/util/partners.py b/taskcluster/gecko_taskgraph/util/partners.py
new file mode 100644
index 0000000000..2546e1ae88
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/util/partners.py
@@ -0,0 +1,555 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import json
+import logging
+import os
+import xml.etree.ElementTree as ET
+from urllib.parse import urlencode
+
+import requests
+import yaml
+from redo import retry
+from taskgraph.util.schema import resolve_keyed_by
+
+from gecko_taskgraph.util.attributes import release_level
+from gecko_taskgraph.util.copy_task import copy_task
+
+# Suppress chatty requests logging
+logging.getLogger("requests").setLevel(logging.WARNING)
+
+log = logging.getLogger(__name__)
+
+GITHUB_API_ENDPOINT = "https://api.github.com/graphql"
+
+"""
+LOGIN_QUERY, MANIFEST_QUERY, and REPACK_CFG_QUERY are all written against the Github v4 API,
+which uses GraphQL. See https://developer.github.com/v4/
+"""
+
+LOGIN_QUERY = """query {
+ viewer {
+ login
+ name
+ }
+}
+"""
+
+# Returns the contents of default.xml from a manifest repository
+MANIFEST_QUERY = """query {
+ repository(owner:"%(owner)s", name:"%(repo)s") {
+ object(expression: "master:%(file)s") {
+ ... on Blob {
+ text
+ }
+ }
+ }
+}
+"""
+# Example response:
+# {
+# "data": {
+# "repository": {
+# "object": {
+# "text": "<?xml version=\"1.0\" ?>\n<manifest>\n " +
+# "<remote fetch=\"git@github.com:mozilla-partners/\" name=\"mozilla-partners\"/>\n " +
+# "<remote fetch=\"git@github.com:mozilla/\" name=\"mozilla\"/>\n\n " +
+# "<project name=\"repack-scripts\" path=\"scripts\" remote=\"mozilla-partners\" " +
+# "revision=\"master\"/>\n <project name=\"build-tools\" path=\"scripts/tools\" " +
+# "remote=\"mozilla\" revision=\"master\"/>\n <project name=\"mozilla-EME-free\" " +
+# "path=\"partners/mozilla-EME-free\" remote=\"mozilla-partners\" " +
+# "revision=\"master\"/>\n</manifest>\n"
+# }
+# }
+# }
+# }
+
+# Returns the contents of desktop/*/repack.cfg for a partner repository
+REPACK_CFG_QUERY = """query{
+ repository(owner:"%(owner)s", name:"%(repo)s") {
+ object(expression: "%(revision)s:desktop/"){
+ ... on Tree {
+ entries {
+ name
+ object {
+ ... on Tree {
+ entries {
+ name
+ object {
+ ... on Blob {
+ text
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
+"""
+# Example response:
+# {
+# "data": {
+# "repository": {
+# "object": {
+# "entries": [
+# {
+# "name": "mozilla-EME-free",
+# "object": {
+# "entries": [
+# {
+# "name": "distribution",
+# "object": {}
+# },
+# {
+# "name": "repack.cfg",
+# "object": {
+# "text": "aus=\"mozilla-EMEfree\"\ndist_id=\"mozilla-EMEfree\"\n" +
+# "dist_version=\"1.0\"\nlinux-i686=true\nlinux-x86_64=true\n" +
+# " locales=\"ach af de en-US\"\nmac=true\nwin32=true\nwin64=true\n" +
+# "output_dir=\"%(platform)s-EME-free/%(locale)s\"\n\n" +
+# "# Upload params\nbucket=\"net-mozaws-prod-delivery-firefox\"\n" +
+# "upload_to_candidates=true\n"
+# }
+# }
+# ]
+# }
+# }
+# ]
+# }
+# }
+# }
+# }
+
+# Map platforms in repack.cfg into their equivalents in taskcluster
+TC_PLATFORM_PER_FTP = {
+ "linux-i686": "linux-shippable",
+ "linux-x86_64": "linux64-shippable",
+ "mac": "macosx64-shippable",
+ "win32": "win32-shippable",
+ "win64": "win64-shippable",
+ "win64-aarch64": "win64-aarch64-shippable",
+}
+
+TASKCLUSTER_PROXY_SECRET_ROOT = "http://taskcluster/secrets/v1/secret"
+
+LOCALES_FILE = os.path.join(
+ os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(__file__)))),
+ "browser",
+ "locales",
+ "l10n-changesets.json",
+)
+
+# cache data at the module level
+partner_configs = {}
+
+
+def get_token(params):
+    """We use a Personal Access Token from Github to look up partner config. No extra scopes are
+    needed on the token to read public repositories, but the 'repo' scope is needed to see
+    private repositories. This is not fine-grained and also grants r/w access, but it can be
+    revoked at the repo level.
+ """
+
+ # Allow for local taskgraph debugging
+ if os.environ.get("GITHUB_API_TOKEN"):
+ return os.environ["GITHUB_API_TOKEN"]
+
+ # The 'usual' method - via taskClusterProxy for decision tasks
+ url = "{secret_root}/project/releng/gecko/build/level-{level}/partner-github-api".format(
+ secret_root=TASKCLUSTER_PROXY_SECRET_ROOT, **params
+ )
+ try:
+ resp = retry(
+ requests.get,
+ attempts=2,
+ sleeptime=10,
+ args=(url,),
+ kwargs={"timeout": 60, "headers": ""},
+ )
+ j = resp.json()
+ return j["secret"]["key"]
+ except (requests.ConnectionError, ValueError, KeyError):
+ raise RuntimeError("Could not get Github API token to lookup partner data")
+
+
+def query_api(query, token):
+ """Make a query with a Github auth header, returning the json"""
+ headers = {"Authorization": "bearer %s" % token}
+ r = requests.post(GITHUB_API_ENDPOINT, json={"query": query}, headers=headers)
+ r.raise_for_status()
+
+ j = r.json()
+ if "errors" in j:
+        raise RuntimeError("Github query error - %s" % j["errors"])
+ return j
+
+
+def check_login(token):
+ log.debug("Checking we have a valid login")
+ query_api(LOGIN_QUERY, token)
+
+
+def get_repo_params(repo):
+ """Parse the organisation and repo name from an https or git url for a repo"""
+ if repo.startswith("https"):
+ # eg https://github.com/mozilla-partners/mozilla-EME-free
+ return repo.rsplit("/", 2)[-2:]
+ if repo.startswith("git@"):
+ # eg git@github.com:mozilla-partners/mailru.git
+ repo = repo.replace(".git", "")
+ return repo.split(":")[-1].split("/")
+
+
+def get_partners(manifestRepo, token):
+ """Given the url to a manifest repository, retrieve the default.xml and parse it into a
+ list of partner repos.
+ """
+ log.debug("Querying for manifest default.xml in %s", manifestRepo)
+ owner, repo = get_repo_params(manifestRepo)
+ query = MANIFEST_QUERY % {"owner": owner, "repo": repo, "file": "default.xml"}
+ raw_manifest = query_api(query, token)
+ log.debug("Raw manifest: %s", raw_manifest)
+ if not raw_manifest["data"]["repository"]:
+ raise RuntimeError(
+            "Couldn't load partner manifest at %s, insufficient permissions?"
+ % manifestRepo
+ )
+ e = ET.fromstring(raw_manifest["data"]["repository"]["object"]["text"])
+
+ remotes = {}
+ partners = {}
+ for child in e:
+ if child.tag == "remote":
+ name = child.attrib["name"]
+ url = child.attrib["fetch"]
+ remotes[name] = url
+ log.debug("Added remote %s at %s", name, url)
+ elif child.tag == "project":
+ # we don't need to check any code repos
+ if "scripts" in child.attrib["path"]:
+ continue
+ owner, _ = get_repo_params(remotes[child.attrib["remote"]] + "_")
+ partner_url = {
+ "owner": owner,
+ "repo": child.attrib["name"],
+ "revision": child.attrib["revision"],
+ }
+ partners[child.attrib["name"]] = partner_url
+ log.debug(
+ "Added partner %s at revision %s"
+ % (partner_url["repo"], partner_url["revision"])
+ )
+ return partners
+
+
+def parse_config(data):
+ """Parse a single repack.cfg file into a python dictionary.
+    data is the contents of the file, in "foo=bar\nbaz=buzz" style. We translate the
+    locales and platforms data; everything else passes through unchanged.
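+
+    Example (illustrative; the values are hypothetical)::
+
+        parse_config('locales="en-US de"\nwin64=true\naus="acme"')
+        # -> {"platforms": ["win64-shippable"], "locales": ["en-US", "de"]}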
+ """
+ ALLOWED_KEYS = (
+ "locales",
+ "platforms",
+ "upload_to_candidates",
+ "repack_stub_installer",
+ "publish_to_releases",
+ )
+ config = {"platforms": []}
+ for l in data.splitlines():
+ if "=" in l:
+ l = str(l)
+ key, value = l.split("=", 1)
+ value = value.strip("'\"").rstrip("'\"")
+ if key in TC_PLATFORM_PER_FTP.keys():
+ if value.lower() == "true":
+ config["platforms"].append(TC_PLATFORM_PER_FTP[key])
+ continue
+ if key not in ALLOWED_KEYS:
+ continue
+ if key == "locales":
+ # a list please
+ value = value.split(" ")
+ config[key] = value
+ return config
+
+
+def get_repack_configs(repackRepo, token):
+ """For a partner repository, retrieve all the repack.cfg files and parse them into a dict"""
+ log.debug("Querying for configs in %s", repackRepo)
+ query = REPACK_CFG_QUERY % repackRepo
+ raw_configs = query_api(query, token)
+ raw_configs = raw_configs["data"]["repository"]["object"]["entries"]
+
+ configs = {}
+ for sub_config in raw_configs:
+ name = sub_config["name"]
+ for file in sub_config["object"].get("entries", []):
+ if file["name"] != "repack.cfg":
+ continue
+ configs[name] = parse_config(file["object"]["text"])
+ return configs
+
+
+def get_attribution_config(manifestRepo, token):
+ log.debug("Querying for manifest attribution_config.yml in %s", manifestRepo)
+ owner, repo = get_repo_params(manifestRepo)
+ query = MANIFEST_QUERY % {
+ "owner": owner,
+ "repo": repo,
+ "file": "attribution_config.yml",
+ }
+ raw_manifest = query_api(query, token)
+ if not raw_manifest["data"]["repository"]:
+ raise RuntimeError(
+            "Couldn't load partner manifest at %s, insufficient permissions?"
+ % manifestRepo
+ )
+ # no file has been set up, gracefully continue
+ if raw_manifest["data"]["repository"]["object"] is None:
+ log.debug("No attribution_config.yml file found")
+ return {}
+
+ return yaml.safe_load(raw_manifest["data"]["repository"]["object"]["text"])
+
+
+def get_partner_config_by_url(manifest_url, kind, token, partner_subset=None):
+ """Retrieve partner data starting from the manifest url, which points to a repository
+    containing a default.xml that is intended to drive the Google tool 'repo'. It
+    descends into each partner repo to look up and parse the repack.cfg file(s).
+
+ If partner_subset is a list of sub_config names only return data for those.
+
+ Supports caching data by kind to avoid repeated requests, relying on the related kinds for
+ partner repacking, signing, repackage, repackage signing all having the same kind prefix.
+ """
+ if not manifest_url:
+ raise RuntimeError(f"Manifest url for {kind} not defined")
+ if kind not in partner_configs:
+ log.info("Looking up data for %s from %s", kind, manifest_url)
+ check_login(token)
+ if kind == "release-partner-attribution":
+ partner_configs[kind] = get_attribution_config(manifest_url, token)
+ else:
+ partners = get_partners(manifest_url, token)
+
+ partner_configs[kind] = {}
+ for partner, partner_url in partners.items():
+ if partner_subset and partner not in partner_subset:
+ continue
+ partner_configs[kind][partner] = get_repack_configs(partner_url, token)
+
+ return partner_configs[kind]
+
+
+def check_if_partners_enabled(config, tasks):
+ if (
+ (
+ config.params["release_enable_partner_repack"]
+ and config.kind.startswith("release-partner-repack")
+ )
+ or (
+ config.params["release_enable_partner_attribution"]
+ and config.kind.startswith("release-partner-attribution")
+ )
+ or (
+ config.params["release_enable_emefree"]
+ and config.kind.startswith("release-eme-free-")
+ )
+ ):
+ yield from tasks
+
+
+def get_partner_config_by_kind(config, kind):
+ """Retrieve partner data starting from the manifest url, which points to a repository
+    containing a default.xml that is intended to drive the Google tool 'repo'. It
+    descends into each partner repo to look up and parse the repack.cfg file(s).
+
+ Supports caching data by kind to avoid repeated requests, relying on the related kinds for
+ partner repacking, signing, repackage, repackage signing all having the same kind prefix.
+ """
+ partner_subset = config.params["release_partners"]
+ partner_configs = config.params["release_partner_config"] or {}
+
+ # TODO eme-free should be a partner; we shouldn't care about per-kind
+ for k in partner_configs:
+ if kind.startswith(k):
+ kind_config = partner_configs[k]
+ break
+ else:
+ return {}
+ # if we're only interested in a subset of partners we remove the rest
+ if partner_subset:
+ if kind.startswith("release-partner-repack"):
+ # TODO - should be fatal to have an unknown partner in partner_subset
+ for partner in [p for p in kind_config.keys() if p not in partner_subset]:
+ del kind_config[partner]
+ elif kind.startswith("release-partner-attribution") and isinstance(
+ kind_config, dict
+ ):
+ all_configs = copy_task(kind_config.get("configs", []))
+ kind_config["configs"] = []
+ for this_config in all_configs:
+ if this_config["campaign"] in partner_subset:
+ kind_config["configs"].append(this_config)
+ return kind_config
+
+
+def _fix_subpartner_locales(orig_config, all_locales):
+ subpartner_config = copy_task(orig_config)
+ # Get an ordered list of subpartner locales that is a subset of all_locales
+ subpartner_config["locales"] = sorted(
+ list(set(orig_config["locales"]) & set(all_locales))
+ )
+ return subpartner_config
+
+
+def fix_partner_config(orig_config):
+ pc = {}
+ with open(LOCALES_FILE) as fh:
+ all_locales = list(json.load(fh).keys())
+ # l10n-changesets.json doesn't include en-US, but the repack list does
+ if "en-US" not in all_locales:
+ all_locales.append("en-US")
+ for kind, kind_config in orig_config.items():
+ if kind == "release-partner-attribution":
+ pc[kind] = {}
+ if kind_config:
+ pc[kind] = {"defaults": kind_config["defaults"]}
+ for config in kind_config["configs"]:
+ # Make sure our locale list is a subset of all_locales
+ pc[kind].setdefault("configs", []).append(
+ _fix_subpartner_locales(config, all_locales)
+ )
+ else:
+ for partner, partner_config in kind_config.items():
+ for subpartner, subpartner_config in partner_config.items():
+ # get rid of empty subpartner configs
+ if not subpartner_config:
+ continue
+ # Make sure our locale list is a subset of all_locales
+ pc.setdefault(kind, {}).setdefault(partner, {})[
+ subpartner
+ ] = _fix_subpartner_locales(subpartner_config, all_locales)
+ return pc
+
+
+# seems likely this exists elsewhere already
+def get_ftp_platform(platform):
+ if platform.startswith("win32"):
+ return "win32"
+ if platform.startswith("win64-aarch64"):
+ return "win64-aarch64"
+ if platform.startswith("win64"):
+ return "win64"
+ if platform.startswith("macosx"):
+ return "mac"
+ if platform.startswith("linux-"):
+ return "linux-i686"
+ if platform.startswith("linux64"):
+ return "linux-x86_64"
+ raise ValueError(f"Unimplemented platform {platform}")
+
+
+# Ugh
+def locales_per_build_platform(build_platform, locales):
+ if build_platform.startswith("mac"):
+ exclude = ["ja"]
+ else:
+ exclude = ["ja-JP-mac"]
+ return [locale for locale in locales if locale not in exclude]
+
+
+def get_partner_url_config(parameters, graph_config):
+ partner_url_config = copy_task(graph_config["partner-urls"])
+ substitutions = {
+ "release-product": parameters["release_product"],
+ "release-level": release_level(parameters["project"]),
+ "release-type": parameters["release_type"],
+ }
+ resolve_keyed_by(
+ partner_url_config,
+ "release-eme-free-repack",
+ "eme-free manifest_url",
+ **substitutions,
+ )
+ resolve_keyed_by(
+ partner_url_config,
+ "release-partner-repack",
+ "partner manifest url",
+ **substitutions,
+ )
+ resolve_keyed_by(
+ partner_url_config,
+ "release-partner-attribution",
+ "partner attribution url",
+ **substitutions,
+ )
+ return partner_url_config
+
+
+def get_repack_ids_by_platform(config, build_platform):
+ partner_config = get_partner_config_by_kind(config, config.kind)
+ combinations = []
+ for partner, subconfigs in partner_config.items():
+ for sub_config_name, sub_config in subconfigs.items():
+ if build_platform not in sub_config.get("platforms", []):
+ continue
+ locales = locales_per_build_platform(
+ build_platform, sub_config.get("locales", [])
+ )
+ for locale in locales:
+ combinations.append(f"{partner}/{sub_config_name}/{locale}")
+ return sorted(combinations)
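+
+
+# Illustrative shape of the repack ids produced above (the partner,
+# sub-config and locale names are hypothetical):
+#
+#     ["acme/acme-chrome/de", "acme/acme-chrome/en-US"]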
+
+
+def get_partners_to_be_published(config):
+ # hardcoded kind because release-bouncer-aliases doesn't match otherwise
+ partner_config = get_partner_config_by_kind(config, "release-partner-repack")
+ partners = []
+ for partner, subconfigs in partner_config.items():
+ for sub_config_name, sub_config in subconfigs.items():
+ if sub_config.get("publish_to_releases"):
+ partners.append((partner, sub_config_name, sub_config["platforms"]))
+ return partners
+
+
+def apply_partner_priority(config, jobs):
+ priority = None
+    # Reduce the priority of partner repack jobs because they don't block QE.
+    # Meanwhile, leave EME-free jobs alone because they do block QE, and
+    # they'll get the branch priority like the rest of the release. Only
+    # bother with this in production, not on staging releases on try.
+    # "medium" is the same priority as mozilla-central (see
+    # taskcluster/ci/config.yml), i.e. higher than integration branches,
+    # because we don't want to wait long for the graph to finish; when
+    # multiple releases are in flight, the partner tasks always wait for the
+    # non-partner ones.
+ if (
+ config.kind.startswith(
+ ("release-partner-repack", "release-partner-attribution")
+ )
+ and release_level(config.params["project"]) == "production"
+ ):
+ priority = "medium"
+ for job in jobs:
+ if priority:
+ job["priority"] = priority
+ yield job
+
+
+def generate_attribution_code(defaults, partner):
+ params = {
+ "medium": defaults["medium"],
+ "source": defaults["source"],
+ "campaign": partner["campaign"],
+ "content": partner["content"],
+ }
+ if partner.get("variation"):
+ params["variation"] = partner["variation"]
+ if partner.get("experiment"):
+ params["experiment"] = partner["experiment"]
+
+ code = urlencode(params)
+ return code
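+
+
+# Illustrative worked example (all values are hypothetical):
+#
+#     defaults = {"medium": "distribution", "source": "example-source"}
+#     partner = {"campaign": "sample", "content": "001"}
+#     generate_attribution_code(defaults, partner)
+#     # -> "medium=distribution&source=example-source&campaign=sample&content=001"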
diff --git a/taskcluster/gecko_taskgraph/util/perfile.py b/taskcluster/gecko_taskgraph/util/perfile.py
new file mode 100644
index 0000000000..4e82d87dad
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/util/perfile.py
@@ -0,0 +1,104 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import itertools
+import json
+import logging
+import math
+
+import taskgraph
+from mozbuild.util import memoize
+from mozpack.path import match as mozpackmatch
+
+from gecko_taskgraph import files_changed
+
+from .. import GECKO
+
+logger = logging.getLogger(__name__)
+
+
+@memoize
+def perfile_number_of_chunks(is_try, try_task_config, head_repository, head_rev, type):
+ if taskgraph.fast and not is_try:
+ # When iterating on taskgraph changes, the exact number of chunks that
+ # test-verify runs usually isn't important, so skip it when going fast.
+ return 3
+ tests_per_chunk = 10.0
+ if type.startswith("test-coverage"):
+ tests_per_chunk = 30.0
+
+ if type.startswith("test-verify-wpt") or type.startswith("test-coverage-wpt"):
+ file_patterns = [
+ "testing/web-platform/tests/**",
+ "testing/web-platform/mozilla/tests/**",
+ ]
+ elif type.startswith("test-verify-gpu") or type.startswith("test-coverage-gpu"):
+ file_patterns = [
+ "**/*webgl*/**/test_*",
+ "**/dom/canvas/**/test_*",
+ "**/gfx/tests/**/test_*",
+ "**/devtools/canvasdebugger/**/browser_*",
+ "**/reftest*/**",
+ ]
+ elif type.startswith("test-verify") or type.startswith("test-coverage"):
+ file_patterns = [
+ "**/test_*",
+ "**/browser_*",
+ "**/crashtest*/**",
+ "js/src/tests/test/**",
+ "js/src/tests/non262/**",
+ "js/src/tests/test262/**",
+ ]
+ else:
+        # This captures tasks that are not test-verify/test-coverage; return
+        # a single chunk (returning 0 would mean there are no tests to run).
+ return 1
+
+ changed_files = set()
+ if try_task_config:
+ suite_to_paths = json.loads(try_task_config)
+ specified_files = itertools.chain.from_iterable(suite_to_paths.values())
+ changed_files.update(specified_files)
+
+ if is_try:
+ changed_files.update(files_changed.get_locally_changed_files(GECKO))
+ else:
+ changed_files.update(files_changed.get_changed_files(head_repository, head_rev))
+
+ test_count = 0
+ for pattern in file_patterns:
+ for path in changed_files:
+ # TODO: consider running tests if a manifest changes
+ if path.endswith(".list") or path.endswith(".ini"):
+ continue
+ if path.endswith("^headers^"):
+ continue
+
+ if mozpackmatch(path, pattern):
+ gpu = False
+ if type == "test-verify-e10s" or type == "test-coverage-e10s":
+                    # file_patterns for test-verify will pick up some gpu
+                    # tests; ignore those here. For reftests there are none
+                    # to ignore in the regular (non-gpu) case.
+ gpu_dirs = [
+ "dom/canvas",
+ "gfx/tests",
+ "devtools/canvasdebugger",
+ "webgl",
+ ]
+ for gdir in gpu_dirs:
+ if len(path.split(gdir)) > 1:
+ gpu = True
+
+ if not gpu:
+ test_count += 1
+
+ chunks = test_count / tests_per_chunk
+ chunks = int(math.ceil(chunks))
+
+ # Never return 0 chunks on try, so that per-file tests can be pushed to try with
+ # an explicit path, and also so "empty" runs can be checked on try.
+ if is_try and chunks == 0:
+ chunks = 1
+
+ return chunks
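+
+
+# Illustrative chunk arithmetic: with the default 10.0 tests per chunk, 25
+# matching changed test files yield ceil(25 / 10.0) == 3 chunks; on try, a
+# result of 0 is bumped to 1 so explicit pushes always run something.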
diff --git a/taskcluster/gecko_taskgraph/util/platforms.py b/taskcluster/gecko_taskgraph/util/platforms.py
new file mode 100644
index 0000000000..2c423223fe
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/util/platforms.py
@@ -0,0 +1,58 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import re
+
+from taskgraph.util.attributes import keymatch
+
+# platform family is extracted from build platform by taking the alphabetic prefix
+# and then translating win -> windows
+_platform_re = re.compile(r"^[a-z]*")
+_renames = {"win": "windows"}
+
+
+_archive_formats = {
+ "linux": ".tar.bz2",
+ "macosx": ".tar.gz",
+ "windows": ".zip",
+}
+
+_executable_extension = {
+ "linux": "",
+ "macosx": "",
+ "windows": ".exe",
+}
+
+_architectures = {
+ r"linux\b.*": "x86",
+ r"linux64\b.*": "x86_64",
+ r"macosx64\b.*": "macos-x86_64-aarch64",
+ r"win32\b.*": "x86",
+ r"win64\b(?!-aarch64).*": "x86_64",
+ r"win64-aarch64\b.*": "aarch64",
+}
+
+
+def platform_family(build_platform):
+ """Given a build platform, return the platform family (linux, macosx, etc.)"""
+ family = _platform_re.match(build_platform).group(0)
+ return _renames.get(family, family)
+
+
+def archive_format(build_platform):
+ """Given a build platform, return the archive format used on the platform."""
+ return _archive_formats[platform_family(build_platform)]
+
+
+def executable_extension(build_platform):
+ """Given a build platform, return the executable extension used on the platform."""
+ return _executable_extension[platform_family(build_platform)]
+
+
+def architecture(build_platform):
+ matches = keymatch(_architectures, build_platform)
+ if len(matches) == 1:
+ return matches[0]
+ raise Exception(f"Could not determine architecture of platform `{build_platform}`.")
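+
+
+# Illustrative examples (doctest-style, not executed here):
+#     >>> platform_family("win64-shippable")  # "win" prefix, then renamed
+#     'windows'
+#     >>> archive_format("linux64-asan")
+#     '.tar.bz2'
+#     >>> architecture("win64-aarch64-devedition")
+#     'aarch64'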
diff --git a/taskcluster/gecko_taskgraph/util/scriptworker.py b/taskcluster/gecko_taskgraph/util/scriptworker.py
new file mode 100644
index 0000000000..0d2e4b805b
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/util/scriptworker.py
@@ -0,0 +1,865 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""Make scriptworker.cot.verify more user friendly by making scopes dynamic.
+
+Scriptworker uses certain scopes to determine which sets of credentials to use.
+Certain scopes are restricted by branch in chain of trust verification, and are
+checked again at the script level. This file provides functions to adjust
+these scopes automatically by project; this makes pushing to try, forking a
+project branch, and merge day uplifts more user friendly.
+
+In the future, we may adjust scopes by other settings as well, e.g. different
+scopes for `push-to-candidates` rather than `push-to-releases`, even if both
+happen on mozilla-beta and mozilla-release.
+
+Additional configuration is found in the :ref:`graph config <taskgraph-graph-config>`.
+"""
+import functools
+import itertools
+import json
+import os
+from datetime import datetime
+
+import jsone
+from mozbuild.util import memoize
+from taskgraph.util.schema import resolve_keyed_by
+from taskgraph.util.taskcluster import get_artifact_prefix
+from taskgraph.util.yaml import load_yaml
+
+from gecko_taskgraph.util.copy_task import copy_task
+
+# constants {{{1
+"""Map signing scope aliases to sets of projects.
+
+Currently m-c and DevEdition on m-b use nightly signing; Beta on m-b and m-r
+use release signing. These data structures aren't set up to handle different
+scopes on the same repo, so we use a different set of them for DevEdition, and
+callers are responsible for using the correct one (by calling the appropriate
+helper below). More context on this in https://bugzilla.mozilla.org/show_bug.cgi?id=1358601.
+
+We will need to add esr support at some point. Eventually we want to add
+nuance so certain m-b and m-r tasks use dep or nightly signing, and we only
+release sign when we have a signed-off set of candidate builds. This current
+approach works for now, though.
+
+This is a list of list-pairs, for ordering.
+"""
+SIGNING_SCOPE_ALIAS_TO_PROJECT = [
+ [
+ "all-nightly-branches",
+ {
+ "mozilla-central",
+ "comm-central",
+ # bug 1845368: pine is a permanent project branch used for testing
+ # nightly updates
+ "pine",
+ # bug 1877483: larch has similar needs for nightlies
+ "larch",
+ },
+ ],
+ [
+ "all-release-branches",
+ {
+ "mozilla-beta",
+ "mozilla-release",
+ "mozilla-esr115",
+ "comm-beta",
+ "comm-release",
+ "comm-esr115",
+ },
+ ],
+]
+
+"""Map the signing scope aliases to the actual scopes.
+"""
+SIGNING_CERT_SCOPES = {
+ "all-release-branches": "signing:cert:release-signing",
+ "all-nightly-branches": "signing:cert:nightly-signing",
+ "default": "signing:cert:dep-signing",
+}
+
+DEVEDITION_SIGNING_SCOPE_ALIAS_TO_PROJECT = [
+ [
+ "beta",
+ {
+ "mozilla-beta",
+ },
+ ]
+]
+
+DEVEDITION_SIGNING_CERT_SCOPES = {
+ "beta": "signing:cert:nightly-signing",
+ "default": "signing:cert:dep-signing",
+}
+
+"""Map beetmover scope aliases to sets of projects.
+"""
+BEETMOVER_SCOPE_ALIAS_TO_PROJECT = [
+ [
+ "all-nightly-branches",
+ {
+ "mozilla-central",
+ "comm-central",
+ # bug 1845368: pine is a permanent project branch used for testing
+ # nightly updates
+ "pine",
+ # bug 1877483: larch has similar needs for nightlies
+ "larch",
+ },
+ ],
+ [
+ "all-release-branches",
+ {
+ "mozilla-beta",
+ "mozilla-release",
+ "mozilla-esr115",
+ "comm-beta",
+ "comm-release",
+ "comm-esr115",
+ },
+ ],
+]
+
+"""Map the beetmover scope aliases to the actual scopes.
+"""
+BEETMOVER_BUCKET_SCOPES = {
+ "all-release-branches": "beetmover:bucket:release",
+ "all-nightly-branches": "beetmover:bucket:nightly",
+ "default": "beetmover:bucket:dep",
+}
+
+"""Map the beetmover scope aliases to the actual scopes.
+These are the scopes needed to import artifacts into the product delivery APT repos.
+"""
+BEETMOVER_APT_REPO_SCOPES = {
+ "all-release-branches": "beetmover:apt-repo:release",
+ "all-nightly-branches": "beetmover:apt-repo:nightly",
+ "default": "beetmover:apt-repo:dep",
+}
+
+"""Map the beetmover task aliases to the actual action scopes.
+"""
+BEETMOVER_ACTION_SCOPES = {
+ "nightly": "beetmover:action:push-to-nightly",
+ # bug 1845368: pine is a permanent project branch used for testing
+ # nightly updates
+ "nightly-pine": "beetmover:action:push-to-nightly",
+ # bug 1877483: larch has similar needs for nightlies
+ "nightly-larch": "beetmover:action:push-to-nightly",
+ "default": "beetmover:action:push-to-candidates",
+}
+
+"""Map the beetmover task aliases to the actual action scopes.
+The action scopes are generic across different repo types.
+"""
+BEETMOVER_REPO_ACTION_SCOPES = {
+ "default": "beetmover:action:import-from-gcs-to-artifact-registry",
+}
+
+"""Known balrog actions."""
+BALROG_ACTIONS = (
+ "submit-locale",
+ "submit-toplevel",
+ "schedule",
+ "v2-submit-locale",
+ "v2-submit-toplevel",
+)
+
+"""Map balrog scope aliases to sets of projects.
+
+This is a list of list-pairs, for ordering.
+"""
+BALROG_SCOPE_ALIAS_TO_PROJECT = [
+ [
+ "nightly",
+ {
+ "mozilla-central",
+ "comm-central",
+ # bug 1845368: pine is a permanent project branch used for testing
+ # nightly updates
+ "pine",
+ # bug 1877483: larch has similar needs for nightlies
+ "larch",
+ },
+ ],
+ [
+ "beta",
+ {
+ "mozilla-beta",
+ "comm-beta",
+ },
+ ],
+ [
+ "release",
+ {
+ "mozilla-release",
+ "comm-release",
+ },
+ ],
+ [
+ "esr115",
+ {
+ "mozilla-esr115",
+ "comm-esr115",
+ },
+ ],
+]
+
+"""Map the balrog scope aliases to the actual scopes.
+"""
+BALROG_SERVER_SCOPES = {
+ "nightly": "balrog:server:nightly",
+ "aurora": "balrog:server:aurora",
+ "beta": "balrog:server:beta",
+ "release": "balrog:server:release",
+ "esr115": "balrog:server:esr",
+ "default": "balrog:server:dep",
+}
+
+
+"""The list of release promotion phases for which we send notifications.
+"""
+RELEASE_NOTIFICATION_PHASES = ("promote", "push", "ship")
+
+
+def add_scope_prefix(config, scope):
+ """
+ Prepends the scriptworker scope prefix from the :ref:`graph config
+ <taskgraph-graph-config>`.
+
+ Args:
+ config (TransformConfig): The configuration for the kind being transformed.
+ scope (string): The suffix of the scope
+
+ Returns:
+ string: the scope to use.
+ """
+ return "{prefix}:{scope}".format(
+ prefix=config.graph_config["scriptworker"]["scope-prefix"],
+ scope=scope,
+ )
+
+
+def with_scope_prefix(f):
+ """
+ Wraps a function, calling :py:func:`add_scope_prefix` on the result of
+ calling the wrapped function.
+
+ Args:
+ f (callable): A function that takes a ``config`` and some keyword
+ arguments, and returns a scope suffix.
+
+ Returns:
+ callable: the wrapped function
+ """
+
+ @functools.wraps(f)
+ def wrapper(config, **kwargs):
+ scope_or_scopes = f(config, **kwargs)
+ if isinstance(scope_or_scopes, list):
+ return map(functools.partial(add_scope_prefix, config), scope_or_scopes)
+ return add_scope_prefix(config, scope_or_scopes)
+
+ return wrapper
+
+
+# scope functions {{{1
+@with_scope_prefix
+def get_scope_from_project(config, alias_to_project_map, alias_to_scope_map):
+ """Determine the restricted scope from `config.params['project']`.
+
+ Args:
+ config (TransformConfig): The configuration for the kind being transformed.
+ alias_to_project_map (list of lists): each list pair contains the
+ alias and the set of projects that match. This is ordered.
+        alias_to_scope_map (dict): maps each alias to a scope
+
+ Returns:
+ string: the scope to use.
+ """
+ for alias, projects in alias_to_project_map:
+ if config.params["project"] in projects and alias in alias_to_scope_map:
+ return alias_to_scope_map[alias]
+ return alias_to_scope_map["default"]
+
+
+@with_scope_prefix
+def get_scope_from_release_type(config, release_type_to_scope_map):
+ """Determine the restricted scope from `config.params['target_tasks_method']`.
+
+ Args:
+ config (TransformConfig): The configuration for the kind being transformed.
+        release_type_to_scope_map (dict): maps release types to scopes
+
+ Returns:
+ string: the scope to use.
+ """
+ return release_type_to_scope_map.get(
+ config.params["release_type"], release_type_to_scope_map["default"]
+ )
+
+
+def get_phase_from_target_method(config, alias_to_tasks_map, alias_to_phase_map):
+ """Determine the phase from `config.params['target_tasks_method']`.
+
+ Args:
+ config (TransformConfig): The configuration for the kind being transformed.
+ alias_to_tasks_map (list of lists): each list pair contains the
+ alias and the set of target methods that match. This is ordered.
+ alias_to_phase_map (dict): the alias to phase map
+
+ Returns:
+ string: the phase to use.
+ """
+ for alias, tasks in alias_to_tasks_map:
+ if (
+ config.params["target_tasks_method"] in tasks
+ and alias in alias_to_phase_map
+ ):
+ return alias_to_phase_map[alias]
+ return alias_to_phase_map["default"]
+
+
+get_signing_cert_scope = functools.partial(
+ get_scope_from_project,
+ alias_to_project_map=SIGNING_SCOPE_ALIAS_TO_PROJECT,
+ alias_to_scope_map=SIGNING_CERT_SCOPES,
+)
+
+get_devedition_signing_cert_scope = functools.partial(
+ get_scope_from_project,
+ alias_to_project_map=DEVEDITION_SIGNING_SCOPE_ALIAS_TO_PROJECT,
+ alias_to_scope_map=DEVEDITION_SIGNING_CERT_SCOPES,
+)
+
+get_beetmover_bucket_scope = functools.partial(
+ get_scope_from_project,
+ alias_to_project_map=BEETMOVER_SCOPE_ALIAS_TO_PROJECT,
+ alias_to_scope_map=BEETMOVER_BUCKET_SCOPES,
+)
+
+get_beetmover_apt_repo_scope = functools.partial(
+ get_scope_from_project,
+ alias_to_project_map=BEETMOVER_SCOPE_ALIAS_TO_PROJECT,
+ alias_to_scope_map=BEETMOVER_APT_REPO_SCOPES,
+)
+
+get_beetmover_repo_action_scope = functools.partial(
+ get_scope_from_release_type,
+ release_type_to_scope_map=BEETMOVER_REPO_ACTION_SCOPES,
+)
+
+get_beetmover_action_scope = functools.partial(
+ get_scope_from_release_type,
+ release_type_to_scope_map=BEETMOVER_ACTION_SCOPES,
+)
+
+get_balrog_server_scope = functools.partial(
+ get_scope_from_project,
+ alias_to_project_map=BALROG_SCOPE_ALIAS_TO_PROJECT,
+ alias_to_scope_map=BALROG_SERVER_SCOPES,
+)
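+
+# Illustrative resolution, assuming a graph-config scope-prefix such as
+# "project:releng": on project "mozilla-central", get_signing_cert_scope
+# matches the "all-nightly-branches" alias and returns
+# "project:releng:signing:cert:nightly-signing"; projects not listed in
+# SIGNING_SCOPE_ALIAS_TO_PROJECT fall back to the "default" dep-signing scope.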
+
+cached_load_yaml = memoize(load_yaml)
+
+
+# release_config {{{1
+def get_release_config(config):
+ """Get the build number and version for a release task.
+
+ Currently only applies to beetmover tasks.
+
+ Args:
+ config (TransformConfig): The configuration for the kind being transformed.
+
+ Returns:
+ dict: containing both `build_number` and `version`. This can be used to
+ update `task.payload`.
+ """
+ release_config = {}
+
+ partial_updates = os.environ.get("PARTIAL_UPDATES", "")
+ if partial_updates != "" and config.kind in (
+ "release-bouncer-sub",
+ "release-bouncer-check",
+ "release-update-verify-config",
+ "release-secondary-update-verify-config",
+ "release-balrog-submit-toplevel",
+ "release-secondary-balrog-submit-toplevel",
+ ):
+ partial_updates = json.loads(partial_updates)
+ release_config["partial_versions"] = ", ".join(
+ [
+ "{}build{}".format(v, info["buildNumber"])
+ for v, info in partial_updates.items()
+ ]
+ )
+        # Drop the key entirely when there are no partials to advertise.
+        if not release_config["partial_versions"]:
+ del release_config["partial_versions"]
+
+ release_config["version"] = config.params["version"]
+ release_config["appVersion"] = config.params["app_version"]
+
+ release_config["next_version"] = config.params["next_version"]
+ release_config["build_number"] = config.params["build_number"]
+ return release_config
+
+
+def get_signing_cert_scope_per_platform(build_platform, is_shippable, config):
+ if "devedition" in build_platform:
+ return get_devedition_signing_cert_scope(config)
+ if is_shippable:
+ return get_signing_cert_scope(config)
+ return add_scope_prefix(config, "signing:cert:dep-signing")
+
+
+# generate_beetmover_upstream_artifacts {{{1
+def generate_beetmover_upstream_artifacts(
+ config, job, platform, locale=None, dependencies=None, **kwargs
+):
+ """Generate the upstream artifacts for beetmover, using the artifact map.
+
+ Currently only applies to beetmover tasks.
+
+    Args:
+        config (TransformConfig): The configuration for the kind being transformed.
+        job (dict): The current job being generated
+        platform (str): The current build platform
+        locale (str): The current locale being beetmoved.
+        dependencies (list): A list of the job's dependency labels.
+
+ Returns:
+ list: A list of dictionaries conforming to the upstream_artifacts spec.
+ """
+ base_artifact_prefix = get_artifact_prefix(job)
+ resolve_keyed_by(
+ job,
+ "attributes.artifact_map",
+ "artifact map",
+ **{
+ "release-type": config.params["release_type"],
+ "platform": platform,
+ },
+ )
+ map_config = copy_task(cached_load_yaml(job["attributes"]["artifact_map"]))
+ upstream_artifacts = list()
+
+ if not locale:
+ locales = map_config["default_locales"]
+ elif isinstance(locale, list):
+ locales = locale
+ else:
+ locales = [locale]
+
+ if not dependencies:
+ if job.get("dependencies"):
+ dependencies = job["dependencies"].keys()
+ else:
+ raise Exception(f"Unsupported type of dependency. Got job: {job}")
+
+ for locale, dep in itertools.product(locales, dependencies):
+ paths = list()
+
+ for filename in map_config["mapping"]:
+ resolve_keyed_by(
+ map_config["mapping"][filename],
+ "from",
+ f"beetmover filename {filename}",
+ platform=platform,
+ )
+ if dep not in map_config["mapping"][filename]["from"]:
+ continue
+ if locale != "en-US" and not map_config["mapping"][filename]["all_locales"]:
+ continue
+ if (
+ "only_for_platforms" in map_config["mapping"][filename]
+ and platform
+ not in map_config["mapping"][filename]["only_for_platforms"]
+ ):
+ continue
+ if (
+ "not_for_platforms" in map_config["mapping"][filename]
+ and platform in map_config["mapping"][filename]["not_for_platforms"]
+ ):
+ continue
+ if "partials_only" in map_config["mapping"][filename]:
+ continue
+ # The next time we look at this file it might be a different locale.
+ file_config = copy_task(map_config["mapping"][filename])
+ resolve_keyed_by(
+ file_config,
+ "source_path_modifier",
+ "source path modifier",
+ locale=locale,
+ )
+
+ kwargs["locale"] = locale
+
+ paths.append(
+ os.path.join(
+ base_artifact_prefix,
+ jsone.render(file_config["source_path_modifier"], kwargs),
+ jsone.render(filename, kwargs),
+ )
+ )
+
+ if (
+ job.get("dependencies")
+ and getattr(job["dependencies"][dep], "attributes", None)
+ and job["dependencies"][dep].attributes.get("release_artifacts")
+ ):
+ paths = [
+ path
+ for path in paths
+ if path in job["dependencies"][dep].attributes["release_artifacts"]
+ ]
+
+ if not paths:
+ continue
+
+ upstream_artifacts.append(
+ {
+ "taskId": {"task-reference": f"<{dep}>"},
+ "taskType": map_config["tasktype_map"].get(dep),
+ "paths": sorted(paths),
+ "locale": locale,
+ }
+ )
+
+ upstream_artifacts.sort(key=lambda u: u["paths"])
+ return upstream_artifacts
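+
+# Each returned entry takes roughly this shape (illustrative values only):
+#     {
+#         "taskId": {"task-reference": "<build-signing>"},
+#         "taskType": "signing",
+#         "paths": ["public/build/en-US/target.tar.bz2"],
+#         "locale": "en-US",
+#     }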
+
+
+def generate_artifact_registry_gcs_sources(dep):
+ gcs_sources = []
+ locale = dep.attributes.get("locale")
+ if not locale:
+ repackage_deb_reference = "<repackage-deb>"
+ repackage_deb_artifact = "public/build/target.deb"
+ else:
+ repackage_deb_reference = "<repackage-deb-l10n>"
+ repackage_deb_artifact = f"public/build/{locale}/target.langpack.deb"
+ for config in dep.task["payload"]["artifactMap"]:
+ if (
+ config["taskId"]["task-reference"] == repackage_deb_reference
+ and repackage_deb_artifact in config["paths"]
+ ):
+ gcs_sources.append(
+ config["paths"][repackage_deb_artifact]["destinations"][0]
+ )
+ return gcs_sources
+
+
+# generate_beetmover_artifact_map {{{1
+def generate_beetmover_artifact_map(config, job, **kwargs):
+ """Generate the beetmover artifact map.
+
+ Currently only applies to beetmover tasks.
+
+ Args:
+        config (TransformConfig): Current taskgraph configuration.
+ job (dict): The current job being generated
+ Common kwargs:
+ platform (str): The current build platform
+ locale (str): The current locale being beetmoved.
+
+ Returns:
+ list: A list of dictionaries containing source->destination
+ maps for beetmover.
+ """
+ platform = kwargs.get("platform", "")
+ resolve_keyed_by(
+ job,
+ "attributes.artifact_map",
+ job["label"],
+ **{
+ "release-type": config.params["release_type"],
+ "platform": platform,
+ },
+ )
+ map_config = copy_task(cached_load_yaml(job["attributes"]["artifact_map"]))
+ base_artifact_prefix = map_config.get(
+ "base_artifact_prefix", get_artifact_prefix(job)
+ )
+
+ artifacts = list()
+
+ dependencies = job["dependencies"].keys()
+
+ if kwargs.get("locale"):
+ if isinstance(kwargs["locale"], list):
+ locales = kwargs["locale"]
+ else:
+ locales = [kwargs["locale"]]
+ else:
+ locales = map_config["default_locales"]
+
+ resolve_keyed_by(map_config, "s3_bucket_paths", job["label"], platform=platform)
+
+ for locale, dep in sorted(itertools.product(locales, dependencies)):
+ paths = dict()
+ for filename in map_config["mapping"]:
+ # Relevancy checks
+            resolve_keyed_by(
+                map_config["mapping"][filename],
+                "from",
+                f"beetmover filename {filename}",
+                platform=platform,
+            )
+ if dep not in map_config["mapping"][filename]["from"]:
+ # We don't get this file from this dependency.
+ continue
+ if locale != "en-US" and not map_config["mapping"][filename]["all_locales"]:
+ # This locale either doesn't produce or shouldn't upload this file.
+ continue
+ if (
+ "only_for_platforms" in map_config["mapping"][filename]
+ and platform
+ not in map_config["mapping"][filename]["only_for_platforms"]
+ ):
+ # This platform either doesn't produce or shouldn't upload this file.
+ continue
+ if (
+ "not_for_platforms" in map_config["mapping"][filename]
+ and platform in map_config["mapping"][filename]["not_for_platforms"]
+ ):
+ # This platform either doesn't produce or shouldn't upload this file.
+ continue
+ if "partials_only" in map_config["mapping"][filename]:
+ continue
+
+ # copy_task because the next time we look at this file the locale will differ.
+ file_config = copy_task(map_config["mapping"][filename])
+
+ for field in [
+ "destinations",
+ "locale_prefix",
+ "source_path_modifier",
+ "update_balrog_manifest",
+ "pretty_name",
+ "checksums_path",
+ ]:
+ resolve_keyed_by(
+ file_config, field, job["label"], locale=locale, platform=platform
+ )
+
+ # This format string should ideally be in the configuration file,
+ # but this would mean keeping variable names in sync between code + config.
+ destinations = [
+ "{s3_bucket_path}/{dest_path}/{locale_prefix}{filename}".format(
+ s3_bucket_path=bucket_path,
+ dest_path=dest_path,
+ locale_prefix=file_config["locale_prefix"],
+ filename=file_config.get("pretty_name", filename),
+ )
+ for dest_path, bucket_path in itertools.product(
+ file_config["destinations"], map_config["s3_bucket_paths"]
+ )
+ ]
+ # Creating map entries
+ # Key must be artifact path, to avoid trampling duplicates, such
+ # as public/build/target.apk and public/build/en-US/target.apk
+ key = os.path.join(
+ base_artifact_prefix,
+ file_config["source_path_modifier"],
+ filename,
+ )
+
+ paths[key] = {
+ "destinations": destinations,
+ }
+ if file_config.get("checksums_path"):
+ paths[key]["checksums_path"] = file_config["checksums_path"]
+
+ # optional flag: balrog manifest
+ if file_config.get("update_balrog_manifest"):
+ paths[key]["update_balrog_manifest"] = True
+ if file_config.get("balrog_format"):
+ paths[key]["balrog_format"] = file_config["balrog_format"]
+
+ if not paths:
+ # No files for this dependency/locale combination.
+ continue
+
+ # Render all variables for the artifact map
+ platforms = copy_task(map_config.get("platform_names", {}))
+ if platform:
+ for key in platforms.keys():
+ resolve_keyed_by(platforms, key, job["label"], platform=platform)
+
+ upload_date = datetime.fromtimestamp(config.params["build_date"])
+
+ kwargs.update(
+ {
+ "locale": locale,
+ "version": config.params["version"],
+ "branch": config.params["project"],
+ "build_number": config.params["build_number"],
+ "year": upload_date.year,
+ "month": upload_date.strftime("%m"), # zero-pad the month
+ "upload_date": upload_date.strftime("%Y-%m-%d-%H-%M-%S"),
+ }
+ )
+ kwargs.update(**platforms)
+ paths = jsone.render(paths, kwargs)
+ artifacts.append(
+ {
+ "taskId": {"task-reference": f"<{dep}>"},
+ "locale": locale,
+ "paths": paths,
+ }
+ )
+
+ return artifacts
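+
+# Destination rendering sketch (illustrative values only): with an s3 bucket
+# path of "pub/firefox/nightly", a destination template like
+# "${year}/${month}/${upload_date}-${branch}" and a pretty_name of
+# "firefox-${version}.en-US.linux-x86_64.tar.bz2", jsone.render() could yield
+# "pub/firefox/nightly/2024/03/2024-03-18-09-30-00-mozilla-central/firefox-124.0.1.en-US.linux-x86_64.tar.bz2".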
+
+
+# generate_beetmover_partials_artifact_map {{{1
+def generate_beetmover_partials_artifact_map(config, job, partials_info, **kwargs):
+ """Generate the beetmover partials artifact map.
+
+ Currently only applies to beetmover tasks.
+
+ Args:
+        config (TransformConfig): Current taskgraph configuration.
+ job (dict): The current job being generated
+ partials_info (dict): Current partials and information about them in a dict
+ Common kwargs:
+ platform (str): The current build platform
+ locale (str): The current locale being beetmoved.
+
+ Returns:
+ list: A list of dictionaries containing source->destination
+ maps for beetmover.
+ """
+ platform = kwargs.get("platform", "")
+ resolve_keyed_by(
+ job,
+ "attributes.artifact_map",
+ "artifact map",
+ **{
+ "release-type": config.params["release_type"],
+ "platform": platform,
+ },
+ )
+ map_config = copy_task(cached_load_yaml(job["attributes"]["artifact_map"]))
+ base_artifact_prefix = map_config.get(
+ "base_artifact_prefix", get_artifact_prefix(job)
+ )
+
+ artifacts = list()
+ dependencies = job["dependencies"].keys()
+
+ if kwargs.get("locale"):
+ locales = [kwargs["locale"]]
+ else:
+ locales = map_config["default_locales"]
+
+ resolve_keyed_by(
+ map_config, "s3_bucket_paths", "s3_bucket_paths", platform=platform
+ )
+
+ platforms = copy_task(map_config.get("platform_names", {}))
+ if platform:
+ for key in platforms.keys():
+ resolve_keyed_by(platforms, key, key, platform=platform)
+ upload_date = datetime.fromtimestamp(config.params["build_date"])
+
+ for locale, dep in itertools.product(locales, dependencies):
+ paths = dict()
+ for filename in map_config["mapping"]:
+ # Relevancy checks
+ if dep not in map_config["mapping"][filename]["from"]:
+ # We don't get this file from this dependency.
+ continue
+ if locale != "en-US" and not map_config["mapping"][filename]["all_locales"]:
+ # This locale either doesn't produce or shouldn't upload this file.
+ continue
+ if "partials_only" not in map_config["mapping"][filename]:
+ continue
+ # copy_task because the next time we look at this file the locale will differ.
+ file_config = copy_task(map_config["mapping"][filename])
+
+ for field in [
+ "destinations",
+ "locale_prefix",
+ "source_path_modifier",
+ "update_balrog_manifest",
+ "from_buildid",
+ "pretty_name",
+ "checksums_path",
+ ]:
+ resolve_keyed_by(
+ file_config, field, field, locale=locale, platform=platform
+ )
+
+ # This format string should ideally be in the configuration file,
+ # but this would mean keeping variable names in sync between code + config.
+ destinations = [
+ "{s3_bucket_path}/{dest_path}/{locale_prefix}{filename}".format(
+ s3_bucket_path=bucket_path,
+ dest_path=dest_path,
+ locale_prefix=file_config["locale_prefix"],
+ filename=file_config.get("pretty_name", filename),
+ )
+ for dest_path, bucket_path in itertools.product(
+ file_config["destinations"], map_config["s3_bucket_paths"]
+ )
+ ]
+ # Creating map entries
+ # Key must be artifact path, to avoid trampling duplicates, such
+ # as public/build/target.apk and public/build/en-US/target.apk
+ key = os.path.join(
+ base_artifact_prefix,
+ file_config["source_path_modifier"],
+ filename,
+ )
+ partials_paths = {}
+ for pname, info in partials_info.items():
+ partials_paths[key] = {
+ "destinations": destinations,
+ }
+ if file_config.get("checksums_path"):
+ partials_paths[key]["checksums_path"] = file_config[
+ "checksums_path"
+ ]
+
+ # optional flag: balrog manifest
+ if file_config.get("update_balrog_manifest"):
+ partials_paths[key]["update_balrog_manifest"] = True
+ if file_config.get("balrog_format"):
+ partials_paths[key]["balrog_format"] = file_config[
+ "balrog_format"
+ ]
+ # optional flag: from_buildid
+ if file_config.get("from_buildid"):
+ partials_paths[key]["from_buildid"] = file_config["from_buildid"]
+
+ # render buildid
+ kwargs.update(
+ {
+ "partial": pname,
+ "from_buildid": info["buildid"],
+ "previous_version": info.get("previousVersion"),
+ "buildid": str(config.params["moz_build_date"]),
+ "locale": locale,
+ "version": config.params["version"],
+ "branch": config.params["project"],
+ "build_number": config.params["build_number"],
+ "year": upload_date.year,
+ "month": upload_date.strftime("%m"), # zero-pad the month
+ "upload_date": upload_date.strftime("%Y-%m-%d-%H-%M-%S"),
+ }
+ )
+ kwargs.update(**platforms)
+ paths.update(jsone.render(partials_paths, kwargs))
+
+ if not paths:
+ continue
+
+ artifacts.append(
+ {
+ "taskId": {"task-reference": f"<{dep}>"},
+ "locale": locale,
+ "paths": paths,
+ }
+ )
+
+ artifacts.sort(key=lambda a: sorted(a["paths"].items()))
+ return artifacts
diff --git a/taskcluster/gecko_taskgraph/util/signed_artifacts.py b/taskcluster/gecko_taskgraph/util/signed_artifacts.py
new file mode 100644
index 0000000000..2467ff8046
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/util/signed_artifacts.py
@@ -0,0 +1,198 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Defines artifacts to sign before repackage.
+"""
+
+from taskgraph.util.taskcluster import get_artifact_path
+
+from gecko_taskgraph.util.declarative_artifacts import get_geckoview_upstream_artifacts
+
+LANGPACK_SIGN_PLATFORMS = { # set
+ "linux64-shippable",
+ "linux64-devedition",
+ "macosx64-shippable",
+ "macosx64-devedition",
+}
+
+
+def is_partner_kind(kind):
+ if kind and kind.startswith(("release-partner", "release-eme-free")):
+ return True
+
+
+def is_notarization_kind(kind):
+ if kind and "notarization" in kind:
+ return True
+
+
+def is_mac_signing_king(kind):
+ return kind and "mac-signing" in kind
+
+
+def generate_specifications_of_artifacts_to_sign(
+ config, job, keep_locale_template=True, kind=None, dep_kind=None
+):
+ build_platform = job["attributes"].get("build_platform")
+ use_stub = job["attributes"].get("stub-installer")
+ # Get locales to know if we want to sign ja-JP-mac langpack
+ locales = job["attributes"].get("chunk_locales", [])
+ if kind == "release-source-signing":
+ artifacts_specifications = [
+ {
+ "artifacts": [get_artifact_path(job, "source.tar.xz")],
+ "formats": ["autograph_gpg"],
+ }
+ ]
+ elif "android" in build_platform:
+ artifacts_specifications = [
+ {
+ "artifacts": get_geckoview_artifacts_to_sign(config, job),
+ "formats": ["autograph_gpg"],
+ }
+ ]
+    # XXX: MAR files aren't signed here (on any platform) because their
+    # internals will be signed after this stage of the release
+ elif "macosx" in build_platform:
+ langpack_formats = []
+ if is_notarization_kind(config.kind):
+ formats = ["apple_notarization"]
+ artifacts_specifications = [
+ {
+ "artifacts": [
+ get_artifact_path(job, "{locale}/target.tar.gz"),
+ get_artifact_path(job, "{locale}/target.pkg"),
+ ],
+ "formats": formats,
+ }
+ ]
+ else:
+ # This task is mac-signing
+ if is_partner_kind(kind):
+ extension = "tar.gz"
+ else:
+ extension = "dmg"
+ artifacts_specifications = [
+ {
+ "artifacts": [
+ get_artifact_path(job, f"{{locale}}/target.{extension}")
+ ],
+ "formats": ["macapp", "autograph_widevine", "autograph_omnija"],
+ }
+ ]
+ langpack_formats = ["autograph_langpack"]
+
+ if "ja-JP-mac" in locales and build_platform in LANGPACK_SIGN_PLATFORMS:
+ artifacts_specifications += [
+ {
+ "artifacts": [
+ get_artifact_path(job, "ja-JP-mac/target.langpack.xpi")
+ ],
+ "formats": langpack_formats,
+ }
+ ]
+ elif "win" in build_platform:
+ artifacts_specifications = [
+ {
+ "artifacts": [
+ get_artifact_path(job, "{locale}/setup.exe"),
+ ],
+ "formats": ["autograph_authenticode_sha2"],
+ },
+ {
+ "artifacts": [
+ get_artifact_path(job, "{locale}/target.zip"),
+ ],
+ "formats": [
+ "autograph_authenticode_sha2",
+ "autograph_widevine",
+ "autograph_omnija",
+ ],
+ },
+ ]
+
+ if use_stub:
+ artifacts_specifications[0]["artifacts"] += [
+ get_artifact_path(job, "{locale}/setup-stub.exe")
+ ]
+ elif "linux" in build_platform:
+ artifacts_specifications = [
+ {
+ "artifacts": [get_artifact_path(job, "{locale}/target.tar.bz2")],
+ "formats": ["autograph_gpg", "autograph_widevine", "autograph_omnija"],
+ }
+ ]
+ if build_platform in LANGPACK_SIGN_PLATFORMS:
+ artifacts_specifications += [
+ {
+ "artifacts": [
+ get_artifact_path(job, "{locale}/target.langpack.xpi")
+ ],
+ "formats": ["autograph_langpack"],
+ }
+ ]
+ else:
+ raise Exception("Platform not implemented for signing")
+
+ if not keep_locale_template:
+ artifacts_specifications = _strip_locale_template(artifacts_specifications)
+
+ if is_partner_kind(kind):
+ artifacts_specifications = _strip_widevine_for_partners(
+ artifacts_specifications
+ )
+
+ return artifacts_specifications
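+
+# Sketch of the linux branch's output, assuming the default "public/build"
+# artifact prefix (illustrative only):
+#     [{"artifacts": ["public/build/{locale}/target.tar.bz2"],
+#       "formats": ["autograph_gpg", "autograph_widevine", "autograph_omnija"]}]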
+
+
+def _strip_locale_template(artifacts_without_locales):
+ for spec in artifacts_without_locales:
+ for index, artifact in enumerate(spec["artifacts"]):
+ stripped_artifact = artifact.format(locale="")
+ stripped_artifact = stripped_artifact.replace("//", "/")
+ spec["artifacts"][index] = stripped_artifact
+
+ return artifacts_without_locales
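+
+# e.g. "public/build/{locale}/target.dmg" becomes "public/build//target.dmg"
+# after formatting, then "public/build/target.dmg" once the doubled slash is
+# collapsed.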
+
+
+def _strip_widevine_for_partners(artifacts_specifications):
+    """Partner repacks should not re-sign anything that was previously signed,
+    for fear of breaking partial updates
+    """
+ for spec in artifacts_specifications:
+ if "autograph_widevine" in spec["formats"]:
+ spec["formats"].remove("autograph_widevine")
+ if "autograph_omnija" in spec["formats"]:
+ spec["formats"].remove("autograph_omnija")
+
+ return artifacts_specifications
+
+
+def get_signed_artifacts(input, formats, behavior=None):
+ """
+ Get the list of signed artifacts for the given input and formats.
+ """
+ artifacts = set()
+ if input.endswith(".dmg"):
+ artifacts.add(input.replace(".dmg", ".tar.gz"))
+ if behavior and behavior != "mac_sign":
+ artifacts.add(input.replace(".dmg", ".pkg"))
+ else:
+ artifacts.add(input)
+ if "autograph_gpg" in formats:
+ artifacts.add(f"{input}.asc")
+
+ return artifacts
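+
+# Examples of the mapping (illustrative): input "target.zip" with formats
+# ["autograph_gpg"] returns {"target.zip", "target.zip.asc"}; input
+# "target.dmg" maps to "target.tar.gz" (plus "target.pkg" for behaviors other
+# than "mac_sign"), while the detached GPG signature keeps the original name.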
+
+
+def get_geckoview_artifacts_to_sign(config, job):
+ upstream_artifacts = []
+ for package in job["attributes"]["maven_packages"]:
+ upstream_artifacts += get_geckoview_upstream_artifacts(config, job, package)
+ return [
+ path
+ for upstream_artifact in upstream_artifacts
+ for path in upstream_artifact["paths"]
+ if not path.endswith(".md5") and not path.endswith(".sha1")
+ ]
diff --git a/taskcluster/gecko_taskgraph/util/taskcluster.py b/taskcluster/gecko_taskgraph/util/taskcluster.py
new file mode 100644
index 0000000000..cddb01fd37
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/util/taskcluster.py
@@ -0,0 +1,128 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import logging
+import os
+
+import taskcluster_urls as liburls
+from taskcluster import Hooks
+from taskgraph.util import taskcluster as tc_util
+from taskgraph.util.taskcluster import (
+ _do_request,
+ get_index_url,
+ get_root_url,
+ get_task_definition,
+ get_task_url,
+)
+
+logger = logging.getLogger(__name__)
+
+
+def insert_index(index_path, task_id, data=None, use_proxy=False):
+ index_url = get_index_url(index_path, use_proxy=use_proxy)
+
+ # Find task expiry.
+ expires = get_task_definition(task_id, use_proxy=use_proxy)["expires"]
+
+ response = _do_request(
+ index_url,
+ method="put",
+ json={
+ "taskId": task_id,
+ "rank": 0,
+ "data": data or {},
+ "expires": expires,
+ },
+ )
+ return response
+
+
+def status_task(task_id, use_proxy=False):
+ """Gets the status of a task given a task_id.
+
+ In testing mode, just logs that it would have retrieved status.
+
+ Args:
+ task_id (str): A task id.
+ use_proxy (bool): Whether to use taskcluster-proxy (default: False)
+
+ Returns:
+ dict: A dictionary object as defined here:
+ https://docs.taskcluster.net/docs/reference/platform/queue/api#status
+ """
+ if tc_util.testing:
+ logger.info(f"Would have gotten status for {task_id}.")
+ else:
+ resp = _do_request(get_task_url(task_id, use_proxy) + "/status")
+ status = resp.json().get("status", {})
+ return status
+
+
+def state_task(task_id, use_proxy=False):
+ """Gets the state of a task given a task_id.
+
+ In testing mode, just logs that it would have retrieved state. This is a subset of the
+ data returned by :func:`status_task`.
+
+ Args:
+ task_id (str): A task id.
+ use_proxy (bool): Whether to use taskcluster-proxy (default: False)
+
+ Returns:
+ str: The state of the task, one of
+ ``pending, running, completed, failed, exception, unknown``.
+ """
+ if tc_util.testing:
+ logger.info(f"Would have gotten state for {task_id}.")
+ else:
+        state = status_task(task_id, use_proxy=use_proxy).get("state") or "unknown"
+        return state
+
+
+def trigger_hook(hook_group_id, hook_id, hook_payload):
+ hooks = Hooks({"rootUrl": get_root_url(True)})
+ response = hooks.triggerHook(hook_group_id, hook_id, hook_payload)
+
+ logger.info(
+ "Task seen here: {}/tasks/{}".format(
+ get_root_url(os.environ.get("TASKCLUSTER_PROXY_URL")),
+ response["status"]["taskId"],
+ )
+ )
+
+
+def list_task_group_tasks(task_group_id):
+ """Generate the tasks in a task group"""
+ params = {}
+ while True:
+ url = liburls.api(
+ get_root_url(False),
+ "queue",
+ "v1",
+ f"task-group/{task_group_id}/list",
+ )
+ resp = _do_request(url, method="get", params=params).json()
+ yield from resp["tasks"]
+ if resp.get("continuationToken"):
+ params = {"continuationToken": resp.get("continuationToken")}
+ else:
+ break
+
+
+def list_task_group_incomplete_task_ids(task_group_id):
+ states = ("running", "pending", "unscheduled")
+ for task in [t["status"] for t in list_task_group_tasks(task_group_id)]:
+ if task["state"] in states:
+ yield task["taskId"]
+
+
+def list_task_group_complete_tasks(task_group_id):
+ tasks = {}
+ for task in list_task_group_tasks(task_group_id):
+ if task.get("status", {}).get("state", "") == "completed":
+ tasks[task.get("task", {}).get("metadata", {}).get("name", "")] = task.get(
+ "status", {}
+ ).get("taskId", "")
+ return tasks
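+
+
+# Illustrative use (invented values): list_task_group_complete_tasks(group_id)
+# might return {"build-linux64/opt": "abc123TaskId"}, mapping completed task
+# names to taskIds, while list_task_group_incomplete_task_ids(group_id) lazily
+# yields the ids of tasks that are still unscheduled, pending, or running.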
diff --git a/taskcluster/gecko_taskgraph/util/taskgraph.py b/taskcluster/gecko_taskgraph/util/taskgraph.py
new file mode 100644
index 0000000000..bac7b3fbb8
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/util/taskgraph.py
@@ -0,0 +1,49 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+Tools for interacting with existing taskgraphs.
+"""
+
+from taskgraph.util.taskcluster import find_task_id, get_artifact
+
+
+def find_decision_task(parameters, graph_config):
+ """Given the parameters for this action, find the taskId of the decision
+ task"""
+ head_rev_param = "{}head_rev".format(graph_config["project-repo-param-prefix"])
+ return find_task_id(
+ "{}.v2.{}.revision.{}.taskgraph.decision".format(
+ graph_config["trust-domain"],
+ parameters["project"],
+ parameters[head_rev_param],
+ )
+ )
+
+
+def find_existing_tasks(previous_graph_ids):
+ existing_tasks = {}
+ for previous_graph_id in previous_graph_ids:
+ label_to_taskid = get_artifact(previous_graph_id, "public/label-to-taskid.json")
+ existing_tasks.update(label_to_taskid)
+ return existing_tasks
+
+
+def find_existing_tasks_from_previous_kinds(
+ full_task_graph, previous_graph_ids, rebuild_kinds
+):
+ """Given a list of previous decision/action taskIds and kinds to ignore
+ from the previous graphs, return a dictionary of labels-to-taskids to use
+ as ``existing_tasks`` in the optimization step."""
+ existing_tasks = find_existing_tasks(previous_graph_ids)
+ kind_labels = {
+ t.label
+ for t in full_task_graph.tasks.values()
+ if t.attributes["kind"] not in rebuild_kinds
+ }
+ return {
+ label: taskid
+ for (label, taskid) in existing_tasks.items()
+ if label in kind_labels
+ }
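+
+
+# Sketch (invented ids): given previous_graph_ids ["DECISION1"] whose
+# label-to-taskid.json maps {"build-linux64/opt": "Tabc", "docker-image-lint":
+# "Tdef"} and rebuild_kinds ["docker-image"], only {"build-linux64/opt":
+# "Tabc"} is reused; the docker-image task is rebuilt instead of being taken
+# from the previous graph.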
diff --git a/taskcluster/gecko_taskgraph/util/templates.py b/taskcluster/gecko_taskgraph/util/templates.py
new file mode 100644
index 0000000000..e6640a7edd
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/util/templates.py
@@ -0,0 +1,59 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+from gecko_taskgraph.util.copy_task import copy_task
+
+
+def merge_to(source, dest):
+ """
+ Merge dict and arrays (override scalar values)
+
+ Keys from source override keys from dest, and elements from lists in source
+ are appended to lists in dest.
+
+ :param dict source: to copy from
+ :param dict dest: to copy to (modified in place)
+ """
+
+ for key, value in source.items():
+ if (
+ isinstance(value, dict)
+ and len(value) == 1
+ and list(value)[0].startswith("by-")
+ ):
+ # Do not merge by-* values as this is likely to confuse someone
+ dest[key] = value
+ continue
+
+ # Override mismatching or empty types
+ if type(value) != type(dest.get(key)): # noqa
+ dest[key] = value
+ continue
+
+ # Merge dict
+ if isinstance(value, dict):
+ merge_to(value, dest[key])
+ continue
+
+ if isinstance(value, list):
+ dest[key] = dest[key] + value
+ continue
+
+ dest[key] = value
+
+ return dest
+
+
+def merge(*objects):
+ """
+ Merge the given objects, using the semantics described for merge_to, with
+ objects later in the list taking precedence. From an inheritance
+ perspective, "parents" should be listed before "children".
+
+ Returns the result without modifying any arguments.
+ """
+ if len(objects) == 1:
+ return copy_task(objects[0])
+ return merge_to(objects[-1], merge(*objects[:-1]))
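+
+
+# For example (illustrative): merge({"a": [1], "b": 1}, {"a": [2], "b": 2})
+# returns {"a": [1, 2], "b": 2}: lists concatenate and later scalars win,
+# while a single-key "by-*" value replaces the destination outright instead of
+# being merged.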
diff --git a/taskcluster/gecko_taskgraph/util/verify.py b/taskcluster/gecko_taskgraph/util/verify.py
new file mode 100644
index 0000000000..037d985cca
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/util/verify.py
@@ -0,0 +1,454 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import logging
+import os
+import re
+import sys
+
+import attr
+from taskgraph.util.treeherder import join_symbol
+from taskgraph.util.verify import VerificationSequence
+
+from gecko_taskgraph import GECKO
+from gecko_taskgraph.util.attributes import (
+ ALL_PROJECTS,
+ RELEASE_PROJECTS,
+ RUN_ON_PROJECT_ALIASES,
+)
+
+logger = logging.getLogger(__name__)
+doc_base_path = os.path.join(GECKO, "taskcluster", "docs")
+
+
+verifications = VerificationSequence()
+
+
+@attr.s(frozen=True)
+class DocPaths:
+ _paths = attr.ib(factory=list)
+
+ def get_files(self, filename):
+ rv = []
+ for p in self._paths:
+ doc_path = os.path.join(p, filename)
+ if os.path.exists(doc_path):
+ rv.append(doc_path)
+ return rv
+
+ def add(self, path):
+ """
+ Projects that make use of Firefox's taskgraph can extend it with
+ their own task kinds by registering additional paths for documentation.
+ documentation_paths.add() needs to be called by the project's Taskgraph
+ registration function. See taskgraph.config.
+ """
+ self._paths.append(path)
+
+
+documentation_paths = DocPaths()
+documentation_paths.add(doc_base_path)
+
+
+def verify_docs(filename, identifiers, appearing_as):
+ """
+ Look for identifiers of the type appearing_as in the files
+ returned by documentation_paths.get_files(). Firefox will have
+ a single file in a list, but projects such as Thunderbird can have
+ documentation in another location and may return multiple files.
+ """
+    # Identifiers starting with "_" are ignored during doc verification,
+    # so tests can use them as fake values.
+ doc_files = documentation_paths.get_files(filename)
+ doctext = "".join([open(d).read() for d in doc_files])
+
+ if appearing_as == "inline-literal":
+ expression_list = [
+ "``" + identifier + "``"
+ for identifier in identifiers
+ if not identifier.startswith("_")
+ ]
+ elif appearing_as == "heading":
+ expression_list = [
+ "\n" + identifier + "\n(?:(?:(?:-+\n)+)|(?:(?:.+\n)+))"
+ for identifier in identifiers
+ if not identifier.startswith("_")
+ ]
+ else:
+ raise Exception(f"appearing_as = `{appearing_as}` not defined")
+
+    checked_identifiers = [i for i in identifiers if not i.startswith("_")]
+    for expression, identifier in zip(expression_list, checked_identifiers):
+ match_group = re.search(expression, doctext)
+ if not match_group:
+ raise Exception(
+ "{}: `{}` missing from doc file: `{}`".format(
+ appearing_as, identifier, filename
+ )
+ )
+
+
+@verifications.add("initial")
+def verify_run_using():
+ from gecko_taskgraph.transforms.job import registry
+
+ verify_docs(
+ filename="transforms/job.rst",
+ identifiers=registry.keys(),
+ appearing_as="inline-literal",
+ )
+
+
+@verifications.add("parameters")
+def verify_parameters_docs(parameters):
+ if not parameters.strict:
+ return
+
+ parameters_dict = dict(**parameters)
+ verify_docs(
+ filename="parameters.rst",
+ identifiers=list(parameters_dict),
+ appearing_as="inline-literal",
+ )
+
+
+@verifications.add("kinds")
+def verify_kinds_docs(kinds):
+ verify_docs(filename="kinds.rst", identifiers=kinds.keys(), appearing_as="heading")
+
+
+@verifications.add("full_task_set")
+def verify_attributes(task, taskgraph, scratch_pad, graph_config, parameters):
+ if task is None:
+ verify_docs(
+ filename="attributes.rst",
+ identifiers=list(scratch_pad["attribute_set"]),
+ appearing_as="heading",
+ )
+ return
+ scratch_pad.setdefault("attribute_set", set()).update(task.attributes.keys())
+
+
+@verifications.add("full_task_graph")
+def verify_task_graph_symbol(task, taskgraph, scratch_pad, graph_config, parameters):
+ """
+    This function verifies that the tuple
+ (collection.keys(), machine.platform, groupSymbol, symbol) is unique
+ for a target task graph.
+ """
+ if task is None:
+ return
+ task_dict = task.task
+ if "extra" in task_dict:
+ extra = task_dict["extra"]
+ if "treeherder" in extra:
+ treeherder = extra["treeherder"]
+
+ collection_keys = tuple(sorted(treeherder.get("collection", {}).keys()))
+ if len(collection_keys) != 1:
+ raise Exception(
+ "Task {} can't be in multiple treeherder collections "
+ "(the part of the platform after `/`): {}".format(
+ task.label, collection_keys
+ )
+ )
+ platform = treeherder.get("machine", {}).get("platform")
+ group_symbol = treeherder.get("groupSymbol")
+ symbol = treeherder.get("symbol")
+
+ key = (platform, collection_keys[0], group_symbol, symbol)
+ if key in scratch_pad:
+ raise Exception(
+ "Duplicate treeherder platform and symbol in tasks "
+                    "`{}` and `{}`: {} {}".format(
+ task.label,
+ scratch_pad[key],
+ f"{platform}/{collection_keys[0]}",
+ join_symbol(group_symbol, symbol),
+ )
+ )
+ else:
+ scratch_pad[key] = task.label
+
+
+@verifications.add("full_task_graph")
+def verify_trust_domain_v2_routes(
+ task, taskgraph, scratch_pad, graph_config, parameters
+):
+ """
+ This function ensures that any two tasks have distinct ``index.{trust-domain}.v2`` routes.
+ """
+ if task is None:
+ return
+ route_prefix = "index.{}.v2".format(graph_config["trust-domain"])
+ task_dict = task.task
+ routes = task_dict.get("routes", [])
+
+ for route in routes:
+ if route.startswith(route_prefix):
+ if route in scratch_pad:
+ raise Exception(
+ "conflict between {}:{} for route: {}".format(
+ task.label, scratch_pad[route], route
+ )
+ )
+ else:
+ scratch_pad[route] = task.label
+
+
+@verifications.add("full_task_graph")
+def verify_routes_notification_filters(
+ task, taskgraph, scratch_pad, graph_config, parameters
+):
+ """
+ This function ensures that only understood filters for notifications are
+ specified.
+
+ See: https://firefox-ci-tc.services.mozilla.com/docs/manual/using/task-notifications
+ """
+ if task is None:
+ return
+ route_prefix = "notify."
+ valid_filters = ("on-any", "on-completed", "on-failed", "on-exception")
+ task_dict = task.task
+ routes = task_dict.get("routes", [])
+
+ for route in routes:
+ if route.startswith(route_prefix):
+ # Get the filter of the route
+ route_filter = route.split(".")[-1]
+ if route_filter not in valid_filters:
+ raise Exception(
+ "{} has invalid notification filter ({})".format(
+ task.label, route_filter
+ )
+ )
+
+
+@verifications.add("full_task_graph")
+def verify_dependency_tiers(task, taskgraph, scratch_pad, graph_config, parameters):
+ tiers = scratch_pad
+ if task is not None:
+ tiers[task.label] = (
+ task.task.get("extra", {}).get("treeherder", {}).get("tier", sys.maxsize)
+ )
+ else:
+
+ def printable_tier(tier):
+ if tier == sys.maxsize:
+ return "unknown"
+ return tier
+
+ for task in taskgraph.tasks.values():
+ tier = tiers[task.label]
+ for d in task.dependencies.values():
+ if taskgraph[d].task.get("workerType") == "always-optimized":
+ continue
+ if "dummy" in taskgraph[d].kind:
+ continue
+ if tier < tiers[d]:
+ raise Exception(
+ "{} (tier {}) cannot depend on {} (tier {})".format(
+ task.label,
+ printable_tier(tier),
+ d,
+ printable_tier(tiers[d]),
+ )
+ )
+
+
+@verifications.add("full_task_graph")
+def verify_required_signoffs(task, taskgraph, scratch_pad, graph_config, parameters):
+ """
+    Tasks with required signoffs can't be dependencies of tasks with fewer
+    required signoffs.
+ """
+ all_required_signoffs = scratch_pad
+ if task is not None:
+ all_required_signoffs[task.label] = set(
+ task.attributes.get("required_signoffs", [])
+ )
+ else:
+
+ def printable_signoff(signoffs):
+ if len(signoffs) == 1:
+ return "required signoff {}".format(*signoffs)
+ if signoffs:
+ return "required signoffs {}".format(", ".join(signoffs))
+ return "no required signoffs"
+
+ for task in taskgraph.tasks.values():
+ required_signoffs = all_required_signoffs[task.label]
+ for d in task.dependencies.values():
+ if required_signoffs < all_required_signoffs[d]:
+ raise Exception(
+ "{} ({}) cannot depend on {} ({})".format(
+ task.label,
+ printable_signoff(required_signoffs),
+ d,
+ printable_signoff(all_required_signoffs[d]),
+ )
+ )
+
+
+@verifications.add("full_task_graph")
+def verify_aliases(task, taskgraph, scratch_pad, graph_config, parameters):
+ """
+ This function verifies that aliases are not reused.
+ """
+ if task is None:
+ return
+ if task.kind not in ("toolchain", "fetch"):
+ return
+ for_kind = scratch_pad.setdefault(task.kind, {})
+ aliases = for_kind.setdefault("aliases", {})
+ alias_attribute = f"{task.kind}-alias"
+ if task.label in aliases:
+ raise Exception(
+ "Task `{}` has a {} of `{}`, masking a task of that name.".format(
+ aliases[task.label],
+ alias_attribute,
+ task.label[len(task.kind) + 1 :],
+ )
+ )
+ labels = for_kind.setdefault("labels", set())
+ labels.add(task.label)
+ attributes = task.attributes
+ if alias_attribute in attributes:
+ keys = attributes[alias_attribute]
+ if not keys:
+ keys = []
+ elif isinstance(keys, str):
+ keys = [keys]
+ for key in keys:
+ full_key = f"{task.kind}-{key}"
+ if full_key in labels:
+ raise Exception(
+ "Task `{}` has a {} of `{}`,"
+ " masking a task of that name.".format(
+ task.label,
+ alias_attribute,
+ key,
+ )
+ )
+ if full_key in aliases:
+ raise Exception(
+                        "Duplicate {} in tasks `{}` and `{}`: {}".format(
+ alias_attribute,
+ task.label,
+ aliases[full_key],
+ key,
+ )
+ )
+ else:
+ aliases[full_key] = task.label
+
+
+@verifications.add("optimized_task_graph")
+def verify_always_optimized(task, taskgraph, scratch_pad, graph_config, parameters):
+ """
+ This function ensures that always-optimized tasks have been optimized.
+ """
+ if task is None:
+ return
+ if task.task.get("workerType") == "always-optimized":
+ raise Exception(f"Could not optimize the task {task.label!r}")
+
+
+@verifications.add("full_task_graph", run_on_projects=RELEASE_PROJECTS)
+def verify_shippable_no_sccache(task, taskgraph, scratch_pad, graph_config, parameters):
+ if task and task.attributes.get("shippable"):
+ if task.task.get("payload", {}).get("env", {}).get("USE_SCCACHE"):
+ raise Exception(f"Shippable job {task.label} cannot use sccache")
+
+
+@verifications.add("full_task_graph")
+def verify_test_packaging(task, taskgraph, scratch_pad, graph_config, parameters):
+ if task is None:
+ # In certain cases there are valid reasons for tests to be missing,
+ # don't error out when that happens.
+ missing_tests_allowed = any(
+ (
+ # user specified `--target-kind`
+ bool(parameters.get("target-kinds")),
+ # manifest scheduling is enabled
+ parameters["test_manifest_loader"] != "default",
+ )
+ )
+
+ exceptions = []
+ for task in taskgraph.tasks.values():
+ if task.kind == "build" and not task.attributes.get(
+ "skip-verify-test-packaging"
+ ):
+ build_env = task.task.get("payload", {}).get("env", {})
+ package_tests = build_env.get("MOZ_AUTOMATION_PACKAGE_TESTS")
+ shippable = task.attributes.get("shippable", False)
+ build_has_tests = scratch_pad.get(task.label)
+
+ if package_tests != "1":
+ # Shippable builds should always package tests.
+ if shippable:
+ exceptions.append(
+ "Build job {} is shippable and does not specify "
+ "MOZ_AUTOMATION_PACKAGE_TESTS=1 in the "
+ "environment.".format(task.label)
+ )
+
+ # Build tasks in the scratch pad have tests dependent on
+ # them, so we need to package tests during build.
+ if build_has_tests:
+ exceptions.append(
+ "Build job {} has tests dependent on it and does not specify "
+ "MOZ_AUTOMATION_PACKAGE_TESTS=1 in the environment".format(
+ task.label
+ )
+ )
+ else:
+ # Build tasks that aren't in the scratch pad have no
+ # dependent tests, so we shouldn't package tests.
+ # With the caveat that we expect shippable jobs to always
+ # produce tests.
+ if not build_has_tests and not shippable:
+ # If we have not generated all task kinds, we can't verify that
+ # there are no dependent tests.
+ if not missing_tests_allowed:
+ exceptions.append(
+ "Build job {} has no tests, but specifies "
+ "MOZ_AUTOMATION_PACKAGE_TESTS={} in the environment. "
+ "Unset MOZ_AUTOMATION_PACKAGE_TESTS in the task definition "
+ "to fix.".format(task.label, package_tests)
+ )
+ if exceptions:
+ raise Exception("\n".join(exceptions))
+ return
+ if task.kind == "test":
+ build_task = taskgraph[task.dependencies["build"]]
+ scratch_pad[build_task.label] = 1
+
+
+@verifications.add("full_task_graph")
+def verify_run_known_projects(task, taskgraph, scratch_pad, graph_config, parameters):
+ """Validates the inputs in run-on-projects.
+
+    We should never allow 'try' (or 'try-comm-central') in run-on-projects:
+    although syntactically valid, it is never considered for try pushes. While
+    here, we also validate against other unknown projects and typos.
+ """
+ if task and task.attributes.get("run_on_projects"):
+ projects = set(task.attributes["run_on_projects"])
+ if {"try", "try-comm-central"} & set(projects):
+ raise Exception(
+ "In task {}: using try in run-on-projects is invalid; use try "
+ "selectors to select this task on try".format(task.label)
+ )
+        # try isn't valid, but by the time we get here it's not an available project anyway.
+ valid_projects = ALL_PROJECTS | set(RUN_ON_PROJECT_ALIASES.keys())
+ invalid_projects = projects - valid_projects
+ if invalid_projects:
+ raise Exception(
+ "Task '{}' has an invalid run-on-projects value: "
+ "{}".format(task.label, invalid_projects)
+ )
diff --git a/taskcluster/gecko_taskgraph/util/workertypes.py b/taskcluster/gecko_taskgraph/util/workertypes.py
new file mode 100644
index 0000000000..b9617993dd
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/util/workertypes.py
@@ -0,0 +1,103 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from mozbuild.util import memoize
+from taskgraph.util.attributes import keymatch
+from taskgraph.util.keyed_by import evaluate_keyed_by
+
+from gecko_taskgraph.util.attributes import release_level as _release_level
+
+WORKER_TYPES = {
+ "gce/gecko-1-b-linux": ("docker-worker", "linux"),
+ "gce/gecko-2-b-linux": ("docker-worker", "linux"),
+ "gce/gecko-3-b-linux": ("docker-worker", "linux"),
+ "invalid/invalid": ("invalid", None),
+ "invalid/always-optimized": ("always-optimized", None),
+ "scriptworker-prov-v1/signing-linux-v1": ("scriptworker-signing", None),
+}
+
+
+@memoize
+def _get(graph_config, alias, level, release_level, project):
+ """Get the configuration for this worker_type alias: {provisioner,
+ worker-type, implementation, os}"""
+ level = str(level)
+
+ # handle the legacy (non-alias) format
+ if "/" in alias:
+ alias = alias.format(level=level)
+ provisioner, worker_type = alias.split("/", 1)
+ try:
+ implementation, os = WORKER_TYPES[alias]
+ return {
+ "provisioner": provisioner,
+ "worker-type": worker_type,
+ "implementation": implementation,
+ "os": os,
+ }
+ except KeyError:
+ return {
+ "provisioner": provisioner,
+ "worker-type": worker_type,
+ }
+
+ matches = keymatch(graph_config["workers"]["aliases"], alias)
+ if len(matches) > 1:
+ raise KeyError("Multiple matches for worker-type alias " + alias)
+ elif not matches:
+ raise KeyError("No matches for worker-type alias " + alias)
+ worker_config = matches[0].copy()
+
+ worker_config["provisioner"] = evaluate_keyed_by(
+ worker_config["provisioner"],
+ f"worker-type alias {alias} field provisioner",
+ {"level": level},
+ ).format(
+ **{
+ "trust-domain": graph_config["trust-domain"],
+ "level": level,
+ "alias": alias,
+ }
+ )
+ attrs = {"level": level, "release-level": release_level}
+ if project:
+ attrs["project"] = project
+ worker_config["worker-type"] = evaluate_keyed_by(
+ worker_config["worker-type"],
+ f"worker-type alias {alias} field worker-type",
+ attrs,
+ ).format(
+ **{
+ "trust-domain": graph_config["trust-domain"],
+ "level": level,
+ "alias": alias,
+ }
+ )
+
+ return worker_config
+
+
+def worker_type_implementation(graph_config, parameters, worker_type):
+ """Get the worker implementation and OS for the given workerType, where the
+ OS represents the host system, not the target OS, in the case of
+ cross-compiles."""
+ worker_config = _get(
+ graph_config, worker_type, "1", "staging", parameters["project"]
+ )
+ return worker_config["implementation"], worker_config.get("os")
+
+
+def get_worker_type(graph_config, parameters, worker_type):
+ """
+ Get the worker type provisioner and worker-type, optionally evaluating
+ aliases from the graph config.
+ """
+ worker_config = _get(
+ graph_config,
+ worker_type,
+ parameters["level"],
+ _release_level(parameters.get("project")),
+ parameters.get("project"),
+ )
+ return worker_config["provisioner"], worker_config["worker-type"]
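+
+
+# Legacy-format example, grounded in the WORKER_TYPES table above: resolving
+# "gce/gecko-{level}-b-linux" at level 3 yields provisioner "gce", worker-type
+# "gecko-3-b-linux", implementation "docker-worker" and os "linux". Aliases
+# without "/" are matched against graph_config["workers"]["aliases"] instead,
+# with their provisioner and worker-type fields evaluated by level and project.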