Diffstat (limited to 'taskcluster/gecko_taskgraph/transforms/job')
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/job/__init__.py          504
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/job/common.py            269
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/job/distro_package.py    238
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/job/hazard.py             66
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/job/mach.py               83
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/job/mozharness.py        366
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/job/mozharness_test.py   477
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/job/python_test.py        47
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/job/run_task.py          308
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/job/spidermonkey.py      109
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/job/toolchain.py         257
11 files changed, 2724 insertions, 0 deletions
diff --git a/taskcluster/gecko_taskgraph/transforms/job/__init__.py b/taskcluster/gecko_taskgraph/transforms/job/__init__.py
new file mode 100644
index 0000000000..9b6924b605
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/job/__init__.py
@@ -0,0 +1,504 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Convert a job description into a task description.
+
+Job descriptions are similar to task descriptions, but they specify how to run
+the job at a higher level, using a "run" field that can be interpreted by
+run-using handlers in `taskcluster/gecko_taskgraph/transforms/job`.
+"""
+
+
+import json
+import logging
+
+import mozpack.path as mozpath
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.python_path import import_sibling_modules
+from taskgraph.util.schema import Schema, validate_schema
+from taskgraph.util.taskcluster import get_artifact_prefix
+from voluptuous import Any, Exclusive, Extra, Optional, Required
+
+from gecko_taskgraph.transforms.cached_tasks import order_tasks
+from gecko_taskgraph.transforms.task import task_description_schema
+from gecko_taskgraph.util.copy_task import copy_task
+from gecko_taskgraph.util.workertypes import worker_type_implementation
+
+logger = logging.getLogger(__name__)
+
+# Schema for a build description
+job_description_schema = Schema(
+ {
+ # The name of the job and the job's label. At least one must be specified,
+ # and the label will be generated from the name if necessary, by prepending
+ # the kind.
+ Optional("name"): str,
+ Optional("label"): str,
+ # the following fields are passed directly through to the task description,
+ # possibly modified by the run implementation. See
+ # taskcluster/gecko_taskgraph/transforms/task.py for the schema details.
+ Required("description"): task_description_schema["description"],
+ Optional("attributes"): task_description_schema["attributes"],
+ Optional("job-from"): task_description_schema["job-from"],
+ Optional("dependencies"): task_description_schema["dependencies"],
+ Optional("if-dependencies"): task_description_schema["if-dependencies"],
+ Optional("soft-dependencies"): task_description_schema["soft-dependencies"],
+ Optional("if-dependencies"): task_description_schema["if-dependencies"],
+ Optional("requires"): task_description_schema["requires"],
+ Optional("expires-after"): task_description_schema["expires-after"],
+ Optional("expiration-policy"): task_description_schema["expiration-policy"],
+ Optional("routes"): task_description_schema["routes"],
+ Optional("scopes"): task_description_schema["scopes"],
+ Optional("tags"): task_description_schema["tags"],
+ Optional("extra"): task_description_schema["extra"],
+ Optional("treeherder"): task_description_schema["treeherder"],
+ Optional("index"): task_description_schema["index"],
+ Optional("run-on-projects"): task_description_schema["run-on-projects"],
+ Optional("shipping-phase"): task_description_schema["shipping-phase"],
+ Optional("shipping-product"): task_description_schema["shipping-product"],
+ Optional("always-target"): task_description_schema["always-target"],
+ Exclusive("optimization", "optimization"): task_description_schema[
+ "optimization"
+ ],
+ Optional("use-sccache"): task_description_schema["use-sccache"],
+ Optional("use-system-python"): bool,
+ Optional("priority"): task_description_schema["priority"],
+ # The "when" section contains descriptions of the circumstances under which
+ # this task should be included in the task graph. This will be converted
+ # into an optimization, so it cannot be specified in a job description that
+ # also gives 'optimization'.
+ Exclusive("when", "optimization"): Any(
+ None,
+ {
+ # This task only needs to be run if a file matching one of the given
+ # patterns has changed in the push. The patterns use the mozpack
+ # match function (python/mozbuild/mozpack/path.py).
+ Optional("files-changed"): [str],
+ },
+ ),
+ # A list of artifacts to install from 'fetch' tasks.
+ Optional("fetches"): {
+ str: [
+ str,
+ {
+ Required("artifact"): str,
+ Optional("dest"): str,
+ Optional("extract"): bool,
+ Optional("verify-hash"): bool,
+ },
+ ],
+ },
+ # A description of how to run this job.
+ "run": {
+ # The key to a job implementation in a peer module to this one
+ "using": str,
+ # Base work directory used to set up the task.
+ Optional("workdir"): str,
+ # Any remaining content is verified against that job implementation's
+ # own schema.
+ Extra: object,
+ },
+ Required("worker-type"): task_description_schema["worker-type"],
+ # This object will be passed through to the task description, with additions
+ # provided by the job's run-using function
+ Optional("worker"): dict,
+ }
+)
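+
+# A minimal, purely illustrative job description accepted by the schema above;
+# the label, worker-type, and command are hypothetical values, not taken from
+# a real kind:
+#
+#     example_job = {
+#         "label": "example-linux64",
+#         "description": "an example job",
+#         "worker-type": "b-linux",
+#         "worker": {"max-run-time": 3600},
+#         "when": {"files-changed": ["tools/example/**"]},
+#         "run": {"using": "run-task", "command": "./mach build"},
+#     }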
+
+transforms = TransformSequence()
+transforms.add_validate(job_description_schema)
+
+
+@transforms.add
+def rewrite_when_to_optimization(config, jobs):
+ for job in jobs:
+ when = job.pop("when", {})
+ if not when:
+ yield job
+ continue
+
+ files_changed = when.get("files-changed")
+
+ # implicitly add task config directory.
+ files_changed.append(f"{config.path}/**")
+
+ # "only when files changed" implies "skip if files have not changed"
+ job["optimization"] = {"skip-unless-changed": files_changed}
+
+ assert "when" not in job
+ yield job
+
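+# For example, a job declaring `when: {"files-changed": ["tools/example/**"]}`
+# in a kind living at taskcluster/ci/example (hypothetical paths) comes out of
+# the transform above with:
+#
+#     job["optimization"] == {
+#         "skip-unless-changed": ["tools/example/**", "taskcluster/ci/example/**"]
+#     }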
+
+@transforms.add
+def set_implementation(config, jobs):
+ for job in jobs:
+ impl, os = worker_type_implementation(
+ config.graph_config, config.params, job["worker-type"]
+ )
+ if os:
+ job.setdefault("tags", {})["os"] = os
+ if impl:
+ job.setdefault("tags", {})["worker-implementation"] = impl
+ worker = job.setdefault("worker", {})
+ assert "implementation" not in worker
+ worker["implementation"] = impl
+ if os:
+ worker["os"] = os
+ yield job
+
+
+@transforms.add
+def set_label(config, jobs):
+ for job in jobs:
+ if "label" not in job:
+ if "name" not in job:
+ raise Exception("job has neither a name nor a label")
+ job["label"] = "{}-{}".format(config.kind, job["name"])
+ if job.get("name"):
+ del job["name"]
+ yield job
+
+
+@transforms.add
+def add_resource_monitor(config, jobs):
+ for job in jobs:
+ if job.get("attributes", {}).get("resource-monitor"):
+ worker_implementation, worker_os = worker_type_implementation(
+ config.graph_config, config.params, job["worker-type"]
+ )
+ # Normalise worker os so that linux-bitbar and similar use linux tools.
+ worker_os = worker_os.split("-")[0]
+ # We don't currently support an Arm worker, due to gopsutil's indirect
+ # dependencies (go-ole)
+ if "aarch64" in job["worker-type"]:
+ yield job
+ continue
+ elif "win7" in job["worker-type"]:
+ arch = "32"
+ else:
+ arch = "64"
+ job.setdefault("fetches", {})
+ job["fetches"].setdefault("toolchain", [])
+ job["fetches"]["toolchain"].append(f"{worker_os}{arch}-resource-monitor")
+
+ if worker_implementation == "docker-worker":
+ artifact_source = "/builds/worker/monitoring/resource-monitor.json"
+ else:
+ artifact_source = "monitoring/resource-monitor.json"
+ job["worker"].setdefault("artifacts", [])
+ job["worker"]["artifacts"].append(
+ {
+ "name": "public/monitoring/resource-monitor.json",
+ "type": "file",
+ "path": artifact_source,
+ }
+ )
+ # Set env for output file
+ job["worker"].setdefault("env", {})
+ job["worker"]["env"]["RESOURCE_MONITOR_OUTPUT"] = artifact_source
+
+ yield job
+
+
+def get_attribute(dict, key, attributes, attribute_name):
+ """Get `attribute_name` from the given `attributes` dict, and if there
+ is a corresponding value, set `key` in `dict` to that value."""
+ value = attributes.get(attribute_name)
+ if value:
+ dict[key] = value
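+
+# Illustrative use (values invented): copy a task's "toolchain-artifact"
+# attribute, when present, into the artifact_names mapping under its label:
+#
+#     artifact_names = {}
+#     get_attribute(
+#         artifact_names,
+#         "toolchain-example",
+#         {"toolchain-artifact": "public/build/example.tar.zst"},
+#         "toolchain-artifact",
+#     )
+#     # artifact_names == {"toolchain-example": "public/build/example.tar.zst"}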
+
+
+@transforms.add
+def use_system_python(config, jobs):
+ for job in jobs:
+ if job.pop("use-system-python", True):
+ yield job
+ else:
+ fetches = job.setdefault("fetches", {})
+ toolchain = fetches.setdefault("toolchain", [])
+
+ moz_python_home = mozpath.join("fetches", "python")
+ if "win" in job["worker"]["os"]:
+ platform = "win64"
+ elif "linux" in job["worker"]["os"]:
+ platform = "linux64"
+ elif "macosx" in job["worker"]["os"]:
+ platform = "macosx64"
+ else:
+ raise ValueError("unexpected worker.os value {}".format(platform))
+
+ toolchain.append("{}-python".format(platform))
+
+ worker = job.setdefault("worker", {})
+ env = worker.setdefault("env", {})
+ env["MOZ_PYTHON_HOME"] = moz_python_home
+
+ yield job
+
+
+@transforms.add
+def use_fetches(config, jobs):
+ artifact_names = {}
+ extra_env = {}
+ aliases = {}
+ tasks = []
+
+ if config.kind in ("toolchain", "fetch"):
+ jobs = list(jobs)
+ tasks.extend((config.kind, j) for j in jobs)
+
+ tasks.extend(
+ (task.kind, task.__dict__)
+ for task in config.kind_dependencies_tasks.values()
+ if task.kind in ("fetch", "toolchain")
+ )
+ for (kind, task) in tasks:
+ get_attribute(
+ artifact_names, task["label"], task["attributes"], f"{kind}-artifact"
+ )
+ get_attribute(extra_env, task["label"], task["attributes"], f"{kind}-env")
+ value = task["attributes"].get(f"{kind}-alias")
+ if not value:
+ value = []
+ elif isinstance(value, str):
+ value = [value]
+ for alias in value:
+ fully_qualified = f"{kind}-{alias}"
+ label = task["label"]
+ if fully_qualified == label:
+ raise Exception(f"The alias {alias} of task {label} points to itself!")
+ aliases[fully_qualified] = label
+
+ artifact_prefixes = {}
+ for job in order_tasks(config, jobs):
+ artifact_prefixes[job["label"]] = get_artifact_prefix(job)
+
+ fetches = job.pop("fetches", None)
+ if not fetches:
+ yield job
+ continue
+
+ job_fetches = []
+ name = job.get("name") or job.get("label").replace(f"{config.kind}-", "")
+ dependencies = job.setdefault("dependencies", {})
+ worker = job.setdefault("worker", {})
+ env = worker.setdefault("env", {})
+ prefix = get_artifact_prefix(job)
+ has_sccache = False
+ for kind, artifacts in fetches.items():
+ if kind in ("fetch", "toolchain"):
+ for fetch_name in artifacts:
+ label = f"{kind}-{fetch_name}"
+ label = aliases.get(label, label)
+ if label not in artifact_names:
+ raise Exception(
+ "Missing fetch job for {kind}-{name}: {fetch}".format(
+ kind=config.kind, name=name, fetch=fetch_name
+ )
+ )
+ if label in extra_env:
+ env.update(extra_env[label])
+
+ path = artifact_names[label]
+
+ dependencies[label] = label
+ job_fetches.append(
+ {
+ "artifact": path,
+ "task": f"<{label}>",
+ "extract": True,
+ }
+ )
+
+ if kind == "toolchain" and fetch_name.endswith("-sccache"):
+ has_sccache = True
+ else:
+ if kind not in dependencies:
+ raise Exception(
+ "{name} can't fetch {kind} artifacts because "
+ "it has no {kind} dependencies!".format(name=name, kind=kind)
+ )
+ dep_label = dependencies[kind]
+ if dep_label in artifact_prefixes:
+ prefix = artifact_prefixes[dep_label]
+ else:
+ if dep_label not in config.kind_dependencies_tasks:
+ raise Exception(
+ "{name} can't fetch {kind} artifacts because "
+ "there are no tasks with label {label} in kind dependencies!".format(
+ name=name,
+ kind=kind,
+ label=dependencies[kind],
+ )
+ )
+
+ prefix = get_artifact_prefix(
+ config.kind_dependencies_tasks[dep_label]
+ )
+
+ for artifact in artifacts:
+ if isinstance(artifact, str):
+ path = artifact
+ dest = None
+ extract = True
+ verify_hash = False
+ else:
+ path = artifact["artifact"]
+ dest = artifact.get("dest")
+ extract = artifact.get("extract", True)
+ verify_hash = artifact.get("verify-hash", False)
+
+ fetch = {
+ "artifact": f"{prefix}/{path}"
+ if not path.startswith("/")
+ else path[1:],
+ "task": f"<{kind}>",
+ "extract": extract,
+ }
+ if dest is not None:
+ fetch["dest"] = dest
+ if verify_hash:
+ fetch["verify-hash"] = verify_hash
+ job_fetches.append(fetch)
+
+ if job.get("use-sccache") and not has_sccache:
+ raise Exception("Must provide an sccache toolchain if using sccache.")
+
+ job_artifact_prefixes = {
+ mozpath.dirname(fetch["artifact"])
+ for fetch in job_fetches
+ if not fetch["artifact"].startswith("public/")
+ }
+ if job_artifact_prefixes:
+ # Use taskcluster-proxy and request appropriate scope. For example, add
+ # 'scopes: [queue:get-artifact:path/to/*]' for 'path/to/artifact.tar.xz'.
+ worker["taskcluster-proxy"] = True
+ for prefix in sorted(job_artifact_prefixes):
+ scope = f"queue:get-artifact:{prefix}/*"
+ if scope not in job.setdefault("scopes", []):
+ job["scopes"].append(scope)
+
+ artifacts = {}
+ for f in job_fetches:
+ _, __, artifact = f["artifact"].rpartition("/")
+ if "dest" in f:
+ artifact = f"{f['dest']}/{artifact}"
+ task = f["task"][1:-1]
+ if artifact in artifacts:
+ raise Exception(
+ f"Task {name} depends on {artifacts[artifact]} and {task} "
+ f"that both provide {artifact}"
+ )
+ artifacts[artifact] = task
+
+ env["MOZ_FETCHES"] = {
+ "task-reference": json.dumps(
+ sorted(job_fetches, key=lambda x: sorted(x.items())), sort_keys=True
+ )
+ }
+ # The path is normalized to an absolute path in run-task
+ env.setdefault("MOZ_FETCHES_DIR", "fetches")
+
+ yield job
+
+
+@transforms.add
+def make_task_description(config, jobs):
+    """Given a build description, create a task description"""
+    # import plugin modules first, before iterating over jobs
+    import_sibling_modules(exceptions=("common.py",))
+
+    for job in jobs:
+        # only docker-worker uses a fixed absolute path to find directories
+        if job["worker"]["implementation"] == "docker-worker":
+            job["run"].setdefault("workdir", "/builds/worker")
+
+        taskdesc = copy_task(job)
+
+        # fill in some empty defaults to make run implementations easier
+        taskdesc.setdefault("attributes", {})
+        taskdesc.setdefault("dependencies", {})
+        taskdesc.setdefault("if-dependencies", [])
+        taskdesc.setdefault("soft-dependencies", [])
+        taskdesc.setdefault("routes", [])
+        taskdesc.setdefault("scopes", [])
+        taskdesc.setdefault("extra", {})
+
+        # give the function for job.run.using on this worker implementation a
+        # chance to set up the task description.
+        configure_taskdesc_for_run(
+            config, job, taskdesc, job["worker"]["implementation"]
+        )
+        del taskdesc["run"]
+
+        # yield only the task description, discarding the job description
+        yield taskdesc
+
+
+# A registry of all functions decorated with run_job_using
+registry = {}
+
+
+def run_job_using(worker_implementation, run_using, schema=None, defaults={}):
+ """Register the decorated function as able to set up a task description for
+ jobs with the given worker implementation and `run.using` property. If
+ `schema` is given, the job's run field will be verified to match it.
+
+ The decorated function should have the signature `using_foo(config, job, taskdesc)`
+ and should modify the task description in-place. The skeleton of
+ the task description is already set up, but without a payload."""
+
+ def wrap(func):
+ for_run_using = registry.setdefault(run_using, {})
+ if worker_implementation in for_run_using:
+ raise Exception(
+ "run_job_using({!r}, {!r}) already exists: {!r}".format(
+                    run_using,
+                    worker_implementation,
+                    for_run_using[worker_implementation],
+ )
+ )
+ for_run_using[worker_implementation] = (func, schema, defaults)
+ return func
+
+ return wrap
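+
+# A minimal, hypothetical registration, mirroring the real handlers in the
+# sibling modules of this package ("my-runner" and its schema are invented for
+# illustration only):
+#
+#     my_runner_schema = Schema(
+#         {Required("using"): "my-runner", Optional("workdir"): str}
+#     )
+#
+#     @run_job_using("docker-worker", "my-runner", schema=my_runner_schema)
+#     def docker_worker_my_runner(config, job, taskdesc):
+#         taskdesc["worker"]["command"] = ["true"]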
+
+
+@run_job_using(
+ "always-optimized", "always-optimized", Schema({"using": "always-optimized"})
+)
+def always_optimized(config, job, taskdesc):
+ pass
+
+
+def configure_taskdesc_for_run(config, job, taskdesc, worker_implementation):
+ """
+ Run the appropriate function for this job against the given task
+ description.
+
+ This will raise an appropriate error if no function exists, or if the job's
+ run is not valid according to the schema.
+ """
+ run_using = job["run"]["using"]
+ if run_using not in registry:
+ raise Exception(f"no functions for run.using {run_using!r}")
+
+ if worker_implementation not in registry[run_using]:
+ raise Exception(
+ "no functions for run.using {!r} on {!r}".format(
+ run_using, worker_implementation
+ )
+ )
+
+ func, schema, defaults = registry[run_using][worker_implementation]
+ for k, v in defaults.items():
+ job["run"].setdefault(k, v)
+
+ if schema:
+ validate_schema(
+ schema,
+ job["run"],
+ "In job.run using {!r}/{!r} for job {!r}:".format(
+ job["run"]["using"], worker_implementation, job["label"]
+ ),
+ )
+ func(config, job, taskdesc)
diff --git a/taskcluster/gecko_taskgraph/transforms/job/common.py b/taskcluster/gecko_taskgraph/transforms/job/common.py
new file mode 100644
index 0000000000..0c6289a6db
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/job/common.py
@@ -0,0 +1,269 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Common support for various job types. These functions are all named after the
+worker implementation they operate on, and take the same three parameters, for
+consistency.
+"""
+
+
+from taskgraph.util.keyed_by import evaluate_keyed_by
+from taskgraph.util.taskcluster import get_artifact_prefix
+
+SECRET_SCOPE = "secrets:get:project/releng/{trust_domain}/{kind}/level-{level}/{secret}"
+
+
+def add_cache(job, taskdesc, name, mount_point, skip_untrusted=False):
+ """Adds a cache based on the worker's implementation.
+
+ Args:
+ job (dict): Task's job description.
+ taskdesc (dict): Target task description to modify.
+ name (str): Name of the cache.
+ mount_point (path): Path on the host to mount the cache.
+ skip_untrusted (bool): Whether cache is used in untrusted environments
+ (default: False). Only applies to docker-worker.
+ """
+ if not job["run"].get("use-caches", True):
+ return
+
+ worker = job["worker"]
+
+ if worker["implementation"] == "docker-worker":
+ taskdesc["worker"].setdefault("caches", []).append(
+ {
+ "type": "persistent",
+ "name": name,
+ "mount-point": mount_point,
+ "skip-untrusted": skip_untrusted,
+ }
+ )
+
+ elif worker["implementation"] == "generic-worker":
+ taskdesc["worker"].setdefault("mounts", []).append(
+ {
+ "cache-name": name,
+ "directory": mount_point,
+ }
+ )
+
+ else:
+ # Caches not implemented
+ pass
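+
+# For example (illustrative): add_cache(job, taskdesc, "checkouts",
+# "/builds/worker/checkouts") appends a persistent cache entry on
+# docker-worker, a cache mount on generic-worker, and is a no-op for any
+# other implementation.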
+
+
+def add_artifacts(config, job, taskdesc, path):
+ taskdesc["worker"].setdefault("artifacts", []).append(
+ {
+ "name": get_artifact_prefix(taskdesc),
+ "path": path,
+ "type": "directory",
+ }
+ )
+
+
+def docker_worker_add_artifacts(config, job, taskdesc):
+ """Adds an artifact directory to the task"""
+ path = "{workdir}/artifacts/".format(**job["run"])
+ taskdesc["worker"].setdefault("env", {})["UPLOAD_DIR"] = path
+ add_artifacts(config, job, taskdesc, path)
+
+
+def generic_worker_add_artifacts(config, job, taskdesc):
+ """Adds an artifact directory to the task"""
+ # The path is the location on disk; it doesn't necessarily
+ # mean the artifacts will be public or private; that is set via the name
+ # attribute in add_artifacts.
+ path = get_artifact_prefix(taskdesc)
+ taskdesc["worker"].setdefault("env", {})["UPLOAD_DIR"] = path
+ add_artifacts(config, job, taskdesc, path=path)
+
+
+def support_vcs_checkout(config, job, taskdesc, sparse=False):
+ """Update a job/task with parameters to enable a VCS checkout.
+
+ This can only be used with ``run-task`` tasks, as the cache name is
+ reserved for ``run-task`` tasks.
+ """
+ worker = job["worker"]
+ is_mac = worker["os"] == "macosx"
+ is_win = worker["os"] == "windows"
+ is_linux = worker["os"] == "linux" or "linux-bitbar"
+ is_docker = worker["implementation"] == "docker-worker"
+ assert is_mac or is_win or is_linux
+
+ if is_win:
+ checkoutdir = "./build"
+ geckodir = f"{checkoutdir}/src"
+ hgstore = "y:/hg-shared"
+ elif is_docker:
+ checkoutdir = "{workdir}/checkouts".format(**job["run"])
+ geckodir = f"{checkoutdir}/gecko"
+ hgstore = f"{checkoutdir}/hg-store"
+ else:
+ checkoutdir = "./checkouts"
+ geckodir = f"{checkoutdir}/gecko"
+ hgstore = f"{checkoutdir}/hg-shared"
+
+ cache_name = "checkouts"
+
+ # Sparse checkouts need their own cache because they can interfere
+ # with clients that aren't sparse aware.
+ if sparse:
+ cache_name += "-sparse"
+
+ # Workers using Mercurial >= 5.8 will enable revlog-compression-zstd, which
+ # workers using older versions can't understand, so they can't share cache.
+ # At the moment, only docker workers use the newer version.
+ if is_docker:
+ cache_name += "-hg58"
+
+ add_cache(job, taskdesc, cache_name, checkoutdir)
+
+ taskdesc["worker"].setdefault("env", {}).update(
+ {
+ "GECKO_BASE_REPOSITORY": config.params["base_repository"],
+ "GECKO_HEAD_REPOSITORY": config.params["head_repository"],
+ "GECKO_HEAD_REV": config.params["head_rev"],
+ "HG_STORE_PATH": hgstore,
+ }
+ )
+ taskdesc["worker"]["env"].setdefault("GECKO_PATH", geckodir)
+
+ if "comm_base_repository" in config.params:
+ taskdesc["worker"]["env"].update(
+ {
+ "COMM_BASE_REPOSITORY": config.params["comm_base_repository"],
+ "COMM_HEAD_REPOSITORY": config.params["comm_head_repository"],
+ "COMM_HEAD_REV": config.params["comm_head_rev"],
+ }
+ )
+ elif job["run"].get("comm-checkout", False):
+ raise Exception(
+ "Can't checkout from comm-* repository if not given a repository."
+ )
+
+ # Give task access to hgfingerprint secret so it can pin the certificate
+ # for hg.mozilla.org.
+ taskdesc["scopes"].append("secrets:get:project/taskcluster/gecko/hgfingerprint")
+ taskdesc["scopes"].append("secrets:get:project/taskcluster/gecko/hgmointernal")
+
+ # only some worker platforms have taskcluster-proxy enabled
+ if job["worker"]["implementation"] in ("docker-worker",):
+ taskdesc["worker"]["taskcluster-proxy"] = True
+
+
+def generic_worker_hg_commands(
+ base_repo, head_repo, head_rev, path, sparse_profile=None
+):
+ """Obtain commands needed to obtain a Mercurial checkout on generic-worker.
+
+ Returns two command strings. One performs the checkout. Another logs.
+ """
+ args = [
+ r'"c:\Program Files\Mercurial\hg.exe"',
+ "robustcheckout",
+ "--sharebase",
+ r"y:\hg-shared",
+ "--purge",
+ "--upstream",
+ base_repo,
+ "--revision",
+ head_rev,
+ ]
+
+ if sparse_profile:
+ args.extend(["--config", "extensions.sparse="])
+ args.extend(["--sparseprofile", sparse_profile])
+
+ args.extend(
+ [
+ head_repo,
+ path,
+ ]
+ )
+
+ logging_args = [
+ b":: TinderboxPrint:<a href={source_repo}/rev/{revision} "
+ b"title='Built from {repo_name} revision {revision}'>{revision}</a>"
+ b"\n".format(
+ revision=head_rev, source_repo=head_repo, repo_name=head_repo.split("/")[-1]
+ ),
+ ]
+
+ return [" ".join(args), " ".join(logging_args)]
+
+
+def setup_secrets(config, job, taskdesc):
+ """Set up access to secrets via taskcluster-proxy. The value of
+ run['secrets'] should be a boolean or a list of secret names that
+ can be accessed."""
+ if not job["run"].get("secrets"):
+ return
+
+ taskdesc["worker"]["taskcluster-proxy"] = True
+ secrets = job["run"]["secrets"]
+ if secrets is True:
+ secrets = ["*"]
+ for secret in secrets:
+ taskdesc["scopes"].append(
+ SECRET_SCOPE.format(
+ trust_domain=config.graph_config["trust-domain"],
+ kind=job["treeherder"]["kind"],
+ level=config.params["level"],
+ secret=secret,
+ )
+ )
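+
+# For example (hypothetical values): with trust-domain "gecko", treeherder
+# kind "build", level 3, and run["secrets"] = ["gpg-key"], the task gains the
+# scope:
+#
+#     secrets:get:project/releng/gecko/build/level-3/gpg-key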
+
+
+def add_tooltool(config, job, taskdesc, internal=False):
+ """Give the task access to tooltool.
+
+ Enables the tooltool cache. Adds releng proxy. Configures scopes.
+
+ By default, only public tooltool access will be granted. Access to internal
+ tooltool can be enabled via ``internal=True``.
+
+ This can only be used with ``run-task`` tasks, as the cache name is
+ reserved for use with ``run-task``.
+ """
+
+ if job["worker"]["implementation"] in ("docker-worker",):
+ add_cache(
+ job,
+ taskdesc,
+ "tooltool-cache",
+ "{workdir}/tooltool-cache".format(**job["run"]),
+ )
+
+ taskdesc["worker"].setdefault("env", {}).update(
+ {
+ "TOOLTOOL_CACHE": "{workdir}/tooltool-cache".format(**job["run"]),
+ }
+ )
+ elif not internal:
+ return
+
+ taskdesc["worker"]["taskcluster-proxy"] = True
+ taskdesc["scopes"].extend(
+ [
+ "project:releng:services/tooltool/api/download/public",
+ ]
+ )
+
+ if internal:
+ taskdesc["scopes"].extend(
+ [
+ "project:releng:services/tooltool/api/download/internal",
+ ]
+ )
+
+
+def get_expiration(config, policy="default"):
+ expires = evaluate_keyed_by(
+ config.graph_config["expiration-policy"],
+ "artifact expiration",
+ {"project": config.params["project"]},
+ )[policy]
+ return expires
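+
+# For example, with a (hypothetical) expiration-policy in the graph config of:
+#
+#     expiration-policy:
+#         by-project:
+#             try: {"default": "1 month", "medium": "1 month"}
+#             default: {"default": "1 year", "medium": "3 months"}
+#
+# get_expiration(config, "medium") first resolves the by-project switch for
+# the current project, then returns that mapping's "medium" entry.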
diff --git a/taskcluster/gecko_taskgraph/transforms/job/distro_package.py b/taskcluster/gecko_taskgraph/transforms/job/distro_package.py
new file mode 100644
index 0000000000..44236b1abc
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/job/distro_package.py
@@ -0,0 +1,238 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Support for building Debian and Ubuntu distribution packages
+"""
+
+
+import os
+import re
+
+import taskgraph
+from taskgraph.util.schema import Schema
+from taskgraph.util.taskcluster import get_root_url
+from voluptuous import Any, Optional, Required
+
+from gecko_taskgraph import GECKO
+from gecko_taskgraph.transforms.job import run_job_using
+from gecko_taskgraph.transforms.job.common import add_artifacts
+from gecko_taskgraph.util.hash import hash_path
+
+DSC_PACKAGE_RE = re.compile(".*(?=_)")
+SOURCE_PACKAGE_RE = re.compile(r".*(?=[-_]\d)")
+
+source_definition = {
+ Required("url"): str,
+ Required("sha256"): str,
+}
+
+common_schema = Schema(
+ {
+        # URL/SHA256 of the source to build, which can be either a Debian
+        # source control file (.dsc) or a tarball.
+ Required(Any("dsc", "tarball")): source_definition,
+ # Package name. Normally derived from the source control or tarball file
+ # name. Use in case the name doesn't match DSC_PACKAGE_RE or
+ # SOURCE_PACKAGE_RE.
+ Optional("name"): str,
+ # Patch to apply to the extracted source.
+ Optional("patch"): str,
+ # Command to run before dpkg-buildpackage.
+ Optional("pre-build-command"): str,
+ # Architecture to build the package for.
+ Optional("arch"): str,
+ # List of package tasks to get build dependencies from.
+ Optional("packages"): [str],
+ # What resolver to use to install build dependencies. The default
+ # (apt-get) is good in most cases, but in subtle cases involving
+ # a *-backports archive, its solver might not be able to find a
+ # solution that satisfies the build dependencies.
+ Optional("resolver"): Any("apt-get", "aptitude"),
+ # Base work directory used to set up the task.
+ Required("workdir"): str,
+ }
+)
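+
+# An illustrative `run` stanza for a Debian package task (URL, hash, and
+# package are placeholders, not a real definition):
+#
+#     run = {
+#         "using": "debian-package",
+#         "dist": "bullseye",
+#         "dsc": {
+#             "url": "https://example.org/pool/main/e/example/example_1.0-1.dsc",
+#             "sha256": "0" * 64,
+#         },
+#         "workdir": "/builds/worker",
+#     }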
+
+debian_schema = common_schema.extend(
+ {
+ Required("using"): "debian-package",
+ # Debian distribution
+ Required("dist"): str,
+ }
+)
+
+ubuntu_schema = common_schema.extend(
+ {
+ Required("using"): "ubuntu-package",
+ # Ubuntu distribution
+ Required("dist"): str,
+ }
+)
+
+
+def common_package(config, job, taskdesc, distro, version):
+ run = job["run"]
+
+ name = taskdesc["label"].replace(f"{config.kind}-", "", 1)
+
+ arch = run.get("arch", "amd64")
+
+ worker = taskdesc["worker"]
+ worker.setdefault("artifacts", [])
+
+ image = "%s%d" % (distro, version)
+ if arch != "amd64":
+ image += "-" + arch
+ image += "-packages"
+ worker["docker-image"] = {"in-tree": image}
+
+ add_artifacts(config, job, taskdesc, path="/tmp/artifacts")
+
+ env = worker.setdefault("env", {})
+ env["DEBFULLNAME"] = "Mozilla build team"
+ env["DEBEMAIL"] = "dev-builds@lists.mozilla.org"
+
+ if "dsc" in run:
+ src = run["dsc"]
+ unpack = "dpkg-source -x {src_file} {package}"
+ package_re = DSC_PACKAGE_RE
+ elif "tarball" in run:
+ src = run["tarball"]
+ unpack = (
+ "mkdir {package} && "
+ "tar -C {package} -axf {src_file} --strip-components=1"
+ )
+ package_re = SOURCE_PACKAGE_RE
+ else:
+ raise RuntimeError("Unreachable")
+ src_url = src["url"]
+ src_file = os.path.basename(src_url)
+ src_sha256 = src["sha256"]
+ package = run.get("name")
+ if not package:
+ package = package_re.match(src_file).group(0)
+ unpack = unpack.format(src_file=src_file, package=package)
+
+ resolver = run.get("resolver", "apt-get")
+ if resolver == "apt-get":
+ resolver = "apt-get -yyq --no-install-recommends"
+ elif resolver == "aptitude":
+ resolver = (
+ "aptitude -y --without-recommends -o "
+ "Aptitude::ProblemResolver::Hints::KeepBuildDeps="
+ '"reject {}-build-deps :UNINST"'
+ ).format(package)
+ else:
+ raise RuntimeError("Unreachable")
+
+ adjust = ""
+ if "patch" in run:
+ # We don't use robustcheckout or run-task to get a checkout. So for
+ # this one file we'd need from a checkout, download it.
+ env["PATCH_URL"] = config.params.file_url(
+ "build/debian-packages/{patch}".format(patch=run["patch"]),
+ )
+ adjust += "curl -sL $PATCH_URL | patch -p1 && "
+ if "pre-build-command" in run:
+ adjust += run["pre-build-command"] + " && "
+ if "tarball" in run:
+ adjust += "mv ../{src_file} ../{package}_{ver}.orig.tar.gz && ".format(
+ src_file=src_file,
+ package=package,
+ ver="$(dpkg-parsechangelog | awk '$1==\"Version:\"{print $2}' | cut -f 1 -d -)",
+ )
+ if "patch" not in run and "pre-build-command" not in run:
+ adjust += (
+ 'debchange -l ".{prefix}moz" --distribution "{dist}"'
+ ' "Mozilla backport for {dist}." < /dev/null && '
+ ).format(
+ prefix=name.split("-", 1)[0],
+ dist=run["dist"],
+ )
+
+ worker["command"] = [
+ "sh",
+ "-x",
+ "-c",
+ # Add sources for packages coming from other package tasks.
+ "/usr/local/sbin/setup_packages.sh {root_url} $PACKAGES && "
+ "apt-get update && "
+ # Upgrade packages that might have new versions in package tasks.
+ "apt-get dist-upgrade && " "cd /tmp && "
+ # Get, validate and extract the package source.
+ "(dget -d -u {src_url} || exit 100) && "
+ 'echo "{src_sha256} {src_file}" | sha256sum -c && '
+ "{unpack} && "
+ "cd {package} && "
+ # Optionally apply patch and/or pre-build command.
+ "{adjust}"
+ # Install the necessary build dependencies.
+ "(cd ..; mk-build-deps -i -r {package}/debian/control -t '{resolver}' || exit 100) && "
+ # Build the package
+ 'DEB_BUILD_OPTIONS="parallel=$(nproc) nocheck" dpkg-buildpackage -sa && '
+ # Copy the artifacts
+ "mkdir -p {artifacts}/apt && "
+ "dcmd cp ../{package}_*.changes {artifacts}/apt/ && "
+ "cd {artifacts} && "
+ # Make the artifacts directory usable as an APT repository.
+ "apt-ftparchive sources apt | gzip -c9 > apt/Sources.gz && "
+ "apt-ftparchive packages apt | gzip -c9 > apt/Packages.gz".format(
+ root_url=get_root_url(False),
+ package=package,
+ src_url=src_url,
+ src_file=src_file,
+ src_sha256=src_sha256,
+ unpack=unpack,
+ adjust=adjust,
+ artifacts="/tmp/artifacts",
+ resolver=resolver,
+ ),
+ ]
+
+ if run.get("packages"):
+ env = worker.setdefault("env", {})
+ env["PACKAGES"] = {
+ "task-reference": " ".join(f"<{p}>" for p in run["packages"])
+ }
+ deps = taskdesc.setdefault("dependencies", {})
+ for p in run["packages"]:
+ deps[p] = f"packages-{p}"
+
+ # Use the command generated above as the base for the index hash.
+ # We rely on it not varying depending on the head_repository or head_rev.
+ digest_data = list(worker["command"])
+ if "patch" in run:
+ digest_data.append(
+ hash_path(os.path.join(GECKO, "build", "debian-packages", run["patch"]))
+ )
+
+ if not taskgraph.fast:
+ taskdesc["cache"] = {
+ "type": "packages.v1",
+ "name": name,
+ "digest-data": digest_data,
+ }
+
+
+@run_job_using("docker-worker", "debian-package", schema=debian_schema)
+def docker_worker_debian_package(config, job, taskdesc):
+ run = job["run"]
+ version = {
+ "wheezy": 7,
+ "jessie": 8,
+ "stretch": 9,
+ "buster": 10,
+ "bullseye": 11,
+ }[run["dist"]]
+ common_package(config, job, taskdesc, "debian", version)
+
+
+@run_job_using("docker-worker", "ubuntu-package", schema=ubuntu_schema)
+def docker_worker_ubuntu_package(config, job, taskdesc):
+ run = job["run"]
+ version = {
+ "bionic": 1804,
+ "focal": 2004,
+ }[run["dist"]]
+ common_package(config, job, taskdesc, "ubuntu", version)
diff --git a/taskcluster/gecko_taskgraph/transforms/job/hazard.py b/taskcluster/gecko_taskgraph/transforms/job/hazard.py
new file mode 100644
index 0000000000..af0e8616e0
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/job/hazard.py
@@ -0,0 +1,66 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Support for running hazard jobs via dedicated scripts
+"""
+
+
+from taskgraph.util.schema import Schema
+from voluptuous import Any, Optional, Required
+
+from gecko_taskgraph.transforms.job import configure_taskdesc_for_run, run_job_using
+from gecko_taskgraph.transforms.job.common import (
+ add_tooltool,
+ docker_worker_add_artifacts,
+ setup_secrets,
+)
+
+haz_run_schema = Schema(
+ {
+ Required("using"): "hazard",
+ # The command to run within the task image (passed through to the worker)
+ Required("command"): str,
+ # The mozconfig to use; default in the script is used if omitted
+ Optional("mozconfig"): str,
+ # The set of secret names to which the task has access; these are prefixed
+ # with `project/releng/gecko/{treeherder.kind}/level-{level}/`. Setting
+ # this will enable any worker features required and set the task's scopes
+ # appropriately. `true` here means ['*'], all secrets. Not supported on
+ # Windows
+ Optional("secrets"): Any(bool, [str]),
+ # Base work directory used to set up the task.
+ Optional("workdir"): str,
+ }
+)
+
+
+@run_job_using("docker-worker", "hazard", schema=haz_run_schema)
+def docker_worker_hazard(config, job, taskdesc):
+ run = job["run"]
+
+ worker = taskdesc["worker"] = job["worker"]
+ worker.setdefault("artifacts", [])
+
+ docker_worker_add_artifacts(config, job, taskdesc)
+ worker.setdefault("required-volumes", []).append(
+ "{workdir}/workspace".format(**run)
+ )
+ add_tooltool(config, job, taskdesc)
+ setup_secrets(config, job, taskdesc)
+
+ env = worker["env"]
+ env.update(
+ {
+ "MOZ_BUILD_DATE": config.params["moz_build_date"],
+ "MOZ_SCM_LEVEL": config.params["level"],
+ }
+ )
+
+ # script parameters
+ if run.get("mozconfig"):
+ env["MOZCONFIG"] = run.pop("mozconfig")
+
+ run["using"] = "run-task"
+ run["cwd"] = run["workdir"]
+ configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
diff --git a/taskcluster/gecko_taskgraph/transforms/job/mach.py b/taskcluster/gecko_taskgraph/transforms/job/mach.py
new file mode 100644
index 0000000000..a418b44794
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/job/mach.py
@@ -0,0 +1,83 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Support for running mach tasks (via run-task)
+"""
+
+from taskgraph.util.schema import Schema, taskref_or_string
+from voluptuous import Any, Optional, Required
+
+from gecko_taskgraph.transforms.job import configure_taskdesc_for_run, run_job_using
+
+mach_schema = Schema(
+ {
+ Required("using"): "mach",
+ # The mach command (omitting `./mach`) to run
+ Required("mach"): taskref_or_string,
+        # The version of Python to run with. Either an absolute path to the binary
+        # on the worker, or a version identifier (e.g. python2.7 or 3.8). No
+        # validation is performed to ensure the specified binaries actually exist.
+ Optional("python-version"): Any(str, int, float),
+ # The sparse checkout profile to use. Value is the filename relative to the
+ # directory where sparse profiles are defined (build/sparse-profiles/).
+ Optional("sparse-profile"): Any(str, None),
+ # if true, perform a checkout of a comm-central based branch inside the
+ # gecko checkout
+ Required("comm-checkout"): bool,
+ # Base work directory used to set up the task.
+ Optional("workdir"): str,
+ # Context to substitute into the command using format string
+        # substitution (e.g. {value}). This is useful if certain aspects of the
+ # command need to be generated in transforms.
+ Optional("command-context"): dict,
+ }
+)
+
+
+defaults = {
+ "comm-checkout": False,
+}
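+
+# An illustrative `run` stanza (the mach command is invented); fields left
+# out fall back to the defaults above:
+#
+#     run = {
+#         "using": "mach",
+#         "mach": "python-test --subsuite example",
+#         "python-version": 3,
+#     }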
+
+
+@run_job_using("docker-worker", "mach", schema=mach_schema, defaults=defaults)
+@run_job_using("generic-worker", "mach", schema=mach_schema, defaults=defaults)
+def configure_mach(config, job, taskdesc):
+ run = job["run"]
+ worker = job["worker"]
+
+ additional_prefix = []
+ if worker["os"] == "macosx":
+ additional_prefix = ["LC_ALL=en_US.UTF-8", "LANG=en_US.UTF-8"]
+
+ python = run.get("python-version")
+ if python:
+ del run["python-version"]
+
+ if worker["os"] == "macosx" and python == 3:
+ python = "/usr/local/bin/python3"
+
+ python = str(python)
+ try:
+ float(python)
+ python = "python" + python
+ except ValueError:
+ pass
+
+ additional_prefix.append(python)
+
+ command_prefix = " ".join(additional_prefix + ["./mach "])
+
+ mach = run["mach"]
+ if isinstance(mach, dict):
+ ref, pattern = next(iter(mach.items()))
+ command = {ref: command_prefix + pattern}
+ else:
+ command = command_prefix + mach
+
+ # defer to the run_task implementation
+ run["command"] = command
+ run["cwd"] = "{checkout}"
+ run["using"] = "run-task"
+ del run["mach"]
+ configure_taskdesc_for_run(config, job, taskdesc, job["worker"]["implementation"])
diff --git a/taskcluster/gecko_taskgraph/transforms/job/mozharness.py b/taskcluster/gecko_taskgraph/transforms/job/mozharness.py
new file mode 100644
index 0000000000..3dbcc6e015
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/job/mozharness.py
@@ -0,0 +1,366 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+
+Support for running jobs via mozharness. Ideally, most stuff gets run this
+way, and certainly anything using mozharness should use this approach.
+
+"""
+
+import json
+from textwrap import dedent
+
+from mozpack import path as mozpath
+from taskgraph.util.schema import Schema
+from voluptuous import Any, Optional, Required
+from voluptuous.validators import Match
+
+from gecko_taskgraph.transforms.job import configure_taskdesc_for_run, run_job_using
+from gecko_taskgraph.transforms.job.common import (
+ docker_worker_add_artifacts,
+ generic_worker_add_artifacts,
+ get_expiration,
+ setup_secrets,
+)
+from gecko_taskgraph.transforms.task import get_branch_repo, get_branch_rev
+from gecko_taskgraph.util.attributes import is_try
+
+mozharness_run_schema = Schema(
+ {
+ Required("using"): "mozharness",
+ # the mozharness script used to run this task, relative to the testing/
+ # directory and using forward slashes even on Windows
+ Required("script"): str,
+ # Additional paths to look for mozharness configs in. These should be
+ # relative to the base of the source checkout
+ Optional("config-paths"): [str],
+ # the config files required for the task, relative to
+ # testing/mozharness/configs or one of the paths specified in
+ # `config-paths` and using forward slashes even on Windows
+ Required("config"): [str],
+ # any additional actions to pass to the mozharness command
+ Optional("actions"): [
+ Match("^[a-z0-9-]+$", "actions must be `-` seperated alphanumeric strings")
+ ],
+ # any additional options (without leading --) to be passed to mozharness
+ Optional("options"): [
+ Match(
+ "^[a-z0-9-]+(=[^ ]+)?$",
+ "options must be `-` seperated alphanumeric strings (with optional argument)",
+ )
+ ],
+ # --custom-build-variant-cfg value
+ Optional("custom-build-variant-cfg"): str,
+ # Extra configuration options to pass to mozharness.
+ Optional("extra-config"): dict,
+        # If not false, tooltool downloads will be enabled via relengAPIProxy
+        # for either just public files or all files. Not supported on Windows.
+ Required("tooltool-downloads"): Any(
+ False,
+ "public",
+ "internal",
+ ),
+ # The set of secret names to which the task has access; these are prefixed
+ # with `project/releng/gecko/{treeherder.kind}/level-{level}/`. Setting
+ # this will enable any worker features required and set the task's scopes
+ # appropriately. `true` here means ['*'], all secrets. Not supported on
+ # Windows
+ Required("secrets"): Any(bool, [str]),
+ # If true, taskcluster proxy will be enabled; note that it may also be enabled
+ # automatically e.g., for secrets support. Not supported on Windows.
+ Required("taskcluster-proxy"): bool,
+ # If false, indicate that builds should skip producing artifacts. Not
+ # supported on Windows.
+ Required("keep-artifacts"): bool,
+ # If specified, use the in-tree job script specified.
+ Optional("job-script"): str,
+ Required("requires-signed-builds"): bool,
+ # Whether or not to use caches.
+ Optional("use-caches"): bool,
+        # If false, don't set MOZ_SIMPLE_PACKAGE_NAME.
+        # Can only be disabled on Windows.
+        Required("use-simple-package"): bool,
+        # If false, don't pass --branch to the mozharness script.
+        # Can only be disabled on Windows.
+        Required("use-magic-mh-args"): bool,
+ # if true, perform a checkout of a comm-central based branch inside the
+ # gecko checkout
+ Required("comm-checkout"): bool,
+ # Base work directory used to set up the task.
+ Optional("workdir"): str,
+ Optional("run-as-root"): bool,
+ }
+)
+
+
+mozharness_defaults = {
+ "tooltool-downloads": False,
+ "secrets": False,
+ "taskcluster-proxy": False,
+ "keep-artifacts": True,
+ "requires-signed-builds": False,
+ "use-simple-package": True,
+ "use-magic-mh-args": True,
+ "comm-checkout": False,
+ "run-as-root": False,
+}
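+
+# An illustrative build `run` stanza; script and config paths follow the
+# conventions documented in the schema above but are placeholders here:
+#
+#     run = {
+#         "using": "mozharness",
+#         "script": "mozharness/scripts/example_build.py",
+#         "config": ["builds/example_config.py"],
+#         "actions": ["build"],
+#     }
+#
+# All other required fields are filled in from mozharness_defaults.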
+
+
+@run_job_using(
+ "docker-worker",
+ "mozharness",
+ schema=mozharness_run_schema,
+ defaults=mozharness_defaults,
+)
+def mozharness_on_docker_worker_setup(config, job, taskdesc):
+ run = job["run"]
+
+ worker = taskdesc["worker"] = job["worker"]
+
+ if not run.pop("use-simple-package", None):
+ raise NotImplementedError(
+ "Simple packaging cannot be disabled via"
+ "'use-simple-package' on docker-workers"
+ )
+ if not run.pop("use-magic-mh-args", None):
+ raise NotImplementedError(
+ "Cannot disabled mh magic arg passing via"
+ "'use-magic-mh-args' on docker-workers"
+ )
+
+ # Running via mozharness assumes an image that contains build.sh:
+ # by default, debian11-amd64-build, but it could be another image (like
+ # android-build).
+ worker.setdefault("docker-image", {"in-tree": "debian11-amd64-build"})
+
+ worker.setdefault("artifacts", []).append(
+ {
+ "name": "public/logs",
+ "path": "{workdir}/logs/".format(**run),
+ "type": "directory",
+ "expires-after": get_expiration(config, "medium"),
+ }
+ )
+ worker["taskcluster-proxy"] = run.pop("taskcluster-proxy", None)
+ docker_worker_add_artifacts(config, job, taskdesc)
+
+ env = worker.setdefault("env", {})
+ env.update(
+ {
+ "WORKSPACE": "{workdir}/workspace".format(**run),
+ "MOZHARNESS_CONFIG": " ".join(run.pop("config")),
+ "MOZHARNESS_SCRIPT": run.pop("script"),
+ "MH_BRANCH": config.params["project"],
+ "MOZ_SOURCE_CHANGESET": get_branch_rev(config),
+ "MOZ_SOURCE_REPO": get_branch_repo(config),
+ "MH_BUILD_POOL": "taskcluster",
+ "MOZ_BUILD_DATE": config.params["moz_build_date"],
+ "MOZ_SCM_LEVEL": config.params["level"],
+ "PYTHONUNBUFFERED": "1",
+ }
+ )
+
+ worker.setdefault("required-volumes", []).append(env["WORKSPACE"])
+
+ if "actions" in run:
+ env["MOZHARNESS_ACTIONS"] = " ".join(run.pop("actions"))
+
+ if "options" in run:
+ env["MOZHARNESS_OPTIONS"] = " ".join(run.pop("options"))
+
+ if "config-paths" in run:
+ env["MOZHARNESS_CONFIG_PATHS"] = " ".join(run.pop("config-paths"))
+
+ if "custom-build-variant-cfg" in run:
+ env["MH_CUSTOM_BUILD_VARIANT_CFG"] = run.pop("custom-build-variant-cfg")
+
+ extra_config = run.pop("extra-config", {})
+ extra_config["objdir"] = "obj-build"
+ env["EXTRA_MOZHARNESS_CONFIG"] = json.dumps(extra_config, sort_keys=True)
+
+ if "job-script" in run:
+ env["JOB_SCRIPT"] = run["job-script"]
+
+ if is_try(config.params):
+ env["TRY_COMMIT_MSG"] = config.params["message"]
+
+ # if we're not keeping artifacts, set some env variables to empty values
+ # that will cause the build process to skip copying the results to the
+ # artifacts directory. This will have no effect for operations that are
+ # not builds.
+ if not run.pop("keep-artifacts"):
+ env["DIST_TARGET_UPLOADS"] = ""
+ env["DIST_UPLOADS"] = ""
+
+ # Retry if mozharness returns TBPL_RETRY
+ worker["retry-exit-status"] = [4]
+
+ setup_secrets(config, job, taskdesc)
+
+ run["using"] = "run-task"
+ run["command"] = mozpath.join(
+ "${GECKO_PATH}",
+ run.pop("job-script", "taskcluster/scripts/builder/build-linux.sh"),
+ )
+ run.pop("secrets")
+ run.pop("requires-signed-builds")
+
+ configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
+
+
+@run_job_using(
+ "generic-worker",
+ "mozharness",
+ schema=mozharness_run_schema,
+ defaults=mozharness_defaults,
+)
+def mozharness_on_generic_worker(config, job, taskdesc):
+ assert job["worker"]["os"] in (
+ "windows",
+ "macosx",
+ ), "only supports windows and macOS right now: {}".format(job["label"])
+
+ run = job["run"]
+
+ # fail if invalid run options are included
+ invalid = []
+ if not run.pop("keep-artifacts", True):
+ invalid.append("keep-artifacts")
+ if invalid:
+ raise Exception(
+ "Jobs run using mozharness on Windows do not support properties "
+ + ", ".join(invalid)
+ )
+
+ worker = taskdesc["worker"] = job["worker"]
+
+ worker["taskcluster-proxy"] = run.pop("taskcluster-proxy", None)
+
+ setup_secrets(config, job, taskdesc)
+
+ taskdesc["worker"].setdefault("artifacts", []).append(
+ {
+ "name": "public/logs",
+ "path": "logs",
+ "type": "directory",
+ "expires-after": get_expiration(config, "medium"),
+ }
+ )
+
+ if not worker.get("skip-artifacts", False):
+ generic_worker_add_artifacts(config, job, taskdesc)
+
+ env = worker.setdefault("env", {})
+ env.update(
+ {
+ "MOZ_BUILD_DATE": config.params["moz_build_date"],
+ "MOZ_SCM_LEVEL": config.params["level"],
+ "MH_BRANCH": config.params["project"],
+ "MOZ_SOURCE_CHANGESET": get_branch_rev(config),
+ "MOZ_SOURCE_REPO": get_branch_repo(config),
+ }
+ )
+ if run.pop("use-simple-package"):
+ env.update({"MOZ_SIMPLE_PACKAGE_NAME": "target"})
+
+ extra_config = run.pop("extra-config", {})
+ extra_config["objdir"] = "obj-build"
+ env["EXTRA_MOZHARNESS_CONFIG"] = json.dumps(extra_config, sort_keys=True)
+
+    # The Windows generic-worker uses batch files to pass environment variables
+    # to commands. Setting a variable to empty in a batch file unsets it, so if
+    # there is no try commit message, pass a placeholder instead, so that
+    # mozharness doesn't try to find the commit message on its own.
+ if is_try(config.params):
+ env["TRY_COMMIT_MSG"] = config.params["message"] or "no commit message"
+
+ if not job["attributes"]["build_platform"].startswith(("win", "macosx")):
+ raise Exception(
+ "Task generation for mozharness build jobs currently only supported on "
+ "Windows and macOS"
+ )
+
+ mh_command = []
+ if job["worker"]["os"] == "windows":
+ system_python_dir = "c:/mozilla-build/python3/"
+ gecko_path = "%GECKO_PATH%"
+ else:
+ system_python_dir = ""
+ gecko_path = "$GECKO_PATH"
+
+ if run.get("use-system-python", True):
+ python_bindir = system_python_dir
+ else:
+ # $MOZ_PYTHON_HOME is going to be substituted in run-task, when we
+ # know the actual MOZ_PYTHON_HOME value.
+ is_windows = job["worker"]["os"] == "windows"
+ if is_windows:
+ python_bindir = "%MOZ_PYTHON_HOME%/"
+ else:
+ python_bindir = "${MOZ_PYTHON_HOME}/bin/"
+
+ mh_command = ["{}python3".format(python_bindir)]
+
+ mh_command += [
+ f"{gecko_path}/mach",
+ "python",
+ "{}/testing/{}".format(gecko_path, run.pop("script")),
+ ]
+
+ for path in run.pop("config-paths", []):
+ mh_command.append(f"--extra-config-path {gecko_path}/{path}")
+
+ for cfg in run.pop("config"):
+ mh_command.extend(("--config", cfg))
+ if run.pop("use-magic-mh-args"):
+ mh_command.extend(("--branch", config.params["project"]))
+ if job["worker"]["os"] == "windows":
+ mh_command.extend(("--work-dir", r"%cd:Z:=z:%\workspace"))
+ for action in run.pop("actions", []):
+ mh_command.append("--" + action)
+
+ for option in run.pop("options", []):
+ mh_command.append("--" + option)
+ if run.get("custom-build-variant-cfg"):
+ mh_command.append("--custom-build-variant")
+ mh_command.append(run.pop("custom-build-variant-cfg"))
+
+ if job["worker"]["os"] == "macosx":
+ # Ideally, we'd use shellutil.quote, but that would single-quote
+ # $GECKO_PATH, which would defeat having the variable in the command
+ # in the first place, as it wouldn't be expanded.
+ # In practice, arguments are expected not to contain characters that
+ # would require quoting.
+ mh_command = " ".join(mh_command)
+
+ run["using"] = "run-task"
+ run["command"] = mh_command
+ run.pop("secrets")
+ run.pop("requires-signed-builds")
+ run.pop("job-script", None)
+ configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
+
+ # Everything past this point is Windows-specific.
+ if job["worker"]["os"] == "macosx":
+ return
+
+ if taskdesc.get("use-sccache"):
+ worker["command"] = (
+ [
+ # Make the comment part of the first command, as it will help users to
+ # understand what is going on, and why these steps are implemented.
+ dedent(
+ """\
+ :: sccache currently uses the full compiler commandline as input to the
+ :: cache hash key, so create a symlink to the task dir and build from
+ :: the symlink dir to get consistent paths.
+ if exist z:\\build rmdir z:\\build"""
+ ),
+ r"mklink /d z:\build %cd%",
+ # Grant delete permission on the link to everyone.
+ r"icacls z:\build /grant *S-1-1-0:D /L",
+ r"cd /d z:\build",
+ ]
+ + worker["command"]
+ )
diff --git a/taskcluster/gecko_taskgraph/transforms/job/mozharness_test.py b/taskcluster/gecko_taskgraph/transforms/job/mozharness_test.py
new file mode 100644
index 0000000000..eb4aea609f
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/job/mozharness_test.py
@@ -0,0 +1,477 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import json
+import os
+import re
+
+from taskgraph.util.schema import Schema
+from taskgraph.util.taskcluster import get_artifact_path, get_artifact_url
+from voluptuous import Extra, Optional, Required
+
+from gecko_taskgraph.transforms.job import configure_taskdesc_for_run, run_job_using
+from gecko_taskgraph.transforms.job.common import get_expiration, support_vcs_checkout
+from gecko_taskgraph.transforms.test import normpath, test_description_schema
+from gecko_taskgraph.util.attributes import is_try
+
+VARIANTS = [
+ "shippable",
+ "shippable-qr",
+ "shippable-lite",
+ "shippable-lite-qr",
+ "devedition",
+ "pgo",
+ "asan",
+ "stylo",
+ "qr",
+ "ccov",
+]
+
+
+def get_variant(test_platform):
+ for v in VARIANTS:
+ if f"-{v}/" in test_platform:
+ return v
+ return ""
+
+
+mozharness_test_run_schema = Schema(
+ {
+ Required("using"): "mozharness-test",
+ Required("test"): {
+ Required("test-platform"): str,
+ Required("mozharness"): test_description_schema["mozharness"],
+ Required("docker-image"): test_description_schema["docker-image"],
+ Required("loopback-video"): test_description_schema["loopback-video"],
+ Required("loopback-audio"): test_description_schema["loopback-audio"],
+ Required("max-run-time"): test_description_schema["max-run-time"],
+ Optional("retry-exit-status"): test_description_schema["retry-exit-status"],
+ Extra: object,
+ },
+ # Base work directory used to set up the task.
+ Optional("workdir"): str,
+ }
+)
+
+
+def test_packages_url(taskdesc):
+ """Account for different platforms that name their test packages differently"""
+ artifact_url = get_artifact_url(
+ "<build>", get_artifact_path(taskdesc, "target.test_packages.json")
+ )
+ # for android shippable we need to add 'en-US' to the artifact url
+ test = taskdesc["run"]["test"]
+ if "android" in test["test-platform"] and (
+ get_variant(test["test-platform"])
+ in ("shippable", "shippable-qr", "shippable-lite", "shippable-lite-qr")
+ ):
+ head, tail = os.path.split(artifact_url)
+ artifact_url = os.path.join(head, "en-US", tail)
+ return artifact_url
+
+
+def installer_url(taskdesc):
+ test = taskdesc["run"]["test"]
+ mozharness = test["mozharness"]
+
+ if "installer-url" in mozharness:
+ installer_url = mozharness["installer-url"]
+ else:
+ upstream_task = (
+ "<build-signing>" if mozharness["requires-signed-builds"] else "<build>"
+ )
+ installer_url = get_artifact_url(
+ upstream_task, mozharness["build-artifact-name"]
+ )
+
+ return installer_url
+
+
+@run_job_using("docker-worker", "mozharness-test", schema=mozharness_test_run_schema)
+def mozharness_test_on_docker(config, job, taskdesc):
+ run = job["run"]
+ test = taskdesc["run"]["test"]
+ mozharness = test["mozharness"]
+ worker = taskdesc["worker"] = job["worker"]
+
+ # apply some defaults
+ worker["docker-image"] = test["docker-image"]
+ worker["allow-ptrace"] = True # required for all tests, for crashreporter
+ worker["loopback-video"] = test["loopback-video"]
+ worker["loopback-audio"] = test["loopback-audio"]
+ worker["max-run-time"] = test["max-run-time"]
+ worker["retry-exit-status"] = test["retry-exit-status"]
+ if "android-em-7.0-x86" in test["test-platform"]:
+ worker["privileged"] = True
+
+ artifacts = [
+ # (artifact name prefix, in-image path)
+ ("public/logs", "{workdir}/workspace/logs/".format(**run)),
+ ("public/test", "{workdir}/artifacts/".format(**run)),
+ (
+ "public/test_info",
+ "{workdir}/workspace/build/blobber_upload_dir/".format(**run),
+ ),
+ ]
+
+ installer = installer_url(taskdesc)
+
+ mozharness_url = get_artifact_url(
+ "<build>", get_artifact_path(taskdesc, "mozharness.zip")
+ )
+
+ worker.setdefault("artifacts", [])
+ worker["artifacts"].extend(
+ [
+ {
+ "name": prefix,
+ "path": os.path.join("{workdir}/workspace".format(**run), path),
+ "type": "directory",
+ "expires-after": get_expiration(config, "default"),
+ }
+ for (prefix, path) in artifacts
+ ]
+ )
+
+ env = worker.setdefault("env", {})
+ env.update(
+ {
+ "MOZHARNESS_CONFIG": " ".join(mozharness["config"]),
+ "MOZHARNESS_SCRIPT": mozharness["script"],
+ "MOZILLA_BUILD_URL": {"task-reference": installer},
+ "NEED_PULSEAUDIO": "true",
+ "NEED_WINDOW_MANAGER": "true",
+ "ENABLE_E10S": str(bool(test.get("e10s"))).lower(),
+ "WORKING_DIR": "/builds/worker",
+ }
+ )
+
+ env["PYTHON"] = "python3"
+
+ # Legacy linux64 tests rely on compiz.
+ if test.get("docker-image", {}).get("in-tree") == "desktop1604-test":
+ env.update({"NEED_COMPIZ": "true"})
+
+ # Bug 1602701/1601828 - use compiz on ubuntu1804 due to GTK asynchiness
+ # when manipulating windows.
+ if test.get("docker-image", {}).get("in-tree") == "ubuntu1804-test":
+ if "wdspec" in job["run"]["test"]["suite"] or (
+ "marionette" in job["run"]["test"]["suite"]
+ and "headless" not in job["label"]
+ ):
+ env.update({"NEED_COMPIZ": "true"})
+
+ # Set MOZ_ENABLE_WAYLAND env variables to enable Wayland backend.
+ if "wayland" in job["label"]:
+ env["MOZ_ENABLE_WAYLAND"] = "1"
+
+ if mozharness.get("mochitest-flavor"):
+ env["MOCHITEST_FLAVOR"] = mozharness["mochitest-flavor"]
+
+ if mozharness["set-moz-node-path"]:
+ env["MOZ_NODE_PATH"] = "/usr/local/bin/node"
+
+ if "actions" in mozharness:
+ env["MOZHARNESS_ACTIONS"] = " ".join(mozharness["actions"])
+
+ if is_try(config.params):
+ env["TRY_COMMIT_MSG"] = config.params["message"]
+
+ # handle some of the mozharness-specific options
+ if test["reboot"]:
+ raise Exception(
+ "reboot: {} not supported on generic-worker".format(test["reboot"])
+ )
+
+ # Support vcs checkouts regardless of whether the task runs from
+ # source or not in case it is needed on an interactive loaner.
+ support_vcs_checkout(config, job, taskdesc)
+
+ # If we have a source checkout, run mozharness from it instead of
+ # downloading a zip file with the same content.
+ if test["checkout"]:
+ env["MOZHARNESS_PATH"] = "{workdir}/checkouts/gecko/testing/mozharness".format(
+ **run
+ )
+ else:
+ env["MOZHARNESS_URL"] = {"task-reference": mozharness_url}
+
+ extra_config = {
+ "installer_url": installer,
+ "test_packages_url": test_packages_url(taskdesc),
+ }
+ env["EXTRA_MOZHARNESS_CONFIG"] = {
+ "task-reference": json.dumps(extra_config, sort_keys=True)
+ }
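+    # After task-reference substitution, EXTRA_MOZHARNESS_CONFIG resolves to a
+    # JSON object along the lines of (URLs illustrative only):
+    # {"installer_url": "https://.../target.tar.bz2",
+    #  "test_packages_url": "https://.../target.test_packages.json"}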
+
+ # Bug 1634554 - pass in decision task artifact URL to mozharness for WPT.
+ # Bug 1645974 - test-verify-wpt and test-coverage-wpt need artifact URL.
+ if "web-platform-tests" in test["suite"] or re.match(
+ "test-(coverage|verify)-wpt", test["suite"]
+ ):
+ env["TESTS_BY_MANIFEST_URL"] = {
+ "artifact-reference": "<decision/public/tests-by-manifest.json.gz>"
+ }
+
+ command = [
+ "{workdir}/bin/test-linux.sh".format(**run),
+ ]
+ command.extend(mozharness.get("extra-options", []))
+
+ if test.get("test-manifests"):
+ env["MOZHARNESS_TEST_PATHS"] = json.dumps(
+ {test["suite"]: test["test-manifests"]}, sort_keys=True
+ )
+
+ # TODO: remove the need for run['chunked']
+ elif mozharness.get("chunked") or test["chunks"] > 1:
+ command.append("--total-chunk={}".format(test["chunks"]))
+ command.append("--this-chunk={}".format(test["this-chunk"]))
+
+ if "download-symbols" in mozharness:
+ download_symbols = mozharness["download-symbols"]
+ download_symbols = {True: "true", False: "false"}.get(
+ download_symbols, download_symbols
+ )
+ command.append("--download-symbols=" + download_symbols)
+
+ job["run"] = {
+ "workdir": run["workdir"],
+ "tooltool-downloads": mozharness["tooltool-downloads"],
+ "checkout": test["checkout"],
+ "command": command,
+ "using": "run-task",
+ }
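+    # Hand the rewritten `run` section (now `using: run-task`) back through
+    # the handler registry for this worker implementation.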
+ configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
+
+
+@run_job_using("generic-worker", "mozharness-test", schema=mozharness_test_run_schema)
+def mozharness_test_on_generic_worker(config, job, taskdesc):
+ test = taskdesc["run"]["test"]
+ mozharness = test["mozharness"]
+ worker = taskdesc["worker"] = job["worker"]
+
+ bitbar_script = "test-linux.sh"
+
+ is_macosx = worker["os"] == "macosx"
+ is_windows = worker["os"] == "windows"
+ is_linux = worker["os"] == "linux" or worker["os"] == "linux-bitbar"
+ is_bitbar = worker["os"] == "linux-bitbar"
+ assert is_macosx or is_windows or is_linux
+
+ artifacts = [
+ {
+ "name": "public/logs",
+ "path": "logs",
+ "type": "directory",
+ "expires-after": get_expiration(config, "default"),
+ }
+ ]
+
+ # jittest doesn't have blob_upload_dir
+ if test["test-name"] != "jittest":
+ artifacts.append(
+ {
+ "name": "public/test_info",
+ "path": "build/blobber_upload_dir",
+ "type": "directory",
+ "expires-after": get_expiration(config, "default"),
+ }
+ )
+
+ if is_bitbar:
+ artifacts = [
+ {
+ "name": "public/test/",
+ "path": "artifacts/public",
+ "type": "directory",
+ "expires-after": get_expiration(config, "default"),
+ },
+ {
+ "name": "public/logs/",
+ "path": "workspace/logs",
+ "type": "directory",
+ "expires-after": get_expiration(config, "default"),
+ },
+ {
+ "name": "public/test_info/",
+ "path": "workspace/build/blobber_upload_dir",
+ "type": "directory",
+ "expires-after": get_expiration(config, "default"),
+ },
+ ]
+
+ installer = installer_url(taskdesc)
+
+ worker["os-groups"] = test["os-groups"]
+
+    # run-as-administrator is a feature for workers with UAC enabled and as such should not
+    # be included in tasks on workers that have UAC disabled. Currently UAC is only enabled
+    # on gecko Windows 10 and Windows 11 workers, however this may be subject to change.
+    # Worker type environment definitions can be found in
+    # https://github.com/mozilla-releng/OpenCloudConfig
+    # See https://docs.microsoft.com/en-us/windows/desktop/secauthz/user-account-control
+    # for more information about UAC.
+ if test.get("run-as-administrator", False):
+ if job["worker-type"].startswith("win10-64") or job["worker-type"].startswith(
+ "win11-64"
+ ):
+ worker["run-as-administrator"] = True
+ else:
+ raise Exception(
+ "run-as-administrator not supported on {}".format(job["worker-type"])
+ )
+
+ if test["reboot"]:
+ raise Exception(
+ "reboot: {} not supported on generic-worker".format(test["reboot"])
+ )
+
+ worker["max-run-time"] = test["max-run-time"]
+ worker["retry-exit-status"] = test["retry-exit-status"]
+ worker.setdefault("artifacts", [])
+ worker["artifacts"].extend(artifacts)
+
+ env = worker.setdefault("env", {})
+ env["GECKO_HEAD_REPOSITORY"] = config.params["head_repository"]
+ env["GECKO_HEAD_REV"] = config.params["head_rev"]
+
+ # this list will get cleaned up / reduced / removed in bug 1354088
+ if is_macosx:
+ env.update(
+ {
+ "LC_ALL": "en_US.UTF-8",
+ "LANG": "en_US.UTF-8",
+ "MOZ_NODE_PATH": "/usr/local/bin/node",
+ "PATH": "/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin",
+ "SHELL": "/bin/bash",
+ }
+ )
+ elif is_bitbar:
+ env.update(
+ {
+ "LANG": "en_US.UTF-8",
+ "MOZHARNESS_CONFIG": " ".join(mozharness["config"]),
+ "MOZHARNESS_SCRIPT": mozharness["script"],
+ "MOZHARNESS_URL": {
+ "artifact-reference": "<build/public/build/mozharness.zip>"
+ },
+ "MOZILLA_BUILD_URL": {"task-reference": installer},
+ "MOZ_NO_REMOTE": "1",
+ "NEED_XVFB": "false",
+ "XPCOM_DEBUG_BREAK": "warn",
+ "NO_FAIL_ON_TEST_ERRORS": "1",
+ "MOZ_HIDE_RESULTS_TABLE": "1",
+ "MOZ_NODE_PATH": "/usr/local/bin/node",
+ "TASKCLUSTER_WORKER_TYPE": job["worker-type"],
+ }
+ )
+
+ extra_config = {
+ "installer_url": installer,
+ "test_packages_url": test_packages_url(taskdesc),
+ }
+ env["EXTRA_MOZHARNESS_CONFIG"] = {
+ "task-reference": json.dumps(extra_config, sort_keys=True)
+ }
+
+ # Bug 1634554 - pass in decision task artifact URL to mozharness for WPT.
+ # Bug 1645974 - test-verify-wpt and test-coverage-wpt need artifact URL.
+ if "web-platform-tests" in test["suite"] or re.match(
+ "test-(coverage|verify)-wpt", test["suite"]
+ ):
+ env["TESTS_BY_MANIFEST_URL"] = {
+ "artifact-reference": "<decision/public/tests-by-manifest.json.gz>"
+ }
+
+ if is_windows:
+        py_binary = "c:\\mozilla-build\\python3\\python3.exe"
+ mh_command = [
+ py_binary,
+ "-u",
+ "mozharness\\scripts\\" + normpath(mozharness["script"]),
+ ]
+ elif is_bitbar:
+ py_binary = "python3"
+ mh_command = ["bash", f"./{bitbar_script}"]
+ elif is_macosx:
+        py_binary = "/usr/local/bin/python3"
+ mh_command = [
+ py_binary,
+ "-u",
+ "mozharness/scripts/" + mozharness["script"],
+ ]
+    else:
+        # is_linux
+        # Use an absolute path to the interpreter rather than relying on a
+        # PATH lookup, which has been unreliable on some workers in the past
+        # (see bug 1547903).
+        py_binary = "/usr/bin/python3"
+        mh_command = [
+            py_binary,
+            "-u",
+            "mozharness/scripts/" + mozharness["script"],
+        ]
+
+ env["PYTHON"] = py_binary
+
+ for mh_config in mozharness["config"]:
+ cfg_path = "mozharness/configs/" + mh_config
+ if is_windows:
+ cfg_path = normpath(cfg_path)
+ mh_command.extend(["--cfg", cfg_path])
+ mh_command.extend(mozharness.get("extra-options", []))
+ if mozharness.get("download-symbols"):
+ if isinstance(mozharness["download-symbols"], str):
+ mh_command.extend(["--download-symbols", mozharness["download-symbols"]])
+ else:
+ mh_command.extend(["--download-symbols", "true"])
+ if mozharness.get("include-blob-upload-branch"):
+ mh_command.append("--blob-upload-branch=" + config.params["project"])
+
+ if test.get("test-manifests"):
+ env["MOZHARNESS_TEST_PATHS"] = json.dumps(
+ {test["suite"]: test["test-manifests"]}, sort_keys=True
+ )
+
+ # TODO: remove the need for run['chunked']
+ elif mozharness.get("chunked") or test["chunks"] > 1:
+ mh_command.append("--total-chunk={}".format(test["chunks"]))
+ mh_command.append("--this-chunk={}".format(test["this-chunk"]))
+
+ if is_try(config.params):
+ env["TRY_COMMIT_MSG"] = config.params["message"]
+
+ worker["mounts"] = [
+ {
+ "directory": "mozharness",
+ "content": {
+ "artifact": get_artifact_path(taskdesc, "mozharness.zip"),
+ "task-id": {"task-reference": "<build>"},
+ },
+ "format": "zip",
+ }
+ ]
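+    # On bitbar the mozharness mount is replaced below; mozharness is instead
+    # fetched at runtime via the MOZHARNESS_URL set in the environment above.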
+ if is_bitbar:
+ a_url = config.params.file_url(
+ f"taskcluster/scripts/tester/{bitbar_script}",
+ )
+ worker["mounts"] = [
+ {
+ "file": bitbar_script,
+ "content": {
+ "url": a_url,
+ },
+ }
+ ]
+
+ job["run"] = {
+ "tooltool-downloads": mozharness["tooltool-downloads"],
+ "checkout": test["checkout"],
+ "command": mh_command,
+ "using": "run-task",
+ }
+ if is_bitbar:
+ job["run"]["run-as-root"] = True
+ configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
diff --git a/taskcluster/gecko_taskgraph/transforms/job/python_test.py b/taskcluster/gecko_taskgraph/transforms/job/python_test.py
new file mode 100644
index 0000000000..b572061217
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/job/python_test.py
@@ -0,0 +1,47 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Support for running mach python-test tasks (via run-task)
+"""
+
+
+from taskgraph.util.schema import Schema
+from voluptuous import Optional, Required
+
+from gecko_taskgraph.transforms.job import configure_taskdesc_for_run, run_job_using
+
+python_test_schema = Schema(
+ {
+ Required("using"): "python-test",
+ # Python version to use
+ Required("python-version"): int,
+ # The subsuite to run
+ Required("subsuite"): str,
+ # Base work directory used to set up the task.
+ Optional("workdir"): str,
+ }
+)
+
+
+defaults = {
+ "python-version": 3,
+ "subsuite": "default",
+}
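+
+
+# An illustrative `run` stanza accepted by this schema (the subsuite name is
+# hypothetical):
+#
+#   run:
+#       using: python-test
+#       python-version: 3
+#       subsuite: mozbuild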
+
+
+@run_job_using(
+ "docker-worker", "python-test", schema=python_test_schema, defaults=defaults
+)
+@run_job_using(
+ "generic-worker", "python-test", schema=python_test_schema, defaults=defaults
+)
+def configure_python_test(config, job, taskdesc):
+ run = job["run"]
+ worker = job["worker"]
+
+ # defer to the mach implementation
+    run["mach"] = "python-test --subsuite {subsuite} --run-slow".format(**run)
+ run["using"] = "mach"
+ del run["subsuite"]
+ configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
diff --git a/taskcluster/gecko_taskgraph/transforms/job/run_task.py b/taskcluster/gecko_taskgraph/transforms/job/run_task.py
new file mode 100644
index 0000000000..201c0b825a
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/job/run_task.py
@@ -0,0 +1,308 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Support for running jobs that are invoked via the `run-task` script.
+"""
+
+
+import os
+
+from mozbuild.util import memoize
+from mozpack import path
+from taskgraph.util.schema import Schema
+from taskgraph.util.yaml import load_yaml
+from voluptuous import Any, Extra, Optional, Required
+
+from gecko_taskgraph import GECKO
+from gecko_taskgraph.transforms.job import run_job_using
+from gecko_taskgraph.transforms.job.common import add_tooltool, support_vcs_checkout
+from gecko_taskgraph.transforms.task import taskref_or_string
+
+run_task_schema = Schema(
+ {
+ Required("using"): "run-task",
+ # if true, add a cache at ~worker/.cache, which is where things like pip
+ # tend to hide their caches. This cache is never added for level-1 jobs.
+ # TODO Once bug 1526028 is fixed, this and 'use-caches' should be merged.
+ Required("cache-dotcache"): bool,
+ # Whether or not to use caches.
+ Optional("use-caches"): bool,
+ # if true (the default), perform a checkout of gecko on the worker
+ Required("checkout"): bool,
+        Optional(
+            "cwd",
+            description="Path to run the command in. If a checkout is present, the "
+            "placeholder `{checkout}` is interpolated with the path to the checkout",
+        ): str,
+ # The sparse checkout profile to use. Value is the filename relative to
+ # "sparse-profile-prefix" which defaults to "build/sparse-profiles/".
+ Required("sparse-profile"): Any(str, None),
+ # The relative path to the sparse profile.
+ Optional("sparse-profile-prefix"): str,
+ # if true, perform a checkout of a comm-central based branch inside the
+ # gecko checkout
+ Required("comm-checkout"): bool,
+ # The command arguments to pass to the `run-task` script, after the
+ # checkout arguments. If a list, it will be passed directly; otherwise
+ # it will be included in a single argument to `bash -cx`.
+ Required("command"): Any([taskref_or_string], taskref_or_string),
+ # Context to substitute into the command using format string
+ # substitution (e.g {value}). This is useful if certain aspects of the
+ # command need to be generated in transforms.
+ Optional("command-context"): {
+ # If present, loads a set of context variables from an unnested yaml
+ # file. If a value is present in both the provided file and directly
+ # in command-context, the latter will take priority.
+ Optional("from-file"): str,
+ Extra: object,
+ },
+ # Base work directory used to set up the task.
+ Optional("workdir"): str,
+ # If not false, tooltool downloads will be enabled via relengAPIProxy
+ # for either just public files, or all files. Only supported on
+ # docker-worker.
+ Required("tooltool-downloads"): Any(
+ False,
+ "public",
+ "internal",
+ ),
+ # Whether to run as root. (defaults to False)
+ Optional("run-as-root"): bool,
+ }
+)
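+
+
+# An illustrative `run` stanza accepted by this schema (command and cwd are
+# hypothetical):
+#
+#   run:
+#       using: run-task
+#       checkout: true
+#       cwd: '{checkout}'
+#       command: ./mach lint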
+
+
+def common_setup(config, job, taskdesc, command):
+ run = job["run"]
+ if run["checkout"]:
+ support_vcs_checkout(config, job, taskdesc, sparse=bool(run["sparse-profile"]))
+ command.append(
+ "--gecko-checkout={}".format(taskdesc["worker"]["env"]["GECKO_PATH"])
+ )
+
+ if run["sparse-profile"]:
+ sparse_profile_prefix = run.pop(
+ "sparse-profile-prefix", "build/sparse-profiles"
+ )
+ sparse_profile_path = path.join(sparse_profile_prefix, run["sparse-profile"])
+ command.append(f"--gecko-sparse-profile={sparse_profile_path}")
+
+ taskdesc["worker"].setdefault("env", {})["MOZ_SCM_LEVEL"] = config.params["level"]
+
+
+worker_defaults = {
+ "cache-dotcache": False,
+ "checkout": True,
+ "comm-checkout": False,
+ "sparse-profile": None,
+ "tooltool-downloads": False,
+ "run-as-root": False,
+}
+
+
+load_yaml = memoize(load_yaml)
+
+
+def script_url(config, script):
+ if "MOZ_AUTOMATION" in os.environ and "TASK_ID" not in os.environ:
+ raise Exception("TASK_ID must be defined to use run-task on generic-worker")
+ task_id = os.environ.get("TASK_ID", "<TASK_ID>")
+    tc_url = "https://firefox-ci-tc.services.mozilla.com"
+ return f"{tc_url}/api/queue/v1/task/{task_id}/artifacts/public/{script}"
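+# Illustrative example: with TASK_ID "abc123" (hypothetical),
+# script_url(config, "run-task") returns
+# "https://firefox-ci-tc.services.mozilla.com/api/queue/v1/task/abc123/artifacts/public/run-task".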
+
+
+def substitute_command_context(command_context, command):
+ from_file = command_context.pop("from-file", None)
+    if from_file:
+        full_context = load_yaml(os.path.join(GECKO, from_file))
+    else:
+        full_context = {}
+
+ full_context.update(command_context)
+
+    if isinstance(command, list):
+        for i, part in enumerate(command):
+            command[i] = part.format(**full_context)
+ else:
+ command = command.format(**full_context)
+
+ return command
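+# Illustrative example: with no "from-file" key,
+#   substitute_command_context({"flavor": "plain"}, ["run", "--flavor={flavor}"])
+# returns ["run", "--flavor=plain"], formatting each list element in place.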
+
+
+@run_job_using(
+ "docker-worker", "run-task", schema=run_task_schema, defaults=worker_defaults
+)
+def docker_worker_run_task(config, job, taskdesc):
+ run = job["run"]
+ worker = taskdesc["worker"] = job["worker"]
+ command = ["/builds/worker/bin/run-task"]
+ common_setup(config, job, taskdesc, command)
+
+ if run["tooltool-downloads"]:
+ internal = run["tooltool-downloads"] == "internal"
+ add_tooltool(config, job, taskdesc, internal=internal)
+
+    if run.get("cache-dotcache"):
+        worker.setdefault("caches", []).append(
+ {
+ "type": "persistent",
+ "name": "{project}-dotcache".format(**config.params),
+ "mount-point": "{workdir}/.cache".format(**run),
+ "skip-untrusted": True,
+ }
+ )
+
+ if run.get("command-context"):
+ run_command = substitute_command_context(
+ run.get("command-context"), run["command"]
+ )
+ else:
+ run_command = run["command"]
+
+ run_cwd = run.get("cwd")
+ if run_cwd and run["checkout"]:
+ run_cwd = path.normpath(
+ run_cwd.format(checkout=taskdesc["worker"]["env"]["GECKO_PATH"])
+ )
+ elif run_cwd and "{checkout}" in run_cwd:
+ raise Exception(
+ "Found `{{checkout}}` interpolation in `cwd` for task {name} "
+ "but the task doesn't have a checkout: {cwd}".format(
+ cwd=run_cwd, name=job.get("name", job.get("label"))
+ )
+ )
+
+    # dict is for the case of `{'task-reference': str}`.
+ if isinstance(run_command, (str, dict)):
+ run_command = ["bash", "-cx", run_command]
+ if run["comm-checkout"]:
+ command.append(
+ "--comm-checkout={}/comm".format(taskdesc["worker"]["env"]["GECKO_PATH"])
+ )
+ if run["run-as-root"]:
+ command.extend(("--user", "root", "--group", "root"))
+ if run_cwd:
+ command.extend(("--task-cwd", run_cwd))
+ command.append("--")
+ command.extend(run_command)
+ worker["command"] = command
+
+
+@run_job_using(
+ "generic-worker", "run-task", schema=run_task_schema, defaults=worker_defaults
+)
+def generic_worker_run_task(config, job, taskdesc):
+ run = job["run"]
+ worker = taskdesc["worker"] = job["worker"]
+ is_win = worker["os"] == "windows"
+ is_mac = worker["os"] == "macosx"
+ is_bitbar = worker["os"] == "linux-bitbar"
+
+ if run["tooltool-downloads"]:
+ internal = run["tooltool-downloads"] == "internal"
+ add_tooltool(config, job, taskdesc, internal=internal)
+
+ if is_win:
+ command = ["C:/mozilla-build/python3/python3.exe", "run-task"]
+ elif is_mac:
+ command = ["/usr/local/bin/python3", "run-task"]
+ else:
+ command = ["./run-task"]
+
+ common_setup(config, job, taskdesc, command)
+
+ worker.setdefault("mounts", [])
+ if run.get("cache-dotcache"):
+ worker["mounts"].append(
+ {
+ "cache-name": "{project}-dotcache".format(**config.params),
+ "directory": "{workdir}/.cache".format(**run),
+ }
+ )
+ worker["mounts"].append(
+ {
+ "content": {
+ "url": script_url(config, "run-task"),
+ },
+ "file": "./run-task",
+ }
+ )
+ if job.get("fetches", {}):
+ worker["mounts"].append(
+ {
+ "content": {
+ "url": script_url(config, "fetch-content"),
+ },
+ "file": "./fetch-content",
+ }
+ )
+
+ run_command = run["command"]
+ run_cwd = run.get("cwd")
+ if run_cwd and run["checkout"]:
+ run_cwd = path.normpath(
+ run_cwd.format(checkout=taskdesc["worker"]["env"]["GECKO_PATH"])
+ )
+ elif run_cwd and "{checkout}" in run_cwd:
+ raise Exception(
+ "Found `{{checkout}}` interpolation in `cwd` for task {name} "
+ "but the task doesn't have a checkout: {cwd}".format(
+ cwd=run_cwd, name=job.get("name", job.get("label"))
+ )
+ )
+
+    # dict is for the case of `{'task-reference': str}`.
+ if isinstance(run_command, (str, dict)):
+ if is_win:
+ if isinstance(run_command, dict):
+                for k in run_command:
+ run_command[k] = f'"{run_command[k]}"'
+ else:
+ run_command = f'"{run_command}"'
+ run_command = ["bash", "-cx", run_command]
+
+ if run.get("command-context"):
+ run_command = substitute_command_context(
+ run.get("command-context"), run_command
+ )
+
+ if run["comm-checkout"]:
+ command.append(
+ "--comm-checkout={}/comm".format(taskdesc["worker"]["env"]["GECKO_PATH"])
+ )
+
+ if run["run-as-root"]:
+ command.extend(("--user", "root", "--group", "root"))
+ if run_cwd:
+ command.extend(("--task-cwd", run_cwd))
+ command.append("--")
+ if is_bitbar:
+ # Use the bitbar wrapper script which sets up the device and adb
+ # environment variables
+ command.append("/builds/taskcluster/script.py")
+ command.extend(run_command)
+
+    if is_win:
+        taskref = any(isinstance(c, dict) for c in command)
+
+ if taskref:
+ cmd = []
+ for c in command:
+ if isinstance(c, dict):
+ for v in c.values():
+ cmd.append(v)
+ else:
+ cmd.append(c)
+ worker["command"] = [{"artifact-reference": " ".join(cmd)}]
+ else:
+ worker["command"] = [" ".join(command)]
+ else:
+ worker["command"] = [
+ ["chmod", "+x", "run-task"],
+ command,
+ ]
diff --git a/taskcluster/gecko_taskgraph/transforms/job/spidermonkey.py b/taskcluster/gecko_taskgraph/transforms/job/spidermonkey.py
new file mode 100644
index 0000000000..91c7e93bd6
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/job/spidermonkey.py
@@ -0,0 +1,109 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Support for running spidermonkey jobs via dedicated scripts
+"""
+
+
+from taskgraph.util.schema import Schema
+from voluptuous import Any, Optional, Required
+
+from gecko_taskgraph.transforms.job import configure_taskdesc_for_run, run_job_using
+from gecko_taskgraph.transforms.job.common import (
+ docker_worker_add_artifacts,
+ generic_worker_add_artifacts,
+)
+
+sm_run_schema = Schema(
+ {
+ Required("using"): Any(
+ "spidermonkey",
+ "spidermonkey-package",
+ ),
+        # These control the SPIDERMONKEY_VARIANT and SPIDERMONKEY_PLATFORM
+        # environment variables set for the build.
+ Required("spidermonkey-variant"): str,
+ Optional("spidermonkey-platform"): str,
+ # Base work directory used to set up the task.
+ Optional("workdir"): str,
+ Required("tooltool-downloads"): Any(
+ False,
+ "public",
+ "internal",
+ ),
+ }
+)
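+
+
+# An illustrative `run` stanza accepted by this schema (the variant name is
+# hypothetical):
+#
+#   run:
+#       using: spidermonkey
+#       spidermonkey-variant: plain
+#       tooltool-downloads: public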
+
+
+@run_job_using("docker-worker", "spidermonkey", schema=sm_run_schema)
+@run_job_using("docker-worker", "spidermonkey-package", schema=sm_run_schema)
+def docker_worker_spidermonkey(config, job, taskdesc):
+ run = job["run"]
+
+ worker = taskdesc["worker"] = job["worker"]
+ worker.setdefault("artifacts", [])
+
+ docker_worker_add_artifacts(config, job, taskdesc)
+
+ env = worker.setdefault("env", {})
+ env.update(
+ {
+ "MOZHARNESS_DISABLE": "true",
+ "SPIDERMONKEY_VARIANT": run.pop("spidermonkey-variant"),
+ "MOZ_BUILD_DATE": config.params["moz_build_date"],
+ "MOZ_SCM_LEVEL": config.params["level"],
+ }
+ )
+ if "spidermonkey-platform" in run:
+ env["SPIDERMONKEY_PLATFORM"] = run.pop("spidermonkey-platform")
+
+ script = "build-sm.sh"
+ if run["using"] == "spidermonkey-package":
+ script = "build-sm-package.sh"
+
+ run["using"] = "run-task"
+ run["cwd"] = run["workdir"]
+ run["command"] = [f"./checkouts/gecko/taskcluster/scripts/builder/{script}"]
+
+ configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
+
+
+@run_job_using("generic-worker", "spidermonkey", schema=sm_run_schema)
+def generic_worker_spidermonkey(config, job, taskdesc):
+ assert job["worker"]["os"] == "windows", "only supports windows right now"
+
+ run = job["run"]
+
+ worker = taskdesc["worker"] = job["worker"]
+
+ generic_worker_add_artifacts(config, job, taskdesc)
+
+ env = worker.setdefault("env", {})
+ env.update(
+ {
+ "MOZHARNESS_DISABLE": "true",
+ "SPIDERMONKEY_VARIANT": run.pop("spidermonkey-variant"),
+ "MOZ_BUILD_DATE": config.params["moz_build_date"],
+ "MOZ_SCM_LEVEL": config.params["level"],
+ "SCCACHE_DISABLE": "1",
+            "WORK": ".",  # Override the defaults in build scripts
+            "GECKO_PATH": "./src",  # with values suitable for the Windows generic worker
+ "UPLOAD_DIR": "./public/build",
+ }
+ )
+ if "spidermonkey-platform" in run:
+ env["SPIDERMONKEY_PLATFORM"] = run.pop("spidermonkey-platform")
+
+ script = "build-sm.sh"
+ if run["using"] == "spidermonkey-package":
+ script = "build-sm-package.sh"
+ # Don't allow untested configurations yet
+ raise Exception("spidermonkey-package is not a supported configuration")
+
+ run["using"] = "run-task"
+ run["command"] = [
+        "c:\\mozilla-build\\msys2\\usr\\bin\\bash.exe "  # implicit string concatenation
+ '"./src/taskcluster/scripts/builder/%s"' % script
+ ]
+
+ configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
diff --git a/taskcluster/gecko_taskgraph/transforms/job/toolchain.py b/taskcluster/gecko_taskgraph/transforms/job/toolchain.py
new file mode 100644
index 0000000000..fb030019fc
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/job/toolchain.py
@@ -0,0 +1,257 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Support for running toolchain-building jobs via dedicated scripts
+"""
+
+
+import os
+
+import taskgraph
+from mozbuild.shellutil import quote as shell_quote
+from taskgraph.util.schema import Schema, optionally_keyed_by, resolve_keyed_by
+from voluptuous import Any, Optional, Required
+
+from gecko_taskgraph import GECKO
+from gecko_taskgraph.transforms.job import configure_taskdesc_for_run, run_job_using
+from gecko_taskgraph.transforms.job.common import (
+ docker_worker_add_artifacts,
+ generic_worker_add_artifacts,
+)
+from gecko_taskgraph.util.attributes import RELEASE_PROJECTS
+from gecko_taskgraph.util.hash import hash_paths
+
+CACHE_TYPE = "toolchains.v3"
+
+toolchain_run_schema = Schema(
+ {
+ Required("using"): "toolchain-script",
+ # The script (in taskcluster/scripts/misc) to run.
+ # Python scripts are invoked with `mach python` so vendored libraries
+ # are available.
+ Required("script"): str,
+ # Arguments to pass to the script.
+ Optional("arguments"): [str],
+ # If not false, tooltool downloads will be enabled via relengAPIProxy
+ # for either just public files, or all files. Not supported on Windows
+ Required("tooltool-downloads"): Any(
+ False,
+ "public",
+ "internal",
+ ),
+        # Sparse profile to give to checkout using `run-task`. Defaults to
+        # "toolchain-build". The value is relative to "sparse-profile-prefix",
+        # which can optionally be set below and defaults to
+        # "build/sparse-profiles", i.e. `build/sparse-profiles/toolchain-build`.
+        # If `None`, instructs `run-task` to not use a sparse profile at all.
+ Required("sparse-profile"): Any(str, None),
+ # The relative path to the sparse profile.
+ Optional("sparse-profile-prefix"): str,
+ # Paths/patterns pointing to files that influence the outcome of a
+ # toolchain build.
+ Optional("resources"): [str],
+ # Path to the artifact produced by the toolchain job
+ Required("toolchain-artifact"): str,
+ Optional(
+ "toolchain-alias",
+ description="An alias that can be used instead of the real toolchain job name in "
+ "fetch stanzas for jobs.",
+ ): optionally_keyed_by("project", Any(None, str, [str])),
+ Optional(
+ "toolchain-env",
+ description="Additional env variables to add to the worker when using this toolchain",
+ ): {str: object},
+ # Base work directory used to set up the task.
+ Optional("workdir"): str,
+ }
+)
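+
+
+# An illustrative `run` stanza accepted by this schema (script, artifact, and
+# resource names are hypothetical):
+#
+#   run:
+#       using: toolchain-script
+#       script: build-example.sh
+#       toolchain-artifact: public/build/example.tar.zst
+#       resources:
+#           - taskcluster/scripts/misc/example-helper.py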
+
+
+def get_digest_data(config, run, taskdesc):
+ files = list(run.pop("resources", []))
+ # The script
+ files.append("taskcluster/scripts/misc/{}".format(run["script"]))
+ # Tooltool manifest if any is defined:
+ tooltool_manifest = taskdesc["worker"]["env"].get("TOOLTOOL_MANIFEST")
+ if tooltool_manifest:
+ files.append(tooltool_manifest)
+
+ # Accumulate dependency hashes for index generation.
+ data = [hash_paths(GECKO, files)]
+
+ data.append(taskdesc["attributes"]["toolchain-artifact"])
+
+ # If the task uses an in-tree docker image, we want it to influence
+ # the index path as well. Ideally, the content of the docker image itself
+ # should have an influence, but at the moment, we can't get that
+ # information here. So use the docker image name as a proxy. Not a lot of
+ # changes to docker images actually have an impact on the resulting
+ # toolchain artifact, so we'll just rely on such important changes to be
+ # accompanied with a docker image name change.
+ image = taskdesc["worker"].get("docker-image", {}).get("in-tree")
+ if image:
+ data.append(image)
+
+ # Likewise script arguments should influence the index.
+ args = run.get("arguments")
+ if args:
+ data.extend(args)
+
+ if taskdesc["attributes"].get("rebuild-on-release"):
+ # Add whether this is a release branch or not
+ data.append(str(config.params["project"] in RELEASE_PROJECTS))
+ return data
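+# Illustrative example (hypothetical values): a run with script "build-foo.sh",
+# argument "--arg", artifact "public/build/foo.tar.zst", and in-tree image
+# "deb11-toolchain-build" yields digest data of roughly
+# [<hash of resources + script>, "public/build/foo.tar.zst",
+#  "deb11-toolchain-build", "--arg"].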
+
+
+def common_toolchain(config, job, taskdesc, is_docker):
+ run = job["run"]
+
+ worker = taskdesc["worker"] = job["worker"]
+ worker["chain-of-trust"] = True
+
+ if is_docker:
+ # If the task doesn't have a docker-image, set a default
+ worker.setdefault("docker-image", {"in-tree": "deb11-toolchain-build"})
+
+ if job["worker"]["os"] == "windows":
+ # There were no caches on generic-worker before bug 1519472, and they cause
+ # all sorts of problems with Windows toolchain tasks, disable them until
+ # tasks are ready.
+ run["use-caches"] = False
+
+ env = worker.setdefault("env", {})
+ env.update(
+ {
+ "MOZ_BUILD_DATE": config.params["moz_build_date"],
+ "MOZ_SCM_LEVEL": config.params["level"],
+ "TOOLCHAIN_ARTIFACT": run["toolchain-artifact"],
+ }
+ )
+
+ if is_docker:
+ # Toolchain checkouts don't live under {workdir}/checkouts
+ workspace = "{workdir}/workspace/build".format(**run)
+ env["GECKO_PATH"] = f"{workspace}/src"
+
+ attributes = taskdesc.setdefault("attributes", {})
+ attributes["toolchain-artifact"] = run.pop("toolchain-artifact")
+ toolchain_artifact = attributes["toolchain-artifact"]
+ if not toolchain_artifact.startswith("public/build/"):
+ if "artifact_prefix" in attributes:
+ raise Exception(
+ "Toolchain {} has an artifact_prefix attribute. That is not"
+ " allowed on toolchain tasks.".format(taskdesc["label"])
+ )
+ attributes["artifact_prefix"] = os.path.dirname(toolchain_artifact)
+
+ resolve_keyed_by(
+ run,
+ "toolchain-alias",
+ item_name=taskdesc["label"],
+ project=config.params["project"],
+ )
+ alias = run.pop("toolchain-alias", None)
+ if alias:
+ attributes["toolchain-alias"] = alias
+ if "toolchain-env" in run:
+ attributes["toolchain-env"] = run.pop("toolchain-env")
+
+    # Allow the job to specify its own artifacts, but add the default
+    # (public/build) artifact configuration when none are specified.
+ artifacts = worker.setdefault("artifacts", [])
+ if not artifacts:
+ if is_docker:
+ docker_worker_add_artifacts(config, job, taskdesc)
+ else:
+ generic_worker_add_artifacts(config, job, taskdesc)
+
+ digest_data = get_digest_data(config, run, taskdesc)
+
+ if job.get("attributes", {}).get("cached_task") is not False and not taskgraph.fast:
+ name = taskdesc["label"].replace(f"{config.kind}-", "", 1)
+ taskdesc["cache"] = {
+ "type": CACHE_TYPE,
+ "name": name,
+ "digest-data": digest_data,
+ }
+
+ # Toolchains that are used for local development need to be built on a
+ # level-3 branch to be installable via `mach bootstrap`.
+ local_toolchain = taskdesc["attributes"].get("local-toolchain")
+ if local_toolchain:
+ if taskdesc.get("run-on-projects"):
+ raise Exception(
+            "Toolchain {} used for local development must not have"
+ " run-on-projects set".format(taskdesc["label"])
+ )
+ taskdesc["run-on-projects"] = ["integration", "release"]
+
+ script = run.pop("script")
+ arguments = run.pop("arguments", [])
+ if local_toolchain and not attributes["toolchain-artifact"].startswith("public/"):
+        # Local toolchains with private artifacts are expected to have a script that
+        # fills a directory given as its final command line argument. That script, and
+        # the arguments provided, are used by the build system bootstrap code, and for
+        # the corresponding CI tasks, the command is wrapped with a script that creates
+        # an artifact based on that filled directory.
+        # We prefer automatic wrapping over manual wrapping in the yaml because it
+        # keeps the index independent of the wrapper script, which is irrelevant to
+        # the toolchain's contents.
+        # Also, an attribute is added so the bootstrap code can easily parse the
+        # command.
+ attributes["toolchain-command"] = {
+ "script": script,
+ "arguments": list(arguments),
+ }
+ arguments.insert(0, script)
+ script = "private_local_toolchain.sh"
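+        # e.g. a job with script "build-foo.sh" (hypothetical) now runs
+        # `private_local_toolchain.sh build-foo.sh <original arguments...>`.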
+
+ run["using"] = "run-task"
+ if is_docker:
+ gecko_path = "workspace/build/src"
+ elif job["worker"]["os"] == "windows":
+ gecko_path = "%GECKO_PATH%"
+ else:
+ gecko_path = "$GECKO_PATH"
+
+ if is_docker:
+ run["cwd"] = run["workdir"]
+ run["command"] = [
+ "{}/taskcluster/scripts/misc/{}".format(gecko_path, script)
+ ] + arguments
+ if not is_docker:
+ # Don't quote the first item in the command because it purposely contains
+ # an environment variable that is not meant to be quoted.
+ if len(run["command"]) > 1:
+ run["command"] = run["command"][0] + " " + shell_quote(*run["command"][1:])
+ else:
+ run["command"] = run["command"][0]
+
+ configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
+
+
+toolchain_defaults = {
+ "tooltool-downloads": False,
+ "sparse-profile": "toolchain-build",
+}
+
+
+@run_job_using(
+ "docker-worker",
+ "toolchain-script",
+ schema=toolchain_run_schema,
+ defaults=toolchain_defaults,
+)
+def docker_worker_toolchain(config, job, taskdesc):
+ common_toolchain(config, job, taskdesc, is_docker=True)
+
+
+@run_job_using(
+ "generic-worker",
+ "toolchain-script",
+ schema=toolchain_run_schema,
+ defaults=toolchain_defaults,
+)
+def generic_worker_toolchain(config, job, taskdesc):
+ common_toolchain(config, job, taskdesc, is_docker=False)