Diffstat (limited to 'taskcluster/taskgraph/transforms/job')
-rw-r--r--  taskcluster/taskgraph/transforms/job/__init__.py         464
-rw-r--r--  taskcluster/taskgraph/transforms/job/common.py           254
-rw-r--r--  taskcluster/taskgraph/transforms/job/debian_package.py   215
-rw-r--r--  taskcluster/taskgraph/transforms/job/hazard.py            71
-rw-r--r--  taskcluster/taskgraph/transforms/job/mach.py              84
-rw-r--r--  taskcluster/taskgraph/transforms/job/mozharness.py       369
-rw-r--r--  taskcluster/taskgraph/transforms/job/mozharness_test.py  452
-rw-r--r--  taskcluster/taskgraph/transforms/job/python_test.py       48
-rw-r--r--  taskcluster/taskgraph/transforms/job/run_task.py         238
-rw-r--r--  taskcluster/taskgraph/transforms/job/spidermonkey.py     132
-rw-r--r--  taskcluster/taskgraph/transforms/job/toolchain.py        234
11 files changed, 2561 insertions(+), 0 deletions(-)
diff --git a/taskcluster/taskgraph/transforms/job/__init__.py b/taskcluster/taskgraph/transforms/job/__init__.py
new file mode 100644
index 0000000000..e0fd3e8dd3
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/job/__init__.py
@@ -0,0 +1,464 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Convert a job description into a task description.
+
+Job descriptions are similar to task descriptions, but they specify how to run
+the job at a higher level, using a "run" field that can be interpreted by
+run-using handlers in `taskcluster/taskgraph/transforms/job`.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import copy
+import logging
+import json
+import six
+from six import text_type
+
+import mozpack.path as mozpath
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.transforms.cached_tasks import order_tasks
+from taskgraph.util.schema import (
+ validate_schema,
+ Schema,
+)
+from taskgraph.util.python_path import import_sibling_modules
+from taskgraph.util.taskcluster import get_artifact_prefix
+from taskgraph.util.workertypes import worker_type_implementation
+from taskgraph.transforms.task import task_description_schema
+from voluptuous import (
+ Extra,
+ Optional,
+ Required,
+ Exclusive,
+)
+
+logger = logging.getLogger(__name__)
+
+# Schema for a job description
+job_description_schema = Schema(
+ {
+ # The name of the job and the job's label. At least one must be specified,
+ # and the label will be generated from the name if necessary, by prepending
+ # the kind.
+ Optional("name"): text_type,
+ Optional("label"): text_type,
+ # the following fields are passed directly through to the task description,
+ # possibly modified by the run implementation. See
+ # taskcluster/taskgraph/transforms/task.py for the schema details.
+ Required("description"): task_description_schema["description"],
+ Optional("attributes"): task_description_schema["attributes"],
+ Optional("job-from"): task_description_schema["job-from"],
+ Optional("dependencies"): task_description_schema["dependencies"],
+ Optional("if-dependencies"): task_description_schema["if-dependencies"],
+ Optional("soft-dependencies"): task_description_schema["soft-dependencies"],
+ Optional("requires"): task_description_schema["requires"],
+ Optional("expires-after"): task_description_schema["expires-after"],
+ Optional("routes"): task_description_schema["routes"],
+ Optional("scopes"): task_description_schema["scopes"],
+ Optional("tags"): task_description_schema["tags"],
+ Optional("extra"): task_description_schema["extra"],
+ Optional("treeherder"): task_description_schema["treeherder"],
+ Optional("index"): task_description_schema["index"],
+ Optional("run-on-projects"): task_description_schema["run-on-projects"],
+ Optional("shipping-phase"): task_description_schema["shipping-phase"],
+ Optional("shipping-product"): task_description_schema["shipping-product"],
+ Optional("always-target"): task_description_schema["always-target"],
+ Exclusive("optimization", "optimization"): task_description_schema[
+ "optimization"
+ ],
+ Optional("use-sccache"): task_description_schema["use-sccache"],
+ Optional("release-artifacts"): task_description_schema["release-artifacts"],
+ Optional("priority"): task_description_schema["priority"],
+ # The "when" section contains descriptions of the circumstances under which
+ # this task should be included in the task graph. This will be converted
+ # into an optimization, so it cannot be specified in a job description that
+ # also gives 'optimization'.
+ Exclusive("when", "optimization"): {
+ # This task only needs to be run if a file matching one of the given
+ # patterns has changed in the push. The patterns use the mozpack
+ # match function (python/mozbuild/mozpack/path.py).
+ Optional("files-changed"): [text_type],
+ },
+ # A list of artifacts to install from 'fetch' tasks.
+ Optional("fetches"): {
+ text_type: [
+ text_type,
+ {
+ Required("artifact"): text_type,
+ Optional("dest"): text_type,
+ Optional("extract"): bool,
+ Optional("verify-hash"): bool,
+ },
+ ],
+ },
+ # A description of how to run this job.
+ "run": {
+ # The key to a job implementation in a peer module to this one
+ "using": text_type,
+ # Base work directory used to set up the task.
+ Optional("workdir"): text_type,
+ # Any remaining content is verified against that job implementation's
+ # own schema.
+ Extra: object,
+ },
+ Required("worker-type"): task_description_schema["worker-type"],
+ # This object will be passed through to the task description, with additions
+ # provided by the job's run-using function
+ Optional("worker"): dict,
+ }
+)
+
+transforms = TransformSequence()
+transforms.add_validate(job_description_schema)
+
+
+@transforms.add
+def rewrite_when_to_optimization(config, jobs):
+ for job in jobs:
+ when = job.pop("when", {})
+ if not when:
+ yield job
+ continue
+
+ files_changed = when.get("files-changed")
+
+ # implicitly add task config directory.
+ files_changed.append("{}/**".format(config.path))
+
+ # "only when files changed" implies "skip if files have not changed"
+ job["optimization"] = {"skip-unless-changed": files_changed}
+
+ assert "when" not in job
+ yield job
+
+
+@transforms.add
+def set_implementation(config, jobs):
+ for job in jobs:
+ impl, os = worker_type_implementation(config.graph_config, job["worker-type"])
+ if os:
+ job.setdefault("tags", {})["os"] = os
+ if impl:
+ job.setdefault("tags", {})["worker-implementation"] = impl
+ worker = job.setdefault("worker", {})
+ assert "implementation" not in worker
+ worker["implementation"] = impl
+ if os:
+ worker["os"] = os
+ yield job
+
+
+@transforms.add
+def set_label(config, jobs):
+ for job in jobs:
+ if "label" not in job:
+ if "name" not in job:
+ raise Exception("job has neither a name nor a label")
+ job["label"] = "{}-{}".format(config.kind, job["name"])
+ if job.get("name"):
+ del job["name"]
+ yield job
+
+
+@transforms.add
+def add_resource_monitor(config, jobs):
+ for job in jobs:
+ if job.get("attributes", {}).get("resource-monitor"):
+ worker_implementation, worker_os = worker_type_implementation(
+ config.graph_config, job["worker-type"]
+ )
+ # Normalise worker os so that linux-bitbar and similar use linux tools.
+ worker_os = worker_os.split("-")[0]
+ # We don't currently support an Arm worker, due to gopsutil's indirect
+ # dependencies (go-ole)
+ if "aarch64" in job["worker-type"]:
+ yield job
+ continue
+ elif "win7" in job["worker-type"]:
+ arch = "32"
+ else:
+ arch = "64"
+ job.setdefault("fetches", {})
+ job["fetches"].setdefault("toolchain", [])
+ job["fetches"]["toolchain"].append(
+ "{}{}-resource-monitor".format(worker_os, arch)
+ )
+
+ if worker_implementation == "docker-worker":
+ artifact_source = "/builds/worker/monitoring/resource-monitor.json"
+ else:
+ artifact_source = "monitoring/resource-monitor.json"
+ job["worker"].setdefault("artifacts", [])
+ job["worker"]["artifacts"].append(
+ {
+ "name": "public/monitoring/resource-monitor.json",
+ "type": "file",
+ "path": artifact_source,
+ }
+ )
+ # Set env for output file
+ job["worker"].setdefault("env", {})
+ job["worker"]["env"]["RESOURCE_MONITOR_OUTPUT"] = artifact_source
+
+ yield job
+
+
+def get_attribute(dict, key, attributes, attribute_name):
+ """Get `attribute_name` from the given `attributes` dict, and if there
+ is a corresponding value, set `key` in `dict` to that value."""
+ value = attributes.get(attribute_name)
+ if value:
+ dict[key] = value
+
+
+@transforms.add
+def use_fetches(config, jobs):
+ artifact_names = {}
+ aliases = {}
+
+ if config.kind in ("toolchain", "fetch"):
+ jobs = list(jobs)
+ for job in jobs:
+ run = job.get("run", {})
+ label = job["label"]
+ get_attribute(artifact_names, label, run, "toolchain-artifact")
+ value = run.get("{}-alias".format(config.kind))
+ if value:
+ aliases["{}-{}".format(config.kind, value)] = label
+
+ for task in config.kind_dependencies_tasks.values():
+ if task.kind in ("fetch", "toolchain"):
+ get_attribute(
+ artifact_names,
+ task.label,
+ task.attributes,
+ "{kind}-artifact".format(kind=task.kind),
+ )
+ value = task.attributes.get("{}-alias".format(task.kind))
+ if value:
+ aliases["{}-{}".format(task.kind, value)] = task.label
+
+ artifact_prefixes = {}
+ for job in order_tasks(config, jobs):
+ artifact_prefixes[job["label"]] = get_artifact_prefix(job)
+
+ fetches = job.pop("fetches", None)
+ if not fetches:
+ yield job
+ continue
+
+ job_fetches = []
+ name = job.get("name", job.get("label"))
+ dependencies = job.setdefault("dependencies", {})
+ worker = job.setdefault("worker", {})
+ prefix = get_artifact_prefix(job)
+ has_sccache = False
+ for kind, artifacts in fetches.items():
+ if kind in ("fetch", "toolchain"):
+ for fetch_name in artifacts:
+ label = "{kind}-{name}".format(kind=kind, name=fetch_name)
+ label = aliases.get(label, label)
+ if label not in artifact_names:
+ raise Exception(
+ "Missing fetch job for {kind}-{name}: {fetch}".format(
+ kind=config.kind, name=name, fetch=fetch_name
+ )
+ )
+
+ path = artifact_names[label]
+
+ dependencies[label] = label
+ job_fetches.append(
+ {
+ "artifact": path,
+ "task": "<{label}>".format(label=label),
+ "extract": True,
+ }
+ )
+
+ if kind == "toolchain" and fetch_name.endswith("-sccache"):
+ has_sccache = True
+ else:
+ if kind not in dependencies:
+ raise Exception(
+ "{name} can't fetch {kind} artifacts because "
+ "it has no {kind} dependencies!".format(name=name, kind=kind)
+ )
+ dep_label = dependencies[kind]
+ if dep_label in artifact_prefixes:
+ prefix = artifact_prefixes[dep_label]
+ else:
+ if dep_label not in config.kind_dependencies_tasks:
+ raise Exception(
+ "{name} can't fetch {kind} artifacts because "
+ "there are no tasks with label {label} in kind dependencies!".format(
+ name=name,
+ kind=kind,
+ label=dependencies[kind],
+ )
+ )
+
+ prefix = get_artifact_prefix(
+ config.kind_dependencies_tasks[dep_label]
+ )
+
+ for artifact in artifacts:
+ if isinstance(artifact, text_type):
+ path = artifact
+ dest = None
+ extract = True
+ verify_hash = False
+ else:
+ path = artifact["artifact"]
+ dest = artifact.get("dest")
+ extract = artifact.get("extract", True)
+ verify_hash = artifact.get("verify-hash", False)
+
+ fetch = {
+ "artifact": "{prefix}/{path}".format(prefix=prefix, path=path)
+ if not path.startswith("/")
+ else path[1:],
+ "task": "<{dep}>".format(dep=kind),
+ "extract": extract,
+ }
+ if dest is not None:
+ fetch["dest"] = dest
+ if verify_hash:
+ fetch["verify-hash"] = verify_hash
+ job_fetches.append(fetch)
+
+ if job.get("use-sccache") and not has_sccache:
+ raise Exception("Must provide an sccache toolchain if using sccache.")
+
+ job_artifact_prefixes = {
+ mozpath.dirname(fetch["artifact"])
+ for fetch in job_fetches
+ if not fetch["artifact"].startswith("public/")
+ }
+ if job_artifact_prefixes:
+ # Use taskcluster-proxy and request appropriate scope. For example, add
+ # 'scopes: [queue:get-artifact:path/to/*]' for 'path/to/artifact.tar.xz'.
+ worker["taskcluster-proxy"] = True
+ for prefix in sorted(job_artifact_prefixes):
+ scope = "queue:get-artifact:{}/*".format(prefix)
+ if scope not in job.setdefault("scopes", []):
+ job["scopes"].append(scope)
+
+ env = worker.setdefault("env", {})
+ env["MOZ_FETCHES"] = {
+ "task-reference": six.ensure_text(
+ json.dumps(
+ sorted(job_fetches, key=lambda x: sorted(x.items())), sort_keys=True
+ )
+ )
+ }
+ # The path is normalized to an absolute path in run-task
+ env.setdefault("MOZ_FETCHES_DIR", "fetches")
+
+ yield job
+
+
+@transforms.add
+def make_task_description(config, jobs):
+    """Given a job description, create a task description"""
+ # import plugin modules first, before iterating over jobs
+ import_sibling_modules(exceptions=("common.py",))
+
+ for job in jobs:
+ # only docker-worker uses a fixed absolute path to find directories
+ if job["worker"]["implementation"] == "docker-worker":
+ job["run"].setdefault("workdir", "/builds/worker")
+
+ taskdesc = copy.deepcopy(job)
+
+ # fill in some empty defaults to make run implementations easier
+ taskdesc.setdefault("attributes", {})
+ taskdesc.setdefault("dependencies", {})
+ taskdesc.setdefault("if-dependencies", [])
+ taskdesc.setdefault("soft-dependencies", [])
+ taskdesc.setdefault("routes", [])
+ taskdesc.setdefault("scopes", [])
+ taskdesc.setdefault("extra", {})
+
+ # give the function for job.run.using on this worker implementation a
+ # chance to set up the task description.
+ configure_taskdesc_for_run(
+ config, job, taskdesc, job["worker"]["implementation"]
+ )
+ del taskdesc["run"]
+
+ # yield only the task description, discarding the job description
+ yield taskdesc
+
+
+# A registry of all functions decorated with run_job_using
+registry = {}
+
+
+def run_job_using(worker_implementation, run_using, schema=None, defaults={}):
+ """Register the decorated function as able to set up a task description for
+ jobs with the given worker implementation and `run.using` property. If
+ `schema` is given, the job's run field will be verified to match it.
+
+ The decorated function should have the signature `using_foo(config, job, taskdesc)`
+ and should modify the task description in-place. The skeleton of
+ the task description is already set up, but without a payload."""
+
+ def wrap(func):
+ for_run_using = registry.setdefault(run_using, {})
+ if worker_implementation in for_run_using:
+ raise Exception(
+ "run_job_using({!r}, {!r}) already exists: {!r}".format(
+                    run_using, worker_implementation, for_run_using[worker_implementation]
+ )
+ )
+ for_run_using[worker_implementation] = (func, schema, defaults)
+ return func
+
+ return wrap
+
+
+@run_job_using(
+ "always-optimized", "always-optimized", Schema({"using": "always-optimized"})
+)
+def always_optimized(config, job, taskdesc):
+ pass
+
+
+def configure_taskdesc_for_run(config, job, taskdesc, worker_implementation):
+ """
+ Run the appropriate function for this job against the given task
+ description.
+
+ This will raise an appropriate error if no function exists, or if the job's
+ run is not valid according to the schema.
+ """
+ run_using = job["run"]["using"]
+ if run_using not in registry:
+ raise Exception("no functions for run.using {!r}".format(run_using))
+
+ if worker_implementation not in registry[run_using]:
+ raise Exception(
+ "no functions for run.using {!r} on {!r}".format(
+ run_using, worker_implementation
+ )
+ )
+
+ func, schema, defaults = registry[run_using][worker_implementation]
+ for k, v in defaults.items():
+ job["run"].setdefault(k, v)
+
+ if schema:
+ validate_schema(
+ schema,
+ job["run"],
+ "In job.run using {!r}/{!r} for job {!r}:".format(
+ job["run"]["using"], worker_implementation, job["label"]
+ ),
+ )
+ func(config, job, taskdesc)
diff --git a/taskcluster/taskgraph/transforms/job/common.py b/taskcluster/taskgraph/transforms/job/common.py
new file mode 100644
index 0000000000..4a63655105
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/job/common.py
@@ -0,0 +1,254 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Common support for various job types. Most of these functions are named after
+the worker implementation they operate on and, for consistency, take the same
+(config, job, taskdesc) core of parameters.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from taskgraph.util.taskcluster import get_artifact_prefix
+
+SECRET_SCOPE = "secrets:get:project/releng/{trust_domain}/{kind}/level-{level}/{secret}"
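+# For example (values are illustrative), SECRET_SCOPE.format(
+#     trust_domain="gecko", kind="build", level="3", secret="tooltool")
+# expands to "secrets:get:project/releng/gecko/build/level-3/tooltool".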
+
+
+def add_cache(job, taskdesc, name, mount_point, skip_untrusted=False):
+ """Adds a cache based on the worker's implementation.
+
+ Args:
+ job (dict): Task's job description.
+ taskdesc (dict): Target task description to modify.
+ name (str): Name of the cache.
+ mount_point (path): Path on the host to mount the cache.
+ skip_untrusted (bool): Whether cache is used in untrusted environments
+ (default: False). Only applies to docker-worker.
+ """
+ if not job["run"].get("use-caches", True):
+ return
+
+ worker = job["worker"]
+
+ if worker["implementation"] == "docker-worker":
+ taskdesc["worker"].setdefault("caches", []).append(
+ {
+ "type": "persistent",
+ "name": name,
+ "mount-point": mount_point,
+ "skip-untrusted": skip_untrusted,
+ }
+ )
+
+ elif worker["implementation"] == "generic-worker":
+ taskdesc["worker"].setdefault("mounts", []).append(
+ {
+ "cache-name": name,
+ "directory": mount_point,
+ }
+ )
+
+ else:
+ # Caches not implemented
+ pass
+
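+# Illustrative usage from a run-using implementation (the cache name and mount
+# point here are invented):
+#
+#     add_cache(job, taskdesc, "example-build-cache",
+#               "{workdir}/cache".format(**job["run"]))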
+
+def add_artifacts(config, job, taskdesc, path):
+ taskdesc["worker"].setdefault("artifacts", []).append(
+ {
+ "name": get_artifact_prefix(taskdesc),
+ "path": path,
+ "type": "directory",
+ }
+ )
+
+
+def docker_worker_add_artifacts(config, job, taskdesc):
+    """Adds an artifact directory to the task."""
+ path = "{workdir}/artifacts/".format(**job["run"])
+ taskdesc["worker"].setdefault("env", {})["UPLOAD_DIR"] = path
+ add_artifacts(config, job, taskdesc, path)
+
+
+def generic_worker_add_artifacts(config, job, taskdesc):
+    """Adds an artifact directory to the task."""
+ # The path is the location on disk; it doesn't necessarily
+ # mean the artifacts will be public or private; that is set via the name
+ # attribute in add_artifacts.
+ path = get_artifact_prefix(taskdesc)
+ taskdesc["worker"].setdefault("env", {})["UPLOAD_DIR"] = path
+ add_artifacts(config, job, taskdesc, path=path)
+
+
+def support_vcs_checkout(config, job, taskdesc, sparse=False):
+ """Update a job/task with parameters to enable a VCS checkout.
+
+ This can only be used with ``run-task`` tasks, as the cache name is
+ reserved for ``run-task`` tasks.
+ """
+ worker = job["worker"]
+ is_mac = worker["os"] == "macosx"
+ is_win = worker["os"] == "windows"
+    is_linux = worker["os"] in ("linux", "linux-bitbar")
+ is_docker = worker["implementation"] == "docker-worker"
+ assert is_mac or is_win or is_linux
+
+ if is_win:
+ checkoutdir = "./build"
+ geckodir = "{}/src".format(checkoutdir)
+ hgstore = "y:/hg-shared"
+ elif is_docker:
+ checkoutdir = "{workdir}/checkouts".format(**job["run"])
+ geckodir = "{}/gecko".format(checkoutdir)
+ hgstore = "{}/hg-store".format(checkoutdir)
+ else:
+ checkoutdir = "./checkouts"
+ geckodir = "{}/gecko".format(checkoutdir)
+ hgstore = "{}/hg-shared".format(checkoutdir)
+
+ cache_name = "checkouts"
+
+ # Sparse checkouts need their own cache because they can interfere
+ # with clients that aren't sparse aware.
+ if sparse:
+ cache_name += "-sparse"
+
+ add_cache(job, taskdesc, cache_name, checkoutdir)
+
+ taskdesc["worker"].setdefault("env", {}).update(
+ {
+ "GECKO_BASE_REPOSITORY": config.params["base_repository"],
+ "GECKO_HEAD_REPOSITORY": config.params["head_repository"],
+ "GECKO_HEAD_REV": config.params["head_rev"],
+ "HG_STORE_PATH": hgstore,
+ }
+ )
+ taskdesc["worker"]["env"].setdefault("GECKO_PATH", geckodir)
+
+ if "comm_base_repository" in config.params:
+ taskdesc["worker"]["env"].update(
+ {
+ "COMM_BASE_REPOSITORY": config.params["comm_base_repository"],
+ "COMM_HEAD_REPOSITORY": config.params["comm_head_repository"],
+ "COMM_HEAD_REV": config.params["comm_head_rev"],
+ }
+ )
+ elif job["run"].get("comm-checkout", False):
+ raise Exception(
+ "Can't checkout from comm-* repository if not given a repository."
+ )
+
+ # Give task access to hgfingerprint secret so it can pin the certificate
+ # for hg.mozilla.org.
+ taskdesc["scopes"].append("secrets:get:project/taskcluster/gecko/hgfingerprint")
+ taskdesc["scopes"].append("secrets:get:project/taskcluster/gecko/hgmointernal")
+
+ # only some worker platforms have taskcluster-proxy enabled
+ if job["worker"]["implementation"] in ("docker-worker",):
+ taskdesc["worker"]["taskcluster-proxy"] = True
+
+
+def generic_worker_hg_commands(
+ base_repo, head_repo, head_rev, path, sparse_profile=None
+):
+    """Build the commands needed to perform a Mercurial checkout on generic-worker.
+
+    Returns a list of two command strings: the first performs the checkout,
+    the second emits a TinderboxPrint log line for the revision.
+ """
+ args = [
+ r'"c:\Program Files\Mercurial\hg.exe"',
+ "robustcheckout",
+ "--sharebase",
+ r"y:\hg-shared",
+ "--purge",
+ "--upstream",
+ base_repo,
+ "--revision",
+ head_rev,
+ ]
+
+ if sparse_profile:
+ args.extend(["--config", "extensions.sparse="])
+ args.extend(["--sparseprofile", sparse_profile])
+
+ args.extend(
+ [
+ head_repo,
+ path,
+ ]
+ )
+
+    logging_args = [
+        ":: TinderboxPrint:<a href={source_repo}/rev/{revision} "
+        "title='Built from {repo_name} revision {revision}'>{revision}</a>"
+        "\n".format(
+            revision=head_rev, source_repo=head_repo, repo_name=head_repo.split("/")[-1]
+        ),
+    ]
+
+ return [" ".join(args), " ".join(logging_args)]
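+
+# A sketch of how a caller might consume the two returned strings (variable
+# names and the checkout path are illustrative):
+#
+#     checkout_cmd, log_cmd = generic_worker_hg_commands(
+#         base_repo, head_repo, head_rev, path="./build/src")
+#     worker["command"] = [checkout_cmd, log_cmd]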
+
+
+def setup_secrets(config, job, taskdesc):
+ """Set up access to secrets via taskcluster-proxy. The value of
+ run['secrets'] should be a boolean or a list of secret names that
+ can be accessed."""
+ if not job["run"].get("secrets"):
+ return
+
+ taskdesc["worker"]["taskcluster-proxy"] = True
+ secrets = job["run"]["secrets"]
+ if secrets is True:
+ secrets = ["*"]
+ for secret in secrets:
+ taskdesc["scopes"].append(
+ SECRET_SCOPE.format(
+ trust_domain=config.graph_config["trust-domain"],
+ kind=job["treeherder"]["kind"],
+ level=config.params["level"],
+ secret=secret,
+ )
+ )
+
+
+def add_tooltool(config, job, taskdesc, internal=False):
+ """Give the task access to tooltool.
+
+ Enables the tooltool cache. Adds releng proxy. Configures scopes.
+
+ By default, only public tooltool access will be granted. Access to internal
+ tooltool can be enabled via ``internal=True``.
+
+ This can only be used with ``run-task`` tasks, as the cache name is
+ reserved for use with ``run-task``.
+ """
+
+ if job["worker"]["implementation"] in ("docker-worker",):
+ add_cache(
+ job,
+ taskdesc,
+ "tooltool-cache",
+ "{workdir}/tooltool-cache".format(**job["run"]),
+ )
+
+ taskdesc["worker"].setdefault("env", {}).update(
+ {
+ "TOOLTOOL_CACHE": "{workdir}/tooltool-cache".format(**job["run"]),
+ }
+ )
+ elif not internal:
+ return
+
+ taskdesc["worker"]["taskcluster-proxy"] = True
+ taskdesc["scopes"].extend(
+ [
+ "project:releng:services/tooltool/api/download/public",
+ ]
+ )
+
+ if internal:
+ taskdesc["scopes"].extend(
+ [
+ "project:releng:services/tooltool/api/download/internal",
+ ]
+ )
diff --git a/taskcluster/taskgraph/transforms/job/debian_package.py b/taskcluster/taskgraph/transforms/job/debian_package.py
new file mode 100644
index 0000000000..1d7bafee93
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/job/debian_package.py
@@ -0,0 +1,215 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Support for building Debian packages via dedicated scripts
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import os
+import re
+from six import text_type
+
+from taskgraph.util.schema import Schema
+from voluptuous import Any, Optional, Required
+
+from taskgraph.transforms.job import run_job_using
+from taskgraph.transforms.job.common import add_artifacts
+
+from taskgraph.util.hash import hash_path
+from taskgraph.util.taskcluster import get_root_url
+from taskgraph import GECKO
+import taskgraph
+
+DSC_PACKAGE_RE = re.compile(".*(?=_)")
+SOURCE_PACKAGE_RE = re.compile(r".*(?=[-_]\d)")
+
+source_definition = {
+ Required("url"): text_type,
+ Required("sha256"): text_type,
+}
+
+run_schema = Schema(
+ {
+ Required("using"): "debian-package",
+ # Debian distribution
+ Required("dist"): text_type,
+ # Date of the snapshot (from snapshot.debian.org) to use, in the format
+ # YYYYMMDDTHHMMSSZ. The same date is used for the base docker-image name
+ # (only the YYYYMMDD part).
+ Required("snapshot"): text_type,
+    # URL/SHA256 of the source file to build, which can either be a Debian
+    # source control file (.dsc) or a tarball.
+ Required(Any("dsc", "tarball")): source_definition,
+    # Package name. Normally derived from the .dsc or tarball file name.
+    # Set this when the file name doesn't match DSC_PACKAGE_RE or
+    # SOURCE_PACKAGE_RE.
+ Optional("name"): text_type,
+ # Patch to apply to the extracted source.
+ Optional("patch"): text_type,
+ # Command to run before dpkg-buildpackage.
+ Optional("pre-build-command"): text_type,
+ # Architecture to build the package for.
+ Optional("arch"): text_type,
+ # List of package tasks to get build dependencies from.
+ Optional("packages"): [text_type],
+ # What resolver to use to install build dependencies. The default
+ # (apt-get) is good in most cases, but in subtle cases involving
+ # a *-backports archive, its solver might not be able to find a
+ # solution that satisfies the build dependencies.
+ Optional("resolver"): Any("apt-get", "aptitude"),
+ # Base work directory used to set up the task.
+ Required("workdir"): text_type,
+ }
+)
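+
+# A hypothetical run section matching this schema (all values are invented and
+# only meant to show the shape of a task definition):
+#
+#     run:
+#         using: debian-package
+#         dist: stretch
+#         snapshot: "20200101T000000Z"
+#         tarball:
+#             url: https://example.com/foo-1.0.tar.gz
+#             sha256: <sha256 of the tarball>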
+
+
+@run_job_using("docker-worker", "debian-package", schema=run_schema)
+def docker_worker_debian_package(config, job, taskdesc):
+ run = job["run"]
+
+ name = taskdesc["label"].replace("{}-".format(config.kind), "", 1)
+
+ arch = run.get("arch", "amd64")
+
+ worker = taskdesc["worker"]
+ worker.setdefault("artifacts", [])
+ version = {
+ "wheezy": 7,
+ "jessie": 8,
+ "stretch": 9,
+ "buster": 10,
+ }[run["dist"]]
+ image = "debian%d" % version
+ if arch != "amd64":
+ image += "-" + arch
+ image += "-packages"
+ worker["docker-image"] = {"in-tree": image}
+
+ add_artifacts(config, job, taskdesc, path="/tmp/artifacts")
+
+ env = worker.setdefault("env", {})
+ env["DEBFULLNAME"] = "Mozilla build team"
+ env["DEBEMAIL"] = "dev-builds@lists.mozilla.org"
+
+ if "dsc" in run:
+ src = run["dsc"]
+ unpack = "dpkg-source -x {src_file} {package}"
+ package_re = DSC_PACKAGE_RE
+ elif "tarball" in run:
+ src = run["tarball"]
+ unpack = (
+ "mkdir {package} && "
+ "tar -C {package} -axf {src_file} --strip-components=1"
+ )
+ package_re = SOURCE_PACKAGE_RE
+ else:
+ raise RuntimeError("Unreachable")
+ src_url = src["url"]
+ src_file = os.path.basename(src_url)
+ src_sha256 = src["sha256"]
+ package = run.get("name")
+ if not package:
+ package = package_re.match(src_file).group(0)
+ unpack = unpack.format(src_file=src_file, package=package)
+
+ resolver = run.get("resolver", "apt-get")
+ if resolver == "apt-get":
+ resolver = "apt-get -yyq --no-install-recommends"
+ elif resolver == "aptitude":
+ resolver = (
+ "aptitude -y --without-recommends -o "
+ "Aptitude::ProblemResolver::Hints::KeepBuildDeps="
+ '"reject {}-build-deps :UNINST"'
+ ).format(package)
+ else:
+ raise RuntimeError("Unreachable")
+
+ adjust = ""
+ if "patch" in run:
+ # We don't use robustcheckout or run-task to get a checkout. So for
+ # this one file we'd need from a checkout, download it.
+ env["PATCH_URL"] = config.params.file_url(
+ "build/debian-packages/{patch}".format(patch=run["patch"]),
+ )
+ adjust += "curl -sL $PATCH_URL | patch -p1 && "
+ if "pre-build-command" in run:
+ adjust += run["pre-build-command"] + " && "
+ if "tarball" in run:
+ adjust += "mv ../{src_file} ../{package}_{ver}.orig.tar.gz && ".format(
+ src_file=src_file,
+ package=package,
+ ver="$(dpkg-parsechangelog | awk '$1==\"Version:\"{print $2}' | cut -f 1 -d -)",
+ )
+ if "patch" not in run and "pre-build-command" not in run:
+ adjust += (
+ 'debchange -l ".{prefix}moz" --distribution "{dist}"'
+ ' "Mozilla backport for {dist}." < /dev/null && '
+ ).format(
+ prefix=name.split("-", 1)[0],
+ dist=run["dist"],
+ )
+
+ worker["command"] = [
+ "sh",
+ "-x",
+ "-c",
+ # Add sources for packages coming from other package tasks.
+ "/usr/local/sbin/setup_packages.sh {root_url} $PACKAGES && "
+ "apt-get update && "
+ # Upgrade packages that might have new versions in package tasks.
+        "apt-get dist-upgrade && "
+        "cd /tmp && "
+ # Get, validate and extract the package source.
+ "(dget -d -u {src_url} || exit 100) && "
+ 'echo "{src_sha256} {src_file}" | sha256sum -c && '
+ "{unpack} && "
+ "cd {package} && "
+ # Optionally apply patch and/or pre-build command.
+ "{adjust}"
+ # Install the necessary build dependencies.
+ "(mk-build-deps -i -r debian/control -t '{resolver}' || exit 100) && "
+ # Build the package
+ 'DEB_BUILD_OPTIONS="parallel=$(nproc) nocheck" dpkg-buildpackage && '
+ # Copy the artifacts
+ "mkdir -p {artifacts}/debian && "
+ "dcmd cp ../{package}_*.changes {artifacts}/debian/ && "
+ "cd {artifacts} && "
+ # Make the artifacts directory usable as an APT repository.
+ "apt-ftparchive sources debian | gzip -c9 > debian/Sources.gz && "
+ "apt-ftparchive packages debian | gzip -c9 > debian/Packages.gz".format(
+ root_url=get_root_url(False),
+ package=package,
+ src_url=src_url,
+ src_file=src_file,
+ src_sha256=src_sha256,
+ unpack=unpack,
+ adjust=adjust,
+ artifacts="/tmp/artifacts",
+ resolver=resolver,
+ ),
+ ]
+
+ if run.get("packages"):
+ env = worker.setdefault("env", {})
+ env["PACKAGES"] = {
+ "task-reference": " ".join("<{}>".format(p) for p in run["packages"])
+ }
+ deps = taskdesc.setdefault("dependencies", {})
+ for p in run["packages"]:
+ deps[p] = "packages-{}".format(p)
+
+ # Use the command generated above as the base for the index hash.
+ # We rely on it not varying depending on the head_repository or head_rev.
+ digest_data = list(worker["command"])
+ if "patch" in run:
+ digest_data.append(
+ hash_path(os.path.join(GECKO, "build", "debian-packages", run["patch"]))
+ )
+
+ if not taskgraph.fast:
+ taskdesc["cache"] = {
+ "type": "packages.v1",
+ "name": name,
+ "digest-data": digest_data,
+ }
diff --git a/taskcluster/taskgraph/transforms/job/hazard.py b/taskcluster/taskgraph/transforms/job/hazard.py
new file mode 100644
index 0000000000..66a33a2001
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/job/hazard.py
@@ -0,0 +1,71 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Support for running hazard jobs via dedicated scripts
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from six import text_type
+from taskgraph.util.schema import Schema
+from voluptuous import Required, Optional, Any
+
+from taskgraph.transforms.job import (
+ run_job_using,
+ configure_taskdesc_for_run,
+)
+from taskgraph.transforms.job.common import (
+ setup_secrets,
+ docker_worker_add_artifacts,
+ add_tooltool,
+)
+
+haz_run_schema = Schema(
+ {
+ Required("using"): "hazard",
+ # The command to run within the task image (passed through to the worker)
+ Required("command"): text_type,
+ # The mozconfig to use; default in the script is used if omitted
+ Optional("mozconfig"): text_type,
+ # The set of secret names to which the task has access; these are prefixed
+ # with `project/releng/gecko/{treeherder.kind}/level-{level}/`. Setting
+ # this will enable any worker features required and set the task's scopes
+ # appropriately. `true` here means ['*'], all secrets. Not supported on
+ # Windows
+ Optional("secrets"): Any(bool, [text_type]),
+ # Base work directory used to set up the task.
+ Optional("workdir"): text_type,
+ }
+)
+
+
+@run_job_using("docker-worker", "hazard", schema=haz_run_schema)
+def docker_worker_hazard(config, job, taskdesc):
+ run = job["run"]
+
+ worker = taskdesc["worker"] = job["worker"]
+ worker.setdefault("artifacts", [])
+
+ docker_worker_add_artifacts(config, job, taskdesc)
+ worker.setdefault("required-volumes", []).append(
+ "{workdir}/workspace".format(**run)
+ )
+ add_tooltool(config, job, taskdesc)
+ setup_secrets(config, job, taskdesc)
+
+ env = worker["env"]
+ env.update(
+ {
+ "MOZ_BUILD_DATE": config.params["moz_build_date"],
+ "MOZ_SCM_LEVEL": config.params["level"],
+ }
+ )
+
+ # script parameters
+ if run.get("mozconfig"):
+ env["MOZCONFIG"] = run.pop("mozconfig")
+
+ run["using"] = "run-task"
+ run["cwd"] = run["workdir"]
+ configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
diff --git a/taskcluster/taskgraph/transforms/job/mach.py b/taskcluster/taskgraph/transforms/job/mach.py
new file mode 100644
index 0000000000..5a771a743c
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/job/mach.py
@@ -0,0 +1,84 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Support for running mach tasks (via run-task)
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from six import text_type
+from taskgraph.transforms.job import run_job_using, configure_taskdesc_for_run
+from taskgraph.util.schema import (
+ Schema,
+ taskref_or_string,
+)
+from voluptuous import Required, Optional, Any
+
+mach_schema = Schema(
+ {
+ Required("using"): "mach",
+ # The mach command (omitting `./mach`) to run
+ Required("mach"): taskref_or_string,
+        # The version of Python to run with. Either an absolute path to the
+        # binary on the worker, or a version identifier (e.g. python2.7 or
+        # 3.8). There is no validation performed to ensure the specified
+        # binaries actually exist.
+ Optional("python-version"): Any(text_type, int, float),
+ # The sparse checkout profile to use. Value is the filename relative to the
+ # directory where sparse profiles are defined (build/sparse-profiles/).
+ Optional("sparse-profile"): Any(text_type, None),
+ # if true, perform a checkout of a comm-central based branch inside the
+ # gecko checkout
+ Required("comm-checkout"): bool,
+ # Base work directory used to set up the task.
+ Optional("workdir"): text_type,
+ }
+)
+
+
+defaults = {
+ "comm-checkout": False,
+}
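+
+# An illustrative run section for this schema (the mach subcommand shown is
+# invented):
+#
+#     run:
+#         using: mach
+#         mach: lint -l codespell
+#         python-version: 3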
+
+
+@run_job_using("docker-worker", "mach", schema=mach_schema, defaults=defaults)
+@run_job_using("generic-worker", "mach", schema=mach_schema, defaults=defaults)
+def configure_mach(config, job, taskdesc):
+ run = job["run"]
+ worker = job["worker"]
+
+ additional_prefix = []
+ if worker["os"] == "macosx":
+ additional_prefix = ["LC_ALL=en_US.UTF-8", "LANG=en_US.UTF-8"]
+
+ python = run.get("python-version")
+ if python:
+ del run["python-version"]
+
+ if worker["os"] == "macosx" and python == 3:
+ python = "/usr/local/bin/python3"
+
+ python = str(python)
+ try:
+ float(python)
+ python = "python" + python
+ except ValueError:
+ pass
+
+ additional_prefix.append(python)
+
+    # Note: "./mach " deliberately keeps a trailing space; the mach subcommand
+    # string is concatenated directly onto this prefix below.
+    command_prefix = " ".join(additional_prefix + ["./mach "])
+
+ mach = run["mach"]
+ if isinstance(mach, dict):
+ ref, pattern = next(iter(mach.items()))
+ command = {ref: command_prefix + pattern}
+ else:
+ command = command_prefix + mach
+
+ # defer to the run_task implementation
+ run["command"] = command
+ run["cwd"] = "{checkout}"
+ run["using"] = "run-task"
+ del run["mach"]
+ configure_taskdesc_for_run(config, job, taskdesc, job["worker"]["implementation"])
diff --git a/taskcluster/taskgraph/transforms/job/mozharness.py b/taskcluster/taskgraph/transforms/job/mozharness.py
new file mode 100644
index 0000000000..0b1da061cd
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/job/mozharness.py
@@ -0,0 +1,369 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+
+Support for running jobs via mozharness. Ideally, most stuff gets run this
+way, and certainly anything using mozharness should use this approach.
+
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+import json
+
+import six
+from six import text_type
+from textwrap import dedent
+
+from taskgraph.util.schema import Schema
+from voluptuous import Required, Optional, Any
+from voluptuous.validators import Match
+
+from mozpack import path as mozpath
+
+from taskgraph.transforms.job import (
+ configure_taskdesc_for_run,
+ run_job_using,
+)
+from taskgraph.transforms.job.common import (
+ setup_secrets,
+ docker_worker_add_artifacts,
+ generic_worker_add_artifacts,
+)
+from taskgraph.transforms.task import (
+ get_branch_repo,
+ get_branch_rev,
+)
+
+mozharness_run_schema = Schema(
+ {
+ Required("using"): "mozharness",
+ # the mozharness script used to run this task, relative to the testing/
+ # directory and using forward slashes even on Windows
+ Required("script"): text_type,
+ # Additional paths to look for mozharness configs in. These should be
+ # relative to the base of the source checkout
+ Optional("config-paths"): [text_type],
+ # the config files required for the task, relative to
+ # testing/mozharness/configs or one of the paths specified in
+ # `config-paths` and using forward slashes even on Windows
+ Required("config"): [text_type],
+ # any additional actions to pass to the mozharness command
+ Optional("actions"): [
+        Match("^[a-z0-9-]+$", "actions must be `-` separated alphanumeric strings")
+ ],
+ # any additional options (without leading --) to be passed to mozharness
+ Optional("options"): [
+ Match(
+ "^[a-z0-9-]+(=[^ ]+)?$",
+            "options must be `-` separated alphanumeric strings (with optional argument)",
+ )
+ ],
+ # --custom-build-variant-cfg value
+ Optional("custom-build-variant-cfg"): text_type,
+ # Extra configuration options to pass to mozharness.
+ Optional("extra-config"): dict,
+ # If not false, tooltool downloads will be enabled via relengAPIProxy
+ # for either just public files, or all files. Not supported on Windows
+ Required("tooltool-downloads"): Any(
+ False,
+ "public",
+ "internal",
+ ),
+ # The set of secret names to which the task has access; these are prefixed
+ # with `project/releng/gecko/{treeherder.kind}/level-{level}/`. Setting
+ # this will enable any worker features required and set the task's scopes
+ # appropriately. `true` here means ['*'], all secrets. Not supported on
+ # Windows
+ Required("secrets"): Any(bool, [text_type]),
+ # If true, taskcluster proxy will be enabled; note that it may also be enabled
+ # automatically e.g., for secrets support. Not supported on Windows.
+ Required("taskcluster-proxy"): bool,
+ # If true, the build scripts will start Xvfb. Not supported on Windows.
+ Required("need-xvfb"): bool,
+ # If false, indicate that builds should skip producing artifacts. Not
+ # supported on Windows.
+ Required("keep-artifacts"): bool,
+ # If specified, use the in-tree job script specified.
+ Optional("job-script"): text_type,
+ Required("requires-signed-builds"): bool,
+ # Whether or not to use caches.
+ Optional("use-caches"): bool,
+        # If false, don't set MOZ_SIMPLE_PACKAGE_NAME. Can only be disabled on
+        # Windows.
+        Required("use-simple-package"): bool,
+        # If false, don't pass --branch to the mozharness script. Can only be
+        # disabled on Windows.
+        Required("use-magic-mh-args"): bool,
+ # if true, perform a checkout of a comm-central based branch inside the
+ # gecko checkout
+ Required("comm-checkout"): bool,
+ # Base work directory used to set up the task.
+ Optional("workdir"): text_type,
+ }
+)
+
+
+mozharness_defaults = {
+ "tooltool-downloads": False,
+ "secrets": False,
+ "taskcluster-proxy": False,
+ "need-xvfb": False,
+ "keep-artifacts": True,
+ "requires-signed-builds": False,
+ "use-simple-package": True,
+ "use-magic-mh-args": True,
+ "comm-checkout": False,
+}
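+
+# For illustration, a run section using this schema might look like the
+# following (the script and config names are examples, not a real task
+# definition):
+#
+#     run:
+#         using: mozharness
+#         script: mozharness/scripts/fx_desktop_build.py
+#         config:
+#             - builds/releng_base_firefox.py
+#         actions: [build]
+#         need-xvfb: true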
+
+
+@run_job_using(
+ "docker-worker",
+ "mozharness",
+ schema=mozharness_run_schema,
+ defaults=mozharness_defaults,
+)
+def mozharness_on_docker_worker_setup(config, job, taskdesc):
+ run = job["run"]
+
+ worker = taskdesc["worker"] = job["worker"]
+
+    if not run.pop("use-simple-package", None):
+        raise NotImplementedError(
+            "Simple packaging cannot be disabled via "
+            "'use-simple-package' on docker-workers"
+        )
+    if not run.pop("use-magic-mh-args", None):
+        raise NotImplementedError(
+            "Cannot disable mh magic arg passing via "
+            "'use-magic-mh-args' on docker-workers"
+        )
+
+ # Running via mozharness assumes an image that contains build.sh:
+ # by default, debian8-amd64-build, but it could be another image (like
+ # android-build).
+ worker.setdefault("docker-image", {"in-tree": "debian8-amd64-build"})
+
+ worker.setdefault("artifacts", []).append(
+ {
+ "name": "public/logs",
+ "path": "{workdir}/logs/".format(**run),
+ "type": "directory",
+ }
+ )
+ worker["taskcluster-proxy"] = run.pop("taskcluster-proxy", None)
+ docker_worker_add_artifacts(config, job, taskdesc)
+
+ env = worker.setdefault("env", {})
+ env.update(
+ {
+ "WORKSPACE": "{workdir}/workspace".format(**run),
+ "MOZHARNESS_CONFIG": " ".join(run.pop("config")),
+ "MOZHARNESS_SCRIPT": run.pop("script"),
+ "MH_BRANCH": config.params["project"],
+ "MOZ_SOURCE_CHANGESET": get_branch_rev(config),
+ "MOZ_SOURCE_REPO": get_branch_repo(config),
+ "MH_BUILD_POOL": "taskcluster",
+ "MOZ_BUILD_DATE": config.params["moz_build_date"],
+ "MOZ_SCM_LEVEL": config.params["level"],
+ "PYTHONUNBUFFERED": "1",
+ }
+ )
+
+ worker.setdefault("required-volumes", []).append(env["WORKSPACE"])
+
+ if "actions" in run:
+ env["MOZHARNESS_ACTIONS"] = " ".join(run.pop("actions"))
+
+ if "options" in run:
+ env["MOZHARNESS_OPTIONS"] = " ".join(run.pop("options"))
+
+ if "config-paths" in run:
+ env["MOZHARNESS_CONFIG_PATHS"] = " ".join(run.pop("config-paths"))
+
+ if "custom-build-variant-cfg" in run:
+ env["MH_CUSTOM_BUILD_VARIANT_CFG"] = run.pop("custom-build-variant-cfg")
+
+ extra_config = run.pop("extra-config", {})
+ extra_config["objdir"] = "obj-build"
+ env["EXTRA_MOZHARNESS_CONFIG"] = six.ensure_text(
+ json.dumps(extra_config, sort_keys=True)
+ )
+
+ if "job-script" in run:
+ env["JOB_SCRIPT"] = run["job-script"]
+
+ if config.params.is_try():
+ env["TRY_COMMIT_MSG"] = config.params["message"]
+
+ # if we're not keeping artifacts, set some env variables to empty values
+ # that will cause the build process to skip copying the results to the
+ # artifacts directory. This will have no effect for operations that are
+ # not builds.
+ if not run.pop("keep-artifacts"):
+ env["DIST_TARGET_UPLOADS"] = ""
+ env["DIST_UPLOADS"] = ""
+
+ # Xvfb
+ if run.pop("need-xvfb"):
+ env["NEED_XVFB"] = "true"
+ else:
+ env["NEED_XVFB"] = "false"
+
+ # Retry if mozharness returns TBPL_RETRY
+ worker["retry-exit-status"] = [4]
+
+ setup_secrets(config, job, taskdesc)
+
+ run["using"] = "run-task"
+ run["command"] = mozpath.join(
+ "${GECKO_PATH}",
+ run.pop("job-script", "taskcluster/scripts/builder/build-linux.sh"),
+ )
+ run.pop("secrets")
+ run.pop("requires-signed-builds")
+
+ configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
+
+
+@run_job_using(
+ "generic-worker",
+ "mozharness",
+ schema=mozharness_run_schema,
+ defaults=mozharness_defaults,
+)
+def mozharness_on_generic_worker(config, job, taskdesc):
+ assert job["worker"]["os"] in (
+ "windows",
+ "macosx",
+    ), "only supports Windows and macOS right now: {}".format(job["label"])
+
+ run = job["run"]
+
+ # fail if invalid run options are included
+ invalid = []
+ for prop in ["need-xvfb"]:
+ if prop in run and run.pop(prop):
+ invalid.append(prop)
+ if not run.pop("keep-artifacts", True):
+ invalid.append("keep-artifacts")
+ if invalid:
+ raise Exception(
+            "Jobs run using mozharness on generic-worker do not support properties "
+ + ", ".join(invalid)
+ )
+
+ worker = taskdesc["worker"] = job["worker"]
+
+ worker["taskcluster-proxy"] = run.pop("taskcluster-proxy", None)
+
+ setup_secrets(config, job, taskdesc)
+
+ taskdesc["worker"].setdefault("artifacts", []).append(
+ {"name": "public/logs", "path": "logs", "type": "directory"}
+ )
+ if not worker.get("skip-artifacts", False):
+ generic_worker_add_artifacts(config, job, taskdesc)
+
+ env = worker["env"]
+ env.update(
+ {
+ "MOZ_BUILD_DATE": config.params["moz_build_date"],
+ "MOZ_SCM_LEVEL": config.params["level"],
+ "MH_BRANCH": config.params["project"],
+ "MOZ_SOURCE_CHANGESET": get_branch_rev(config),
+ "MOZ_SOURCE_REPO": get_branch_repo(config),
+ }
+ )
+ if run.pop("use-simple-package"):
+ env.update({"MOZ_SIMPLE_PACKAGE_NAME": "target"})
+
+ extra_config = run.pop("extra-config", {})
+ extra_config["objdir"] = "obj-build"
+ env["EXTRA_MOZHARNESS_CONFIG"] = six.ensure_text(
+ json.dumps(extra_config, sort_keys=True)
+ )
+
+    # The Windows generic worker uses batch files to pass environment variables
+    # to commands. Setting a variable to empty in a batch file unsets it, so if
+    # the push has no commit message, set TRY_COMMIT_MSG to a placeholder
+    # instead so that mozharness doesn't try to find the commit message on its
+    # own.
+ if config.params.is_try():
+ env["TRY_COMMIT_MSG"] = config.params["message"] or "no commit message"
+
+ if not job["attributes"]["build_platform"].startswith(("win", "macosx")):
+ raise Exception(
+ "Task generation for mozharness build jobs currently only supported on "
+ "Windows and macOS"
+ )
+
+ mh_command = []
+ if job["worker"]["os"] == "windows":
+ mh_command.append("c:/mozilla-build/python3/python3.exe")
+ gecko_path = "%GECKO_PATH%"
+ else:
+ gecko_path = "$GECKO_PATH"
+
+ mh_command += [
+ "{}/mach".format(gecko_path),
+ "python",
+ "--no-activate",
+ "{}/testing/{}".format(gecko_path, run.pop("script")),
+ ]
+
+ for path in run.pop("config-paths", []):
+ mh_command.append("--extra-config-path {}/{}".format(gecko_path, path))
+
+ for cfg in run.pop("config"):
+ mh_command.extend(("--config", cfg))
+ if run.pop("use-magic-mh-args"):
+ mh_command.extend(("--branch", config.params["project"]))
+ if job["worker"]["os"] == "windows":
+ mh_command.extend(("--work-dir", r"%cd:Z:=z:%\workspace"))
+ for action in run.pop("actions", []):
+ mh_command.append("--" + action)
+
+ for option in run.pop("options", []):
+ mh_command.append("--" + option)
+ if run.get("custom-build-variant-cfg"):
+ mh_command.append("--custom-build-variant")
+ mh_command.append(run.pop("custom-build-variant-cfg"))
+
+ if job["worker"]["os"] == "macosx":
+ # Ideally, we'd use shellutil.quote, but that would single-quote
+ # $GECKO_PATH, which would defeat having the variable in the command
+ # in the first place, as it wouldn't be expanded.
+ # In practice, arguments are expected not to contain characters that
+ # would require quoting.
+ mh_command = " ".join(mh_command)
+
+ run["using"] = "run-task"
+ run["command"] = mh_command
+ run.pop("secrets")
+ run.pop("requires-signed-builds")
+ run.pop("job-script", None)
+ configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
+
+ # Everything past this point is Windows-specific.
+ if job["worker"]["os"] == "macosx":
+ return
+
+ if taskdesc.get("use-sccache"):
+ worker["command"] = (
+ [
+ # Make the comment part of the first command, as it will help users to
+ # understand what is going on, and why these steps are implemented.
+ dedent(
+ """\
+ :: sccache currently uses the full compiler commandline as input to the
+ :: cache hash key, so create a symlink to the task dir and build from
+ :: the symlink dir to get consistent paths.
+ if exist z:\\build rmdir z:\\build"""
+ ),
+ r"mklink /d z:\build %cd%",
+ # Grant delete permission on the link to everyone.
+ r"icacls z:\build /grant *S-1-1-0:D /L",
+ r"cd /d z:\build",
+ ]
+ + worker["command"]
+ )
diff --git a/taskcluster/taskgraph/transforms/job/mozharness_test.py b/taskcluster/taskgraph/transforms/job/mozharness_test.py
new file mode 100644
index 0000000000..ff29dcc9fb
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/job/mozharness_test.py
@@ -0,0 +1,452 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import json
+import os
+import re
+
+import six
+from six import text_type
+from voluptuous import Required, Optional
+
+from taskgraph.util.taskcluster import get_artifact_url
+from taskgraph.transforms.job import (
+ configure_taskdesc_for_run,
+ run_job_using,
+)
+from taskgraph.util.schema import Schema
+from taskgraph.util.taskcluster import get_artifact_path
+from taskgraph.transforms.tests import test_description_schema, normpath
+from taskgraph.transforms.job.common import support_vcs_checkout
+
+VARIANTS = [
+ "shippable",
+ "shippable-qr",
+ "devedition",
+ "pgo",
+ "asan",
+ "stylo",
+ "qr",
+ "ccov",
+]
+
+
+def get_variant(test_platform):
+ for v in VARIANTS:
+ if "-{}/".format(v) in test_platform:
+ return v
+ return ""
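+
+# e.g. get_variant("linux1804-64-shippable/opt") returns "shippable", while
+# get_variant("linux1804-64/debug") returns "" because no "-<variant>/" marker
+# is present.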
+
+
+mozharness_test_run_schema = Schema(
+ {
+ Required("using"): "mozharness-test",
+ Required("test"): test_description_schema,
+ # Base work directory used to set up the task.
+ Optional("workdir"): text_type,
+ }
+)
+
+
+def test_packages_url(taskdesc):
+ """Account for different platforms that name their test packages differently"""
+ artifact_url = get_artifact_url(
+ "<build>", get_artifact_path(taskdesc, "target.test_packages.json")
+ )
+ # for android shippable we need to add 'en-US' to the artifact url
+ test = taskdesc["run"]["test"]
+ if "android" in test["test-platform"] and (
+ get_variant(test["test-platform"]) in ("shippable", "shippable-qr")
+ ):
+ head, tail = os.path.split(artifact_url)
+ artifact_url = os.path.join(head, "en-US", tail)
+ return artifact_url
+
+
+def installer_url(taskdesc):
+ test = taskdesc["run"]["test"]
+ mozharness = test["mozharness"]
+
+ if "installer-url" in mozharness:
+ installer_url = mozharness["installer-url"]
+ else:
+ upstream_task = (
+ "<build-signing>" if mozharness["requires-signed-builds"] else "<build>"
+ )
+ installer_url = get_artifact_url(
+ upstream_task, mozharness["build-artifact-name"]
+ )
+
+ return installer_url
+
+
+@run_job_using("docker-worker", "mozharness-test", schema=mozharness_test_run_schema)
+def mozharness_test_on_docker(config, job, taskdesc):
+ run = job["run"]
+ test = taskdesc["run"]["test"]
+ mozharness = test["mozharness"]
+ worker = taskdesc["worker"] = job["worker"]
+
+ # apply some defaults
+ worker["docker-image"] = test["docker-image"]
+ worker["allow-ptrace"] = True # required for all tests, for crashreporter
+ worker["loopback-video"] = test["loopback-video"]
+ worker["loopback-audio"] = test["loopback-audio"]
+ worker["max-run-time"] = test["max-run-time"]
+ worker["retry-exit-status"] = test["retry-exit-status"]
+ if "android-em-7.0-x86" in test["test-platform"]:
+ worker["privileged"] = True
+
+ artifacts = [
+ # (artifact name prefix, in-image path)
+ ("public/logs/", "{workdir}/workspace/logs/".format(**run)),
+ ("public/test", "{workdir}/artifacts/".format(**run)),
+ (
+ "public/test_info/",
+ "{workdir}/workspace/build/blobber_upload_dir/".format(**run),
+ ),
+ ]
+
+ installer = installer_url(taskdesc)
+
+ mozharness_url = get_artifact_url(
+ "<build>", get_artifact_path(taskdesc, "mozharness.zip")
+ )
+
+ worker.setdefault("artifacts", [])
+ worker["artifacts"].extend(
+ [
+ {
+ "name": prefix,
+ "path": os.path.join("{workdir}/workspace".format(**run), path),
+ "type": "directory",
+ }
+ for (prefix, path) in artifacts
+ ]
+ )
+
+ env = worker.setdefault("env", {})
+ env.update(
+ {
+ "MOZHARNESS_CONFIG": " ".join(mozharness["config"]),
+ "MOZHARNESS_SCRIPT": mozharness["script"],
+ "MOZILLA_BUILD_URL": {"task-reference": installer},
+ "NEED_PULSEAUDIO": "true",
+ "NEED_WINDOW_MANAGER": "true",
+ "ENABLE_E10S": text_type(bool(test.get("e10s"))).lower(),
+ "WORKING_DIR": "/builds/worker",
+ }
+ )
+
+ if test.get("python-3"):
+ env["PYTHON"] = "python3"
+
+ # Legacy linux64 tests rely on compiz.
+ if test.get("docker-image", {}).get("in-tree") == "desktop1604-test":
+ env.update({"NEED_COMPIZ": "true"})
+
+    # Bug 1602701/1601828 - use compiz on ubuntu1804 due to GTK asynchrony
+    # when manipulating windows.
+ if test.get("docker-image", {}).get("in-tree") == "ubuntu1804-test":
+ if "wdspec" in job["run"]["test"]["suite"] or (
+ "marionette" in job["run"]["test"]["suite"]
+ and "headless" not in job["label"]
+ ):
+ env.update({"NEED_COMPIZ": "true"})
+
+ if mozharness.get("mochitest-flavor"):
+ env["MOCHITEST_FLAVOR"] = mozharness["mochitest-flavor"]
+
+ if mozharness["set-moz-node-path"]:
+ env["MOZ_NODE_PATH"] = "/usr/local/bin/node"
+
+ if "actions" in mozharness:
+ env["MOZHARNESS_ACTIONS"] = " ".join(mozharness["actions"])
+
+ if config.params.is_try():
+ env["TRY_COMMIT_MSG"] = config.params["message"]
+
+ # handle some of the mozharness-specific options
+ if test["reboot"]:
+ raise Exception(
+            "reboot: {} not supported on docker-worker".format(test["reboot"])
+ )
+
+ # Support vcs checkouts regardless of whether the task runs from
+ # source or not in case it is needed on an interactive loaner.
+ support_vcs_checkout(config, job, taskdesc)
+
+ # If we have a source checkout, run mozharness from it instead of
+ # downloading a zip file with the same content.
+ if test["checkout"]:
+ env["MOZHARNESS_PATH"] = "{workdir}/checkouts/gecko/testing/mozharness".format(
+ **run
+ )
+ else:
+ env["MOZHARNESS_URL"] = {"task-reference": mozharness_url}
+
+ extra_config = {
+ "installer_url": installer,
+ "test_packages_url": test_packages_url(taskdesc),
+ }
+ env["EXTRA_MOZHARNESS_CONFIG"] = {
+ "task-reference": six.ensure_text(json.dumps(extra_config, sort_keys=True))
+ }
+
+ # Bug 1634554 - pass in decision task artifact URL to mozharness for WPT.
+ # Bug 1645974 - test-verify-wpt and test-coverage-wpt need artifact URL.
+ if "web-platform-tests" in test["suite"] or re.match(
+ "test-(coverage|verify)-wpt", test["suite"]
+ ):
+ env["TESTS_BY_MANIFEST_URL"] = {
+ "artifact-reference": "<decision/public/tests-by-manifest.json.gz>"
+ }
+
+ command = [
+ "{workdir}/bin/test-linux.sh".format(**run),
+ ]
+ command.extend(mozharness.get("extra-options", []))
+
+ if test.get("test-manifests"):
+ env["MOZHARNESS_TEST_PATHS"] = six.ensure_text(
+ json.dumps({test["suite"]: test["test-manifests"]}, sort_keys=True)
+ )
+
+ # TODO: remove the need for run['chunked']
+ elif mozharness.get("chunked") or test["chunks"] > 1:
+ command.append("--total-chunk={}".format(test["chunks"]))
+ command.append("--this-chunk={}".format(test["this-chunk"]))
+
+ if "download-symbols" in mozharness:
+ download_symbols = mozharness["download-symbols"]
+ download_symbols = {True: "true", False: "false"}.get(
+ download_symbols, download_symbols
+ )
+ command.append("--download-symbols=" + download_symbols)
+
+ job["run"] = {
+ "workdir": run["workdir"],
+ "tooltool-downloads": mozharness["tooltool-downloads"],
+ "checkout": test["checkout"],
+ "command": command,
+ "using": "run-task",
+ }
+ configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
+
+
+@run_job_using("generic-worker", "mozharness-test", schema=mozharness_test_run_schema)
+def mozharness_test_on_generic_worker(config, job, taskdesc):
+ test = taskdesc["run"]["test"]
+ mozharness = test["mozharness"]
+ worker = taskdesc["worker"] = job["worker"]
+
+ bitbar_script = "test-linux.sh"
+
+ is_macosx = worker["os"] == "macosx"
+ is_windows = worker["os"] == "windows"
+ is_linux = worker["os"] == "linux" or worker["os"] == "linux-bitbar"
+ is_bitbar = worker["os"] == "linux-bitbar"
+ assert is_macosx or is_windows or is_linux
+
+ artifacts = [
+ {"name": "public/logs", "path": "logs", "type": "directory"},
+ ]
+
+ # jittest doesn't have blob_upload_dir
+ if test["test-name"] != "jittest":
+ artifacts.append(
+ {
+ "name": "public/test_info",
+ "path": "build/blobber_upload_dir",
+ "type": "directory",
+ }
+ )
+
+ if is_bitbar:
+ artifacts = [
+ {"name": "public/test/", "path": "artifacts/public", "type": "directory"},
+ {"name": "public/logs/", "path": "workspace/logs", "type": "directory"},
+ {
+ "name": "public/test_info/",
+ "path": "workspace/build/blobber_upload_dir",
+ "type": "directory",
+ },
+ ]
+
+ installer = installer_url(taskdesc)
+
+ worker["os-groups"] = test["os-groups"]
+
+    # run-as-administrator is a feature for workers with UAC enabled, and as such
+    # should not be included in tasks on workers that have UAC disabled. Currently
+    # UAC is only enabled on gecko Windows 10 workers; however, this may change.
+    # Worker type environment definitions can be found in
+    # https://github.com/mozilla-releng/OpenCloudConfig
+    # See https://docs.microsoft.com/en-us/windows/desktop/secauthz/user-account-control
+    # for more information about UAC.
+ if test.get("run-as-administrator", False):
+ if job["worker-type"].startswith("t-win10-64"):
+ worker["run-as-administrator"] = True
+ else:
+ raise Exception(
+ "run-as-administrator not supported on {}".format(job["worker-type"])
+ )
+
+ if test["reboot"]:
+ raise Exception(
+ "reboot: {} not supported on generic-worker".format(test["reboot"])
+ )
+
+ worker["max-run-time"] = test["max-run-time"]
+ worker["retry-exit-status"] = test["retry-exit-status"]
+ worker.setdefault("artifacts", [])
+ worker["artifacts"].extend(artifacts)
+
+ env = worker.setdefault("env", {})
+ env["GECKO_HEAD_REPOSITORY"] = config.params["head_repository"]
+ env["GECKO_HEAD_REV"] = config.params["head_rev"]
+
+ # this list will get cleaned up / reduced / removed in bug 1354088
+ if is_macosx:
+ env.update(
+ {
+ "LC_ALL": "en_US.UTF-8",
+ "LANG": "en_US.UTF-8",
+ "MOZ_NODE_PATH": "/usr/local/bin/node",
+ "PATH": "/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin",
+ "SHELL": "/bin/bash",
+ }
+ )
+ elif is_bitbar:
+ env.update(
+ {
+ "MOZHARNESS_CONFIG": " ".join(mozharness["config"]),
+ "MOZHARNESS_SCRIPT": mozharness["script"],
+ "MOZHARNESS_URL": {
+ "artifact-reference": "<build/public/build/mozharness.zip>"
+ },
+ "MOZILLA_BUILD_URL": {"task-reference": installer},
+ "MOZ_NO_REMOTE": "1",
+ "NEED_XVFB": "false",
+ "XPCOM_DEBUG_BREAK": "warn",
+ "NO_FAIL_ON_TEST_ERRORS": "1",
+ "MOZ_HIDE_RESULTS_TABLE": "1",
+ "MOZ_NODE_PATH": "/usr/local/bin/node",
+ "TASKCLUSTER_WORKER_TYPE": job["worker-type"],
+ }
+ )
+
+ extra_config = {
+ "installer_url": installer,
+ "test_packages_url": test_packages_url(taskdesc),
+ }
+ env["EXTRA_MOZHARNESS_CONFIG"] = {
+ "task-reference": six.ensure_text(json.dumps(extra_config, sort_keys=True))
+ }
+
+ # Bug 1634554 - pass in decision task artifact URL to mozharness for WPT.
+ # Bug 1645974 - test-verify-wpt and test-coverage-wpt need artifact URL.
+ if "web-platform-tests" in test["suite"] or re.match(
+ "test-(coverage|verify)-wpt", test["suite"]
+ ):
+ env["TESTS_BY_MANIFEST_URL"] = {
+ "artifact-reference": "<decision/public/tests-by-manifest.json.gz>"
+ }
+
+ py_3 = test.get("python-3", False)
+
+ if is_windows:
+ py_binary = "c:\\mozilla-build\\{python}\\{python}.exe".format(
+ python="python3" if py_3 else "python"
+ )
+ mh_command = [
+ py_binary,
+ "-u",
+ "mozharness\\scripts\\" + normpath(mozharness["script"]),
+ ]
+ elif is_bitbar:
+ py_binary = "python3" if py_3 else "python"
+ mh_command = ["bash", "./{}".format(bitbar_script)]
+ elif is_macosx and "macosx1014-64" in test["test-platform"]:
+ py_binary = "/usr/local/bin/{}".format("python3" if py_3 else "python2")
+ mh_command = [
+ py_binary,
+ "-u",
+ "mozharness/scripts/" + mozharness["script"],
+ ]
+ else:
+ # is_linux or is_macosx
+ py_binary = "/usr/bin/{}".format("python3" if py_3 else "python2")
+ mh_command = [
+ # Using /usr/bin/python2.7 rather than python2.7 because
+ # /usr/local/bin/python2.7 is broken on the mac workers.
+ # See bug #1547903.
+ py_binary,
+ "-u",
+ "mozharness/scripts/" + mozharness["script"],
+ ]
+
+ if py_3:
+ env["PYTHON"] = py_binary
+
+ for mh_config in mozharness["config"]:
+ cfg_path = "mozharness/configs/" + mh_config
+ if is_windows:
+ cfg_path = normpath(cfg_path)
+ mh_command.extend(["--cfg", cfg_path])
+ mh_command.extend(mozharness.get("extra-options", []))
+ if mozharness.get("download-symbols"):
+ if isinstance(mozharness["download-symbols"], text_type):
+ mh_command.extend(["--download-symbols", mozharness["download-symbols"]])
+ else:
+ mh_command.extend(["--download-symbols", "true"])
+ if mozharness.get("include-blob-upload-branch"):
+ mh_command.append("--blob-upload-branch=" + config.params["project"])
+
+ if test.get("test-manifests"):
+ env["MOZHARNESS_TEST_PATHS"] = six.ensure_text(
+ json.dumps({test["suite"]: test["test-manifests"]}, sort_keys=True)
+ )
+
+ # TODO: remove the need for run['chunked']
+ elif mozharness.get("chunked") or test["chunks"] > 1:
+ mh_command.append("--total-chunk={}".format(test["chunks"]))
+ mh_command.append("--this-chunk={}".format(test["this-chunk"]))
+
+ if config.params.is_try():
+ env["TRY_COMMIT_MSG"] = config.params["message"]
+
+ worker["mounts"] = [
+ {
+ "directory": "mozharness",
+ "content": {
+ "artifact": get_artifact_path(taskdesc, "mozharness.zip"),
+ "task-id": {"task-reference": "<build>"},
+ },
+ "format": "zip",
+ }
+ ]
+ if is_bitbar:
+ a_url = config.params.file_url(
+ "taskcluster/scripts/tester/{}".format(bitbar_script),
+ )
+ worker["mounts"] = [
+ {
+ "file": bitbar_script,
+ "content": {
+ "url": a_url,
+ },
+ }
+ ]
+
+ job["run"] = {
+ "tooltool-downloads": mozharness["tooltool-downloads"],
+ "checkout": test["checkout"],
+ "command": mh_command,
+ "using": "run-task",
+ }
+ if is_bitbar:
+ job["run"]["run-as-root"] = True
+ configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
diff --git a/taskcluster/taskgraph/transforms/job/python_test.py b/taskcluster/taskgraph/transforms/job/python_test.py
new file mode 100644
index 0000000000..b998102573
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/job/python_test.py
@@ -0,0 +1,48 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Support for running mach python-test tasks (via run-task)
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from six import text_type
+from taskgraph.transforms.job import run_job_using, configure_taskdesc_for_run
+from taskgraph.util.schema import Schema
+from voluptuous import Required, Optional
+
+python_test_schema = Schema(
+ {
+ Required("using"): "python-test",
+ # Python version to use
+ Required("python-version"): int,
+ # The subsuite to run
+ Required("subsuite"): text_type,
+ # Base work directory used to set up the task.
+ Optional("workdir"): text_type,
+ }
+)
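+
+# An illustrative (hypothetical) `run` stanza accepted by this schema; the
+# subsuite name is made up for example purposes:
+#
+#     run = {
+#         "using": "python-test",
+#         "python-version": 3,
+#         "subsuite": "mozbase",
+#     }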
+
+
+defaults = {
+ "python-version": 2,
+ "subsuite": "default",
+}
+
+
+@run_job_using(
+ "docker-worker", "python-test", schema=python_test_schema, defaults=defaults
+)
+@run_job_using(
+ "generic-worker", "python-test", schema=python_test_schema, defaults=defaults
+)
+def configure_python_test(config, job, taskdesc):
+ run = job["run"]
+ worker = job["worker"]
+
+ # defer to the mach implementation
+ run["mach"] = ("python-test --subsuite {subsuite} --run-slow").format(**run)
+ run["using"] = "mach"
+ del run["subsuite"]
+ configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
diff --git a/taskcluster/taskgraph/transforms/job/run_task.py b/taskcluster/taskgraph/transforms/job/run_task.py
new file mode 100644
index 0000000000..aab2bde7af
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/job/run_task.py
@@ -0,0 +1,238 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Support for running jobs that are invoked via the `run-task` script.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from six import text_type
+
+from mozpack import path
+
+from taskgraph.transforms.task import taskref_or_string
+from taskgraph.transforms.job import run_job_using
+from taskgraph.util.schema import Schema
+from taskgraph.transforms.job.common import add_tooltool, support_vcs_checkout
+from voluptuous import Any, Optional, Required
+
+run_task_schema = Schema(
+ {
+ Required("using"): "run-task",
+ # if true, add a cache at ~worker/.cache, which is where things like pip
+ # tend to hide their caches. This cache is never added for level-1 jobs.
+ # TODO Once bug 1526028 is fixed, this and 'use-caches' should be merged.
+ Required("cache-dotcache"): bool,
+ # Whether or not to use caches.
+ Optional("use-caches"): bool,
+ # if true (the default), perform a checkout of gecko on the worker
+ Required("checkout"): bool,
+ Optional(
+ "cwd",
+ description="Path to run command in. If a checkout is present, the path "
+ "to the checkout will be interpolated with the key `checkout`",
+ ): text_type,
+ # The sparse checkout profile to use. Value is the filename relative to the
+ # directory where sparse profiles are defined (build/sparse-profiles/).
+ Required("sparse-profile"): Any(text_type, None),
+ # if true, perform a checkout of a comm-central based branch inside the
+ # gecko checkout
+ Required("comm-checkout"): bool,
+ # The command arguments to pass to the `run-task` script, after the
+ # checkout arguments. If a list, it will be passed directly; otherwise
+ # it will be included in a single argument to `bash -cx`.
+ Required("command"): Any([taskref_or_string], taskref_or_string),
+ # Base work directory used to set up the task.
+ Optional("workdir"): text_type,
+ # If not false, tooltool downloads will be enabled via relengAPIProxy
+ # for either just public files, or all files. Only supported on
+ # docker-worker.
+ Required("tooltool-downloads"): Any(
+ False,
+ "public",
+ "internal",
+ ),
+ # Whether to run as root. (defaults to False)
+ Optional("run-as-root"): bool,
+ }
+)
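+
+# An illustrative (hypothetical) `run` stanza accepted by this schema; the
+# command and cwd are made up, and the worker defaults below fill in the
+# remaining required fields:
+#
+#     run = {
+#         "using": "run-task",
+#         "cwd": "{checkout}/testing",
+#         "command": ["./mach", "lint"],
+#     }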
+
+
+def common_setup(config, job, taskdesc, command):
+ run = job["run"]
+ if run["checkout"]:
+ support_vcs_checkout(config, job, taskdesc, sparse=bool(run["sparse-profile"]))
+ command.append(
+ "--gecko-checkout={}".format(taskdesc["worker"]["env"]["GECKO_PATH"])
+ )
+
+ if run["sparse-profile"]:
+ command.append(
+ "--gecko-sparse-profile=build/sparse-profiles/%s" % run["sparse-profile"]
+ )
+
+ taskdesc["worker"].setdefault("env", {})["MOZ_SCM_LEVEL"] = config.params["level"]
+
+
+worker_defaults = {
+ "cache-dotcache": False,
+ "checkout": True,
+ "comm-checkout": False,
+ "sparse-profile": None,
+ "tooltool-downloads": False,
+ "run-as-root": False,
+}
+
+
+def script_url(config, script):
+ return config.params.file_url(
+ "taskcluster/scripts/{}".format(script),
+ )
+
+
+@run_job_using(
+ "docker-worker", "run-task", schema=run_task_schema, defaults=worker_defaults
+)
+def docker_worker_run_task(config, job, taskdesc):
+ run = job["run"]
+ worker = taskdesc["worker"] = job["worker"]
+ command = ["/builds/worker/bin/run-task"]
+ common_setup(config, job, taskdesc, command)
+
+ if run["tooltool-downloads"]:
+ internal = run["tooltool-downloads"] == "internal"
+ add_tooltool(config, job, taskdesc, internal=internal)
+
+ if run.get("cache-dotcache"):
+ worker["caches"].append(
+ {
+ "type": "persistent",
+ "name": "{project}-dotcache".format(**config.params),
+ "mount-point": "{workdir}/.cache".format(**run),
+ "skip-untrusted": True,
+ }
+ )
+
+ run_command = run["command"]
+ run_cwd = run.get("cwd")
+ if run_cwd and run["checkout"]:
+ run_cwd = path.normpath(
+ run_cwd.format(checkout=taskdesc["worker"]["env"]["GECKO_PATH"])
+ )
+ elif run_cwd and "{checkout}" in run_cwd:
+ raise Exception(
+ "Found `{{checkout}}` interpolation in `cwd` for task {name} "
+ "but the task doesn't have a checkout: {cwd}".format(
+ cwd=run_cwd, name=job.get("name", job.get("label"))
+ )
+ )
+
+ # dict is for the case of `{'task-reference': text_type}`.
+ if isinstance(run_command, (text_type, dict)):
+ run_command = ["bash", "-cx", run_command]
+ if run["comm-checkout"]:
+ command.append(
+ "--comm-checkout={}/comm".format(taskdesc["worker"]["env"]["GECKO_PATH"])
+ )
+ command.append("--fetch-hgfingerprint")
+ if run["run-as-root"]:
+ command.extend(("--user", "root", "--group", "root"))
+ if run_cwd:
+ command.extend(("--task-cwd", run_cwd))
+ command.append("--")
+ command.extend(run_command)
+ worker["command"] = command
+
+
+@run_job_using(
+ "generic-worker", "run-task", schema=run_task_schema, defaults=worker_defaults
+)
+def generic_worker_run_task(config, job, taskdesc):
+ run = job["run"]
+ worker = taskdesc["worker"] = job["worker"]
+ is_win = worker["os"] == "windows"
+ is_mac = worker["os"] == "macosx"
+ is_bitbar = worker["os"] == "linux-bitbar"
+
+ if run["tooltool-downloads"]:
+ internal = run["tooltool-downloads"] == "internal"
+ add_tooltool(config, job, taskdesc, internal=internal)
+
+ if is_win:
+ command = ["C:/mozilla-build/python3/python3.exe", "run-task"]
+ elif is_mac:
+ command = ["/usr/local/bin/python3", "run-task"]
+ else:
+ command = ["./run-task"]
+
+ common_setup(config, job, taskdesc, command)
+
+ worker.setdefault("mounts", [])
+ if run.get("cache-dotcache"):
+ worker["mounts"].append(
+ {
+ "cache-name": "{project}-dotcache".format(**config.params),
+ "directory": "{workdir}/.cache".format(**run),
+ }
+ )
+ worker["mounts"].append(
+ {
+ "content": {
+ "url": script_url(config, "run-task"),
+ },
+ "file": "./run-task",
+ }
+ )
+ if worker.get("env", {}).get("MOZ_FETCHES"):
+ worker["mounts"].append(
+ {
+ "content": {
+ "url": script_url(config, "misc/fetch-content"),
+ },
+ "file": "./fetch-content",
+ }
+ )
+
+ run_command = run["command"]
+ run_cwd = run.get("cwd")
+ if run_cwd and run["checkout"]:
+ run_cwd = path.normpath(
+ run_cwd.format(checkout=taskdesc["worker"]["env"]["GECKO_PATH"])
+ )
+ elif run_cwd and "{checkout}" in run_cwd:
+ raise Exception(
+ "Found `{{checkout}}` interpolation in `cwd` for task {name} "
+ "but the task doesn't have a checkout: {cwd}".format(
+ cwd=run_cwd, name=job.get("name", job.get("label"))
+ )
+ )
+
+ if isinstance(run_command, text_type):
+ if is_win:
+ run_command = '"{}"'.format(run_command)
+ run_command = ["bash", "-cx", run_command]
+
+ if run["comm-checkout"]:
+ command.append(
+ "--comm-checkout={}/comm".format(taskdesc["worker"]["env"]["GECKO_PATH"])
+ )
+
+ if run["run-as-root"]:
+ command.extend(("--user", "root", "--group", "root"))
+ if run_cwd:
+ command.extend(("--task-cwd", run_cwd))
+ command.append("--")
+ if is_bitbar:
+ # Use the bitbar wrapper script which sets up the device and adb
+ # environment variables
+ command.append("/builds/taskcluster/script.py")
+ command.extend(run_command)
+
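+    # On Windows, generic-worker expects each command to be a single shell
+    # string; elsewhere it takes argv lists, and the mounted run-task file
+    # must be made executable before it can be invoked.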
+ if is_win:
+ worker["command"] = [" ".join(command)]
+ else:
+ worker["command"] = [
+ ["chmod", "+x", "run-task"],
+ command,
+ ]
diff --git a/taskcluster/taskgraph/transforms/job/spidermonkey.py b/taskcluster/taskgraph/transforms/job/spidermonkey.py
new file mode 100644
index 0000000000..82c9303db1
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/job/spidermonkey.py
@@ -0,0 +1,132 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Support for running spidermonkey jobs via dedicated scripts
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from six import text_type
+from taskgraph.util.schema import Schema
+from voluptuous import Required, Any, Optional
+
+from taskgraph.transforms.job import (
+ run_job_using,
+ configure_taskdesc_for_run,
+)
+from taskgraph.transforms.job.common import (
+ docker_worker_add_artifacts,
+ generic_worker_add_artifacts,
+)
+
+sm_run_schema = Schema(
+ {
+ Required("using"): Any(
+ "spidermonkey",
+ "spidermonkey-package",
+ "spidermonkey-mozjs-crate",
+ "spidermonkey-rust-bindings",
+ ),
+        # Exported to the task environment as SPIDERMONKEY_VARIANT and
+        # SPIDERMONKEY_PLATFORM.
+ Required("spidermonkey-variant"): text_type,
+ Optional("spidermonkey-platform"): text_type,
+ # Base work directory used to set up the task.
+ Optional("workdir"): text_type,
+ Required("tooltool-downloads"): Any(
+ False,
+ "public",
+ "internal",
+ ),
+ }
+)
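+
+# An illustrative (hypothetical) `run` stanza accepted by this schema; the
+# variant name is made up for example purposes:
+#
+#     run = {
+#         "using": "spidermonkey",
+#         "spidermonkey-variant": "plain",
+#         "tooltool-downloads": "public",
+#     }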
+
+
+@run_job_using("docker-worker", "spidermonkey", schema=sm_run_schema)
+@run_job_using("docker-worker", "spidermonkey-package", schema=sm_run_schema)
+@run_job_using("docker-worker", "spidermonkey-mozjs-crate", schema=sm_run_schema)
+@run_job_using("docker-worker", "spidermonkey-rust-bindings", schema=sm_run_schema)
+def docker_worker_spidermonkey(config, job, taskdesc):
+ run = job["run"]
+
+ worker = taskdesc["worker"] = job["worker"]
+ worker.setdefault("artifacts", [])
+
+ docker_worker_add_artifacts(config, job, taskdesc)
+
+ env = worker.setdefault("env", {})
+ env.update(
+ {
+ "MOZHARNESS_DISABLE": "true",
+ "SPIDERMONKEY_VARIANT": run.pop("spidermonkey-variant"),
+ "MOZ_BUILD_DATE": config.params["moz_build_date"],
+ "MOZ_SCM_LEVEL": config.params["level"],
+ }
+ )
+ if "spidermonkey-platform" in run:
+ env["SPIDERMONKEY_PLATFORM"] = run.pop("spidermonkey-platform")
+
+ script = "build-sm.sh"
+ if run["using"] == "spidermonkey-package":
+ script = "build-sm-package.sh"
+ elif run["using"] == "spidermonkey-mozjs-crate":
+ script = "build-sm-mozjs-crate.sh"
+ elif run["using"] == "spidermonkey-rust-bindings":
+ script = "build-sm-rust-bindings.sh"
+
+ run["using"] = "run-task"
+ run["cwd"] = run["workdir"]
+ run["command"] = [
+ "./checkouts/gecko/taskcluster/scripts/builder/{script}".format(script=script)
+ ]
+
+ configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
+
+
+@run_job_using("generic-worker", "spidermonkey", schema=sm_run_schema)
+def generic_worker_spidermonkey(config, job, taskdesc):
+ assert job["worker"]["os"] == "windows", "only supports windows right now"
+
+ run = job["run"]
+
+ worker = taskdesc["worker"] = job["worker"]
+
+ generic_worker_add_artifacts(config, job, taskdesc)
+
+ env = worker.setdefault("env", {})
+ env.update(
+ {
+ "MOZHARNESS_DISABLE": "true",
+ "SPIDERMONKEY_VARIANT": run.pop("spidermonkey-variant"),
+ "MOZ_BUILD_DATE": config.params["moz_build_date"],
+ "MOZ_SCM_LEVEL": config.params["level"],
+ "SCCACHE_DISABLE": "1",
+ "WORK": ".", # Override the defaults in build scripts
+ "GECKO_PATH": "./src", # with values suiteable for windows generic worker
+ "UPLOAD_DIR": "./public/build",
+ }
+ )
+ if "spidermonkey-platform" in run:
+ env["SPIDERMONKEY_PLATFORM"] = run.pop("spidermonkey-platform")
+
+ script = "build-sm.sh"
+ if run["using"] == "spidermonkey-package":
+ script = "build-sm-package.sh"
+ # Don't allow untested configurations yet
+ raise Exception("spidermonkey-package is not a supported configuration")
+ elif run["using"] == "spidermonkey-mozjs-crate":
+ script = "build-sm-mozjs-crate.sh"
+ # Don't allow untested configurations yet
+ raise Exception("spidermonkey-mozjs-crate is not a supported configuration")
+ elif run["using"] == "spidermonkey-rust-bindings":
+ script = "build-sm-rust-bindings.sh"
+ # Don't allow untested configurations yet
+ raise Exception("spidermonkey-rust-bindings is not a supported configuration")
+
+ run["using"] = "run-task"
+ run["command"] = [
+ "c:\\mozilla-build\\msys\\bin\\bash.exe " # string concat
+ '"./src/taskcluster/scripts/builder/%s"' % script
+ ]
+
+ configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
diff --git a/taskcluster/taskgraph/transforms/job/toolchain.py b/taskcluster/taskgraph/transforms/job/toolchain.py
new file mode 100644
index 0000000000..4197517898
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/job/toolchain.py
@@ -0,0 +1,234 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Support for running toolchain-building jobs via dedicated scripts
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from mozbuild.shellutil import quote as shell_quote
+
+from six import text_type
+from taskgraph.util.schema import Schema
+from voluptuous import Optional, Required, Any
+
+from taskgraph.transforms.job import (
+ configure_taskdesc_for_run,
+ run_job_using,
+)
+from taskgraph.transforms.job.common import (
+ docker_worker_add_artifacts,
+ generic_worker_add_artifacts,
+)
+from taskgraph.util.hash import hash_paths
+from taskgraph.util.attributes import RELEASE_PROJECTS
+from taskgraph import GECKO
+import taskgraph
+
+
+CACHE_TYPE = "toolchains.v3"
+
+toolchain_run_schema = Schema(
+ {
+ Required("using"): "toolchain-script",
+ # The script (in taskcluster/scripts/misc) to run.
+ # Python scripts are invoked with `mach python` so vendored libraries
+ # are available.
+ Required("script"): text_type,
+ # Arguments to pass to the script.
+ Optional("arguments"): [text_type],
+ # If not false, tooltool downloads will be enabled via relengAPIProxy
+ # for either just public files, or all files. Not supported on Windows
+ Required("tooltool-downloads"): Any(
+ False,
+ "public",
+ "internal",
+ ),
+        # Sparse checkout profile to pass to `run-task`. If given, a filename
+        # in `build/sparse-profiles`. Defaults to "toolchain-build", i.e.
+        # `build/sparse-profiles/toolchain-build`. If `None`, instructs
+        # `run-task` not to use a sparse profile at all.
+ Required("sparse-profile"): Any(text_type, None),
+ # Paths/patterns pointing to files that influence the outcome of a
+ # toolchain build.
+ Optional("resources"): [text_type],
+ # Path to the artifact produced by the toolchain job
+ Required("toolchain-artifact"): text_type,
+ Optional(
+ "toolchain-alias",
+ description="An alias that can be used instead of the real toolchain job name in "
+ "fetch stanzas for jobs.",
+ ): text_type,
+ # Base work directory used to set up the task.
+ Optional("workdir"): text_type,
+ }
+)
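+
+# An illustrative (hypothetical) `run` stanza accepted by this schema; the
+# script, artifact, and argument names are made up for example purposes:
+#
+#     run = {
+#         "using": "toolchain-script",
+#         "script": "build-something.sh",
+#         "toolchain-artifact": "public/build/something.tar.xz",
+#         "arguments": ["--target", "x86_64-unknown-linux-gnu"],
+#     }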
+
+
+def get_digest_data(config, run, taskdesc):
+ files = list(run.pop("resources", []))
+ # The script
+ files.append("taskcluster/scripts/misc/{}".format(run["script"]))
+ # Tooltool manifest if any is defined:
+ tooltool_manifest = taskdesc["worker"]["env"].get("TOOLTOOL_MANIFEST")
+ if tooltool_manifest:
+ files.append(tooltool_manifest)
+
+ # Accumulate dependency hashes for index generation.
+ data = [hash_paths(GECKO, files)]
+
+ data.append(taskdesc["attributes"]["toolchain-artifact"])
+
+ # If the task uses an in-tree docker image, we want it to influence
+ # the index path as well. Ideally, the content of the docker image itself
+ # should have an influence, but at the moment, we can't get that
+    # information here. So use the docker image name as a proxy. Few changes to
+    # docker images actually affect the resulting toolchain artifact, so we rely
+    # on the important ones being accompanied by a docker image name change.
+ image = taskdesc["worker"].get("docker-image", {}).get("in-tree")
+ if image:
+ data.append(image)
+
+ # Likewise script arguments should influence the index.
+ args = run.get("arguments")
+ if args:
+ data.extend(args)
+
+ if taskdesc["attributes"].get("rebuild-on-release"):
+ # Add whether this is a release branch or not
+ data.append(str(config.params["project"] in RELEASE_PROJECTS))
+ return data
+
+
+toolchain_defaults = {
+ "tooltool-downloads": False,
+ "sparse-profile": "toolchain-build",
+}
+
+
+@run_job_using(
+ "docker-worker",
+ "toolchain-script",
+ schema=toolchain_run_schema,
+ defaults=toolchain_defaults,
+)
+def docker_worker_toolchain(config, job, taskdesc):
+ run = job["run"]
+
+ worker = taskdesc["worker"] = job["worker"]
+ worker["chain-of-trust"] = True
+
+ # If the task doesn't have a docker-image, set a default
+ worker.setdefault("docker-image", {"in-tree": "deb8-toolchain-build"})
+
+ # Allow the job to specify where artifacts come from, but add
+ # public/build if it's not there already.
+ artifacts = worker.setdefault("artifacts", [])
+ if not artifacts:
+ docker_worker_add_artifacts(config, job, taskdesc)
+
+ # Toolchain checkouts don't live under {workdir}/checkouts
+ workspace = "{workdir}/workspace/build".format(**run)
+ gecko_path = "{}/src".format(workspace)
+
+ env = worker.setdefault("env", {})
+ env.update(
+ {
+ "MOZ_BUILD_DATE": config.params["moz_build_date"],
+ "MOZ_SCM_LEVEL": config.params["level"],
+ "GECKO_PATH": gecko_path,
+ }
+ )
+
+ attributes = taskdesc.setdefault("attributes", {})
+ attributes["toolchain-artifact"] = run.pop("toolchain-artifact")
+ if "toolchain-alias" in run:
+ attributes["toolchain-alias"] = run.pop("toolchain-alias")
+
+ if not taskgraph.fast:
+ name = taskdesc["label"].replace("{}-".format(config.kind), "", 1)
+ taskdesc["cache"] = {
+ "type": CACHE_TYPE,
+ "name": name,
+ "digest-data": get_digest_data(config, run, taskdesc),
+ }
+
+ run["using"] = "run-task"
+ run["cwd"] = run["workdir"]
+ run["command"] = [
+ "workspace/build/src/taskcluster/scripts/misc/{}".format(run.pop("script"))
+ ] + run.pop("arguments", [])
+
+ configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
+
+
+@run_job_using(
+ "generic-worker",
+ "toolchain-script",
+ schema=toolchain_run_schema,
+ defaults=toolchain_defaults,
+)
+def generic_worker_toolchain(config, job, taskdesc):
+ run = job["run"]
+
+ worker = taskdesc["worker"] = job["worker"]
+ worker["chain-of-trust"] = True
+
+ # Allow the job to specify where artifacts come from, but add
+ # public/build if it's not there already.
+ artifacts = worker.setdefault("artifacts", [])
+ if not artifacts:
+ generic_worker_add_artifacts(config, job, taskdesc)
+
+ if job["worker"]["os"] == "windows":
+        # There were no caches on generic-worker before bug 1519472, and they cause
+        # all sorts of problems with Windows toolchain tasks; disable them until
+        # the tasks are ready.
+ run["use-caches"] = False
+
+ env = worker.setdefault("env", {})
+ env.update(
+ {
+ "MOZ_BUILD_DATE": config.params["moz_build_date"],
+ "MOZ_SCM_LEVEL": config.params["level"],
+ }
+ )
+
+    # Python scripts would need to be invoked via `mach` so vendored libraries
+    # are available, which isn't implemented for generic-worker yet.
+ if run["script"].endswith(".py"):
+ raise NotImplementedError(
+ "Python toolchain scripts aren't supported on generic-worker"
+ )
+
+ attributes = taskdesc.setdefault("attributes", {})
+ attributes["toolchain-artifact"] = run.pop("toolchain-artifact")
+ if "toolchain-alias" in run:
+ attributes["toolchain-alias"] = run.pop("toolchain-alias")
+
+ if not taskgraph.fast:
+ name = taskdesc["label"].replace("{}-".format(config.kind), "", 1)
+ taskdesc["cache"] = {
+ "type": CACHE_TYPE,
+ "name": name,
+ "digest-data": get_digest_data(config, run, taskdesc),
+ }
+
+ run["using"] = "run-task"
+
+ args = run.pop("arguments", "")
+ if args:
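+        # mozbuild's shell_quote renders the argument list as one shell-safe string.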
+ args = " " + shell_quote(*args)
+
+ if job["worker"]["os"] == "windows":
+ gecko_path = "%GECKO_PATH%"
+ else:
+ gecko_path = "$GECKO_PATH"
+
+ run["command"] = "{}/taskcluster/scripts/misc/{}{}".format(
+ gecko_path, run.pop("script"), args
+ )
+
+ configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])