Diffstat (limited to 'taskcluster/gecko_taskgraph')
-rw-r--r--  taskcluster/gecko_taskgraph/__init__.py  11
-rw-r--r--  taskcluster/gecko_taskgraph/config.py  9
-rw-r--r--  taskcluster/gecko_taskgraph/decision.py  13
-rw-r--r--  taskcluster/gecko_taskgraph/main.py  40
-rw-r--r--  taskcluster/gecko_taskgraph/manifests/firefox_candidates.yml  2
-rw-r--r--  taskcluster/gecko_taskgraph/manifests/firefox_nightly.yml  1
-rw-r--r--  taskcluster/gecko_taskgraph/optimize/strategies.py  49
-rw-r--r--  taskcluster/gecko_taskgraph/parameters.py  3
-rw-r--r--  taskcluster/gecko_taskgraph/target_tasks.py  143
-rw-r--r--  taskcluster/gecko_taskgraph/test/conftest.py  24
-rw-r--r--  taskcluster/gecko_taskgraph/test/python.toml  2
-rw-r--r--  taskcluster/gecko_taskgraph/test/test_decision.py  306
-rw-r--r--  taskcluster/gecko_taskgraph/test/test_transforms_build_schedules.py  56
-rw-r--r--  taskcluster/gecko_taskgraph/test/test_transforms_test.py  8
-rw-r--r--  taskcluster/gecko_taskgraph/test/test_util_backstop.py  44
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/build_attrs.py  16
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/build_schedules.py  48
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/condprof.py  8
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/geckodriver_mac_notarization.py  6
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/job/__init__.py  15
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/job/mach.py  2
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/job/mozharness.py  2
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/release_deps.py  11
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/release_notifications.py  3
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/signing.py  26
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/task.py  49
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/test/__init__.py  14
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/test/chunk.py  3
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/test/other.py  81
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/test/raptor.py  1
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/test/worker.py  17
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/test_apk.py  33
-rw-r--r--  taskcluster/gecko_taskgraph/transforms/update_verify_config.py  2
-rw-r--r--  taskcluster/gecko_taskgraph/util/backstop.py  28
-rw-r--r--  taskcluster/gecko_taskgraph/util/chunking.py  2
-rw-r--r--  taskcluster/gecko_taskgraph/util/hg.py  2
-rw-r--r--  taskcluster/gecko_taskgraph/util/perfile.py  13
37 files changed, 715 insertions, 378 deletions
diff --git a/taskcluster/gecko_taskgraph/__init__.py b/taskcluster/gecko_taskgraph/__init__.py
index f1de1e9120..c169eae023 100644
--- a/taskcluster/gecko_taskgraph/__init__.py
+++ b/taskcluster/gecko_taskgraph/__init__.py
@@ -51,12 +51,23 @@ def register(graph_config):
Args:
graph_config: The graph configuration object.
"""
+ import android_taskgraph
from taskgraph import generator
+ # TODO: Remove along with
+ # `gecko_taskgraph.optimize.strategies.SkipUnlessChanged`
+ # (see comment over there)
+ from taskgraph.optimize.base import registry
+
+ del registry["skip-unless-changed"]
+
from gecko_taskgraph import ( # noqa: trigger target task method registration
morph, # noqa: trigger morph registration
target_tasks,
)
+
+ android_taskgraph.register(graph_config)
+
from gecko_taskgraph.parameters import register_parameters
from gecko_taskgraph.util import dependencies # noqa: trigger group_by registration
from gecko_taskgraph.util.verify import verifications
diff --git a/taskcluster/gecko_taskgraph/config.py b/taskcluster/gecko_taskgraph/config.py
index 5045963b46..7b6237f9fb 100644
--- a/taskcluster/gecko_taskgraph/config.py
+++ b/taskcluster/gecko_taskgraph/config.py
@@ -105,13 +105,8 @@ graph_config_schema = Schema(
}
},
},
- Required("mac-notarization"): {
- Required("mac-entitlements"): optionally_keyed_by(
- "platform", "release-level", str
- ),
- Required("mac-requirements"): optionally_keyed_by("platform", str),
- },
Required("mac-signing"): {
+ Required("mac-requirements"): optionally_keyed_by("platform", str),
Required("hardened-sign-config"): optionally_keyed_by(
"hardened-signing-type",
[
@@ -128,7 +123,7 @@ graph_config_schema = Schema(
Required("globs"): [str],
}
],
- )
+ ),
},
Required("taskgraph"): {
Optional(
diff --git a/taskcluster/gecko_taskgraph/decision.py b/taskcluster/gecko_taskgraph/decision.py
index e0bc9e3ca8..9fd8a5b5c1 100644
--- a/taskcluster/gecko_taskgraph/decision.py
+++ b/taskcluster/gecko_taskgraph/decision.py
@@ -32,6 +32,7 @@ from taskgraph.util.yaml import load_yaml
from . import GECKO
from .actions import render_actions_json
+from .files_changed import get_changed_files
from .parameters import get_app_version, get_version
from .try_option_syntax import parse_message
from .util.backstop import BACKSTOP_INDEX, is_backstop
@@ -308,6 +309,9 @@ def get_decision_parameters(graph_config, options):
parameters["hg_branch"] = get_hg_revision_branch(
GECKO, revision=parameters["head_rev"]
)
+ parameters["files_changed"] = sorted(
+ get_changed_files(parameters["head_repository"], parameters["head_rev"])
+ )
parameters["next_version"] = None
parameters["optimize_strategies"] = None
parameters["optimize_target_tasks"] = True
@@ -448,14 +452,17 @@ def set_try_config(parameters, task_config_file):
def set_decision_indexes(decision_task_id, params, graph_config):
index_paths = []
if params["backstop"]:
- index_paths.append(BACKSTOP_INDEX)
+ # When two Decision tasks run at nearly the same time, it's possible
+ # they both end up being backstops if the second checks the backstop
+ # index before the first inserts it. Insert this index first to reduce
+ # the chances of that happening.
+ index_paths.insert(0, BACKSTOP_INDEX)
subs = params.copy()
subs["trust-domain"] = graph_config["trust-domain"]
- index_paths = [i.format(**subs) for i in index_paths]
for index_path in index_paths:
- insert_index(index_path, decision_task_id, use_proxy=True)
+ insert_index(index_path.format(**subs), decision_task_id, use_proxy=True)
def write_artifact(filename, data):
diff --git a/taskcluster/gecko_taskgraph/main.py b/taskcluster/gecko_taskgraph/main.py
index e9a353f246..e261f26c80 100644
--- a/taskcluster/gecko_taskgraph/main.py
+++ b/taskcluster/gecko_taskgraph/main.py
@@ -21,6 +21,9 @@ from typing import Any, List
import appdirs
import yaml
+from gecko_taskgraph import GECKO
+from gecko_taskgraph.files_changed import get_locally_changed_files
+
Command = namedtuple("Command", ["func", "args", "kwargs", "defaults"])
commands = {}
@@ -130,7 +133,7 @@ def get_taskgraph_generator(root, parameters):
return TaskGraphGenerator(root_dir=root, parameters=parameters)
-def format_taskgraph(options, parameters, logfile=None):
+def format_taskgraph(options, parameters, overrides, logfile=None):
import taskgraph
from taskgraph.parameters import parameters_loader
@@ -148,7 +151,7 @@ def format_taskgraph(options, parameters, logfile=None):
if isinstance(parameters, str):
parameters = parameters_loader(
parameters,
- overrides={"target-kinds": options.get("target_kinds")},
+ overrides=overrides,
strict=False,
)
@@ -182,7 +185,7 @@ def dump_output(out, path=None, params_spec=None):
print(out + "\n", file=fh)
-def generate_taskgraph(options, parameters, logdir):
+def generate_taskgraph(options, parameters, overrides, logdir):
from taskgraph.parameters import Parameters
def logfile(spec):
@@ -198,14 +201,16 @@ def generate_taskgraph(options, parameters, logdir):
# tracebacks a little more readable and avoids additional process overhead.
if len(parameters) == 1:
spec = parameters[0]
- out = format_taskgraph(options, spec, logfile(spec))
+ out = format_taskgraph(options, spec, overrides, logfile(spec))
dump_output(out, options["output_file"])
return
futures = {}
with ProcessPoolExecutor(max_workers=options["max_workers"]) as executor:
for spec in parameters:
- f = executor.submit(format_taskgraph, options, spec, logfile(spec))
+ f = executor.submit(
+ format_taskgraph, options, spec, overrides, logfile(spec)
+ )
futures[f] = spec
for future in as_completed(futures):
@@ -299,6 +304,15 @@ def generate_taskgraph(options, parameters, logdir):
"specified).",
)
@argument(
+ "--force-local-files-changed",
+ default=False,
+ action="store_true",
+ help="Compute the 'files-changed' parameter from local version control, "
+ "even when explicitly using a parameter set that already has it defined. "
+ "Note that this is already the default behaviour when no parameters are "
+ "specified.",
+)
+@argument(
"--no-optimize",
dest="optimize",
action="store_false",
@@ -400,15 +414,21 @@ def show_taskgraph(options):
)
print(f"Generating {options['graph_attr']} @ {cur_ref}", file=sys.stderr)
+ overrides = {
+ "target-kinds": options.get("target_kinds"),
+ }
parameters: List[Any[str, Parameters]] = options.pop("parameters")
if not parameters:
- overrides = {
- "target-kinds": options.get("target_kinds"),
- }
parameters = [
parameters_loader(None, strict=False, overrides=overrides)
] # will use default values
+ # This is the default behaviour anyway, so no need to re-compute.
+ options["force_local_files_changed"] = False
+
+ elif options["force_local_files_changed"]:
+ overrides["files-changed"] = sorted(get_locally_changed_files(GECKO))
+
for param in parameters[:]:
if isinstance(param, str) and os.path.isdir(param):
parameters.remove(param)
@@ -434,7 +454,7 @@ def show_taskgraph(options):
# to setup its `mach` based logging.
setup_logging()
- generate_taskgraph(options, parameters, logdir)
+ generate_taskgraph(options, parameters, overrides, logdir)
if options["diff"]:
assert diffdir is not None
@@ -464,7 +484,7 @@ def show_taskgraph(options):
diffdir, f"{options['graph_attr']}_{base_ref}"
)
print(f"Generating {options['graph_attr']} @ {base_ref}", file=sys.stderr)
- generate_taskgraph(options, parameters, logdir)
+ generate_taskgraph(options, parameters, overrides, logdir)
finally:
repo.update(cur_ref)
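
A minimal sketch (not part of the patch) of what --force-local-files-changed does when an explicit parameter set is given: a files-changed override is computed from the local checkout and layered on top of whatever the parameter file contains, mirroring the overrides dict built in show_taskgraph() above. The parameter file name below is hypothetical.

    from taskgraph.parameters import parameters_loader

    from gecko_taskgraph import GECKO
    from gecko_taskgraph.files_changed import get_locally_changed_files

    overrides = {"target-kinds": ["build"]}
    # Only needed when a parameter set is passed explicitly; the default
    # parameters already compute files_changed from local version control.
    overrides["files-changed"] = sorted(get_locally_changed_files(GECKO))

    # Every other key still comes from the given parameter set.
    parameters = parameters_loader("parameters.yml", strict=False, overrides=overrides)
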
diff --git a/taskcluster/gecko_taskgraph/manifests/firefox_candidates.yml b/taskcluster/gecko_taskgraph/manifests/firefox_candidates.yml
index fb58439509..af0ea79aa4 100644
--- a/taskcluster/gecko_taskgraph/manifests/firefox_candidates.yml
+++ b/taskcluster/gecko_taskgraph/manifests/firefox_candidates.yml
@@ -391,6 +391,8 @@ mapping:
- win32-shippable
- win64-devedition
- win32-devedition
+ - win64-aarch64-shippable
+ - win64-aarch64-devedition
locale_prefix: 'multi/'
pretty_name: Firefox Setup ${version}.msix
checksums_path: ${path_platform}/multi/Firefox Setup ${version}.msix
diff --git a/taskcluster/gecko_taskgraph/manifests/firefox_nightly.yml b/taskcluster/gecko_taskgraph/manifests/firefox_nightly.yml
index d413ede3bd..421caba08f 100644
--- a/taskcluster/gecko_taskgraph/manifests/firefox_nightly.yml
+++ b/taskcluster/gecko_taskgraph/manifests/firefox_nightly.yml
@@ -465,6 +465,7 @@ mapping:
- repackage-signing-shippable-l10n-msix
only_for_platforms:
- win64-shippable
+ - win64-aarch64-shippable
- win32-shippable
pretty_name: firefox-${version}.multi.${filename_platform}.installer.msix
checksums_path: firefox-${version}.multi.${filename_platform}.installer.msix
diff --git a/taskcluster/gecko_taskgraph/optimize/strategies.py b/taskcluster/gecko_taskgraph/optimize/strategies.py
index 4d0d23a5ac..ffc395385b 100644
--- a/taskcluster/gecko_taskgraph/optimize/strategies.py
+++ b/taskcluster/gecko_taskgraph/optimize/strategies.py
@@ -9,8 +9,7 @@ import mozpack.path as mozpath
from mozbuild.base import MozbuildObject
from mozbuild.util import memoize
from taskgraph.optimize.base import OptimizationStrategy, register_strategy
-
-from gecko_taskgraph import files_changed
+from taskgraph.util.path import match as match_path
logger = logging.getLogger(__name__)
@@ -18,9 +17,7 @@ logger = logging.getLogger(__name__)
@register_strategy("skip-unless-schedules")
class SkipUnlessSchedules(OptimizationStrategy):
@memoize
- def scheduled_by_push(self, repository, revision):
- changed_files = files_changed.get_changed_files(repository, revision)
-
+ def scheduled_by_push(self, files_changed):
mbo = MozbuildObject.from_environment()
# the decision task has a sparse checkout, so, mozbuild_reader will use
# a MercurialRevisionFinder with revision '.', which should be the same
@@ -28,7 +25,7 @@ class SkipUnlessSchedules(OptimizationStrategy):
rdr = mbo.mozbuild_reader(config_mode="empty")
components = set()
- for p, m in rdr.files_info(changed_files).items():
+ for p, m in rdr.files_info(files_changed).items():
components |= set(m["SCHEDULES"].components)
return components
@@ -37,9 +34,7 @@ class SkipUnlessSchedules(OptimizationStrategy):
if params.get("pushlog_id") == -1:
return False
- scheduled = self.scheduled_by_push(
- params["head_repository"], params["head_rev"]
- )
+ scheduled = self.scheduled_by_push(frozenset(params["files_changed"]))
conditions = set(conditions)
# if *any* of the condition components are scheduled, do not optimize
if conditions & scheduled:
@@ -55,8 +50,8 @@ class SkipUnlessHasRelevantTests(OptimizationStrategy):
"""
@memoize
- def get_changed_dirs(self, repo, rev):
- changed = map(mozpath.dirname, files_changed.get_changed_files(repo, rev))
+ def get_changed_dirs(self, files_changed):
+ changed = map(mozpath.dirname, files_changed)
# Filter out empty directories (from files modified in the root).
# Otherwise all tasks would be scheduled.
return {d for d in changed if d}
@@ -65,7 +60,7 @@ class SkipUnlessHasRelevantTests(OptimizationStrategy):
if not task.attributes.get("test_manifests"):
return True
- for d in self.get_changed_dirs(params["head_repository"], params["head_rev"]):
+ for d in self.get_changed_dirs(frozenset(params["files_changed"])):
for t in task.attributes["test_manifests"]:
if t.startswith(d):
logger.debug(
@@ -75,3 +70,33 @@ class SkipUnlessHasRelevantTests(OptimizationStrategy):
)
return False
return True
+
+
+# TODO: This overwrites upstream Taskgraph's `skip-unless-changed`
+# optimization. Once the firefox-android migration is landed and we upgrade
+# upstream Taskgraph to a version that doesn't call files_changed.check`, this
+# class can be deleted. Also remove the `taskgraph.optimize.base.registry` tweak
+# in `gecko_taskgraph.register` at the same time.
+@register_strategy("skip-unless-changed")
+class SkipUnlessChanged(OptimizationStrategy):
+ def check(self, files_changed, patterns):
+ for pattern in patterns:
+ for path in files_changed:
+ if match_path(path, pattern):
+ return True
+ return False
+
+ def should_remove_task(self, task, params, file_patterns):
+ # pushlog_id == -1 - this is the case when run from a cron.yml job or on a git repository
+ if params.get("repository_type") == "hg" and params.get("pushlog_id") == -1:
+ return False
+
+ changed = self.check(params["files_changed"], file_patterns)
+ if not changed:
+ logger.debug(
+ 'no files found matching a pattern in `skip-unless-changed` for "{}"'.format(
+ task.label
+ )
+ )
+ return True
+ return False
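
For illustration, a minimal sketch (not part of the patch) of how the reimplemented skip-unless-changed strategy decides whether a task survives, using hypothetical changed files and patterns; match_path is the same helper the class above imports from upstream Taskgraph.

    from taskgraph.util.path import match as match_path

    files_changed = ["taskcluster/ci/test/kind.yml", "browser/base/content/browser.js"]
    file_patterns = ["taskcluster/**", "python/mozbuild/**"]

    # The task is kept (not optimized away) as soon as any changed file matches
    # any of its `skip-unless-changed` patterns; otherwise it is removed.
    keep = any(
        match_path(path, pattern)
        for pattern in file_patterns
        for path in files_changed
    )
    print(keep)  # True here: "taskcluster/ci/test/kind.yml" matches "taskcluster/**"

The other strategies in this file now pass frozenset(params["files_changed"]) into their memoized helpers because @memoize caches on its arguments and therefore needs them to be hashable.
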
diff --git a/taskcluster/gecko_taskgraph/parameters.py b/taskcluster/gecko_taskgraph/parameters.py
index 7e3de1372f..c9cca8acf1 100644
--- a/taskcluster/gecko_taskgraph/parameters.py
+++ b/taskcluster/gecko_taskgraph/parameters.py
@@ -9,6 +9,7 @@ from taskgraph.parameters import extend_parameters_schema
from voluptuous import Any, Optional, Required
from gecko_taskgraph import GECKO
+from gecko_taskgraph.files_changed import get_locally_changed_files
logger = logging.getLogger(__name__)
@@ -18,6 +19,7 @@ gecko_parameters_schema = {
Required("backstop"): bool,
Required("build_number"): int,
Required("enable_always_target"): Any(bool, [str]),
+ Required("files_changed"): [str],
Required("hg_branch"): str,
Required("message"): str,
Required("next_version"): Any(None, str),
@@ -107,6 +109,7 @@ def get_defaults(repo_root=None):
"base_repository": "https://hg.mozilla.org/mozilla-unified",
"build_number": 1,
"enable_always_target": ["docker-image"],
+ "files_changed": sorted(get_locally_changed_files(repo_root)),
"head_repository": "https://hg.mozilla.org/mozilla-central",
"hg_branch": "default",
"message": "",
diff --git a/taskcluster/gecko_taskgraph/target_tasks.py b/taskcluster/gecko_taskgraph/target_tasks.py
index fcbfab4e17..e004cfb3e2 100644
--- a/taskcluster/gecko_taskgraph/target_tasks.py
+++ b/taskcluster/gecko_taskgraph/target_tasks.py
@@ -37,7 +37,7 @@ UNCOMMON_TRY_TASK_LABELS = [
r"android-geckoview-docs",
r"android-hw",
# Windows tasks
- r"windows10-64-ref-hw",
+ r"windows11-64-2009-hw-ref",
r"windows10-aarch64-qr",
# Linux tasks
r"linux-", # hide all linux32 tasks by default - bug 1599197
@@ -52,6 +52,9 @@ UNCOMMON_TRY_TASK_LABELS = [
# Hide shippable versions of tests we have opt versions of because the non-shippable
# versions are faster to run. This is mostly perf tests.
r"-shippable(?!.*(awsy|browsertime|marionette-headless|mochitest-devtools-chrome-fis|raptor|talos|web-platform-tests-wdspec-headless|mochitest-plain-headless))", # noqa - too long
+ r"nightly-simulation",
+ # Can't actually run on try
+ r"notarization",
]
@@ -622,12 +625,6 @@ def target_tasks_promote_desktop(full_task_graph, parameters, graph_config):
mozilla_{beta,release} tasks, plus l10n, beetmover, balrog, etc."""
def filter(task):
- # Bug 1758507 - geckoview ships in the promote phase
- if not parameters["release_type"].startswith("esr") and is_geckoview(
- task, parameters
- ):
- return True
-
if task.attributes.get("shipping_product") != parameters["release_product"]:
return False
@@ -645,14 +642,6 @@ def target_tasks_promote_desktop(full_task_graph, parameters, graph_config):
return [l for l, t in full_task_graph.tasks.items() if filter(t)]
-def is_geckoview(task, parameters):
- return (
- task.attributes.get("shipping_product") == "fennec"
- and task.kind in ("beetmover-geckoview", "upload-symbols")
- and parameters["release_product"] == "firefox"
- )
-
-
@_target_task("push_desktop")
def target_tasks_push_desktop(full_task_graph, parameters, graph_config):
"""Select the set of tasks required to push a build of desktop to cdns.
@@ -771,35 +760,6 @@ def target_tasks_kaios(full_task_graph, parameters, graph_config):
return [l for l, t in full_task_graph.tasks.items() if filter(t)]
-@_target_task("ship_geckoview")
-def target_tasks_ship_geckoview(full_task_graph, parameters, graph_config):
- """Select the set of tasks required to ship geckoview nightly. The
- nightly build process involves a pipeline of builds and an upload to
- maven.mozilla.org."""
- index_path = (
- f"{graph_config['trust-domain']}.v2.{parameters['project']}.revision."
- f"{parameters['head_rev']}.taskgraph.decision-ship-geckoview"
- )
- if os.environ.get("MOZ_AUTOMATION") and retry(
- index_exists,
- args=(index_path,),
- kwargs={
- "reason": "to avoid triggering multiple nightlies off the same revision",
- },
- ):
- return []
-
- def filter(task):
- # XXX Starting 69, we don't ship Fennec Nightly anymore. We just want geckoview to be
- # uploaded
- return task.attributes.get("shipping_product") == "fennec" and task.kind in (
- "beetmover-geckoview",
- "upload-symbols",
- )
-
- return [l for l, t in full_task_graph.tasks.items() if filter(t)]
-
-
@_target_task("custom-car_perf_testing")
def target_tasks_custom_car_perf_testing(full_task_graph, parameters, graph_config):
"""Select tasks required for running daily performance tests for custom chromium-as-release."""
@@ -844,6 +804,9 @@ def target_tasks_general_perf_testing(full_task_graph, parameters, graph_config)
if "tp6-bench" in try_name:
return False
+ if "tp7" in try_name:
+ return False
+
# Bug 1867669 - Temporarily disable all live site tests
if "live" in try_name and "sheriffed" not in try_name:
return False
@@ -856,11 +819,7 @@ def target_tasks_general_perf_testing(full_task_graph, parameters, graph_config)
if "tp6" in try_name and "essential" not in try_name:
return False
return True
- if "chromium" in try_name:
- if "tp6" in try_name and "essential" not in try_name:
- return False
- return True
- # chromium-as-release has it's own cron
+ # chromium-as-release has its own cron
if "custom-car" in try_name:
return False
if "-live" in try_name:
@@ -920,8 +879,13 @@ def target_tasks_general_perf_testing(full_task_graph, parameters, graph_config)
# Don't run android CaR sp tests as we already have a cron for this.
if "m-car" in try_name:
return False
+ if "fenix" in try_name:
+ return False
if "speedometer" in try_name:
return True
+ if "motionmark" in try_name and "1-3" in try_name:
+ if "chrome-m" in try_name:
+ return True
return False
return [l for l, t in full_task_graph.tasks.items() if filter(t)]
@@ -976,7 +940,7 @@ def target_tasks_nightly_linux(full_task_graph, parameters, graph_config):
nightly build process involves a pipeline of builds, signing,
and, eventually, uploading the tasks to balrog."""
filter = make_desktop_nightly_filter(
- {"linux64-shippable", "linux-shippable", "linux-aarch64-shippable"}
+ {"linux64-shippable", "linux-shippable", "linux64-aarch64-shippable"}
)
return [l for l, t in full_task_graph.tasks.items() if filter(t, parameters)]
@@ -1078,6 +1042,28 @@ def target_tasks_nightly_desktop(full_task_graph, parameters, graph_config):
)
+@_target_task("nightly_all")
+def target_tasks_nightly_all(full_task_graph, parameters, graph_config):
+ """Select the set of tasks required for a nightly build of firefox desktop and android"""
+ index_path = (
+ f"{graph_config['trust-domain']}.v2.{parameters['project']}.revision."
+ f"{parameters['head_rev']}.taskgraph.decision-nightly-all"
+ )
+ if os.environ.get("MOZ_AUTOMATION") and retry(
+ index_exists,
+ args=(index_path,),
+ kwargs={
+ "reason": "to avoid triggering multiple nightlies off the same revision",
+ },
+ ):
+ return []
+
+ return list(
+ set(target_tasks_nightly_desktop(full_task_graph, parameters, graph_config))
+ | set(target_tasks_nightly_android(full_task_graph, parameters, graph_config))
+ )
+
+
# Run Searchfox analysis once daily.
@_target_task("searchfox_index")
def target_tasks_searchfox(full_task_graph, parameters, graph_config):
@@ -1456,7 +1442,6 @@ def target_tasks_raptor_tp6m(full_task_graph, parameters, graph_config):
"browsertime" in try_name
and "amazon" in try_name
and "search" not in try_name
- and "fenix" in try_name
):
return True
@@ -1592,16 +1577,6 @@ def target_tasks_l10n_cross_channel(full_task_graph, parameters, graph_config):
return [l for l, t in full_task_graph.tasks.items() if filter(t)]
-@_target_task("are-we-esmified-yet")
-def target_tasks_are_we_esmified_yet(full_task_graph, parameters, graph_config):
- """
- select the task to track the progress of the esmification project
- """
- return [
- l for l, t in full_task_graph.tasks.items() if t.kind == "are-we-esmified-yet"
- ]
-
-
@_target_task("eslint-build")
def target_tasks_eslint_build(full_task_graph, parameters, graph_config):
"""Select the task to run additional ESLint rules which require a build."""
@@ -1632,3 +1607,49 @@ def target_tasks_snap_upstream_tests(full_task_graph, parameters, graph_config):
for name, task in full_task_graph.tasks.items():
if "snap-upstream-test" in name and not "-try" in name:
yield name
+
+
+@_target_task("nightly-android")
+def target_tasks_nightly_android(full_task_graph, parameters, graph_config):
+ def filter(task, parameters):
+ # geckoview
+ if task.attributes.get("shipping_product") == "fennec" and task.kind in (
+ "beetmover-geckoview",
+ "upload-symbols",
+ ):
+ return True
+
+ # fenix/focus/a-c
+ build_type = task.attributes.get("build-type", "")
+ return build_type in (
+ "nightly",
+ "focus-nightly",
+ "fenix-nightly",
+ "fenix-nightly-firebase",
+ "focus-nightly-firebase",
+ )
+
+ index_path = (
+ f"{graph_config['trust-domain']}.v2.{parameters['project']}.branch."
+ f"{parameters['head_ref']}.revision.{parameters['head_rev']}.taskgraph.decision-nightly-android"
+ )
+ if os.environ.get("MOZ_AUTOMATION") and retry(
+ index_exists,
+ args=(index_path,),
+ kwargs={
+ "reason": "to avoid triggering multiple nightlies off the same revision",
+ },
+ ):
+ return []
+
+ return [l for l, t in full_task_graph.tasks.items() if filter(t, parameters)]
+
+
+@_target_task("android-l10n-import")
+def target_tasks_android_l10n_import(full_task_graph, parameters, graph_config):
+ return [l for l, t in full_task_graph.tasks.items() if l == "android-l10n-import"]
+
+
+@_target_task("android-l10n-sync")
+def target_tasks_android_l10n_sync(full_task_graph, parameters, graph_config):
+ return [l for l, t in full_task_graph.tasks.items() if l == "android-l10n-sync"]
diff --git a/taskcluster/gecko_taskgraph/test/conftest.py b/taskcluster/gecko_taskgraph/test/conftest.py
index 360c2da65e..ff3d6ce2bd 100644
--- a/taskcluster/gecko_taskgraph/test/conftest.py
+++ b/taskcluster/gecko_taskgraph/test/conftest.py
@@ -151,6 +151,20 @@ class FakeOptimization(OptimizationStrategy):
return False
+class FakeTransformConfig:
+ kind = "fake-kind"
+ path = "/root/ci/fake-kind"
+ config = {}
+ params = FakeParameters()
+ kind_dependencies_tasks = {}
+ graph_config = {}
+ write_artifacts = False
+
+ def __init__(self, **kwargs):
+ for k, v in kwargs.items():
+ setattr(self, k, v)
+
+
@pytest.fixture
def maketgg(monkeypatch):
def inner(target_tasks=None, kinds=[("_fake", [])], params=None):
@@ -195,12 +209,16 @@ def maketgg(monkeypatch):
@pytest.fixture
def run_transform():
graph_config = fake_load_graph_config("/root")
- kind = FakeKind.create("fake", {}, graph_config)
+ config = FakeTransformConfig(graph_config=graph_config)
+
+ def inner(xform, tasks, **extra_config):
+ if extra_config:
+ for k, v in extra_config.items():
+ setattr(config, k, v)
- def inner(xform, tasks):
if isinstance(tasks, dict):
tasks = [tasks]
- return xform(kind.config, tasks)
+ return xform(config, tasks)
return inner
diff --git a/taskcluster/gecko_taskgraph/test/python.toml b/taskcluster/gecko_taskgraph/test/python.toml
index 597a02d8aa..fd71a9c2bd 100644
--- a/taskcluster/gecko_taskgraph/test/python.toml
+++ b/taskcluster/gecko_taskgraph/test/python.toml
@@ -17,6 +17,8 @@ subsuite = "taskgraph"
["test_taskcluster_yml.py"]
+["test_transforms_build_schedules.py"]
+
["test_transforms_job.py"]
["test_transforms_test.py"]
diff --git a/taskcluster/gecko_taskgraph/test/test_decision.py b/taskcluster/gecko_taskgraph/test/test_decision.py
index 8440b8e13f..53186b70fb 100644
--- a/taskcluster/gecko_taskgraph/test/test_decision.py
+++ b/taskcluster/gecko_taskgraph/test/test_decision.py
@@ -7,7 +7,6 @@ import json
import os
import shutil
import tempfile
-import unittest
from unittest.mock import patch
import pytest
@@ -18,6 +17,7 @@ from gecko_taskgraph import decision
from gecko_taskgraph.parameters import register_parameters
FAKE_GRAPH_CONFIG = {"product-dir": "browser", "taskgraph": {}}
+TTC_FILE = os.path.join(os.getcwd(), "try_task_config.json")
@pytest.fixture(scope="module", autouse=True)
@@ -25,150 +25,172 @@ def register():
register_parameters()
-class TestDecision(unittest.TestCase):
- def test_write_artifact_json(self):
- data = [{"some": "data"}]
- tmpdir = tempfile.mkdtemp()
- try:
- decision.ARTIFACTS_DIR = os.path.join(tmpdir, "artifacts")
- decision.write_artifact("artifact.json", data)
- with open(os.path.join(decision.ARTIFACTS_DIR, "artifact.json")) as f:
- self.assertEqual(json.load(f), data)
- finally:
- if os.path.exists(tmpdir):
- shutil.rmtree(tmpdir)
- decision.ARTIFACTS_DIR = "artifacts"
-
- def test_write_artifact_yml(self):
- data = [{"some": "data"}]
- tmpdir = tempfile.mkdtemp()
- try:
- decision.ARTIFACTS_DIR = os.path.join(tmpdir, "artifacts")
- decision.write_artifact("artifact.yml", data)
- self.assertEqual(load_yaml(decision.ARTIFACTS_DIR, "artifact.yml"), data)
- finally:
- if os.path.exists(tmpdir):
- shutil.rmtree(tmpdir)
- decision.ARTIFACTS_DIR = "artifacts"
-
-
-class TestGetDecisionParameters(unittest.TestCase):
- ttc_file = os.path.join(os.getcwd(), "try_task_config.json")
-
- def setUp(self):
- self.options = {
- "base_repository": "https://hg.mozilla.org/mozilla-unified",
- "head_repository": "https://hg.mozilla.org/mozilla-central",
- "head_rev": "abcd",
- "head_ref": "ef01",
- "head_tag": "",
- "message": "",
- "project": "mozilla-central",
- "pushlog_id": "143",
- "pushdate": 1503691511,
- "owner": "nobody@mozilla.com",
- "repository_type": "hg",
- "tasks_for": "hg-push",
- "level": "3",
- }
-
- @patch("gecko_taskgraph.decision.get_hg_revision_branch")
- @patch("gecko_taskgraph.decision._determine_more_accurate_base_rev")
- def test_simple_options(
- self, mock_determine_more_accurate_base_rev, mock_get_hg_revision_branch
- ):
- mock_get_hg_revision_branch.return_value = "default"
- mock_determine_more_accurate_base_rev.return_value = "baserev"
- with MockedOpen({self.ttc_file: None}):
- params = decision.get_decision_parameters(FAKE_GRAPH_CONFIG, self.options)
- self.assertEqual(params["pushlog_id"], "143")
- self.assertEqual(params["build_date"], 1503691511)
- self.assertEqual(params["hg_branch"], "default")
- self.assertEqual(params["moz_build_date"], "20170825200511")
- self.assertEqual(params["try_mode"], None)
- self.assertEqual(params["try_options"], None)
- self.assertEqual(params["try_task_config"], {})
-
- @patch("gecko_taskgraph.decision.get_hg_revision_branch")
- @patch("gecko_taskgraph.decision._determine_more_accurate_base_rev")
- def test_no_email_owner(
- self, mock_determine_more_accurate_base_rev, mock_get_hg_revision_branch
- ):
- mock_get_hg_revision_branch.return_value = "default"
- mock_determine_more_accurate_base_rev.return_value = "baserev"
- self.options["owner"] = "ffxbld"
- with MockedOpen({self.ttc_file: None}):
- params = decision.get_decision_parameters(FAKE_GRAPH_CONFIG, self.options)
- self.assertEqual(params["owner"], "ffxbld@noreply.mozilla.org")
-
- @patch("gecko_taskgraph.decision.get_hg_revision_branch")
- @patch("gecko_taskgraph.decision.get_hg_commit_message")
- @patch("gecko_taskgraph.decision._determine_more_accurate_base_rev")
- def test_try_options(
- self,
- mock_determine_more_accurate_base_rev,
- mock_get_hg_commit_message,
- mock_get_hg_revision_branch,
- ):
- mock_get_hg_commit_message.return_value = "try: -b do -t all --artifact"
- mock_get_hg_revision_branch.return_value = "default"
- mock_determine_more_accurate_base_rev.return_value = "baserev"
- self.options["project"] = "try"
- with MockedOpen({self.ttc_file: None}):
- params = decision.get_decision_parameters(FAKE_GRAPH_CONFIG, self.options)
- self.assertEqual(params["try_mode"], "try_option_syntax")
- self.assertEqual(params["try_options"]["build_types"], "do")
- self.assertEqual(params["try_options"]["unittests"], "all")
- self.assertEqual(
- params["try_task_config"],
+@pytest.fixture(scope="module")
+def options():
+ return {
+ "base_repository": "https://hg.mozilla.org/mozilla-unified",
+ "head_repository": "https://hg.mozilla.org/mozilla-central",
+ "head_rev": "abcd",
+ "head_ref": "ef01",
+ "head_tag": "",
+ "message": "",
+ "project": "mozilla-central",
+ "pushlog_id": "143",
+ "pushdate": 1503691511,
+ "owner": "nobody@mozilla.com",
+ "repository_type": "hg",
+ "tasks_for": "hg-push",
+ "level": "3",
+ }
+
+
+def test_write_artifact_json():
+ data = [{"some": "data"}]
+ tmpdir = tempfile.mkdtemp()
+ try:
+ decision.ARTIFACTS_DIR = os.path.join(tmpdir, "artifacts")
+ decision.write_artifact("artifact.json", data)
+ with open(os.path.join(decision.ARTIFACTS_DIR, "artifact.json")) as f:
+ assert json.load(f) == data
+ finally:
+ if os.path.exists(tmpdir):
+ shutil.rmtree(tmpdir)
+ decision.ARTIFACTS_DIR = "artifacts"
+
+
+def test_write_artifact_yml():
+ data = [{"some": "data"}]
+ tmpdir = tempfile.mkdtemp()
+ try:
+ decision.ARTIFACTS_DIR = os.path.join(tmpdir, "artifacts")
+ decision.write_artifact("artifact.yml", data)
+ assert load_yaml(decision.ARTIFACTS_DIR, "artifact.yml") == data
+ finally:
+ if os.path.exists(tmpdir):
+ shutil.rmtree(tmpdir)
+ decision.ARTIFACTS_DIR = "artifacts"
+
+
+@patch("gecko_taskgraph.decision.get_hg_revision_branch")
+@patch("gecko_taskgraph.decision.get_hg_commit_message")
+@patch("gecko_taskgraph.decision._determine_more_accurate_base_rev")
+@patch("gecko_taskgraph.decision.get_changed_files")
+@pytest.mark.parametrize(
+ "extra_options,commit_msg,ttc,expected",
+ (
+ pytest.param(
+ {},
+ None,
+ None,
{
- "gecko-profile": False,
- "use-artifact-builds": True,
- "env": {},
+ "pushlog_id": "143",
+ "build_date": 1503691511,
+ "files_changed": ["bar/baz.md", "foo.txt"],
+ "hg_branch": "default",
+ "moz_build_date": "20170825200511",
+ "try_mode": None,
+ "try_options": None,
+ "try_task_config": {},
},
- )
-
- @patch("gecko_taskgraph.decision.get_hg_revision_branch")
- @patch("gecko_taskgraph.decision.get_hg_commit_message")
- @patch("gecko_taskgraph.decision._determine_more_accurate_base_rev")
- def test_try_task_config(
- self,
- mock_get_hg_commit_message,
- mock_get_hg_revision_branch,
- mock_determine_more_accurate_base_rev,
- ):
- mock_get_hg_commit_message.return_value = "Fuzzy query=foo"
- mock_get_hg_revision_branch.return_value = "default"
- mock_determine_more_accurate_base_rev.return_value = "baserev"
- ttc = {"tasks": ["a", "b"]}
- self.options["project"] = "try"
- with MockedOpen({self.ttc_file: json.dumps(ttc)}):
- params = decision.get_decision_parameters(FAKE_GRAPH_CONFIG, self.options)
- self.assertEqual(params["try_mode"], "try_task_config")
- self.assertEqual(params["try_options"], None)
- self.assertEqual(params["try_task_config"], ttc)
-
- def test_try_syntax_from_message_empty(self):
- self.assertEqual(decision.try_syntax_from_message(""), "")
-
- def test_try_syntax_from_message_no_try_syntax(self):
- self.assertEqual(decision.try_syntax_from_message("abc | def"), "")
-
- def test_try_syntax_from_message_initial_try_syntax(self):
- self.assertEqual(
- decision.try_syntax_from_message("try: -f -o -o"), "try: -f -o -o"
- )
-
- def test_try_syntax_from_message_initial_try_syntax_multiline(self):
- self.assertEqual(
- decision.try_syntax_from_message("try: -f -o -o\nabc\ndef"), "try: -f -o -o"
- )
-
- def test_try_syntax_from_message_embedded_try_syntax_multiline(self):
- self.assertEqual(
- decision.try_syntax_from_message("some stuff\ntry: -f -o -o\nabc\ndef"),
+ id="simple_options",
+ ),
+ pytest.param(
+ {"owner": "ffxbld"},
+ None,
+ None,
+ {
+ "owner": "ffxbld@noreply.mozilla.org",
+ },
+ id="no_email_owner",
+ ),
+ pytest.param(
+ {"project": "try"},
+ "try: -b do -t all --artifact",
+ None,
+ {
+ "try_mode": "try_option_syntax",
+ "try_options": {
+ "build_types": "do",
+ "include_nightly": False,
+ "interactive": False,
+ "jobs": None,
+ "no_retry": False,
+ "notifications": None,
+ "platforms": "all",
+ "raptor": "none",
+ "raptor_trigger_tests": 1,
+ "tag": None,
+ "talos": "all",
+ "talos_trigger_tests": 1,
+ "taskcluster_worker": False,
+ "trigger_tests": 1,
+ "unittests": "all",
+ },
+ "try_task_config": {
+ "gecko-profile": False,
+ "use-artifact-builds": True,
+ "env": {},
+ },
+ },
+ id="try_options",
+ ),
+ pytest.param(
+ {
+ "project": "try",
+ },
+ "Fuzzy query=foo",
+ {"tasks": ["a", "b"]},
+ {
+ "try_mode": "try_task_config",
+ "try_options": None,
+ "try_task_config": {"tasks": ["a", "b"]},
+ },
+ id="try_task_config",
+ ),
+ ),
+)
+def test_get_decision_parameters(
+ mock_get_changed_files,
+ mock_determine_more_accurate_base_rev,
+ mock_get_hg_commit_message,
+ mock_get_hg_revision_branch,
+ options,
+ extra_options,
+ commit_msg,
+ ttc,
+ expected,
+):
+ mock_get_hg_revision_branch.return_value = "default"
+ mock_get_hg_commit_message.return_value = commit_msg or "commit message"
+ mock_determine_more_accurate_base_rev.return_value = "baserev"
+ mock_get_changed_files.return_value = ["foo.txt", "bar/baz.md"]
+
+ options.update(extra_options)
+ contents = None
+ if ttc:
+ contents = json.dumps(ttc)
+ with MockedOpen({TTC_FILE: contents}):
+ params = decision.get_decision_parameters(FAKE_GRAPH_CONFIG, options)
+
+ for key in expected:
+ assert params[key] == expected[key], f"key {key} does not match!"
+
+
+@pytest.mark.parametrize(
+ "msg, expected",
+ (
+ pytest.param("", "", id="empty"),
+ pytest.param("abc | def", "", id="no_try_syntax"),
+ pytest.param("try: -f -o -o", "try: -f -o -o", id="initial_try_syntax"),
+ pytest.param(
+ "some stuff\ntry: -f -o -o\nabc\ndef",
"try: -f -o -o",
- )
+ id="embedded_try_syntax_multiline",
+ ),
+ ),
+)
+def test_try_syntax_from_message(msg, expected):
+ assert decision.try_syntax_from_message(msg) == expected
if __name__ == "__main__":
diff --git a/taskcluster/gecko_taskgraph/test/test_transforms_build_schedules.py b/taskcluster/gecko_taskgraph/test/test_transforms_build_schedules.py
new file mode 100644
index 0000000000..a693461c68
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/test/test_transforms_build_schedules.py
@@ -0,0 +1,56 @@
+import pytest
+from mozunit import main
+
+from gecko_taskgraph.transforms.build_schedules import set_build_schedules_optimization
+
+
+@pytest.mark.parametrize(
+ "kind,task,expected",
+ (
+ pytest.param("build", {"when": "foo"}, None, id="no-op"),
+ pytest.param(
+ "build",
+ {"attributes": {"build_platform": "linux64/opt"}},
+ {"build": ["linux", "firefox"]},
+ id="build",
+ ),
+ pytest.param(
+ "build-components",
+ {},
+ {"build": ["android", "fenix", "focus-android"]},
+ id="build-components",
+ ),
+ pytest.param(
+ "build-bundle",
+ {"name": "build-bundle-fenix"},
+ {"build": ["android", "fenix"]},
+ id="build-bundle-fenix",
+ ),
+ pytest.param(
+ "build-apk",
+ {"name": "fenix"},
+ {"build": ["android", "fenix"]},
+ id="build-apk-fenix",
+ ),
+ pytest.param(
+ "build-apk",
+ {"name": "build-apk-focus"},
+ {"build": ["android", "focus-android"]},
+ id="build-apk-focus",
+ ),
+ pytest.param(
+ "build-apk",
+ {"name": "build-apk-klar"},
+ {"build": ["android", "focus-android"]},
+ id="build-apk-klar",
+ ),
+ ),
+)
+def test_build_schedules(run_transform, kind, task, expected):
+ tasks = list(run_transform(set_build_schedules_optimization, [task], kind=kind))
+ assert len(tasks) == 1
+ assert tasks[0].get("optimization") == expected
+
+
+if __name__ == "__main__":
+ main()
diff --git a/taskcluster/gecko_taskgraph/test/test_transforms_test.py b/taskcluster/gecko_taskgraph/test/test_transforms_test.py
index 1e5067a2b5..d61eff5769 100644
--- a/taskcluster/gecko_taskgraph/test/test_transforms_test.py
+++ b/taskcluster/gecko_taskgraph/test/test_transforms_test.py
@@ -235,16 +235,16 @@ def test_split_variants(monkeypatch, run_full_config_transform, make_test_task):
pytest.param(
{
"attributes": {},
- "test-platform": "windows10-64-2004-ref-hw-2017-ccov/debug",
+ "test-platform": "windows11-64-2009-hw-ref-ccov/debug",
},
{
"platform": {
"arch": "64",
- "machine": "ref-hw-2017",
+ "machine": "hw-ref",
"os": {
- "build": "2004",
+ "build": "2009",
"name": "windows",
- "version": "10",
+ "version": "11",
},
},
"build": {
diff --git a/taskcluster/gecko_taskgraph/test/test_util_backstop.py b/taskcluster/gecko_taskgraph/test/test_util_backstop.py
index af9aabd5af..0a2bdc6ae4 100644
--- a/taskcluster/gecko_taskgraph/test/test_util_backstop.py
+++ b/taskcluster/gecko_taskgraph/test/test_util_backstop.py
@@ -18,20 +18,22 @@ from gecko_taskgraph.util.backstop import (
is_backstop,
)
-LAST_BACKSTOP_ID = 0
+LAST_BACKSTOP_PUSHID = 1
LAST_BACKSTOP_PUSHDATE = mktime(datetime.now().timetuple())
DEFAULT_RESPONSES = {
"index": {
"status": 200,
- "json": {"taskId": LAST_BACKSTOP_ID},
+ "json": {"taskId": LAST_BACKSTOP_PUSHID},
},
"artifact": {
"status": 200,
"body": dedent(
"""
pushdate: {}
+ pushlog_id: "{}"
""".format(
- LAST_BACKSTOP_PUSHDATE
+ LAST_BACKSTOP_PUSHDATE,
+ LAST_BACKSTOP_PUSHID,
)
),
},
@@ -50,7 +52,8 @@ def params():
"head_rev": "abcdef",
"project": "autoland",
"pushdate": LAST_BACKSTOP_PUSHDATE + 1,
- "pushlog_id": LAST_BACKSTOP_ID + 1,
+ "pushlog_id": f"{LAST_BACKSTOP_PUSHID + 1}",
+ "target_tasks_method": "default",
}
@@ -61,7 +64,7 @@ def params():
{
"index": {"status": 404},
},
- {"pushlog_id": 1},
+ {"pushlog_id": "1"},
True,
id="no previous backstop",
),
@@ -78,8 +81,8 @@ def params():
pytest.param(
DEFAULT_RESPONSES,
{
- "pushlog_id": LAST_BACKSTOP_ID + 1,
- "pushdate": LAST_BACKSTOP_PUSHDATE + 1,
+ "pushlog_id": f"{LAST_BACKSTOP_PUSHID + BACKSTOP_PUSH_INTERVAL - 1}",
+ "pushdate": LAST_BACKSTOP_PUSHDATE + (BACKSTOP_TIME_INTERVAL * 60) - 1,
},
False,
id="not a backstop",
@@ -87,10 +90,26 @@ def params():
pytest.param(
{},
{
- "pushlog_id": BACKSTOP_PUSH_INTERVAL,
+ "target_tasks_method": "nothing",
+ },
+ False,
+ id="dontbuild",
+ ),
+ pytest.param(
+ DEFAULT_RESPONSES,
+ {
+ "pushlog_id": f"{LAST_BACKSTOP_PUSHID + BACKSTOP_PUSH_INTERVAL}",
+ },
+ True,
+ id="interval",
+ ),
+ pytest.param(
+ DEFAULT_RESPONSES,
+ {
+ "pushlog_id": f"{LAST_BACKSTOP_PUSHID + BACKSTOP_PUSH_INTERVAL + 1}",
},
True,
- id="backstop interval",
+ id="greater than interval",
),
pytest.param(
DEFAULT_RESPONSES,
@@ -104,7 +123,7 @@ def params():
{},
{
"project": "try",
- "pushlog_id": BACKSTOP_PUSH_INTERVAL,
+ "pushlog_id": f"{BACKSTOP_PUSH_INTERVAL}",
},
False,
id="try not a backstop",
@@ -138,13 +157,12 @@ def test_is_backstop(responses, params, response_args, extra_params, expected):
**{"trust-domain": "gecko", "project": params["project"]}
)
),
- "artifact": get_artifact_url(LAST_BACKSTOP_ID, "public/parameters.yml"),
- "status": get_task_url(LAST_BACKSTOP_ID) + "/status",
+ "artifact": get_artifact_url(LAST_BACKSTOP_PUSHID, "public/parameters.yml"),
+ "status": get_task_url(LAST_BACKSTOP_PUSHID) + "/status",
}
for key in ("index", "status", "artifact"):
if key in response_args:
- print(urls[key])
responses.add(responses.GET, urls[key], **response_args[key])
params.update(extra_params)
diff --git a/taskcluster/gecko_taskgraph/transforms/build_attrs.py b/taskcluster/gecko_taskgraph/transforms/build_attrs.py
index 9cda71718a..fda9888fe1 100644
--- a/taskcluster/gecko_taskgraph/transforms/build_attrs.py
+++ b/taskcluster/gecko_taskgraph/transforms/build_attrs.py
@@ -4,8 +4,6 @@
from taskgraph.transforms.base import TransformSequence
-from gecko_taskgraph.util.platforms import platform_family
-
transforms = TransformSequence()
@@ -34,17 +32,3 @@ def set_build_attributes(config, jobs):
)
yield job
-
-
-@transforms.add
-def set_schedules_optimization(config, jobs):
- """Set the `skip-unless-affected` optimization based on the build platform."""
- for job in jobs:
- # don't add skip-unless-schedules if there's already a when defined
- if "when" in job:
- yield job
- continue
-
- build_platform = job["attributes"]["build_platform"]
- job.setdefault("optimization", {"build": [platform_family(build_platform)]})
- yield job
diff --git a/taskcluster/gecko_taskgraph/transforms/build_schedules.py b/taskcluster/gecko_taskgraph/transforms/build_schedules.py
new file mode 100644
index 0000000000..ed6262b8b2
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/build_schedules.py
@@ -0,0 +1,48 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from taskgraph.transforms.base import TransformSequence
+
+from gecko_taskgraph.util.platforms import platform_family
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def set_build_schedules_optimization(config, tasks):
+ """Set the `build` optimization based on the build platform."""
+ for task in tasks:
+ # don't add an optimization if there's already one defined
+ if "when" in task or "optimization" in task:
+ yield task
+ continue
+
+ schedules = []
+ if config.kind == "build":
+ family = platform_family(task["attributes"]["build_platform"])
+ schedules = [family]
+
+ if "android" not in family:
+ # These are not GeckoView builds, so are associated with Firefox.
+ schedules.append("firefox")
+
+ elif config.kind in (
+ "build-components",
+ "build-samples-browser",
+ "test-components",
+ ):
+ # These are Android components builds and can only impact Fenix or Focus.
+ schedules = ["android", "fenix", "focus-android"]
+
+ elif config.kind in ("build-apk", "build-bundle", "test-apk", "ui-test-apk"):
+ # These are APK builds for Fenix or Focus
+ schedules = ["android"]
+
+ if "fenix" in task["name"]:
+ schedules.append("fenix")
+ elif "focus" in task["name"] or "klar" in task["name"]:
+ schedules.append("focus-android")
+
+ task["optimization"] = {"build": schedules}
+ yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/condprof.py b/taskcluster/gecko_taskgraph/transforms/condprof.py
index 516c1d8f20..f39a9b09ab 100644
--- a/taskcluster/gecko_taskgraph/transforms/condprof.py
+++ b/taskcluster/gecko_taskgraph/transforms/condprof.py
@@ -28,7 +28,7 @@ diff_description_schema = Schema(
Optional("run-on-projects"): task_description_schema["run-on-projects"],
Optional("scopes"): task_description_schema["scopes"],
Optional("treeherder"): task_description_schema["treeherder"],
- Optional("use-system-python"): bool,
+ Optional("use-python"): job_description_schema["use-python"],
Optional("worker"): job_description_schema["worker"],
Optional("worker-type"): task_description_schema["worker-type"],
}
@@ -84,8 +84,8 @@ def generate_scenarios(config, tasks):
"fetches": copy_task(task["fetches"]),
}
- use_system_python = task.get("use-system-python", None)
- if use_system_python is not None:
- taskdesc["use-system-python"] = use_system_python
+ use_taskcluster_python = task.get("use-python", "system")
+ if use_taskcluster_python != "system":
+ taskdesc["use-python"] = use_taskcluster_python
yield taskdesc
diff --git a/taskcluster/gecko_taskgraph/transforms/geckodriver_mac_notarization.py b/taskcluster/gecko_taskgraph/transforms/geckodriver_mac_notarization.py
index 2f0d8dd2aa..016d642b9b 100644
--- a/taskcluster/gecko_taskgraph/transforms/geckodriver_mac_notarization.py
+++ b/taskcluster/gecko_taskgraph/transforms/geckodriver_mac_notarization.py
@@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
-Transform the repackage signing task into an actual task description.
+Transform the geckodriver notarization task into an actual task description.
"""
from taskgraph.transforms.base import TransformSequence
@@ -14,7 +14,7 @@ from gecko_taskgraph.transforms.task import task_description_schema
from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
from gecko_taskgraph.util.scriptworker import add_scope_prefix
-repackage_signing_description_schema = Schema(
+geckodriver_notarization_description_schema = Schema(
{
Optional("label"): str,
Optional("treeherder"): task_description_schema["treeherder"],
@@ -38,7 +38,7 @@ def remove_name(config, jobs):
yield job
-transforms.add_validate(repackage_signing_description_schema)
+transforms.add_validate(geckodriver_notarization_description_schema)
@transforms.add
diff --git a/taskcluster/gecko_taskgraph/transforms/job/__init__.py b/taskcluster/gecko_taskgraph/transforms/job/__init__.py
index b87f7e0955..54cedf513a 100644
--- a/taskcluster/gecko_taskgraph/transforms/job/__init__.py
+++ b/taskcluster/gecko_taskgraph/transforms/job/__init__.py
@@ -14,11 +14,12 @@ import json
import logging
import mozpack.path as mozpath
+from packaging.version import Version
from taskgraph.transforms.base import TransformSequence
from taskgraph.util.python_path import import_sibling_modules
from taskgraph.util.schema import Schema, validate_schema
from taskgraph.util.taskcluster import get_artifact_prefix
-from voluptuous import Any, Exclusive, Extra, Optional, Required
+from voluptuous import Any, Coerce, Exclusive, Extra, Optional, Required
from gecko_taskgraph.transforms.cached_tasks import order_tasks
from gecko_taskgraph.transforms.task import task_description_schema
@@ -62,7 +63,7 @@ job_description_schema = Schema(
"optimization"
],
Optional("use-sccache"): task_description_schema["use-sccache"],
- Optional("use-system-python"): bool,
+ Optional("use-python"): Any("system", "default", Coerce(Version)),
Optional("priority"): task_description_schema["priority"],
# The "when" section contains descriptions of the circumstances under which
# this task should be included in the task graph. This will be converted
@@ -245,9 +246,15 @@ def get_attribute(dict, key, attributes, attribute_name):
@transforms.add
def use_system_python(config, jobs):
for job in jobs:
- if job.pop("use-system-python", True):
+ taskcluster_python = job.pop("use-python", "system")
+ if taskcluster_python == "system":
yield job
else:
+ if taskcluster_python == "default":
+ python_version = "python" # the taskcluster default alias
+ else:
+ python_version = f"python-{taskcluster_python}"
+
fetches = job.setdefault("fetches", {})
toolchain = fetches.setdefault("toolchain", [])
if "win" in job["worker"]["os"]:
@@ -259,7 +266,7 @@ def use_system_python(config, jobs):
else:
raise ValueError("unexpected worker.os value {}".format(platform))
- toolchain.append("{}-python".format(platform))
+ toolchain.append(f"{platform}-{python_version}")
worker = job.setdefault("worker", {})
env = worker.setdefault("env", {})
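
A minimal sketch (not part of the patch) of how the new use-python value maps onto a toolchain fetch in the transform above; the platform strings are examples.

    from packaging.version import Version

    def python_toolchain(use_python, platform):
        """Return the toolchain artifact name to fetch, or None for the system python."""
        if use_python == "system":
            return None  # job is yielded unchanged, no toolchain fetch added
        if use_python == "default":
            return f"{platform}-python"  # the taskcluster default alias
        return f"{platform}-python-{use_python}"

    # e.g. a job with `use-python: "3.11"` on a linux64 worker fetches "linux64-python-3.11"
    print(python_toolchain(Version("3.11"), "linux64"))
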
diff --git a/taskcluster/gecko_taskgraph/transforms/job/mach.py b/taskcluster/gecko_taskgraph/transforms/job/mach.py
index 775213f8fe..5f830ec04b 100644
--- a/taskcluster/gecko_taskgraph/transforms/job/mach.py
+++ b/taskcluster/gecko_taskgraph/transforms/job/mach.py
@@ -50,7 +50,7 @@ def configure_mach(config, job, taskdesc):
if python:
del run["python-version"]
- if taskdesc.get("use-system-python"):
+ if taskdesc.get("use-python", "system") == "system":
if worker["os"] == "macosx" and python == 3:
python = "/usr/local/bin/python3"
diff --git a/taskcluster/gecko_taskgraph/transforms/job/mozharness.py b/taskcluster/gecko_taskgraph/transforms/job/mozharness.py
index 4d7293ec51..ada5b85ea3 100644
--- a/taskcluster/gecko_taskgraph/transforms/job/mozharness.py
+++ b/taskcluster/gecko_taskgraph/transforms/job/mozharness.py
@@ -289,7 +289,7 @@ def mozharness_on_generic_worker(config, job, taskdesc):
system_python_dir = ""
gecko_path = "$GECKO_PATH"
- if run.get("use-system-python", True):
+ if run.get("use-python", "system") == "system":
python_bindir = system_python_dir
else:
# $MOZ_PYTHON_HOME is going to be substituted in run-task, when we
diff --git a/taskcluster/gecko_taskgraph/transforms/release_deps.py b/taskcluster/gecko_taskgraph/transforms/release_deps.py
index e44af576eb..aab8a2f60a 100644
--- a/taskcluster/gecko_taskgraph/transforms/release_deps.py
+++ b/taskcluster/gecko_taskgraph/transforms/release_deps.py
@@ -44,6 +44,17 @@ def add_dependencies(config, jobs):
!= job["attributes"]["build_platform"]
):
continue
+
+ # TODO get rid of the release-type match
+ if product == "firefox-android":
+ # exclude beta tasks from release graph and vice versa
+ from android_taskgraph.release_type import does_task_match_release_type
+
+ if not does_task_match_release_type(
+ dep_task, config.params["release_type"]
+ ):
+ continue
+
# Add matching product tasks to deps
if (
dep_task.task.get("shipping-product") == product
diff --git a/taskcluster/gecko_taskgraph/transforms/release_notifications.py b/taskcluster/gecko_taskgraph/transforms/release_notifications.py
index 86109ec5ed..071e5de8a3 100644
--- a/taskcluster/gecko_taskgraph/transforms/release_notifications.py
+++ b/taskcluster/gecko_taskgraph/transforms/release_notifications.py
@@ -39,6 +39,9 @@ def add_notifications(config, jobs):
resolve_keyed_by(
notifications, "emails", label, project=config.params["project"]
)
+ resolve_keyed_by(
+ notifications, "message", label, project=config.params["project"]
+ )
emails = notifications["emails"]
format_kwargs = dict(
task=job,
diff --git a/taskcluster/gecko_taskgraph/transforms/signing.py b/taskcluster/gecko_taskgraph/transforms/signing.py
index e55ad47f42..1bf91effd1 100644
--- a/taskcluster/gecko_taskgraph/transforms/signing.py
+++ b/taskcluster/gecko_taskgraph/transforms/signing.py
@@ -12,10 +12,7 @@ from taskgraph.util.schema import Schema, taskref_or_string
from voluptuous import Optional, Required
from gecko_taskgraph.transforms.task import task_description_schema
-from gecko_taskgraph.util.attributes import (
- copy_attributes_from_dependent_job,
- release_level,
-)
+from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
from gecko_taskgraph.util.scriptworker import (
add_scope_prefix,
get_signing_cert_scope_per_platform,
@@ -77,30 +74,11 @@ transforms.add_validate(signing_description_schema)
@transforms.add
-def add_entitlements_link(config, jobs):
- for job in jobs:
- dep_job = get_primary_dependency(config, job)
- entitlements_path = evaluate_keyed_by(
- config.graph_config["mac-notarization"]["mac-entitlements"],
- "mac entitlements",
- {
- "platform": dep_job.attributes.get("build_platform"),
- "release-level": release_level(config.params["project"]),
- },
- )
- if entitlements_path:
- job["entitlements-url"] = config.params.file_url(
- entitlements_path,
- )
- yield job
-
-
-@transforms.add
def add_requirements_link(config, jobs):
for job in jobs:
dep_job = get_primary_dependency(config, job)
requirements_path = evaluate_keyed_by(
- config.graph_config["mac-notarization"]["mac-requirements"],
+ config.graph_config["mac-signing"]["mac-requirements"],
"mac requirements",
{
"platform": dep_job.attributes.get("build_platform"),
diff --git a/taskcluster/gecko_taskgraph/transforms/task.py b/taskcluster/gecko_taskgraph/transforms/task.py
index 3129742ea9..4bfe0e9f6d 100644
--- a/taskcluster/gecko_taskgraph/transforms/task.py
+++ b/taskcluster/gecko_taskgraph/transforms/task.py
@@ -1349,6 +1349,23 @@ def build_push_addons_payload(config, task, task_def):
],
},
Optional("merge-info"): object,
+ Optional("android-l10n-import-info"): {
+ Required("from-repo-url"): str,
+ Required("toml-info"): [
+ {
+ Required("toml-path"): str,
+ Required("dest-path"): str,
+ }
+ ],
+ },
+ Optional("android-l10n-sync-info"): {
+ Required("from-repo-url"): str,
+ Required("toml-info"): [
+ {
+ Required("toml-path"): str,
+ }
+ ],
+ },
},
)
def build_treescript_payload(config, task, task_def):
@@ -1412,6 +1429,38 @@ def build_treescript_payload(config, task, task_def):
task_def["payload"]["merge_info"] = merge_info
actions.append("merge_day")
+ if worker.get("android-l10n-import-info"):
+ android_l10n_import_info = {}
+ for k, v in worker["android-l10n-import-info"].items():
+ android_l10n_import_info[k.replace("-", "_")] = worker[
+ "android-l10n-import-info"
+ ][k]
+ android_l10n_import_info["toml_info"] = [
+ {
+ param_name.replace("-", "_"): param_value
+ for param_name, param_value in entry.items()
+ }
+ for entry in worker["android-l10n-import-info"]["toml-info"]
+ ]
+ task_def["payload"]["android_l10n_import_info"] = android_l10n_import_info
+ actions.append("android_l10n_import")
+
+ if worker.get("android-l10n-sync-info"):
+ android_l10n_sync_info = {}
+ for k, v in worker["android-l10n-sync-info"].items():
+ android_l10n_sync_info[k.replace("-", "_")] = worker[
+ "android-l10n-sync-info"
+ ][k]
+ android_l10n_sync_info["toml_info"] = [
+ {
+ param_name.replace("-", "_"): param_value
+ for param_name, param_value in entry.items()
+ }
+ for entry in worker["android-l10n-sync-info"]["toml-info"]
+ ]
+ task_def["payload"]["android_l10n_sync_info"] = android_l10n_sync_info
+ actions.append("android_l10n_sync")
+
if worker["push"]:
actions.append("push")
diff --git a/taskcluster/gecko_taskgraph/transforms/test/__init__.py b/taskcluster/gecko_taskgraph/transforms/test/__init__.py
index 92704bf18c..19ab8d289f 100644
--- a/taskcluster/gecko_taskgraph/transforms/test/__init__.py
+++ b/taskcluster/gecko_taskgraph/transforms/test/__init__.py
@@ -27,6 +27,7 @@ from taskgraph.util.schema import Schema, optionally_keyed_by, resolve_keyed_by
from voluptuous import Any, Exclusive, Optional, Required
from gecko_taskgraph.optimize.schema import OptimizationSchema
+from gecko_taskgraph.transforms.job import job_description_schema
from gecko_taskgraph.transforms.test.other import get_mobile_project
from gecko_taskgraph.util.chunking import manifest_loaders
@@ -118,7 +119,9 @@ test_description_schema = Schema(
Required("run-without-variant"): optionally_keyed_by("test-platform", bool),
# The EC2 instance size to run these tests on.
Required("instance-size"): optionally_keyed_by(
- "test-platform", Any("default", "large", "xlarge")
+ "test-platform",
+ "variant",
+ Any("default", "large", "large-noscratch", "xlarge", "xlarge-noscratch"),
),
# type of virtualization or hardware required by test.
Required("virtualization"): optionally_keyed_by(
@@ -265,11 +268,14 @@ test_description_schema = Schema(
str,
None,
{Required("index"): str, Required("name"): str},
+ {Required("upstream-task"): str, Required("name"): str},
),
),
# A list of artifacts to install from 'fetch' tasks. Validation deferred
# to 'job' transforms.
Optional("fetches"): object,
+ # A list of extra dependencies
+ Optional("dependencies"): object,
# Raptor / browsertime specific keys, defer validation to 'raptor.py'
# transform.
Optional("raptor"): object,
@@ -279,6 +285,8 @@ test_description_schema = Schema(
Optional("subtest"): str,
# Define if a given task supports artifact builds or not, see bug 1695325.
Optional("supports-artifact-builds"): bool,
+ # Version of python used to run the task
+ Optional("use-python"): job_description_schema["use-python"],
}
)
@@ -346,6 +354,7 @@ def set_defaults(config, tasks):
task.setdefault("run-without-variant", True)
task.setdefault("variants", [])
task.setdefault("supports-artifact-builds", True)
+ task.setdefault("use-python", "system")
task["mozharness"].setdefault("extra-options", [])
task["mozharness"].setdefault("requires-signed-builds", False)
@@ -484,6 +493,9 @@ def make_job_description(config, tasks):
if task["mozharness"]["requires-signed-builds"] is True:
jobdesc["dependencies"]["build-signing"] = task["build-signing-label"]
+ if "dependencies" in task:
+ jobdesc["dependencies"].update(task["dependencies"])
+
if "expires-after" in task:
jobdesc["expires-after"] = task["expires-after"]
diff --git a/taskcluster/gecko_taskgraph/transforms/test/chunk.py b/taskcluster/gecko_taskgraph/transforms/test/chunk.py
index 7f832c57df..8219c41664 100644
--- a/taskcluster/gecko_taskgraph/transforms/test/chunk.py
+++ b/taskcluster/gecko_taskgraph/transforms/test/chunk.py
@@ -44,8 +44,7 @@ def set_test_verify_chunks(config, tasks):
task["chunks"] = perfile_number_of_chunks(
is_try(config.params),
env.get("MOZHARNESS_TEST_PATHS", ""),
- config.params.get("head_repository", ""),
- config.params.get("head_rev", ""),
+ frozenset(config.params["files_changed"]),
task["test-name"],
)
diff --git a/taskcluster/gecko_taskgraph/transforms/test/other.py b/taskcluster/gecko_taskgraph/transforms/test/other.py
index b8cb95cff7..5d54467001 100644
--- a/taskcluster/gecko_taskgraph/transforms/test/other.py
+++ b/taskcluster/gecko_taskgraph/transforms/test/other.py
@@ -2,6 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import copy
import hashlib
import json
import re
@@ -12,7 +13,11 @@ from taskgraph.transforms.base import TransformSequence
from taskgraph.util.attributes import keymatch
from taskgraph.util.keyed_by import evaluate_keyed_by
from taskgraph.util.schema import Schema, resolve_keyed_by
-from taskgraph.util.taskcluster import get_artifact_path, get_index_url
+from taskgraph.util.taskcluster import (
+ get_artifact_path,
+ get_artifact_url,
+ get_index_url,
+)
from voluptuous import Any, Optional, Required
from gecko_taskgraph.transforms.test.variant import TEST_VARIANTS
@@ -99,6 +104,25 @@ def setup_talos(config, tasks):
if config.params.get("project", None):
extra_options.append("--project=%s" % config.params["project"])
+ if "pdfpaint" in task["try-name"]:
+ max_chunks = 10
+ for chunk in range(1, max_chunks + 1):
+ new_task = copy.deepcopy(task)
+ new_task["mozharness"]["extra-options"].append(
+ f"--pdfPaintChunk={chunk}"
+ )
+ new_task["test-name"] = task["test-name"].replace(
+ "pdfpaint", f"pdfpaint-{chunk}"
+ )
+ new_task["try-name"] = task["try-name"].replace(
+ "pdfpaint", f"pdfpaint-{chunk}"
+ )
+ new_task["treeherder-symbol"] = task["treeherder-symbol"].replace(
+ "pdfpaint", f"pdfpaint-{chunk}"
+ )
+ yield new_task
+ continue
+
yield task
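
The pdfpaint branch fans a single talos task out into a fixed number of chunks: each copy gets a --pdfPaintChunk=N extra option, has the chunk index folded into its test name, try name, and treeherder symbol, and the original unchunked task is skipped via the continue. The same pattern in isolation (field values are placeholders):

    import copy

    def fan_out_pdfpaint(task, max_chunks=10):
        # Yield one deep copy per chunk, as setup_talos does above.
        for chunk in range(1, max_chunks + 1):
            new_task = copy.deepcopy(task)
            new_task["mozharness"]["extra-options"].append(f"--pdfPaintChunk={chunk}")
            for field in ("test-name", "try-name", "treeherder-symbol"):
                new_task[field] = task[field].replace("pdfpaint", f"pdfpaint-{chunk}")
            yield new_task
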
@@ -246,6 +270,7 @@ def handle_keyed_by(config, tasks):
"webrender-run-on-projects",
"mozharness.requires-signed-builds",
"build-signing-label",
+ "dependencies",
]
for task in tasks:
for field in fields:
@@ -292,10 +317,17 @@ def set_target(config, tasks):
target = "target.tar.bz2"
if isinstance(target, dict):
- # TODO Remove hardcoded mobile artifact prefix
- index_url = get_index_url(target["index"])
- installer_url = "{}/artifacts/public/{}".format(index_url, target["name"])
- task["mozharness"]["installer-url"] = installer_url
+ if "index" in target:
+ # TODO Remove hardcoded mobile artifact prefix
+ index_url = get_index_url(target["index"])
+ installer_url = "{}/artifacts/public/{}".format(
+ index_url, target["name"]
+ )
+ task["mozharness"]["installer-url"] = installer_url
+ else:
+ task["mozharness"]["installer-url"] = get_artifact_url(
+ f'<{target["upstream-task"]}>', target["name"]
+ )
else:
task["mozharness"]["build-artifact-name"] = get_artifact_path(task, target)
@@ -363,39 +395,39 @@ def setup_browsertime(config, tasks):
cd_fetches = {
"android.*": [
- "linux64-chromedriver-120",
- "linux64-chromedriver-121",
"linux64-chromedriver-122",
+ "linux64-chromedriver-123",
+ "linux64-chromedriver-124",
],
"linux.*": [
- "linux64-chromedriver-120",
- "linux64-chromedriver-121",
"linux64-chromedriver-122",
+ "linux64-chromedriver-123",
+ "linux64-chromedriver-124",
],
"macosx1015.*": [
- "mac64-chromedriver-120",
- "mac64-chromedriver-121",
"mac64-chromedriver-122",
+ "mac64-chromedriver-123",
+ "mac64-chromedriver-124",
],
"macosx1400.*": [
- "mac-arm-chromedriver-120",
- "mac-arm-chromedriver-121",
"mac-arm-chromedriver-122",
+ "mac-arm-chromedriver-123",
+ "mac-arm-chromedriver-124",
],
"windows.*aarch64.*": [
- "win32-chromedriver-120",
"win32-chromedriver-121",
"win32-chromedriver-122",
+ "win32-chromedriver-123",
],
"windows.*-32.*": [
- "win32-chromedriver-120",
- "win32-chromedriver-121",
"win32-chromedriver-122",
+ "win32-chromedriver-123",
+ "win32-chromedriver-124",
],
"windows.*-64.*": [
- "win32-chromedriver-120",
- "win32-chromedriver-121",
"win32-chromedriver-122",
+ "win32-chromedriver-123",
+ "win64-chromedriver-124",
],
}
@@ -419,11 +451,7 @@ def setup_browsertime(config, tasks):
# Only add the chromedriver fetches when chrome is running
for platform in cd_fetches:
fs["by-test-platform"][platform].extend(cd_fetches[platform])
- if (
- "--app=chromium" in extra_options
- or "--app=custom-car" in extra_options
- or "--app=cstm-car-m" in extra_options
- ):
+ if "--app=custom-car" in extra_options or "--app=cstm-car-m" in extra_options:
for platform in chromium_fetches:
fs["by-test-platform"][platform].extend(chromium_fetches[platform])
@@ -791,7 +819,7 @@ test_setting_description_schema = Schema(
},
Optional("device"): str,
Optional("display"): "wayland",
- Optional("machine"): Any("ref-hw-2017", "hw-ref"),
+ Optional("machine"): "hw-ref",
},
"build": {
Required("type"): Any("opt", "debug", "debug-isolated-process"),
@@ -852,7 +880,6 @@ def set_test_setting(config, tasks):
# TODO Rename these so they don't have a dash.
dash_attrs = [
"clang-trunk",
- "ref-hw-2017",
"hw-ref",
]
dash_token = "%D%"
@@ -908,9 +935,6 @@ def set_test_setting(config, tasks):
if parts[0].isdigit():
os_build = parts.pop(0)
- if parts and parts[0] == "ref-hw-2017":
- machine = parts.pop(0)
-
if parts and parts[0] == "hw-ref":
machine = parts.pop(0)
@@ -1102,6 +1126,7 @@ def set_schedules_components(config, tasks):
schedules.add(category)
schedules.add(platform_family(task["build-platform"]))
+ schedules.add("firefox")
task["schedules-component"] = sorted(schedules)
yield task
diff --git a/taskcluster/gecko_taskgraph/transforms/test/raptor.py b/taskcluster/gecko_taskgraph/transforms/test/raptor.py
index 18e21e6a1e..ca35749037 100644
--- a/taskcluster/gecko_taskgraph/transforms/test/raptor.py
+++ b/taskcluster/gecko_taskgraph/transforms/test/raptor.py
@@ -76,7 +76,6 @@ def split_apps(config, tests):
app_symbols = {
"chrome": "ChR",
"chrome-m": "ChR",
- "chromium": "Cr",
"fenix": "fenix",
"refbrow": "refbrow",
"safari": "Saf",
diff --git a/taskcluster/gecko_taskgraph/transforms/test/worker.py b/taskcluster/gecko_taskgraph/transforms/test/worker.py
index 873347459c..51b12de51d 100644
--- a/taskcluster/gecko_taskgraph/transforms/test/worker.py
+++ b/taskcluster/gecko_taskgraph/transforms/test/worker.py
@@ -7,8 +7,10 @@ from taskgraph.transforms.base import TransformSequence
# default worker types keyed by instance-size
LINUX_WORKER_TYPES = {
"large": "t-linux-large",
+ "large-noscratch": "t-linux-large-noscratch",
"xlarge": "t-linux-xlarge",
- "default": "t-linux-large",
+ "xlarge-noscratch": "t-linux-xlarge-noscratch",
+ "default": "t-linux-large-noscratch",
}
# windows worker types keyed by test-platform and virtualization
@@ -23,11 +25,6 @@ WINDOWS_WORKER_TYPES = {
"virtual-with-gpu": "t-win10-64-gpu-s",
"hardware": "t-win10-64-1803-hw",
},
- "windows10-64-ref-hw-2017": {
- "virtual": "t-win10-64",
- "virtual-with-gpu": "t-win10-64-gpu-s",
- "hardware": "t-win10-64-ref-hw",
- },
"windows11-64-2009-hw-ref-shippable": {
"virtual": "win11-64-2009-hw-ref",
"virtual-with-gpu": "win11-64-2009-hw-ref",
@@ -130,12 +127,8 @@ def set_worker_type(config, tasks):
elif test_platform.startswith("win"):
# figure out what platform the job needs to run on
if task["virtualization"] == "hardware":
- # some jobs like talos and reftest run on real h/w - those are all win10
- if test_platform.startswith("windows10-64-ref-hw-2017"):
- win_worker_type_platform = WINDOWS_WORKER_TYPES[
- "windows10-64-ref-hw-2017"
- ]
- elif test_platform.startswith("windows11-64-2009-hw-ref"):
+ # some jobs like talos and reftest run on real h/w
+ if test_platform.startswith("windows11-64-2009-hw-ref"):
win_worker_type_platform = WINDOWS_WORKER_TYPES[
"windows11-64-2009-hw-ref"
]
diff --git a/taskcluster/gecko_taskgraph/transforms/test_apk.py b/taskcluster/gecko_taskgraph/transforms/test_apk.py
new file mode 100644
index 0000000000..b00657b91e
--- /dev/null
+++ b/taskcluster/gecko_taskgraph/transforms/test_apk.py
@@ -0,0 +1,33 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Apply some defaults and minor modifications to the jobs defined in the test
+kinds.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.util.schema import resolve_keyed_by
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def resolve_keys(config, tasks):
+ for task in tasks:
+ for key in (
+ "routes",
+ "scopes",
+ "extra.notify",
+ ):
+ resolve_keyed_by(
+ task,
+ key,
+ item_name=task["name"],
+ **{
+ "level": config.params["level"],
+ }
+ )
+ yield task
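
The new transform only resolves keyed-by values: any routes, scopes, or extra.notify entry written as by-level in the kind is collapsed to a concrete value using the decision task's level parameter. A small sketch of what that resolution does to a made-up task (the task name, route value, and level are illustrative):

    from taskgraph.util.schema import resolve_keyed_by

    task = {
        "name": "test-apk-fenix-debug",  # illustrative task name
        "routes": {"by-level": {"3": ["notify.email.perftest@example.invalid"], "default": []}},
    }
    resolve_keyed_by(task, "routes", item_name=task["name"], level="3")
    # task["routes"] is now ["notify.email.perftest@example.invalid"]
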
diff --git a/taskcluster/gecko_taskgraph/transforms/update_verify_config.py b/taskcluster/gecko_taskgraph/transforms/update_verify_config.py
index 2d1cd40877..4e516f173c 100644
--- a/taskcluster/gecko_taskgraph/transforms/update_verify_config.py
+++ b/taskcluster/gecko_taskgraph/transforms/update_verify_config.py
@@ -101,6 +101,8 @@ def add_command(config, tasks):
get_branch_rev(config),
"--output-file",
"update-verify.cfg",
+ "--local-repo",
+ ".",
]
repo_path = urlsplit(get_branch_repo(config)).path.lstrip("/")
diff --git a/taskcluster/gecko_taskgraph/util/backstop.py b/taskcluster/gecko_taskgraph/util/backstop.py
index 26c9a4fb91..18c9166083 100644
--- a/taskcluster/gecko_taskgraph/util/backstop.py
+++ b/taskcluster/gecko_taskgraph/util/backstop.py
@@ -37,22 +37,17 @@ def is_backstop(
return True
project = params["project"]
- pushid = int(params["pushlog_id"])
- pushdate = int(params["pushdate"])
-
if project in TRY_PROJECTS:
return False
if project not in integration_projects:
return True
- # On every Nth push, want to run all tasks.
- if pushid % push_interval == 0:
- return True
-
- if time_interval <= 0:
+    # This push was explicitly set to run nothing (e.g. via DONTBUILD), so
+ # shouldn't be a backstop candidate.
+ if params["target_tasks_method"] == "nothing":
return False
- # We also want to ensure we run all tasks at least once per N minutes.
+ # Find the last backstop to compute push and time intervals.
subs = {"trust-domain": trust_domain, "project": project}
index = BACKSTOP_INDEX.format(**subs)
@@ -67,9 +62,7 @@ def is_backstop(
return True
try:
- last_pushdate = get_artifact(last_backstop_id, "public/parameters.yml")[
- "pushdate"
- ]
+ last_params = get_artifact(last_backstop_id, "public/parameters.yml")
except HTTPError as e:
# If the last backstop decision task exists in the index, but
# parameters.yml isn't available yet, it means the decision task is
@@ -79,6 +72,15 @@ def is_backstop(
return False
raise
- if (pushdate - last_pushdate) / 60 >= time_interval:
+    # On every Nth push, we want to run all tasks.
+ if int(params["pushlog_id"]) - int(last_params["pushlog_id"]) >= push_interval:
+ return True
+
+ if time_interval <= 0:
+ return False
+
+ # We also want to ensure we run all tasks at least once per N minutes.
+ if (params["pushdate"] - last_params["pushdate"]) / 60 >= time_interval:
return True
+
return False
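
The backstop check now measures both intervals relative to the previous backstop found in the index: the push interval compares pushlog ids and the time interval compares pushdates, both taken from that push's parameters.yml. Condensed into one function for illustration (the interval values come from the caller, as in is_backstop):

    def intervals_exceeded(params, last_params, push_interval, time_interval):
        # Every Nth push since the last backstop runs everything.
        if int(params["pushlog_id"]) - int(last_params["pushlog_id"]) >= push_interval:
            return True
        if time_interval <= 0:
            return False
        # Otherwise make sure everything runs at least once per `time_interval` minutes.
        return (params["pushdate"] - last_params["pushdate"]) / 60 >= time_interval
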
diff --git a/taskcluster/gecko_taskgraph/util/chunking.py b/taskcluster/gecko_taskgraph/util/chunking.py
index a0ed56de78..a8ae4d8b6b 100644
--- a/taskcluster/gecko_taskgraph/util/chunking.py
+++ b/taskcluster/gecko_taskgraph/util/chunking.py
@@ -101,8 +101,8 @@ def guess_mozinfo_from_task(task, repo=""):
("linux", "1804"): "18.04",
("macosx", "1015"): "10.15",
("macosx", "1100"): "11.00",
- ("windows", "7"): "6.1",
("windows", "10"): "10.0",
+ ("windows", "11"): "11.0",
}
for (name, old_ver), new_ver in os_versions.items():
if p_os["name"] == name and p_os["version"] == old_ver:
diff --git a/taskcluster/gecko_taskgraph/util/hg.py b/taskcluster/gecko_taskgraph/util/hg.py
index 18a92fbd0d..17d341cdc0 100644
--- a/taskcluster/gecko_taskgraph/util/hg.py
+++ b/taskcluster/gecko_taskgraph/util/hg.py
@@ -101,7 +101,7 @@ def get_json_automationrelevance(repository, revision):
logger.debug("Querying version control for metadata: %s", url)
def get_automationrelevance():
- response = requests.get(url, timeout=30)
+ response = requests.get(url, timeout=60)
return response.json()
return retry(get_automationrelevance, attempts=10, sleeptime=10)
diff --git a/taskcluster/gecko_taskgraph/util/perfile.py b/taskcluster/gecko_taskgraph/util/perfile.py
index 4e82d87dad..4c18ca98be 100644
--- a/taskcluster/gecko_taskgraph/util/perfile.py
+++ b/taskcluster/gecko_taskgraph/util/perfile.py
@@ -12,15 +12,12 @@ import taskgraph
from mozbuild.util import memoize
from mozpack.path import match as mozpackmatch
-from gecko_taskgraph import files_changed
-
-from .. import GECKO
-
logger = logging.getLogger(__name__)
@memoize
-def perfile_number_of_chunks(is_try, try_task_config, head_repository, head_rev, type):
+def perfile_number_of_chunks(is_try, try_task_config, files_changed, type):
+ changed_files = set(files_changed)
if taskgraph.fast and not is_try:
# When iterating on taskgraph changes, the exact number of chunks that
# test-verify runs usually isn't important, so skip it when going fast.
@@ -55,17 +52,11 @@ def perfile_number_of_chunks(is_try, try_task_config, head_repository, head_rev,
# Returning 0 means no tests to run, this captures non test-verify tasks
return 1
- changed_files = set()
if try_task_config:
suite_to_paths = json.loads(try_task_config)
specified_files = itertools.chain.from_iterable(suite_to_paths.values())
changed_files.update(specified_files)
- if is_try:
- changed_files.update(files_changed.get_locally_changed_files(GECKO))
- else:
- changed_files.update(files_changed.get_changed_files(head_repository, head_rev))
-
test_count = 0
for pattern in file_patterns:
for path in changed_files: