From def92d1b8e9d373e2f6f27c366d578d97d8960c6 Mon Sep 17 00:00:00 2001
From: Daniel Baumann
Date: Wed, 15 May 2024 05:34:50 +0200
Subject: Merging upstream version 126.0.

Signed-off-by: Daniel Baumann
---
 taskcluster/android_taskgraph/util/__init__.py     |   0
 taskcluster/android_taskgraph/util/group_by.py     |  51 ++++
 taskcluster/android_taskgraph/util/scriptworker.py | 283 +++++++++++++++++++++
 3 files changed, 334 insertions(+)
 create mode 100644 taskcluster/android_taskgraph/util/__init__.py
 create mode 100644 taskcluster/android_taskgraph/util/group_by.py
 create mode 100644 taskcluster/android_taskgraph/util/scriptworker.py

(limited to 'taskcluster/android_taskgraph/util')

diff --git a/taskcluster/android_taskgraph/util/__init__.py b/taskcluster/android_taskgraph/util/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/taskcluster/android_taskgraph/util/group_by.py b/taskcluster/android_taskgraph/util/group_by.py
new file mode 100644
index 0000000000..3c3b10980c
--- /dev/null
+++ b/taskcluster/android_taskgraph/util/group_by.py
@@ -0,0 +1,51 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from taskgraph.util.dependencies import group_by
+
+
+@group_by("component")
+def component_grouping(config, tasks):
+    groups = {}
+    for task in tasks:
+        component = task.attributes.get("component")
+        if component == "all":
+            continue
+
+        build_type = task.attributes.get("build-type")
+        groups.setdefault((component, build_type), []).append(task)
+
+    tasks_for_all_components = [
+        task
+        for task in tasks
+        if task.attributes.get("component") == "all"
+        # We just want to depend on the task that waits on all chunks. This way
+        # we have a single dependency for that kind
+        and task.attributes.get("is_final_chunked_task", True)
+    ]
+    for (_, build_type), tasks in groups.items():
+        tasks.extend(
+            [
+                task
+                for task in tasks_for_all_components
+                if task.attributes.get("build-type") == build_type
+            ]
+        )
+
+    return groups.values()
+
+
+@group_by("build-type")
+def build_type_grouping(config, tasks):
+    groups = {}
+    for task in tasks:
+        # We just want to depend on the task that waits on all chunks. This way
+        # we have a single dependency for that kind
+        if not task.attributes.get("is_final_chunked_task", True):
+            continue
+
+        build_type = task.attributes.get("build-type")
+        groups.setdefault(build_type, []).append(task)
+
+    return groups.values()
diff --git a/taskcluster/android_taskgraph/util/scriptworker.py b/taskcluster/android_taskgraph/util/scriptworker.py
new file mode 100644
index 0000000000..e438f19d26
--- /dev/null
+++ b/taskcluster/android_taskgraph/util/scriptworker.py
@@ -0,0 +1,283 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import itertools
+import os
+from copy import deepcopy
+from datetime import datetime
+
+import jsone
+from taskgraph.util.memoize import memoize
+from taskgraph.util.schema import resolve_keyed_by
+from taskgraph.util.taskcluster import get_artifact_prefix
+from taskgraph.util.yaml import load_yaml
+
+cached_load_yaml = memoize(load_yaml)
+
+
+def generate_beetmover_upstream_artifacts(
+    config, job, platform, locale=None, dependencies=None, **kwargs
+):
+    """Generate the upstream artifacts for beetmover, using the artifact map.
+
+    Currently only applies to beetmover tasks.
+
+    Args:
+        job (dict): The current job being generated
+        dependencies (list): A list of the job's dependency labels.
+        platform (str): The current build platform
+        locale (str): The current locale being beetmoved.
+
+    Returns:
+        list: A list of dictionaries conforming to the upstream_artifacts spec.
+    """
+    base_artifact_prefix = get_artifact_prefix(job)
+    resolve_keyed_by(
+        job,
+        "attributes.artifact_map",
+        "artifact map",
+        **{
+            "release-type": config.params["release_type"],
+            "platform": platform,
+        },
+    )
+    map_config = deepcopy(cached_load_yaml(job["attributes"]["artifact_map"]))
+    upstream_artifacts = list()
+
+    if not locale:
+        locales = map_config["default_locales"]
+    elif isinstance(locale, list):
+        locales = locale
+    else:
+        locales = [locale]
+
+    if not dependencies:
+        if job.get("dependencies"):
+            dependencies = job["dependencies"].keys()
+        elif job.get("primary-dependency"):
+            dependencies = [job["primary-dependency"].kind]
+        else:
+            raise Exception("Unsupported type of dependency. Got job: {}".format(job))
+
+    for locale, dep in itertools.product(locales, dependencies):
+        paths = list()
+
+        for filename in map_config["mapping"]:
+            if dep not in map_config["mapping"][filename]["from"]:
+                continue
+            if locale != "multi" and not map_config["mapping"][filename]["all_locales"]:
+                continue
+            if (
+                "only_for_platforms" in map_config["mapping"][filename]
+                and platform
+                not in map_config["mapping"][filename]["only_for_platforms"]
+            ):
+                continue
+            if (
+                "not_for_platforms" in map_config["mapping"][filename]
+                and platform in map_config["mapping"][filename]["not_for_platforms"]
+            ):
+                continue
+            if "partials_only" in map_config["mapping"][filename]:
+                continue
+            # The next time we look at this file it might be a different locale.
+            file_config = deepcopy(map_config["mapping"][filename])
+            resolve_keyed_by(
+                file_config,
+                "source_path_modifier",
+                "source path modifier",
+                locale=locale,
+            )
+
+            kwargs["locale"] = locale
+
+            paths.append(
+                os.path.join(
+                    base_artifact_prefix,
+                    jsone.render(file_config["source_path_modifier"], kwargs),
+                    jsone.render(filename, kwargs),
+                )
+            )
+
+        if job.get("dependencies") and getattr(
+            job["dependencies"][dep], "release_artifacts", None
+        ):
+            paths = [
+                path
+                for path in paths
+                if path in job["dependencies"][dep].release_artifacts
+            ]
+
+        if not paths:
+            continue
+
+        upstream_artifacts.append(
+            {
+                "taskId": {"task-reference": "<{}>".format(dep)},
+                "taskType": map_config["tasktype_map"].get(dep),
+                "paths": sorted(paths),
+                "locale": locale,
+            }
+        )
+
+    upstream_artifacts.sort(key=lambda u: u["paths"])
+    return upstream_artifacts
+
+
+def generate_beetmover_artifact_map(config, job, **kwargs):
+    """Generate the beetmover artifact map.
+
+    Currently only applies to beetmover tasks.
+
+    Args:
+        config (): Current taskgraph configuration.
+        job (dict): The current job being generated
+    Common kwargs:
+        platform (str): The current build platform
+        locale (str): The current locale being beetmoved.
+
+    Returns:
+        list: A list of dictionaries containing source->destination
+            maps for beetmover.
+    """
+    platform = kwargs.get("platform", "")
+    resolve_keyed_by(
+        job,
+        "attributes.artifact_map",
+        job["label"],
+        **{
+            "release-type": config.params["release_type"],
+            "platform": platform,
+        },
+    )
+    map_config = deepcopy(cached_load_yaml(job["attributes"]["artifact_map"]))
+    base_artifact_prefix = map_config.get(
+        "base_artifact_prefix", get_artifact_prefix(job)
+    )
+
+    artifacts = list()
+
+    dependencies = job["dependencies"].keys()
+
+    if kwargs.get("locale"):
+        if isinstance(kwargs["locale"], list):
+            locales = kwargs["locale"]
+        else:
+            locales = [kwargs["locale"]]
+    else:
+        locales = map_config["default_locales"]
+
+    resolve_keyed_by(
+        map_config,
+        "s3_bucket_paths",
+        job["label"],
+        **{"build-type": job["attributes"]["build-type"]},
+    )
+
+    for locale, dep in sorted(itertools.product(locales, dependencies)):
+        paths = dict()
+        for filename in map_config["mapping"]:
+            # Relevancy checks
+            if dep not in map_config["mapping"][filename]["from"]:
+                # We don't get this file from this dependency.
+                continue
+            if locale != "multi" and not map_config["mapping"][filename]["all_locales"]:
+                # This locale either doesn't produce or shouldn't upload this file.
+                continue
+            if (
+                "only_for_platforms" in map_config["mapping"][filename]
+                and platform
+                not in map_config["mapping"][filename]["only_for_platforms"]
+            ):
+                # This platform either doesn't produce or shouldn't upload this file.
+                continue
+            if (
+                "not_for_platforms" in map_config["mapping"][filename]
+                and platform in map_config["mapping"][filename]["not_for_platforms"]
+            ):
+                # This platform either doesn't produce or shouldn't upload this file.
+                continue
+            if "partials_only" in map_config["mapping"][filename]:
+                continue
+
+            # deepcopy because the next time we look at this file the locale will differ.
+            file_config = deepcopy(map_config["mapping"][filename])
+
+            for field in [
+                "destinations",
+                "locale_prefix",
+                "source_path_modifier",
+                "update_balrog_manifest",
+                "pretty_name",
+                "checksums_path",
+            ]:
+                resolve_keyed_by(file_config, field, job["label"], locale=locale)
+
+            # This format string should ideally be in the configuration file,
+            # but this would mean keeping variable names in sync between code + config.
+            destinations = [
+                "{s3_bucket_path}/{dest_path}/{filename}".format(
+                    s3_bucket_path=bucket_path,
+                    dest_path=dest_path,
+                    filename=file_config.get("pretty_name", filename),
+                )
+                for dest_path, bucket_path in itertools.product(
+                    file_config["destinations"], map_config["s3_bucket_paths"]
+                )
+            ]
+            # Creating map entries
+            # Key must be artifact path, to avoid trampling duplicates, such
+            # as public/build/target.apk and public/build/multi/target.apk
+            key = os.path.join(
+                base_artifact_prefix,
+                file_config["source_path_modifier"],
+                filename,
+            )
+
+            paths[key] = {
+                "destinations": destinations,
+            }
+            if file_config.get("checksums_path"):
+                paths[key]["checksums_path"] = file_config["checksums_path"]
+
+            # optional flag: balrog manifest
+            if file_config.get("update_balrog_manifest"):
+                paths[key]["update_balrog_manifest"] = True
+                if file_config.get("balrog_format"):
+                    paths[key]["balrog_format"] = file_config["balrog_format"]
+
+        if not paths:
+            # No files for this dependency/locale combination.
+ continue + + # Render all variables for the artifact map + platforms = deepcopy(map_config.get("platform_names", {})) + if platform: + for key in platforms.keys(): + resolve_keyed_by(platforms, key, job["label"], platform=platform) + + version = config.params["version"] + upload_date = datetime.fromtimestamp(config.params["build_date"]) + + if "nightly" in job["attributes"].get("build-type", ""): + folder_prefix = upload_date.strftime("%Y/%m/%Y-%m-%d-%H-%M-%S-") + # TODO: Remove this when version.txt has versioning fixed + version = version.split("-")[0] + else: + folder_prefix = f"{version}/android/" + + kwargs.update( + {"locale": locale, "version": version, "folder_prefix": folder_prefix} + ) + kwargs.update(**platforms) + paths = jsone.render(paths, kwargs) + artifacts.append( + { + "taskId": {"task-reference": "<{}>".format(dep)}, + "locale": locale, + "paths": paths, + } + ) + + return artifacts -- cgit v1.2.3
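Note (not part of the patch above): the sketch below illustrates how the component grouping introduced in group_by.py combines per-component tasks with the final chunked "all"-components task. It uses plain dictionaries in place of taskgraph Task objects, and the sample task labels and component names are invented; only the attribute names ("component", "build-type", "is_final_chunked_task") come from the patched code.

# Standalone illustration of the grouping logic in group_by.py.
# Plain dicts stand in for taskgraph Task objects; the task data is made up.

def component_grouping_sketch(tasks):
    groups = {}
    for task in tasks:
        component = task["attributes"].get("component")
        if component == "all":
            continue
        build_type = task["attributes"].get("build-type")
        groups.setdefault((component, build_type), []).append(task)

    # Tasks whose component is "all" are folded into every group that shares
    # their build type; only the final chunked task is kept, so each group
    # gains exactly one extra dependency for that kind.
    all_component_tasks = [
        task
        for task in tasks
        if task["attributes"].get("component") == "all"
        and task["attributes"].get("is_final_chunked_task", True)
    ]
    for (_, build_type), grouped in groups.items():
        grouped.extend(
            task
            for task in all_component_tasks
            if task["attributes"].get("build-type") == build_type
        )
    return groups.values()


if __name__ == "__main__":
    sample_tasks = [
        {"label": "build-browser-engine", "attributes": {"component": "browser-engine", "build-type": "release"}},
        {"label": "build-feature-search", "attributes": {"component": "feature-search", "build-type": "release"}},
        {"label": "build-all-final-chunk", "attributes": {"component": "all", "build-type": "release", "is_final_chunked_task": True}},
    ]
    for group in component_grouping_sketch(sample_tasks):
        print([task["label"] for task in group])
    # Each printed group holds one component task plus the final
    # "all"-component task for the matching build type:
    #   ['build-browser-engine', 'build-all-final-chunk']
    #   ['build-feature-search', 'build-all-final-chunk']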