# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
Transform the partner repackage task into an actual task description.
"""


import copy

from taskgraph.transforms.base import TransformSequence
from taskgraph.util.schema import optionally_keyed_by, resolve_keyed_by
from taskgraph.util.taskcluster import get_artifact_prefix
from voluptuous import Optional, Required

from gecko_taskgraph.loader.single_dep import schema
from gecko_taskgraph.transforms.repackage import (
    PACKAGE_FORMATS as PACKAGE_FORMATS_VANILLA,
)
from gecko_taskgraph.transforms.task import task_description_schema
from gecko_taskgraph.util.attributes import copy_attributes_from_dependent_job
from gecko_taskgraph.util.partners import get_partner_config_by_kind
from gecko_taskgraph.util.platforms import archive_format, executable_extension
from gecko_taskgraph.util.workertypes import worker_type_implementation


def _by_platform(arg):
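    """Wrap a schema value so kind.yml can optionally key it by build-platform."""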
    return optionally_keyed_by("build-platform", arg)


# When repacking the stub installer we need to pass a zip file and a package name to the
# repackage task. The vanilla stub repackage doesn't need these, but partner repacks handle
# the stub the same way they handle the full installer.
PACKAGE_FORMATS = copy.deepcopy(PACKAGE_FORMATS_VANILLA)
PACKAGE_FORMATS["installer-stub"]["inputs"]["package"] = "target-stub{archive_format}"
PACKAGE_FORMATS["installer-stub"]["args"].extend(["--package-name", "{package-name}"])

packaging_description_schema = schema.extend(
    {
        # unique label to describe this repackaging task
        Optional("label"): str,
        # Routes specific to this task, if defined
        Optional("routes"): [str],
        # passed through directly to the job description
        Optional("extra"): task_description_schema["extra"],
        # Shipping product and phase
        Optional("shipping-product"): task_description_schema["shipping-product"],
        Optional("shipping-phase"): task_description_schema["shipping-phase"],
        Required("package-formats"): _by_platform([str]),
        # All partner repackage jobs use mozharness
        Required("mozharness"): {
            # Config files passed to the mozharness script
            Required("config"): _by_platform([str]),
            # Additional paths to look for mozharness configs in. These should be
            # relative to the base of the source checkout
            Optional("config-paths"): [str],
            # if true, perform a checkout of a comm-central based branch inside the
            # gecko checkout
            Optional("comm-checkout"): bool,
        },
        # Override the default priority for the project
        Optional("priority"): task_description_schema["priority"],
    }
)
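
# A minimal sketch of a kind.yml fragment feeding this schema. Everything below is
# illustrative only; the platform patterns, format names, and config paths are assumptions,
# not taken from an actual kind:
#
#   package-formats:
#       by-build-platform:
#           macosx.*: ["dmg"]
#           win.*: ["installer"]
#   mozharness:
#       config:
#           by-build-platform:
#               macosx.*: ["repackage/osx_partner.py"]
#               win.*: ["repackage/win_partner.py"]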

transforms = TransformSequence()
transforms.add_validate(packaging_description_schema)


@transforms.add
def copy_in_useful_magic(config, jobs):
    """Copy attributes from upstream task to be used for keyed configuration."""
    for job in jobs:
        dep = job["primary-dependency"]
        job["build-platform"] = dep.attributes.get("build_platform")
        yield job


@transforms.add
def handle_keyed_by(config, jobs):
    """Resolve fields that can be keyed by platform, etc."""
    fields = [
        "mozharness.config",
        "package-formats",
    ]
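    # Illustrative only: a kind.yml value such as
    #   package-formats: {"by-build-platform": {"win.*": ["installer"], "default": ["dmg"]}}
    # is resolved here into a plain list, e.g. ["installer"] for a win64 build.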
    for job in jobs:
        job = copy.deepcopy(job)  # deep copy: don't mutate values shared between jobs
        for field in fields:
            resolve_keyed_by(item=job, field=field, item_name="?")
        yield job


@transforms.add
def make_repackage_description(config, jobs):
    for job in jobs:
        dep_job = job["primary-dependency"]

        label = job.get("label", dep_job.label.replace("signing-", "repackage-"))
        job["label"] = label

        yield job


@transforms.add
def make_job_description(config, jobs):
    for job in jobs:
        dep_job = job["primary-dependency"]
        attributes = copy_attributes_from_dependent_job(dep_job)
        build_platform = attributes["build_platform"]

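        # Windows partner repackaging consumes the repack task and macOS the signing task;
        # skip the upstream kind that isn't used on this platform.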
        if job["build-platform"].startswith("win"):
            if dep_job.kind.endswith("signing"):
                continue
        if job["build-platform"].startswith("macosx"):
            if dep_job.kind.endswith("repack"):
                continue
        dependencies = {dep_job.attributes.get("kind"): dep_job.label}
        dependencies.update(dep_job.dependencies)

        signing_task = None
        for dependency in dependencies:
            if build_platform.startswith("macosx") and dependency.endswith("signing"):
                signing_task = dependency
            elif build_platform.startswith("win") and dependency.endswith("repack"):
                signing_task = dependency

        attributes["repackage_type"] = "repackage"

        repack_id = job["extra"]["repack_id"]

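        # repack_id has three "/"-separated parts, e.g. "<partner>/<sub-partner>/<locale>";
        # the first two select the sub-partner's config, the locale isn't needed here.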
        partner_config = get_partner_config_by_kind(config, config.kind)
        partner, subpartner, _ = repack_id.split("/")
        repack_stub_installer = partner_config[partner][subpartner].get(
            "repack_stub_installer"
        )
        if build_platform.startswith("win32") and repack_stub_installer:
            job["package-formats"].append("installer-stub")

        repackage_config = []
        for fmt in job.get("package-formats"):
            command = copy.deepcopy(PACKAGE_FORMATS[fmt])
            substs = {
                "archive_format": archive_format(build_platform),
                "executable_extension": executable_extension(build_platform),
            }
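            # e.g. for a win64 build "target-stub{archive_format}" becomes "target-stub.zip"
            # and "{executable_extension}" would become ".exe".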
            command["inputs"] = {
                name: filename.format(**substs)
                for name, filename in command["inputs"].items()
            }
            repackage_config.append(command)

        run = job.get("mozharness", {})
        run.update(
            {
                "using": "mozharness",
                "script": "mozharness/scripts/repackage.py",
                "job-script": "taskcluster/scripts/builder/repackage.sh",
                "actions": ["setup", "repackage"],
                "extra-config": {
                    "repackage_config": repackage_config,
                },
            }
        )

        worker = {
            "chain-of-trust": True,
            "max-run-time": 3600,
            "taskcluster-proxy": True if get_artifact_prefix(dep_job) else False,
            "env": {
                "REPACK_ID": repack_id,
            },
            # Don't add generic artifact directory.
            "skip-artifacts": True,
        }

        worker_type = "b-linux-gcp"
        worker["docker-image"] = {"in-tree": "debian11-amd64-build"}

        worker["artifacts"] = _generate_task_output_files(
            dep_job,
            worker_type_implementation(config.graph_config, config.params, worker_type),
            repackage_config,
            partner=repack_id,
        )

        description = (
            f"Repackaging for repack_id '{repack_id}' for build "
            f"'{build_platform}/{attributes.get('build_type')}'"
        )

        task = {
            "label": job["label"],
            "description": description,
            "worker-type": worker_type,
            "dependencies": dependencies,
            "attributes": attributes,
            "scopes": ["queue:get-artifact:releng/partner/*"],
            "run-on-projects": dep_job.attributes.get("run_on_projects"),
            "routes": job.get("routes", []),
            "extra": job.get("extra", {}),
            "worker": worker,
            "run": run,
            "fetches": _generate_download_config(
                dep_job,
                build_platform,
                signing_task,
                partner=repack_id,
                project=config.params["project"],
                repack_stub_installer=repack_stub_installer,
            ),
        }

        # we may have reduced the priority for partner jobs, otherwise task.py will set it
        if job.get("priority"):
            task["priority"] = job["priority"]
        if build_platform.startswith("macosx"):
            task.setdefault("fetches", {}).setdefault("toolchain", []).extend(
                [
                    "linux64-libdmg",
                    "linux64-hfsplus",
                    "linux64-node",
                ]
            )
        yield task


def _generate_download_config(
    task,
    build_platform,
    signing_task,
    partner=None,
    project=None,
    repack_stub_installer=False,
):
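    """Build the `fetches` entry for the upstream signing/repack task.

    Artifacts live under a partner-specific prefix (the repack_id, e.g.
    "<partner>/<sub-partner>/<locale>/target.zip" on Windows) and are downloaded
    without being extracted.
    """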
    locale_path = f"{partner}/" if partner else ""

    if build_platform.startswith("macosx"):
        return {
            signing_task: [
                {
                    "artifact": f"{locale_path}target.tar.gz",
                    "extract": False,
                },
            ],
        }
    if build_platform.startswith("win"):
        download_config = [
            {
                "artifact": f"{locale_path}target.zip",
                "extract": False,
            },
            f"{locale_path}setup.exe",
        ]
        if build_platform.startswith("win32") and repack_stub_installer:
            download_config.extend(
                [
                    {
                        "artifact": f"{locale_path}target-stub.zip",
                        "extract": False,
                    },
                    f"{locale_path}setup-stub.exe",
                ]
            )
        return {signing_task: download_config}

    raise NotImplementedError(f'Unsupported build_platform: "{build_platform}"')


def _generate_task_output_files(task, worker_implementation, repackage_config, partner):
    """We carefully generate an explicit list here, but there's an artifacts directory
    too, courtesy of generic_worker_add_artifacts() (windows) or docker_worker_add_artifacts().
    Any errors here are likely masked by that.
    """
    partner_output_path = f"{partner}/"
    artifact_prefix = get_artifact_prefix(task)

    if worker_implementation == ("docker-worker", "linux"):
        local_prefix = "/builds/worker/workspace/"
    elif worker_implementation == ("generic-worker", "windows"):
        local_prefix = "workspace/"
    else:
        raise NotImplementedError(
            f'Unsupported worker implementation: "{worker_implementation}"'
        )

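    # One entry per requested package format: the filename comes from that format's "output"
    # field in PACKAGE_FORMATS, published under the task's artifact prefix and read from the
    # worker-local outputs/ directory.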
    output_files = []
    for config in repackage_config:
        output_files.append(
            {
                "type": "file",
                "path": "{}outputs/{}{}".format(
                    local_prefix, partner_output_path, config["output"]
                ),
                "name": "{}/{}{}".format(
                    artifact_prefix, partner_output_path, config["output"]
                ),
            }
        )
    return output_files