# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
Source-test jobs can run on multiple platforms. These transforms allow jobs
to declare either a single `platform` or a list of platforms, and set the
appropriate treeherder configuration and attributes for each platform.
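
For example, a task that declares `platform: ["linux1804-64/opt",
"windows11-64/opt"]` (illustrative platform names) is split into two tasks,
one per platform, each with the platform appended to its name and used as
its default treeherder platform.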
"""

import copy
import os

import taskgraph
from taskgraph.transforms.base import TransformSequence
from taskgraph.util.attributes import keymatch
from taskgraph.util.schema import Schema, optionally_keyed_by, resolve_keyed_by
from taskgraph.util.treeherder import join_symbol, split_symbol
from voluptuous import Any, Extra, Optional, Required

from gecko_taskgraph.transforms.job import job_description_schema
from gecko_taskgraph.util.hg import get_json_automationrelevance

source_test_description_schema = Schema(
{
# most fields are passed directly through as job fields, and are not
# repeated here
Extra: object,
# The platform on which this task runs. This will be used to set up attributes
# (for try selection) and treeherder metadata (for display). If given as a list,
# the job will be "split" into multiple tasks, one with each platform.
Required("platform"): Any(str, [str]),
# Build labels required for the task. If this key is provided it must
# contain a build label for the task platform.
# The task will then depend on a build task, and the installer url will be
# saved to the GECKO_INSTALLER_URL environment variable.
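        # For example (hypothetical build label):
        #     require-build:
        #         linux.*: build-linux64/opt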
Optional("require-build"): optionally_keyed_by("project", {str: str}),
# These fields can be keyed by "platform", and are otherwise identical to
# job descriptions.
Required("worker-type"): optionally_keyed_by(
"platform", job_description_schema["worker-type"]
),
Required("worker"): optionally_keyed_by(
"platform", job_description_schema["worker"]
),
Optional("python-version"): [int],
Optional("dependencies"): {
k: optionally_keyed_by("platform", v)
for k, v in job_description_schema["dependencies"].items()
},
# A list of artifacts to install from 'fetch' tasks.
Optional("fetches"): {
str: optionally_keyed_by(
"platform", job_description_schema["fetches"][str]
),
},
}
)

transforms = TransformSequence()
transforms.add_validate(source_test_description_schema)


@transforms.add
def set_job_name(config, jobs):
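    """
    Prefix the job name with the (extension-stripped) name of the file the
    job was defined in, unless that file is kind.yml.
    """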
for job in jobs:
if "job-from" in job and job["job-from"] != "kind.yml":
from_name = os.path.splitext(job["job-from"])[0]
job["name"] = "{}-{}".format(from_name, job["name"])
yield job


@transforms.add
def expand_platforms(config, jobs):
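    """
    Split a job that lists multiple platforms into one job per platform,
    appending the platform to each job's name (or label). Jobs with a single
    string `platform` are passed through unchanged.
    """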
for job in jobs:
if isinstance(job["platform"], str):
yield job
continue
for platform in job["platform"]:
pjob = copy.deepcopy(job)
pjob["platform"] = platform
if "name" in pjob:
pjob["name"] = "{}-{}".format(pjob["name"], platform)
else:
pjob["label"] = "{}-{}".format(pjob["label"], platform)
yield pjob


@transforms.add
def split_python(config, jobs):
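    """
    Split a job that lists `python-version` into one job per version; the
    version becomes a suffix on the name/label and the treeherder symbol
    group (e.g. `py3`), and is passed along to the job's `run` section.
    """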
for job in jobs:
key = "python-version"
versions = job.pop(key, [])
if not versions:
yield job
continue
for version in versions:
group = f"py{version}"
pyjob = copy.deepcopy(job)
if "name" in pyjob:
pyjob["name"] += f"-{group}"
else:
pyjob["label"] += f"-{group}"
symbol = split_symbol(pyjob["treeherder"]["symbol"])[1]
pyjob["treeherder"]["symbol"] = join_symbol(group, symbol)
pyjob["run"][key] = version
yield pyjob


@transforms.add
def split_jsshell(config, jobs):
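    """
    Split jsshell benchmark jobs into one job per JS shell (SpiderMonkey or
    Google V8), adjusting the name, description, treeherder group, and mach
    command accordingly. Jobs whose names do not start with "jsshell" pass
    through unchanged.
    """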
all_shells = {"sm": "Spidermonkey", "v8": "Google V8"}
for job in jobs:
if not job["name"].startswith("jsshell"):
yield job
continue
test = job.pop("test")
for shell in job.get("shell", all_shells.keys()):
assert shell in all_shells
new_job = copy.deepcopy(job)
new_job["name"] = "{}-{}".format(new_job["name"], shell)
new_job["description"] = "{} on {}".format(
new_job["description"], all_shells[shell]
)
new_job["shell"] = shell
group = f"js-bench-{shell}"
symbol = split_symbol(new_job["treeherder"]["symbol"])[1]
new_job["treeherder"]["symbol"] = join_symbol(group, symbol)
run = new_job["run"]
run["mach"] = run["mach"].format(
shell=shell, SHELL=shell.upper(), test=test
)
yield new_job


def add_build_dependency(config, job):
"""
    Add a build dependency to the job; the installer url from that build is
    then exposed to the task via the GECKO_INSTALLER_URL environment variable.
"""
key = job["platform"]
build_labels = job.pop("require-build", {})
matches = keymatch(build_labels, key)
if not matches:
raise Exception(
"No build platform found. "
"Define 'require-build' for {} in the task config.".format(key)
)
if len(matches) > 1:
raise Exception(f"More than one build platform found for '{key}'.")
label = matches[0]
deps = job.setdefault("dependencies", {})
deps.update({"build": label})


@transforms.add
def handle_platform(config, jobs):
"""
Handle the 'platform' property, setting up treeherder context as well as
try-related attributes.
"""
fields = [
"always-target",
"fetches.toolchain",
"require-build",
"worker-type",
"worker",
]
for job in jobs:
platform = job["platform"]
for field in fields:
resolve_keyed_by(
job, field, item_name=job["name"], project=config.params["project"]
)
for field in job.get("dependencies", {}):
resolve_keyed_by(
job,
f"dependencies.{field}",
item_name=job["name"],
project=config.params["project"],
)
if "treeherder" in job:
job["treeherder"].setdefault("platform", platform)
if "require-build" in job:
add_build_dependency(config, job)
del job["platform"]
yield job


@transforms.add
def handle_shell(config, jobs):
"""
Handle the 'shell' property.
"""
fields = [
"run-on-projects",
"worker.env",
]
for job in jobs:
if not job.get("shell"):
yield job
continue
for field in fields:
resolve_keyed_by(job, field, item_name=job["name"])
del job["shell"]
yield job


@transforms.add
def set_code_review_env(config, jobs):
"""
Add a CODE_REVIEW environment variable when running in code-review bot mode
"""
is_code_review = config.params["target_tasks_method"] == "codereview"
for job in jobs:
attrs = job.get("attributes", {})
if is_code_review and attrs.get("code-review") is True:
env = job["worker"].setdefault("env", {})
env["CODE_REVIEW"] = "1"
yield job


@transforms.add
def set_base_revision_in_tgdiff(config, jobs):
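    """
    Provide the taskgraph-diff job with the base revision of the push (the
    parent of its first changeset) via `command-context`.
    """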
# Don't attempt to download 'json-automation' locally as the revision may
# not exist in the repository.
if not os.environ.get("MOZ_AUTOMATION") or taskgraph.fast:
yield from jobs
return
data = get_json_automationrelevance(
config.params["head_repository"], config.params["head_rev"]
)
for job in jobs:
if job["name"] != "taskgraph-diff":
yield job
continue
job["run"]["command-context"] = {
"base_rev": data["changesets"][0]["parents"][0]
}
yield job


@transforms.add
def set_worker_exit_code(config, jobs):
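    """
    Retry tasks that exit with status 137, which typically means the process
    was killed (128 + SIGKILL), e.g. by the kernel's OOM killer.
    """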
for job in jobs:
worker = job["worker"]
worker.setdefault("retry-exit-status", [])
if 137 not in worker["retry-exit-status"]:
worker["retry-exit-status"].append(137)
yield job