1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
|
#!/usr/bin/env python3
# mypy: allow-untyped-defs
"""Wrapper script for running jobs in Taskcluster
This is intended for running test jobs in Taskcluster. The script
takes two positional arguments: the name of the test job
and the script to actually run.
The name of the test job is used to determine whether the script should be run
for this push (this is in lieu of having a proper decision task). There are
several ways that the script can be scheduled to run
1. The output of wpt test-jobs includes the job name
2. The job name is included in a job declaration (see below)
3. The string "all" is included in the job declaration
4. The job name is set to "all"
A job declaration is a line appearing in the pull request body (for
pull requests) or first commit message (for pushes) of the form:
tc-jobs: job1,job2,[...]
In addition, there are a number of keyword arguments used to set options for the
environment in which the jobs run. Documentation for these is in the command help.
As well as running the script, this wrapper sets two environment variables:
GITHUB_BRANCH which is the branch that the commits will merge into (if it's a PR)
or the branch that the commits are on (if it's a push), and GITHUB_PULL_REQUEST
which is the string "false" if the event triggering this job wasn't a pull request
or the pull request number if it was. The semantics of these variables are chosen
to match the corresponding TRAVIS_* variables.
Note: for local testing in the Docker image the script ought to still work, but
full functionality requires that the TASK_EVENT environment variable is set to
the serialization of a GitHub event payload.
"""
import argparse
import fnmatch
import json
import os
import subprocess
import sys
import tarfile
import tempfile
import zipfile
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
from wpt.utils import get_download_to_descriptor # type: ignore
root = os.path.abspath(
os.path.join(os.path.dirname(__file__),
os.pardir,
os.pardir))
def run(cmd, return_stdout=False, **kwargs):
    """Echo and execute *cmd*, checking its exit status.

    :param cmd: command as an argv-style list.
    :param return_stdout: when True, return the command's stdout as a
        string (decoded as utf-8 unless an ``encoding`` kwarg overrides it);
        otherwise return the result of ``subprocess.check_call``.
    :raises subprocess.CalledProcessError: if the command exits non-zero.
    """
    print(" ".join(cmd))
    if return_stdout:
        kwargs.setdefault("encoding", "utf-8")
        return subprocess.check_output(cmd, **kwargs)
    return subprocess.check_call(cmd, **kwargs)
def start(cmd):
    """Echo *cmd* and launch it as a background child process (no wait)."""
    command_line = " ".join(cmd)
    print(command_line)
    subprocess.Popen(cmd)
def get_parser():
    """Build the command-line parser for this Taskcluster job wrapper."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--oom-killer", action="store_true", default=False,
                        help="Run userspace OOM killer")
    parser.add_argument("--hosts", dest="hosts_file", action="store_true",
                        default=True,
                        help="Setup wpt entries in hosts file")
    parser.add_argument("--no-hosts", dest="hosts_file", action="store_false",
                        help="Don't setup wpt entries in hosts file")
    parser.add_argument("--browser", action="append", default=[],
                        help="Browsers that will be used in the job")
    parser.add_argument("--channel", default=None,
                        choices=["experimental", "dev", "nightly", "beta", "stable"],
                        help="Chrome browser channel")
    parser.add_argument("--xvfb", action="store_true",
                        help="Start xvfb")
    parser.add_argument("--install-certificates", action="store_true", default=None,
                        help="Install web-platform.test certificates to UA store")
    parser.add_argument("--no-install-certificates", action="store_false", default=None,
                        help="Don't install web-platform.test certificates to UA store")
    parser.add_argument("--no-setup-repository", action="store_false",
                        dest="setup_repository",
                        help="Don't run any repository setup steps, instead use "
                             "the existing worktree. This is useful for local testing.")
    parser.add_argument("--checkout",
                        help="Revision to checkout before starting job")
    parser.add_argument("--ref",
                        help="Git ref for the commit that should be run")
    parser.add_argument("--head-rev",
                        help="Commit at the head of the branch when the decision task ran")
    parser.add_argument("--merge-rev",
                        help="Provisional merge commit for PR when the decision task ran")
    parser.add_argument("script",
                        help="Script to run for the job")
    parser.add_argument("script_args", nargs=argparse.REMAINDER,
                        help="Additional arguments to pass to the script")
    return parser
def start_userspace_oom_killer():
    """Launch earlyoom (a userspace OOM killer) in the background.

    Passes flags so it reports every 60 seconds, prefers killing browser
    processes and avoids python. See https://github.com/rfjakob/earlyoom
    """
    killer_cmd = ["sudo", "earlyoom", "-p", "-r", "60",
                  "--prefer=(chrome|firefox)", "--avoid=python"]
    start(killer_cmd)
def make_hosts_file():
    """Append wpt's test-domain entries to /etc/hosts (requires sudo)."""
    shell_cmd = "./wpt make-hosts-file >> /etc/hosts"
    run(["sudo", "sh", "-c", shell_cmd])
def checkout_revision(rev):
    """Quietly check out git revision *rev* in the current worktree."""
    git_cmd = ["git", "checkout", "--quiet", rev]
    run(git_cmd)
def install_certificates():
    """Install the wpt CA certificate into the system trust store (sudo)."""
    ca_dest = "/usr/local/share/ca-certificates/cacert.crt"
    run(["sudo", "cp", "tools/certs/cacert.pem", ca_dest])
    run(["sudo", "update-ca-certificates"])
def install_chrome(channel):
    """Download and install the Google Chrome .deb package for *channel*.

    "experimental" and "dev" both map to the unstable package.

    :param channel: release channel name (see the --channel option).
    :raises ValueError: for channels with no known Chrome package.
    """
    if channel in ("experimental", "dev"):
        deb_archive = "google-chrome-unstable_current_amd64.deb"
    elif channel == "beta":
        deb_archive = "google-chrome-beta_current_amd64.deb"
    elif channel == "stable":
        deb_archive = "google-chrome-stable_current_amd64.deb"
    else:
        # NOTE(review): "nightly" is an accepted --channel choice but has no
        # mapping here, so it also lands in this error path — confirm intended.
        raise ValueError("Unrecognized release channel: %s" % channel)

    dest = os.path.join("/tmp", deb_archive)
    deb_url = "https://dl.google.com/linux/direct/%s" % deb_archive
    with open(dest, "wb") as f:
        get_download_to_descriptor(f, deb_url)

    run(["sudo", "apt-get", "-qqy", "update"])
    # Install from the path we downloaded to rather than re-deriving it.
    run(["sudo", "gdebi", "-qn", dest])
def start_xvfb():
    """Start a background Xvfb server and a fluxbox window manager.

    The display comes from the DISPLAY environment variable and the screen
    geometry from SCREEN_WIDTH / SCREEN_HEIGHT / SCREEN_DEPTH.
    """
    display = os.environ["DISPLAY"]
    geometry = "%sx%sx%s" % (os.environ["SCREEN_WIDTH"],
                             os.environ["SCREEN_HEIGHT"],
                             os.environ["SCREEN_DEPTH"])
    start(["sudo", "Xvfb", display, "-screen", "0", geometry])
    start(["sudo", "fluxbox", "-display", display])
def set_variables(event):
    """Export GITHUB_PULL_REQUEST and GITHUB_BRANCH from a GitHub event.

    GITHUB_PULL_REQUEST is the PR number as a string, or "false" for a
    non-PR event. GITHUB_BRANCH is the merge target for a PR, or the
    pushed branch (with any refs/heads/ prefix stripped) for a push.
    """
    ref_prefix = "refs/heads/"
    branch = None
    pull_request = "false"

    if "pull_request" in event:
        pull_request = str(event["pull_request"]["number"])
        # This is the branch the PR will merge into, not the PR's own
        # branch name.
        branch = event["pull_request"]["base"]["ref"]
    elif "ref" in event:
        branch = event["ref"]
        if branch.startswith(ref_prefix):
            branch = branch[len(ref_prefix):]

    os.environ["GITHUB_PULL_REQUEST"] = pull_request
    if branch:
        os.environ["GITHUB_BRANCH"] = branch
def task_url(task_id):
    """Return the Taskcluster queue API URL for *task_id*.

    The legacy taskcluster.net deployment used a dedicated queue hostname;
    other deployments expose the queue under the root URL.
    """
    root_url = os.environ['TASKCLUSTER_ROOT_URL']
    is_legacy = root_url == 'https://taskcluster.net'
    queue_base = ("https://queue.taskcluster.net/v1/task" if is_legacy
                  else root_url + "/api/queue/v1/task")
    return "%s/%s" % (queue_base, task_id)
def download_artifacts(artifacts):
    """Download (and optionally extract) artifacts from other tasks.

    :param artifacts: list of dicts with keys "task" (source task id),
        "glob" (pattern matched against artifact names), "dest" (directory
        under the user's home to download into) and, optionally, "extract"
        (truthy to unpack the downloaded archive in place).

    The artifact listing for each task is fetched at most once and cached.
    Missing matches produce a warning rather than an error.
    """
    artifact_list_by_task = {}
    for artifact in artifacts:
        task_id = artifact["task"]
        base_url = task_url(task_id)
        if task_id not in artifact_list_by_task:
            # Fetch and cache this task's artifact listing.
            with tempfile.TemporaryFile() as f:
                get_download_to_descriptor(f, base_url + "/artifacts")
                f.seek(0)
                artifact_list_by_task[task_id] = json.load(f)

        artifacts_data = artifact_list_by_task[task_id]
        print("DEBUG: Got artifacts %s" % artifacts_data)
        found = False
        for candidate in artifacts_data["artifacts"]:
            print("DEBUG: candidate: %s glob: %s" %
                  (candidate["name"], artifact["glob"]))
            if fnmatch.fnmatch(candidate["name"], artifact["glob"]):
                found = True
                # (typo fix: was "aritfact")
                print("INFO: Fetching artifact %s from task %s" %
                      (candidate["name"], task_id))
                file_name = candidate["name"].rsplit("/", 1)[1]
                url = base_url + "/artifacts/" + candidate["name"]
                dest_path = os.path.expanduser(
                    os.path.join("~", artifact["dest"], file_name))
                # exist_ok avoids the check-then-create race of the previous
                # os.path.exists() + os.makedirs() sequence.
                os.makedirs(os.path.dirname(dest_path), exist_ok=True)
                with open(dest_path, "wb") as f:
                    get_download_to_descriptor(f, url)
                if artifact.get("extract"):
                    unpack(dest_path)
        if not found:
            print("WARNING: No artifact found matching %s in task %s" %
                  (artifact["glob"], task_id))
def unpack(path):
    """Extract the archive at *path* into its containing directory.

    Tar archives are handed to the system ``tar`` (which handles any
    compression it understands); zip archives are extracted in-process.

    :raises Exception: if the file is neither a tarfile nor a zipfile.
    """
    dest = os.path.dirname(path)
    if tarfile.is_tarfile(path):
        # Use the already-computed dest instead of re-deriving it.
        run(["tar", "-xf", path], cwd=dest)
    elif zipfile.is_zipfile(path):
        with zipfile.ZipFile(path) as archive:
            archive.extractall(dest)
    else:
        print("ERROR: Don't know how to extract %s" % path)
        # Same exception type as before, but now carries a message.
        raise Exception("Unsupported archive format: %s" % path)
def setup_environment(args):
    """Prepare the task environment before running the job script.

    Downloads any cross-task artifacts listed in TASK_ARTIFACTS, sets up
    the hosts file and CA certificates, installs Chrome if requested, and
    starts Xvfb and the OOM killer as configured in *args*.

    :raises ValueError: if chrome is requested without a --channel.
    """
    if "TASK_ARTIFACTS" in os.environ:
        artifacts = json.loads(os.environ["TASK_ARTIFACTS"])
        download_artifacts(artifacts)

    if args.hosts_file:
        make_hosts_file()

    if args.install_certificates:
        install_certificates()

    if "chrome" in args.browser:
        # Explicit error instead of `assert`: asserts are stripped under -O.
        if args.channel is None:
            raise ValueError("--channel is required when installing chrome")
        install_chrome(args.channel)

    if args.xvfb:
        start_xvfb()

    if args.oom_killer:
        start_userspace_oom_killer()
def setup_repository(args):
    """Validate and prepare the git worktree for the commit under test.

    Verifies that the local ``task_head`` ref still matches the commit the
    decision task scheduled (recovering or aborting if the branch moved),
    creates ``base_head``/``pr_head`` branches for PRs, fetches the base
    branch, and checks out the revision to test.

    Exits the process (status 1) on unrecoverable mismatches.
    """
    is_pr = os.environ.get("GITHUB_PULL_REQUEST", "false") != "false"
    # Initially task_head points at the same commit as the ref we want to test.
    # However that may not be the same commit as we actually want to test if
    # the branch changed since the decision task ran. The branch may have
    # changed because someone has pushed more commits (either to the PR
    # or later commits to the branch), or because someone has pushed to the
    # base branch for the PR.
    #
    # In that case we take a different approach depending on whether this is a
    # PR or a push to a branch.
    # If this is a push to a branch, and the original commit is still fetchable,
    # we try to fetch that (it may not be in the case of e.g. a force push).
    # If it's not fetchable then we fail the run.
    # For a PR we are testing the provisional merge commit. If that's changed it
    # could be that the PR branch was updated or the base branch was updated. In the
    # former case we fail the run because testing an old commit is a waste of
    # resources. In the latter case we assume it's OK to use the current merge
    # instead of the one at the time the decision task ran.
    if args.ref:
        if is_pr:
            # assumes PR refs look like refs/pull/<id>/merge — TODO confirm
            assert args.ref.endswith("/merge")
            expected_head = args.merge_rev
        else:
            expected_head = args.head_rev
        task_head = run(["git", "rev-parse", "task_head"], return_stdout=True).strip()
        if task_head != expected_head:
            if not is_pr:
                # Push case: try to recover the originally-scheduled commit.
                try:
                    run(["git", "fetch", "origin", expected_head])
                    run(["git", "reset", "--hard", expected_head])
                except subprocess.CalledProcessError:
                    print("CRITICAL: task_head points at %s, expected %s and "
                          "unable to fetch expected commit.\n"
                          "This may be because the branch was updated" % (task_head, expected_head))
                    sys.exit(1)
            else:
                # Convert the refs/pulls/<id>/merge to refs/pulls/<id>/head
                head_ref = args.ref.rsplit("/", 1)[0] + "/head"
                try:
                    remote_head = run(["git", "ls-remote", "origin", head_ref],
                                      return_stdout=True).split("\t")[0]
                except subprocess.CalledProcessError:
                    print("CRITICAL: Failed to read remote ref %s" % head_ref)
                    sys.exit(1)
                # PR branch itself moved: testing a stale commit is wasted work.
                if remote_head != args.head_rev:
                    print("CRITICAL: task_head points at %s, expected %s. "
                          "This may be because the branch was updated" % (task_head, expected_head))
                    sys.exit(1)
                # Only the base branch moved: the current merge commit is fine.
                print("INFO: Merge commit changed from %s to %s due to base branch changes. "
                      "Running task anyway." % (expected_head, task_head))
    if os.environ.get("GITHUB_PULL_REQUEST", "false") != "false":
        # task_head^@ lists the parents of task_head; a PR merge commit is
        # expected to have exactly two (base branch and PR branch).
        parents = run(["git", "rev-parse", "task_head^@"],
                      return_stdout=True).strip().split()
        if len(parents) == 2:
            base_head = parents[0]
            pr_head = parents[1]
            run(["git", "branch", "base_head", base_head])
            run(["git", "branch", "pr_head", pr_head])
        else:
            print("ERROR: Pull request HEAD wasn't a 2-parent merge commit; "
                  "expected to test the merge of PR into the base")
            commit = run(["git", "rev-parse", "task_head"],
                         return_stdout=True).strip()
            print("HEAD: %s" % commit)
            print("Parents: %s" % ", ".join(parents))
            sys.exit(1)
    branch = os.environ.get("GITHUB_BRANCH")
    if branch:
        # Ensure that the remote base branch exists
        # TODO: move this somewhere earlier in the task
        run(["git", "fetch", "--quiet", "origin", "%s:%s" % (branch, branch)])
    checkout_rev = args.checkout if args.checkout is not None else "task_head"
    checkout_revision(checkout_rev)
    # Log the final repository state for debugging.
    refs = run(["git", "for-each-ref", "refs/heads"], return_stdout=True)
    print("INFO: git refs:\n%s" % refs)
    print("INFO: checked out commit:\n%s" % run(["git", "rev-parse", "HEAD"],
                                                return_stdout=True))
def fetch_event_data():
    """Fetch the GitHub event payload attached to this task.

    Returns the parsed event dict, or None when TASK_ID is unset (e.g.
    local testing) or when the task carries no github_event in its extra
    data.
    """
    task_id = os.environ.get("TASK_ID")
    if task_id is None:
        print("WARNING: Missing TASK_ID environment variable")
        # For example under local testing
        return None

    with tempfile.TemporaryFile() as f:
        get_download_to_descriptor(f, task_url(task_id))
        f.seek(0)
        task_data = json.load(f)

    event_data = task_data.get("extra", {}).get("github_event")
    if event_data is not None:
        return json.loads(event_data)
def include_job(job):
    """Return True if *job* should run for this push or pull request.

    Non-PR contexts always run; pushes always run "run-all"; otherwise the
    job must appear in the output of ``wpt test-jobs``.
    """
    # Only for supporting pre decision-task PRs
    # Special case things that unconditionally run on pushes,
    # assuming a higher layer is filtering the required list of branches
    if "GITHUB_PULL_REQUEST" not in os.environ:
        return True
    is_push = os.environ["GITHUB_PULL_REQUEST"] == "false"
    if is_push and job == "run-all":
        return True
    jobs_str = run([os.path.join(root, "wpt"), "test-jobs"], return_stdout=True)
    print(jobs_str)
    return job in set(jobs_str.splitlines())
def main():
    """Parse arguments, decide whether the job should run, then execute it.

    Exits with the wrapped script's return code via sys.exit; returns early
    (skipping the job) when the named job is not scheduled for this push/PR.
    """
    args = get_parser().parse_args()

    # Prefer an event payload supplied via the environment (used in local
    # testing); otherwise fetch it from the task definition.
    if "TASK_EVENT" in os.environ:
        event = json.loads(os.environ["TASK_EVENT"])
    else:
        event = fetch_event_data()

    if event:
        set_variables(event)

    if args.setup_repository:
        setup_repository(args)

    # Hack for backwards compatibility: when the first positional argument is
    # a known job name it acts as a scheduling gate, and the real script is
    # the first entry of script_args.
    if args.script in ["run-all", "lint", "update_built", "tools_unittest",
                       "wpt_integration", "resources_unittest",
                       "wptrunner_infrastructure", "stability", "affected_tests"]:
        job = args.script
        if not include_job(job):
            return
        args.script = args.script_args[0]
        args.script_args = args.script_args[1:]

    # Run the job
    setup_environment(args)
    os.chdir(root)
    cmd = [args.script] + args.script_args
    print(" ".join(cmd))
    sys.exit(subprocess.call(cmd))
# Entry point: only invoke main() when executed directly, not on import.
if __name__ == "__main__":
    main()  # type: ignore
|