author    Daniel Baumann <daniel.baumann@progress-linux.org>  2024-06-12 05:43:14 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org>  2024-06-12 05:43:14 +0000
commit    8dd16259287f58f9273002717ec4d27e97127719 (patch)
tree      3863e62a53829a84037444beab3abd4ed9dfc7d0 /third_party/python
parent    Releasing progress-linux version 126.0.1-1~progress7.99u1. (diff)
Merging upstream version 127.0.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'third_party/python')
-rw-r--r-- third_party/python/glean_parser/glean_parser-14.0.1.dist-info/AUTHORS.md (renamed from third_party/python/glean_parser/glean_parser-13.0.1.dist-info/AUTHORS.md) | 0
-rw-r--r-- third_party/python/glean_parser/glean_parser-14.0.1.dist-info/LICENSE (renamed from third_party/python/glean_parser/glean_parser-13.0.1.dist-info/LICENSE) | 0
-rw-r--r-- third_party/python/glean_parser/glean_parser-14.0.1.dist-info/METADATA (renamed from third_party/python/glean_parser/glean_parser-13.0.1.dist-info/METADATA) | 15
-rw-r--r-- third_party/python/glean_parser/glean_parser-14.0.1.dist-info/RECORD (renamed from third_party/python/glean_parser/glean_parser-13.0.1.dist-info/RECORD) | 36
-rw-r--r-- third_party/python/glean_parser/glean_parser-14.0.1.dist-info/WHEEL (renamed from third_party/python/glean_parser/glean_parser-13.0.1.dist-info/WHEEL) | 0
-rw-r--r-- third_party/python/glean_parser/glean_parser-14.0.1.dist-info/entry_points.txt (renamed from third_party/python/glean_parser/glean_parser-13.0.1.dist-info/entry_points.txt) | 0
-rw-r--r-- third_party/python/glean_parser/glean_parser-14.0.1.dist-info/top_level.txt (renamed from third_party/python/glean_parser/glean_parser-13.0.1.dist-info/top_level.txt) | 0
-rw-r--r-- third_party/python/glean_parser/glean_parser/javascript_server.py | 16
-rw-r--r-- third_party/python/glean_parser/glean_parser/kotlin.py | 25
-rw-r--r-- third_party/python/glean_parser/glean_parser/parser.py | 24
-rw-r--r-- third_party/python/glean_parser/glean_parser/pings.py | 6
-rw-r--r-- third_party/python/glean_parser/glean_parser/schemas/pings.2-0-0.schema.yaml | 22
-rw-r--r-- third_party/python/glean_parser/glean_parser/swift.py | 23
-rw-r--r-- third_party/python/glean_parser/glean_parser/templates/javascript_server.jinja2 | 54
-rw-r--r-- third_party/python/glean_parser/glean_parser/templates/kotlin.jinja2 | 66
-rw-r--r-- third_party/python/glean_parser/glean_parser/templates/rust.jinja2 | 2
-rw-r--r-- third_party/python/glean_parser/glean_parser/templates/swift.jinja2 | 49
-rw-r--r-- third_party/python/glean_parser/glean_parser/util.py | 2
-rw-r--r-- third_party/python/poetry.lock | 15
-rw-r--r-- third_party/python/requirements.in | 4
-rw-r--r-- third_party/python/requirements.txt | 13
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.3.0.dist-info/METADATA | 28
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.3.0.dist-info/RECORD | 80
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-8.0.1.dist-info/LICENSE (renamed from third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.3.0.dist-info/LICENSE) | 0
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-8.0.1.dist-info/METADATA | 123
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-8.0.1.dist-info/RECORD | 79
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-8.0.1.dist-info/WHEEL (renamed from third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.3.0.dist-info/WHEEL) | 2
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-8.0.1.dist-info/entry_points.txt (renamed from third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.3.0.dist-info/entry_points.txt) | 0
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-8.0.1.dist-info/top_level.txt (renamed from third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.3.0.dist-info/top_level.txt) | 0
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/__init__.py | 2
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/actions/add_new_jobs.py | 2
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/actions/cancel.py | 4
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/actions/cancel_all.py | 4
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/actions/rebuild_cached_tasks.py | 2
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/actions/registry.py | 34
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/actions/retrigger.py | 26
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/actions/util.py | 15
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/config.py | 20
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/create.py | 2
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/decision.py | 23
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/docker.py | 48
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/files_changed.py | 91
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/generator.py | 8
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/loader/default.py | 4
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/main.py | 83
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/morph.py | 1
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/optimize/base.py | 12
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/optimize/strategies.py | 16
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/parameters.py | 5
-rwxr-xr-x third_party/python/taskcluster_taskgraph/taskgraph/run-task/run-task | 14
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/target_tasks.py | 8
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/transforms/__init__.py | 3
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/transforms/base.py | 2
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/transforms/code_review.py | 10
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/transforms/docker_image.py | 16
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/transforms/fetch.py | 65
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/transforms/from_deps.py | 21
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/transforms/run/__init__.py (renamed from third_party/python/taskcluster_taskgraph/taskgraph/transforms/job/__init__.py) | 228
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/transforms/run/common.py (renamed from third_party/python/taskcluster_taskgraph/taskgraph/transforms/job/common.py) | 40
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/transforms/run/index_search.py (renamed from third_party/python/taskcluster_taskgraph/taskgraph/transforms/job/index_search.py) | 8
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/transforms/run/run_task.py (renamed from third_party/python/taskcluster_taskgraph/taskgraph/transforms/job/run_task.py) | 36
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/transforms/run/toolchain.py (renamed from third_party/python/taskcluster_taskgraph/taskgraph/transforms/job/toolchain.py) | 38
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/transforms/task.py | 61
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/transforms/task_context.py | 10
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/util/archive.py | 52
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/util/cached_tasks.py | 45
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/util/decision.py | 79
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/util/docker.py | 21
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/util/hash.py | 11
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/util/keyed_by.py | 16
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/util/memoize.py | 35
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/util/parameterization.py | 21
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/util/schema.py | 12
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/util/set_name.py | 34
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/util/shell.py | 2
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/util/taskcluster.py | 71
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/util/time.py | 4
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/util/treeherder.py | 15
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/util/vcs.py | 36
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/util/verify.py | 27
-rw-r--r-- third_party/python/taskcluster_taskgraph/taskgraph/util/yaml.py | 5
81 files changed, 1223 insertions, 909 deletions
diff --git a/third_party/python/glean_parser/glean_parser-13.0.1.dist-info/AUTHORS.md b/third_party/python/glean_parser/glean_parser-14.0.1.dist-info/AUTHORS.md
index 525116ee7e..525116ee7e 100644
--- a/third_party/python/glean_parser/glean_parser-13.0.1.dist-info/AUTHORS.md
+++ b/third_party/python/glean_parser/glean_parser-14.0.1.dist-info/AUTHORS.md
diff --git a/third_party/python/glean_parser/glean_parser-13.0.1.dist-info/LICENSE b/third_party/python/glean_parser/glean_parser-14.0.1.dist-info/LICENSE
index a612ad9813..a612ad9813 100644
--- a/third_party/python/glean_parser/glean_parser-13.0.1.dist-info/LICENSE
+++ b/third_party/python/glean_parser/glean_parser-14.0.1.dist-info/LICENSE
diff --git a/third_party/python/glean_parser/glean_parser-13.0.1.dist-info/METADATA b/third_party/python/glean_parser/glean_parser-14.0.1.dist-info/METADATA
index 0bab2150ba..65030fd86b 100644
--- a/third_party/python/glean_parser/glean_parser-13.0.1.dist-info/METADATA
+++ b/third_party/python/glean_parser/glean_parser-14.0.1.dist-info/METADATA
@@ -1,6 +1,6 @@
Metadata-Version: 2.1
-Name: glean-parser
-Version: 13.0.1
+Name: glean_parser
+Version: 14.0.1
Summary: Parser tools for Mozilla's Glean telemetry
Home-page: https://github.com/mozilla/glean_parser
Author: The Glean Team
@@ -77,7 +77,16 @@ $ glean_parser check < ping.json
# Changelog
-## Unreleased
+## 14.0.1
+
+- BUGFIX: Fix missing `ping_arg` in util.py ([#687](https://github.com/mozilla/glean_parser/pull/687))
+
+## 14.0.0
+
+- BREAKING CHANGE: Expose the optional `enabled` property on pings, defaulting to `enabled: true` ([#681](https://github.com/mozilla/glean_parser/pull/681))
+- BREAKING CHANGE: Support metadata field `ping_schedule` for pings ([bug 1804711](https://bugzilla.mozilla.org/show_bug.cgi?id=1804711))
+- Add support for event metric type in server JavaScript outputter ([DENG-2407](https://mozilla-hub.atlassian.net/browse/DENG-2407))
+- Add Swift and Kotlin codegen support for the object metric type object ([#685](https://github.com/mozilla/glean_parser/pull/685))
## 13.0.1
diff --git a/third_party/python/glean_parser/glean_parser-13.0.1.dist-info/RECORD b/third_party/python/glean_parser/glean_parser-14.0.1.dist-info/RECORD
index 8ebf523fd7..700ca80797 100644
--- a/third_party/python/glean_parser/glean_parser-13.0.1.dist-info/RECORD
+++ b/third_party/python/glean_parser/glean_parser-14.0.1.dist-info/RECORD
@@ -4,45 +4,45 @@ glean_parser/coverage.py,sha256=2IwC4XMDtDamMkBFoYilmqJzW4gyypq65YVCur8SNas,4405
glean_parser/data_review.py,sha256=BweeeTkNNS6HrIDkztawhbDByrk_-Avxpg7YeST3VAs,2152
glean_parser/go_server.py,sha256=s6lxK9IAFY55pNl3Rv4MHlV-nQwSoyhO9ppTQE9VCik,5346
glean_parser/javascript.py,sha256=w4ZhNBHBKWYk0h3t7G0Ud2tR__hRqzn9dlEXNKLdQrA,11230
-glean_parser/javascript_server.py,sha256=x75JfOaveEkPQe3ozYXdtDb1Zks-PxzncDOizsJbYos,7972
-glean_parser/kotlin.py,sha256=5z8_74xlqvHDsedwZhGf1_qb7swPEgIZumkJIuj3ef8,12598
+glean_parser/javascript_server.py,sha256=PZSTl63TR3cY8Y99jXMOLu-8rzgQarymzjnHJm9aYK0,8389
+glean_parser/kotlin.py,sha256=5nXnen4s2YOj503Z77HVTUgDHWdulB8BMl8vOie38o4,13365
glean_parser/lint.py,sha256=STqdgyOhR4Q3fHivSizgn9bOOyqrNHhzjaqyJxz6qzI,19948
glean_parser/markdown.py,sha256=GkCr1CrV6mnRQseT6FO1-JJ7Eup8X3lxUfRMBTxXpe4,9066
glean_parser/metrics.py,sha256=YAO8wPuRHTLkdT9M4zh9ZwoFI1_VS8O9oQqwZNYyDp0,14612
-glean_parser/parser.py,sha256=cUOnvSXKfEBg8YTpRcWiPcMwpFpK1TTqsVO_zjUtpR4,15309
-glean_parser/pings.py,sha256=AQ-fBmIx2GKQv6J2NyTFfHHZzSnApZZoC770LlstkoI,3180
+glean_parser/parser.py,sha256=3-uF-Hi5LlvdFc1NxZOKX0EoEyekZGnZV094eTIJut0,16361
+glean_parser/pings.py,sha256=-CIiMBVOTFULmNybV8YTFI7vmfOYOGQ5TD9hEfYPUII,3435
glean_parser/python_server.py,sha256=ERpYcbSwF19xKFagxX0mZAvlR1y6D7Ah5DSvW8LipCY,4791
glean_parser/ruby_server.py,sha256=e5lkfcLQAUMUBQDCjqNU82LkdUzT5x-G6HOnsUInbsU,5190
glean_parser/rust.py,sha256=UEHeIZlToxCBelfec5sl_l_uLZfk8f_OUXqa_ZoEvnk,7330
-glean_parser/swift.py,sha256=T1BSGahd9wUd6VDeNC89SdN6M34jKXDlydMpSI0QLOs,8379
+glean_parser/swift.py,sha256=paUzF6tItdktFwIQYCKsYpqXfn8zxR2coU_jMYrmwlc,8957
glean_parser/tags.py,sha256=bemKYvcbMO4JrghiNSe-A4BNNDtx_FlUPkgrPPJy84Y,1391
glean_parser/translate.py,sha256=luKQoraARZ2tjenHs0SVtCxflnYaMkzPYFfKEdKdSqQ,8403
glean_parser/translation_options.py,sha256=Lxzr6G7MP0tC_ZYlZXftS4j0SLiqO-5mGVTEc7ggXis,2037
-glean_parser/util.py,sha256=v81watw5nSPGRlFNNpTb7iUv9NZObiFIbyyg2oZ6EnY,16149
+glean_parser/util.py,sha256=wftmoWBUQM_o7pUwdhBp3HuDCVHIBw1PXtrfxwPLD0Q,16187
glean_parser/validate_ping.py,sha256=0TNvILH6dtzJDys3W8Kqorw6kk03me73OCUDtpoHcXU,2118
glean_parser/schemas/metrics.1-0-0.schema.yaml,sha256=cND3cvi6iBfPUVmtfIBQfGJV9AALpbvN7nu8E33_J-o,19566
glean_parser/schemas/metrics.2-0-0.schema.yaml,sha256=wx1q0L4C0-Vcwk1SPU6t8OfjDEQvgrwwEG6xfSHO1MI,26365
glean_parser/schemas/pings.1-0-0.schema.yaml,sha256=hwCnsKpEysmrmVp-QHGBArEkVY3vaU1rVsxlTwhAzws,4315
-glean_parser/schemas/pings.2-0-0.schema.yaml,sha256=vDyvFT8KwAwaqyWHG4y6pFNrsc3NO7OyDDagA2eTeqM,5415
+glean_parser/schemas/pings.2-0-0.schema.yaml,sha256=f8PClAlMoLTmX6ANq8Ai0CpiE74i3LOgU5SoTJpoh0M,6149
glean_parser/schemas/tags.1-0-0.schema.yaml,sha256=OGXIJlvvVW1vaqB_NVZnwKeZ-sLlfH57vjBSHbj6DNI,1231
glean_parser/templates/data_review.jinja2,sha256=jeYU29T1zLSyu9fKBBFu5BFPfIw8_hmOUXw8RXhRXK8,3287
glean_parser/templates/go_server.jinja2,sha256=Jy1e0uQqr_WZNoj-AWnygRmygX2jyj_GQMMV8mSah2k,6825
glean_parser/templates/javascript.buildinfo.jinja2,sha256=4mXiZCQIk9if4lxlA05kpSIL4a95IdwGwqle2OqqNAs,474
glean_parser/templates/javascript.jinja2,sha256=cT_bG-jC6m4afECXmcsqHwiiHjRuVtJnfv90OD2Mwxw,2669
-glean_parser/templates/javascript_server.jinja2,sha256=H991yQOKJMwSgM0bLEA-Q5Z15LWsfEPh6bTYz_owSCU,9423
+glean_parser/templates/javascript_server.jinja2,sha256=k-XI3QIhHQ1vbIPqSMTmCu93b1oZhm7KLmx9LfO3IJ0,9472
glean_parser/templates/kotlin.buildinfo.jinja2,sha256=X0lk2SNu5OIIj2i6mUyF9CWFQIonLgfqkgT5fA-5G6c,920
glean_parser/templates/kotlin.geckoview.jinja2,sha256=MJOgtoDXmBjE9pwk-G6T89y36RZuMbDWM_-DBN_gFJo,5099
-glean_parser/templates/kotlin.jinja2,sha256=3DqUMXJRkmTvSp_5IRyvGmw5iXYWdox7coMFe3YDxcc,5247
+glean_parser/templates/kotlin.jinja2,sha256=npMgDdWD9OItOZQ-dyLQZn_IKgnzee2EdJynhUa1ig8,7690
glean_parser/templates/markdown.jinja2,sha256=vAHHGGm28HRDPd3zO_wQMAUZIuxE9uQ7hl3NpXxcKV4,3425
glean_parser/templates/python_server.jinja2,sha256=gu2C1rkn760IqBCG2SWaK7o32T1ify94wDEsudLPUg8,7260
glean_parser/templates/qmldir.jinja2,sha256=m6IGsp-tgTiOfQ7VN8XW6GqX0gJqJkt3B6Pkaul6FVo,156
glean_parser/templates/ruby_server.jinja2,sha256=vm4BEenOqzomQNTLFfMOzlWHARnsWUjTBbnR-v2cadI,6247
-glean_parser/templates/rust.jinja2,sha256=wlV0OZvV3Mk2ulrqFkN1vGjdsahsupEy2TQvWxQKzww,5439
-glean_parser/templates/swift.jinja2,sha256=xkvVsTpfK0QK3tI32wGqzxm2hqFNaBQ6Y71rKIsCmAI,4944
-glean_parser-13.0.1.dist-info/AUTHORS.md,sha256=yxgj8MioO4wUnrh0gmfb8l3DJJrf-l4HmmEDbQsbbNI,455
-glean_parser-13.0.1.dist-info/LICENSE,sha256=HyVuytGSiAUQ6ErWBHTqt1iSGHhLmlC8fO7jTCuR8dU,16725
-glean_parser-13.0.1.dist-info/METADATA,sha256=UYz6ZRXyv3ODi3yl2vRQHZVdm0XGerFp8pIOGWGwOKw,31604
-glean_parser-13.0.1.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
-glean_parser-13.0.1.dist-info/entry_points.txt,sha256=mf9d3sv8BwSjjR58x9KDnpVkONCnv3fPQC2NjJl15Xg,68
-glean_parser-13.0.1.dist-info/top_level.txt,sha256=q7T3duD-9tYZFyDry6Wv2LcdMsK2jGnzdDFhxWcT2Z8,13
-glean_parser-13.0.1.dist-info/RECORD,,
+glean_parser/templates/rust.jinja2,sha256=Ir_JqWRIUs1KLoYNDolgTRjWfWdzzBfouCP-YeTJa-c,5495
+glean_parser/templates/swift.jinja2,sha256=4f993l_zZk_Tz1efiz3nbvDK1H3Uq3dWQ2T6glT9XQ4,6695
+glean_parser-14.0.1.dist-info/AUTHORS.md,sha256=yxgj8MioO4wUnrh0gmfb8l3DJJrf-l4HmmEDbQsbbNI,455
+glean_parser-14.0.1.dist-info/LICENSE,sha256=HyVuytGSiAUQ6ErWBHTqt1iSGHhLmlC8fO7jTCuR8dU,16725
+glean_parser-14.0.1.dist-info/METADATA,sha256=Ghvw-Y7woQUJ38P8TYT5TFt8sL61GJoZPBajaB0WLeQ,32276
+glean_parser-14.0.1.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+glean_parser-14.0.1.dist-info/entry_points.txt,sha256=mf9d3sv8BwSjjR58x9KDnpVkONCnv3fPQC2NjJl15Xg,68
+glean_parser-14.0.1.dist-info/top_level.txt,sha256=q7T3duD-9tYZFyDry6Wv2LcdMsK2jGnzdDFhxWcT2Z8,13
+glean_parser-14.0.1.dist-info/RECORD,,
diff --git a/third_party/python/glean_parser/glean_parser-13.0.1.dist-info/WHEEL b/third_party/python/glean_parser/glean_parser-14.0.1.dist-info/WHEEL
index bab98d6758..bab98d6758 100644
--- a/third_party/python/glean_parser/glean_parser-13.0.1.dist-info/WHEEL
+++ b/third_party/python/glean_parser/glean_parser-14.0.1.dist-info/WHEEL
diff --git a/third_party/python/glean_parser/glean_parser-13.0.1.dist-info/entry_points.txt b/third_party/python/glean_parser/glean_parser-14.0.1.dist-info/entry_points.txt
index 08fde9d655..08fde9d655 100644
--- a/third_party/python/glean_parser/glean_parser-13.0.1.dist-info/entry_points.txt
+++ b/third_party/python/glean_parser/glean_parser-14.0.1.dist-info/entry_points.txt
diff --git a/third_party/python/glean_parser/glean_parser-13.0.1.dist-info/top_level.txt b/third_party/python/glean_parser/glean_parser-14.0.1.dist-info/top_level.txt
index a7f3a37918..a7f3a37918 100644
--- a/third_party/python/glean_parser/glean_parser-13.0.1.dist-info/top_level.txt
+++ b/third_party/python/glean_parser/glean_parser-14.0.1.dist-info/top_level.txt
diff --git a/third_party/python/glean_parser/glean_parser/javascript_server.py b/third_party/python/glean_parser/glean_parser/javascript_server.py
index f5099d2660..060575f38c 100644
--- a/third_party/python/glean_parser/glean_parser/javascript_server.py
+++ b/third_party/python/glean_parser/glean_parser/javascript_server.py
@@ -42,9 +42,12 @@ from . import util
SUPPORTED_METRIC_TYPES = ["string", "event"]
-def event_class_name(ping_name: str, event_metric_exists: bool) -> str:
+def event_class_name(
+ ping_name: str, metrics_by_type: Dict[str, List[metrics.Metric]]
+) -> str:
# For compatibility with FxA codebase we don't want to add "Logger" suffix
# when custom pings without event metrics are used.
+ event_metric_exists = "event" in metrics_by_type
suffix = "Logger" if event_metric_exists else ""
return util.Camelize(ping_name) + "ServerEvent" + suffix
@@ -61,10 +64,13 @@ def generate_js_metric_type(metric: metrics.Metric) -> str:
return metric.type
-def generate_ping_factory_method(ping: str, event_metric_exists: bool) -> str:
+def generate_ping_factory_method(
+ ping: str, metrics_by_type: Dict[str, List[metrics.Metric]]
+) -> str:
# `ServerEventLogger` better describes role of the class that this factory
# method generates, but for compatibility with existing FxA codebase
# we use `Event` suffix if no event metrics are defined.
+ event_metric_exists = "event" in metrics_by_type
suffix = "ServerEventLogger" if event_metric_exists else "Event"
return f"create{util.Camelize(ping)}{suffix}"
@@ -136,6 +142,12 @@ def output(
metrics_list = metrics_by_type.setdefault(metric.type, [])
metrics_list.append(metric)
+ # Order pings_to_metrics for backwards compatibility with the existing FxA codebase.
+ # Put pings without `event` type metrics first.
+ ping_to_metrics = dict(
+ sorted(ping_to_metrics.items(), key=lambda item: "event" in item[1])
+ )
+
PING_METRIC_ERROR_MSG = (
" Server-side environment is simplified and this"
+ " parser doesn't generate individual metric files. Make sure to pass all"
diff --git a/third_party/python/glean_parser/glean_parser/kotlin.py b/third_party/python/glean_parser/glean_parser/kotlin.py
index 82cc63d237..6d9ea8dcf8 100644
--- a/third_party/python/glean_parser/glean_parser/kotlin.py
+++ b/third_party/python/glean_parser/glean_parser/kotlin.py
@@ -107,6 +107,11 @@ def type_name(obj: Union[metrics.Metric, pings.Ping]) -> str:
return "{}<{}>".format(class_name(obj.type), generic)
+ generate_structure = getattr(obj, "_generate_structure", [])
+ if len(generate_structure):
+ generic = util.Camelize(obj.name) + "Object"
+ return "{}<{}>".format(class_name(obj.type), generic)
+
return class_name(obj.type)
@@ -125,6 +130,21 @@ def extra_type_name(typ: str) -> str:
return "UNSUPPORTED"
+def structure_type_name(typ: str) -> str:
+ """
+ Returns the corresponding Kotlin type for structure items.
+ """
+
+ if typ == "boolean":
+ return "Boolean"
+ elif typ == "string":
+ return "String"
+ elif typ == "number":
+ return "Int"
+ else:
+ return "UNSUPPORTED"
+
+
def class_name(obj_type: str) -> str:
"""
Returns the Kotlin class name for a given metric or ping type.
@@ -320,6 +340,7 @@ def output_kotlin(
("type_name", type_name),
("extra_type_name", extra_type_name),
("class_name", class_name),
+ ("structure_type_name", structure_type_name),
),
)
@@ -333,6 +354,9 @@ def output_kotlin(
has_labeled_metrics = any(
getattr(metric, "labeled", False) for metric in category_val.values()
)
+ has_object_metrics = any(
+ isinstance(metric, metrics.Object) for metric in category_val.values()
+ )
with filepath.open("w", encoding="utf-8") as fd:
fd.write(
@@ -346,6 +370,7 @@ def output_kotlin(
ping_args=util.ping_args,
namespace=namespace,
has_labeled_metrics=has_labeled_metrics,
+ has_object_metrics=has_object_metrics,
glean_namespace=glean_namespace,
)
)
diff --git a/third_party/python/glean_parser/glean_parser/parser.py b/third_party/python/glean_parser/glean_parser/parser.py
index 5ca584ac1e..158676be73 100644
--- a/third_party/python/glean_parser/glean_parser/parser.py
+++ b/third_party/python/glean_parser/glean_parser/parser.py
@@ -11,7 +11,7 @@ Code for parsing metrics.yaml files.
import functools
from pathlib import Path
import textwrap
-from typing import Any, Dict, Generator, Iterable, Optional, Tuple, Union
+from typing import Any, cast, Dict, Generator, Iterable, Optional, Set, Tuple, Union
import jsonschema # type: ignore
from jsonschema.exceptions import ValidationError # type: ignore
@@ -267,6 +267,7 @@ def _instantiate_pings(
"""
global_no_lint = content.get("no_lint", [])
assert isinstance(global_no_lint, list)
+ ping_schedule_reverse_map: Dict[str, Set[str]] = dict()
for ping_key, ping_val in sorted(content.items()):
if ping_key.startswith("$"):
@@ -284,6 +285,22 @@ def _instantiate_pings(
if not isinstance(ping_val, dict):
raise TypeError(f"Invalid content for ping {ping_key}")
ping_val["name"] = ping_key
+
+ if "metadata" in ping_val and "ping_schedule" in ping_val["metadata"]:
+ if ping_key in ping_val["metadata"]["ping_schedule"]:
+ yield util.format_error(
+ filepath,
+ f"For ping '{ping_key}'",
+ "ping_schedule contains its own ping name",
+ )
+ continue
+ for ping_schedule in ping_val["metadata"]["ping_schedule"]:
+ if ping_schedule not in ping_schedule_reverse_map:
+ ping_schedule_reverse_map[ping_schedule] = set()
+ ping_schedule_reverse_map[ping_schedule].add(ping_key)
+
+ del ping_val["metadata"]["ping_schedule"]
+
try:
ping_obj = Ping(
defined_in=getattr(ping_val, "defined_in", None),
@@ -313,6 +330,11 @@ def _instantiate_pings(
all_objects.setdefault("pings", {})[ping_key] = ping_obj
sources[ping_key] = filepath
+ for scheduler, scheduled in ping_schedule_reverse_map.items():
+ if isinstance(all_objects["pings"][scheduler], Ping):
+ scheduler_obj: Ping = cast(Ping, all_objects["pings"][scheduler])
+ scheduler_obj.schedules_pings = sorted(list(scheduled))
+
def _instantiate_tags(
all_objects: ObjectTree,
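A sketch of the `ping_schedule` bookkeeping added above, using hypothetical ping names: each ping's metadata lists the pings that trigger it, and the parser inverts that into a scheduler-to-scheduled map before filling in `schedules_pings`.

```python
from typing import Dict, Set

# Hypothetical metadata.ping_schedule entries, keyed by ping name.
ping_schedules = {
    "usage": ["baseline"],
    "session-end": ["baseline", "metrics"],
}

# Invert into scheduler -> set of pings it triggers, as in
# ping_schedule_reverse_map above.
reverse: Dict[str, Set[str]] = {}
for ping, schedulers in ping_schedules.items():
    for scheduler in schedulers:
        reverse.setdefault(scheduler, set()).add(ping)

# The "baseline" ping now schedules both pings, sorted as in the parser.
assert sorted(reverse["baseline"]) == ["session-end", "usage"]
```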
diff --git a/third_party/python/glean_parser/glean_parser/pings.py b/third_party/python/glean_parser/glean_parser/pings.py
index b4145ea68d..b3f2476c9a 100644
--- a/third_party/python/glean_parser/glean_parser/pings.py
+++ b/third_party/python/glean_parser/glean_parser/pings.py
@@ -31,6 +31,7 @@ class Ping:
reasons: Optional[Dict[str, str]] = None,
defined_in: Optional[Dict] = None,
no_lint: Optional[List[str]] = None,
+ enabled: Optional[bool] = None,
_validated: bool = False,
):
# Avoid cyclical import
@@ -46,6 +47,10 @@ class Ping:
self.metadata = metadata
self.precise_timestamps = self.metadata.get("precise_timestamps", True)
self.include_info_sections = self.metadata.get("include_info_sections", True)
+ if enabled is None:
+ enabled = True
+ self.enabled = enabled
+ self.schedules_pings: List[str] = []
if data_reviews is None:
data_reviews = []
self.data_reviews = data_reviews
@@ -94,6 +99,7 @@ class Ping:
modified_dict = util.remove_output_params(
modified_dict, "include_info_sections"
)
+ modified_dict = util.remove_output_params(modified_dict, "schedules_pings")
return modified_dict
def identifier(self) -> str:
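The `enabled` handling above is the usual tri-state default: `None` means `pings.yaml` omitted the field, which collapses to `True`. A minimal sketch of just that rule:

```python
from typing import Optional

def resolve_enabled(enabled: Optional[bool]) -> bool:
    # None means the field was omitted in pings.yaml; default to enabled.
    return True if enabled is None else enabled

assert resolve_enabled(None) is True
assert resolve_enabled(False) is False
```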
diff --git a/third_party/python/glean_parser/glean_parser/schemas/pings.2-0-0.schema.yaml b/third_party/python/glean_parser/glean_parser/schemas/pings.2-0-0.schema.yaml
index 6679a8066b..345812c805 100644
--- a/third_party/python/glean_parser/glean_parser/schemas/pings.2-0-0.schema.yaml
+++ b/third_party/python/glean_parser/glean_parser/schemas/pings.2-0-0.schema.yaml
@@ -96,6 +96,16 @@ additionalProperties:
Interaction with `include_client_id`: `include_client_id` only takes
effect when `metadata.include_info_sections` is `true`.
type: boolean
+ ping_schedule:
+ title: Ping Schedule
+ description: |
+ An optional array of ping names. When one of the listed pings is
+ sent, then this ping will also be sent. A ping cannot list its own
+ name in `ping_schedule`.
+ type: array
+ items:
+ type: string
+ maxLength: 30
default: {}
@@ -175,6 +185,18 @@ additionalProperties:
additionalProperties:
type: string
+ enabled:
+ title: Whether or not this ping is enabled
+ description: |
+ **Optional.**
+
+ When `true`, the ping will be sent as usual.
+ When `false`, the ping will not be sent; its data will continue to be
+ collected, but will not be cleared, since the ping is never submitted.
+
+ Defaults to `true` if omitted.
+ type: boolean
+
no_lint:
title: Lint checks to skip
description: |
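To make the two new fields concrete, here is a sketch that validates a hypothetical ping definition against just the new properties (a hand-copied subset, not the full pings.2-0-0 schema), using the jsonschema library the parser already imports:

```python
import jsonschema

# Subset of the schema above covering only the new fields.
schema = {
    "type": "object",
    "properties": {
        "enabled": {"type": "boolean"},
        "metadata": {
            "type": "object",
            "properties": {
                "ping_schedule": {
                    "type": "array",
                    "items": {"type": "string", "maxLength": 30},
                },
            },
        },
    },
}

# Hypothetical ping definition: disabled, and scheduled to go out
# whenever a (hypothetical) "baseline" ping is sent.
ping = {"enabled": False, "metadata": {"ping_schedule": ["baseline"]}}
jsonschema.validate(ping, schema)  # passes; a non-string entry would raise
```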
diff --git a/third_party/python/glean_parser/glean_parser/swift.py b/third_party/python/glean_parser/glean_parser/swift.py
index c745c4d9ac..b121933b0f 100644
--- a/third_party/python/glean_parser/glean_parser/swift.py
+++ b/third_party/python/glean_parser/glean_parser/swift.py
@@ -106,12 +106,17 @@ def type_name(obj: Union[metrics.Metric, pings.Ping]) -> str:
return "{}<{}>".format(class_name(obj.type), generic)
+ generate_structure = getattr(obj, "_generate_structure", [])
+ if len(generate_structure):
+ generic = util.Camelize(obj.name) + "Object"
+ return "{}<{}>".format(class_name(obj.type), generic)
+
return class_name(obj.type)
def extra_type_name(typ: str) -> str:
"""
- Returns the corresponding Kotlin type for event's extra key types.
+ Returns the corresponding Swift type for event's extra key types.
"""
if typ == "boolean":
@@ -124,6 +129,21 @@ def extra_type_name(typ: str) -> str:
return "UNSUPPORTED"
+def structure_type_name(typ: str) -> str:
+ """
+ Returns the corresponding Swift type for structure items.
+ """
+
+ if typ == "boolean":
+ return "Bool"
+ elif typ == "string":
+ return "String"
+ elif typ == "number":
+ return "Int64"
+ else:
+ return "UNSUPPORTED"
+
+
def class_name(obj_type: str) -> str:
"""
Returns the Swift class name for a given metric or ping type.
@@ -215,6 +235,7 @@ def output_swift(
("class_name", class_name),
("variable_name", variable_name),
("extra_type_name", extra_type_name),
+ ("structure_type_name", structure_type_name),
),
)
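The Kotlin and Swift `structure_type_name` helpers differ only in their target types; sketching both tables side by side (values copied from the two functions, anything else maps to `UNSUPPORTED`):

```python
# JSON-ish structure item type -> platform type, per the two helpers.
STRUCTURE_TYPES = {
    "kotlin": {"boolean": "Boolean", "string": "String", "number": "Int"},
    "swift": {"boolean": "Bool", "string": "String", "number": "Int64"},
}

assert STRUCTURE_TYPES["kotlin"]["number"] == "Int"
assert STRUCTURE_TYPES["swift"]["number"] == "Int64"  # 64-bit on Swift
```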
diff --git a/third_party/python/glean_parser/glean_parser/templates/javascript_server.jinja2 b/third_party/python/glean_parser/glean_parser/templates/javascript_server.jinja2
index 0a89f081f6..9df299fd2b 100644
--- a/third_party/python/glean_parser/glean_parser/templates/javascript_server.jinja2
+++ b/third_party/python/glean_parser/glean_parser/templates/javascript_server.jinja2
@@ -21,7 +21,7 @@ type LoggerOptions = { app: string; fmt?: 'heka' };
type Event = {
category: string;
name: string;
- extra: Record<string, any>;
+ extra?: Record<string, any>;
timestamp?: number;
};
{% endif %}
@@ -30,14 +30,14 @@ type Event = {
let _logger{% if lang == "typescript" %}: Logger{% endif %};
{% for ping, metrics_by_type in pings.items() %}
-class {{ ping|event_class_name(event_metric_exists) }} {
+class {{ ping|event_class_name(metrics_by_type) }} {
{% if lang == "typescript" %}
_applicationId: string;
_appDisplayVersion: string;
_channel: string;
{% endif %}
/**
- * Create {{ ping|event_class_name(event_metric_exists) }} instance.
+ * Create {{ ping|event_class_name(metrics_by_type) }} instance.
*
* @param {string} applicationId - The application ID.
* @param {string} appDisplayVersion - The application display version.
@@ -72,7 +72,7 @@ class {{ ping|event_class_name(event_metric_exists) }} {
{% endif %}
}
}
- {% if event_metric_exists %}
+ {% if 'event' in metrics_by_type %}
#record({
{% else %}
/**
@@ -99,28 +99,28 @@ class {{ ping|event_class_name(event_metric_exists) }} {
{% endfor %}
{% endif %}
{% endfor %}
- {% if event_metric_exists %}
+ {% if 'event' in metrics_by_type %}
event,
{% endif %}
{% if lang == "typescript" %}
}: {
- user_agent: string,
- ip_address: string,
+ user_agent: string;
+ ip_address: string;
{% for metric_type, metrics in metrics_by_type.items() %}
{% if metric_type != 'event' %}
{% for metric in metrics %}
- {{ metric|metric_argument_name }}: {{ metric|js_metric_type }},
+ {{ metric|metric_argument_name }}: {{ metric|js_metric_type }};
{% endfor %}
{% endif %}
{% endfor %}
- {% if event_metric_exists %}
- event: Event
+ {% if 'event' in metrics_by_type %}
+ event: Event;
{% endif %}
{% endif %}
}) {
const now = new Date();
const timestamp = now.toISOString();
- {% if event_metric_exists %}
+ {% if 'event' in metrics_by_type %}
event.timestamp = now.getTime();
{% endif %}
const eventPayload = {
@@ -135,7 +135,7 @@ class {{ ping|event_class_name(event_metric_exists) }} {
{% endif %}
{% endfor %}
},
- {% if event_metric_exists %}
+ {% if 'event' in metrics_by_type %}
events: [event],
{% endif %}
ping_info: {
@@ -171,7 +171,7 @@ class {{ ping|event_class_name(event_metric_exists) }} {
// this is similar to how FxA currently logs with mozlog: https://github.com/mozilla/fxa/blob/4c5c702a7fcbf6f8c6b1f175e9172cdd21471eac/packages/fxa-auth-server/lib/log.js#L289
_logger.info(GLEAN_EVENT_MOZLOG_TYPE, ping);
}
- {% if event_metric_exists %}
+ {% if 'event' in metrics_by_type %}
{% for event in metrics_by_type["event"] %}
/**
* Record and submit a {{ event.category }}_{{ event.name }} event:
@@ -209,27 +209,27 @@ class {{ ping|event_class_name(event_metric_exists) }} {
{% endfor %}
{% if lang == "typescript" %}
}: {
- user_agent: string,
- ip_address: string,
+ user_agent: string;
+ ip_address: string;
{% for metric_type, metrics in metrics_by_type.items() %}
{% if metric_type != 'event' %}
{% for metric in metrics %}
- {{ metric|metric_argument_name }}: {{ metric|js_metric_type }},
+ {{ metric|metric_argument_name }}: {{ metric|js_metric_type }};
{% endfor %}
{% endif %}
{% endfor %}
{% for extra, metadata in event.extra_keys.items() %}
- {{ extra }}: {{metadata.type}},
+ {{ extra }}: {{metadata.type}};
{% endfor %}
{% endif %}
}) {
- let event = {
- 'category': '{{ event.category }}',
- 'name': '{{ event.name }}',
+ const event = {
+ category: '{{ event.category }}',
+ name: '{{ event.name }}',
{% if event.extra_keys %}
- 'extra': {
+ extra: {
{% for extra, metadata in event.extra_keys.items() %}
- '{{ extra }}': {{ extra }},
+ {{ extra }}: {{ extra }},
{% endfor %}
},
{% endif %}
@@ -244,14 +244,14 @@ class {{ ping|event_class_name(event_metric_exists) }} {
{% endfor %}
{% endif %}
{% endfor %}
- event
+ event,
});
}
{% endfor %}
{% endif %}
}
{% endfor %}
-{% for ping in pings %}
+{% for ping, metrics_by_type in pings.items() %}
/**
* Factory function that creates an instance of Glean Server Event Logger to
@@ -262,11 +262,11 @@ class {{ ping|event_class_name(event_metric_exists) }} {
* @param {Object} logger_options - The logger options.
* @returns {EventsServerEventLogger} An instance of EventsServerEventLogger.
*/
-export const {{ ping|factory_method(event_metric_exists) }} = function ({
+export const {{ ping|factory_method(metrics_by_type) }} = function ({
applicationId,
appDisplayVersion,
channel,
- logger_options
+ logger_options,
{% if lang == "typescript" %}
}: {
applicationId: string;
@@ -275,7 +275,7 @@ export const {{ ping|factory_method(event_metric_exists) }} = function ({
logger_options: LoggerOptions;
{% endif %}
}) {
- return new {{ ping|event_class_name(event_metric_exists) }}(
+ return new {{ ping|event_class_name(metrics_by_type) }}(
applicationId,
appDisplayVersion,
channel,
diff --git a/third_party/python/glean_parser/glean_parser/templates/kotlin.jinja2 b/third_party/python/glean_parser/glean_parser/templates/kotlin.jinja2
index bd800af01d..71ba386a4c 100644
--- a/third_party/python/glean_parser/glean_parser/templates/kotlin.jinja2
+++ b/third_party/python/glean_parser/glean_parser/templates/kotlin.jinja2
@@ -66,6 +66,61 @@ data class {{ obj.name|Camelize }}{{ suffix }}(
}
{%- endmacro -%}
+{%- macro generate_structure(name, struct) %}
+{%- if struct.type == "array" -%}
+ @Serializable
+ data class {{ name }}(var items: MutableList<{{ name }}Item> = mutableListOf()) : ObjectSerialize {
+ fun add(elem: {{ name }}Item) = items.add(elem)
+
+ fun addAll(elements: Collection<{{ name }}Item>) = items.addAll(elements)
+
+ fun clear() = items.clear()
+
+ fun remove(element: {{ name }}Item) = items.remove(element)
+ fun removeAll(elements: Collection<{{ name }}Item>) = items.removeAll(elements)
+ fun removeAt(index: Int) = items.removeAt(index)
+
+ fun set(index: Int, element: {{ name }}Item) = items.set(index, element)
+
+ override fun intoSerializedObject(): String {
+ return Json.encodeToString(items)
+ }
+ }
+
+ {{ generate_structure(name ~ "Item", struct["items"]) }}
+
+{%- elif struct.type == "object" -%}
+ @Serializable
+ data class {{ name }}(
+ {% for itemname, val in struct.properties.items() %}
+ {% if val.type == "object" %}
+ var {{itemname|camelize}}: {{ name ~ "Item" ~ itemname|Camelize ~ "Object" }}? = null,
+ {% elif val.type == "array" %}
+ var {{itemname|camelize}}: {{ name ~ "Item" ~ itemname|Camelize }} = {{ name ~ "Item" ~ itemname|Camelize }}(),
+ {% else %}
+ var {{itemname|camelize}}: {{val.type|structure_type_name}}? = null,
+ {% endif %}
+ {% endfor %}
+ ): ObjectSerialize {
+ override fun intoSerializedObject(): String {
+ return Json.encodeToString(this)
+ }
+ }
+
+ {% for itemname, val in struct.properties.items() %}
+ {% if val.type == "array" %}
+ {% set nested_name = name ~ "Item" ~ itemname|Camelize %}
+ {{ generate_structure(nested_name, val) }}
+ {% elif val.type == "object" %}
+ {% set nested_name = name ~ "Item" ~ itemname|Camelize ~ "Object" %}
+ {{ generate_structure(nested_name, val) }}
+ {% endif %}
+ {% endfor %}
+
+{% endif %}
+
+{% endmacro %}
+
/* ktlint-disable no-blank-line-before-rbrace */
@file:Suppress("PackageNaming", "MaxLineLength")
package {{ namespace }}
@@ -76,8 +131,9 @@ import {{ glean_namespace }}.private.HistogramType // ktlint-disable import-orde
import {{ glean_namespace }}.private.Lifetime // ktlint-disable import-ordering no-unused-imports
import {{ glean_namespace }}.private.MemoryUnit // ktlint-disable import-ordering no-unused-imports
import {{ glean_namespace }}.private.NoExtras // ktlint-disable import-ordering no-unused-imports
-import {{ glean_namespace }}.private.ReasonCode // ktlint-disable import-ordering no-unused-imports
import {{ glean_namespace }}.private.NoReasonCodes // ktlint-disable import-ordering no-unused-imports
+import {{ glean_namespace }}.private.ObjectSerialize // ktlint-disable import-ordering no-unused-imports
+import {{ glean_namespace }}.private.ReasonCode // ktlint-disable import-ordering no-unused-imports
import {{ glean_namespace }}.private.TimeUnit // ktlint-disable import-ordering no-unused-imports
{% for obj_type in obj_types %}
import {{ glean_namespace }}.private.{{ obj_type }} // ktlint-disable import-ordering
@@ -85,6 +141,11 @@ import {{ glean_namespace }}.private.{{ obj_type }} // ktlint-disable import-ord
{% if has_labeled_metrics %}
import {{ glean_namespace }}.private.LabeledMetricType // ktlint-disable import-ordering
{% endif %}
+{% if has_object_metrics %}
+import kotlinx.serialization.Serializable
+import kotlinx.serialization.encodeToString
+import kotlinx.serialization.json.Json
+{% endif %}
internal object {{ category_name|Camelize }} {
{% for obj in objs.values() %}
@@ -97,6 +158,9 @@ internal object {{ category_name|Camelize }} {
{% endfor %}
{% endif %}
{% else %}
+ {% if obj|attr("_generate_structure") %}
+ {{ generate_structure(obj.name|Camelize ~ "Object", obj._generate_structure) }}
+ {%- endif %}
{% if obj|attr("_generate_enums") %}
{% for name, suffix in obj["_generate_enums"] %}
{% if obj|attr(name)|length %}
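A rough Python model of the recursion in the `generate_structure` macro above, with a hypothetical metric name (`str.capitalize` stands in for the template's `Camelize` filter): an array yields a wrapper type plus an `<Name>Item` element type, and nested object properties get an extra `Object` suffix.

```python
# Collect the type names the macro would generate for a structure.
def structure_names(name, struct, out):
    if struct["type"] == "array":
        out.append(name)  # the list wrapper type
        structure_names(name + "Item", struct["items"], out)
    elif struct["type"] == "object":
        out.append(name)  # one data class per object
        for prop, val in struct["properties"].items():
            if val["type"] == "array":
                structure_names(name + "Item" + prop.capitalize(), val, out)
            elif val["type"] == "object":
                structure_names(name + "Item" + prop.capitalize() + "Object", val, out)

names = []
structure_names(
    "BackendObjectUpdateObject",  # hypothetical object metric
    {"type": "array",
     "items": {"type": "object", "properties": {"state": {"type": "string"}}}},
    names,
)
print(names)  # ['BackendObjectUpdateObject', 'BackendObjectUpdateObjectItem']
```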
diff --git a/third_party/python/glean_parser/glean_parser/templates/rust.jinja2 b/third_party/python/glean_parser/glean_parser/templates/rust.jinja2
index 4c54dd2b2c..269a007ac5 100644
--- a/third_party/python/glean_parser/glean_parser/templates/rust.jinja2
+++ b/third_party/python/glean_parser/glean_parser/templates/rust.jinja2
@@ -87,7 +87,7 @@ impl ExtraKeys for {{ obj.name|Camelize }}{{ suffix }} {
/// {{ obj.description|wordwrap() | replace('\n', '\n/// ') }}
#[rustfmt::skip]
pub static {{ obj.name|snake_case }}: ::glean::private::__export::Lazy<::glean::private::PingType> =
- ::glean::private::__export::Lazy::new(|| ::glean::private::PingType::new("{{ obj.name }}", {{ obj.include_client_id|rust }}, {{ obj.send_if_empty|rust }}, {{ obj.precise_timestamps|rust }}, {{ obj.include_info_sections|rust }}, {{ obj.reason_codes|rust }}));
+ ::glean::private::__export::Lazy::new(|| ::glean::private::PingType::new("{{ obj.name }}", {{ obj.include_client_id|rust }}, {{ obj.send_if_empty|rust }}, {{ obj.precise_timestamps|rust }}, {{ obj.include_info_sections|rust }}, {{ obj.enabled|rust }}, {{ obj.schedules_pings|rust }}, {{ obj.reason_codes|rust }}));
{% endfor %}
{% else %}
pub mod {{ category.name|snake_case }} {
diff --git a/third_party/python/glean_parser/glean_parser/templates/swift.jinja2 b/third_party/python/glean_parser/glean_parser/templates/swift.jinja2
index 714bf20ec2..fe51a078bc 100644
--- a/third_party/python/glean_parser/glean_parser/templates/swift.jinja2
+++ b/third_party/python/glean_parser/glean_parser/templates/swift.jinja2
@@ -11,7 +11,7 @@ Jinja2 template is not. Please file bugs! #}
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-{% macro obj_declaration(obj, suffix='', access='') %}
+{%- macro obj_declaration(obj, suffix='', access='') %}
{{ access }}static let {{ obj.name|camelize|variable_name }}{{ suffix }} = {{ obj|type_name }}( // generated from {{ obj.identifier() }}
CommonMetricData(
{% for arg_name in common_metric_args if obj[arg_name] is defined %}
@@ -24,7 +24,7 @@ Jinja2 template is not. Please file bugs! #}
)
{% endmacro %}
-{% macro struct_decl(obj, name, suffix) %}
+{%- macro struct_decl(obj, name, suffix) %}
struct {{ obj.name|Camelize }}{{ suffix }}: EventExtras {
{% for item, typ in obj|attr(name) %}
var {{ item|camelize|variable_name }}: {{typ|extra_type_name}}?
@@ -44,6 +44,46 @@ struct {{ obj.name|Camelize }}{{ suffix }}: EventExtras {
}
{% endmacro %}
+{%- macro generate_structure(name, struct) %}
+{%- if struct.type == "array" -%}
+ typealias {{ name }} = [{{ name }}Item]
+
+ {{ generate_structure(name ~ "Item", struct["items"]) }}
+
+{%- elif struct.type == "object" -%}
+ struct {{ name }}: Codable, Equatable, ObjectSerialize {
+ {% for itemname, val in struct.properties.items() %}
+ {% if val.type == "object" %}
+ var {{itemname|camelize|variable_name}}: {{ name ~ "Item" ~ itemname|Camelize ~ "Object" }}?
+ {% elif val.type == "array" %}
+ var {{itemname|camelize|variable_name}}: {{ name ~ "Item" ~ itemname|Camelize }}
+ {% else %}
+ var {{itemname|camelize|variable_name}}: {{val.type|structure_type_name}}?
+ {% endif %}
+ {% endfor %}
+
+ func intoSerializedObject() -> String {
+ let jsonEncoder = JSONEncoder()
+ let jsonData = try! jsonEncoder.encode(self)
+ let json = String(data: jsonData, encoding: String.Encoding.utf8)!
+ return json
+ }
+ }
+
+ {% for itemname, val in struct.properties.items() %}
+ {% if val.type == "array" %}
+ {% set nested_name = name ~ "Item" ~ itemname|Camelize %}
+ {{ generate_structure(nested_name, val) }}
+ {% elif val.type == "object" %}
+ {% set nested_name = name ~ "Item" ~ itemname|Camelize ~ "Object" %}
+ {{ generate_structure(nested_name, val) }}
+ {% endif %}
+ {% endfor %}
+
+{%- endif -%}
+
+{% endmacro %}
+
{% if not allow_reserved %}
import {{ glean_namespace }}
@@ -97,6 +137,8 @@ extension {{ namespace }} {
sendIfEmpty: {{obj.send_if_empty|swift}},
preciseTimestamps: {{obj.precise_timestamps|swift}},
includeInfoSections: {{obj.include_info_sections|swift}},
+ enabled: {{obj.enabled|swift}},
+ schedulesPings: {{obj.schedules_pings|swift}},
reasonCodes: {{obj.reason_codes|swift}}
)
@@ -106,6 +148,9 @@ extension {{ namespace }} {
{% else %}
enum {{ category.name|Camelize }} {
{% for obj in category.objs.values() %}
+ {% if obj|attr("_generate_structure") %}
+ {{ generate_structure(obj.name|Camelize ~ "Object", obj._generate_structure) }}
+ {%- endif %}
{% if obj|attr("_generate_enums") %}
{% for name, suffix in obj["_generate_enums"] %}
{% if obj|attr(name)|length %}
diff --git a/third_party/python/glean_parser/glean_parser/util.py b/third_party/python/glean_parser/glean_parser/util.py
index f8bc7d4f53..a61e318dbe 100644
--- a/third_party/python/glean_parser/glean_parser/util.py
+++ b/third_party/python/glean_parser/glean_parser/util.py
@@ -531,6 +531,8 @@ ping_args = [
"send_if_empty",
"precise_timestamps",
"include_info_sections",
+ "enabled",
+ "schedules_pings",
"reason_codes",
]
diff --git a/third_party/python/poetry.lock b/third_party/python/poetry.lock
index 97513f8ba5..0547486b27 100644
--- a/third_party/python/poetry.lock
+++ b/third_party/python/poetry.lock
@@ -592,14 +592,14 @@ files = [
[[package]]
name = "glean-parser"
-version = "13.0.1"
+version = "14.0.1"
description = "Parser tools for Mozilla's Glean telemetry"
category = "main"
optional = false
python-versions = "*"
files = [
- {file = "glean_parser-13.0.1-py3-none-any.whl", hash = "sha256:8421c88f3673dd195d0cde635f4f09c9bfd0c9709ad3d28c8b201b3b7145e257"},
- {file = "glean_parser-13.0.1.tar.gz", hash = "sha256:feead4cbec6930ed38a48df5bae9eb4ee486bb4026ddf2f3206b85f80279d1e7"},
+ {file = "glean_parser-14.0.1-py3-none-any.whl", hash = "sha256:3275ca235885c99da659fa7d9bf929b8fb020df79d26fcbec317328c369cd039"},
+ {file = "glean_parser-14.0.1.tar.gz", hash = "sha256:3e9e5f99ad8592300e364b70d6247b21c445774a73a2ad274677fb58a0065809"},
]
[package.dependencies]
@@ -1161,7 +1161,6 @@ files = [
{file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
{file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
{file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
- {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"},
{file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
{file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
{file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
@@ -1376,14 +1375,14 @@ test = ["aiofiles", "coverage", "flake8", "httmock", "httptest", "hypothesis", "
[[package]]
name = "taskcluster-taskgraph"
-version = "6.3.0"
+version = "8.0.1"
description = "Build taskcluster taskgraphs"
category = "main"
optional = false
python-versions = "*"
files = [
- {file = "taskcluster-taskgraph-6.3.0.tar.gz", hash = "sha256:a32ac3aad6aa90c593268bee8864d9f773e86e1f53d0b513d128d59b52c1e20b"},
- {file = "taskcluster_taskgraph-6.3.0-py3-none-any.whl", hash = "sha256:43ce187215ab8658c06ad80f46c4606ce51b9986f4365d541416eecf9d6a2c28"},
+ {file = "taskcluster-taskgraph-8.0.1.tar.gz", hash = "sha256:21387537bbebab2a7b1890d03e20e49379bdda65efd45ca7fb8d01f5c29e1797"},
+ {file = "taskcluster_taskgraph-8.0.1-py3-none-any.whl", hash = "sha256:14500bc703f64eb002c0cd505caaf2d34ffc0ae66d109b108e738661da1ae09c"},
]
[package.dependencies]
@@ -1625,4 +1624,4 @@ testing = ["func-timeout", "jaraco.itertools", "pytest (>=4.6)", "pytest-black (
[metadata]
lock-version = "2.0"
python-versions = "^3.8"
-content-hash = "cef77da3299e7849f5039e8d9017216048d4ca56af298209e5bf3db7f92c2d4c"
+content-hash = "8e72dc9ba9b4f08d27d90f99666459a814d1bb293c68de222614ea57db5b70ef"
diff --git a/third_party/python/requirements.in b/third_party/python/requirements.in
index 9915e91957..abcfc79239 100644
--- a/third_party/python/requirements.in
+++ b/third_party/python/requirements.in
@@ -22,7 +22,7 @@ fluent.migrate==0.13.0
fluent.syntax==0.19.0
# Pin `frozenlist` as it is required for `aiohttp`. Use minimum required version.
frozenlist==1.1.1
-glean_parser==13.0.1
+glean_parser==14.0.1
importlib-metadata==6.0.0
# required for compatibility with Flask >= 2 in tools/tryselect/selectors/chooser
jinja2==3.1.2
@@ -53,7 +53,7 @@ setuptools==68.0.0
six==1.16.0
slugid==2.0.0
taskcluster==44.2.2
-taskcluster-taskgraph==6.3.0
+taskcluster-taskgraph==8.0.1
taskcluster-urls==13.0.1
toml==0.10.2
tomlkit==0.12.3
diff --git a/third_party/python/requirements.txt b/third_party/python/requirements.txt
index eedc022c50..3b8c897e8a 100644
--- a/third_party/python/requirements.txt
+++ b/third_party/python/requirements.txt
@@ -275,9 +275,9 @@ frozenlist==1.1.1 ; python_version >= "3.8" and python_version < "4.0" \
giturlparse==0.10.0 ; python_version >= "3.8" and python_version < "4.0" \
--hash=sha256:04ba1a3a099c3093fa8d24a422913c6a9b2c2cd22bcffc939cf72e3e98f672d7 \
--hash=sha256:2595ab291d30717cda8474b874c9fd509f1b9802ad7f6968c36a45e4b13eb337
-glean-parser==13.0.1 ; python_version >= "3.8" and python_version < "4.0" \
- --hash=sha256:8421c88f3673dd195d0cde635f4f09c9bfd0c9709ad3d28c8b201b3b7145e257 \
- --hash=sha256:feead4cbec6930ed38a48df5bae9eb4ee486bb4026ddf2f3206b85f80279d1e7
+glean-parser==14.0.1 ; python_version >= "3.8" and python_version < "4.0" \
+ --hash=sha256:3275ca235885c99da659fa7d9bf929b8fb020df79d26fcbec317328c369cd039 \
+ --hash=sha256:3e9e5f99ad8592300e364b70d6247b21c445774a73a2ad274677fb58a0065809
idna==2.10 ; python_version >= "3.8" and python_version < "4.0" \
--hash=sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6 \
--hash=sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0
@@ -492,7 +492,6 @@ pyyaml==6.0.1 ; python_version >= "3.8" and python_version < "4.0" \
--hash=sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4 \
--hash=sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba \
--hash=sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8 \
- --hash=sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef \
--hash=sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5 \
--hash=sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd \
--hash=sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3 \
@@ -540,9 +539,9 @@ six==1.16.0 ; python_version >= "3.8" and python_version < "4.0" \
slugid==2.0.0 ; python_version >= "3.8" and python_version < "4.0" \
--hash=sha256:a950d98b72691178bdd4d6c52743c4a2aa039207cf7a97d71060a111ff9ba297 \
--hash=sha256:aec8b0e01c4ad32e38e12d609eab3ec912fd129aaf6b2ded0199b56a5f8fd67c
-taskcluster-taskgraph==6.3.0 ; python_version >= "3.8" and python_version < "4.0" \
- --hash=sha256:43ce187215ab8658c06ad80f46c4606ce51b9986f4365d541416eecf9d6a2c28 \
- --hash=sha256:a32ac3aad6aa90c593268bee8864d9f773e86e1f53d0b513d128d59b52c1e20b
+taskcluster-taskgraph==8.0.1 ; python_version >= "3.8" and python_version < "4.0" \
+ --hash=sha256:14500bc703f64eb002c0cd505caaf2d34ffc0ae66d109b108e738661da1ae09c \
+ --hash=sha256:21387537bbebab2a7b1890d03e20e49379bdda65efd45ca7fb8d01f5c29e1797
taskcluster-urls==13.0.1 ; python_version >= "3.8" and python_version < "4.0" \
--hash=sha256:5e25e7e6818e8877178b175ff43d2e6548afad72694aa125f404a7329ece0973 \
--hash=sha256:b25e122ecec249c4299ac7b20b08db76e3e2025bdaeb699a9d444556de5fd367 \
diff --git a/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.3.0.dist-info/METADATA b/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.3.0.dist-info/METADATA
deleted file mode 100644
index 536b4274f6..0000000000
--- a/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.3.0.dist-info/METADATA
+++ /dev/null
@@ -1,28 +0,0 @@
-Metadata-Version: 2.1
-Name: taskcluster-taskgraph
-Version: 6.3.0
-Summary: Build taskcluster taskgraphs
-Home-page: https://github.com/taskcluster/taskgraph
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Environment :: Console
-Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)
-Classifier: Programming Language :: Python :: 3.7
-Classifier: Programming Language :: Python :: 3.8
-Classifier: Programming Language :: Python :: 3.9
-Classifier: Programming Language :: Python :: 3.10
-Classifier: Programming Language :: Python :: 3.11
-Classifier: Topic :: Software Development
-License-File: LICENSE
-Requires-Dist: appdirs (>=1.4)
-Requires-Dist: cookiecutter (~=2.1)
-Requires-Dist: json-e (>=2.7)
-Requires-Dist: mozilla-repo-urls
-Requires-Dist: PyYAML (>=5.3.1)
-Requires-Dist: redo (>=2.0)
-Requires-Dist: requests (>=2.25)
-Requires-Dist: slugid (>=2.0)
-Requires-Dist: taskcluster-urls (>=11.0)
-Requires-Dist: voluptuous (>=0.12.1)
-Provides-Extra: load-image
-Requires-Dist: zstandard ; extra == 'load-image'
-
diff --git a/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.3.0.dist-info/RECORD b/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.3.0.dist-info/RECORD
deleted file mode 100644
index 3a6dfdfc35..0000000000
--- a/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.3.0.dist-info/RECORD
+++ /dev/null
@@ -1,80 +0,0 @@
-taskgraph/__init__.py,sha256=ILqRnb_Cy7WBFggPsK8BML-nmWySW-capstDs3pWb-c,729
-taskgraph/config.py,sha256=XJYKaA9Egn7aiyZ0v70VCq3Kc-XkK08CK2LDsDfsDR8,4822
-taskgraph/create.py,sha256=MeWVr5gKJefjwK_3_xZUcDDu2NVH97gbUuu1dw_I9hA,5184
-taskgraph/decision.py,sha256=qARBTlLYJ7NVw3aflrspRn_hFmvKcrXJ058yao_4b7A,12882
-taskgraph/docker.py,sha256=6tdGVrKFNonznRJSP4IDZEhKnjV-wYKsR0nXnoDOvZk,7924
-taskgraph/files_changed.py,sha256=W3_gEgUT-mVH9DaaU_8X6gYpftrqBU3kgveGbzPLziU,2793
-taskgraph/filter_tasks.py,sha256=R7tYXiaVPGIkQ6O1c9-QJrKZ59m9pFXCloUlPraVnZU,866
-taskgraph/generator.py,sha256=AmkMCVNmj5spJhRfpSx7-zP3v8OU7i8zAbGMROLLEG8,15668
-taskgraph/graph.py,sha256=bHUsv2pPa2SSaWgBY-ItIj7REPd0o4fFYrwoQbwFKTY,4680
-taskgraph/main.py,sha256=UHSywURHwD56w2vGHgjA8O7K1yaCltgMXlJuuFfFjvY,26802
-taskgraph/morph.py,sha256=Q6weAi-xpJM4XoKA2mM6gVXQYLnE1YSws53vTZygMkY,9192
-taskgraph/parameters.py,sha256=xaEUElvdKhxHeJNRMF-6JBFDFiVO1Es2fm38PJQ1JA4,12134
-taskgraph/target_tasks.py,sha256=41BIVwiATy8DCQujPduTtnFmgHlKOfw6RPGL4b20WO8,3324
-taskgraph/task.py,sha256=tRr7WhJ2qjYXi-77wva17CpfK53m6W_cl-xzks_GGaQ,3240
-taskgraph/taskgraph.py,sha256=Fh5cX8LrgYmkpVP_uhpfRgHSKHfZjO-VGSmnFUjEru0,2434
-taskgraph/actions/__init__.py,sha256=lVP1e0YyELg7-_42MWWDbT0cKv_p53BApVE6vWOiPww,416
-taskgraph/actions/add_new_jobs.py,sha256=HAfuRDzFti_YmeudxqVl6hgrEbm-ki5-jSCDMC0HBDE,1836
-taskgraph/actions/cancel.py,sha256=UQSt_6y3S6PXNmUo_mNaUOuDvK2bixWjzdjTKXieEEg,1309
-taskgraph/actions/cancel_all.py,sha256=zrKgnW63gMGS5yldJieDt-GAR_XTiGRgybWAipIUCqQ,1941
-taskgraph/actions/rebuild_cached_tasks.py,sha256=UrVAvTmkkF4TAB5vNSpK1kJqMhMkKAMGmrifxH9kQJQ,1086
-taskgraph/actions/registry.py,sha256=xmhoEGMyYj6TTRFwMowZAUp0aqvtLvdVfmRWM7Yh7xo,13122
-taskgraph/actions/retrigger.py,sha256=wF08p_CgsfqraYelc3JLmPcqBFcO-Yt8gZZLlJZBixQ,9387
-taskgraph/actions/util.py,sha256=TxWxMWiKZeuKRwqiUawzjzpa5VF5AWgAKCLy7YaKG80,10661
-taskgraph/loader/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-taskgraph/loader/default.py,sha256=ND_Sx7yx7io1B-6rWEGkg3UIy7iO3DvSLMXwcEqF1N8,1185
-taskgraph/loader/transform.py,sha256=olUBPjxk3eEIg25sduxlcyqhjoig4ts5kPlT_zs6g9g,2147
-taskgraph/optimize/__init__.py,sha256=Oqpq1RW8QzOcu7zaMlNQ3BHT9ws9e_93FWfCqzNcQps,123
-taskgraph/optimize/base.py,sha256=WvoDNewyHG46IQbG3th-aau9OxSKegsYNfvdOEmunbA,18341
-taskgraph/optimize/strategies.py,sha256=IifMlxppVrIABsvn6UBwQYBFUdxkmyZz_FOtK6yNPps,2380
-taskgraph/run-task/fetch-content,sha256=G1aAvZlTg0yWHqxhSxi4RvfxW-KBJ5JwnGtWRqfH_bg,29990
-taskgraph/run-task/hgrc,sha256=BybWLDR89bWi3pE5T05UqmDHs02CbLypE-omLZWU6Uk,896
-taskgraph/run-task/robustcheckout.py,sha256=vPKvHb3fIIJli9ZVZG88XYoa8Sohy2JrpmH6pDgBDHI,30813
-taskgraph/run-task/run-task,sha256=Mpr195iq9eOh6B4MBpPzEDlxeNyJq0Fa2yrtlJunlXE,45434
-taskgraph/transforms/__init__.py,sha256=aw1dz2sRWZcbTILl6SVDuqIEw0mDdjSYu3LCVs-RLXE,110
-taskgraph/transforms/base.py,sha256=LFw2NwhrSriI3vbcCttArTFb7uHxckQpHeFZmatofvM,5146
-taskgraph/transforms/cached_tasks.py,sha256=Z10VD1kEBVXJvj8qSsNTq2mYpklh0V1EN8OT6QK3v_E,2607
-taskgraph/transforms/chunking.py,sha256=7z9oXiA2dDguYwJPaZYCi-fEzbc--O9avZAFS3vP_kg,2592
-taskgraph/transforms/code_review.py,sha256=eE2xrDtdD_n3HT3caQ2HGAkPm6Uutdm4hDCpCoFjEps,707
-taskgraph/transforms/docker_image.py,sha256=AUuWMx43FcQfgbXy4_2Sjae0cWrh5XWMMcJ3ItcoKes,7606
-taskgraph/transforms/fetch.py,sha256=ORnxpVidOQtI1q1xeHl1c1jlShXD8R_jTGC2CX3lLM4,10479
-taskgraph/transforms/from_deps.py,sha256=1mdjIWYshVI2zBywzB3JEqOyvqgVjFvarcQt9PLDSc4,8950
-taskgraph/transforms/notify.py,sha256=0sga-Ls9dhWLAsL0FBjXmVbbduee8LAZp_1pHBQR0iI,6019
-taskgraph/transforms/task.py,sha256=0oQYH7Upjus0-gzCrYbE0tUKZQUEv6Uq1adGBqiNM60,52254
-taskgraph/transforms/task_context.py,sha256=FxZwT69ozierogtlCTNvk7zCW52d0HdhCaJN7EDmI1s,4272
-taskgraph/transforms/job/__init__.py,sha256=JbNpqdoJRId24QVGe821r6u7Zvm2fTNvME_PMGunaoU,17706
-taskgraph/transforms/job/common.py,sha256=ldlbRI8sdEd-eUcre4GtXMerUg0RQZ_XSe9GwAkfI3I,5897
-taskgraph/transforms/job/index_search.py,sha256=Ngh9FFu1bx2kHVTChW2vcrbnb3SzMneRHopXk18RfB4,1220
-taskgraph/transforms/job/run_task.py,sha256=s9gq1bPdzBB0j2OguXJpWn1-S5Ctltqo4aLsB4kzpUc,8385
-taskgraph/transforms/job/toolchain.py,sha256=GOqIvp1MgtV-6whi2ofgSCFB7GolikZbfLXz0C1h0vc,6015
-taskgraph/util/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-taskgraph/util/archive.py,sha256=nzYn8cQ3NfLAeV-2SuTNoeQ6hg8m40f6FQcSTyVIKwQ,2855
-taskgraph/util/attributes.py,sha256=pPOFmwkDQQ-IqfDpVghZ10YI_qXRY4Bi5JP3xr6XVvc,2964
-taskgraph/util/cached_tasks.py,sha256=o-yJ91wlWbzoDB2GvKPpGcDE27_IEMgczp_figEBjV8,3406
-taskgraph/util/decision.py,sha256=uTC143FpTKQkGff5jIz3voWRYXBCHgx-XAm7FMW53hE,2433
-taskgraph/util/dependencies.py,sha256=3Qba3zI87JYR5fk5FndGzEVW-5NIzzZrBf9rVYcnLD0,2734
-taskgraph/util/docker.py,sha256=rTbzUt8S6s3N1r8gmwHrqsIY9VZ7TDWBM-jZQ5w0P_U,7762
-taskgraph/util/hash.py,sha256=31sQmDwQOavA5hWsmzWDNFoFTaTp5a7qLSQLNTEALD8,1661
-taskgraph/util/keyed_by.py,sha256=cgBH4tG8eH5UUrm5q4ODG7A4fzkGAOI7feVoZy3V8Ho,3419
-taskgraph/util/memoize.py,sha256=XDlwc-56gzoY8QTwOoiCOYL-igX7JoMcY-9Ih80Euc8,1331
-taskgraph/util/parameterization.py,sha256=dzxh8Bc8MBKoDMwj2V2AQab9UrC-JcM3tg0hDVTWpjc,3184
-taskgraph/util/path.py,sha256=e-JloOQV2-Oua_pe335bv4xWAB07vb82TKpu_zCOl0w,4466
-taskgraph/util/python_path.py,sha256=ed4F5z2mId56LauVczgxm_LGxgQi8XlxlYDgXOPZyII,1576
-taskgraph/util/readonlydict.py,sha256=XzTG-gqGqWVlSkDxSyOL6Ur7Z0ONhIJ9DVLWV3q4q1w,787
-taskgraph/util/schema.py,sha256=JGd0Imjfv6JKCY_tjJtOYwI6uwKUaNgzAcvcZj5WE6A,8323
-taskgraph/util/shell.py,sha256=MB9zHVSvxgOuszgmKr2rWUDahANZkbHHNkjjagZG_3I,1317
-taskgraph/util/taskcluster.py,sha256=cGUGvkrefRHngjyZm_iQRYKRlGi4jMIr7ky0fi_YBrg,12445
-taskgraph/util/taskgraph.py,sha256=ecKEvTfmLVvEKLPO_0g34CqVvc0iCzuNMh3064BZNrE,1969
-taskgraph/util/templates.py,sha256=HGTaIKCpAwEzBDHq0cDai1HJjPJrdnHsjJz6N4LVpKI,2139
-taskgraph/util/time.py,sha256=pNFcTH-iYRfm2-okm1lMATc4B5wO-_FXbOFXEtXD27g,3390
-taskgraph/util/treeherder.py,sha256=A3rpPUQB60Gn1Yx-OZgKuWWGJ8x0-6tcdeeslzco9ag,2687
-taskgraph/util/vcs.py,sha256=54Haq2XyC5CmPnjrPRQZY5wUeoFsaV9pWTYvBjPcVMA,18917
-taskgraph/util/verify.py,sha256=cSd7EeP9hUvp-5WOvKDHrvpFAGb_LuiNPxPp0-YmNEA,8947
-taskgraph/util/workertypes.py,sha256=1wgM6vLrlgtyv8854anVIs0Bx11kV8JJJaKcOHJc2j0,2498
-taskgraph/util/yaml.py,sha256=hfKI_D8Q7dimq4_VvO3WEh8CJsTrsIMwN6set7HIQbY,990
-taskcluster_taskgraph-6.3.0.dist-info/LICENSE,sha256=HyVuytGSiAUQ6ErWBHTqt1iSGHhLmlC8fO7jTCuR8dU,16725
-taskcluster_taskgraph-6.3.0.dist-info/METADATA,sha256=MgIgtvNBRjc0CjnoD-7KHLPpz3sGlja2CZU3GzUMW84,1046
-taskcluster_taskgraph-6.3.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
-taskcluster_taskgraph-6.3.0.dist-info/entry_points.txt,sha256=2hxDzE3qq_sHh-J3ROqwpxgQgxO-196phWAQREl2-XA,50
-taskcluster_taskgraph-6.3.0.dist-info/top_level.txt,sha256=3JNeYn_hNiNXC7DrdH_vcv-WYSE7QdgGjdvUYvSjVp0,10
-taskcluster_taskgraph-6.3.0.dist-info/RECORD,,
diff --git a/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.3.0.dist-info/LICENSE b/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-8.0.1.dist-info/LICENSE
index a612ad9813..a612ad9813 100644
--- a/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.3.0.dist-info/LICENSE
+++ b/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-8.0.1.dist-info/LICENSE
diff --git a/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-8.0.1.dist-info/METADATA b/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-8.0.1.dist-info/METADATA
new file mode 100644
index 0000000000..e549db9aa3
--- /dev/null
+++ b/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-8.0.1.dist-info/METADATA
@@ -0,0 +1,123 @@
+Metadata-Version: 2.1
+Name: taskcluster-taskgraph
+Version: 8.0.1
+Summary: Build taskcluster taskgraphs
+Home-page: https://github.com/taskcluster/taskgraph
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Console
+Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Topic :: Software Development
+License-File: LICENSE
+Requires-Dist: appdirs >=1.4
+Requires-Dist: cookiecutter ~=2.1
+Requires-Dist: json-e >=2.7
+Requires-Dist: mozilla-repo-urls
+Requires-Dist: PyYAML >=5.3.1
+Requires-Dist: redo >=2.0
+Requires-Dist: requests >=2.25
+Requires-Dist: slugid >=2.0
+Requires-Dist: taskcluster-urls >=11.0
+Requires-Dist: voluptuous >=0.12.1
+Provides-Extra: load-image
+Requires-Dist: zstandard ; extra == 'load-image'
+
+
+.. image:: https://firefox-ci-tc.services.mozilla.com/api/github/v1/repository/taskcluster/taskgraph/main/badge.svg
+ :target: https://firefox-ci-tc.services.mozilla.com/api/github/v1/repository/taskcluster/taskgraph/main/latest
+ :alt: Task Status
+
+.. image:: https://results.pre-commit.ci/badge/github/taskcluster/taskgraph/main.svg
+ :target: https://results.pre-commit.ci/latest/github/taskcluster/taskgraph/main
+ :alt: pre-commit.ci status
+
+.. image:: https://codecov.io/gh/taskcluster/taskgraph/branch/main/graph/badge.svg?token=GJIV52ZQNP
+ :target: https://codecov.io/gh/taskcluster/taskgraph
+ :alt: Code Coverage
+
+.. image:: https://badge.fury.io/py/taskcluster-taskgraph.svg
+ :target: https://badge.fury.io/py/taskcluster-taskgraph
+   :alt: PyPI Version
+
+.. image:: https://readthedocs.org/projects/taskcluster-taskgraph/badge/?version=latest
+ :target: https://taskcluster-taskgraph.readthedocs.io/en/latest/?badge=latest
+ :alt: Documentation Status
+
+.. image:: https://img.shields.io/badge/license-MPL%202.0-orange.svg
+ :target: http://mozilla.org/MPL/2.0
+ :alt: License
+
+Taskgraph
+=========
+
+Taskgraph is a Python library for generating graphs of tasks for the
+`Taskcluster CI`_ service. It is the recommended approach for configuring
+tasks once your project outgrows a single `.taskcluster.yml`_ file, and it
+powers the more than 30,000 tasks that make up Firefox's CI.
+
+For more information and usage instructions, `see the docs`_.
+
+How It Works
+------------
+
+Taskgraph leverages the fact that Taskcluster is a generic task execution
+platform. This means that tasks can be scheduled via its `comprehensive API`_,
+and aren't limited to being triggered in response to supported events.
+
+Taskgraph builds on this execution platform to let CI systems scale to any
+size or complexity.
+
+1. A *decision task* is created via Taskcluster's normal `.taskcluster.yml`_
+ file. This task invokes ``taskgraph``.
+2. Taskgraph evaluates a series of YAML-based task definitions (similar to
+   those provided by other CI offerings).
+3. Taskgraph applies transforms on top of these task definitions. Transforms
+ are Python functions that can programmatically alter or even clone a task
+ definition.
+4. Taskgraph applies some optional optimization logic to remove unnecessary
+ tasks.
+5. Taskgraph submits the resulting *task graph* to Taskcluster via its API.
+
+This combination of declarative task configuration and programmatic
+alteration is what allows Taskgraph to support CI systems of any scale,
+including `Firefox's CI`_.
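+
+A transform, for example, is just a generator function registered on a
+``TransformSequence``. A minimal sketch (the function name and the attribute
+it sets are illustrative, not part of the library):
+
+.. code-block:: python
+
+ from taskgraph.transforms.base import TransformSequence
+
+ transforms = TransformSequence()
+
+ @transforms.add
+ def add_retrigger_attribute(config, tasks):
+     # Each transform receives the kind's configuration and an iterable of
+     # task definitions, and yields (possibly modified) definitions.
+     for task in tasks:
+         task.setdefault("attributes", {})["retrigger"] = True
+         yield task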
+
+.. _Taskcluster CI: https://taskcluster.net/
+.. _comprehensive API: https://docs.taskcluster.net/docs/reference/platform/queue/api
+.. _.taskcluster.yml: https://docs.taskcluster.net/docs/reference/integrations/github/taskcluster-yml-v1
+.. _Firefox's CI: https://treeherder.mozilla.org/jobs?repo=mozilla-central
+.. _see the docs: https://taskcluster-taskgraph.readthedocs.io
+
+Installation
+------------
+
+Taskgraph supports Python 3.8 and up, and can be installed from PyPI:
+
+.. code-block::
+
+ pip install taskcluster-taskgraph
+
+
+Alternatively, the repo can be cloned and installed directly:
+
+.. code-block::
+
+ git clone https://github.com/taskcluster/taskgraph
+ cd taskgraph
+ python setup.py install
+
+In both cases, it's recommended to use a Python `virtual environment`_.
+
+.. _virtual environment: https://docs.python.org/3/tutorial/venv.html
+
+Get Involved
+------------
+
+If you'd like to get involved, please see our `contributing docs`_!
+
+.. _contributing docs: https://github.com/taskcluster/taskgraph/blob/main/CONTRIBUTING.rst
diff --git a/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-8.0.1.dist-info/RECORD b/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-8.0.1.dist-info/RECORD
new file mode 100644
index 0000000000..c04e803ff2
--- /dev/null
+++ b/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-8.0.1.dist-info/RECORD
@@ -0,0 +1,79 @@
+taskgraph/__init__.py,sha256=hCl3NLzC-cVXlKhuzf0-_0wd0gYmNA3oshXfTaa9DNQ,729
+taskgraph/config.py,sha256=8vntWUrPwGds22mFKYAgcsD4Mr8hoONTv2ssGBcClLw,5108
+taskgraph/create.py,sha256=_zokjSM3ZaO04l2LiMhenE8qXDZVfYvueIIu5hGUhzc,5185
+taskgraph/decision.py,sha256=sG0CIj9OSOdfN65LSt6dRYFWbns9_JraVC5fQU1_7oc,13012
+taskgraph/docker.py,sha256=rk-tAMycHnapFyR2Q-XJXzC2A4uv0i-VykLZfwl-pRo,8417
+taskgraph/filter_tasks.py,sha256=R7tYXiaVPGIkQ6O1c9-QJrKZ59m9pFXCloUlPraVnZU,866
+taskgraph/generator.py,sha256=zrH1zfy-8akksKTSOf6e4FEsdOd5y7-h1Jne_2Jabcc,15703
+taskgraph/graph.py,sha256=bHUsv2pPa2SSaWgBY-ItIj7REPd0o4fFYrwoQbwFKTY,4680
+taskgraph/main.py,sha256=tgfAEcNUJfmADteL24yJR5u7tzU4v3mzmxiogVSCK8Y,29072
+taskgraph/morph.py,sha256=bwkaSGdTZLcK_rhF2st2mCGv9EHN5WdbnDeuZcqp9UA,9208
+taskgraph/parameters.py,sha256=hrwUHHu4PS79w-fQ3qNnLSyjRto1EDlidE8e1GzIy8U,12272
+taskgraph/target_tasks.py,sha256=9_v66bzmQFELPsfIDGITXrqzsmEiLq1EeuJFhycKL0M,3356
+taskgraph/task.py,sha256=tRr7WhJ2qjYXi-77wva17CpfK53m6W_cl-xzks_GGaQ,3240
+taskgraph/taskgraph.py,sha256=Fh5cX8LrgYmkpVP_uhpfRgHSKHfZjO-VGSmnFUjEru0,2434
+taskgraph/actions/__init__.py,sha256=lVP1e0YyELg7-_42MWWDbT0cKv_p53BApVE6vWOiPww,416
+taskgraph/actions/add_new_jobs.py,sha256=c8vGWGXMr4qqW2Axz9rbBrDopabZB3gf3SVFLBZH8ak,1865
+taskgraph/actions/cancel.py,sha256=xrIzlB5KzcnQ4_HultoIcnlxtbQhUi7723g5K2iQoY0,1263
+taskgraph/actions/cancel_all.py,sha256=zNiHtOiSQQxLyNJYtaW0JKPazHXSgZrq1C6o8DGYxG8,1887
+taskgraph/actions/rebuild_cached_tasks.py,sha256=r1QTri2ey30TdEztUgc-nkiHdJPe8Sbn7FvKeR_kt0Y,1115
+taskgraph/actions/registry.py,sha256=hubblOhL3fbWDRtKv7_6HiD0P94hzQrpjdMkj23CGCg,13564
+taskgraph/actions/retrigger.py,sha256=MKkoZDAe0SKIq6fHqwAc1Ici_wIGRd7MxeBNhwoDEGE,9388
+taskgraph/actions/util.py,sha256=gB8MZb8juP1S7EsLHJivr6BBY2bf5IUiIpN7Mq9-kXo,10964
+taskgraph/loader/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+taskgraph/loader/default.py,sha256=_bBJG6l04v44Jm5HSIEnVndC05NpNmq5L28QfJHk0wo,1185
+taskgraph/loader/transform.py,sha256=olUBPjxk3eEIg25sduxlcyqhjoig4ts5kPlT_zs6g9g,2147
+taskgraph/optimize/__init__.py,sha256=Oqpq1RW8QzOcu7zaMlNQ3BHT9ws9e_93FWfCqzNcQps,123
+taskgraph/optimize/base.py,sha256=wTViUwVmY9sZvlzSuGwkVrETCo0v2OfyNxFFgzJrDNc,18982
+taskgraph/optimize/strategies.py,sha256=UryFI5TizzEF_2NO8MyuKwqVektHfJeG_t0_zZwxEds,2577
+taskgraph/run-task/fetch-content,sha256=G1aAvZlTg0yWHqxhSxi4RvfxW-KBJ5JwnGtWRqfH_bg,29990
+taskgraph/run-task/hgrc,sha256=BybWLDR89bWi3pE5T05UqmDHs02CbLypE-omLZWU6Uk,896
+taskgraph/run-task/robustcheckout.py,sha256=vPKvHb3fIIJli9ZVZG88XYoa8Sohy2JrpmH6pDgBDHI,30813
+taskgraph/run-task/run-task,sha256=ev64Ud2X3482B05aurUcWD93_sZS1aW2N-eVutRHF5k,45753
+taskgraph/transforms/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+taskgraph/transforms/base.py,sha256=z20Yh619srbwuQJWASRtG2_j6NUbWlCujTTCHWLa0GY,5147
+taskgraph/transforms/cached_tasks.py,sha256=Z10VD1kEBVXJvj8qSsNTq2mYpklh0V1EN8OT6QK3v_E,2607
+taskgraph/transforms/chunking.py,sha256=7z9oXiA2dDguYwJPaZYCi-fEzbc--O9avZAFS3vP_kg,2592
+taskgraph/transforms/code_review.py,sha256=tevRFQli3MkzW_0Zhr-hwlVti8hFaXEz94llwhBu_ns,713
+taskgraph/transforms/docker_image.py,sha256=GScS7Lld3YcS57eC30wp3DJM_ATLrmmVfZzINKgC1fM,7546
+taskgraph/transforms/fetch.py,sha256=u1M57LQOi0kHz6FFP1qah3yJh15eXYqQCF_F6r5qjh0,10662
+taskgraph/transforms/from_deps.py,sha256=_cdIefdRkZYWaFJaWpsglivvG8bBGWd4beg7QgNl0Jc,8885
+taskgraph/transforms/notify.py,sha256=0sga-Ls9dhWLAsL0FBjXmVbbduee8LAZp_1pHBQR0iI,6019
+taskgraph/transforms/task.py,sha256=nRzNAxLjA6BsFktZAA9Upqb_pSFNhjoCzKm0QDxvVgM,52586
+taskgraph/transforms/task_context.py,sha256=9v3ke967atAYCtQxIblSFucJA1tum9Q8QpXQeTwNIzU,4278
+taskgraph/transforms/run/__init__.py,sha256=gVJ4eNquKNlygX18OtWTDnl6FFsZlA12bxfvB3kZz14,17761
+taskgraph/transforms/run/common.py,sha256=G3WdMHU5YWUfk1uR6xsxWY7MQKjU9tnqtRDmGttUqt4,5626
+taskgraph/transforms/run/index_search.py,sha256=ABIaX2FFx02o1StZgNAB_ZDXc1lTFO2aUIBH5BuUjtA,1224
+taskgraph/transforms/run/run_task.py,sha256=0GI8syzGtRDT07g_6SXG99JtxDBe09zsW5ltL-aUhYU,8403
+taskgraph/transforms/run/toolchain.py,sha256=KiuBfJ6CShwGYIIljy4i7iYSHFFXF_A_zSvRGUgYboA,6033
+taskgraph/util/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+taskgraph/util/archive.py,sha256=NIqg2Su5PUqOv4JM60uFixsMsOXN26u5paB1Uh6foSI,4707
+taskgraph/util/attributes.py,sha256=pPOFmwkDQQ-IqfDpVghZ10YI_qXRY4Bi5JP3xr6XVvc,2964
+taskgraph/util/cached_tasks.py,sha256=-AqmOjrkI5PMAlAlQl1wShCrs0HA5lWLBgkxTcFstFM,4150
+taskgraph/util/dependencies.py,sha256=3Qba3zI87JYR5fk5FndGzEVW-5NIzzZrBf9rVYcnLD0,2734
+taskgraph/util/docker.py,sha256=ffQ6KloQNz_kwYemSZEkh4xUMVMeotnnwphWZth1PqQ,8112
+taskgraph/util/hash.py,sha256=U5h6WwC3zs0ooX8odc7AjgPQKKFpDXL7PemoyENPJYo,1644
+taskgraph/util/keyed_by.py,sha256=EMWNRRqYB0AS7A4Y4lthYf2HB7G2ercGFf4hN9zwyaY,3348
+taskgraph/util/memoize.py,sha256=CvCGl-_qft062b3GZC4aHbPfEOPtqR9oOUEqvk9aojQ,294
+taskgraph/util/parameterization.py,sha256=DiPE-4jappGMPljDhhZI52BP7dLBGZHu5EI1cW4aRYg,3392
+taskgraph/util/path.py,sha256=e-JloOQV2-Oua_pe335bv4xWAB07vb82TKpu_zCOl0w,4466
+taskgraph/util/python_path.py,sha256=ed4F5z2mId56LauVczgxm_LGxgQi8XlxlYDgXOPZyII,1576
+taskgraph/util/readonlydict.py,sha256=XzTG-gqGqWVlSkDxSyOL6Ur7Z0ONhIJ9DVLWV3q4q1w,787
+taskgraph/util/schema.py,sha256=HmbbJ_i5uxZZHZSJ8sVWaD-VMhZI4ymx0STNcjO5t2M,8260
+taskgraph/util/set_name.py,sha256=cha9awo2nMQ9jfSEcbyNkZkCq_1Yg_kKJTfvDzabHSc,1134
+taskgraph/util/shell.py,sha256=nf__ly0Ikhj92AiEBCQtvyyckm8UfO_3DSgz0SU-7QA,1321
+taskgraph/util/taskcluster.py,sha256=LScpZknMycOOneIcRMf236rCTMRHHGxFTc9Lh7mRKaI,13057
+taskgraph/util/taskgraph.py,sha256=ecKEvTfmLVvEKLPO_0g34CqVvc0iCzuNMh3064BZNrE,1969
+taskgraph/util/templates.py,sha256=HGTaIKCpAwEzBDHq0cDai1HJjPJrdnHsjJz6N4LVpKI,2139
+taskgraph/util/time.py,sha256=XauJ0DbU0fyFvHLzJLG4ehHv9KaKixxETro89GPC1yk,3350
+taskgraph/util/treeherder.py,sha256=kc8jCy_lYduBxVMYOQzWpmI_6i2bRmkQLKq5DGmbiDI,2721
+taskgraph/util/vcs.py,sha256=FjS82fiTsoQ_ArjTCDOtDGfNdVUp_8zvVKB9SoAG3Rs,18019
+taskgraph/util/verify.py,sha256=htrNX7aXMMDzxymsFVcs0kaO5gErFHd62g9cQsZI_WE,8518
+taskgraph/util/workertypes.py,sha256=1wgM6vLrlgtyv8854anVIs0Bx11kV8JJJaKcOHJc2j0,2498
+taskgraph/util/yaml.py,sha256=-LaIf3RROuaSWckOOGN5Iviu-DHWxIChgHn9a7n6ec4,1059
+taskcluster_taskgraph-8.0.1.dist-info/LICENSE,sha256=HyVuytGSiAUQ6ErWBHTqt1iSGHhLmlC8fO7jTCuR8dU,16725
+taskcluster_taskgraph-8.0.1.dist-info/METADATA,sha256=qg-m62f4BGLh2jBAr_-OQZhraOSciTrv5EyNY0Wwq8I,4688
+taskcluster_taskgraph-8.0.1.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+taskcluster_taskgraph-8.0.1.dist-info/entry_points.txt,sha256=2hxDzE3qq_sHh-J3ROqwpxgQgxO-196phWAQREl2-XA,50
+taskcluster_taskgraph-8.0.1.dist-info/top_level.txt,sha256=3JNeYn_hNiNXC7DrdH_vcv-WYSE7QdgGjdvUYvSjVp0,10
+taskcluster_taskgraph-8.0.1.dist-info/RECORD,,
diff --git a/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.3.0.dist-info/WHEEL b/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-8.0.1.dist-info/WHEEL
index becc9a66ea..bab98d6758 100644
--- a/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.3.0.dist-info/WHEEL
+++ b/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-8.0.1.dist-info/WHEEL
@@ -1,5 +1,5 @@
Wheel-Version: 1.0
-Generator: bdist_wheel (0.37.1)
+Generator: bdist_wheel (0.43.0)
Root-Is-Purelib: true
Tag: py3-none-any
diff --git a/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.3.0.dist-info/entry_points.txt b/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-8.0.1.dist-info/entry_points.txt
index dec40df69f..dec40df69f 100644
--- a/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.3.0.dist-info/entry_points.txt
+++ b/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-8.0.1.dist-info/entry_points.txt
diff --git a/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.3.0.dist-info/top_level.txt b/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-8.0.1.dist-info/top_level.txt
index f3840b68ef..f3840b68ef 100644
--- a/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.3.0.dist-info/top_level.txt
+++ b/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-8.0.1.dist-info/top_level.txt
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/__init__.py b/third_party/python/taskcluster_taskgraph/taskgraph/__init__.py
index 81cc763230..0bd794101c 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/__init__.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/__init__.py
@@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-__version__ = "6.3.0"
+__version__ = "8.0.1"
# Maximum number of dependencies a single task can have
# https://docs.taskcluster.net/reference/platform/taskcluster-queue/references/api#createTask
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/actions/add_new_jobs.py b/third_party/python/taskcluster_taskgraph/taskgraph/actions/add_new_jobs.py
index c5e1821546..f635250086 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/actions/add_new_jobs.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/actions/add_new_jobs.py
@@ -40,7 +40,7 @@ from taskgraph.actions.util import (
)
def add_new_jobs_action(parameters, graph_config, input, task_group_id, task_id):
decision_task_id, full_task_graph, label_to_taskid = fetch_graph_and_labels(
- parameters, graph_config
+ parameters, graph_config, task_group_id=task_group_id
)
to_run = []
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/actions/cancel.py b/third_party/python/taskcluster_taskgraph/taskgraph/actions/cancel.py
index 03788c6538..33a5394e68 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/actions/cancel.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/actions/cancel.py
@@ -34,9 +34,7 @@ def cancel_action(parameters, graph_config, input, task_group_id, task_id):
# cannot be cancelled at this time, but it's also not running
# anymore, so we can ignore this error.
logger.info(
- 'Task "{}" is past its deadline and cannot be cancelled.'.format(
- task_id
- )
+ f'Task "{task_id}" is past its deadline and cannot be cancelled.'
)
return
raise
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/actions/cancel_all.py b/third_party/python/taskcluster_taskgraph/taskgraph/actions/cancel_all.py
index d3e0440839..55453b7624 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/actions/cancel_all.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/actions/cancel_all.py
@@ -43,9 +43,7 @@ def cancel_all_action(parameters, graph_config, input, task_group_id, task_id):
# cannot be cancelled at this time, but it's also not running
# anymore, so we can ignore this error.
logger.info(
- "Task {} is past its deadline and cannot be cancelled.".format(
- task_id
- )
+ f"Task {task_id} is past its deadline and cannot be cancelled."
)
return
raise
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/actions/rebuild_cached_tasks.py b/third_party/python/taskcluster_taskgraph/taskgraph/actions/rebuild_cached_tasks.py
index 2b88e6a698..8ea2e37150 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/actions/rebuild_cached_tasks.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/actions/rebuild_cached_tasks.py
@@ -18,7 +18,7 @@ def rebuild_cached_tasks_action(
parameters, graph_config, input, task_group_id, task_id
):
decision_task_id, full_task_graph, label_to_taskid = fetch_graph_and_labels(
- parameters, graph_config
+ parameters, graph_config, task_group_id=task_group_id
)
cached_tasks = [
label
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/actions/registry.py b/third_party/python/taskcluster_taskgraph/taskgraph/actions/registry.py
index 1e909d30c7..20955bd3f2 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/actions/registry.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/actions/registry.py
@@ -154,9 +154,7 @@ def register_callback_action(
], "register_callback_action must be used as decorator"
if not cb_name:
cb_name = name
- assert cb_name not in callbacks, "callback name {} is not unique".format(
- cb_name
- )
+ assert cb_name not in callbacks, f"callback name {cb_name} is not unique"
def action_builder(parameters, graph_config, decision_task_id):
if not available(parameters):
@@ -165,11 +163,11 @@ def register_callback_action(
actionPerm = "generic" if generic else cb_name
# gather up the common decision-task-supplied data for this action
- repo_param = "head_repository"
repository = {
- "url": parameters[repo_param],
+ "url": parameters["head_repository"],
"project": parameters["project"],
"level": parameters["level"],
+ "base_url": parameters["base_repository"],
}
revision = parameters["head_rev"]
@@ -181,6 +179,9 @@ def register_callback_action(
branch = parameters.get("head_ref")
if branch:
push["branch"] = branch
+ base_branch = parameters.get("base_ref")
+ if base_branch and branch != base_branch:
+ push["base_branch"] = base_branch
action = {
"name": name,
@@ -215,13 +216,16 @@ def register_callback_action(
if "/" in actionPerm:
raise Exception("`/` is not allowed in action names; use `-`")
+ if parameters["tasks_for"].startswith("github-pull-request"):
+ hookId = f"in-tree-pr-action-{level}-{actionPerm}/{tcyml_hash}"
+ else:
+ hookId = f"in-tree-action-{level}-{actionPerm}/{tcyml_hash}"
+
rv.update(
{
"kind": "hook",
"hookGroupId": f"project-{trustDomain}",
- "hookId": "in-tree-action-{}-{}/{}".format(
- level, actionPerm, tcyml_hash
- ),
+ "hookId": hookId,
"hookPayload": {
# provide the decision-task parameters as context for triggerHook
"decision": {
@@ -297,16 +301,20 @@ def sanity_check_task_scope(callback, parameters, graph_config):
actionPerm = "generic" if action.generic else action.cb_name
- repo_param = "head_repository"
- raw_url = parameters[repo_param]
+ raw_url = parameters["base_repository"]
parsed_url = parse(raw_url)
- expected_scope = f"assume:{parsed_url.taskcluster_role_prefix}:action:{actionPerm}"
+ action_scope = f"assume:{parsed_url.taskcluster_role_prefix}:action:{actionPerm}"
+ pr_action_scope = (
+ f"assume:{parsed_url.taskcluster_role_prefix}:pr-action:{actionPerm}"
+ )
# the scope should appear literally; no need for a satisfaction check. The use of
# get_current_scopes here calls the auth service through the Taskcluster Proxy, giving
# the precise scopes available to this task.
- if expected_scope not in taskcluster.get_current_scopes():
- raise ValueError(f"Expected task scope {expected_scope} for this action")
+ if not set((action_scope, pr_action_scope)) & set(taskcluster.get_current_scopes()):
+ raise ValueError(
+ f"Expected task scope {action_scope} or {pr_action_scope} for this action"
+ )
def trigger_action_callback(
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/actions/retrigger.py b/third_party/python/taskcluster_taskgraph/taskgraph/actions/retrigger.py
index fd488b35fc..6c6091a47a 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/actions/retrigger.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/actions/retrigger.py
@@ -33,9 +33,7 @@ def _should_retrigger(task_graph, label):
"""
if label not in task_graph:
logger.info(
- "Task {} not in full taskgraph, assuming task should not be retriggered.".format(
- label
- )
+ f"Task {label} not in full taskgraph, assuming task should not be retriggered."
)
return False
return task_graph[label].attributes.get("retrigger", False)
@@ -67,7 +65,9 @@ def retrigger_decision_action(parameters, graph_config, input, task_group_id, ta
# absolute timestamps relative to the current time.
task = taskcluster.get_task_definition(task_id)
task = relativize_datestamps(task)
- create_task_from_def(slugid(), task, parameters["level"])
+ create_task_from_def(
+ slugid(), task, parameters["level"], graph_config["trust-domain"]
+ )
@register_callback_action(
@@ -144,7 +144,7 @@ def retrigger_decision_action(parameters, graph_config, input, task_group_id, ta
)
def retrigger_action(parameters, graph_config, input, task_group_id, task_id):
decision_task_id, full_task_graph, label_to_taskid = fetch_graph_and_labels(
- parameters, graph_config
+ parameters, graph_config, task_group_id=task_group_id
)
task = taskcluster.get_task_definition(task_id)
@@ -155,8 +155,8 @@ def retrigger_action(parameters, graph_config, input, task_group_id, task_id):
if not input.get("force", None) and not _should_retrigger(full_task_graph, label):
logger.info(
- "Not retriggering task {}, task should not be retrigged "
- "and force not specified.".format(label)
+ f"Not retriggering task {label}, task should not be retrigged "
+ "and force not specified."
)
sys.exit(1)
@@ -201,14 +201,12 @@ def rerun_action(parameters, graph_config, input, task_group_id, task_id):
task = taskcluster.get_task_definition(task_id)
parameters = dict(parameters)
decision_task_id, full_task_graph, label_to_taskid = fetch_graph_and_labels(
- parameters, graph_config
+ parameters, graph_config, task_group_id=task_group_id
)
label = task["metadata"]["name"]
if task_id not in label_to_taskid.values():
logger.error(
- "Refusing to rerun {}: taskId {} not in decision task {} label_to_taskid!".format(
- label, task_id, decision_task_id
- )
+ f"Refusing to rerun {label}: taskId {task_id} not in decision task {decision_task_id} label_to_taskid!"
)
_rerun_task(task_id, label)
@@ -218,9 +216,7 @@ def _rerun_task(task_id, label):
state = taskcluster.state_task(task_id)
if state not in RERUN_STATES:
logger.warning(
- "No need to rerun {}: state '{}' not in {}!".format(
- label, state, RERUN_STATES
- )
+ f"No need to rerun {label}: state '{state}' not in {RERUN_STATES}!"
)
return
taskcluster.rerun_task(task_id)
@@ -261,7 +257,7 @@ def _rerun_task(task_id, label):
)
def retrigger_multiple(parameters, graph_config, input, task_group_id, task_id):
decision_task_id, full_task_graph, label_to_taskid = fetch_graph_and_labels(
- parameters, graph_config
+ parameters, graph_config, task_group_id=task_group_id
)
suffixes = []
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/actions/util.py b/third_party/python/taskcluster_taskgraph/taskgraph/actions/util.py
index cf81029da2..41e3b035de 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/actions/util.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/actions/util.py
@@ -32,8 +32,15 @@ def get_parameters(decision_task_id):
return get_artifact(decision_task_id, "public/parameters.yml")
-def fetch_graph_and_labels(parameters, graph_config):
- decision_task_id = find_decision_task(parameters, graph_config)
+def fetch_graph_and_labels(parameters, graph_config, task_group_id=None):
+ try:
+ # Look up the decision_task id in the index
+ decision_task_id = find_decision_task(parameters, graph_config)
+ except KeyError:
+ if not task_group_id:
+ raise
+ # Not found (e.g. from github-pull-request); fall back to the task group id.
+ decision_task_id = task_group_id
# First grab the graph and labels generated during the initial decision task
full_task_graph = get_artifact(decision_task_id, "public/full-task-graph.json")
@@ -90,7 +97,7 @@ def fetch_graph_and_labels(parameters, graph_config):
return (decision_task_id, full_task_graph, label_to_taskid)
-def create_task_from_def(task_id, task_def, level):
+def create_task_from_def(task_id, task_def, level, trust_domain):
"""Create a new task from a definition rather than from a label
that is already in the full-task-graph. The task definition will
have {relative-datestamp': '..'} rendered just like in a decision task.
@@ -98,7 +105,7 @@ def create_task_from_def(task_id, task_def, level):
It is useful if you want to "edit" the full_task_graph and then hand
it to this function. No dependencies will be scheduled. You must handle
this yourself. Seeing how create_tasks handles it might prove helpful."""
- task_def["schedulerId"] = f"gecko-level-{level}"
+ task_def["schedulerId"] = f"{trust_domain}-level-{level}"
label = task_def["metadata"]["name"]
session = get_session()
create.create_task(session, task_id, label, task_def)
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/config.py b/third_party/python/taskcluster_taskgraph/taskgraph/config.py
index 7ea7dc7b33..ac384eab86 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/config.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/config.py
@@ -40,6 +40,11 @@ graph_config_schema = Schema(
description="Default 'deadline' for tasks, in relative date format. "
"Eg: '1 week'",
): optionally_keyed_by("project", str),
+ Optional(
+ "task-expires-after",
+ description="Default 'expires-after' for level 1 tasks, in relative date format. "
+ "Eg: '90 days'",
+ ): str,
Required("workers"): {
Required("aliases"): {
str: {
@@ -62,6 +67,10 @@ graph_config_schema = Schema(
"Defaults to `trust-domain`.",
): str,
Optional(
+ "cache-pull-requests",
+ description="Should tasks from pull requests populate the cache",
+ ): bool,
+ Optional(
"index-path-regexes",
description="Regular expressions matching index paths to be summarized.",
): [str],
@@ -102,28 +111,27 @@ class GraphConfig:
Add the project's taskgraph directory to the python path, and register
any extensions present.
"""
- modify_path = os.path.dirname(self.root_dir)
if GraphConfig._PATH_MODIFIED:
- if GraphConfig._PATH_MODIFIED == modify_path:
+ if GraphConfig._PATH_MODIFIED == self.root_dir:
# Already modified path with the same root_dir.
# We currently need to do this to enable actions to call
# taskgraph_decision, e.g. relpro.
return
raise Exception("Can't register multiple directories on python path.")
- GraphConfig._PATH_MODIFIED = modify_path
- sys.path.insert(0, modify_path)
+ GraphConfig._PATH_MODIFIED = self.root_dir
+ sys.path.insert(0, self.root_dir)
register_path = self["taskgraph"].get("register")
if register_path:
find_object(register_path)(self)
@property
def vcs_root(self):
- if path.split(self.root_dir)[-2:] != ["taskcluster", "ci"]:
+ if path.split(self.root_dir)[-1:] != ["taskcluster"]:
raise Exception(
"Not guessing path to vcs root. "
"Graph config in non-standard location."
)
- return os.path.dirname(os.path.dirname(self.root_dir))
+ return os.path.dirname(self.root_dir)
@property
def taskcluster_yml(self):
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/create.py b/third_party/python/taskcluster_taskgraph/taskgraph/create.py
index deb1ac5348..e8baabb8a8 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/create.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/create.py
@@ -104,7 +104,7 @@ def create_tasks(graph_config, taskgraph, label_to_taskid, params, decision_task
def create_task(session, task_id, label, task_def):
# create the task using 'http://taskcluster/queue', which is proxied to the queue service
- # with credentials appropriate to this job.
+ # with credentials appropriate to this task.
# Resolve timestamps
now = current_json_time(datetime_format=True)
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/decision.py b/third_party/python/taskcluster_taskgraph/taskgraph/decision.py
index ed412f4473..d9eb9f3e90 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/decision.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/decision.py
@@ -46,21 +46,21 @@ try_task_config_schema_v2 = Schema(
)
-def full_task_graph_to_runnable_jobs(full_task_json):
- runnable_jobs = {}
+def full_task_graph_to_runnable_tasks(full_task_json):
+ runnable_tasks = {}
for label, node in full_task_json.items():
if not ("extra" in node["task"] and "treeherder" in node["task"]["extra"]):
continue
th = node["task"]["extra"]["treeherder"]
- runnable_jobs[label] = {"symbol": th["symbol"]}
+ runnable_tasks[label] = {"symbol": th["symbol"]}
for i in ("groupName", "groupSymbol", "collection"):
if i in th:
- runnable_jobs[label][i] = th[i]
+ runnable_tasks[label][i] = th[i]
if th.get("machine", {}).get("platform"):
- runnable_jobs[label]["platform"] = th["machine"]["platform"]
- return runnable_jobs
+ runnable_tasks[label]["platform"] = th["machine"]["platform"]
+ return runnable_tasks
def taskgraph_decision(options, parameters=None):
@@ -104,7 +104,7 @@ def taskgraph_decision(options, parameters=None):
# write out the public/runnable-jobs.json file
write_artifact(
- "runnable-jobs.json", full_task_graph_to_runnable_jobs(full_task_json)
+ "runnable-jobs.json", full_task_graph_to_runnable_tasks(full_task_json)
)
# this is just a test to check whether the from_json() function is working
@@ -185,6 +185,9 @@ def get_decision_parameters(graph_config, options):
# Define default filter list, as most configurations shouldn't need
# custom filters.
+ parameters["files_changed"] = repo.get_changed_files(
+ rev=parameters["head_rev"], base_rev=parameters["base_rev"]
+ )
parameters["filters"] = [
"target_tasks_method",
]
@@ -214,9 +217,9 @@ def get_decision_parameters(graph_config, options):
parameters.update(PER_PROJECT_PARAMETERS[project])
except KeyError:
logger.warning(
- "using default project parameters; add {} to "
- "PER_PROJECT_PARAMETERS in {} to customize behavior "
- "for this project".format(project, __file__)
+ f"using default project parameters; add {project} to "
+ f"PER_PROJECT_PARAMETERS in {__file__} to customize behavior "
+ "for this project"
)
parameters.update(PER_PROJECT_PARAMETERS["default"])
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/docker.py b/third_party/python/taskcluster_taskgraph/taskgraph/docker.py
index 23897cbbee..9f849525fc 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/docker.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/docker.py
@@ -18,6 +18,22 @@ except ImportError as e:
from taskgraph.util import docker
from taskgraph.util.taskcluster import get_artifact_url, get_session
+DEPLOY_WARNING = """
+*****************************************************************
+WARNING: Image is not suitable for deploying/pushing.
+
+To automatically tag the image, the following files are required:
+- {image_dir}/REGISTRY
+- {image_dir}/VERSION
+
+The REGISTRY file contains the Docker registry hosting the image.
+A default REGISTRY file may also be defined in the parent docker
+directory.
+
+The VERSION file contains the version of the image.
+*****************************************************************
+"""
+
def get_image_digest(image_name):
from taskgraph.generator import load_tasks_for_kind
@@ -34,7 +50,7 @@ def get_image_digest(image_name):
def load_image_by_name(image_name, tag=None):
from taskgraph.generator import load_tasks_for_kind
- from taskgraph.optimize import IndexSearch
+ from taskgraph.optimize.strategies import IndexSearch
from taskgraph.parameters import Parameters
params = Parameters(
@@ -43,8 +59,9 @@ def load_image_by_name(image_name, tag=None):
)
tasks = load_tasks_for_kind(params, "docker-image")
task = tasks[f"build-docker-image-{image_name}"]
+ deadline = None
task_id = IndexSearch().should_replace_task(
- task, {}, task.optimization.get("index-search", [])
+ task, {}, deadline, task.optimization.get("index-search", [])
)
if task_id in (True, False):
@@ -52,8 +69,10 @@ def load_image_by_name(image_name, tag=None):
"Could not find artifacts for a docker image "
"named `{image_name}`. Local commits and other changes "
"in your checkout may cause this error. Try "
- "updating to a fresh checkout of mozilla-central "
- "to download image.".format(image_name=image_name)
+ "updating to a fresh checkout of {project} "
+ "to download the image.".format(
+ image_name=image_name, project=params["project"]
+ )
)
return False
@@ -102,19 +121,18 @@ def build_image(name, tag, args=None):
buf = BytesIO()
docker.stream_context_tar(".", image_dir, buf, "", args)
- subprocess.run(
- ["docker", "image", "build", "--no-cache", "-t", tag, "-"], input=buf.getvalue()
- )
+ cmdargs = ["docker", "image", "build", "--no-cache", "-"]
+ if tag:
+ cmdargs.insert(-1, f"-t={tag}")
+ subprocess.run(cmdargs, input=buf.getvalue())
- print(f"Successfully built {name} and tagged with {tag}")
+ msg = f"Successfully built {name}"
+ if tag:
+ msg += f" and tagged with {tag}"
+ print(msg)
- if tag.endswith(":latest"):
- print("*" * 50)
- print("WARNING: no VERSION file found in image directory.")
- print("Image is not suitable for deploying/pushing.")
- print("Create an image suitable for deploying/pushing by creating")
- print("a VERSION file in the image directory.")
- print("*" * 50)
+ if not tag or tag.endswith(":latest"):
+ print(DEPLOY_WARNING.format(image_dir=os.path.relpath(image_dir), image=name))
def load_image(url, imageName=None, imageTag=None):
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/files_changed.py b/third_party/python/taskcluster_taskgraph/taskgraph/files_changed.py
deleted file mode 100644
index 6be6e5eeee..0000000000
--- a/third_party/python/taskcluster_taskgraph/taskgraph/files_changed.py
+++ /dev/null
@@ -1,91 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-"""
-Support for optimizing tasks based on the set of files that have changed.
-"""
-
-
-import logging
-import os
-
-import requests
-from redo import retry
-
-from .util.memoize import memoize
-from .util.path import match as match_path
-from .util.vcs import get_repository
-
-logger = logging.getLogger(__name__)
-
-
-@memoize
-def get_changed_files(head_repository_url, head_rev, base_rev=None):
- """
- Get the set of files changed between revisions.
- Responses are cached, so multiple calls with the same arguments are OK.
- """
- repo_path = os.getcwd()
- repository = get_repository(repo_path)
-
- if repository.tool == "hg":
- # TODO Use VCS version once tested enough
- return _get_changed_files_json_automationrelevance(
- head_repository_url, head_rev
- )
-
- return repository.get_changed_files(rev=head_rev, base_rev=base_rev)
-
-
-def _get_changed_files_json_automationrelevance(head_repository_url, head_rev):
- """
- Get the set of files changed in the push headed by the given revision.
- """
- url = "{}/json-automationrelevance/{}".format(
- head_repository_url.rstrip("/"), head_rev
- )
- logger.debug("Querying version control for metadata: %s", url)
-
- def get_automationrelevance():
- response = requests.get(url, timeout=30)
- return response.json()
-
- contents = retry(get_automationrelevance, attempts=10, sleeptime=10)
-
- logger.debug(
- "{} commits influencing task scheduling:".format(len(contents["changesets"]))
- )
- changed_files = set()
- for c in contents["changesets"]:
- desc = "" # Support empty desc
- if c["desc"]:
- desc = c["desc"].splitlines()[0].encode("ascii", "ignore")
- logger.debug(" {cset} {desc}".format(cset=c["node"][0:12], desc=desc))
- changed_files |= set(c["files"])
-
- return changed_files
-
-
-def check(params, file_patterns):
- """Determine whether any of the files changed between 2 revisions
- match any of the given file patterns."""
-
- head_repository_url = params.get("head_repository")
- head_rev = params.get("head_rev")
- if not head_repository_url or not head_rev:
- logger.warning(
- "Missing `head_repository` or `head_rev` parameters; "
- "assuming all files have changed"
- )
- return True
-
- base_rev = params.get("base_rev")
- changed_files = get_changed_files(head_repository_url, head_rev, base_rev)
-
- for pattern in file_patterns:
- for path in changed_files:
- if match_path(path, pattern):
- return True
-
- return False
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/generator.py b/third_party/python/taskcluster_taskgraph/taskgraph/generator.py
index 4ed2a41520..d649b91706 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/generator.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/generator.py
@@ -91,7 +91,7 @@ class Kind:
@classmethod
def load(cls, root_dir, graph_config, kind_name):
- path = os.path.join(root_dir, kind_name)
+ path = os.path.join(root_dir, "kinds", kind_name)
kind_yml = os.path.join(path, "kind.yml")
if not os.path.exists(kind_yml):
raise KindNotFound(kind_yml)
@@ -125,13 +125,13 @@ class TaskGraphGenerator:
write_artifacts=False,
):
"""
- @param root_dir: root directory, with subdirectories for each kind
+ @param root_dir: root directory containing the Taskgraph config.yml file
@param parameters: parameters for this task-graph generation, or callable
taking a `GraphConfig` and returning parameters
@type parameters: Union[Parameters, Callable[[GraphConfig], Parameters]]
"""
if root_dir is None:
- root_dir = "taskcluster/ci"
+ root_dir = "taskcluster"
self.root_dir = root_dir
self._parameters = parameters
self._decision_task_id = decision_task_id
@@ -243,7 +243,7 @@ class TaskGraphGenerator:
yield kind
queue.extend(kind.config.get("kind-dependencies", []))
else:
- for kind_name in os.listdir(self.root_dir):
+ for kind_name in os.listdir(os.path.join(self.root_dir, "kinds")):
try:
yield Kind.load(self.root_dir, graph_config, kind_name)
except KindNotFound:
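Taken together, the generator changes move kind definitions out of
``taskcluster/ci/<kind>/`` and into ``taskcluster/kinds/<kind>/``, with the
graph configuration rooted at ``taskcluster/``. A project on the new layout
would look roughly like this (illustrative sketch):

    taskcluster/
    ├── config.yml
    └── kinds/
        ├── build/
        │   └── kind.yml
        └── test/
            └── kind.yml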
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/loader/default.py b/third_party/python/taskcluster_taskgraph/taskgraph/loader/default.py
index 5b2c258917..f060a1d92d 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/loader/default.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/loader/default.py
@@ -11,7 +11,7 @@ logger = logging.getLogger(__name__)
DEFAULT_TRANSFORMS = [
- "taskgraph.transforms.job:transforms",
+ "taskgraph.transforms.run:transforms",
"taskgraph.transforms.task:transforms",
]
@@ -20,7 +20,7 @@ def loader(kind, path, config, params, loaded_tasks):
"""
This default loader builds on the `transform` loader by providing sensible
default transforms that the majority of simple tasks will need.
- Specifically, `job` and `task` transforms will be appended to the end of the
+ Specifically, `run` and `task` transforms will be appended to the end of the
list of transforms in the kind being loaded.
"""
transform_refs = config.setdefault("transforms", [])
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/main.py b/third_party/python/taskcluster_taskgraph/taskgraph/main.py
index 88a4e2539b..e68cd5a787 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/main.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/main.py
@@ -18,6 +18,7 @@ from concurrent.futures import ProcessPoolExecutor, as_completed
from pathlib import Path
from textwrap import dedent
from typing import Any, List
+from urllib.parse import urlparse
import appdirs
import yaml
@@ -95,7 +96,7 @@ def get_filtered_taskgraph(taskgraph, tasksregex, exclude_keys):
for key in exclude_keys:
obj = task
attrs = key.split(".")
- while attrs[0] in obj:
+ while obj and attrs[0] in obj:
if len(attrs) == 1:
del obj[attrs[0]]
break
@@ -120,7 +121,7 @@ def get_taskgraph_generator(root, parameters):
return TaskGraphGenerator(root_dir=root, parameters=parameters)
-def format_taskgraph(options, parameters, logfile=None):
+def format_taskgraph(options, parameters, overrides, logfile=None):
import taskgraph
from taskgraph.parameters import parameters_loader
@@ -138,7 +139,7 @@ def format_taskgraph(options, parameters, logfile=None):
if isinstance(parameters, str):
parameters = parameters_loader(
parameters,
- overrides={"target-kinds": options.get("target_kinds")},
+ overrides=overrides,
strict=False,
)
@@ -172,7 +173,7 @@ def dump_output(out, path=None, params_spec=None):
print(out + "\n", file=fh)
-def generate_taskgraph(options, parameters, logdir):
+def generate_taskgraph(options, parameters, overrides, logdir):
from taskgraph.parameters import Parameters
def logfile(spec):
@@ -188,14 +189,16 @@ def generate_taskgraph(options, parameters, logdir):
# tracebacks a little more readable and avoids additional process overhead.
if len(parameters) == 1:
spec = parameters[0]
- out = format_taskgraph(options, spec, logfile(spec))
+ out = format_taskgraph(options, spec, overrides, logfile(spec))
dump_output(out, options["output_file"])
return 0
futures = {}
with ProcessPoolExecutor(max_workers=options["max_workers"]) as executor:
for spec in parameters:
- f = executor.submit(format_taskgraph, options, spec, logfile(spec))
+ f = executor.submit(
+ format_taskgraph, options, spec, overrides, logfile(spec)
+ )
futures[f] = spec
returncode = 0
@@ -293,6 +296,15 @@ def generate_taskgraph(options, parameters, logdir):
"specified).",
)
@argument(
+ "--force-local-files-changed",
+ default=False,
+ action="store_true",
+ help="Compute the 'files-changed' parameter from local version control, "
+ "even when explicitly using a parameter set that already has it defined. "
+ "Note that this is already the default behaviour when no parameters are "
+ "specified.",
+)
+@argument(
"--no-optimize",
dest="optimize",
action="store_false",
@@ -366,9 +378,11 @@ def show_taskgraph(options):
diffdir = None
output_file = options["output_file"]
- if options["diff"]:
+ if options["diff"] or options["force_local_files_changed"]:
repo = get_repository(os.getcwd())
+ if options["diff"]:
+ assert repo is not None
if not repo.working_directory_clean():
print(
"abort: can't diff taskgraph with dirty working directory",
@@ -392,15 +406,22 @@ def show_taskgraph(options):
)
print(f"Generating {options['graph_attr']} @ {cur_rev}", file=sys.stderr)
+ overrides = {
+ "target-kinds": options.get("target_kinds"),
+ }
parameters: List[Any[str, Parameters]] = options.pop("parameters")
if not parameters:
- overrides = {
- "target-kinds": options.get("target_kinds"),
- }
parameters = [
parameters_loader(None, strict=False, overrides=overrides)
] # will use default values
+ # This is the default behaviour anyway, so no need to re-compute.
+ options["force_local_files_changed"] = False
+
+ elif options["force_local_files_changed"]:
+ assert repo is not None
+ overrides["files-changed"] = sorted(repo.get_changed_files("AM"))
+
for param in parameters[:]:
if isinstance(param, str) and os.path.isdir(param):
parameters.remove(param)
@@ -426,7 +447,7 @@ def show_taskgraph(options):
# to setup its `mach` based logging.
setup_logging()
- ret = generate_taskgraph(options, parameters, logdir)
+ ret = generate_taskgraph(options, parameters, overrides, logdir)
if options["diff"]:
assert diffdir is not None
@@ -450,7 +471,7 @@ def show_taskgraph(options):
diffdir, f"{options['graph_attr']}_{base_rev_file}"
)
print(f"Generating {options['graph_attr']} @ {base_rev}", file=sys.stderr)
- ret |= generate_taskgraph(options, parameters, logdir)
+ ret |= generate_taskgraph(options, parameters, overrides, logdir)
finally:
repo.update(cur_rev)
@@ -463,6 +484,8 @@ def show_taskgraph(options):
f"--label={options['graph_attr']}@{cur_rev}",
]
+ non_fatal_failures = []
+
for spec in parameters:
base_path = os.path.join(
diffdir, f"{options['graph_attr']}_{base_rev_file}"
@@ -475,7 +498,20 @@ def show_taskgraph(options):
base_path += f"_{params_name}"
cur_path += f"_{params_name}"
+ # If the base or cur files are missing, it means that generation
+ # failed. If one of them failed but not the other, the failure is
+ # likely due to the patch making changes to taskgraph in modules
+ # that don't get reloaded (safe to ignore). If both generations
+ # failed, there's likely a real issue.
+ base_missing = not os.path.isfile(base_path)
+ cur_missing = not os.path.isfile(cur_path)
+ if base_missing != cur_missing: # != is equivalent to XOR for booleans
+ non_fatal_failures.append(os.path.basename(base_path))
+ continue
+
try:
+ # If the output file(s) are missing, this command will raise
+ # CalledProcessError with a returncode > 1.
proc = subprocess.run(
diffcmd + [base_path, cur_path],
capture_output=True,
@@ -500,6 +536,16 @@ def show_taskgraph(options):
params_spec=spec if len(parameters) > 1 else None,
)
+ if non_fatal_failures:
+ failstr = "\n ".join(sorted(non_fatal_failures))
+ print(
+ "WARNING: Diff skipped for the following generation{s} "
+ "due to failures:\n {failstr}".format(
+ s="s" if len(non_fatal_failures) > 1 else "", failstr=failstr
+ ),
+ file=sys.stderr,
+ )
+
if options["format"] != "json":
print(
"If you were expecting differences in task bodies "
@@ -661,7 +707,7 @@ def decision(options):
@argument(
"--root",
"-r",
- default="taskcluster/ci",
+ default="taskcluster",
help="root of the taskgraph definition relative to topsrcdir",
)
def action_callback(options):
@@ -697,7 +743,7 @@ def action_callback(options):
@argument(
"--root",
"-r",
- default="taskcluster/ci",
+ default="taskcluster",
help="root of the taskgraph definition relative to topsrcdir",
)
@argument(
@@ -835,6 +881,10 @@ def init_taskgraph(options):
)
return 1
+ context["repo_name"] = urlparse(repo_url).path.rsplit("/", 1)[-1]
+ if context["repo_name"].endswith(".git"):
+ context["repo_name"] = context["repo_name"][: -len(".git")]
+
# Generate the project.
cookiecutter(
options["template"],
@@ -867,6 +917,11 @@ def setup_logging():
def main(args=sys.argv[1:]):
setup_logging()
parser = create_parser()
+
+ if not args:
+ parser.print_help()
+ sys.exit(1)
+
args = parser.parse_args(args)
try:
return args.command(vars(args))
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/morph.py b/third_party/python/taskcluster_taskgraph/taskgraph/morph.py
index bfa1560270..e4bb268ab8 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/morph.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/morph.py
@@ -38,6 +38,7 @@ registered_morphs = []
def register_morph(func):
registered_morphs.append(func)
+ return func
def amend_taskgraph(taskgraph, label_to_taskid, to_add):
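The added ``return func`` is significant: a decorator that returns ``None``
rebinds the decorated name to ``None`` at import time. A minimal standalone
illustration of the pattern (hypothetical names):

    registered = []

    def register(func):
        registered.append(func)
        return func  # without this, @register would rebind the name to None

    @register
    def my_morph(taskgraph, label_to_taskid, parameters, graph_config):
        return taskgraph, label_to_taskid

    assert my_morph in registered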
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/optimize/base.py b/third_party/python/taskcluster_taskgraph/taskgraph/optimize/base.py
index 367b94e1de..e5477d35b7 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/optimize/base.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/optimize/base.py
@@ -271,14 +271,19 @@ def replace_tasks(
dependencies_of = target_task_graph.graph.links_dict()
for label in target_task_graph.graph.visit_postorder():
+ logger.debug(f"replace_tasks: {label}")
# if we're not allowed to optimize, that's easy..
if label in do_not_optimize:
+ logger.debug(f"replace_tasks: {label} is in do_not_optimize")
continue
# if this task depends on un-replaced, un-removed tasks, do not replace
if any(
l not in replaced and l not in removed_tasks for l in dependencies_of[label]
):
+ logger.debug(
+ f"replace_tasks: {label} depends on an unreplaced or unremoved task"
+ )
continue
# if the task already exists, that's an easy replacement
@@ -287,6 +292,7 @@ def replace_tasks(
label_to_taskid[label] = repl
replaced.add(label)
opt_counts["existing_tasks"] += 1
+ logger.debug(f"replace_tasks: {label} replaced from existing_tasks")
continue
# call the optimization strategy
@@ -304,14 +310,20 @@ def replace_tasks(
repl = opt.should_replace_task(task, params, deadline, arg)
if repl:
if repl is True:
+ logger.debug(f"replace_tasks: {label} removed by optimization strategy")
# True means remove this task; get_subgraph will catch any
# problems with removed tasks being depended on
removed_tasks.add(label)
else:
+ logger.debug(
+ f"replace_tasks: {label} replaced by optimization strategy"
+ )
label_to_taskid[label] = repl
replaced.add(label)
opt_counts[opt_by] += 1
continue
+ else:
+ logger.debug(f"replace_tasks: {label} kept by optimization strategy")
_log_optimization("replaced", opt_counts)
return replaced
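The new ``logger.debug`` calls make each keep/replace/remove decision in
``replace_tasks`` traceable. Assuming the module logger is created with
``logging.getLogger(__name__)``, the output can be surfaced like so:

    import logging

    logging.basicConfig(level=logging.INFO)
    # Enable just the per-label optimization decisions without turning on
    # debug output for every module.
    logging.getLogger("taskgraph.optimize.base").setLevel(logging.DEBUG)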
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/optimize/strategies.py b/third_party/python/taskcluster_taskgraph/taskgraph/optimize/strategies.py
index 973b550632..5baecfe645 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/optimize/strategies.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/optimize/strategies.py
@@ -1,8 +1,8 @@
import logging
from datetime import datetime
-from taskgraph import files_changed
from taskgraph.optimize.base import OptimizationStrategy, register_strategy
+from taskgraph.util.path import match as match_path
from taskgraph.util.taskcluster import find_task_id, status_task
logger = logging.getLogger(__name__)
@@ -48,17 +48,23 @@ class IndexSearch(OptimizationStrategy):
@register_strategy("skip-unless-changed")
class SkipUnlessChanged(OptimizationStrategy):
+
+ def check(self, files_changed, patterns):
+ for pattern in patterns:
+ for path in files_changed:
+ if match_path(path, pattern):
+ return True
+ return False
+
def should_remove_task(self, task, params, file_patterns):
# pushlog_id == -1 - this is the case when run from a cron.yml job or on a git repository
if params.get("repository_type") == "hg" and params.get("pushlog_id") == -1:
return False
- changed = files_changed.check(params, file_patterns)
+ changed = self.check(params["files_changed"], file_patterns)
if not changed:
logger.debug(
- 'no files found matching a pattern in `skip-unless-changed` for "{}"'.format(
- task.label
- )
+ f'no files found matching a pattern in `skip-unless-changed` for "{task.label}"'
)
return True
return False
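With ``files_changed`` now carried in the parameters, the strategy reduces to
pure pattern matching. A rough sketch of the equivalent check, using
illustrative data and the glob-style patterns ``match`` understands:

    from taskgraph.util.path import match as match_path

    files_changed = ["taskgraph/optimize/strategies.py", "docs/index.rst"]
    file_patterns = ["taskgraph/**"]

    # True here: the first changed file matches the pattern, so the task
    # would *not* be optimized away.
    changed = any(
        match_path(path, pattern)
        for pattern in file_patterns
        for path in files_changed
    )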
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/parameters.py b/third_party/python/taskcluster_taskgraph/taskgraph/parameters.py
index 48571d97ad..c69b201e34 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/parameters.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/parameters.py
@@ -40,6 +40,7 @@ base_schema = Schema(
Required("do_not_optimize"): [str],
Required("enable_always_target"): Any(bool, [str]),
Required("existing_tasks"): {str: str},
+ Required("files_changed"): [str],
Required("filters"): [str],
Required("head_ref"): str,
Required("head_repository"): str,
@@ -86,6 +87,7 @@ def _get_defaults(repo_root=None):
# Use fake values if no repo is detected.
repo = Mock(branch="", head_rev="", tool="git")
repo.get_url.return_value = ""
+ repo.get_changed_files.return_value = []
try:
repo_url = repo.get_url()
@@ -108,6 +110,7 @@ def _get_defaults(repo_root=None):
"do_not_optimize": [],
"enable_always_target": True,
"existing_tasks": {},
+ "files_changed": repo.get_changed_files("AM"),
"filters": ["target_tasks_method"],
"head_ref": repo.branch or repo.head_rev,
"head_repository": repo_url,
@@ -284,7 +287,7 @@ class Parameters(ReadOnlyDict):
else:
raise ParameterMismatch(
"Don't know how to determine file URL for non-github"
- "repo: {}".format(repo)
+ f"repo: {repo}"
)
else:
raise RuntimeError(
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/run-task/run-task b/third_party/python/taskcluster_taskgraph/taskgraph/run-task/run-task
index 267b5283ea..f3a343de33 100755
--- a/third_party/python/taskcluster_taskgraph/taskgraph/run-task/run-task
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/run-task/run-task
@@ -1,4 +1,4 @@
-#!/usr/bin/python3 -u
+#!/usr/bin/env -S python3 -u
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
@@ -624,6 +624,11 @@ def git_checkout(
"Must specify both ssh_key_file and ssh_known_hosts_file, if either are specified",
)
+ # Bypass Git's "safe directory" feature as the destination could be
+ # coming from a cache and therefore cloned by a different user.
+ args = ["git", "config", "--global", "--add", "safe.directory", Path(destination_path).as_posix()]
+ retry_required_command(b"vcs", args, extra_env=env)
+
if not os.path.exists(destination_path):
# Repository doesn't already exist, needs to be cloned
args = [
@@ -782,9 +787,7 @@ def hg_checkout(
branch: Optional[str],
revision: Optional[str],
):
- if IS_MACOSX:
- hg_bin = "/tools/python27-mercurial/bin/hg"
- elif IS_POSIX:
+ if IS_MACOSX or IS_POSIX:
hg_bin = "hg"
elif IS_WINDOWS:
# This is where OCC installs it in the AMIs.
@@ -1007,7 +1010,8 @@ def install_pip_requirements(repositories):
if not requirements:
return
- cmd = [sys.executable, "-mpip", "install"]
+ # TODO: Stop using system Python (#381)
+ cmd = [sys.executable, "-mpip", "install", "--break-system-packages"]
if os.environ.get("PIP_DISABLE_REQUIRE_HASHES") != "1":
cmd.append("--require-hashes")
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/target_tasks.py b/third_party/python/taskcluster_taskgraph/taskgraph/target_tasks.py
index 1119a1c960..7f44b6ab60 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/target_tasks.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/target_tasks.py
@@ -14,7 +14,7 @@ _target_task_methods = {}
_GIT_REFS_HEADS_PREFIX = "refs/heads/"
-def _target_task(name):
+def register_target_task(name):
def wrap(func):
_target_task_methods[name] = func
return func
@@ -81,7 +81,7 @@ def standard_filter(task, parameters):
)
-@_target_task("default")
+@register_target_task("default")
def target_tasks_default(full_task_graph, parameters, graph_config):
"""Target the tasks which have indicated they should be run on this project
via the `run_on_projects` attributes."""
@@ -90,7 +90,7 @@ def target_tasks_default(full_task_graph, parameters, graph_config):
]
-@_target_task("codereview")
+@register_target_task("codereview")
def target_tasks_codereview(full_task_graph, parameters, graph_config):
"""Target the tasks which have indicated they should be run on this project
via the `run_on_projects` attributes."""
@@ -101,7 +101,7 @@ def target_tasks_codereview(full_task_graph, parameters, graph_config):
]
-@_target_task("nothing")
+@register_target_task("nothing")
def target_tasks_nothing(full_task_graph, parameters, graph_config):
"""Select nothing, for DONTBUILD pushes"""
return []
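
With the decorator renamed from the private _target_task to the public
register_target_task, downstream projects can register their own selection
methods. A hedged sketch (the "docs" kind and its filter are hypothetical;
the signature follows the functions above):

    from taskgraph.target_tasks import register_target_task

    @register_target_task("docs-only")
    def target_tasks_docs_only(full_task_graph, parameters, graph_config):
        """Select only tasks of the hypothetical 'docs' kind."""
        return [
            label
            for label, task in full_task_graph.tasks.items()
            if task.kind == "docs"
        ]
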
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/__init__.py b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/__init__.py
index 4fa7b5fc0c..e69de29bb2 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/__init__.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/__init__.py
@@ -1,3 +0,0 @@
-from taskgraph.transforms import ( # noqa: Added for backwards compat
- notify as release_notifications,
-)
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/base.py b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/base.py
index e6fcd2400c..fda0c584fc 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/base.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/base.py
@@ -147,7 +147,7 @@ class ValidateSchema:
kind=config.kind, name=task["name"]
)
elif "label" in task:
- error = "In job {label!r}:".format(label=task["label"])
+ error = "In task {label!r}:".format(label=task["label"])
elif "primary-dependency" in task:
error = "In {kind} kind task for {dependency!r}:".format(
kind=config.kind, dependency=task["primary-dependency"].label
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/code_review.py b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/code_review.py
index bdb655b97d..2c859c36f6 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/code_review.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/code_review.py
@@ -12,12 +12,12 @@ transforms = TransformSequence()
@transforms.add
-def add_dependencies(config, jobs):
- for job in jobs:
- job.setdefault("soft-dependencies", [])
- job["soft-dependencies"] += [
+def add_dependencies(config, tasks):
+ for task in tasks:
+ task.setdefault("soft-dependencies", [])
+ task["soft-dependencies"] += [
dep_task.label
for dep_task in config.kind_dependencies_tasks.values()
if dep_task.attributes.get("code-review") is True
]
- yield job
+ yield task
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/docker_image.py b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/docker_image.py
index d0c5b9c97b..b58320092b 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/docker_image.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/docker_image.py
@@ -92,9 +92,7 @@ def fill_template(config, tasks):
for p in packages:
if p not in available_packages:
raise Exception(
- "Missing package job for {}-{}: {}".format(
- config.kind, image_name, p
- )
+ f"Missing package job for {config.kind}-{image_name}: {p}"
)
if not taskgraph.fast:
@@ -119,9 +117,7 @@ def fill_template(config, tasks):
digest_data += [json.dumps(args, sort_keys=True)]
context_hashes[image_name] = context_hash
- description = "Build the docker image {} for use by dependent tasks".format(
- image_name
- )
+ description = f"Build the docker image {image_name} for use by dependent tasks"
args["DOCKER_IMAGE_PACKAGES"] = " ".join(f"<{p}>" for p in packages)
@@ -132,6 +128,8 @@ def fill_template(config, tasks):
# burn more CPU once to reduce image size.
zstd_level = "3" if int(config.params["level"]) == 1 else "10"
+ expires = config.graph_config._config.get("task-expires-after", "28 days")
+
# include some information that is useful in reconstructing this task
# from JSON
taskdesc = {
@@ -142,7 +140,7 @@ def fill_template(config, tasks):
"artifact_prefix": "public",
},
"always-target": True,
- "expires-after": "28 days" if config.params.is_try() else "1 year",
+ "expires-after": expires if config.params.is_try() else "1 year",
"scopes": [],
"run-on-projects": [],
"worker-type": "images",
@@ -158,9 +156,7 @@ def fill_template(config, tasks):
],
"env": {
"CONTEXT_TASK_ID": {"task-reference": "<decision>"},
- "CONTEXT_PATH": "public/docker-contexts/{}.tar.gz".format(
- image_name
- ),
+ "CONTEXT_PATH": f"public/docker-contexts/{image_name}.tar.gz",
"HASH": context_hash,
"PROJECT": config.params["project"],
"IMAGE_NAME": image_name,
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/fetch.py b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/fetch.py
index bcb8ff38a6..0e1b739677 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/fetch.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/fetch.py
@@ -32,11 +32,12 @@ FETCH_SCHEMA = Schema(
Optional("task-from"): str,
# Description of the task.
Required("description"): str,
+ Optional("expires-after"): str,
Optional("docker-image"): object,
Optional(
"fetch-alias",
- description="An alias that can be used instead of the real fetch job name in "
- "fetch stanzas for jobs.",
+ description="An alias that can be used instead of the real fetch task name in "
+ "fetch stanzas for tasks.",
): str,
Optional(
"artifact-prefix",
@@ -78,20 +79,20 @@ transforms.add_validate(FETCH_SCHEMA)
@transforms.add
-def process_fetch_job(config, jobs):
- # Converts fetch-url entries to the job schema.
- for job in jobs:
- typ = job["fetch"]["type"]
- name = job["name"]
- fetch = job.pop("fetch")
+def process_fetch_task(config, tasks):
+ # Converts fetch-url entries to the run schema.
+ for task in tasks:
+ typ = task["fetch"]["type"]
+ name = task["name"]
+ fetch = task.pop("fetch")
if typ not in fetch_builders:
raise Exception(f"Unknown fetch type {typ} in fetch {name}")
validate_schema(fetch_builders[typ].schema, fetch, f"In task.fetch {name!r}:")
- job.update(configure_fetch(config, typ, name, fetch))
+ task.update(configure_fetch(config, typ, name, fetch))
- yield job
+ yield task
def configure_fetch(config, typ, name, fetch):
@@ -103,41 +104,41 @@ def configure_fetch(config, typ, name, fetch):
@transforms.add
-def make_task(config, jobs):
+def make_task(config, tasks):
# Fetch tasks are idempotent and immutable. Have them live for
# essentially forever.
if config.params["level"] == "3":
expires = "1000 years"
else:
- expires = "28 days"
+ expires = config.graph_config._config.get("task-expires-after", "28 days")
- for job in jobs:
- name = job["name"]
- artifact_prefix = job.get("artifact-prefix", "public")
- env = job.get("env", {})
+ for task in tasks:
+ name = task["name"]
+ artifact_prefix = task.get("artifact-prefix", "public")
+ env = task.get("env", {})
env.update({"UPLOAD_DIR": "/builds/worker/artifacts"})
- attributes = job.get("attributes", {})
- attributes["fetch-artifact"] = path.join(artifact_prefix, job["artifact_name"])
- alias = job.get("fetch-alias")
+ attributes = task.get("attributes", {})
+ attributes["fetch-artifact"] = path.join(artifact_prefix, task["artifact_name"])
+ alias = task.get("fetch-alias")
if alias:
attributes["fetch-alias"] = alias
- task = {
+ task_desc = {
"attributes": attributes,
"name": name,
- "description": job["description"],
- "expires-after": expires,
+ "description": task["description"],
+ "expires-after": task.get("expires-after", expires),
"label": "fetch-%s" % name,
"run-on-projects": [],
"run": {
"using": "run-task",
"checkout": False,
- "command": job["command"],
+ "command": task["command"],
},
"worker-type": "images",
"worker": {
"chain-of-trust": True,
- "docker-image": job.get("docker-image", {"in-tree": "fetch"}),
+ "docker-image": task.get("docker-image", {"in-tree": "fetch"}),
"env": env,
"max-run-time": 900,
"artifacts": [
@@ -151,29 +152,29 @@ def make_task(config, jobs):
}
if "treeherder" in config.graph_config:
- task["treeherder"] = {
+ task_desc["treeherder"] = {
"symbol": join_symbol("Fetch", name),
"kind": "build",
"platform": "fetch/opt",
"tier": 1,
}
- if job.get("secret", None):
- task["scopes"] = ["secrets:get:" + job.get("secret")]
- task["worker"]["taskcluster-proxy"] = True
+ if task.get("secret", None):
+ task_desc["scopes"] = ["secrets:get:" + task.get("secret")]
+ task_desc["worker"]["taskcluster-proxy"] = True
if not taskgraph.fast:
- cache_name = task["label"].replace(f"{config.kind}-", "", 1)
+ cache_name = task_desc["label"].replace(f"{config.kind}-", "", 1)
# This adds the level to the index path automatically.
add_optimization(
config,
- task,
+ task_desc,
cache_type=CACHE_TYPE,
cache_name=cache_name,
- digest_data=job["digest_data"],
+ digest_data=task["digest_data"],
)
- yield task
+ yield task_desc
@fetch_builder(
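
The expiry logic above now distinguishes trusted level-3 fetches, which live
essentially forever, from everything else, which defers to the repository's
graph config. A condensed sketch of that decision (the dict shape follows the
_config access above):

    def fetch_expiry(level: str, graph_config: dict) -> str:
        # Level-3 fetch tasks are idempotent and immutable, so keep them;
        # otherwise honor "task-expires-after" with a 28-day fallback.
        if level == "3":
            return "1000 years"
        return graph_config.get("task-expires-after", "28 days")

    assert fetch_expiry("3", {}) == "1000 years"
    assert fetch_expiry("1", {"task-expires-after": "14 days"}) == "14 days"
    assert fetch_expiry("1", {}) == "28 days"
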
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/from_deps.py b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/from_deps.py
index 337d68e4ba..191ef7d56a 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/from_deps.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/from_deps.py
@@ -16,10 +16,11 @@ from textwrap import dedent
from voluptuous import Any, Extra, Optional, Required
from taskgraph.transforms.base import TransformSequence
-from taskgraph.transforms.job import fetches_schema
+from taskgraph.transforms.run import fetches_schema
from taskgraph.util.attributes import attrmatch
from taskgraph.util.dependencies import GROUP_BY_MAP, get_dependencies
from taskgraph.util.schema import Schema, validate_schema
+from taskgraph.util.set_name import SET_NAME_MAP
FROM_DEPS_SCHEMA = Schema(
{
@@ -41,12 +42,14 @@ FROM_DEPS_SCHEMA = Schema(
"set-name",
description=dedent(
"""
- When True, `from_deps` will derive a name for the generated
- tasks from the name of the primary dependency. Defaults to
- True.
+                        When set, the named function from `SET_NAME_MAP` is used
+                        to derive a name for the generated tasks from the primary
+                        dependency; None leaves names unchanged. Defaults to
+                        "strip-kind".
""".lstrip()
),
- ): bool,
+ ): Any(
+ None,
+ *SET_NAME_MAP,
+ {Any(*SET_NAME_MAP): object},
+ ),
Optional(
"with-attributes",
description=dedent(
@@ -170,7 +173,7 @@ def from_deps(config, tasks):
groups = func(config, deps)
# Split the task, one per group.
- set_name = from_deps.get("set-name", True)
+ set_name = from_deps.get("set-name", "strip-kind")
copy_attributes = from_deps.get("copy-attributes", False)
unique_kinds = from_deps.get("unique-kinds", True)
fetches = from_deps.get("fetches", [])
@@ -203,10 +206,8 @@ def from_deps(config, tasks):
primary_dep = [dep for dep in group if dep.kind == primary_kind][0]
if set_name:
- if primary_dep.label.startswith(primary_kind):
- new_task["name"] = primary_dep.label[len(primary_kind) + 1 :]
- else:
- new_task["name"] = primary_dep.label
+ func = SET_NAME_MAP[set_name]
+ new_task["name"] = func(config, deps, primary_dep, primary_kind)
if copy_attributes:
attrs = new_task.setdefault("attributes", {})
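
The inline naming logic deleted above now lives behind SET_NAME_MAP. A sketch
of what a "strip-kind"-style entry could look like; the registration helper's
shape is an assumption, and only the prefix-stripping behavior is taken from
the removed lines:

    SET_NAME_MAP = {}

    def set_name(name):
        """Register a naming function under `name` (assumed helper shape)."""
        def wrapper(func):
            SET_NAME_MAP[name] = func
            return func
        return wrapper

    @set_name("strip-kind")
    def set_name_strip_kind(config, deps, primary_dep, primary_kind):
        # Same behavior as the removed inline code: drop the "<kind>-" prefix
        # from the primary dependency's label, if present.
        if primary_dep.label.startswith(primary_kind):
            return primary_dep.label[len(primary_kind) + 1 :]
        return primary_dep.label
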
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/job/__init__.py b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/run/__init__.py
index 06978ff46d..a783a0dc13 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/job/__init__.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/run/__init__.py
@@ -2,11 +2,11 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
-Convert a job description into a task description.
+Convert a run description into a task description.
-Jobs descriptions are similar to task descriptions, but they specify how to run
-the job at a higher level, using a "run" field that can be interpreted by
-run-using handlers in `taskcluster/taskgraph/transforms/job`.
+Run descriptions are similar to task descriptions, but they specify how to run
+the task at a higher level, using a "run" field that can be interpreted by
+run-using handlers in `taskcluster/taskgraph/transforms/run`.
"""
@@ -28,7 +28,7 @@ from taskgraph.util.workertypes import worker_type_implementation
logger = logging.getLogger(__name__)
# Fetches may be accepted in other transforms and eventually passed along
-# to a `job` (eg: from_deps). Defining this here allows them to re-use
+# to a `task` (eg: from_deps). Defining this here allows them to reuse
# the schema and avoid duplication.
fetches_schema = {
Required("artifact"): str,
@@ -38,9 +38,9 @@ fetches_schema = {
}
# Schema for a build description
-job_description_schema = Schema(
+run_description_schema = Schema(
{
- # The name of the job and the job's label. At least one must be specified,
+ # The name of the task and the task's label. At least one must be specified,
# and the label will be generated from the name if necessary, by prepending
# the kind.
Optional("name"): str,
@@ -55,6 +55,7 @@ job_description_schema = Schema(
Optional("soft-dependencies"): task_description_schema["soft-dependencies"],
Optional("if-dependencies"): task_description_schema["if-dependencies"],
Optional("requires"): task_description_schema["requires"],
+ Optional("deadline-after"): task_description_schema["deadline-after"],
Optional("expires-after"): task_description_schema["expires-after"],
Optional("routes"): task_description_schema["routes"],
Optional("scopes"): task_description_schema["scopes"],
@@ -73,7 +74,7 @@ job_description_schema = Schema(
Optional("needs-sccache"): task_description_schema["needs-sccache"],
# The "when" section contains descriptions of the circumstances under which
# this task should be included in the task graph. This will be converted
- # into an optimization, so it cannot be specified in a job description that
+ # into an optimization, so it cannot be specified in a run description that
# also gives 'optimization'.
Exclusive("when", "optimization"): {
# This task only needs to be run if a file matching one of the given
@@ -89,33 +90,33 @@ job_description_schema = Schema(
fetches_schema,
],
},
- # A description of how to run this job.
+ # A description of how to run this task.
"run": {
- # The key to a job implementation in a peer module to this one
+ # The key to a run implementation in a peer module to this one
"using": str,
# Base work directory used to set up the task.
Optional("workdir"): str,
- # Any remaining content is verified against that job implementation's
+ # Any remaining content is verified against that run implementation's
# own schema.
Extra: object,
},
Required("worker-type"): task_description_schema["worker-type"],
# This object will be passed through to the task description, with additions
- # provided by the job's run-using function
+ # provided by the task's run-using function
Optional("worker"): dict,
}
)
transforms = TransformSequence()
-transforms.add_validate(job_description_schema)
+transforms.add_validate(run_description_schema)
@transforms.add
-def rewrite_when_to_optimization(config, jobs):
- for job in jobs:
- when = job.pop("when", {})
+def rewrite_when_to_optimization(config, tasks):
+ for task in tasks:
+ when = task.pop("when", {})
if not when:
- yield job
+ yield task
continue
files_changed = when.get("files-changed")
@@ -124,63 +125,64 @@ def rewrite_when_to_optimization(config, jobs):
files_changed.append(f"{config.path}/**")
# "only when files changed" implies "skip if files have not changed"
- job["optimization"] = {"skip-unless-changed": files_changed}
+ task["optimization"] = {"skip-unless-changed": files_changed}
- assert "when" not in job
- yield job
+ assert "when" not in task
+ yield task
@transforms.add
-def set_implementation(config, jobs):
- for job in jobs:
- impl, os = worker_type_implementation(config.graph_config, job["worker-type"])
+def set_implementation(config, tasks):
+ for task in tasks:
+ impl, os = worker_type_implementation(config.graph_config, task["worker-type"])
if os:
- job.setdefault("tags", {})["os"] = os
+ task.setdefault("tags", {})["os"] = os
if impl:
- job.setdefault("tags", {})["worker-implementation"] = impl
- worker = job.setdefault("worker", {})
+ task.setdefault("tags", {})["worker-implementation"] = impl
+ worker = task.setdefault("worker", {})
assert "implementation" not in worker
worker["implementation"] = impl
if os:
worker["os"] = os
- yield job
+ yield task
@transforms.add
-def set_label(config, jobs):
- for job in jobs:
- if "label" not in job:
- if "name" not in job:
- raise Exception("job has neither a name nor a label")
- job["label"] = "{}-{}".format(config.kind, job["name"])
- if job.get("name"):
- del job["name"]
- yield job
+def set_label(config, tasks):
+ for task in tasks:
+ if "label" not in task:
+ if "name" not in task:
+ raise Exception("task has neither a name nor a label")
+ task["label"] = "{}-{}".format(config.kind, task["name"])
+ if task.get("name"):
+ del task["name"]
+ yield task
@transforms.add
-def add_resource_monitor(config, jobs):
- for job in jobs:
- if job.get("attributes", {}).get("resource-monitor"):
+def add_resource_monitor(config, tasks):
+ for task in tasks:
+ if task.get("attributes", {}).get("resource-monitor"):
worker_implementation, worker_os = worker_type_implementation(
- config.graph_config, job["worker-type"]
+ config.graph_config, task["worker-type"]
)
# Normalise worker os so that linux-bitbar and similar use linux tools.
- worker_os = worker_os.split("-")[0]
- if "win7" in job["worker-type"]:
+ if worker_os:
+ worker_os = worker_os.split("-")[0]
+ if "win7" in task["worker-type"]:
arch = "32"
else:
arch = "64"
- job.setdefault("fetches", {})
- job["fetches"].setdefault("toolchain", [])
- job["fetches"]["toolchain"].append(f"{worker_os}{arch}-resource-monitor")
+ task.setdefault("fetches", {})
+ task["fetches"].setdefault("toolchain", [])
+ task["fetches"]["toolchain"].append(f"{worker_os}{arch}-resource-monitor")
if worker_implementation == "docker-worker":
artifact_source = "/builds/worker/monitoring/resource-monitor.json"
else:
artifact_source = "monitoring/resource-monitor.json"
- job["worker"].setdefault("artifacts", [])
- job["worker"]["artifacts"].append(
+ task["worker"].setdefault("artifacts", [])
+ task["worker"]["artifacts"].append(
{
"name": "public/monitoring/resource-monitor.json",
"type": "file",
@@ -188,10 +190,10 @@ def add_resource_monitor(config, jobs):
}
)
# Set env for output file
- job["worker"].setdefault("env", {})
- job["worker"]["env"]["RESOURCE_MONITOR_OUTPUT"] = artifact_source
+ task["worker"].setdefault("env", {})
+ task["worker"]["env"]["RESOURCE_MONITOR_OUTPUT"] = artifact_source
- yield job
+ yield task
def get_attribute(dict, key, attributes, attribute_name):
@@ -203,16 +205,16 @@ def get_attribute(dict, key, attributes, attribute_name):
@transforms.add
-def use_fetches(config, jobs):
+def use_fetches(config, tasks):
artifact_names = {}
aliases = {}
extra_env = {}
if config.kind in ("toolchain", "fetch"):
- jobs = list(jobs)
- for job in jobs:
- run = job.get("run", {})
- label = job["label"]
+ tasks = list(tasks)
+ for task in tasks:
+ run = task.get("run", {})
+ label = task["label"]
get_attribute(artifact_names, label, run, "toolchain-artifact")
value = run.get(f"{config.kind}-alias")
if value:
@@ -232,20 +234,20 @@ def use_fetches(config, jobs):
aliases[f"{task.kind}-{value}"] = task.label
artifact_prefixes = {}
- for job in order_tasks(config, jobs):
- artifact_prefixes[job["label"]] = get_artifact_prefix(job)
+ for task in order_tasks(config, tasks):
+ artifact_prefixes[task["label"]] = get_artifact_prefix(task)
- fetches = job.pop("fetches", None)
+ fetches = task.pop("fetches", None)
if not fetches:
- yield job
+ yield task
continue
- job_fetches = []
- name = job.get("name", job.get("label"))
- dependencies = job.setdefault("dependencies", {})
- worker = job.setdefault("worker", {})
+ task_fetches = []
+ name = task.get("name", task.get("label"))
+ dependencies = task.setdefault("dependencies", {})
+ worker = task.setdefault("worker", {})
env = worker.setdefault("env", {})
- prefix = get_artifact_prefix(job)
+ prefix = get_artifact_prefix(task)
for kind in sorted(fetches):
artifacts = fetches[kind]
if kind in ("fetch", "toolchain"):
@@ -254,9 +256,7 @@ def use_fetches(config, jobs):
label = aliases.get(label, label)
if label not in artifact_names:
raise Exception(
- "Missing fetch job for {kind}-{name}: {fetch}".format(
- kind=config.kind, name=name, fetch=fetch_name
- )
+ f"Missing fetch task for {config.kind}-{name}: {fetch_name}"
)
if label in extra_env:
env.update(extra_env[label])
@@ -264,7 +264,7 @@ def use_fetches(config, jobs):
path = artifact_names[label]
dependencies[label] = label
- job_fetches.append(
+ task_fetches.append(
{
"artifact": path,
"task": f"<{label}>",
@@ -274,8 +274,8 @@ def use_fetches(config, jobs):
else:
if kind not in dependencies:
raise Exception(
- "{name} can't fetch {kind} artifacts because "
- "it has no {kind} dependencies!".format(name=name, kind=kind)
+ f"{name} can't fetch {kind} artifacts because "
+ f"it has no {kind} dependencies!"
)
dep_label = dependencies[kind]
if dep_label in artifact_prefixes:
@@ -293,9 +293,11 @@ def use_fetches(config, jobs):
name=name,
kind=kind,
label=dependencies[kind],
- tasks="no tasks"
- if len(dep_tasks) == 0
- else "multiple tasks",
+ tasks=(
+ "no tasks"
+ if len(dep_tasks) == 0
+ else "multiple tasks"
+ ),
)
)
@@ -328,41 +330,43 @@ def use_fetches(config, jobs):
fetch["dest"] = dest
if verify_hash:
fetch["verify-hash"] = verify_hash
- job_fetches.append(fetch)
+ task_fetches.append(fetch)
- job_artifact_prefixes = {
+ task_artifact_prefixes = {
mozpath.dirname(fetch["artifact"])
- for fetch in job_fetches
+ for fetch in task_fetches
if not fetch["artifact"].startswith("public/")
}
- if job_artifact_prefixes:
+ if task_artifact_prefixes:
# Use taskcluster-proxy and request appropriate scope. For example, add
# 'scopes: [queue:get-artifact:path/to/*]' for 'path/to/artifact.tar.xz'.
worker["taskcluster-proxy"] = True
- for prefix in sorted(job_artifact_prefixes):
+ for prefix in sorted(task_artifact_prefixes):
scope = f"queue:get-artifact:{prefix}/*"
- if scope not in job.setdefault("scopes", []):
- job["scopes"].append(scope)
+ if scope not in task.setdefault("scopes", []):
+ task["scopes"].append(scope)
- env["MOZ_FETCHES"] = {"task-reference": json.dumps(job_fetches, sort_keys=True)}
+ env["MOZ_FETCHES"] = {
+ "task-reference": json.dumps(task_fetches, sort_keys=True)
+ }
env.setdefault("MOZ_FETCHES_DIR", "fetches")
- yield job
+ yield task
@transforms.add
-def make_task_description(config, jobs):
+def make_task_description(config, tasks):
"""Given a build description, create a task description"""
- # import plugin modules first, before iterating over jobs
+ # import plugin modules first, before iterating over tasks
import_sibling_modules(exceptions=("common.py",))
- for job in jobs:
+ for task in tasks:
# always-optimized tasks never execute, so have no workdir
- if job["worker"]["implementation"] in ("docker-worker", "generic-worker"):
- job["run"].setdefault("workdir", "/builds/worker")
+ if task["worker"]["implementation"] in ("docker-worker", "generic-worker"):
+ task["run"].setdefault("workdir", "/builds/worker")
- taskdesc = copy.deepcopy(job)
+ taskdesc = copy.deepcopy(task)
# fill in some empty defaults to make run implementations easier
taskdesc.setdefault("attributes", {})
@@ -372,27 +376,27 @@ def make_task_description(config, jobs):
taskdesc.setdefault("scopes", [])
taskdesc.setdefault("extra", {})
- # give the function for job.run.using on this worker implementation a
+ # give the function for task.run.using on this worker implementation a
# chance to set up the task description.
configure_taskdesc_for_run(
- config, job, taskdesc, job["worker"]["implementation"]
+ config, task, taskdesc, task["worker"]["implementation"]
)
del taskdesc["run"]
- # yield only the task description, discarding the job description
+        # yield only the task description, discarding the run description
yield taskdesc
-# A registry of all functions decorated with run_job_using
+# A registry of all functions decorated with run_task_using
registry = {}
-def run_job_using(worker_implementation, run_using, schema=None, defaults={}):
+def run_task_using(worker_implementation, run_using, schema=None, defaults={}):
"""Register the decorated function as able to set up a task description for
- jobs with the given worker implementation and `run.using` property. If
- `schema` is given, the job's run field will be verified to match it.
+ tasks with the given worker implementation and `run.using` property. If
+ `schema` is given, the task's run field will be verified to match it.
- The decorated function should have the signature `using_foo(config, job, taskdesc)`
+ The decorated function should have the signature `using_foo(config, task, taskdesc)`
and should modify the task description in-place. The skeleton of
the task description is already set up, but without a payload."""
@@ -400,11 +404,7 @@ def run_job_using(worker_implementation, run_using, schema=None, defaults={}):
for_run_using = registry.setdefault(run_using, {})
if worker_implementation in for_run_using:
raise Exception(
- "run_job_using({!r}, {!r}) already exists: {!r}".format(
- run_using,
- worker_implementation,
- for_run_using[worker_implementation],
- )
+ f"run_task_using({run_using!r}, {worker_implementation!r}) already exists: {for_run_using[worker_implementation]!r}"
)
for_run_using[worker_implementation] = (func, schema, defaults)
return func
@@ -412,42 +412,40 @@ def run_job_using(worker_implementation, run_using, schema=None, defaults={}):
return wrap
-@run_job_using(
+@run_task_using(
"always-optimized", "always-optimized", Schema({"using": "always-optimized"})
)
-def always_optimized(config, job, taskdesc):
+def always_optimized(config, task, taskdesc):
pass
-def configure_taskdesc_for_run(config, job, taskdesc, worker_implementation):
+def configure_taskdesc_for_run(config, task, taskdesc, worker_implementation):
"""
- Run the appropriate function for this job against the given task
+ Run the appropriate function for this task against the given task
description.
- This will raise an appropriate error if no function exists, or if the job's
+ This will raise an appropriate error if no function exists, or if the task's
run is not valid according to the schema.
"""
- run_using = job["run"]["using"]
+ run_using = task["run"]["using"]
if run_using not in registry:
raise Exception(f"no functions for run.using {run_using!r}")
if worker_implementation not in registry[run_using]:
raise Exception(
- "no functions for run.using {!r} on {!r}".format(
- run_using, worker_implementation
- )
+ f"no functions for run.using {run_using!r} on {worker_implementation!r}"
)
func, schema, defaults = registry[run_using][worker_implementation]
for k, v in defaults.items():
- job["run"].setdefault(k, v)
+ task["run"].setdefault(k, v)
if schema:
validate_schema(
schema,
- job["run"],
- "In job.run using {!r}/{!r} for job {!r}:".format(
- job["run"]["using"], worker_implementation, job["label"]
+ task["run"],
+ "In task.run using {!r}/{!r} for task {!r}:".format(
+ task["run"]["using"], worker_implementation, task["label"]
),
)
- func(config, job, taskdesc)
+ func(config, task, taskdesc)
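
A minimal sketch of registering a handler with the renamed run_task_using
decorator. The "echo" implementation is hypothetical; the signature and the
worker/taskdesc wiring mirror the handlers above:

    from voluptuous import Required

    from taskgraph.transforms.run import run_task_using
    from taskgraph.util.schema import Schema

    echo_schema = Schema(
        {
            Required("using"): "echo",
            Required("message"): str,
        }
    )

    @run_task_using("docker-worker", "echo", schema=echo_schema)
    def docker_worker_echo(config, task, taskdesc):
        # Translate the high-level run description into a worker command.
        worker = taskdesc["worker"] = task["worker"]
        worker["command"] = ["echo", task["run"]["message"]]
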
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/job/common.py b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/run/common.py
index 04708daf81..66466bc5f9 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/job/common.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/run/common.py
@@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
-Common support for various job types. These functions are all named after the
+Common support for various task types. These functions are all named after the
worker implementation they operate on, and take the same three parameters, for
consistency.
"""
@@ -21,21 +21,21 @@ def get_vcsdir_name(os):
return "vcs"
-def add_cache(job, taskdesc, name, mount_point, skip_untrusted=False):
+def add_cache(task, taskdesc, name, mount_point, skip_untrusted=False):
"""Adds a cache based on the worker's implementation.
Args:
- job (dict): Task's job description.
+        task (dict): The task's run description.
taskdesc (dict): Target task description to modify.
name (str): Name of the cache.
mount_point (path): Path on the host to mount the cache.
skip_untrusted (bool): Whether cache is used in untrusted environments
(default: False). Only applies to docker-worker.
"""
- if not job["run"].get("use-caches", True):
+ if not task["run"].get("use-caches", True):
return
- worker = job["worker"]
+ worker = task["worker"]
if worker["implementation"] == "docker-worker":
taskdesc["worker"].setdefault("caches", []).append(
@@ -60,7 +60,7 @@ def add_cache(job, taskdesc, name, mount_point, skip_untrusted=False):
pass
-def add_artifacts(config, job, taskdesc, path):
+def add_artifacts(config, task, taskdesc, path):
taskdesc["worker"].setdefault("artifacts", []).append(
{
"name": get_artifact_prefix(taskdesc),
@@ -70,28 +70,28 @@ def add_artifacts(config, job, taskdesc, path):
)
-def docker_worker_add_artifacts(config, job, taskdesc):
+def docker_worker_add_artifacts(config, task, taskdesc):
"""Adds an artifact directory to the task"""
- path = "{workdir}/artifacts/".format(**job["run"])
+ path = "{workdir}/artifacts/".format(**task["run"])
taskdesc["worker"]["env"]["UPLOAD_DIR"] = path
- add_artifacts(config, job, taskdesc, path)
+ add_artifacts(config, task, taskdesc, path)
-def generic_worker_add_artifacts(config, job, taskdesc):
+def generic_worker_add_artifacts(config, task, taskdesc):
"""Adds an artifact directory to the task"""
# The path is the location on disk; it doesn't necessarily
# mean the artifacts will be public or private; that is set via the name
# attribute in add_artifacts.
- add_artifacts(config, job, taskdesc, path=get_artifact_prefix(taskdesc))
+ add_artifacts(config, task, taskdesc, path=get_artifact_prefix(taskdesc))
-def support_vcs_checkout(config, job, taskdesc, repo_configs, sparse=False):
- """Update a job/task with parameters to enable a VCS checkout.
+def support_vcs_checkout(config, task, taskdesc, repo_configs, sparse=False):
+ """Update a task with parameters to enable a VCS checkout.
This can only be used with ``run-task`` tasks, as the cache name is
reserved for ``run-task`` tasks.
"""
- worker = job["worker"]
+ worker = task["worker"]
is_mac = worker["os"] == "macosx"
is_win = worker["os"] == "windows"
is_linux = worker["os"] == "linux"
@@ -102,7 +102,7 @@ def support_vcs_checkout(config, job, taskdesc, repo_configs, sparse=False):
checkoutdir = "./build"
hgstore = "y:/hg-shared"
elif is_docker:
- checkoutdir = "{workdir}/checkouts".format(**job["run"])
+ checkoutdir = "{workdir}/checkouts".format(**task["run"])
hgstore = f"{checkoutdir}/hg-store"
else:
checkoutdir = "./checkouts"
@@ -130,13 +130,7 @@ def support_vcs_checkout(config, job, taskdesc, repo_configs, sparse=False):
if sparse:
cache_name += "-sparse"
- # Workers using Mercurial >= 5.8 will enable revlog-compression-zstd, which
- # workers using older versions can't understand, so they can't share cache.
- # At the moment, only docker workers use the newer version.
- if is_docker:
- cache_name += "-hg58"
-
- add_cache(job, taskdesc, cache_name, checkoutdir)
+ add_cache(task, taskdesc, cache_name, checkoutdir)
env = taskdesc["worker"].setdefault("env", {})
env.update(
@@ -167,5 +161,5 @@ def support_vcs_checkout(config, job, taskdesc, repo_configs, sparse=False):
taskdesc["scopes"].append(f"secrets:get:{repo_config.ssh_secret_name}")
# only some worker platforms have taskcluster-proxy enabled
- if job["worker"]["implementation"] in ("docker-worker",):
+ if task["worker"]["implementation"] in ("docker-worker",):
taskdesc["worker"]["taskcluster-proxy"] = True
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/job/index_search.py b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/run/index_search.py
index 09b48fe594..c25946980e 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/job/index_search.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/run/index_search.py
@@ -12,7 +12,7 @@ phase will replace the task with the task from the other graph.
from voluptuous import Required
from taskgraph.transforms.base import TransformSequence
-from taskgraph.transforms.job import run_job_using
+from taskgraph.transforms.run import run_task_using
from taskgraph.util.schema import Schema
transforms = TransformSequence()
@@ -29,9 +29,9 @@ run_task_schema = Schema(
)
-@run_job_using("always-optimized", "index-search", schema=run_task_schema)
-def fill_template(config, job, taskdesc):
- run = job["run"]
+@run_task_using("always-optimized", "index-search", schema=run_task_schema)
+def fill_template(config, task, taskdesc):
+ run = task["run"]
taskdesc["optimization"] = {
"index-search": [index.format(**config.params) for index in run["index-search"]]
}
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/job/run_task.py b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/run/run_task.py
index 6337673611..c2fbef83b0 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/job/run_task.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/run/run_task.py
@@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
-Support for running jobs that are invoked via the `run-task` script.
+Support for running tasks that are invoked via the `run-task` script.
"""
import dataclasses
@@ -10,8 +10,8 @@ import os
from voluptuous import Any, Optional, Required
-from taskgraph.transforms.job import run_job_using
-from taskgraph.transforms.job.common import support_vcs_checkout
+from taskgraph.transforms.run import run_task_using
+from taskgraph.transforms.run.common import support_vcs_checkout
from taskgraph.transforms.task import taskref_or_string
from taskgraph.util import path, taskcluster
from taskgraph.util.schema import Schema
@@ -25,7 +25,7 @@ run_task_schema = Schema(
{
Required("using"): "run-task",
# if true, add a cache at ~worker/.cache, which is where things like pip
- # tend to hide their caches. This cache is never added for level-1 jobs.
+ # tend to hide their caches. This cache is never added for level-1 tasks.
# TODO Once bug 1526028 is fixed, this and 'use-caches' should be merged.
Required("cache-dotcache"): bool,
# Whether or not to use caches.
@@ -58,8 +58,8 @@ run_task_schema = Schema(
)
-def common_setup(config, job, taskdesc, command):
- run = job["run"]
+def common_setup(config, task, taskdesc, command):
+ run = task["run"]
if run["checkout"]:
repo_configs = config.repo_configs
if len(repo_configs) > 1 and run["checkout"] is True:
@@ -72,7 +72,7 @@ def common_setup(config, job, taskdesc, command):
support_vcs_checkout(
config,
- job,
+ task,
taskdesc,
repo_configs=repo_configs,
sparse=bool(run["sparse-profile"]),
@@ -97,7 +97,7 @@ def common_setup(config, job, taskdesc, command):
raise Exception(
"Found `{{checkout}}` interpolation in `cwd` for task {name} "
"but the task doesn't have a checkout: {cwd}".format(
- cwd=run["cwd"], name=job.get("name", job.get("label"))
+ cwd=run["cwd"], name=task.get("name", task.get("label"))
)
)
@@ -126,14 +126,14 @@ def script_url(config, script):
return f"{tc_url}/api/queue/v1/task/{task_id}/artifacts/public/{script}"
-@run_job_using(
+@run_task_using(
"docker-worker", "run-task", schema=run_task_schema, defaults=worker_defaults
)
-def docker_worker_run_task(config, job, taskdesc):
- run = job["run"]
- worker = taskdesc["worker"] = job["worker"]
+def docker_worker_run_task(config, task, taskdesc):
+ run = task["run"]
+ worker = taskdesc["worker"] = task["worker"]
command = run.pop("run-task-command", ["/usr/local/bin/run-task"])
- common_setup(config, job, taskdesc, command)
+ common_setup(config, task, taskdesc, command)
if run.get("cache-dotcache"):
worker["caches"].append(
@@ -158,12 +158,12 @@ def docker_worker_run_task(config, job, taskdesc):
worker["command"] = command
-@run_job_using(
+@run_task_using(
"generic-worker", "run-task", schema=run_task_schema, defaults=worker_defaults
)
-def generic_worker_run_task(config, job, taskdesc):
- run = job["run"]
- worker = taskdesc["worker"] = job["worker"]
+def generic_worker_run_task(config, task, taskdesc):
+ run = task["run"]
+ worker = taskdesc["worker"] = task["worker"]
is_win = worker["os"] == "windows"
is_mac = worker["os"] == "macosx"
is_bitbar = worker["os"] == "linux-bitbar"
@@ -177,7 +177,7 @@ def generic_worker_run_task(config, job, taskdesc):
else:
command = ["./run-task"]
- common_setup(config, job, taskdesc, command)
+ common_setup(config, task, taskdesc, command)
worker.setdefault("mounts", [])
if run.get("cache-dotcache"):
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/job/toolchain.py b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/run/toolchain.py
index c9c09542ff..59e66cb973 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/job/toolchain.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/run/toolchain.py
@@ -2,14 +2,14 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
-Support for running toolchain-building jobs via dedicated scripts
+Support for running toolchain-building tasks via dedicated scripts
"""
from voluptuous import ALLOW_EXTRA, Any, Optional, Required
import taskgraph
-from taskgraph.transforms.job import configure_taskdesc_for_run, run_job_using
-from taskgraph.transforms.job.common import (
+from taskgraph.transforms.run import configure_taskdesc_for_run, run_task_using
+from taskgraph.transforms.run.common import (
docker_worker_add_artifacts,
generic_worker_add_artifacts,
get_vcsdir_name,
@@ -36,12 +36,12 @@ toolchain_run_schema = Schema(
# Paths/patterns pointing to files that influence the outcome of a
# toolchain build.
Optional("resources"): [str],
- # Path to the artifact produced by the toolchain job
+ # Path to the artifact produced by the toolchain task
Required("toolchain-artifact"): str,
Optional(
"toolchain-alias",
- description="An alias that can be used instead of the real toolchain job name in "
- "fetch stanzas for jobs.",
+ description="An alias that can be used instead of the real toolchain task name in "
+ "fetch stanzas for tasks.",
): Any(str, [str]),
Optional(
"toolchain-env",
@@ -82,10 +82,10 @@ def get_digest_data(config, run, taskdesc):
return data
-def common_toolchain(config, job, taskdesc, is_docker):
- run = job["run"]
+def common_toolchain(config, task, taskdesc, is_docker):
+ run = task["run"]
- worker = taskdesc["worker"] = job["worker"]
+ worker = taskdesc["worker"] = task["worker"]
worker["chain-of-trust"] = True
srcdir = get_vcsdir_name(worker["os"])
@@ -94,14 +94,14 @@ def common_toolchain(config, job, taskdesc, is_docker):
# If the task doesn't have a docker-image, set a default
worker.setdefault("docker-image", {"in-tree": "toolchain-build"})
- # Allow the job to specify where artifacts come from, but add
+ # Allow the task to specify where artifacts come from, but add
# public/build if it's not there already.
artifacts = worker.setdefault("artifacts", [])
if not any(artifact.get("name") == "public/build" for artifact in artifacts):
if is_docker:
- docker_worker_add_artifacts(config, job, taskdesc)
+ docker_worker_add_artifacts(config, task, taskdesc)
else:
- generic_worker_add_artifacts(config, job, taskdesc)
+ generic_worker_add_artifacts(config, task, taskdesc)
env = worker["env"]
env.update(
@@ -147,7 +147,7 @@ def common_toolchain(config, job, taskdesc, is_docker):
run["command"] = command
- configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
+ configure_taskdesc_for_run(config, task, taskdesc, worker["implementation"])
toolchain_defaults = {
@@ -155,21 +155,21 @@ toolchain_defaults = {
}
-@run_job_using(
+@run_task_using(
"docker-worker",
"toolchain-script",
schema=toolchain_run_schema,
defaults=toolchain_defaults,
)
-def docker_worker_toolchain(config, job, taskdesc):
- common_toolchain(config, job, taskdesc, is_docker=True)
+def docker_worker_toolchain(config, task, taskdesc):
+ common_toolchain(config, task, taskdesc, is_docker=True)
-@run_job_using(
+@run_task_using(
"generic-worker",
"toolchain-script",
schema=toolchain_run_schema,
defaults=toolchain_defaults,
)
-def generic_worker_toolchain(config, job, taskdesc):
- common_toolchain(config, job, taskdesc, is_docker=False)
+def generic_worker_toolchain(config, task, taskdesc):
+ common_toolchain(config, task, taskdesc, is_docker=False)
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/task.py b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/task.py
index c55de78513..168b8c00c9 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/task.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/task.py
@@ -110,7 +110,7 @@ task_description_schema = Schema(
# section of the kind (delimited by "-") all smooshed together.
# Eg: "test" becomes "T", "docker-image" becomes "DI", etc.
"symbol": Optional(str),
- # the job kind
+ # the task kind
# If "build" or "test" is found in the kind name, this defaults
# to the appropriate value. Otherwise, defaults to "other"
"kind": Optional(Any("build", "test", "other")),
@@ -129,7 +129,7 @@ task_description_schema = Schema(
Optional("index"): {
# the name of the product this build produces
"product": str,
- # the names to use for this job in the TaskCluster index
+ # the names to use for this task in the TaskCluster index
"job-name": str,
# Type of gecko v2 index to use
"type": str,
@@ -179,7 +179,7 @@ task_description_schema = Schema(
# be substituted in this string:
# {level} -- the scm level of this push
"worker-type": str,
- # Whether the job should use sccache compiler caching.
+ # Whether the task should use sccache compiler caching.
Required("needs-sccache"): bool,
# information specific to the worker implementation that will run this task
Optional("worker"): {
@@ -196,7 +196,7 @@ TC_TREEHERDER_SCHEMA_URL = (
UNKNOWN_GROUP_NAME = (
- "Treeherder group {} (from {}) has no name; " "add it to taskcluster/ci/config.yml"
+ "Treeherder group {} (from {}) has no name; " "add it to taskcluster/config.yml"
)
V2_ROUTE_TEMPLATES = [
@@ -266,7 +266,7 @@ def index_builder(name):
UNSUPPORTED_INDEX_PRODUCT_ERROR = """\
The index product {product} is not in the list of configured products in
-`taskcluster/ci/config.yml'.
+`taskcluster/config.yml'.
"""
@@ -317,7 +317,7 @@ def verify_index(config, index):
{
# only one type is supported by any of the workers right now
"type": "persistent",
- # name of the cache, allowing re-use by subsequent tasks naming the
+ # name of the cache, allowing reuse by subsequent tasks naming the
# same cache
"name": str,
# location in the task image where the cache will be mounted
@@ -364,6 +364,9 @@ def build_docker_worker_payload(config, task, task_def):
if "in-tree" in image:
name = image["in-tree"]
docker_image_task = "build-docker-image-" + image["in-tree"]
+ assert "docker-image" not in task.get(
+ "dependencies", ()
+ ), "docker-image key in dependencies object is reserved"
task.setdefault("dependencies", {})["docker-image"] = docker_image_task
image = {
@@ -487,19 +490,19 @@ def build_docker_worker_payload(config, task, task_def):
# run-task knows how to validate caches.
#
- # To help ensure new run-task features and bug fixes don't interfere
- # with existing caches, we seed the hash of run-task into cache names.
- # So, any time run-task changes, we should get a fresh set of caches.
- # This means run-task can make changes to cache interaction at any time
- # without regards for backwards or future compatibility.
+ # To help ensure new run-task features and bug fixes, as well as the
+ # versions of tools such as mercurial or git, don't interfere with
+ # existing caches, we seed the underlying docker-image task id into
+ # cache names, for tasks using in-tree Docker images.
#
# But this mechanism only works for in-tree Docker images that are built
# with the current run-task! For out-of-tree Docker images, we have no
# way of knowing their content of run-task. So, in addition to varying
# cache names by the contents of run-task, we also take the Docker image
- # name into consideration. This means that different Docker images will
- # never share the same cache. This is a bit unfortunate. But it is the
- # safest thing to do. Fortunately, most images are defined in-tree.
+ # name into consideration.
+ #
+ # This means that different Docker images will never share the same
+ # cache. This is a bit unfortunate, but is the safest thing to do.
#
# For out-of-tree Docker images, we don't strictly need to incorporate
# the run-task content into the cache name. However, doing so preserves
@@ -520,6 +523,8 @@ def build_docker_worker_payload(config, task, task_def):
out_of_tree_image.encode("utf-8")
).hexdigest()
suffix += name_hash[0:12]
+ else:
+ suffix += "-<docker-image>"
else:
suffix = cache_version
@@ -539,13 +544,15 @@ def build_docker_worker_payload(config, task, task_def):
suffix=suffix,
)
caches[name] = cache["mount-point"]
- task_def["scopes"].append("docker-worker:cache:%s" % name)
+ task_def["scopes"].append(
+ {"task-reference": "docker-worker:cache:%s" % name}
+ )
# Assertion: only run-task is interested in this.
if run_task:
payload["env"]["TASKCLUSTER_CACHES"] = ";".join(sorted(caches.values()))
- payload["cache"] = caches
+ payload["cache"] = {"task-reference": caches}
# And send down volumes information to run-task as well.
if run_task and worker.get("volumes"):
@@ -752,7 +759,7 @@ def build_generic_worker_payload(config, task, task_def):
schema={
# the maximum time to run, in seconds
Required("max-run-time"): int,
- # locale key, if this is a locale beetmover job
+ # locale key, if this is a locale beetmover task
Optional("locale"): str,
Optional("partner-public"): bool,
Required("release-properties"): {
@@ -1075,7 +1082,11 @@ def build_task(config, tasks):
extra["parent"] = os.environ.get("TASK_ID", "")
if "expires-after" not in task:
- task["expires-after"] = "28 days" if config.params.is_try() else "1 year"
+ task["expires-after"] = (
+ config.graph_config._config.get("task-expires-after", "28 days")
+ if config.params.is_try()
+ else "1 year"
+ )
if "deadline-after" not in task:
if "task-deadline-after" in config.graph_config:
@@ -1142,9 +1153,9 @@ def build_task(config, tasks):
config.params["project"] + th_project_suffix, branch_rev
)
)
- task_def["metadata"]["description"] += " ([Treeherder push]({}))".format(
- th_push_link
- )
+ task_def["metadata"][
+ "description"
+ ] += f" ([Treeherder push]({th_push_link}))"
# add the payload and adjust anything else as required (e.g., scopes)
payload_builders[task["worker"]["implementation"]].builder(
@@ -1288,7 +1299,7 @@ def check_caches_are_volumes(task):
Caches and volumes are the only filesystem locations whose content
isn't defined by the Docker image itself. Some caches are optional
- depending on the job environment. We want paths that are potentially
+ depending on the task environment. We want paths that are potentially
caches to have as similar behavior regardless of whether a cache is
used. To help enforce this, we require that all paths used as caches
to be declared as Docker volumes. This check won't catch all offenders.
@@ -1343,7 +1354,9 @@ def check_run_task_caches(config, tasks):
main_command = command[0] if isinstance(command[0], str) else ""
run_task = main_command.endswith("run-task")
- for cache in payload.get("cache", {}):
+ for cache in payload.get("cache", {}).get(
+ "task-reference", payload.get("cache", {})
+ ):
if not cache.startswith(cache_prefix):
raise Exception(
"{} is using a cache ({}) which is not appropriate "
@@ -1364,7 +1377,7 @@ def check_run_task_caches(config, tasks):
"cache name"
)
- if not cache.endswith(suffix):
+ if suffix not in cache:
raise Exception(
f"{task['label']} is using a cache ({cache}) reserved for run-task "
"but the cache name is not dependent on the contents "
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/task_context.py b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/task_context.py
index 5c7ed6af80..bd36d827aa 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/task_context.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/task_context.py
@@ -81,9 +81,9 @@ transforms.add_validate(SCHEMA)
@transforms.add
-def render_task(config, jobs):
- for job in jobs:
- sub_config = job.pop("task-context")
+def render_task(config, tasks):
+ for task in tasks:
+ sub_config = task.pop("task-context")
params_context = {}
for var, path in sub_config.pop("from-parameters", {}).items():
if isinstance(path, str):
@@ -111,11 +111,11 @@ def render_task(config, jobs):
# Now that we have our combined context, we can substitute.
for field in fields:
- container, subfield = job, field
+ container, subfield = task, field
while "." in subfield:
f, subfield = subfield.split(".", 1)
container = container[f]
container[subfield] = substitute(container[subfield], **subs)
- yield job
+ yield task
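
The dotted-path walk above resolves fields like "worker.env.GREETING" one
segment at a time, then substitutes in place. A standalone sketch, using
str.format as a stand-in for taskgraph's substitute helper:

    def substitute_field(container: dict, field: str, **subs) -> None:
        # Walk "a.b.c" down to the parent dict, then format the leaf value.
        subfield = field
        while "." in subfield:
            key, subfield = subfield.split(".", 1)
            container = container[key]
        container[subfield] = container[subfield].format(**subs)

    task = {"worker": {"env": {"GREETING": "hello {name}"}}}
    substitute_field(task, "worker.env.GREETING", name="world")
    assert task["worker"]["env"]["GREETING"] == "hello world"
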
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/util/archive.py b/third_party/python/taskcluster_taskgraph/taskgraph/util/archive.py
index ee59ba4548..261a031038 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/util/archive.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/util/archive.py
@@ -12,6 +12,40 @@ import tarfile
DEFAULT_MTIME = 1451606400
+# Python 3.9 contains this change:
+# https://github.com/python/cpython/commit/674935b8caf33e47c78f1b8e197b1b77a04992d2
+# which changes the output of tar creation compared to earlier versions.
+# This code is used to generate tar files that are meant to be deterministic
+# across versions of python: specifically, it is used as part of computing the
+# hash of docker images, which needs to be identical between CI (which uses
+# python 3.8) and developer environments (using arbitrary versions of python,
+# at this point most probably more recent than 3.9).
+# So we subclass TarInfo so that, when used on python >= 3.9, it reproduces
+# the behavior from python < 3.9.
+# Here's how it works:
+# - the behavior in python >= 3.9 is the same as in python < 3.9 when the type
+#   encoded in the tarinfo is CHRTYPE or BLKTYPE.
+# - the value of the type is only compared when choosing which behavior to
+#   take.
+# - we replace the type with an equal value (so that using the value changes
+#   nothing) that also claims to be equal to CHRTYPE, so the condition that
+#   enables the old behavior is taken.
+class HackedType(bytes):
+    def __eq__(self, other):
+        if other == tarfile.CHRTYPE:
+            return True
+        # Defer to the plain bytes comparison; writing `self == other` here
+        # would recurse back into this method.
+        return bytes.__eq__(self, other)
+
+
+class TarInfo(tarfile.TarInfo):
+ @staticmethod
+ def _create_header(info, format, encoding, errors):
+ info["type"] = HackedType(info["type"])
+        # Ignore type checking: pyright complains because we're calling a
+        # non-public method.
+ return tarfile.TarInfo._create_header(info, format, encoding, errors) # type: ignore
+
+
def create_tar_from_files(fp, files):
"""Create a tar file deterministically.
@@ -25,15 +59,23 @@ def create_tar_from_files(fp, files):
FUTURE accept a filename argument (or create APIs to write files)
"""
- with tarfile.open(name="", mode="w", fileobj=fp, dereference=True) as tf:
+ # The format is explicitly set to tarfile.GNU_FORMAT, because this default format
+ # has been changed in Python 3.8.
+ with tarfile.open(
+ name="", mode="w", fileobj=fp, dereference=True, format=tarfile.GNU_FORMAT
+ ) as tf:
for archive_path, f in sorted(files.items()):
if isinstance(f, str):
- mode = os.stat(f).st_mode
+ s = os.stat(f)
+ mode = s.st_mode
+ size = s.st_size
f = open(f, "rb")
else:
mode = 0o0644
+ size = len(f.read())
+ f.seek(0)
- ti = tarfile.TarInfo(archive_path)
+ ti = TarInfo(archive_path)
ti.mode = mode
ti.type = tarfile.REGTYPE
@@ -56,9 +98,7 @@ def create_tar_from_files(fp, files):
# Set mtime to a constant value.
ti.mtime = DEFAULT_MTIME
- f.seek(0, 2)
- ti.size = f.tell()
- f.seek(0, 0)
+ ti.size = size
# tarfile wants to pass a size argument to read(). So just
# wrap/buffer in a proper file object interface.
tf.addfile(ti, f)
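
With the tar format pinned to GNU and the mtime fixed, two runs over equal
inputs should produce byte-identical archives. A hedged usage sketch; per the
code above, create_tar_from_files accepts a writable file object plus a
mapping of archive path to a filesystem path or binary file object:

    import hashlib
    import io

    from taskgraph.util.archive import create_tar_from_files

    def tar_digest(files: dict) -> str:
        buf = io.BytesIO()
        create_tar_from_files(buf, files)
        return hashlib.sha256(buf.getvalue()).hexdigest()

    # Byte-for-byte reproducible regardless of when or where it runs.
    a = tar_digest({"hello.txt": io.BytesIO(b"hello\n")})
    b = tar_digest({"hello.txt": io.BytesIO(b"hello\n")})
    assert a == b
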
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/util/cached_tasks.py b/third_party/python/taskcluster_taskgraph/taskgraph/util/cached_tasks.py
index 974b114902..1a3baad5be 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/util/cached_tasks.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/util/cached_tasks.py
@@ -7,6 +7,7 @@ import hashlib
import time
TARGET_CACHE_INDEX = "{cache_prefix}.cache.level-{level}.{type}.{name}.hash.{digest}"
+TARGET_PR_CACHE_INDEX = "{cache_prefix}.cache.pr.{type}.{name}.hash.{digest}"
EXTRA_CACHE_INDEXES = [
"{cache_prefix}.cache.level-{level}.{type}.{name}.latest",
"{cache_prefix}.cache.level-{level}.{type}.{name}.pushdate.{build_date_long}",
@@ -53,31 +54,45 @@ def add_optimization(
# We'll try to find a cached version of the toolchain at levels above and
# including the current level, starting at the highest level.
- # Chain-of-trust doesn't handle tasks not built on the tip of a
- # pull-request, so don't look for level-1 tasks if building a pull-request.
index_routes = []
min_level = int(config.params["level"])
- if config.params["tasks_for"] == "github-pull-request":
- min_level = max(min_level, 3)
for level in reversed(range(min_level, 4)):
subs["level"] = level
index_routes.append(TARGET_CACHE_INDEX.format(**subs))
- taskdesc["optimization"] = {"index-search": index_routes}
+ # Pull requests use a different target cache index route. This way we can
+ # be confident they won't be used by anything other than the pull request
+ # that created the cache in the first place.
+ if config.params["tasks_for"].startswith(
+ "github-pull-request"
+ ) and config.graph_config["taskgraph"].get("cache-pull-requests", True):
+ subs["head_ref"] = config.params["head_ref"]
+ if subs["head_ref"].startswith("refs/heads/"):
+ subs["head_ref"] = subs["head_ref"][11:]
+ index_routes.append(TARGET_PR_CACHE_INDEX.format(**subs))
+
+ taskdesc["optimization"] = {"index-search": index_routes}
# ... and cache at the lowest level.
subs["level"] = config.params["level"]
- taskdesc.setdefault("routes", []).append(
- f"index.{TARGET_CACHE_INDEX.format(**subs)}"
- )
- # ... and add some extra routes for humans
- subs["build_date_long"] = time.strftime(
- "%Y.%m.%d.%Y%m%d%H%M%S", time.gmtime(config.params["build_date"])
- )
- taskdesc["routes"].extend(
- [f"index.{route.format(**subs)}" for route in EXTRA_CACHE_INDEXES]
- )
+ if config.params["tasks_for"].startswith("github-pull-request"):
+ if config.graph_config["taskgraph"].get("cache-pull-requests", True):
+ taskdesc.setdefault("routes", []).append(
+ f"index.{TARGET_PR_CACHE_INDEX.format(**subs)}"
+ )
+ else:
+ taskdesc.setdefault("routes", []).append(
+ f"index.{TARGET_CACHE_INDEX.format(**subs)}"
+ )
+
+ # ... and add some extra routes for humans
+ subs["build_date_long"] = time.strftime(
+ "%Y.%m.%d.%Y%m%d%H%M%S", time.gmtime(config.params["build_date"])
+ )
+ taskdesc["routes"].extend(
+ [f"index.{route.format(**subs)}" for route in EXTRA_CACHE_INDEXES]
+ )
taskdesc["attributes"]["cached_task"] = {
"type": cache_type,
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/util/decision.py b/third_party/python/taskcluster_taskgraph/taskgraph/util/decision.py
deleted file mode 100644
index d0e1e1079f..0000000000
--- a/third_party/python/taskcluster_taskgraph/taskgraph/util/decision.py
+++ /dev/null
@@ -1,79 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-"""
-Utilities for generating a decision task from :file:`.taskcluster.yml`.
-"""
-
-
-import os
-
-import jsone
-import slugid
-import yaml
-
-from .templates import merge
-from .time import current_json_time
-from .vcs import find_hg_revision_push_info
-
-
-def make_decision_task(params, root, context, head_rev=None):
- """Generate a basic decision task, based on the root .taskcluster.yml"""
- with open(os.path.join(root, ".taskcluster.yml"), "rb") as f:
- taskcluster_yml = yaml.safe_load(f)
-
- if not head_rev:
- head_rev = params["head_rev"]
-
- if params["repository_type"] == "hg":
- pushlog = find_hg_revision_push_info(params["repository_url"], head_rev)
-
- hg_push_context = {
- "pushlog_id": pushlog["pushid"],
- "pushdate": pushlog["pushdate"],
- "owner": pushlog["user"],
- }
- else:
- hg_push_context = {}
-
- slugids = {}
-
- def as_slugid(name):
- # https://github.com/taskcluster/json-e/issues/164
- name = name[0]
- if name not in slugids:
- slugids[name] = slugid.nice()
- return slugids[name]
-
- # provide a similar JSON-e context to what mozilla-taskcluster provides:
- # https://docs.taskcluster.net/reference/integrations/mozilla-taskcluster/docs/taskcluster-yml
- # but with a different tasks_for and an extra `cron` section
- context = merge(
- {
- "repository": {
- "url": params["repository_url"],
- "project": params["project"],
- "level": params["level"],
- },
- "push": merge(
- {
- "revision": params["head_rev"],
- # remainder are fake values, but the decision task expects them anyway
- "comment": " ",
- },
- hg_push_context,
- ),
- "now": current_json_time(),
- "as_slugid": as_slugid,
- },
- context,
- )
-
- rendered = jsone.render(taskcluster_yml, context)
- if len(rendered["tasks"]) != 1:
- raise Exception("Expected .taskcluster.yml to only produce one cron task")
- task = rendered["tasks"][0]
-
- task_id = task.pop("taskId")
- return (task_id, task)
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/util/docker.py b/third_party/python/taskcluster_taskgraph/taskgraph/util/docker.py
index c37a69f98f..13815381ed 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/util/docker.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/util/docker.py
@@ -7,6 +7,7 @@ import hashlib
import io
import os
import re
+from typing import Optional
from taskgraph.util.archive import create_tar_gz_from_files
from taskgraph.util.memoize import memoize
@@ -16,17 +17,27 @@ IMAGE_DIR = os.path.join(".", "taskcluster", "docker")
from .yaml import load_yaml
-def docker_image(name, by_tag=False):
+def docker_image(name: str, by_tag: bool = False) -> Optional[str]:
"""
Resolve in-tree prebuilt docker image to ``<registry>/<repository>@sha256:<digest>``,
or ``<registry>/<repository>:<tag>`` if `by_tag` is `True`.
+
+ Args:
+ name (str): The image to build.
+ by_tag (bool): If True, will apply a tag based on VERSION file.
+ Otherwise will apply a hash based on HASH file.
+ Returns:
+ Optional[str]: Image if it can be resolved, otherwise None.
"""
try:
with open(os.path.join(IMAGE_DIR, name, "REGISTRY")) as f:
registry = f.read().strip()
except OSError:
- with open(os.path.join(IMAGE_DIR, "REGISTRY")) as f:
- registry = f.read().strip()
+ try:
+ with open(os.path.join(IMAGE_DIR, "REGISTRY")) as f:
+ registry = f.read().strip()
+ except OSError:
+ return None
if not by_tag:
hashfile = os.path.join(IMAGE_DIR, name, "HASH")
@@ -34,7 +45,7 @@ def docker_image(name, by_tag=False):
with open(hashfile) as f:
return f"{registry}/{name}@{f.read().strip()}"
except OSError:
- raise Exception(f"Failed to read HASH file {hashfile}")
+ return None
try:
with open(os.path.join(IMAGE_DIR, name, "VERSION")) as f:
@@ -197,7 +208,7 @@ def stream_context_tar(topsrcdir, context_dir, out_file, image_name=None, args=N
@memoize
def image_paths():
"""Return a map of image name to paths containing their Dockerfile."""
- config = load_yaml("taskcluster", "ci", "docker-image", "kind.yml")
+ config = load_yaml("taskcluster", "kinds", "docker-image", "kind.yml")
return {
k: os.path.join(IMAGE_DIR, v.get("definition", k))
for k, v in config["tasks"].items()
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/util/hash.py b/third_party/python/taskcluster_taskgraph/taskgraph/util/hash.py
index 5d884fc318..d42b2ecef9 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/util/hash.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/util/hash.py
@@ -39,10 +39,7 @@ def hash_paths(base_path, patterns):
raise Exception("%s did not match anything" % pattern)
for path in sorted(found):
h.update(
- "{} {}\n".format(
- hash_path(mozpath.abspath(mozpath.join(base_path, path))),
- mozpath.normsep(path),
- ).encode("utf-8")
+ f"{hash_path(mozpath.abspath(mozpath.join(base_path, path)))} {mozpath.normsep(path)}\n".encode()
)
return h.hexdigest()
@@ -55,4 +52,8 @@ def _find_matching_files(base_path, pattern):
@memoize
def _get_all_files(base_path):
- return [str(path) for path in Path(base_path).rglob("*") if path.is_file()]
+ return [
+ mozpath.normsep(str(path))
+ for path in Path(base_path).rglob("*")
+ if path.is_file()
+ ]
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/util/keyed_by.py b/third_party/python/taskcluster_taskgraph/taskgraph/util/keyed_by.py
index 9b0c5a44fb..00c84ba980 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/util/keyed_by.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/util/keyed_by.py
@@ -66,8 +66,8 @@ def evaluate_keyed_by(
# Error out when only 'default' is specified as only alternatives,
# because we don't need to by-{keyed_by} there.
raise Exception(
- "Keyed-by '{}' unnecessary with only value 'default' "
- "found, when determining item {}".format(keyed_by, item_name)
+ f"Keyed-by '{keyed_by}' unnecessary with only value 'default' "
+ f"found, when determining item {item_name}"
)
if key is None:
@@ -76,22 +76,20 @@ def evaluate_keyed_by(
continue
else:
raise Exception(
- "No attribute {} and no value for 'default' found "
- "while determining item {}".format(keyed_by, item_name)
+ f"No attribute {keyed_by} and no value for 'default' found "
+ f"while determining item {item_name}"
)
matches = keymatch(alternatives, key)
if enforce_single_match and len(matches) > 1:
raise Exception(
- "Multiple matching values for {} {!r} found while "
- "determining item {}".format(keyed_by, key, item_name)
+ f"Multiple matching values for {keyed_by} {key!r} found while "
+ f"determining item {item_name}"
)
elif matches:
value = matches[0]
continue
raise Exception(
- "No {} matching {!r} nor 'default' found while determining item {}".format(
- keyed_by, key, item_name
- )
+ f"No {keyed_by} matching {key!r} nor 'default' found while determining item {item_name}"
)
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/util/memoize.py b/third_party/python/taskcluster_taskgraph/taskgraph/util/memoize.py
index 56b513e74c..a4bc50cc26 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/util/memoize.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/util/memoize.py
@@ -2,39 +2,6 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
-# Imported from
-# https://searchfox.org/mozilla-central/rev/c3ebaf6de2d481c262c04bb9657eaf76bf47e2ac/python/mozbuild/mozbuild/util.py#923-949
-
-
import functools
-
-class memoize(dict):
- """A decorator to memoize the results of function calls depending
- on its arguments.
- Both functions and instance methods are handled, although in the
- instance method case, the results are cache in the instance itself.
- """
-
- def __init__(self, func):
- self.func = func
- functools.update_wrapper(self, func)
-
- def __call__(self, *args):
- if args not in self:
- self[args] = self.func(*args)
- return self[args]
-
- def method_call(self, instance, *args):
- name = "_%s" % self.func.__name__
- if not hasattr(instance, name):
- setattr(instance, name, {})
- cache = getattr(instance, name)
- if args not in cache:
- cache[args] = self.func(instance, *args)
- return cache[args]
-
- def __get__(self, instance, cls):
- return functools.update_wrapper(
- functools.partial(self.method_call, instance), self.func
- )
+memoize = functools.lru_cache(maxsize=None) # backwards compatibility shim
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/util/parameterization.py b/third_party/python/taskcluster_taskgraph/taskgraph/util/parameterization.py
index 6233a98a40..1973f6f7df 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/util/parameterization.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/util/parameterization.py
@@ -20,6 +20,12 @@ def _recurse(val, param_fns):
if len(val) == 1:
for param_key, param_fn in param_fns.items():
if set(val.keys()) == {param_key}:
+ if isinstance(val[param_key], dict):
+ # handle `{"task-reference": {"<foo>": "bar"}}`
+ return {
+ param_fn(key): recurse(v)
+ for key, v in val[param_key].items()
+ }
return param_fn(val[param_key])
return {k: recurse(v) for k, v in val.items()}
else:
@@ -74,17 +80,14 @@ def resolve_task_references(label, task_def, task_id, decision_task_id, dependen
task_id = dependencies[dependency]
except KeyError:
raise KeyError(
- "task '{}' has no dependency named '{}'".format(
- label, dependency
- )
+ f"task '{label}' has no dependency named '{dependency}'"
)
- assert artifact_name.startswith(
- "public/"
- ), "artifact-reference only supports public artifacts, not `{}`".format(
- artifact_name
- )
- return get_artifact_url(task_id, artifact_name)
+ use_proxy = False
+ if not artifact_name.startswith("public/"):
+ use_proxy = True
+
+ return get_artifact_url(task_id, artifact_name, use_proxy=use_proxy)
return ARTIFACT_REFERENCE_PATTERN.sub(repl, val)
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/util/schema.py b/third_party/python/taskcluster_taskgraph/taskgraph/util/schema.py
index 3989f71182..02e79a3a27 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/util/schema.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/util/schema.py
@@ -74,7 +74,7 @@ def resolve_keyed_by(
For example, given item::
- job:
+ task:
test-platform: linux128
chunks:
by-test-platform:
@@ -82,10 +82,10 @@ def resolve_keyed_by(
win.*: 6
default: 12
- a call to `resolve_keyed_by(item, 'job.chunks', item['thing-name'])`
+ a call to `resolve_keyed_by(item, 'task.chunks', item['thing-name'])`
would mutate item in-place to::
- job:
+ task:
test-platform: linux128
chunks: 12
@@ -182,7 +182,7 @@ def check_schema(schema):
if not identifier_re.match(k) and not excepted(path):
raise RuntimeError(
"YAML schemas should use dashed lower-case identifiers, "
- "not {!r} @ {}".format(k, path)
+ f"not {k!r} @ {path}"
)
elif isinstance(k, (voluptuous.Optional, voluptuous.Required)):
check_identifier(path, k.schema)
@@ -191,9 +191,7 @@ def check_schema(schema):
check_identifier(path, v)
elif not excepted(path):
raise RuntimeError(
- "Unexpected type in YAML schema: {} @ {}".format(
- type(k).__name__, path
- )
+ f"Unexpected type in YAML schema: {type(k).__name__} @ {path}"
)
if isinstance(sch, collections.abc.Mapping):
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/util/set_name.py b/third_party/python/taskcluster_taskgraph/taskgraph/util/set_name.py
new file mode 100644
index 0000000000..4c27a9cca1
--- /dev/null
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/util/set_name.py
@@ -0,0 +1,34 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# Define a collection of set_name functions
+# Note: this is stored here instead of where it is used in the `from_deps`
+# transform to give consumers a chance to register their own `set_name`
+# handlers before the `from_deps` schema is created.
+SET_NAME_MAP = {}
+
+
+def set_name(name, schema=None):
+ def wrapper(func):
+ assert (
+ name not in SET_NAME_MAP
+ ), f"duplicate set_name function name {name} ({func} and {SET_NAME_MAP[name]})"
+ SET_NAME_MAP[name] = func
+ func.schema = schema
+ return func
+
+ return wrapper
+
+
+@set_name("strip-kind")
+def set_name_strip_kind(config, tasks, primary_dep, primary_kind):
+ if primary_dep.label.startswith(primary_kind):
+ return primary_dep.label[len(primary_kind) + 1 :]
+ else:
+ return primary_dep.label
+
+
+@set_name("retain-kind")
+def set_name_retain_kind(config, tasks, primary_dep, primary_kind):
+ return primary_dep.label
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/util/shell.py b/third_party/python/taskcluster_taskgraph/taskgraph/util/shell.py
index d695767f05..16b71b7d6a 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/util/shell.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/util/shell.py
@@ -14,7 +14,7 @@ def _quote(s):
As a special case, if given an int, returns a string containing the int,
not enclosed in quotes.
"""
- if type(s) == int:
+ if isinstance(s, int):
return "%d" % s
# Empty strings need to be quoted to have any significance
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/util/taskcluster.py b/third_party/python/taskcluster_taskgraph/taskgraph/util/taskcluster.py
index a830a473b3..b467e98a97 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/util/taskcluster.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/util/taskcluster.py
@@ -3,10 +3,12 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import copy
import datetime
import functools
import logging
import os
+from typing import Dict, List, Union
import requests
import taskcluster_urls as liburls
@@ -53,9 +55,11 @@ def get_root_url(use_proxy):
logger.debug(
"Running in Taskcluster instance {}{}".format(
os.environ["TASKCLUSTER_ROOT_URL"],
- " with taskcluster-proxy"
- if "TASKCLUSTER_PROXY_URL" in os.environ
- else "",
+ (
+ " with taskcluster-proxy"
+ if "TASKCLUSTER_PROXY_URL" in os.environ
+ else ""
+ ),
)
)
return liburls.normalize_root_url(os.environ["TASKCLUSTER_ROOT_URL"])
@@ -136,22 +140,9 @@ def _handle_artifact(path, response):
def get_artifact_url(task_id, path, use_proxy=False):
artifact_tmpl = liburls.api(
- get_root_url(False), "queue", "v1", "task/{}/artifacts/{}"
+ get_root_url(use_proxy), "queue", "v1", "task/{}/artifacts/{}"
)
- data = artifact_tmpl.format(task_id, path)
- if use_proxy:
- # Until Bug 1405889 is deployed, we can't download directly
- # from the taskcluster-proxy. Work around by using the /bewit
- # endpoint instead.
- # The bewit URL is the body of a 303 redirect, which we don't
- # want to follow (which fetches a potentially large resource).
- response = _do_request(
- os.environ["TASKCLUSTER_PROXY_URL"] + "/bewit",
- data=data,
- allow_redirects=False,
- )
- return response.text
- return data
+ return artifact_tmpl.format(task_id, path)
def get_artifact(task_id, path, use_proxy=False):
@@ -244,6 +235,7 @@ def get_task_url(task_id, use_proxy=False):
return task_tmpl.format(task_id)
+@memoize
def get_task_definition(task_id, use_proxy=False):
response = _do_request(get_task_url(task_id, use_proxy))
return response.json()
@@ -327,11 +319,7 @@ def get_purge_cache_url(provisioner_id, worker_type, use_proxy=False):
def purge_cache(provisioner_id, worker_type, cache_name, use_proxy=False):
"""Requests a cache purge from the purge-caches service."""
if testing:
- logger.info(
- "Would have purged {}/{}/{}.".format(
- provisioner_id, worker_type, cache_name
- )
- )
+ logger.info(f"Would have purged {provisioner_id}/{worker_type}/{cache_name}.")
else:
logger.info(f"Purging {provisioner_id}/{worker_type}/{cache_name}.")
purge_cache_url = get_purge_cache_url(provisioner_id, worker_type, use_proxy)
@@ -371,3 +359,40 @@ def list_task_group_incomplete_tasks(task_group_id):
params = {"continuationToken": resp.get("continuationToken")}
else:
break
+
+
+@memoize
+def _get_deps(task_ids, use_proxy):
+ upstream_tasks = {}
+ for task_id in task_ids:
+ task_def = get_task_definition(task_id, use_proxy)
+ upstream_tasks[task_def["metadata"]["name"]] = task_id
+
+ upstream_tasks.update(_get_deps(tuple(task_def["dependencies"]), use_proxy))
+
+ return upstream_tasks
+
+
+def get_ancestors(
+ task_ids: Union[List[str], str], use_proxy: bool = False
+) -> Dict[str, str]:
+ """Gets the ancestor tasks of the given task_ids as a dictionary of label -> taskid.
+
+ Args:
+ task_ids (str or [str]): A single task id or a list of task ids to find the ancestors of.
+ use_proxy (bool): See get_root_url.
+
+ Returns:
+ dict: A dict whose keys are task labels and values are task ids.
+ """
+ upstream_tasks: Dict[str, str] = {}
+
+ if isinstance(task_ids, str):
+ task_ids = [task_ids]
+
+ for task_id in task_ids:
+ task_def = get_task_definition(task_id, use_proxy)
+
+ upstream_tasks.update(_get_deps(tuple(task_def["dependencies"]), use_proxy))
+
+ return copy.deepcopy(upstream_tasks)
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/util/time.py b/third_party/python/taskcluster_taskgraph/taskgraph/util/time.py
index e511978b5f..6639e5dddd 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/util/time.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/util/time.py
@@ -73,9 +73,7 @@ def value_of(input_str):
if unit not in ALIASES:
raise UnknownTimeMeasurement(
- "{} is not a valid time measure use one of {}".format(
- unit, sorted(ALIASES.keys())
- )
+            f"{unit} is not a valid time measure; use one of {sorted(ALIASES.keys())}"
)
return ALIASES[unit](value)
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/util/treeherder.py b/third_party/python/taskcluster_taskgraph/taskgraph/util/treeherder.py
index cff5f286cc..6bb6dbd137 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/util/treeherder.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/util/treeherder.py
@@ -42,22 +42,25 @@ def replace_group(treeherder_symbol, new_group):
return join_symbol(new_group, symbol)
-def inherit_treeherder_from_dep(job, dep_job):
- """Inherit treeherder defaults from dep_job"""
- treeherder = job.get("treeherder", {})
+def inherit_treeherder_from_dep(task, dep_task):
+ """Inherit treeherder defaults from dep_task"""
+ treeherder = task.get("treeherder", {})
dep_th_platform = (
- dep_job.task.get("extra", {})
+ dep_task.task.get("extra", {})
.get("treeherder", {})
.get("machine", {})
.get("platform", "")
)
dep_th_collection = list(
- dep_job.task.get("extra", {}).get("treeherder", {}).get("collection", {}).keys()
+ dep_task.task.get("extra", {})
+ .get("treeherder", {})
+ .get("collection", {})
+ .keys()
)[0]
treeherder.setdefault("platform", f"{dep_th_platform}/{dep_th_collection}")
treeherder.setdefault(
- "tier", dep_job.task.get("extra", {}).get("treeherder", {}).get("tier", 1)
+ "tier", dep_task.task.get("extra", {}).get("treeherder", {}).get("tier", 1)
)
# Does not set symbol
treeherder.setdefault("kind", "build")
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/util/vcs.py b/third_party/python/taskcluster_taskgraph/taskgraph/util/vcs.py
index 2d967d2645..c2fd0d3236 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/util/vcs.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/util/vcs.py
@@ -10,9 +10,6 @@ import subprocess
from abc import ABC, abstractmethod, abstractproperty
from shutil import which
-import requests
-from redo import retry
-
from taskgraph.util.path import ancestors
PUSHLOG_TMPL = "{}/json-pushes?version=2&changeset={}&tipsonly=1&full=1"
@@ -21,7 +18,7 @@ logger = logging.getLogger(__name__)
class Repository(ABC):
- # Both mercurial and git use sha1 as revision idenfiers. Luckily, both define
+ # Both mercurial and git use sha1 as revision identifiers. Luckily, both define
# the same value as the null revision.
#
# https://github.com/git/git/blob/dc04167d378fb29d30e1647ff6ff51dd182bc9a3/t/oid-info/hash-info#L7
@@ -519,34 +516,3 @@ def get_repository(path):
return GitRepository(path)
raise RuntimeError("Current directory is neither a git or hg repository")
-
-
-def find_hg_revision_push_info(repository, revision):
- """Given the parameters for this action and a revision, find the
- pushlog_id of the revision."""
- pushlog_url = PUSHLOG_TMPL.format(repository, revision)
-
- def query_pushlog(url):
- r = requests.get(pushlog_url, timeout=60)
- r.raise_for_status()
- return r
-
- r = retry(
- query_pushlog,
- args=(pushlog_url,),
- attempts=5,
- sleeptime=10,
- )
- pushes = r.json()["pushes"]
- if len(pushes) != 1:
- raise RuntimeError(
- "Unable to find a single pushlog_id for {} revision {}: {}".format(
- repository, revision, pushes
- )
- )
- pushid = list(pushes.keys())[0]
- return {
- "pushdate": pushes[pushid]["date"],
- "pushid": pushid,
- "user": pushes[pushid]["user"],
- }
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/util/verify.py b/third_party/python/taskcluster_taskgraph/taskgraph/util/verify.py
index e6705c16cf..b5bb0889ae 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/util/verify.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/util/verify.py
@@ -134,10 +134,8 @@ def verify_task_graph_symbol(task, taskgraph, scratch_pad, graph_config, paramet
collection_keys = tuple(sorted(treeherder.get("collection", {}).keys()))
if len(collection_keys) != 1:
raise Exception(
- "Task {} can't be in multiple treeherder collections "
- "(the part of the platform after `/`): {}".format(
- task.label, collection_keys
- )
+ f"Task {task.label} can't be in multiple treeherder collections "
+ f"(the part of the platform after `/`): {collection_keys}"
)
platform = treeherder.get("machine", {}).get("platform")
group_symbol = treeherder.get("groupSymbol")
@@ -175,9 +173,7 @@ def verify_trust_domain_v2_routes(
if route.startswith(route_prefix):
if route in scratch_pad:
raise Exception(
- "conflict between {}:{} for route: {}".format(
- task.label, scratch_pad[route], route
- )
+ f"conflict between {task.label}:{scratch_pad[route]} for route: {route}"
)
else:
scratch_pad[route] = task.label
@@ -206,9 +202,7 @@ def verify_routes_notification_filters(
route_filter = route.split(".")[-1]
if route_filter not in valid_filters:
raise Exception(
- "{} has invalid notification filter ({})".format(
- task.label, route_filter
- )
+ f"{task.label} has invalid notification filter ({route_filter})"
)
@@ -235,12 +229,7 @@ def verify_dependency_tiers(task, taskgraph, scratch_pad, graph_config, paramete
continue
if tier < tiers[d]:
raise Exception(
- "{} (tier {}) cannot depend on {} (tier {})".format(
- task.label,
- printable_tier(tier),
- d,
- printable_tier(tiers[d]),
- )
+ f"{task.label} (tier {printable_tier(tier)}) cannot depend on {d} (tier {printable_tier(tiers[d])})"
)
@@ -262,11 +251,7 @@ def verify_toolchain_alias(task, taskgraph, scratch_pad, graph_config, parameter
if key in scratch_pad:
raise Exception(
"Duplicate toolchain-alias in tasks "
- "`{}`and `{}`: {}".format(
- task.label,
- scratch_pad[key],
- key,
- )
+                    f"`{task.label}` and `{scratch_pad[key]}`: {key}"
)
else:
scratch_pad[key] = task.label
diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/util/yaml.py b/third_party/python/taskcluster_taskgraph/taskgraph/util/yaml.py
index 141c7a16d3..a733521527 100644
--- a/third_party/python/taskcluster_taskgraph/taskgraph/util/yaml.py
+++ b/third_party/python/taskcluster_taskgraph/taskgraph/util/yaml.py
@@ -5,7 +5,10 @@
import os
-from yaml.loader import SafeLoader
+try:
+ from yaml import CSafeLoader as SafeLoader
+except ImportError:
+ from yaml import SafeLoader
class UnicodeLoader(SafeLoader):