From 8dd16259287f58f9273002717ec4d27e97127719 Mon Sep 17 00:00:00 2001
From: Daniel Baumann
Date: Wed, 12 Jun 2024 07:43:14 +0200
Subject: Merging upstream version 127.0.

Signed-off-by: Daniel Baumann
---
 .../glean_parser-13.0.1.dist-info/AUTHORS.md | 17 -
 .../glean_parser-13.0.1.dist-info/LICENSE | 373 ----
 .../glean_parser-13.0.1.dist-info/METADATA | 790 --------
 .../glean_parser-13.0.1.dist-info/RECORD | 48 --
 .../glean_parser-13.0.1.dist-info/WHEEL | 5 -
 .../glean_parser-13.0.1.dist-info/entry_points.txt | 2 -
 .../glean_parser-13.0.1.dist-info/top_level.txt | 1 -
 .../glean_parser-14.0.1.dist-info/AUTHORS.md | 17 +
 .../glean_parser-14.0.1.dist-info/LICENSE | 373 ++++
 .../glean_parser-14.0.1.dist-info/METADATA | 799 +++++++++++++++
 .../glean_parser-14.0.1.dist-info/RECORD | 48 ++
 .../glean_parser-14.0.1.dist-info/WHEEL | 5 +
 .../glean_parser-14.0.1.dist-info/entry_points.txt | 2 +
 .../glean_parser-14.0.1.dist-info/top_level.txt | 1 +
 .../glean_parser/glean_parser/javascript_server.py | 16 +-
 .../python/glean_parser/glean_parser/kotlin.py | 25 +
 .../python/glean_parser/glean_parser/parser.py | 24 +-
 .../python/glean_parser/glean_parser/pings.py | 6 +
 .../glean_parser/schemas/pings.2-0-0.schema.yaml | 22 +
 .../python/glean_parser/glean_parser/swift.py | 23 +-
 .../templates/javascript_server.jinja2 | 54 +-
 .../glean_parser/templates/kotlin.jinja2 | 66 +-
 .../glean_parser/templates/rust.jinja2 | 2 +-
 .../glean_parser/templates/swift.jinja2 | 49 +-
 .../python/glean_parser/glean_parser/util.py | 2 +
 third_party/python/poetry.lock | 15 +-
 third_party/python/requirements.in | 4 +-
 third_party/python/requirements.txt | 13 +-
 .../taskcluster_taskgraph-6.3.0.dist-info/LICENSE | 373 ----
 .../taskcluster_taskgraph-6.3.0.dist-info/METADATA | 28 -
 .../taskcluster_taskgraph-6.3.0.dist-info/RECORD | 80 ---
 .../taskcluster_taskgraph-6.3.0.dist-info/WHEEL | 5 -
 .../entry_points.txt | 2 -
 .../top_level.txt | 1 -
 .../taskcluster_taskgraph-8.0.1.dist-info/LICENSE | 373 ++++
 .../taskcluster_taskgraph-8.0.1.dist-info/METADATA | 123 ++++
 .../taskcluster_taskgraph-8.0.1.dist-info/RECORD | 79 ++
 .../taskcluster_taskgraph-8.0.1.dist-info/WHEEL | 5 +
 .../entry_points.txt | 2 +
 .../top_level.txt | 1 +
 .../taskcluster_taskgraph/taskgraph/__init__.py | 2 +-
 .../taskgraph/actions/add_new_jobs.py | 2 +-
 .../taskgraph/actions/cancel.py | 4 +-
 .../taskgraph/actions/cancel_all.py | 4 +-
 .../taskgraph/actions/rebuild_cached_tasks.py | 2 +-
 .../taskgraph/actions/registry.py | 34 +-
 .../taskgraph/actions/retrigger.py | 26 +-
 .../taskgraph/actions/util.py | 15 +-
 .../taskcluster_taskgraph/taskgraph/config.py | 20 +-
 .../taskcluster_taskgraph/taskgraph/create.py | 2 +-
 .../taskcluster_taskgraph/taskgraph/decision.py | 23 +-
 .../taskcluster_taskgraph/taskgraph/docker.py | 48 +-
 .../taskgraph/files_changed.py | 91 ---
 .../taskcluster_taskgraph/taskgraph/generator.py | 8 +-
 .../taskgraph/loader/default.py | 4 +-
 .../python/taskcluster_taskgraph/taskgraph/main.py | 83 ++-
 .../taskcluster_taskgraph/taskgraph/morph.py | 1 +
 .../taskgraph/optimize/base.py | 12 +
 .../taskgraph/optimize/strategies.py | 16 +-
 .../taskcluster_taskgraph/taskgraph/parameters.py | 5 +-
 .../taskgraph/run-task/run-task | 14 +-
 .../taskgraph/target_tasks.py | 8 +-
 .../taskgraph/transforms/__init__.py | 3 -
 .../taskgraph/transforms/base.py | 2 +-
 .../taskgraph/transforms/code_review.py | 10 +-
 .../taskgraph/transforms/docker_image.py | 16 +-
 .../taskgraph/transforms/fetch.py | 65 +-
 .../taskgraph/transforms/from_deps.py | 21 +-
 .../taskgraph/transforms/job/__init__.py | 453 ------------
 .../taskgraph/transforms/job/common.py | 171 -----
 .../taskgraph/transforms/job/index_search.py | 37 -
 .../taskgraph/transforms/job/run_task.py | 231 ------
 .../taskgraph/transforms/job/toolchain.py | 175 -----
 .../taskgraph/transforms/run/__init__.py | 451 ++++++++++++
 .../taskgraph/transforms/run/common.py | 165 +++++
 .../taskgraph/transforms/run/index_search.py | 37 +
 .../taskgraph/transforms/run/run_task.py | 231 ++++++
 .../taskgraph/transforms/run/toolchain.py | 175 +++++
 .../taskgraph/transforms/task.py | 61 +-
 .../taskgraph/transforms/task_context.py | 10 +-
 .../taskgraph/util/archive.py | 52 +-
 .../taskgraph/util/cached_tasks.py | 45 +-
 .../taskgraph/util/decision.py | 79 --
 .../taskcluster_taskgraph/taskgraph/util/docker.py | 21 +-
 .../taskcluster_taskgraph/taskgraph/util/hash.py | 11 +-
 .../taskgraph/util/keyed_by.py | 16 +-
 .../taskgraph/util/memoize.py | 35 +-
 .../taskgraph/util/parameterization.py | 21 +-
 .../taskcluster_taskgraph/taskgraph/util/schema.py | 12 +-
 .../taskgraph/util/set_name.py | 34 +
 .../taskcluster_taskgraph/taskgraph/util/shell.py | 2 +-
 .../taskgraph/util/taskcluster.py | 71 +-
 .../taskcluster_taskgraph/taskgraph/util/time.py | 4 +-
 .../taskgraph/util/treeherder.py | 15 +-
 .../taskcluster_taskgraph/taskgraph/util/vcs.py | 36 +-
 .../taskcluster_taskgraph/taskgraph/util/verify.py | 27 +-
 .../taskcluster_taskgraph/taskgraph/util/yaml.py | 5 +-
 97 files changed, 3706 insertions(+), 3392 deletions(-)
 delete mode 100644 third_party/python/glean_parser/glean_parser-13.0.1.dist-info/AUTHORS.md
 delete mode 100644 third_party/python/glean_parser/glean_parser-13.0.1.dist-info/LICENSE
 delete mode 100644 third_party/python/glean_parser/glean_parser-13.0.1.dist-info/METADATA
 delete mode 100644 third_party/python/glean_parser/glean_parser-13.0.1.dist-info/RECORD
 delete mode 100644 third_party/python/glean_parser/glean_parser-13.0.1.dist-info/WHEEL
 delete mode 100644 third_party/python/glean_parser/glean_parser-13.0.1.dist-info/entry_points.txt
 delete mode 100644 third_party/python/glean_parser/glean_parser-13.0.1.dist-info/top_level.txt
 create mode 100644 third_party/python/glean_parser/glean_parser-14.0.1.dist-info/AUTHORS.md
 create mode 100644 third_party/python/glean_parser/glean_parser-14.0.1.dist-info/LICENSE
 create mode 100644 third_party/python/glean_parser/glean_parser-14.0.1.dist-info/METADATA
 create mode 100644 third_party/python/glean_parser/glean_parser-14.0.1.dist-info/RECORD
 create mode 100644 third_party/python/glean_parser/glean_parser-14.0.1.dist-info/WHEEL
 create mode 100644 third_party/python/glean_parser/glean_parser-14.0.1.dist-info/entry_points.txt
 create mode 100644 third_party/python/glean_parser/glean_parser-14.0.1.dist-info/top_level.txt
 delete mode 100644 third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.3.0.dist-info/LICENSE
 delete mode 100644 third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.3.0.dist-info/METADATA
 delete mode 100644 third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.3.0.dist-info/RECORD
 delete mode 100644 third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.3.0.dist-info/WHEEL
 delete mode 100644 third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.3.0.dist-info/entry_points.txt
 delete mode 100644 third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.3.0.dist-info/top_level.txt
 create mode 100644 third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-8.0.1.dist-info/LICENSE
create mode 100644 third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-8.0.1.dist-info/METADATA create mode 100644 third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-8.0.1.dist-info/RECORD create mode 100644 third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-8.0.1.dist-info/WHEEL create mode 100644 third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-8.0.1.dist-info/entry_points.txt create mode 100644 third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-8.0.1.dist-info/top_level.txt delete mode 100644 third_party/python/taskcluster_taskgraph/taskgraph/files_changed.py delete mode 100644 third_party/python/taskcluster_taskgraph/taskgraph/transforms/job/__init__.py delete mode 100644 third_party/python/taskcluster_taskgraph/taskgraph/transforms/job/common.py delete mode 100644 third_party/python/taskcluster_taskgraph/taskgraph/transforms/job/index_search.py delete mode 100644 third_party/python/taskcluster_taskgraph/taskgraph/transforms/job/run_task.py delete mode 100644 third_party/python/taskcluster_taskgraph/taskgraph/transforms/job/toolchain.py create mode 100644 third_party/python/taskcluster_taskgraph/taskgraph/transforms/run/__init__.py create mode 100644 third_party/python/taskcluster_taskgraph/taskgraph/transforms/run/common.py create mode 100644 third_party/python/taskcluster_taskgraph/taskgraph/transforms/run/index_search.py create mode 100644 third_party/python/taskcluster_taskgraph/taskgraph/transforms/run/run_task.py create mode 100644 third_party/python/taskcluster_taskgraph/taskgraph/transforms/run/toolchain.py delete mode 100644 third_party/python/taskcluster_taskgraph/taskgraph/util/decision.py create mode 100644 third_party/python/taskcluster_taskgraph/taskgraph/util/set_name.py (limited to 'third_party/python') diff --git a/third_party/python/glean_parser/glean_parser-13.0.1.dist-info/AUTHORS.md b/third_party/python/glean_parser/glean_parser-13.0.1.dist-info/AUTHORS.md deleted file mode 100644 index 525116ee7e..0000000000 --- a/third_party/python/glean_parser/glean_parser-13.0.1.dist-info/AUTHORS.md +++ /dev/null @@ -1,17 +0,0 @@ -# Credits - -## Development Lead - -- Jan-Erik Rediger -- Alessio Placitelli - -## Contributors - -See [the full list of contributors](https://github.com/mozilla/glean_parser/graphs/contributors). - -## Acknowledgements - -This package was created with -[Cookiecutter](https://github.com/audreyr/cookiecutter) and the -[audreyr/cookiecutter-pypackage](https://github.com/audreyr/cookiecutter-pypackage) -project template. diff --git a/third_party/python/glean_parser/glean_parser-13.0.1.dist-info/LICENSE b/third_party/python/glean_parser/glean_parser-13.0.1.dist-info/LICENSE deleted file mode 100644 index a612ad9813..0000000000 --- a/third_party/python/glean_parser/glean_parser-13.0.1.dist-info/LICENSE +++ /dev/null @@ -1,373 +0,0 @@ -Mozilla Public License Version 2.0 -================================== - -1. Definitions --------------- - -1.1. "Contributor" - means each individual or legal entity that creates, contributes to - the creation of, or owns Covered Software. - -1.2. "Contributor Version" - means the combination of the Contributions of others (if any) used - by a Contributor and that particular Contributor's Contribution. - -1.3. "Contribution" - means Covered Software of a particular Contributor. - -1.4. 
"Covered Software" - means Source Code Form to which the initial Contributor has attached - the notice in Exhibit A, the Executable Form of such Source Code - Form, and Modifications of such Source Code Form, in each case - including portions thereof. - -1.5. "Incompatible With Secondary Licenses" - means - - (a) that the initial Contributor has attached the notice described - in Exhibit B to the Covered Software; or - - (b) that the Covered Software was made available under the terms of - version 1.1 or earlier of the License, but not also under the - terms of a Secondary License. - -1.6. "Executable Form" - means any form of the work other than Source Code Form. - -1.7. "Larger Work" - means a work that combines Covered Software with other material, in - a separate file or files, that is not Covered Software. - -1.8. "License" - means this document. - -1.9. "Licensable" - means having the right to grant, to the maximum extent possible, - whether at the time of the initial grant or subsequently, any and - all of the rights conveyed by this License. - -1.10. "Modifications" - means any of the following: - - (a) any file in Source Code Form that results from an addition to, - deletion from, or modification of the contents of Covered - Software; or - - (b) any new file in Source Code Form that contains any Covered - Software. - -1.11. "Patent Claims" of a Contributor - means any patent claim(s), including without limitation, method, - process, and apparatus claims, in any patent Licensable by such - Contributor that would be infringed, but for the grant of the - License, by the making, using, selling, offering for sale, having - made, import, or transfer of either its Contributions or its - Contributor Version. - -1.12. "Secondary License" - means either the GNU General Public License, Version 2.0, the GNU - Lesser General Public License, Version 2.1, the GNU Affero General - Public License, Version 3.0, or any later versions of those - licenses. - -1.13. "Source Code Form" - means the form of the work preferred for making modifications. - -1.14. "You" (or "Your") - means an individual or a legal entity exercising rights under this - License. For legal entities, "You" includes any entity that - controls, is controlled by, or is under common control with You. For - purposes of this definition, "control" means (a) the power, direct - or indirect, to cause the direction or management of such entity, - whether by contract or otherwise, or (b) ownership of more than - fifty percent (50%) of the outstanding shares or beneficial - ownership of such entity. - -2. License Grants and Conditions --------------------------------- - -2.1. Grants - -Each Contributor hereby grants You a world-wide, royalty-free, -non-exclusive license: - -(a) under intellectual property rights (other than patent or trademark) - Licensable by such Contributor to use, reproduce, make available, - modify, display, perform, distribute, and otherwise exploit its - Contributions, either on an unmodified basis, with Modifications, or - as part of a Larger Work; and - -(b) under Patent Claims of such Contributor to make, use, sell, offer - for sale, have made, import, and otherwise transfer either its - Contributions or its Contributor Version. - -2.2. Effective Date - -The licenses granted in Section 2.1 with respect to any Contribution -become effective for each Contribution on the date the Contributor first -distributes such Contribution. - -2.3. 
Limitations on Grant Scope - -The licenses granted in this Section 2 are the only rights granted under -this License. No additional rights or licenses will be implied from the -distribution or licensing of Covered Software under this License. -Notwithstanding Section 2.1(b) above, no patent license is granted by a -Contributor: - -(a) for any code that a Contributor has removed from Covered Software; - or - -(b) for infringements caused by: (i) Your and any other third party's - modifications of Covered Software, or (ii) the combination of its - Contributions with other software (except as part of its Contributor - Version); or - -(c) under Patent Claims infringed by Covered Software in the absence of - its Contributions. - -This License does not grant any rights in the trademarks, service marks, -or logos of any Contributor (except as may be necessary to comply with -the notice requirements in Section 3.4). - -2.4. Subsequent Licenses - -No Contributor makes additional grants as a result of Your choice to -distribute the Covered Software under a subsequent version of this -License (see Section 10.2) or under the terms of a Secondary License (if -permitted under the terms of Section 3.3). - -2.5. Representation - -Each Contributor represents that the Contributor believes its -Contributions are its original creation(s) or it has sufficient rights -to grant the rights to its Contributions conveyed by this License. - -2.6. Fair Use - -This License is not intended to limit any rights You have under -applicable copyright doctrines of fair use, fair dealing, or other -equivalents. - -2.7. Conditions - -Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted -in Section 2.1. - -3. Responsibilities -------------------- - -3.1. Distribution of Source Form - -All distribution of Covered Software in Source Code Form, including any -Modifications that You create or to which You contribute, must be under -the terms of this License. You must inform recipients that the Source -Code Form of the Covered Software is governed by the terms of this -License, and how they can obtain a copy of this License. You may not -attempt to alter or restrict the recipients' rights in the Source Code -Form. - -3.2. Distribution of Executable Form - -If You distribute Covered Software in Executable Form then: - -(a) such Covered Software must also be made available in Source Code - Form, as described in Section 3.1, and You must inform recipients of - the Executable Form how they can obtain a copy of such Source Code - Form by reasonable means in a timely manner, at a charge no more - than the cost of distribution to the recipient; and - -(b) You may distribute such Executable Form under the terms of this - License, or sublicense it under different terms, provided that the - license for the Executable Form does not attempt to limit or alter - the recipients' rights in the Source Code Form under this License. - -3.3. Distribution of a Larger Work - -You may create and distribute a Larger Work under terms of Your choice, -provided that You also comply with the requirements of this License for -the Covered Software. 
If the Larger Work is a combination of Covered -Software with a work governed by one or more Secondary Licenses, and the -Covered Software is not Incompatible With Secondary Licenses, this -License permits You to additionally distribute such Covered Software -under the terms of such Secondary License(s), so that the recipient of -the Larger Work may, at their option, further distribute the Covered -Software under the terms of either this License or such Secondary -License(s). - -3.4. Notices - -You may not remove or alter the substance of any license notices -(including copyright notices, patent notices, disclaimers of warranty, -or limitations of liability) contained within the Source Code Form of -the Covered Software, except that You may alter any license notices to -the extent required to remedy known factual inaccuracies. - -3.5. Application of Additional Terms - -You may choose to offer, and to charge a fee for, warranty, support, -indemnity or liability obligations to one or more recipients of Covered -Software. However, You may do so only on Your own behalf, and not on -behalf of any Contributor. You must make it absolutely clear that any -such warranty, support, indemnity, or liability obligation is offered by -You alone, and You hereby agree to indemnify every Contributor for any -liability incurred by such Contributor as a result of warranty, support, -indemnity or liability terms You offer. You may include additional -disclaimers of warranty and limitations of liability specific to any -jurisdiction. - -4. Inability to Comply Due to Statute or Regulation ---------------------------------------------------- - -If it is impossible for You to comply with any of the terms of this -License with respect to some or all of the Covered Software due to -statute, judicial order, or regulation then You must: (a) comply with -the terms of this License to the maximum extent possible; and (b) -describe the limitations and the code they affect. Such description must -be placed in a text file included with all distributions of the Covered -Software under this License. Except to the extent prohibited by statute -or regulation, such description must be sufficiently detailed for a -recipient of ordinary skill to be able to understand it. - -5. Termination --------------- - -5.1. The rights granted under this License will terminate automatically -if You fail to comply with any of its terms. However, if You become -compliant, then the rights granted under this License from a particular -Contributor are reinstated (a) provisionally, unless and until such -Contributor explicitly and finally terminates Your grants, and (b) on an -ongoing basis, if such Contributor fails to notify You of the -non-compliance by some reasonable means prior to 60 days after You have -come back into compliance. Moreover, Your grants from a particular -Contributor are reinstated on an ongoing basis if such Contributor -notifies You of the non-compliance by some reasonable means, this is the -first time You have received notice of non-compliance with this License -from such Contributor, and You become compliant prior to 30 days after -Your receipt of the notice. - -5.2. 
If You initiate litigation against any entity by asserting a patent -infringement claim (excluding declaratory judgment actions, -counter-claims, and cross-claims) alleging that a Contributor Version -directly or indirectly infringes any patent, then the rights granted to -You by any and all Contributors for the Covered Software under Section -2.1 of this License shall terminate. - -5.3. In the event of termination under Sections 5.1 or 5.2 above, all -end user license agreements (excluding distributors and resellers) which -have been validly granted by You or Your distributors under this License -prior to termination shall survive termination. - -************************************************************************ -* * -* 6. Disclaimer of Warranty * -* ------------------------- * -* * -* Covered Software is provided under this License on an "as is" * -* basis, without warranty of any kind, either expressed, implied, or * -* statutory, including, without limitation, warranties that the * -* Covered Software is free of defects, merchantable, fit for a * -* particular purpose or non-infringing. The entire risk as to the * -* quality and performance of the Covered Software is with You. * -* Should any Covered Software prove defective in any respect, You * -* (not any Contributor) assume the cost of any necessary servicing, * -* repair, or correction. This disclaimer of warranty constitutes an * -* essential part of this License. No use of any Covered Software is * -* authorized under this License except under this disclaimer. * -* * -************************************************************************ - -************************************************************************ -* * -* 7. Limitation of Liability * -* -------------------------- * -* * -* Under no circumstances and under no legal theory, whether tort * -* (including negligence), contract, or otherwise, shall any * -* Contributor, or anyone who distributes Covered Software as * -* permitted above, be liable to You for any direct, indirect, * -* special, incidental, or consequential damages of any character * -* including, without limitation, damages for lost profits, loss of * -* goodwill, work stoppage, computer failure or malfunction, or any * -* and all other commercial damages or losses, even if such party * -* shall have been informed of the possibility of such damages. This * -* limitation of liability shall not apply to liability for death or * -* personal injury resulting from such party's negligence to the * -* extent applicable law prohibits such limitation. Some * -* jurisdictions do not allow the exclusion or limitation of * -* incidental or consequential damages, so this exclusion and * -* limitation may not apply to You. * -* * -************************************************************************ - -8. Litigation -------------- - -Any litigation relating to this License may be brought only in the -courts of a jurisdiction where the defendant maintains its principal -place of business and such litigation shall be governed by laws of that -jurisdiction, without reference to its conflict-of-law provisions. -Nothing in this Section shall prevent a party's ability to bring -cross-claims or counter-claims. - -9. Miscellaneous ----------------- - -This License represents the complete agreement concerning the subject -matter hereof. If any provision of this License is held to be -unenforceable, such provision shall be reformed only to the extent -necessary to make it enforceable. 
Any law or regulation which provides -that the language of a contract shall be construed against the drafter -shall not be used to construe this License against a Contributor. - -10. Versions of the License ---------------------------- - -10.1. New Versions - -Mozilla Foundation is the license steward. Except as provided in Section -10.3, no one other than the license steward has the right to modify or -publish new versions of this License. Each version will be given a -distinguishing version number. - -10.2. Effect of New Versions - -You may distribute the Covered Software under the terms of the version -of the License under which You originally received the Covered Software, -or under the terms of any subsequent version published by the license -steward. - -10.3. Modified Versions - -If you create software not governed by this License, and you want to -create a new license for such software, you may create and use a -modified version of this License if you rename the license and remove -any references to the name of the license steward (except to note that -such modified license differs from this License). - -10.4. Distributing Source Code Form that is Incompatible With Secondary -Licenses - -If You choose to distribute Source Code Form that is Incompatible With -Secondary Licenses under the terms of this version of the License, the -notice described in Exhibit B of this License must be attached. - -Exhibit A - Source Code Form License Notice -------------------------------------------- - - This Source Code Form is subject to the terms of the Mozilla Public - License, v. 2.0. If a copy of the MPL was not distributed with this - file, You can obtain one at http://mozilla.org/MPL/2.0/. - -If it is not possible or desirable to put the notice in a particular -file, then You may include the notice in a location (such as a LICENSE -file in a relevant directory) where a recipient would be likely to look -for such a notice. - -You may add additional accurate notices of copyright ownership. - -Exhibit B - "Incompatible With Secondary Licenses" Notice ---------------------------------------------------------- - - This Source Code Form is "Incompatible With Secondary Licenses", as - defined by the Mozilla Public License, v. 2.0. 
diff --git a/third_party/python/glean_parser/glean_parser-13.0.1.dist-info/METADATA b/third_party/python/glean_parser/glean_parser-13.0.1.dist-info/METADATA
deleted file mode 100644
index 0bab2150ba..0000000000
--- a/third_party/python/glean_parser/glean_parser-13.0.1.dist-info/METADATA
+++ /dev/null
@@ -1,790 +0,0 @@
-Metadata-Version: 2.1
-Name: glean-parser
-Version: 13.0.1
-Summary: Parser tools for Mozilla's Glean telemetry
-Home-page: https://github.com/mozilla/glean_parser
-Author: The Glean Team
-Author-email: glean-team@mozilla.com
-Keywords: glean_parser
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Intended Audience :: Developers
-Classifier: Natural Language :: English
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.8
-Classifier: Programming Language :: Python :: 3.9
-Classifier: Programming Language :: Python :: 3.10
-Classifier: Programming Language :: Python :: 3.11
-Classifier: Programming Language :: Python :: 3.12
-Description-Content-Type: text/markdown
-License-File: LICENSE
-License-File: AUTHORS.md
-Requires-Dist: appdirs >=1.4
-Requires-Dist: Click >=7
-Requires-Dist: diskcache >=4
-Requires-Dist: Jinja2 >=2.10.1
-Requires-Dist: jsonschema >=3.0.2
-Requires-Dist: PyYAML >=5.3.1
-
-# Glean Parser
-
-Parser tools for Mozilla's Glean telemetry.
-
-## Features
-
-Contains various utilities for handling `metrics.yaml` and `pings.yaml` for [the
-Glean SDKs](https://mozilla.github.io/glean). This includes producing generated
-code for various integrations, linting and coverage testing.
-
-## Documentation
-
-- [How to Contribute](https://github.com/mozilla/glean_parser/blob/main/CONTRIBUTING.md). Please file bugs in [bugzilla](https://bugzilla.mozilla.org/enter_bug.cgi?assigned_to=nobody%40mozilla.org&bug_ignored=0&bug_severity=normal&bug_status=NEW&cf_fission_milestone=---&cf_fx_iteration=---&cf_fx_points=---&cf_status_firefox65=---&cf_status_firefox66=---&cf_status_firefox67=---&cf_status_firefox_esr60=---&cf_status_thunderbird_esr60=---&cf_tracking_firefox65=---&cf_tracking_firefox66=---&cf_tracking_firefox67=---&cf_tracking_firefox_esr60=---&cf_tracking_firefox_relnote=---&cf_tracking_thunderbird_esr60=---&product=Data%20Platform%20and%20Tools&component=Glean%3A%20SDK&contenttypemethod=list&contenttypeselection=text%2Fplain&defined_groups=1&flag_type-203=X&flag_type-37=X&flag_type-41=X&flag_type-607=X&flag_type-721=X&flag_type-737=X&flag_type-787=X&flag_type-799=X&flag_type-800=X&flag_type-803=X&flag_type-835=X&flag_type-846=X&flag_type-855=X&flag_type-864=X&flag_type-916=X&flag_type-929=X&flag_type-930=X&flag_type-935=X&flag_type-936=X&flag_type-937=X&form_name=enter_bug&maketemplate=Remember%20values%20as%20bookmarkable%20template&op_sys=Unspecified&priority=P3&&rep_platform=Unspecified&status_whiteboard=%5Btelemetry%3Aglean-rs%3Am%3F%5D&target_milestone=---&version=unspecified).
-- [User documentation for Glean](https://mozilla.github.io/glean/).
-- [`glean_parser` developer documentation](https://mozilla.github.io/glean_parser/).
-
-## Requirements
-
-- Python 3.8 (or later)
-
-The following library requirements are installed automatically when
-`glean_parser` is installed by `pip`.
-
-- appdirs
-- Click
-- diskcache
-- Jinja2
-- jsonschema
-- PyYAML
-
-## Usage
-
-```sh
-$ glean_parser --help
-```
-
-Read in `metrics.yaml`, translate to Kotlin format, and
-output to `output_dir`:
-
-```sh
-$ glean_parser translate -o output_dir -f kotlin metrics.yaml
-```
-
-Check a Glean ping against the ping schema:
-
-```sh
-$ glean_parser check < ping.json
-```
-
-
-# Changelog
-
-## Unreleased
-
-## 13.0.1
-
-- Use faster C yaml parser if available ([#677](https://github.com/mozilla/glean_parser/pull/677))
-
-## 13.0.0
-
-- BREAKING CHANGE: Support metadata field `include_info_sections` ([bug 1866559](https://bugzilla.mozilla.org/show_bug.cgi?id=1866559))
-
-## 12.0.1
-
-- Fix Rust codegen for object metric type ([#662](https://github.com/mozilla/glean_parser/pull/662))
-
-## 12.0.0
-
-- Add new metric type object (only Rust codegen support right now) ([#587](https://github.com/mozilla/glean_parser/pull/587))
-
-## 11.1.0
-
-- Add Go log outputter (`go_server`) ([#645](https://github.com/mozilla/glean_parser/pull/645))
-- Add Python log outputter (`python_server`) ([MPP-3642](https://mozilla-hub.atlassian.net/browse/MPP-3642))
-
-## 11.0.1
-
-- Fix javascript_server template to include non-event metric parameters in #record call for event metrics ([#643](https://github.com/mozilla/glean_parser/pull/643))
-- events: Increase extra key limit to 50 ([Bug 1869429](https://bugzilla.mozilla.org/show_bug.cgi?id=1869429))
-
-## 11.0.0
-
-- Add updated logging logic for Ruby Server ([#642](https://github.com/mozilla/glean_parser/pull/642))
-- Add support for event metric type in server-side JavaScript outputter ([DENG-1736](https://mozilla-hub.atlassian.net/browse/DENG-1736))
-- BREAKING CHANGE: Dropped support for Python 3.7 ([#638](https://github.com/mozilla/glean_parser/pull/638))
-- Add official support for Python 3.11+ ([#638](https://github.com/mozilla/glean_parser/pull/638))
-
-## 10.0.3
-
-- Warn about empty or TODO-tagged data reviews in the list ([#634](https://github.com/mozilla/glean_parser/pull/634))
-- Allow `unit` field on all metrics, but warn for all but quantity and custom distribution ([#636](https://github.com/mozilla/glean_parser/pull/636))
-
-## 10.0.2
-
-- Allow `unit` field for string again, but warn about it in the linter ([#634](https://github.com/mozilla/glean_parser/pull/634))
-
-## 10.0.1
-
-- Allow `unit` field for custom distribution again ([#633](https://github.com/mozilla/glean_parser/pull/633))
-
-## 10.0.0
-
-- Add Ruby log outputter (`ruby_server`) ([#620](https://github.com/mozilla/glean_parser/pull/620))
-- BREAKING CHANGE: `ping` lifetime metrics on the events ping are now disallowed ([#625](https://github.com/mozilla/glean_parser/pull/625))
-- Disallow `unit` field for anything but quantity ([#630](https://github.com/mozilla/glean_parser/pull/630)).
-  Note that this was already considered the case, now the code enforces it.
-
-## 9.0.0
-
-- BREAKING CHANGE: Dropped support for Python 3.6 ([#615](https://github.com/mozilla/glean_parser/issues/615))
-- Allow metadata to configure precise timestamps in pings ([#592](https://github.com/mozilla/glean_parser/pull/592))
-
-## 8.1.1
-
-- Small updates to the `javascript_server` template to address lint warnings ([#598](https://github.com/mozilla/glean_parser/pull/598))
-
-## 8.1.0
-
-- Increased the maximum metric name length in version 2.0.0 schema ([#596](https://github.com/mozilla/glean_parser/pull/596))
-
-## 8.0.0
-
-- BREAKING CHANGE: Remove exposed `lint_yaml_files` function ([#580](https://github.com/mozilla/glean_parser/pull/580))
-- Rust: Removed `__glean_metric_maps` from the Rust Jinja template. This functionality is better placed downstream ([Bug 1816526](https://bugzilla.mozilla.org/show_bug.cgi?id=1816526))
-- New lint: check that all referenced pings are known ([#584](https://github.com/mozilla/glean_parser/pull/584))
-- Add experimental server-side JavaScript outputter ([FXA-7922](https://mozilla-hub.atlassian.net/browse/FXA-7922))
-
-## 7.2.1
-
-- Unbreak last minor release ([#579](https://github.com/mozilla/glean_parser/pull/579))
-
-## 7.2.0
-
-- Remove yamllint integration ([#578](https://github.com/mozilla/glean_parser/pull/578))
-
-## 7.1.0
-
-- ENHANCEMENT: Labels in `labels:` fields may now contain any printable ASCII characters ([bug 1672273](https://bugzilla.mozilla.org/show_bug.cgi?id=1672273))
-- BUGFIX: Enforce ordering of generation of Pings, Metrics and Tags such that order is deterministic ([bug 1820334](https://bugzilla.mozilla.org/show_bug.cgi?id=1820334))
-
-## 7.0.0
-
-- BUGFIX: Remove internal-only fields from serialized metrics data ([#550](https://github.com/mozilla/glean_parser/pull/550))
-- FEATURE: New subcommand: `dump` to dump the metrics data as JSON ([#550](https://github.com/mozilla/glean_parser/pull/550))
-- BUGFIX: Kotlin: Generate enums with the right generic bound for ping reason codes ([#551](https://github.com/mozilla/glean_parser/pull/551)).
-- **BREAKING CHANGE:** Fully remove support for the old events API ([#549](https://github.com/mozilla/glean_parser/pull/549))
-  Adds a new lint `OLD_EVENT_API` to warn about missing `type` attributes on event extra keys.
-  Note that the Glean SDK already dropped support for the old events API.
-
-## 6.4.0
-
-- BUGFIX: Correct code generation for labeled metrics in Rust ([#533](https://github.com/mozilla/glean_parser/pull/533))
-- BUGFIX: Correctly serialize `Rates` for Rust code ([#530](https://github.com/mozilla/glean_parser/pull/530))
-- Feature: Wrap labeled metric's static labels list as CoW strings (requires updated Glean support) ([#534](https://github.com/mozilla/glean_parser/pull/534))
-
-## 6.3.0
-
-- events: Increase extras limit to 15 ([bug 1798713](https://bugzilla.mozilla.org/show_bug.cgi?id=1798713))
-
-## 6.2.1
-
-- Add support for Rate, Denominator and Numerator metrics for JavaScript. ([bug 1793777](https://bugzilla.mozilla.org/show_bug.cgi?id=1793777))
-
-## 6.2.0
-
-- [data-review] Use a template to generate the Data Review Request template ([bug 1772605](https://bugzilla.mozilla.org/show_bug.cgi?id=1772605))
-- Make tag and no\_lint order deterministic ([#518](https://github.com/mozilla/glean_parser/pull/518))
-
-## 6.1.2
-
-- Swift: Add a conditional `import Foundation` to support generating metrics when Glean is delivered via the AppServices iOS megazord
-
-## 6.1.1
-
-- Rust: Use correct name for a ping in generated code.
-
-## 6.1.0
-
-- [data-review] Include extra keys' names and descriptions in data review template ([bug 1767027](https://bugzilla.mozilla.org/show_bug.cgi?id=1767027))
-- Raise limit on number of statically-defined labels to 4096. ([bug 1772163](https://bugzilla.mozilla.org/show_bug.cgi?id=1772163))
-- Fix Rust code generation for new UniFFI interface ([#491](https://github.com/mozilla/glean_parser/pull/491), [#494](https://github.com/mozilla/glean_parser/pull/494), [#495](https://github.com/mozilla/glean_parser/pull/495))
-
-## 6.0.1
-
-- Relax version requirement for MarkupSafe.
-  Now works with MarkupSafe v1.1.1 to v2.0.1 inclusive again.
-
-## 6.0.0
-
-- BUGFIX: Add missing `extra_args` to Rust constructor generation ([bug 1765855](https://bugzilla.mozilla.org/show_bug.cgi?id=1765855))
-- **Breaking change:** `glean_parser` now generates metrics compatible with the UniFFI-powered Glean SDK.
-  This is not backwards-compatible with previous versions.
-- Generate Rate, Denominator and Numerator metrics for Kotlin and Swift
-- Explicitly skip Rate, Denominator and Numerator metrics for JavaScript.
-  These will cause a build failure by default, but can be turned into warnings on request.
-  Use `-s fail_rates=false` to enable warning-only mode.
-
-## 5.1.2
-
-- BUGFIX: Revert changes made on v5.1.1.
-
-  The issues addressed by those changes were non-issues and a result of misuse of the APIs.
-
-## 5.1.1
-
-- BUGFIX: Fix issues with Swift templates ([bug 1749494](https://bugzilla.mozilla.org/show_bug.cgi?id=1749494))
-  - Make metrics and pings all `public`
-  - Make pings `static`
-
-## 5.1.0
-
-- Add support for build info generation for JavaScript and Typescript targets ([bug 1749494](https://bugzilla.mozilla.org/show_bug.cgi?id=1749494))
-
-## 5.0.1
-
-- Fix the logic for the metric expiration by version ([bug 1753194](https://bugzilla.mozilla.org/show_bug.cgi?id=1753194))
-
-## 5.0.0
-
-- Remove C# support ([#436](https://github.com/mozilla/glean_parser/pull/436)).
-- Add support for Rust code generation ([bug 1677434](https://bugzilla.mozilla.org/show_bug.cgi?id=1677434))
-- Report an error if no files are passed ([bug 1751730](https://bugzilla.mozilla.org/show_bug.cgi?id=1751730))
-- [data-review] Report an error if no metrics match provided bug number ([bug 1752576](https://bugzilla.mozilla.org/show_bug.cgi?id=1752576))
-- [data-review] Include notification_emails in list of those responsible ([bug 1752576](https://bugzilla.mozilla.org/show_bug.cgi?id=1752576))
-- Add support for expiring metrics by the provided major version ([bug 1753194](https://bugzilla.mozilla.org/show_bug.cgi?id=1753194))
-
-## 4.4.0
-
-- Support global file-level tags in metrics.yaml ([bug 1745283](https://bugzilla.mozilla.org/show_bug.cgi?id=1745283))
-- Glinter: Reject metric files if they use `unit` by mistake. It should be `time_unit` ([#432](https://github.com/mozilla/glean_parser/pull/432)).
-- Automatically generate a build date when generating build info ([#431](https://github.com/mozilla/glean_parser/pull/431)).
-  Enabled for Kotlin and Swift.
-  This can be changed with the `build_date` command line option.
-  `build_date=0` will use a static unix epoch time.
-  `build_date=2022-01-03T17:30:00` will parse the ISO8601 string to use (as a UTC timestamp).
-  Other values will throw an error.
-
-  Example:
-
-      glean_parser translate --format kotlin --option build_date=2021-11-01T01:00:00 path/to/metrics.yaml
-
-## 4.3.1
-
-- BUGFIX: Skip tags for code generation ([#409](https://github.com/mozilla/glean_parser/pull/409))
-
-## 4.3.0
-
-- Support tags in glean parser ([bug 1734011](https://bugzilla.mozilla.org/show_bug.cgi?id=1734011))
-
-## 4.2.0
-
-- Improve the schema validation error messages. They will no longer include `OrderedDict(...)` on Python 3.7 and later ([bug 1733395](https://bugzilla.mozilla.org/show_bug.cgi?id=1733395))
-- Officially support Python 3.10
-
-## 4.1.1 (2021-09-28)
-
-- Update private import paths on Javascript / Typescript templates. ([bug 1702468](https://bugzilla.mozilla.org/show_bug.cgi?id=1702468))
-
-## 4.1.0 (2021-09-16)
-
-- Add support for Node.js platform on Javascript / Typescript templates. ([bug 1728982](https://bugzilla.mozilla.org/show_bug.cgi?id=1728982))
-
-## 4.0.0 (2021-08-20)
-
-- Add support for Text metric type ([#374](https://github.com/mozilla/glean_parser/pull/374))
-- Reserve the `default` ping name. It can't be used as a ping name, but it can be used in `send_in_pings` ([#376](https://github.com/mozilla/glean_parser/pull/376))
-
-## 3.8.0 (2021-08-18)
-
-- Expose ping reasons enum on JavaScript / TypeScript templates. ([bug 1719136](https://bugzilla.mozilla.org/show_bug.cgi?id=1719136))
-- Define an interface with the allowed extras for each event on the TypeScript template. ([bug 1693487](https://bugzilla.mozilla.org/show_bug.cgi?id=1693487))
-
-## 3.7.0 (2021-07-13)
-
-- New lint: Check for redundant words in ping names ([#355](https://github.com/mozilla/glean_parser/pull/355))
-- Add support for URL metric type ([#361](https://github.com/mozilla/glean_parser/pull/361))
-
-## 3.6.0 (2021-06-11)
-
-- Add a command `data-review` to generate a skeleton Data Review Request for all metrics matching a supplied bug number. ([bug 1704541](https://bugzilla.mozilla.org/show_bug.cgi?id=1704541))
-- Enable custom distribution outside of GeckoView (`gecko_datapoint` becomes optional)
-
-## 3.5.0 (2021-06-03)
-
-- Transform generated folder into QML Module when building Javascript templates for the Qt platform. ([bug 1707896](https://bugzilla.mozilla.org/show_bug.cgi?id=1707896))
-  - Import the Glean QML module from inside each generated file, removing the requirement to import Glean before importing any of the generated files;
-  - Provide a `qmldir` file exposing all generated files;
-  - Drop the `namespace` option for Javascript templates;
-  - Add a new `version` option for Javascript templates, required when building for Qt, which expects the Glean QML module version.
-
-## 3.4.0 (2021-05-28)
-
-- Add missing import for Kotlin code ([#339](https://github.com/mozilla/glean_parser/pull/339))
-- Use a plain Kotlin type in the generated interface implementation ([#339](https://github.com/mozilla/glean_parser/pull/339))
-- Generate additional generics for event metrics ([#339](https://github.com/mozilla/glean_parser/pull/339))
-- For Kotlin skip generating `GleanBuildInfo.kt` when requested (with `with_buildinfo=false`) ([#341](https://github.com/mozilla/glean_parser/pull/341))
-
-## 3.3.2 (2021-05-18)
-
-- Fix another bug in the Swift code generation when generating extra keys ([#334](https://github.com/mozilla/glean_parser/pull/334))
-
-## 3.3.1 (2021-05-18)
-
-- Fix Swift code generation bug for pings ([#333](https://github.com/mozilla/glean_parser/pull/333))
-
-## 3.3.0 (2021-05-18)
-
-- Generate new event API construct ([#321](https://github.com/mozilla/glean_parser/pull/321))
-
-## 3.2.0 (2021-04-28)
-
-- Add option to add extra introductory text to generated markdown ([#298](https://github.com/mozilla/glean_parser/pull/298))
-- Add support for Qt in Javascript templates ([bug 1706252](https://bugzilla.mozilla.org/show_bug.cgi?id=1706252))
-  - Javascript templates will now accept the `platform` option. If this option is set to `qt`
-    the generated templates will be Qt compatible. Default value is `webext`.
-
-## 3.1.2 (2021-04-21)
-
-- BUGFIX: Remove the "DO NOT COMMIT" notice from the documentation.
-
-## 3.1.1 (2021-04-19)
-
-- Recommend to not commit as well as to not edit the generated files. ([bug 1706042](https://bugzilla.mozilla.org/show_bug.cgi?id=1706042))
-- BUGFIX: Include import statement for labeled metric subtypes in Javascript and Typescript templates.
-
-## 3.1.0 (2021-04-16)
-
-- Add support for labeled metric types in Javascript and Typescript templates.
-
-## 3.0.0 (2021-04-13)
-
-- Raise limit on number of statically-defined labels to 100. ([bug 1702263](https://bugzilla.mozilla.org/show_bug.cgi?id=1702263))
-- BUGFIX: Version 2.0.0 of the schema now allows the "special" `glean_.*` ping names for Glean-internal use again.
-- Remove support for JWE metric types.
-
-## 2.5.0 (2021-02-23)
-
-- Add parser and object model support for `rate` metric type. ([bug 1645166](https://bugzilla.mozilla.org/show_bug.cgi?id=1645166))
-- Add parser and object model support for telemetry_mirror property. ([bug 1685406](https://bugzilla.mozilla.org/show_bug.cgi?id=1685406))
-- Update the Javascript template to match Glean.js expectations. ([bug 1693516](https://bugzilla.mozilla.org/show_bug.cgi?id=1693516))
-  - Glean.js has updated its export strategy. It will now export each metric type as an independent module;
-  - Glean.js has dropped support for non ES6 modules.
-- Add support for generating Typescript code. ([bug 1692157](https://bugzilla.mozilla.org/show_bug.cgi?id=1692157))
-  - The templates added generate metrics and pings code for Glean.js.
-
-## 2.4.0 (2021-02-18)
-
-- **Experimental:** `glean_parser` has a new subcommand `coverage` to convert raw coverage reports
-  into something consumable by coverage tools, such as codecov.io
-- The path to the file that each metric is defined in is now stored on the
-  `Metric` object in `defined_in["filepath"]`.
-
-## 2.3.0 (2021-02-17)
-
-- Leverage the `glean_namespace` to provide correct import when building for Javascript.
-
-## 2.2.0 (2021-02-11)
-
-- The Kotlin generator now generates static build information that can be passed
-  into `Glean.initialize` to avoid calling the package manager at runtime.
-
-## 2.1.0 (2021-02-10)
-
-- Add support for generating Javascript code.
-  - The templates added generate metrics and pings code for Glean.js.
-
-## 2.0.0 (2021-02-05)
-
-- New versions 2.0.0 of the `metrics.yaml` and `pings.yaml` schemas now ship
-  with `glean_parser`. These schemas are different from version 1.0.0 in the
-  following ways:
-
-  - Bugs must be specified as URLs. Bug numbers are disallowed.
-  - The legacy ping names containing underscores are no longer allowed. These
-    included `deletion_request`, `bookmarks_sync`, `history_sync`,
-    `session_end`, `all_pings`, `glean_*`. In these cases, the `_` should be
-    replaced with `-`.
-
-  To upgrade your app or library to use the new schema, replace the version in
-  the `$schema` value with `2-0-0`.
-
-- **Breaking change:** It is now an error to use bug numbers (rather than URLs)
-  in ping definitions.
-
-- Add the line number that metrics and pings were originally defined in the yaml
-  files.
-
-## 1.29.1 (2020-12-17)
-
-- BUGFIX: Linter output can now be redirected correctly (1675771).
-
-## 1.29.0 (2020-10-07)
-
-- **Breaking change:** `glean_parser` will now return an error code when any of
-  the input files do not exist (unless the `--allow-missing-files` flag is
-  passed).
-- Generated code now includes a comment next to each metric containing the name
-  of the metric in its original `snake_case` form.
-- When metrics don't provide a `unit` parameter, it is not included in the
-  output (as provided by probe-scraper).
-
-## 1.28.6 (2020-09-24)
-
-- BUGFIX: Ensure Kotlin arguments are deterministically ordered
-
-## 1.28.5 (2020-09-14)
-
-- Fix deploy step to update pip before deploying to pypi.
-
-## 1.28.4 (2020-09-14)
-
-- The `SUPERFLUOUS_NO_LINT` warning has been removed from the glinter.
-  It likely did more harm than good, and makes it hard to make
-  `metrics.yaml` files that pass across different versions of
-  `glean_parser`.
-- Expired metrics will now produce a linter warning, `EXPIRED_METRIC`.
-- Expiry dates that are more than 730 days (\~2 years) in the future
-  will produce a linter warning, `EXPIRATION_DATE_TOO_FAR`.
-- Allow using the Quantity metric type outside of Gecko.
-- New parser configs `custom_is_expired` and `custom_validate_expires`
-  added. These are both functions that take the `expires` value of the
-  metric and return a bool. (See `Metric.is_expired` and
-  `Metric.validate_expires`). These will allow FOG to provide custom
-  validation for its version-based `expires` values.
-
-## 1.28.3 (2020-07-28)
-
-- BUGFIX: Support HashSet and Dictionary in the C\# generated code.
-
-## 1.28.2 (2020-07-28)
-
-- BUGFIX: Generate valid C\# code when using Labeled metric types.
-
-## 1.28.1 (2020-07-24)
-
-- BUGFIX: Add missing column to correctly render markdown tables in generated
-  documentation.
-
-## 1.28.0 (2020-07-23)
-
-- **Breaking change:** The internal ping `deletion-request` was misnamed in
-  pings.py causing the linter to not allow use of the correctly named ping for
-  adding legacy ids to. Consuming apps will need to update their metrics.yaml if
-  they are using `deletion_request` in any `send_in_pings` to `deletion-request`
-  after updating.
-
-## 1.27.0 (2020-07-21)
-
-- Rename the `data_category` field to `data_sensitivity` to be clearer.
-
-## 1.26.0 (2020-07-21)
-
-- Add support for JWE metric types.
-- Add a `data_sensitivity` field to all metrics for specifying the type of data
-  collected in the field.
-
-## 1.25.0 (2020-07-17)
-
-- Add support for generating C\# code.
-- BUGFIX: The memory unit is now correctly passed to the MemoryDistribution
-  metric type in Swift.
-
-## 1.24.0 (2020-06-30)
-
-- BUGFIX: look for metrics in send\_if\_empty pings. Metrics for these kinds of
-  pings were being ignored.
-
-## 1.23.0 (2020-06-27)
-
-- Support for Python 3.5 has been dropped.
-- BUGFIX: The ordering of event extra keys will now match with their enum,
-  fixing a serious bug where keys of extras may not match the correct values in
-  the data payload. See .
-
-## 1.22.0 (2020-05-28)
-
-- **Breaking change:** (Swift only) Combine all metrics and pings into a single
-  generated file `Metrics.swift`.
-
-## 1.21.0 (2020-05-25)
-
-- `glinter` messages have been improved with more details and to be more
-  actionable.
-- A maximum of 10 `extra_keys` is now enforced for `event` metric types.
-- BUGFIX: the `Lifetime` enum values now match the values of the implementation
-  in mozilla/glean.
-
-## 1.20.4 (2020-05-07)
-
-- BUGFIX: yamllint errors are now reported using the correct file name.
-
-## 1.20.3 (2020-05-06)
-
-- Support for using `timing_distribution`'s `time_unit` parameter to control
-  the range of acceptable values is documented. The default unit for this use
-  case is `nanosecond` to avoid creating a breaking change. See [bug
-  1630997](https://bugzilla.mozilla.org/show_bug.cgi?id=1630997) for more
-  information.
-
-## 1.20.2 (2020-04-24)
-
-- Dependencies that depend on the version of Python being used are now specified
-  using the [Declaring platform specific dependencies syntax in
-  setuptools](https://setuptools.readthedocs.io/en/latest/setuptools.html#declaring-platform-specific-dependencies).
-  This means that more recent versions of dependencies are likely to be
-  installed on Python 3.6 and later, and unnecessary backport libraries won't
-  be installed on more recent Python versions.
-
-## 1.20.1 (2020-04-21)
-
-- The minimum version of the runtime dependencies has been lowered to increase
-  compatibility with other tools. These minimum versions are now tested in CI,
-  in addition to testing the latest versions of the dependencies that was
-  already happening in CI.
-
-## 1.20.0 (2020-04-15)
-
-- **Breaking change:** glinter errors found during the `translate` command will
-  now return an error code. glinter warnings will be displayed, but not return
-  an error code.
-- `glean_parser` now produces a linter warning when `user` lifetime metrics are
-  set to expire. See [bug
-  1604854](https://bugzilla.mozilla.org/show_bug.cgi?id=1604854) for additional
-  context.
-
-## 1.19.0 (2020-03-18)
-
-- **Breaking change:** The regular expression used to validate labels is
-  stricter and more correct.
-- Add more information about pings to markdown documentation:
-  - State whether the ping includes client id;
-  - Add list of data review links;
-  - Add list of related bugs links.
-- `glean_parser` now makes it easier to write external translation
-  functions for different language targets.
-- BUGFIX: `glean_parser` now works on 32-bit Windows.
-
-## 1.18.3 (2020-02-24)
-
-- Dropped the `inflection` dependency.
-- Constrained the `zipp` and `MarkupSafe` transitive dependencies to versions
-  that support Python 3.5.
-
-## 1.18.2 (2020-02-14)
-
-- BUGFIX: Fix rendering of first element of reason list.
-
-## 1.18.1 (2020-02-14)
-
-- BUGFIX: Reason codes are displayed in markdown output for built-in
-  pings as well.
-- BUGFIX: Reason descriptions are indented correctly in markdown
-  output.
-- BUGFIX: To avoid a compiler error, the `@JvmName` annotation isn't
-  added to private members.
-
-## 1.18.0 (2020-02-13)
-
-- **Breaking Change (Java API)** Have the metrics names in Java match the names
-  in Kotlin. See [Bug
-  1588060](https://bugzilla.mozilla.org/show_bug.cgi?id=1588060).
-- The reasons a ping is sent are now included in the generated markdown
-  documentation.
-
-## 1.17.3 (2020-02-05)
-
-- BUGFIX: The version of Jinja2 now specifies < 3.0, since that version no
-  longer supports Python 3.5.
-
-## 1.17.2 (2020-02-05)
-
-- BUGFIX: Fixes an import error in generated Kotlin code.
-
-## 1.17.1 (2020-02-05)
-
-- BUGFIX: Generated Swift code now includes `import Glean`, unless generating
-  for a Glean-internal build.
-
-## 1.17.0 (2020-02-03)
-
-- Remove default schema URL from `validate_ping`
-- Make `schema` argument required for CLI
-- BUGFIX: Avoid default import in Swift code for Glean itself
-- BUGFIX: Restore order of fields in generated Swift code
-
-## 1.16.0 (2020-01-15)
-
-- Support for `reason` codes on pings was added.
-
-## 1.15.6 (2020-02-06)
-
-- BUGFIX: The version of Jinja2 now specifies < 3.0, since that version no
-  longer supports Python 3.5 (backported from 1.17.3).
-
-## 1.15.5 (2019-12-19)
-
-- BUGFIX: Also allow the legacy name `all_pings` for `send_in_pings` parameter
-  on metrics
-
-## 1.15.4 (2019-12-19)
-
-- BUGFIX: Also allow the legacy name `all_pings`
-
-## 1.15.3 (2019-12-13)
-
-- Add project title to markdown template.
-- Remove "Sorry about that" from markdown template.
-- BUGFIX: Replace dashes in variable names to force proper naming
-
-## 1.15.2 (2019-12-12)
-
-- BUGFIX: Use a pure Python library for iso8601 so there is no compilation
-  required.
-
-## 1.15.1 (2019-12-12)
-
-- BUGFIX: Add some additional ping names to the non-kebab-case allow list.
-
-## 1.15.0 (2019-12-12)
-
-- Restrict new pings names to be kebab-case and change `all_pings` to
-  `all-pings`
-
-## 1.14.0 (2019-12-06)
-
-- `glean_parser` now supports Python versions 3.5, 3.6, 3.7 and 3.8.
-
-## 1.13.0 (2019-12-04)
-
-- The `translate` command will no longer clear extra files in the output
-  directory.
-- BUGFIX: Ensure all newlines in comments are prefixed with comment markers
-- BUGFIX: Escape Swift keywords in variable names in generated code
-- Generate documentation for pings that are sent if empty
-
-## 1.12.0 (2019-11-27)
-
-- Reserve the `deletion_request` ping name
-- Added a new flag `send_if_empty` for pings
-
-## 1.11.0 (2019-11-13)
-
-- The `glinter` command now performs `yamllint` validation on registry files.
-
-## 1.10.0 (2019-11-11)
-
-- The Kotlin linter `detekt` is now run during CI, and for local
-  testing if installed.
-- Python 3.8 is now tested in CI (in addition to Python 3.7). Using
-  `tox` for this doesn't work in modern versions of CircleCI, so the
-  `tox` configuration has been removed.
-- `yamllint` has been added to test the YAML files on CI.
-- ⚠ Metric types that don't yet have implementations in glean-core
-  have been removed. This includes `enumeration`, `rate`, `usage`, and
-  `use_counter`, as well as many labeled metrics that don't exist.
-
-## 1.9.5 (2019-10-22)
-
-- Allow a Swift lint for generated code
-- New lint: Restrict what metric can go into the `baseline` ping
-- New lint: Warn for slight misspellings in ping names
-- BUGFIX: change Labeled types labels from lists to sets.
-
-## 1.9.4 (2019-10-16)
-
-- Use lists instead of sets in Labeled types labels to ensure that the order of
-  the labels passed to the `metrics.yaml` is kept.
-- `glinter` will now check for duplicate labels and error if there are any.
-
-## 1.9.3 (2019-10-09)
-
-- Add labels from Labeled types to the Extra column in the Markdown template.
-
-## 1.9.2 (2019-10-08)
-
-- BUGFIX: Don't call `is_internal_metric` on `Ping` objects.
-
-## 1.9.1 (2019-10-07)
-
-- Don't include Glean internal metrics in the generated markdown.
-
-## 1.9.0 (2019-10-04)
-
-- Glinter now warns when bug numbers (rather than URLs) are used.
-- BUGFIX: add `HistogramType` and `MemoryUnit` imports in Kotlin generated code.
-
-## 1.8.4 (2019-10-02)
-
-- Removed unsupported labeled metric types.
-
-## 1.8.3 (2019-10-02)
-
-- Fix indentation for generated Swift code
-
-## 1.8.2 (2019-10-01)
-
-- Created labeled metrics and events in Swift code and wrap it in a
-  configured namespace
-
-## 1.8.1 (2019-09-27)
-
-- BUGFIX: `memory_unit` is now passed to the Kotlin generator.
-
-## 1.8.0 (2019-09-26)
-
-- A new parser config, `do_not_disable_expired`, was added to turn off the
-  feature that expired metrics are automatically disabled. This is useful if you
-  want to retain the disabled value that is explicitly in the `metrics.yaml`
-  file.
-- `glinter` will now report about superfluous `no_lint` entries.
-
-## 1.7.0 (2019-09-24)
-
-- A `glinter` tool is now included to find common mistakes in metric naming
-  and setup. This check is run during `translate` and warnings will be
-  displayed. ⚠ These warnings will be treated as errors in a future revision.
-
-## 1.6.1 (2019-09-17)
-
-- BUGFIX: `GleanGeckoMetricsMapping` must include `LabeledMetricType`
-  and `CounterMetricType`.
-
-## 1.6.0 (2019-09-17)
-
-- NEW: Support for outputting metrics in Swift.
-- BUGFIX: Provides a helpful error message when `geckoview_datapoint` is used on
-  a metric type that doesn't support GeckoView exfiltration.
-- Generate a lookup table for Gecko categorical histograms in
-  `GleanGeckoMetricsMapping`.
-- Introduce a 'Swift' output generator.
-
-## 1.4.1 (2019-08-28)
-
-- Documentation only.
-
-## 1.4.0 (2019-08-27)
-
-- Added support for generating markdown documentation from `metrics.yaml` files.
-
-## 1.3.0 (2019-08-22)
-
-- `quantity` metric type has been added.
-
-## 1.2.1 (2019-08-13)
-
-- BUGFIX: `includeClientId` was not being output for PingType.
-
-## 1.2.0 (2019-08-13)
-
-- `memory_distribution` metric type has been added.
-- `custom_distribution` metric type has been added.
-- `labeled_timespan` is no longer an allowed metric type.
-
-## 1.1.0 (2019-08-05)
-
-- Add a special `all_pings` value to `send_in_pings`.
-
-## 1.0.0 (2019-07-29)
-
-- First release to start following strict semver.
-
-## 0.1.0 (2018-10-15)
-
-- First release on PyPI.
diff --git a/third_party/python/glean_parser/glean_parser-13.0.1.dist-info/RECORD b/third_party/python/glean_parser/glean_parser-13.0.1.dist-info/RECORD deleted file mode 100644 index 8ebf523fd7..0000000000 --- a/third_party/python/glean_parser/glean_parser-13.0.1.dist-info/RECORD +++ /dev/null @@ -1,48 +0,0 @@ -glean_parser/__init__.py,sha256=bJljD052_0y-efcBhYpllICVCXOMHLcXRLNyrvfgt5A,533 -glean_parser/__main__.py,sha256=Rw0PpuQtAvdHJMK1YLozeZkc6x1yjeNZwidu4faovdk,8633 -glean_parser/coverage.py,sha256=2IwC4XMDtDamMkBFoYilmqJzW4gyypq65YVCur8SNas,4405 -glean_parser/data_review.py,sha256=BweeeTkNNS6HrIDkztawhbDByrk_-Avxpg7YeST3VAs,2152 -glean_parser/go_server.py,sha256=s6lxK9IAFY55pNl3Rv4MHlV-nQwSoyhO9ppTQE9VCik,5346 -glean_parser/javascript.py,sha256=w4ZhNBHBKWYk0h3t7G0Ud2tR__hRqzn9dlEXNKLdQrA,11230 -glean_parser/javascript_server.py,sha256=x75JfOaveEkPQe3ozYXdtDb1Zks-PxzncDOizsJbYos,7972 -glean_parser/kotlin.py,sha256=5z8_74xlqvHDsedwZhGf1_qb7swPEgIZumkJIuj3ef8,12598 -glean_parser/lint.py,sha256=STqdgyOhR4Q3fHivSizgn9bOOyqrNHhzjaqyJxz6qzI,19948 -glean_parser/markdown.py,sha256=GkCr1CrV6mnRQseT6FO1-JJ7Eup8X3lxUfRMBTxXpe4,9066 -glean_parser/metrics.py,sha256=YAO8wPuRHTLkdT9M4zh9ZwoFI1_VS8O9oQqwZNYyDp0,14612 -glean_parser/parser.py,sha256=cUOnvSXKfEBg8YTpRcWiPcMwpFpK1TTqsVO_zjUtpR4,15309 -glean_parser/pings.py,sha256=AQ-fBmIx2GKQv6J2NyTFfHHZzSnApZZoC770LlstkoI,3180 -glean_parser/python_server.py,sha256=ERpYcbSwF19xKFagxX0mZAvlR1y6D7Ah5DSvW8LipCY,4791 -glean_parser/ruby_server.py,sha256=e5lkfcLQAUMUBQDCjqNU82LkdUzT5x-G6HOnsUInbsU,5190 -glean_parser/rust.py,sha256=UEHeIZlToxCBelfec5sl_l_uLZfk8f_OUXqa_ZoEvnk,7330 -glean_parser/swift.py,sha256=T1BSGahd9wUd6VDeNC89SdN6M34jKXDlydMpSI0QLOs,8379 -glean_parser/tags.py,sha256=bemKYvcbMO4JrghiNSe-A4BNNDtx_FlUPkgrPPJy84Y,1391 -glean_parser/translate.py,sha256=luKQoraARZ2tjenHs0SVtCxflnYaMkzPYFfKEdKdSqQ,8403 -glean_parser/translation_options.py,sha256=Lxzr6G7MP0tC_ZYlZXftS4j0SLiqO-5mGVTEc7ggXis,2037 -glean_parser/util.py,sha256=v81watw5nSPGRlFNNpTb7iUv9NZObiFIbyyg2oZ6EnY,16149 -glean_parser/validate_ping.py,sha256=0TNvILH6dtzJDys3W8Kqorw6kk03me73OCUDtpoHcXU,2118 -glean_parser/schemas/metrics.1-0-0.schema.yaml,sha256=cND3cvi6iBfPUVmtfIBQfGJV9AALpbvN7nu8E33_J-o,19566 -glean_parser/schemas/metrics.2-0-0.schema.yaml,sha256=wx1q0L4C0-Vcwk1SPU6t8OfjDEQvgrwwEG6xfSHO1MI,26365 -glean_parser/schemas/pings.1-0-0.schema.yaml,sha256=hwCnsKpEysmrmVp-QHGBArEkVY3vaU1rVsxlTwhAzws,4315 -glean_parser/schemas/pings.2-0-0.schema.yaml,sha256=vDyvFT8KwAwaqyWHG4y6pFNrsc3NO7OyDDagA2eTeqM,5415 -glean_parser/schemas/tags.1-0-0.schema.yaml,sha256=OGXIJlvvVW1vaqB_NVZnwKeZ-sLlfH57vjBSHbj6DNI,1231 -glean_parser/templates/data_review.jinja2,sha256=jeYU29T1zLSyu9fKBBFu5BFPfIw8_hmOUXw8RXhRXK8,3287 -glean_parser/templates/go_server.jinja2,sha256=Jy1e0uQqr_WZNoj-AWnygRmygX2jyj_GQMMV8mSah2k,6825 -glean_parser/templates/javascript.buildinfo.jinja2,sha256=4mXiZCQIk9if4lxlA05kpSIL4a95IdwGwqle2OqqNAs,474 -glean_parser/templates/javascript.jinja2,sha256=cT_bG-jC6m4afECXmcsqHwiiHjRuVtJnfv90OD2Mwxw,2669 -glean_parser/templates/javascript_server.jinja2,sha256=H991yQOKJMwSgM0bLEA-Q5Z15LWsfEPh6bTYz_owSCU,9423 -glean_parser/templates/kotlin.buildinfo.jinja2,sha256=X0lk2SNu5OIIj2i6mUyF9CWFQIonLgfqkgT5fA-5G6c,920 -glean_parser/templates/kotlin.geckoview.jinja2,sha256=MJOgtoDXmBjE9pwk-G6T89y36RZuMbDWM_-DBN_gFJo,5099 -glean_parser/templates/kotlin.jinja2,sha256=3DqUMXJRkmTvSp_5IRyvGmw5iXYWdox7coMFe3YDxcc,5247 -glean_parser/templates/markdown.jinja2,sha256=vAHHGGm28HRDPd3zO_wQMAUZIuxE9uQ7hl3NpXxcKV4,3425 
-glean_parser/templates/python_server.jinja2,sha256=gu2C1rkn760IqBCG2SWaK7o32T1ify94wDEsudLPUg8,7260 -glean_parser/templates/qmldir.jinja2,sha256=m6IGsp-tgTiOfQ7VN8XW6GqX0gJqJkt3B6Pkaul6FVo,156 -glean_parser/templates/ruby_server.jinja2,sha256=vm4BEenOqzomQNTLFfMOzlWHARnsWUjTBbnR-v2cadI,6247 -glean_parser/templates/rust.jinja2,sha256=wlV0OZvV3Mk2ulrqFkN1vGjdsahsupEy2TQvWxQKzww,5439 -glean_parser/templates/swift.jinja2,sha256=xkvVsTpfK0QK3tI32wGqzxm2hqFNaBQ6Y71rKIsCmAI,4944 -glean_parser-13.0.1.dist-info/AUTHORS.md,sha256=yxgj8MioO4wUnrh0gmfb8l3DJJrf-l4HmmEDbQsbbNI,455 -glean_parser-13.0.1.dist-info/LICENSE,sha256=HyVuytGSiAUQ6ErWBHTqt1iSGHhLmlC8fO7jTCuR8dU,16725 -glean_parser-13.0.1.dist-info/METADATA,sha256=UYz6ZRXyv3ODi3yl2vRQHZVdm0XGerFp8pIOGWGwOKw,31604 -glean_parser-13.0.1.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92 -glean_parser-13.0.1.dist-info/entry_points.txt,sha256=mf9d3sv8BwSjjR58x9KDnpVkONCnv3fPQC2NjJl15Xg,68 -glean_parser-13.0.1.dist-info/top_level.txt,sha256=q7T3duD-9tYZFyDry6Wv2LcdMsK2jGnzdDFhxWcT2Z8,13 -glean_parser-13.0.1.dist-info/RECORD,, diff --git a/third_party/python/glean_parser/glean_parser-13.0.1.dist-info/WHEEL b/third_party/python/glean_parser/glean_parser-13.0.1.dist-info/WHEEL deleted file mode 100644 index bab98d6758..0000000000 --- a/third_party/python/glean_parser/glean_parser-13.0.1.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.43.0) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/third_party/python/glean_parser/glean_parser-13.0.1.dist-info/entry_points.txt b/third_party/python/glean_parser/glean_parser-13.0.1.dist-info/entry_points.txt deleted file mode 100644 index 08fde9d655..0000000000 --- a/third_party/python/glean_parser/glean_parser-13.0.1.dist-info/entry_points.txt +++ /dev/null @@ -1,2 +0,0 @@ -[console_scripts] -glean_parser = glean_parser.__main__:main_wrapper diff --git a/third_party/python/glean_parser/glean_parser-13.0.1.dist-info/top_level.txt b/third_party/python/glean_parser/glean_parser-13.0.1.dist-info/top_level.txt deleted file mode 100644 index a7f3a37918..0000000000 --- a/third_party/python/glean_parser/glean_parser-13.0.1.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -glean_parser diff --git a/third_party/python/glean_parser/glean_parser-14.0.1.dist-info/AUTHORS.md b/third_party/python/glean_parser/glean_parser-14.0.1.dist-info/AUTHORS.md new file mode 100644 index 0000000000..525116ee7e --- /dev/null +++ b/third_party/python/glean_parser/glean_parser-14.0.1.dist-info/AUTHORS.md @@ -0,0 +1,17 @@ +# Credits + +## Development Lead + +- Jan-Erik Rediger +- Alessio Placitelli + +## Contributors + +See [the full list of contributors](https://github.com/mozilla/glean_parser/graphs/contributors). + +## Acknowledgements + +This package was created with +[Cookiecutter](https://github.com/audreyr/cookiecutter) and the +[audreyr/cookiecutter-pypackage](https://github.com/audreyr/cookiecutter-pypackage) +project template. diff --git a/third_party/python/glean_parser/glean_parser-14.0.1.dist-info/LICENSE b/third_party/python/glean_parser/glean_parser-14.0.1.dist-info/LICENSE new file mode 100644 index 0000000000..a612ad9813 --- /dev/null +++ b/third_party/python/glean_parser/glean_parser-14.0.1.dist-info/LICENSE @@ -0,0 +1,373 @@ +Mozilla Public License Version 2.0 +================================== + +1. Definitions +-------------- + +1.1. 
"Contributor" + means each individual or legal entity that creates, contributes to + the creation of, or owns Covered Software. + +1.2. "Contributor Version" + means the combination of the Contributions of others (if any) used + by a Contributor and that particular Contributor's Contribution. + +1.3. "Contribution" + means Covered Software of a particular Contributor. + +1.4. "Covered Software" + means Source Code Form to which the initial Contributor has attached + the notice in Exhibit A, the Executable Form of such Source Code + Form, and Modifications of such Source Code Form, in each case + including portions thereof. + +1.5. "Incompatible With Secondary Licenses" + means + + (a) that the initial Contributor has attached the notice described + in Exhibit B to the Covered Software; or + + (b) that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the + terms of a Secondary License. + +1.6. "Executable Form" + means any form of the work other than Source Code Form. + +1.7. "Larger Work" + means a work that combines Covered Software with other material, in + a separate file or files, that is not Covered Software. + +1.8. "License" + means this document. + +1.9. "Licensable" + means having the right to grant, to the maximum extent possible, + whether at the time of the initial grant or subsequently, any and + all of the rights conveyed by this License. + +1.10. "Modifications" + means any of the following: + + (a) any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered + Software; or + + (b) any new file in Source Code Form that contains any Covered + Software. + +1.11. "Patent Claims" of a Contributor + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the + License, by the making, using, selling, offering for sale, having + made, import, or transfer of either its Contributions or its + Contributor Version. + +1.12. "Secondary License" + means either the GNU General Public License, Version 2.0, the GNU + Lesser General Public License, Version 2.1, the GNU Affero General + Public License, Version 3.0, or any later versions of those + licenses. + +1.13. "Source Code Form" + means the form of the work preferred for making modifications. + +1.14. "You" (or "Your") + means an individual or a legal entity exercising rights under this + License. For legal entities, "You" includes any entity that + controls, is controlled by, or is under common control with You. For + purposes of this definition, "control" means (a) the power, direct + or indirect, to cause the direction or management of such entity, + whether by contract or otherwise, or (b) ownership of more than + fifty percent (50%) of the outstanding shares or beneficial + ownership of such entity. + +2. License Grants and Conditions +-------------------------------- + +2.1. 
Grants + +Each Contributor hereby grants You a world-wide, royalty-free, +non-exclusive license: + +(a) under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + +(b) under Patent Claims of such Contributor to make, use, sell, offer + for sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + +2.2. Effective Date + +The licenses granted in Section 2.1 with respect to any Contribution +become effective for each Contribution on the date the Contributor first +distributes such Contribution. + +2.3. Limitations on Grant Scope + +The licenses granted in this Section 2 are the only rights granted under +this License. No additional rights or licenses will be implied from the +distribution or licensing of Covered Software under this License. +Notwithstanding Section 2.1(b) above, no patent license is granted by a +Contributor: + +(a) for any code that a Contributor has removed from Covered Software; + or + +(b) for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + +(c) under Patent Claims infringed by Covered Software in the absence of + its Contributions. + +This License does not grant any rights in the trademarks, service marks, +or logos of any Contributor (except as may be necessary to comply with +the notice requirements in Section 3.4). + +2.4. Subsequent Licenses + +No Contributor makes additional grants as a result of Your choice to +distribute the Covered Software under a subsequent version of this +License (see Section 10.2) or under the terms of a Secondary License (if +permitted under the terms of Section 3.3). + +2.5. Representation + +Each Contributor represents that the Contributor believes its +Contributions are its original creation(s) or it has sufficient rights +to grant the rights to its Contributions conveyed by this License. + +2.6. Fair Use + +This License is not intended to limit any rights You have under +applicable copyright doctrines of fair use, fair dealing, or other +equivalents. + +2.7. Conditions + +Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted +in Section 2.1. + +3. Responsibilities +------------------- + +3.1. Distribution of Source Form + +All distribution of Covered Software in Source Code Form, including any +Modifications that You create or to which You contribute, must be under +the terms of this License. You must inform recipients that the Source +Code Form of the Covered Software is governed by the terms of this +License, and how they can obtain a copy of this License. You may not +attempt to alter or restrict the recipients' rights in the Source Code +Form. + +3.2. 
Distribution of Executable Form + +If You distribute Covered Software in Executable Form then: + +(a) such Covered Software must also be made available in Source Code + Form, as described in Section 3.1, and You must inform recipients of + the Executable Form how they can obtain a copy of such Source Code + Form by reasonable means in a timely manner, at a charge no more + than the cost of distribution to the recipient; and + +(b) You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter + the recipients' rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + +You may create and distribute a Larger Work under terms of Your choice, +provided that You also comply with the requirements of this License for +the Covered Software. If the Larger Work is a combination of Covered +Software with a work governed by one or more Secondary Licenses, and the +Covered Software is not Incompatible With Secondary Licenses, this +License permits You to additionally distribute such Covered Software +under the terms of such Secondary License(s), so that the recipient of +the Larger Work may, at their option, further distribute the Covered +Software under the terms of either this License or such Secondary +License(s). + +3.4. Notices + +You may not remove or alter the substance of any license notices +(including copyright notices, patent notices, disclaimers of warranty, +or limitations of liability) contained within the Source Code Form of +the Covered Software, except that You may alter any license notices to +the extent required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + +You may choose to offer, and to charge a fee for, warranty, support, +indemnity or liability obligations to one or more recipients of Covered +Software. However, You may do so only on Your own behalf, and not on +behalf of any Contributor. You must make it absolutely clear that any +such warranty, support, indemnity, or liability obligation is offered by +You alone, and You hereby agree to indemnify every Contributor for any +liability incurred by such Contributor as a result of warranty, support, +indemnity or liability terms You offer. You may include additional +disclaimers of warranty and limitations of liability specific to any +jurisdiction. + +4. Inability to Comply Due to Statute or Regulation +--------------------------------------------------- + +If it is impossible for You to comply with any of the terms of this +License with respect to some or all of the Covered Software due to +statute, judicial order, or regulation then You must: (a) comply with +the terms of this License to the maximum extent possible; and (b) +describe the limitations and the code they affect. Such description must +be placed in a text file included with all distributions of the Covered +Software under this License. Except to the extent prohibited by statute +or regulation, such description must be sufficiently detailed for a +recipient of ordinary skill to be able to understand it. + +5. Termination +-------------- + +5.1. The rights granted under this License will terminate automatically +if You fail to comply with any of its terms. 
However, if You become +compliant, then the rights granted under this License from a particular +Contributor are reinstated (a) provisionally, unless and until such +Contributor explicitly and finally terminates Your grants, and (b) on an +ongoing basis, if such Contributor fails to notify You of the +non-compliance by some reasonable means prior to 60 days after You have +come back into compliance. Moreover, Your grants from a particular +Contributor are reinstated on an ongoing basis if such Contributor +notifies You of the non-compliance by some reasonable means, this is the +first time You have received notice of non-compliance with this License +from such Contributor, and You become compliant prior to 30 days after +Your receipt of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent +infringement claim (excluding declaratory judgment actions, +counter-claims, and cross-claims) alleging that a Contributor Version +directly or indirectly infringes any patent, then the rights granted to +You by any and all Contributors for the Covered Software under Section +2.1 of this License shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all +end user license agreements (excluding distributors and resellers) which +have been validly granted by You or Your distributors under this License +prior to termination shall survive termination. + +************************************************************************ +* * +* 6. Disclaimer of Warranty * +* ------------------------- * +* * +* Covered Software is provided under this License on an "as is" * +* basis, without warranty of any kind, either expressed, implied, or * +* statutory, including, without limitation, warranties that the * +* Covered Software is free of defects, merchantable, fit for a * +* particular purpose or non-infringing. The entire risk as to the * +* quality and performance of the Covered Software is with You. * +* Should any Covered Software prove defective in any respect, You * +* (not any Contributor) assume the cost of any necessary servicing, * +* repair, or correction. This disclaimer of warranty constitutes an * +* essential part of this License. No use of any Covered Software is * +* authorized under this License except under this disclaimer. * +* * +************************************************************************ + +************************************************************************ +* * +* 7. Limitation of Liability * +* -------------------------- * +* * +* Under no circumstances and under no legal theory, whether tort * +* (including negligence), contract, or otherwise, shall any * +* Contributor, or anyone who distributes Covered Software as * +* permitted above, be liable to You for any direct, indirect, * +* special, incidental, or consequential damages of any character * +* including, without limitation, damages for lost profits, loss of * +* goodwill, work stoppage, computer failure or malfunction, or any * +* and all other commercial damages or losses, even if such party * +* shall have been informed of the possibility of such damages. This * +* limitation of liability shall not apply to liability for death or * +* personal injury resulting from such party's negligence to the * +* extent applicable law prohibits such limitation. Some * +* jurisdictions do not allow the exclusion or limitation of * +* incidental or consequential damages, so this exclusion and * +* limitation may not apply to You. 
* +* * +************************************************************************ + +8. Litigation +------------- + +Any litigation relating to this License may be brought only in the +courts of a jurisdiction where the defendant maintains its principal +place of business and such litigation shall be governed by laws of that +jurisdiction, without reference to its conflict-of-law provisions. +Nothing in this Section shall prevent a party's ability to bring +cross-claims or counter-claims. + +9. Miscellaneous +---------------- + +This License represents the complete agreement concerning the subject +matter hereof. If any provision of this License is held to be +unenforceable, such provision shall be reformed only to the extent +necessary to make it enforceable. Any law or regulation which provides +that the language of a contract shall be construed against the drafter +shall not be used to construe this License against a Contributor. + +10. Versions of the License +--------------------------- + +10.1. New Versions + +Mozilla Foundation is the license steward. Except as provided in Section +10.3, no one other than the license steward has the right to modify or +publish new versions of this License. Each version will be given a +distinguishing version number. + +10.2. Effect of New Versions + +You may distribute the Covered Software under the terms of the version +of the License under which You originally received the Covered Software, +or under the terms of any subsequent version published by the license +steward. + +10.3. Modified Versions + +If you create software not governed by this License, and you want to +create a new license for such software, you may create and use a +modified version of this License if you rename the license and remove +any references to the name of the license steward (except to note that +such modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary +Licenses + +If You choose to distribute Source Code Form that is Incompatible With +Secondary Licenses under the terms of this version of the License, the +notice described in Exhibit B of this License must be attached. + +Exhibit A - Source Code Form License Notice +------------------------------------------- + + This Source Code Form is subject to the terms of the Mozilla Public + License, v. 2.0. If a copy of the MPL was not distributed with this + file, You can obtain one at http://mozilla.org/MPL/2.0/. + +If it is not possible or desirable to put the notice in a particular +file, then You may include the notice in a location (such as a LICENSE +file in a relevant directory) where a recipient would be likely to look +for such a notice. + +You may add additional accurate notices of copyright ownership. + +Exhibit B - "Incompatible With Secondary Licenses" Notice +--------------------------------------------------------- + + This Source Code Form is "Incompatible With Secondary Licenses", as + defined by the Mozilla Public License, v. 2.0. 
diff --git a/third_party/python/glean_parser/glean_parser-14.0.1.dist-info/METADATA b/third_party/python/glean_parser/glean_parser-14.0.1.dist-info/METADATA new file mode 100644 index 0000000000..65030fd86b --- /dev/null +++ b/third_party/python/glean_parser/glean_parser-14.0.1.dist-info/METADATA @@ -0,0 +1,799 @@ +Metadata-Version: 2.1 +Name: glean_parser +Version: 14.0.1 +Summary: Parser tools for Mozilla's Glean telemetry +Home-page: https://github.com/mozilla/glean_parser +Author: The Glean Team +Author-email: glean-team@mozilla.com +Keywords: glean_parser +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Natural Language :: English +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Description-Content-Type: text/markdown +License-File: LICENSE +License-File: AUTHORS.md +Requires-Dist: appdirs >=1.4 +Requires-Dist: Click >=7 +Requires-Dist: diskcache >=4 +Requires-Dist: Jinja2 >=2.10.1 +Requires-Dist: jsonschema >=3.0.2 +Requires-Dist: PyYAML >=5.3.1 + +# Glean Parser + +Parser tools for Mozilla's Glean telemetry. + +## Features + +Contains various utilities for handling `metrics.yaml` and `pings.yaml` for [the +Glean SDKs](https://mozilla.github.io/glean). This includes producing generated +code for various integrations, linting and coverage testing. + +## Documentation + +- [How to Contribute](https://github.com/mozilla/glean_parser/blob/main/CONTRIBUTING.md). Please file bugs in [bugzilla](https://bugzilla.mozilla.org/enter_bug.cgi?assigned_to=nobody%40mozilla.org&bug_ignored=0&bug_severity=normal&bug_status=NEW&cf_fission_milestone=---&cf_fx_iteration=---&cf_fx_points=---&cf_status_firefox65=---&cf_status_firefox66=---&cf_status_firefox67=---&cf_status_firefox_esr60=---&cf_status_thunderbird_esr60=---&cf_tracking_firefox65=---&cf_tracking_firefox66=---&cf_tracking_firefox67=---&cf_tracking_firefox_esr60=---&cf_tracking_firefox_relnote=---&cf_tracking_thunderbird_esr60=---&product=Data%20Platform%20and%20Tools&component=Glean%3A%20SDK&contenttypemethod=list&contenttypeselection=text%2Fplain&defined_groups=1&flag_type-203=X&flag_type-37=X&flag_type-41=X&flag_type-607=X&flag_type-721=X&flag_type-737=X&flag_type-787=X&flag_type-799=X&flag_type-800=X&flag_type-803=X&flag_type-835=X&flag_type-846=X&flag_type-855=X&flag_type-864=X&flag_type-916=X&flag_type-929=X&flag_type-930=X&flag_type-935=X&flag_type-936=X&flag_type-937=X&form_name=enter_bug&maketemplate=Remember%20values%20as%20bookmarkable%20template&op_sys=Unspecified&priority=P3&&rep_platform=Unspecified&status_whiteboard=%5Btelemetry%3Aglean-rs%3Am%3F%5D&target_milestone=---&version=unspecified). +- [User documentation for Glean](https://mozilla.github.io/glean/). +- [`glean_parser` developer documentation](https://mozilla.github.io/glean_parser/). + +## Requirements + +- Python 3.8 (or later) + +The following library requirements are installed automatically when +`glean_parser` is installed by `pip`. 
+ +- appdirs +- Click +- diskcache +- Jinja2 +- jsonschema +- PyYAML + +## Usage + +```sh +$ glean_parser --help +``` + +Read in `metrics.yaml`, translate to Kotlin format, and +output to `output_dir`: + +```sh +$ glean_parser translate -o output_dir -f kotlin metrics.yaml +``` + +Check a Glean ping against the ping schema: + +```sh +$ glean_parser check < ping.json +``` + + +# Changelog + +## 14.0.1 + +- BUGFIX: Fix missing `ping_arg` in util.py ([#687](https://github.com/mozilla/glean_parser/pull/687)) + +## 14.0.0 + +- BREAKING CHANGE: Expose the optional `enabled` property on pings, defaulting to `enabled: true` ([#681](https://github.com/mozilla/glean_parser/pull/681)) +- BREAKING CHANGE: Support metadata field `ping_schedule` for pings ([bug 1804711](https://bugzilla.mozilla.org/show_bug.cgi?id=1804711)) +- Add support for event metric type in server JavaScript outputter ([DENG-2407](https://mozilla-hub.atlassian.net/browse/DENG-2407)) +- Add Swift and Kotlin codegen support for the object metric type ([#685](https://github.com/mozilla/glean_parser/pull/685)) + +## 13.0.1 + +- Use faster C yaml parser if available ([#677](https://github.com/mozilla/glean_parser/pull/677)) + +## 13.0.0 + +- BREAKING CHANGE: Support metadata field `include_info_sections` ([bug 1866559](https://bugzilla.mozilla.org/show_bug.cgi?id=1866559)) + +## 12.0.1 + +- Fix Rust codegen for object metric type ([#662](https://github.com/mozilla/glean_parser/pull/662)) + +## 12.0.0 + +- Add new metric type object (only Rust codegen support right now) ([#587](https://github.com/mozilla/glean_parser/pull/587)) + +## 11.1.0 + +- Add Go log outputter (`go_server`) ([#645](https://github.com/mozilla/glean_parser/pull/645)) +- Add Python log outputter (`python_server`) ([MPP-3642](https://mozilla-hub.atlassian.net/browse/MPP-3642)) + +## 11.0.1 + +- Fix javascript_server template to include non-event metric parameters in #record call for event metrics ([#643](https://github.com/mozilla/glean_parser/pull/643)) +- events: Increase extra key limit to 50 ([Bug 1869429](https://bugzilla.mozilla.org/show_bug.cgi?id=1869429)) + +## 11.0.0 + +- Add updated logging logic for Ruby Server ([#642](https://github.com/mozilla/glean_parser/pull/642)) +- Add support for event metric type in server-side JavaScript outputter ([DENG-1736](https://mozilla-hub.atlassian.net/browse/DENG-1736)) +- BREAKING CHANGE: Dropped support for Python 3.7 ([#638](https://github.com/mozilla/glean_parser/pull/638)) +- Add official support for Python 3.11+ ([#638](https://github.com/mozilla/glean_parser/pull/638)) + +## 10.0.3 + +- Warn about empty or TODO-tagged data reviews in the list ([#634](https://github.com/mozilla/glean_parser/pull/634)) +- Allow `unit` field on all metrics, but warn for all but quantity and custom distribution ([#636](https://github.com/mozilla/glean_parser/pull/636)) + +## 10.0.2 + +- Allow `unit` field for string again, but warn about it in the linter ([#634](https://github.com/mozilla/glean_parser/pull/634)) + +## 10.0.1 + +- Allow `unit` field for custom distribution again ([#633](https://github.com/mozilla/glean_parser/pull/633)) + +## 10.0.0 + +- Add Ruby log outputter (`ruby_server`) ([#620](https://github.com/mozilla/glean_parser/pull/620)) +- BREAKING CHANGE: `ping` lifetime metrics on the events ping are now disallowed ([#625](https://github.com/mozilla/glean_parser/pull/625)) +- Disallow `unit` field for anything but quantity ([#630](https://github.com/mozilla/glean_parser/pull/630)).
+ Note that this was already considered the case; now the code enforces it. + +## 9.0.0 + +- BREAKING CHANGE: Dropped support for Python 3.6 ([#615](https://github.com/mozilla/glean_parser/issues/615)) +- Allow metadata to configure precise timestamps in pings ([#592](https://github.com/mozilla/glean_parser/pull/592)) + +## 8.1.1 + +- Small updates to the `javascript_server` template to address lint warnings ([#598](https://github.com/mozilla/glean_parser/pull/598)) + +## 8.1.0 + +- Increased the maximum metric name length in version 2.0.0 schema ([#596](https://github.com/mozilla/glean_parser/pull/596)) + +## 8.0.0 + +- BREAKING CHANGE: Remove exposed `lint_yaml_files` function ([#580](https://github.com/mozilla/glean_parser/pull/580)) +- Rust: Removed `__glean_metric_maps` from the Rust Jinja template. This functionality is better placed downstream ([Bug 1816526](https://bugzilla.mozilla.org/show_bug.cgi?id=1816526)) +- New lint: check that all referenced pings are known ([#584](https://github.com/mozilla/glean_parser/pull/584)) +- Add experimental server-side JavaScript outputter ([FXA-7922](https://mozilla-hub.atlassian.net/browse/FXA-7922)) + +## 7.2.1 + +- Unbreak last minor release ([#579](https://github.com/mozilla/glean_parser/pull/579)) + +## 7.2.0 + +- Remove yamllint integration ([#578](https://github.com/mozilla/glean_parser/pull/578)) + +## 7.1.0 + +- ENHANCEMENT: Labels in `labels:` fields may now contain any printable ASCII characters ([bug 1672273](https://bugzilla.mozilla.org/show_bug.cgi?id=1672273)) +- BUGFIX: Enforce ordering of generation of Pings, Metrics and Tags such that order is deterministic ([bug 1820334](https://bugzilla.mozilla.org/show_bug.cgi?id=1820334)) + +## 7.0.0 + +- BUGFIX: Remove internal-only fields from serialized metrics data ([#550](https://github.com/mozilla/glean_parser/pull/550)) +- FEATURE: New subcommand: `dump` to dump the metrics data as JSON ([#550](https://github.com/mozilla/glean_parser/pull/550)) +- BUGFIX: Kotlin: Generate enums with the right generic bound for ping reason codes ([#551](https://github.com/mozilla/glean_parser/pull/551)). +- **BREAKING CHANGE:** Fully remove support for the old events API ([#549](https://github.com/mozilla/glean_parser/pull/549)) + Adds a new lint `OLD_EVENT_API` to warn about missing `type` attributes on event extra keys. + Note that the Glean SDK already dropped support for the old events API. + +## 6.4.0 + +- BUGFIX: Correct code generation for labeled metrics in Rust ([#533](https://github.com/mozilla/glean_parser/pull/533)) +- BUGFIX: Correctly serialize `Rates` for Rust code ([#530](https://github.com/mozilla/glean_parser/pull/530)) +- Feature: Wrap labeled metric's static labels list as CoW strings (requires updated Glean support) ([#534](https://github.com/mozilla/glean_parser/pull/534)) + +## 6.3.0 + +- events: Increase extras limit to 15 ([bug 1798713](https://bugzilla.mozilla.org/show_bug.cgi?id=1798713)) + +## 6.2.1 + +- Add support for Rate, Denominator and Numerator metrics for JavaScript.
([bug 1793777](https://bugzilla.mozilla.org/show_bug.cgi?id=1793777)) + +## 6.2.0 + +- [data-review] Use a template to generate the Data Review Request template ([bug 1772605](https://bugzilla.mozilla.org/show_bug.cgi?id=1772605)) +- Make tag and no\_lint order deterministic ([#518](https://github.com/mozilla/glean_parser/pull/518)) + +## 6.1.2 + +- Swift: Add a conditional `import Foundation` to support generating metrics when Glean is delivered via the AppServices iOS megazord + +## 6.1.1 + +- Rust: Use correct name for a ping in generated code. + +## 6.1.0 + +- [data-review] Include extra keys' names and descriptions in data review template ([bug 1767027](https://bugzilla.mozilla.org/show_bug.cgi?id=1767027)) +- Raise limit on number of statically-defined labels to 4096. ([bug 1772163](https://bugzilla.mozilla.org/show_bug.cgi?id=1772163)) +- Fix Rust code generation for new UniFFI interface ([#491](https://github.com/mozilla/glean_parser/pull/491), [#494](https://github.com/mozilla/glean_parser/pull/494), [#495](https://github.com/mozilla/glean_parser/pull/495)) + +## 6.0.1 + +- Relax version requirement for MarkupSafe. + Now works with MarkupSafe v1.1.1 to v2.0.1 inclusive again. + +## 6.0.0 + +- BUGFIX: Add missing `extra_args` to Rust constructor generation ([bug 1765855](https://bugzilla.mozilla.org/show_bug.cgi?id=1765855)) +- **Breaking change:** `glean_parser` now generates metrics compatible with the UniFFI-powered Glean SDK. + This is not backwards-compatible with previous versions. +- Generate Rate, Denominator and Numerator metrics for Kotlin and Swift +- Explicitly skip Rate, Denominator and Numerator metrics for JavaScript. + These will cause a build failure by default, but can be turned into warnings on request. + Use `-s fail_rates=false` to enable warning-only mode. + +## 5.1.2 + +- BUGFIX: Revert changes made on v5.1.1. + - The issues addressed by those changes were non-issues and the result of misuse of the APIs. + +## 5.1.1 + +- BUGFIX: Fix issues with Swift templates ([bug 1749494](https://bugzilla.mozilla.org/show_bug.cgi?id=1749494)) + - Make metrics and pings all `public` + - Make pings `static` + +## 5.1.0 + +- Add support for build info generation for JavaScript and Typescript targets ([bug 1749494](https://bugzilla.mozilla.org/show_bug.cgi?id=1749494)) + +## 5.0.1 + +- Fix the logic for the metric expiration by version ([bug 1753194](https://bugzilla.mozilla.org/show_bug.cgi?id=1753194)) + +## 5.0.0 + +- Remove C# support ([#436](https://github.com/mozilla/glean_parser/pull/436)). +- Add support for Rust code generation ([bug 1677434](https://bugzilla.mozilla.org/show_bug.cgi?id=1677434)) +- Report an error if no files are passed ([bug 1751730](https://bugzilla.mozilla.org/show_bug.cgi?id=1751730)) +- [data-review] Report an error if no metrics match provided bug number ([bug 1752576](https://bugzilla.mozilla.org/show_bug.cgi?id=1752576)) +- [data-review] Include notification_emails in list of those responsible ([bug 1752576](https://bugzilla.mozilla.org/show_bug.cgi?id=1752576)) +- Add support for expiring metrics by the provided major version ([bug 1753194](https://bugzilla.mozilla.org/show_bug.cgi?id=1753194)) + +## 4.4.0 + +- Support global file-level tags in metrics.yaml ([bug 1745283](https://bugzilla.mozilla.org/show_bug.cgi?id=1745283)) +- Glinter: Reject metric files if they use `unit` by mistake. It should be `time_unit` ([#432](https://github.com/mozilla/glean_parser/pull/432)).
+- Automatically generate a build date when generating build info ([#431](https://github.com/mozilla/glean_parser/pull/431)). + Enabled for Kotlin and Swift. + This can be changed with the `build_date` command line option. + `build_date=0` will use a static unix epoch time. + `build_date=2022-01-03T17:30:00` will parse the ISO8601 string to use (as a UTC timestamp). + Other values will throw an error. + + Example: + + glean_parser translate --format kotlin --option build_date=2021-11-01T01:00:00 path/to/metrics.yaml + +## 4.3.1 + +- BUGFIX: Skip tags for code generation ([#409](https://github.com/mozilla/glean_parser/pull/409)) + +## 4.3.0 + +- Support tags in glean parser ([bug 1734011](https://bugzilla.mozilla.org/show_bug.cgi?id=1734011)) + +## 4.2.0 + +- Improve the schema validation error messages. They will no longer include `OrderedDict(...)` on Python 3.7 and later ([bug 1733395](https://bugzilla.mozilla.org/show_bug.cgi?id=1733395)) +- Officially support Python 3.10 + +## 4.1.1 (2021-09-28) + +- Update private import paths on Javascript / Typescript templates. ([bug 1702468](https://bugzilla.mozilla.org/show_bug.cgi?id=1702468)) + +## 4.1.0 (2021-09-16) + +- Add support for Node.js platform on Javascript / Typescript templates. ([bug 1728982](https://bugzilla.mozilla.org/show_bug.cgi?id=1728982)) + +## 4.0.0 (2021-08-20) + +- Add support for Text metric type ([#374](https://github.com/mozilla/glean_parser/pull/374)) +- Reserve the `default` ping name. It can't be used as a ping name, but it can be used in `send_in_pings` ([#376](https://github.com/mozilla/glean_parser/pull/376)) + +## 3.8.0 (2021-08-18) + +- Expose ping reasons enum on JavaScript / TypeScript templates. ([bug 1719136](https://bugzilla.mozilla.org/show_bug.cgi?id=1719136)) +- Define an interface with the allowed extras for each event on the TypeScript template. ([bug 1693487](https://bugzilla.mozilla.org/show_bug.cgi?id=1693487)) + +## 3.7.0 (2021-07-13) + +- New lint: Check for redundant words in ping names ([#355](https://github.com/mozilla/glean_parser/pull/355)) +- Add support for URL metric type ([#361](https://github.com/mozilla/glean_parser/pull/361)) + +## 3.6.0 (2021-06-11) + +- Add a command `data-review` to generate a skeleton Data Review Request for all metrics matching a supplied bug number. ([bug 1704541](https://bugzilla.mozilla.org/show_bug.cgi?id=1704541)) +- Enable custom distribution outside of GeckoView (`gecko_datapoint` becomes optional) + +## 3.5.0 (2021-06-03) + +- Transform generated folder into QML Module when building Javascript templates for the Qt platform. ([bug 1707896](https://bugzilla.mozilla.org/show_bug.cgi?id=1707896)) + - Import the Glean QML module from inside each generated file, removing the requirement to import Glean before importing any of the generated files; + - Provide a `qmldir` file exposing all generated files; + - Drop the `namespace` option for Javascript templates; + - Add a new `version` option for Javascript templates, required when building for Qt, which expects the Glean QML module version.
+ +## 3.4.0 (2021-05-28) + +- Add missing import for Kotlin code ([#339](https://github.com/mozilla/glean_parser/pull/339)) +- Use a plain Kotlin type in the generated interface implementation ([#339](https://github.com/mozilla/glean_parser/pull/339)) +- Generate additional generics for event metrics ([#339](https://github.com/mozilla/glean_parser/pull/339)) +- For Kotlin skip generating `GleanBuildInfo.kt` when requested (with `with_buildinfo=false`) ([#341](https://github.com/mozilla/glean_parser/pull/341)) + +## 3.3.2 (2021-05-18) + +- Fix another bug in the Swift code generation when generating extra keys ([#334](https://github.com/mozilla/glean_parser/pull/334)) + +## 3.3.1 (2021-05-18) + +- Fix Swift code generation bug for pings ([#333](https://github.com/mozilla/glean_parser/pull/333)) + +## 3.3.0 (2021-05-18) + +- Generate new event API construct ([#321](https://github.com/mozilla/glean_parser/pull/321)) + +## 3.2.0 (2021-04-28) + +- Add option to add extra introductory text to generated markdown ([#298](https://github.com/mozilla/glean_parser/pull/298)) +- Add support for Qt in Javascript templates ([bug 1706252](https://bugzilla.mozilla.org/show_bug.cgi?id=1706252)) + - Javascript templates will now accept the `platform` option. If this option is set to `qt` + the generated templates will be Qt compatible. Default value is `webext`. + +## 3.1.2 (2021-04-21) + +- BUGFIX: Remove the "DO NOT COMMIT" notice from the documentation. + +## 3.1.1 (2021-04-19) + +- Recommend to not commit as well as to not edit the generated files. ([bug 1706042](https://bugzilla.mozilla.org/show_bug.cgi?id=1706042)) +- BUGFIX: Include import statement for labeled metric subtypes in Javascript and Typescript templates. + +## 3.1.0 (2021-04-16) + +- Add support for labeled metric types in Javascript and Typescript templates. + +## 3.0.0 (2021-04-13) + +- Raise limit on number of statically-defined labels to 100. ([bug 1702263](https://bugzilla.mozilla.org/show_bug.cgi?id=1702263)) +- BUGFIX: Version 2.0.0 of the schema now allows the "special" `glean_.*` ping names for Glean-internal use again. +- Remove support for JWE metric types. + +## 2.5.0 (2021-02-23) + +- Add parser and object model support for `rate` metric type. ([bug 1645166](https://bugzilla.mozilla.org/show_bug.cgi?id=1645166)) +- Add parser and object model support for telemetry_mirror property. ([bug 1685406](https://bugzilla.mozilla.org/show_bug.cgi?id=1685406)) +- Update the Javascript template to match Glean.js expectations. ([bug 1693516](https://bugzilla.mozilla.org/show_bug.cgi?id=1693516)) + - Glean.js has updated its export strategy. It will now export each metric type as an independent module; + - Glean.js has dropped support for non-ES6 modules. +- Add support for generating Typescript code. ([bug 1692157](https://bugzilla.mozilla.org/show_bug.cgi?id=1692157)) + - The templates added generate metrics and pings code for Glean.js. + +## 2.4.0 (2021-02-18) + +- **Experimental:** `glean_parser` has a new subcommand `coverage` to convert raw coverage reports + into something consumable by coverage tools, such as codecov.io +- The path to the file that each metric is defined in is now stored on the + `Metric` object in `defined_in["filepath"]`. + +## 2.3.0 (2021-02-17) + +- Leverage the `glean_namespace` to provide correct import when building for Javascript.
+ +## 2.2.0 (2021-02-11) + +- The Kotlin generator now generates static build information that can be passed + into `Glean.initialize` to avoid calling the package manager at runtime. + +## 2.1.0 (2021-02-10) + +- Add support for generating Javascript code. + - The templates added generate metrics and pings code for Glean.js. + +## 2.0.0 (2021-02-05) + +- New versions 2.0.0 of the `metrics.yaml` and `pings.yaml` schemas now ship + with `glean_parser`. These schemas are different from version 1.0.0 in the + following ways: + + - Bugs must be specified as URLs. Bug numbers are disallowed. + - The legacy ping names containing underscores are no longer allowed. These + included `deletion_request`, `bookmarks_sync`, `history_sync`, + `session_end`, `all_pings`, `glean_*`. In these cases, the `_` should be + replaced with `-`. + + To upgrade your app or library to use the new schema, replace the version in + the `$schema` value with `2-0-0`. + +- **Breaking change:** It is now an error to use bug numbers (rather than URLs) + in ping definitions. + +- Add the line number that metrics and pings were originally defined in the yaml + files. + +## 1.29.1 (2020-12-17) + +- BUGFIX: Linter output can now be redirected correctly (1675771). + +## 1.29.0 (2020-10-07) + +- **Breaking change:** `glean_parser` will now return an error code when any of + the input files do not exist (unless the `--allow-missing-files` flag is + passed). +- Generated code now includes a comment next to each metric containing the name + of the metric in its original `snake_case` form. +- When metrics don't provide a `unit` parameter, it is not included in the + output (as provided by probe-scraper). + +## 1.28.6 (2020-09-24) + +- BUGFIX: Ensure Kotlin arguments are deterministically ordered + +## 1.28.5 (2020-09-14) + +- Fix deploy step to update pip before deploying to pypi. + +## 1.28.4 (2020-09-14) + +- The `SUPERFLUOUS_NO_LINT` warning has been removed from the glinter. + It likely did more harm than good, and makes it hard to make + `metrics.yaml` files that pass across different versions of + `glean_parser`. +- Expired metrics will now produce a linter warning, `EXPIRED_METRIC`. +- Expiry dates that are more than 730 days (\~2 years) in the future + will produce a linter warning, `EXPIRATION_DATE_TOO_FAR`. +- Allow using the Quantity metric type outside of Gecko. +- New parser configs `custom_is_expired` and `custom_validate_expires` + added. These are both functions that take the `expires` value of the + metric and return a bool. (See `Metric.is_expired` and + `Metric.validate_expires`). These will allow FOG to provide custom + validation for its version-based `expires` values. + +## 1.28.3 (2020-07-28) + +- BUGFIX: Support HashSet and Dictionary in the C\# generated code. + +## 1.28.2 (2020-07-28) + +- BUGFIX: Generate valid C\# code when using Labeled metric types. + +## 1.28.1 (2020-07-24) + +- BUGFIX: Add missing column to correctly render markdown tables in generated + documentation. + +## 1.28.0 (2020-07-23) + +- **Breaking change:** The internal ping `deletion-request` was misnamed in + pings.py causing the linter to not allow use of the correctly named ping for + adding legacy ids to. Consuming apps will need to update their metrics.yaml if + they are using `deletion_request` in any `send_in_pings` to `deletion-request` + after updating. + +## 1.27.0 (2020-07-21) + +- Rename the `data_category` field to `data_sensitivity` to be clearer. + +## 1.26.0 (2020-07-21) + +- Add support for JWE metric types.
+- Add a `data_sensitivity` field to all metrics for specifying the type of data + collected in the field. + +## 1.25.0 (2020-07-17) + +- Add support for generating C\# code. +- BUGFIX: The memory unit is now correctly passed to the MemoryDistribution + metric type in Swift. + +## 1.24.0 (2020-06-30) + +- BUGFIX: Look for metrics in send\_if\_empty pings. Metrics for these kinds of + pings were being ignored. + +## 1.23.0 (2020-06-27) + +- Support for Python 3.5 has been dropped. +- BUGFIX: The ordering of event extra keys will now match with their enum, + fixing a serious bug where keys of extras may not match the correct values in + the data payload. See . + +## 1.22.0 (2020-05-28) + +- **Breaking change:** (Swift only) Combine all metrics and pings into a single + generated file `Metrics.swift`. + +## 1.21.0 (2020-05-25) + +- `glinter` messages have been improved with more details and to be more + actionable. +- A maximum of 10 `extra_keys` is now enforced for `event` metric types. +- BUGFIX: the `Lifetime` enum values now match the values of the implementation + in mozilla/glean. + +## 1.20.4 (2020-05-07) + +- BUGFIX: yamllint errors are now reported using the correct file name. + +## 1.20.3 (2020-05-06) + +- Support for using `timing_distribution`'s `time_unit` parameter to control + the range of acceptable values is documented. The default unit for this use + case is `nanosecond` to avoid creating a breaking change. See [bug + 1630997](https://bugzilla.mozilla.org/show_bug.cgi?id=1630997) for more + information. + +## 1.20.2 (2020-04-24) + +- Dependencies that depend on the version of Python being used are now specified + using the [Declaring platform specific dependencies syntax in + setuptools](https://setuptools.readthedocs.io/en/latest/setuptools.html#declaring-platform-specific-dependencies). + This means that more recent versions of dependencies are likely to be + installed on Python 3.6 and later, and unnecessary backport libraries won't + be installed on more recent Python versions. + +## 1.20.1 (2020-04-21) + +- The minimum version of the runtime dependencies has been lowered to increase + compatibility with other tools. These minimum versions are now tested in CI, + in addition to testing the latest versions of the dependencies that was + already happening in CI. + +## 1.20.0 (2020-04-15) + +- **Breaking change:** glinter errors found during the `translate` command will + now return an error code. glinter warnings will be displayed, but not return + an error code. +- `glean_parser` now produces a linter warning when `user` lifetime metrics are + set to expire. See [bug + 1604854](https://bugzilla.mozilla.org/show_bug.cgi?id=1604854) for additional + context. + +## 1.19.0 (2020-03-18) + +- **Breaking change:** The regular expression used to validate labels is + stricter and more correct. +- Add more information about pings to markdown documentation: + - State whether the ping includes client id; + - Add list of data review links; + - Add list of related bugs links. +- `glean_parser` now makes it easier to write external translation + functions for different language targets. +- BUGFIX: `glean_parser` now works on 32-bit Windows. + +## 1.18.3 (2020-02-24) + +- Dropped the `inflection` dependency. +- Constrained the `zipp` and `MarkupSafe` transitive dependencies to versions + that support Python 3.5. + +## 1.18.2 (2020-02-14) + +- BUGFIX: Fix rendering of first element of reason list.
+ +## 1.18.1 (2020-02-14) + +- BUGFIX: Reason codes are displayed in markdown output for built-in + pings as well. +- BUGFIX: Reason descriptions are indented correctly in markdown + output. +- BUGFIX: To avoid a compiler error, the `@JvmName` annotation isn't + added to private members. + +## 1.18.0 (2020-02-13) + +- **Breaking Change (Java API)** Have the metrics names in Java match the names + in Kotlin. See [Bug + 1588060](https://bugzilla.mozilla.org/show_bug.cgi?id=1588060). +- The reasons a ping is sent are now included in the generated markdown + documentation. + +## 1.17.3 (2020-02-05) + +- BUGFIX: The version of Jinja2 now specifies < 3.0, since that version no + longer supports Python 3.5. + +## 1.17.2 (2020-02-05) + +- BUGFIX: Fixes an import error in generated Kotlin code. + +## 1.17.1 (2020-02-05) + +- BUGFIX: Generated Swift code now includes `import Glean`, unless generating + for a Glean-internal build. + +## 1.17.0 (2020-02-03) + +- Remove default schema URL from `validate_ping` +- Make `schema` argument required for CLI +- BUGFIX: Avoid default import in Swift code for Glean itself +- BUGFIX: Restore order of fields in generated Swift code + +## 1.16.0 (2020-01-15) + +- Support for `reason` codes on pings was added. + +## 1.15.6 (2020-02-06) + +- BUGFIX: The version of Jinja2 now specifies < 3.0, since that version no + longer supports Python 3.5 (backported from 1.17.3). + +## 1.15.5 (2019-12-19) + +- BUGFIX: Also allow the legacy name `all_pings` for `send_in_pings` parameter + on metrics + +## 1.15.4 (2019-12-19) + +- BUGFIX: Also allow the legacy name `all_pings` + +## 1.15.3 (2019-12-13) + +- Add project title to markdown template. +- Remove "Sorry about that" from markdown template. +- BUGFIX: Replace dashes in variable names to force proper naming + +## 1.15.2 (2019-12-12) + +- BUGFIX: Use a pure Python library for iso8601 so there is no compilation + required. + +## 1.15.1 (2019-12-12) + +- BUGFIX: Add some additional ping names to the non-kebab-case allow list. + +## 1.15.0 (2019-12-12) + +- Restrict new ping names to be kebab-case and change `all_pings` to + `all-pings` + +## 1.14.0 (2019-12-06) + +- `glean_parser` now supports Python versions 3.5, 3.6, 3.7 and 3.8. + +## 1.13.0 (2019-12-04) + +- The `translate` command will no longer clear extra files in the output + directory. +- BUGFIX: Ensure all newlines in comments are prefixed with comment markers +- BUGFIX: Escape Swift keywords in variable names in generated code +- Generate documentation for pings that are sent if empty + +## 1.12.0 (2019-11-27) + +- Reserve the `deletion_request` ping name +- Added a new flag `send_if_empty` for pings + +## 1.11.0 (2019-11-13) + +- The `glinter` command now performs `yamllint` validation on registry files. + +## 1.10.0 (2019-11-11) + +- The Kotlin linter `detekt` is now run during CI, and for local + testing if installed. +- Python 3.8 is now tested in CI (in addition to Python 3.7). Using + `tox` for this doesn't work in modern versions of CircleCI, so the + `tox` configuration has been removed. +- `yamllint` has been added to test the YAML files on CI. +- ⚠ Metric types that don't yet have implementations in glean-core + have been removed. This includes `enumeration`, `rate`, `usage`, and + `use_counter`, as well as many labeled metrics that don't exist.
+ +## 1.9.5 (2019-10-22) + +- Allow a Swift lint for generated code +- New lint: Restrict which metrics can go into the `baseline` ping +- New lint: Warn for slight misspellings in ping names +- BUGFIX: Change Labeled types labels from lists to sets. + +## 1.9.4 (2019-10-16) + +- Use lists instead of sets in Labeled types labels to ensure that the order of + the labels passed to the `metrics.yaml` is kept. +- `glinter` will now check for duplicate labels and error if there are any. + +## 1.9.3 (2019-10-09) + +- Add labels from Labeled types to the Extra column in the Markdown template. + +## 1.9.2 (2019-10-08) + +- BUGFIX: Don't call `is_internal_metric` on `Ping` objects. + +## 1.9.1 (2019-10-07) + +- Don't include Glean internal metrics in the generated markdown. + +## 1.9.0 (2019-10-04) + +- Glinter now warns when bug numbers (rather than URLs) are used. +- BUGFIX: add `HistogramType` and `MemoryUnit` imports in Kotlin generated code. + +## 1.8.4 (2019-10-02) + +- Removed unsupported labeled metric types. + +## 1.8.3 (2019-10-02) + +- Fix indentation for generated Swift code + +## 1.8.2 (2019-10-01) + +- Created labeled metrics and events in Swift code and wrapped them in a + configured namespace + +## 1.8.1 (2019-09-27) + +- BUGFIX: `memory_unit` is now passed to the Kotlin generator. + +## 1.8.0 (2019-09-26) + +- A new parser config, `do_not_disable_expired`, was added to turn off the + feature that expired metrics are automatically disabled. This is useful if you + want to retain the disabled value that is explicitly in the `metrics.yaml` + file. +- `glinter` will now report about superfluous `no_lint` entries. + +## 1.7.0 (2019-09-24) + +- A `glinter` tool is now included to find common mistakes in metric naming + and setup. This check is run during `translate` and warnings will be + displayed. ⚠ These warnings will be treated as errors in a future revision. + +## 1.6.1 (2019-09-17) + +- BUGFIX: `GleanGeckoMetricsMapping` must include `LabeledMetricType` + and `CounterMetricType`. + +## 1.6.0 (2019-09-17) + +- NEW: Support for outputting metrics in Swift. +- BUGFIX: Provides a helpful error message when `geckoview_datapoint` is used on + a metric type that doesn't support GeckoView exfiltration. +- Generate a lookup table for Gecko categorical histograms in + `GleanGeckoMetricsMapping`. +- Introduce a 'Swift' output generator. + +## 1.4.1 (2019-08-28) + +- Documentation only. + +## 1.4.0 (2019-08-27) + +- Added support for generating markdown documentation from `metrics.yaml` files. + +## 1.3.0 (2019-08-22) + +- `quantity` metric type has been added. + +## 1.2.1 (2019-08-13) + +- BUGFIX: `includeClientId` was not being output for PingType. + +## 1.2.0 (2019-08-13) + +- `memory_distribution` metric type has been added. +- `custom_distribution` metric type has been added. +- `labeled_timespan` is no longer an allowed metric type. + +## 1.1.0 (2019-08-05) + +- Add a special `all_pings` value to `send_in_pings`. + +## 1.0.0 (2019-07-29) + +- First release to start following strict semver. + +## 0.1.0 (2018-10-15) + +- First release on PyPI.
diff --git a/third_party/python/glean_parser/glean_parser-14.0.1.dist-info/RECORD b/third_party/python/glean_parser/glean_parser-14.0.1.dist-info/RECORD new file mode 100644 index 0000000000..700ca80797 --- /dev/null +++ b/third_party/python/glean_parser/glean_parser-14.0.1.dist-info/RECORD @@ -0,0 +1,48 @@ +glean_parser/__init__.py,sha256=bJljD052_0y-efcBhYpllICVCXOMHLcXRLNyrvfgt5A,533 +glean_parser/__main__.py,sha256=Rw0PpuQtAvdHJMK1YLozeZkc6x1yjeNZwidu4faovdk,8633 +glean_parser/coverage.py,sha256=2IwC4XMDtDamMkBFoYilmqJzW4gyypq65YVCur8SNas,4405 +glean_parser/data_review.py,sha256=BweeeTkNNS6HrIDkztawhbDByrk_-Avxpg7YeST3VAs,2152 +glean_parser/go_server.py,sha256=s6lxK9IAFY55pNl3Rv4MHlV-nQwSoyhO9ppTQE9VCik,5346 +glean_parser/javascript.py,sha256=w4ZhNBHBKWYk0h3t7G0Ud2tR__hRqzn9dlEXNKLdQrA,11230 +glean_parser/javascript_server.py,sha256=PZSTl63TR3cY8Y99jXMOLu-8rzgQarymzjnHJm9aYK0,8389 +glean_parser/kotlin.py,sha256=5nXnen4s2YOj503Z77HVTUgDHWdulB8BMl8vOie38o4,13365 +glean_parser/lint.py,sha256=STqdgyOhR4Q3fHivSizgn9bOOyqrNHhzjaqyJxz6qzI,19948 +glean_parser/markdown.py,sha256=GkCr1CrV6mnRQseT6FO1-JJ7Eup8X3lxUfRMBTxXpe4,9066 +glean_parser/metrics.py,sha256=YAO8wPuRHTLkdT9M4zh9ZwoFI1_VS8O9oQqwZNYyDp0,14612 +glean_parser/parser.py,sha256=3-uF-Hi5LlvdFc1NxZOKX0EoEyekZGnZV094eTIJut0,16361 +glean_parser/pings.py,sha256=-CIiMBVOTFULmNybV8YTFI7vmfOYOGQ5TD9hEfYPUII,3435 +glean_parser/python_server.py,sha256=ERpYcbSwF19xKFagxX0mZAvlR1y6D7Ah5DSvW8LipCY,4791 +glean_parser/ruby_server.py,sha256=e5lkfcLQAUMUBQDCjqNU82LkdUzT5x-G6HOnsUInbsU,5190 +glean_parser/rust.py,sha256=UEHeIZlToxCBelfec5sl_l_uLZfk8f_OUXqa_ZoEvnk,7330 +glean_parser/swift.py,sha256=paUzF6tItdktFwIQYCKsYpqXfn8zxR2coU_jMYrmwlc,8957 +glean_parser/tags.py,sha256=bemKYvcbMO4JrghiNSe-A4BNNDtx_FlUPkgrPPJy84Y,1391 +glean_parser/translate.py,sha256=luKQoraARZ2tjenHs0SVtCxflnYaMkzPYFfKEdKdSqQ,8403 +glean_parser/translation_options.py,sha256=Lxzr6G7MP0tC_ZYlZXftS4j0SLiqO-5mGVTEc7ggXis,2037 +glean_parser/util.py,sha256=wftmoWBUQM_o7pUwdhBp3HuDCVHIBw1PXtrfxwPLD0Q,16187 +glean_parser/validate_ping.py,sha256=0TNvILH6dtzJDys3W8Kqorw6kk03me73OCUDtpoHcXU,2118 +glean_parser/schemas/metrics.1-0-0.schema.yaml,sha256=cND3cvi6iBfPUVmtfIBQfGJV9AALpbvN7nu8E33_J-o,19566 +glean_parser/schemas/metrics.2-0-0.schema.yaml,sha256=wx1q0L4C0-Vcwk1SPU6t8OfjDEQvgrwwEG6xfSHO1MI,26365 +glean_parser/schemas/pings.1-0-0.schema.yaml,sha256=hwCnsKpEysmrmVp-QHGBArEkVY3vaU1rVsxlTwhAzws,4315 +glean_parser/schemas/pings.2-0-0.schema.yaml,sha256=f8PClAlMoLTmX6ANq8Ai0CpiE74i3LOgU5SoTJpoh0M,6149 +glean_parser/schemas/tags.1-0-0.schema.yaml,sha256=OGXIJlvvVW1vaqB_NVZnwKeZ-sLlfH57vjBSHbj6DNI,1231 +glean_parser/templates/data_review.jinja2,sha256=jeYU29T1zLSyu9fKBBFu5BFPfIw8_hmOUXw8RXhRXK8,3287 +glean_parser/templates/go_server.jinja2,sha256=Jy1e0uQqr_WZNoj-AWnygRmygX2jyj_GQMMV8mSah2k,6825 +glean_parser/templates/javascript.buildinfo.jinja2,sha256=4mXiZCQIk9if4lxlA05kpSIL4a95IdwGwqle2OqqNAs,474 +glean_parser/templates/javascript.jinja2,sha256=cT_bG-jC6m4afECXmcsqHwiiHjRuVtJnfv90OD2Mwxw,2669 +glean_parser/templates/javascript_server.jinja2,sha256=k-XI3QIhHQ1vbIPqSMTmCu93b1oZhm7KLmx9LfO3IJ0,9472 +glean_parser/templates/kotlin.buildinfo.jinja2,sha256=X0lk2SNu5OIIj2i6mUyF9CWFQIonLgfqkgT5fA-5G6c,920 +glean_parser/templates/kotlin.geckoview.jinja2,sha256=MJOgtoDXmBjE9pwk-G6T89y36RZuMbDWM_-DBN_gFJo,5099 +glean_parser/templates/kotlin.jinja2,sha256=npMgDdWD9OItOZQ-dyLQZn_IKgnzee2EdJynhUa1ig8,7690 +glean_parser/templates/markdown.jinja2,sha256=vAHHGGm28HRDPd3zO_wQMAUZIuxE9uQ7hl3NpXxcKV4,3425 
+glean_parser/templates/python_server.jinja2,sha256=gu2C1rkn760IqBCG2SWaK7o32T1ify94wDEsudLPUg8,7260 +glean_parser/templates/qmldir.jinja2,sha256=m6IGsp-tgTiOfQ7VN8XW6GqX0gJqJkt3B6Pkaul6FVo,156 +glean_parser/templates/ruby_server.jinja2,sha256=vm4BEenOqzomQNTLFfMOzlWHARnsWUjTBbnR-v2cadI,6247 +glean_parser/templates/rust.jinja2,sha256=Ir_JqWRIUs1KLoYNDolgTRjWfWdzzBfouCP-YeTJa-c,5495 +glean_parser/templates/swift.jinja2,sha256=4f993l_zZk_Tz1efiz3nbvDK1H3Uq3dWQ2T6glT9XQ4,6695 +glean_parser-14.0.1.dist-info/AUTHORS.md,sha256=yxgj8MioO4wUnrh0gmfb8l3DJJrf-l4HmmEDbQsbbNI,455 +glean_parser-14.0.1.dist-info/LICENSE,sha256=HyVuytGSiAUQ6ErWBHTqt1iSGHhLmlC8fO7jTCuR8dU,16725 +glean_parser-14.0.1.dist-info/METADATA,sha256=Ghvw-Y7woQUJ38P8TYT5TFt8sL61GJoZPBajaB0WLeQ,32276 +glean_parser-14.0.1.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92 +glean_parser-14.0.1.dist-info/entry_points.txt,sha256=mf9d3sv8BwSjjR58x9KDnpVkONCnv3fPQC2NjJl15Xg,68 +glean_parser-14.0.1.dist-info/top_level.txt,sha256=q7T3duD-9tYZFyDry6Wv2LcdMsK2jGnzdDFhxWcT2Z8,13 +glean_parser-14.0.1.dist-info/RECORD,, diff --git a/third_party/python/glean_parser/glean_parser-14.0.1.dist-info/WHEEL b/third_party/python/glean_parser/glean_parser-14.0.1.dist-info/WHEEL new file mode 100644 index 0000000000..bab98d6758 --- /dev/null +++ b/third_party/python/glean_parser/glean_parser-14.0.1.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.43.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/third_party/python/glean_parser/glean_parser-14.0.1.dist-info/entry_points.txt b/third_party/python/glean_parser/glean_parser-14.0.1.dist-info/entry_points.txt new file mode 100644 index 0000000000..08fde9d655 --- /dev/null +++ b/third_party/python/glean_parser/glean_parser-14.0.1.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[console_scripts] +glean_parser = glean_parser.__main__:main_wrapper diff --git a/third_party/python/glean_parser/glean_parser-14.0.1.dist-info/top_level.txt b/third_party/python/glean_parser/glean_parser-14.0.1.dist-info/top_level.txt new file mode 100644 index 0000000000..a7f3a37918 --- /dev/null +++ b/third_party/python/glean_parser/glean_parser-14.0.1.dist-info/top_level.txt @@ -0,0 +1 @@ +glean_parser diff --git a/third_party/python/glean_parser/glean_parser/javascript_server.py b/third_party/python/glean_parser/glean_parser/javascript_server.py index f5099d2660..060575f38c 100644 --- a/third_party/python/glean_parser/glean_parser/javascript_server.py +++ b/third_party/python/glean_parser/glean_parser/javascript_server.py @@ -42,9 +42,12 @@ from . import util SUPPORTED_METRIC_TYPES = ["string", "event"] -def event_class_name(ping_name: str, event_metric_exists: bool) -> str: +def event_class_name( + ping_name: str, metrics_by_type: Dict[str, List[metrics.Metric]] +) -> str: # For compatibility with FxA codebase we don't want to add "Logger" suffix # when custom pings without event metrics are used. 
+ event_metric_exists = "event" in metrics_by_type suffix = "Logger" if event_metric_exists else "" return util.Camelize(ping_name) + "ServerEvent" + suffix @@ -61,10 +64,13 @@ def generate_js_metric_type(metric: metrics.Metric) -> str: return metric.type -def generate_ping_factory_method(ping: str, event_metric_exists: bool) -> str: +def generate_ping_factory_method( + ping: str, metrics_by_type: Dict[str, List[metrics.Metric]] +) -> str: # `ServerEventLogger` better describes role of the class that this factory # method generates, but for compatibility with existing FxA codebase # we use `Event` suffix if no event metrics are defined. + event_metric_exists = "event" in metrics_by_type suffix = "ServerEventLogger" if event_metric_exists else "Event" return f"create{util.Camelize(ping)}{suffix}" @@ -136,6 +142,12 @@ def output( metrics_list = metrics_by_type.setdefault(metric.type, []) metrics_list.append(metric) + # Order pings_to_metrics for backwards compatibility with the existing FxA codebase. + # Put pings without `event` type metrics first. + ping_to_metrics = dict( + sorted(ping_to_metrics.items(), key=lambda item: "event" in item[1]) + ) + PING_METRIC_ERROR_MSG = ( " Server-side environment is simplified and this" + " parser doesn't generate individual metric files. Make sure to pass all" diff --git a/third_party/python/glean_parser/glean_parser/kotlin.py b/third_party/python/glean_parser/glean_parser/kotlin.py index 82cc63d237..6d9ea8dcf8 100644 --- a/third_party/python/glean_parser/glean_parser/kotlin.py +++ b/third_party/python/glean_parser/glean_parser/kotlin.py @@ -107,6 +107,11 @@ def type_name(obj: Union[metrics.Metric, pings.Ping]) -> str: return "{}<{}>".format(class_name(obj.type), generic) + generate_structure = getattr(obj, "_generate_structure", []) + if len(generate_structure): + generic = util.Camelize(obj.name) + "Object" + return "{}<{}>".format(class_name(obj.type), generic) + return class_name(obj.type) @@ -125,6 +130,21 @@ def extra_type_name(typ: str) -> str: return "UNSUPPORTED" +def structure_type_name(typ: str) -> str: + """ + Returns the corresponding Kotlin type for structure items. + """ + + if typ == "boolean": + return "Boolean" + elif typ == "string": + return "String" + elif typ == "number": + return "Int" + else: + return "UNSUPPORTED" + + def class_name(obj_type: str) -> str: """ Returns the Kotlin class name for a given metric or ping type. @@ -320,6 +340,7 @@ def output_kotlin( ("type_name", type_name), ("extra_type_name", extra_type_name), ("class_name", class_name), + ("structure_type_name", structure_type_name), ), ) @@ -333,6 +354,9 @@ def output_kotlin( has_labeled_metrics = any( getattr(metric, "labeled", False) for metric in category_val.values() ) + has_object_metrics = any( + isinstance(metric, metrics.Object) for metric in category_val.values() + ) with filepath.open("w", encoding="utf-8") as fd: fd.write( @@ -346,6 +370,7 @@ def output_kotlin( ping_args=util.ping_args, namespace=namespace, has_labeled_metrics=has_labeled_metrics, + has_object_metrics=has_object_metrics, glean_namespace=glean_namespace, ) ) diff --git a/third_party/python/glean_parser/glean_parser/parser.py b/third_party/python/glean_parser/glean_parser/parser.py index 5ca584ac1e..158676be73 100644 --- a/third_party/python/glean_parser/glean_parser/parser.py +++ b/third_party/python/glean_parser/glean_parser/parser.py @@ -11,7 +11,7 @@ Code for parsing metrics.yaml files. 
import functools from pathlib import Path import textwrap -from typing import Any, Dict, Generator, Iterable, Optional, Tuple, Union +from typing import Any, cast, Dict, Generator, Iterable, Optional, Set, Tuple, Union import jsonschema # type: ignore from jsonschema.exceptions import ValidationError # type: ignore @@ -267,6 +267,7 @@ def _instantiate_pings( """ global_no_lint = content.get("no_lint", []) assert isinstance(global_no_lint, list) + ping_schedule_reverse_map: Dict[str, Set[str]] = dict() for ping_key, ping_val in sorted(content.items()): if ping_key.startswith("$"): @@ -284,6 +285,22 @@ def _instantiate_pings( if not isinstance(ping_val, dict): raise TypeError(f"Invalid content for ping {ping_key}") ping_val["name"] = ping_key + + if "metadata" in ping_val and "ping_schedule" in ping_val["metadata"]: + if ping_key in ping_val["metadata"]["ping_schedule"]: + yield util.format_error( + filepath, + f"For ping '{ping_key}'", + "ping_schedule contains its own ping name", + ) + continue + for ping_schedule in ping_val["metadata"]["ping_schedule"]: + if ping_schedule not in ping_schedule_reverse_map: + ping_schedule_reverse_map[ping_schedule] = set() + ping_schedule_reverse_map[ping_schedule].add(ping_key) + + del ping_val["metadata"]["ping_schedule"] + try: ping_obj = Ping( defined_in=getattr(ping_val, "defined_in", None), @@ -313,6 +330,11 @@ def _instantiate_pings( all_objects.setdefault("pings", {})[ping_key] = ping_obj sources[ping_key] = filepath + for scheduler, scheduled in ping_schedule_reverse_map.items(): + if isinstance(all_objects["pings"][scheduler], Ping): + scheduler_obj: Ping = cast(Ping, all_objects["pings"][scheduler]) + scheduler_obj.schedules_pings = sorted(list(scheduled)) + def _instantiate_tags( all_objects: ObjectTree, diff --git a/third_party/python/glean_parser/glean_parser/pings.py b/third_party/python/glean_parser/glean_parser/pings.py index b4145ea68d..b3f2476c9a 100644 --- a/third_party/python/glean_parser/glean_parser/pings.py +++ b/third_party/python/glean_parser/glean_parser/pings.py @@ -31,6 +31,7 @@ class Ping: reasons: Optional[Dict[str, str]] = None, defined_in: Optional[Dict] = None, no_lint: Optional[List[str]] = None, + enabled: Optional[bool] = None, _validated: bool = False, ): # Avoid cyclical import @@ -46,6 +47,10 @@ class Ping: self.metadata = metadata self.precise_timestamps = self.metadata.get("precise_timestamps", True) self.include_info_sections = self.metadata.get("include_info_sections", True) + if enabled is None: + enabled = True + self.enabled = enabled + self.schedules_pings: List[str] = [] if data_reviews is None: data_reviews = [] self.data_reviews = data_reviews @@ -94,6 +99,7 @@ class Ping: modified_dict = util.remove_output_params( modified_dict, "include_info_sections" ) + modified_dict = util.remove_output_params(modified_dict, "schedules_pings") return modified_dict def identifier(self) -> str: diff --git a/third_party/python/glean_parser/glean_parser/schemas/pings.2-0-0.schema.yaml b/third_party/python/glean_parser/glean_parser/schemas/pings.2-0-0.schema.yaml index 6679a8066b..345812c805 100644 --- a/third_party/python/glean_parser/glean_parser/schemas/pings.2-0-0.schema.yaml +++ b/third_party/python/glean_parser/glean_parser/schemas/pings.2-0-0.schema.yaml @@ -96,6 +96,16 @@ additionalProperties: Interaction with `include_client_id`: `include_client_id` only takes effect when `metadata.include_info_sections` is `true`. 
type: boolean + ping_schedule: + title: Ping Schedule + description: | + An optional array of ping names. When one of the listed pings is + sent, then this ping will also be sent. A ping cannot list its own + name in `ping_schedule`. + type: array + items: + type: string + maxLength: 30 default: {} @@ -175,6 +185,18 @@ additionalProperties: additionalProperties: type: string + + enabled: + title: Whether or not this ping is enabled + description: | + **Optional.** + + When `true`, the ping will be sent as usual. + When `false`, the ping will not be sent. Data will continue to + be collected, but it will not be cleared when the ping is submitted. + + Defaults to `true` if omitted. + type: boolean + + no_lint: title: Lint checks to skip description: | diff --git a/third_party/python/glean_parser/glean_parser/swift.py b/third_party/python/glean_parser/glean_parser/swift.py index c745c4d9ac..b121933b0f 100644 --- a/third_party/python/glean_parser/glean_parser/swift.py +++ b/third_party/python/glean_parser/glean_parser/swift.py @@ -106,12 +106,17 @@ def type_name(obj: Union[metrics.Metric, pings.Ping]) -> str: return "{}<{}>".format(class_name(obj.type), generic) + generate_structure = getattr(obj, "_generate_structure", []) + if len(generate_structure): + generic = util.Camelize(obj.name) + "Object" + return "{}<{}>".format(class_name(obj.type), generic) + return class_name(obj.type) def extra_type_name(typ: str) -> str: """ - Returns the corresponding Kotlin type for event's extra key types. + Returns the corresponding Swift type for event's extra key types. """ if typ == "boolean": @@ -124,6 +129,21 @@ def extra_type_name(typ: str) -> str: return "UNSUPPORTED" +def structure_type_name(typ: str) -> str: + """ + Returns the corresponding Swift type for structure items. + """ + + if typ == "boolean": + return "Bool" + elif typ == "string": + return "String" + elif typ == "number": + return "Int64" + else: + return "UNSUPPORTED" + + def class_name(obj_type: str) -> str: """ Returns the Swift class name for a given metric or ping type. @@ -215,6 +235,7 @@ def output_swift( ("class_name", class_name), ("variable_name", variable_name), ("extra_type_name", extra_type_name), + ("structure_type_name", structure_type_name), ), ) diff --git a/third_party/python/glean_parser/glean_parser/templates/javascript_server.jinja2 b/third_party/python/glean_parser/glean_parser/templates/javascript_server.jinja2 index 0a89f081f6..9df299fd2b 100644 --- a/third_party/python/glean_parser/glean_parser/templates/javascript_server.jinja2 +++ b/third_party/python/glean_parser/glean_parser/templates/javascript_server.jinja2 @@ -21,7 +21,7 @@ type LoggerOptions = { app: string; fmt?: 'heka' }; type Event = { category: string; name: string; - extra: Record; + extra?: Record; timestamp?: number; }; {% endif %} @@ -30,14 +30,14 @@ type Event = { let _logger{% if lang == "typescript" %}: Logger{% endif %}; {% for ping, metrics_by_type in pings.items() %} -class {{ ping|event_class_name(event_metric_exists) }} { +class {{ ping|event_class_name(metrics_by_type) }} { {% if lang == "typescript" %} _applicationId: string; _appDisplayVersion: string; _channel: string; {% endif %} /** - * Create {{ ping|event_class_name(event_metric_exists) }} instance. + * Create {{ ping|event_class_name(metrics_by_type) }} instance. * * @param {string} applicationId - The application ID. * @param {string} appDisplayVersion - The application display version.
@@ -72,7 +72,7 @@ class {{ ping|event_class_name(event_metric_exists) }} { {% endif %} } } - {% if event_metric_exists %} + {% if 'event' in metrics_by_type %} #record({ {% else %} /** @@ -99,28 +99,28 @@ class {{ ping|event_class_name(event_metric_exists) }} { {% endfor %} {% endif %} {% endfor %} - {% if event_metric_exists %} + {% if 'event' in metrics_by_type %} event, {% endif %} {% if lang == "typescript" %} }: { - user_agent: string, - ip_address: string, + user_agent: string; + ip_address: string; {% for metric_type, metrics in metrics_by_type.items() %} {% if metric_type != 'event' %} {% for metric in metrics %} - {{ metric|metric_argument_name }}: {{ metric|js_metric_type }}, + {{ metric|metric_argument_name }}: {{ metric|js_metric_type }}; {% endfor %} {% endif %} {% endfor %} - {% if event_metric_exists %} - event: Event + {% if 'event' in metrics_by_type %} + event: Event; {% endif %} {% endif %} }) { const now = new Date(); const timestamp = now.toISOString(); - {% if event_metric_exists %} + {% if 'event' in metrics_by_type %} event.timestamp = now.getTime(); {% endif %} const eventPayload = { @@ -135,7 +135,7 @@ class {{ ping|event_class_name(event_metric_exists) }} { {% endif %} {% endfor %} }, - {% if event_metric_exists %} + {% if 'event' in metrics_by_type %} events: [event], {% endif %} ping_info: { @@ -171,7 +171,7 @@ class {{ ping|event_class_name(event_metric_exists) }} { // this is similar to how FxA currently logs with mozlog: https://github.com/mozilla/fxa/blob/4c5c702a7fcbf6f8c6b1f175e9172cdd21471eac/packages/fxa-auth-server/lib/log.js#L289 _logger.info(GLEAN_EVENT_MOZLOG_TYPE, ping); } - {% if event_metric_exists %} + {% if 'event' in metrics_by_type %} {% for event in metrics_by_type["event"] %} /** * Record and submit a {{ event.category }}_{{ event.name }} event: @@ -209,27 +209,27 @@ class {{ ping|event_class_name(event_metric_exists) }} { {% endfor %} {% if lang == "typescript" %} }: { - user_agent: string, - ip_address: string, + user_agent: string; + ip_address: string; {% for metric_type, metrics in metrics_by_type.items() %} {% if metric_type != 'event' %} {% for metric in metrics %} - {{ metric|metric_argument_name }}: {{ metric|js_metric_type }}, + {{ metric|metric_argument_name }}: {{ metric|js_metric_type }}; {% endfor %} {% endif %} {% endfor %} {% for extra, metadata in event.extra_keys.items() %} - {{ extra }}: {{metadata.type}}, + {{ extra }}: {{metadata.type}}; {% endfor %} {% endif %} }) { - let event = { - 'category': '{{ event.category }}', - 'name': '{{ event.name }}', + const event = { + category: '{{ event.category }}', + name: '{{ event.name }}', {% if event.extra_keys %} - 'extra': { + extra: { {% for extra, metadata in event.extra_keys.items() %} - '{{ extra }}': {{ extra }}, + {{ extra }}: {{ extra }}, {% endfor %} }, {% endif %} @@ -244,14 +244,14 @@ class {{ ping|event_class_name(event_metric_exists) }} { {% endfor %} {% endif %} {% endfor %} - event + event, }); } {% endfor %} {% endif %} } {% endfor %} -{% for ping in pings %} +{% for ping, metrics_by_type in pings.items() %} /** * Factory function that creates an instance of Glean Server Event Logger to @@ -262,11 +262,11 @@ class {{ ping|event_class_name(event_metric_exists) }} { * @param {Object} logger_options - The logger options. * @returns {EventsServerEventLogger} An instance of EventsServerEventLogger. 
*/ -export const {{ ping|factory_method(event_metric_exists) }} = function ({ +export const {{ ping|factory_method(metrics_by_type) }} = function ({ applicationId, appDisplayVersion, channel, - logger_options + logger_options, {% if lang == "typescript" %} }: { applicationId: string; @@ -275,7 +275,7 @@ export const {{ ping|factory_method(event_metric_exists) }} = function ({ logger_options: LoggerOptions; {% endif %} }) { - return new {{ ping|event_class_name(event_metric_exists) }}( + return new {{ ping|event_class_name(metrics_by_type) }}( applicationId, appDisplayVersion, channel, diff --git a/third_party/python/glean_parser/glean_parser/templates/kotlin.jinja2 b/third_party/python/glean_parser/glean_parser/templates/kotlin.jinja2 index bd800af01d..71ba386a4c 100644 --- a/third_party/python/glean_parser/glean_parser/templates/kotlin.jinja2 +++ b/third_party/python/glean_parser/glean_parser/templates/kotlin.jinja2 @@ -66,6 +66,61 @@ data class {{ obj.name|Camelize }}{{ suffix }}( } {%- endmacro -%} +{%- macro generate_structure(name, struct) %} +{%- if struct.type == "array" -%} + @Serializable + data class {{ name }}(var items: MutableList<{{ name }}Item> = mutableListOf()) : ObjectSerialize { + fun add(elem: {{ name }}Item) = items.add(elem) + + fun addAll(elements: Collection<{{ name }}Item>) = items.addAll(elements) + + fun clear() = items.clear() + + fun remove(element: {{ name }}Item) = items.remove(element) + fun removeAll(elements: Collection<{{ name }}Item>) = items.removeAll(elements) + fun removeAt(index: Int) = items.removeAt(index) + + fun set(index: Int, element: {{ name }}Item) = items.set(index, element) + + override fun intoSerializedObject(): String { + return Json.encodeToString(items) + } + } + + {{ generate_structure(name ~ "Item", struct["items"]) }} + +{%- elif struct.type == "object" -%} + @Serializable + data class {{ name }}( + {% for itemname, val in struct.properties.items() %} + {% if val.type == "object" %} + var {{itemname|camelize}}: {{ name ~ "Item" ~ itemname|Camelize ~ "Object" }}? = null, + {% elif val.type == "array" %} + var {{itemname|camelize}}: {{ name ~ "Item" ~ itemname|Camelize }} = {{ name ~ "Item" ~ itemname|Camelize }}(), + {% else %} + var {{itemname|camelize}}: {{val.type|structure_type_name}}? 
= null, + {% endif %} + {% endfor %} + ): ObjectSerialize { + override fun intoSerializedObject(): String { + return Json.encodeToString(this) + } + } + + {% for itemname, val in struct.properties.items() %} + {% if val.type == "array" %} + {% set nested_name = name ~ "Item" ~ itemname|Camelize %} + {{ generate_structure(nested_name, val) }} + {% elif val.type == "object" %} + {% set nested_name = name ~ "Item" ~ itemname|Camelize ~ "Object" %} + {{ generate_structure(nested_name, val) }} + {% endif %} + {% endfor %} + +{% endif %} + +{% endmacro %} + /* ktlint-disable no-blank-line-before-rbrace */ @file:Suppress("PackageNaming", "MaxLineLength") package {{ namespace }} @@ -76,8 +131,9 @@ import {{ glean_namespace }}.private.HistogramType // ktlint-disable import-orde import {{ glean_namespace }}.private.Lifetime // ktlint-disable import-ordering no-unused-imports import {{ glean_namespace }}.private.MemoryUnit // ktlint-disable import-ordering no-unused-imports import {{ glean_namespace }}.private.NoExtras // ktlint-disable import-ordering no-unused-imports -import {{ glean_namespace }}.private.ReasonCode // ktlint-disable import-ordering no-unused-imports import {{ glean_namespace }}.private.NoReasonCodes // ktlint-disable import-ordering no-unused-imports +import {{ glean_namespace }}.private.ObjectSerialize // ktlint-disable import-ordering no-unused-imports +import {{ glean_namespace }}.private.ReasonCode // ktlint-disable import-ordering no-unused-imports import {{ glean_namespace }}.private.TimeUnit // ktlint-disable import-ordering no-unused-imports {% for obj_type in obj_types %} import {{ glean_namespace }}.private.{{ obj_type }} // ktlint-disable import-ordering @@ -85,6 +141,11 @@ import {{ glean_namespace }}.private.{{ obj_type }} // ktlint-disable import-ord {% if has_labeled_metrics %} import {{ glean_namespace }}.private.LabeledMetricType // ktlint-disable import-ordering {% endif %} +{% if has_object_metrics %} +import kotlinx.serialization.Serializable +import kotlinx.serialization.encodeToString +import kotlinx.serialization.json.Json +{% endif %} internal object {{ category_name|Camelize }} { {% for obj in objs.values() %} @@ -97,6 +158,9 @@ internal object {{ category_name|Camelize }} { {% endfor %} {% endif %} {% else %} + {% if obj|attr("_generate_structure") %} + {{ generate_structure(obj.name|Camelize ~ "Object", obj._generate_structure) }} + {%- endif %} {% if obj|attr("_generate_enums") %} {% for name, suffix in obj["_generate_enums"] %} {% if obj|attr(name)|length %} diff --git a/third_party/python/glean_parser/glean_parser/templates/rust.jinja2 b/third_party/python/glean_parser/glean_parser/templates/rust.jinja2 index 4c54dd2b2c..269a007ac5 100644 --- a/third_party/python/glean_parser/glean_parser/templates/rust.jinja2 +++ b/third_party/python/glean_parser/glean_parser/templates/rust.jinja2 @@ -87,7 +87,7 @@ impl ExtraKeys for {{ obj.name|Camelize }}{{ suffix }} { /// {{ obj.description|wordwrap() | replace('\n', '\n/// ') }} #[rustfmt::skip] pub static {{ obj.name|snake_case }}: ::glean::private::__export::Lazy<::glean::private::PingType> = - ::glean::private::__export::Lazy::new(|| ::glean::private::PingType::new("{{ obj.name }}", {{ obj.include_client_id|rust }}, {{ obj.send_if_empty|rust }}, {{ obj.precise_timestamps|rust }}, {{ obj.include_info_sections|rust }}, {{ obj.reason_codes|rust }})); + ::glean::private::__export::Lazy::new(|| ::glean::private::PingType::new("{{ obj.name }}", {{ obj.include_client_id|rust }}, {{ obj.send_if_empty|rust }}, {{ 
obj.precise_timestamps|rust }}, {{ obj.include_info_sections|rust }}, {{ obj.enabled|rust }}, {{ obj.schedules_pings|rust }}, {{ obj.reason_codes|rust }})); {% endfor %} {% else %} pub mod {{ category.name|snake_case }} { diff --git a/third_party/python/glean_parser/glean_parser/templates/swift.jinja2 b/third_party/python/glean_parser/glean_parser/templates/swift.jinja2 index 714bf20ec2..fe51a078bc 100644 --- a/third_party/python/glean_parser/glean_parser/templates/swift.jinja2 +++ b/third_party/python/glean_parser/glean_parser/templates/swift.jinja2 @@ -11,7 +11,7 @@ Jinja2 template is not. Please file bugs! #} /* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ -{% macro obj_declaration(obj, suffix='', access='') %} +{%- macro obj_declaration(obj, suffix='', access='') %} {{ access }}static let {{ obj.name|camelize|variable_name }}{{ suffix }} = {{ obj|type_name }}( // generated from {{ obj.identifier() }} CommonMetricData( {% for arg_name in common_metric_args if obj[arg_name] is defined %} @@ -24,7 +24,7 @@ Jinja2 template is not. Please file bugs! #} ) {% endmacro %} -{% macro struct_decl(obj, name, suffix) %} +{%- macro struct_decl(obj, name, suffix) %} struct {{ obj.name|Camelize }}{{ suffix }}: EventExtras { {% for item, typ in obj|attr(name) %} var {{ item|camelize|variable_name }}: {{typ|extra_type_name}}? @@ -44,6 +44,46 @@ struct {{ obj.name|Camelize }}{{ suffix }}: EventExtras { } {% endmacro %} +{%- macro generate_structure(name, struct) %} +{%- if struct.type == "array" -%} + typealias {{ name }} = [{{ name }}Item] + + {{ generate_structure(name ~ "Item", struct["items"]) }} + +{%- elif struct.type == "object" -%} + struct {{ name }}: Codable, Equatable, ObjectSerialize { + {% for itemname, val in struct.properties.items() %} + {% if val.type == "object" %} + var {{itemname|camelize|variable_name}}: {{ name ~ "Item" ~ itemname|Camelize ~ "Object" }}? + {% elif val.type == "array" %} + var {{itemname|camelize|variable_name}}: {{ name ~ "Item" ~ itemname|Camelize }} + {% else %} + var {{itemname|camelize|variable_name}}: {{val.type|structure_type_name}}? + {% endif %} + {% endfor %} + + func intoSerializedObject() -> String { + let jsonEncoder = JSONEncoder() + let jsonData = try! jsonEncoder.encode(self) + let json = String(data: jsonData, encoding: String.Encoding.utf8)! 
+ return json + } + } + + {% for itemname, val in struct.properties.items() %} + {% if val.type == "array" %} + {% set nested_name = name ~ "Item" ~ itemname|Camelize %} + {{ generate_structure(nested_name, val) }} + {% elif val.type == "object" %} + {% set nested_name = name ~ "Item" ~ itemname|Camelize ~ "Object" %} + {{ generate_structure(nested_name, val) }} + {% endif %} + {% endfor %} + +{%- endif -%} + +{% endmacro %} + {% if not allow_reserved %} import {{ glean_namespace }} @@ -97,6 +137,8 @@ extension {{ namespace }} { sendIfEmpty: {{obj.send_if_empty|swift}}, preciseTimestamps: {{obj.precise_timestamps|swift}}, includeInfoSections: {{obj.include_info_sections|swift}}, + enabled: {{obj.enabled|swift}}, + schedulesPings: {{obj.schedules_pings|swift}}, reasonCodes: {{obj.reason_codes|swift}} ) @@ -106,6 +148,9 @@ extension {{ namespace }} { {% else %} enum {{ category.name|Camelize }} { {% for obj in category.objs.values() %} + {% if obj|attr("_generate_structure") %} + {{ generate_structure(obj.name|Camelize ~ "Object", obj._generate_structure) }} + {%- endif %} {% if obj|attr("_generate_enums") %} {% for name, suffix in obj["_generate_enums"] %} {% if obj|attr(name)|length %} diff --git a/third_party/python/glean_parser/glean_parser/util.py b/third_party/python/glean_parser/glean_parser/util.py index f8bc7d4f53..a61e318dbe 100644 --- a/third_party/python/glean_parser/glean_parser/util.py +++ b/third_party/python/glean_parser/glean_parser/util.py @@ -531,6 +531,8 @@ ping_args = [ "send_if_empty", "precise_timestamps", "include_info_sections", + "enabled", + "schedules_pings", "reason_codes", ] diff --git a/third_party/python/poetry.lock b/third_party/python/poetry.lock index 97513f8ba5..0547486b27 100644 --- a/third_party/python/poetry.lock +++ b/third_party/python/poetry.lock @@ -592,14 +592,14 @@ files = [ [[package]] name = "glean-parser" -version = "13.0.1" +version = "14.0.1" description = "Parser tools for Mozilla's Glean telemetry" category = "main" optional = false python-versions = "*" files = [ - {file = "glean_parser-13.0.1-py3-none-any.whl", hash = "sha256:8421c88f3673dd195d0cde635f4f09c9bfd0c9709ad3d28c8b201b3b7145e257"}, - {file = "glean_parser-13.0.1.tar.gz", hash = "sha256:feead4cbec6930ed38a48df5bae9eb4ee486bb4026ddf2f3206b85f80279d1e7"}, + {file = "glean_parser-14.0.1-py3-none-any.whl", hash = "sha256:3275ca235885c99da659fa7d9bf929b8fb020df79d26fcbec317328c369cd039"}, + {file = "glean_parser-14.0.1.tar.gz", hash = "sha256:3e9e5f99ad8592300e364b70d6247b21c445774a73a2ad274677fb58a0065809"}, ] [package.dependencies] @@ -1161,7 +1161,6 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = 
"PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -1376,14 +1375,14 @@ test = ["aiofiles", "coverage", "flake8", "httmock", "httptest", "hypothesis", " [[package]] name = "taskcluster-taskgraph" -version = "6.3.0" +version = "8.0.1" description = "Build taskcluster taskgraphs" category = "main" optional = false python-versions = "*" files = [ - {file = "taskcluster-taskgraph-6.3.0.tar.gz", hash = "sha256:a32ac3aad6aa90c593268bee8864d9f773e86e1f53d0b513d128d59b52c1e20b"}, - {file = "taskcluster_taskgraph-6.3.0-py3-none-any.whl", hash = "sha256:43ce187215ab8658c06ad80f46c4606ce51b9986f4365d541416eecf9d6a2c28"}, + {file = "taskcluster-taskgraph-8.0.1.tar.gz", hash = "sha256:21387537bbebab2a7b1890d03e20e49379bdda65efd45ca7fb8d01f5c29e1797"}, + {file = "taskcluster_taskgraph-8.0.1-py3-none-any.whl", hash = "sha256:14500bc703f64eb002c0cd505caaf2d34ffc0ae66d109b108e738661da1ae09c"}, ] [package.dependencies] @@ -1625,4 +1624,4 @@ testing = ["func-timeout", "jaraco.itertools", "pytest (>=4.6)", "pytest-black ( [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "cef77da3299e7849f5039e8d9017216048d4ca56af298209e5bf3db7f92c2d4c" +content-hash = "8e72dc9ba9b4f08d27d90f99666459a814d1bb293c68de222614ea57db5b70ef" diff --git a/third_party/python/requirements.in b/third_party/python/requirements.in index 9915e91957..abcfc79239 100644 --- a/third_party/python/requirements.in +++ b/third_party/python/requirements.in @@ -22,7 +22,7 @@ fluent.migrate==0.13.0 fluent.syntax==0.19.0 # Pin `frozenlist` as it is required for `aiohttp`. Use minimum required version. frozenlist==1.1.1 -glean_parser==13.0.1 +glean_parser==14.0.1 importlib-metadata==6.0.0 # required for compatibility with Flask >= 2 in tools/tryselect/selectors/chooser jinja2==3.1.2 @@ -53,7 +53,7 @@ setuptools==68.0.0 six==1.16.0 slugid==2.0.0 taskcluster==44.2.2 -taskcluster-taskgraph==6.3.0 +taskcluster-taskgraph==8.0.1 taskcluster-urls==13.0.1 toml==0.10.2 tomlkit==0.12.3 diff --git a/third_party/python/requirements.txt b/third_party/python/requirements.txt index eedc022c50..3b8c897e8a 100644 --- a/third_party/python/requirements.txt +++ b/third_party/python/requirements.txt @@ -275,9 +275,9 @@ frozenlist==1.1.1 ; python_version >= "3.8" and python_version < "4.0" \ giturlparse==0.10.0 ; python_version >= "3.8" and python_version < "4.0" \ --hash=sha256:04ba1a3a099c3093fa8d24a422913c6a9b2c2cd22bcffc939cf72e3e98f672d7 \ --hash=sha256:2595ab291d30717cda8474b874c9fd509f1b9802ad7f6968c36a45e4b13eb337 -glean-parser==13.0.1 ; python_version >= "3.8" and python_version < "4.0" \ - --hash=sha256:8421c88f3673dd195d0cde635f4f09c9bfd0c9709ad3d28c8b201b3b7145e257 \ - --hash=sha256:feead4cbec6930ed38a48df5bae9eb4ee486bb4026ddf2f3206b85f80279d1e7 +glean-parser==14.0.1 ; python_version >= "3.8" and python_version < "4.0" \ + --hash=sha256:3275ca235885c99da659fa7d9bf929b8fb020df79d26fcbec317328c369cd039 \ + --hash=sha256:3e9e5f99ad8592300e364b70d6247b21c445774a73a2ad274677fb58a0065809 idna==2.10 ; python_version >= "3.8" and python_version < "4.0" \ --hash=sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6 \ --hash=sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0 @@ -492,7 +492,6 @@ pyyaml==6.0.1 ; python_version >= "3.8" and python_version < "4.0" \ --hash=sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4 \ --hash=sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba \ 
--hash=sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8 \ - --hash=sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef \ --hash=sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5 \ --hash=sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd \ --hash=sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3 \ @@ -540,9 +539,9 @@ six==1.16.0 ; python_version >= "3.8" and python_version < "4.0" \ slugid==2.0.0 ; python_version >= "3.8" and python_version < "4.0" \ --hash=sha256:a950d98b72691178bdd4d6c52743c4a2aa039207cf7a97d71060a111ff9ba297 \ --hash=sha256:aec8b0e01c4ad32e38e12d609eab3ec912fd129aaf6b2ded0199b56a5f8fd67c -taskcluster-taskgraph==6.3.0 ; python_version >= "3.8" and python_version < "4.0" \ - --hash=sha256:43ce187215ab8658c06ad80f46c4606ce51b9986f4365d541416eecf9d6a2c28 \ - --hash=sha256:a32ac3aad6aa90c593268bee8864d9f773e86e1f53d0b513d128d59b52c1e20b +taskcluster-taskgraph==8.0.1 ; python_version >= "3.8" and python_version < "4.0" \ + --hash=sha256:14500bc703f64eb002c0cd505caaf2d34ffc0ae66d109b108e738661da1ae09c \ + --hash=sha256:21387537bbebab2a7b1890d03e20e49379bdda65efd45ca7fb8d01f5c29e1797 taskcluster-urls==13.0.1 ; python_version >= "3.8" and python_version < "4.0" \ --hash=sha256:5e25e7e6818e8877178b175ff43d2e6548afad72694aa125f404a7329ece0973 \ --hash=sha256:b25e122ecec249c4299ac7b20b08db76e3e2025bdaeb699a9d444556de5fd367 \ diff --git a/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.3.0.dist-info/LICENSE b/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.3.0.dist-info/LICENSE deleted file mode 100644 index a612ad9813..0000000000 --- a/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.3.0.dist-info/LICENSE +++ /dev/null @@ -1,373 +0,0 @@ -Mozilla Public License Version 2.0 -================================== - -1. Definitions --------------- - -1.1. "Contributor" - means each individual or legal entity that creates, contributes to - the creation of, or owns Covered Software. - -1.2. "Contributor Version" - means the combination of the Contributions of others (if any) used - by a Contributor and that particular Contributor's Contribution. - -1.3. "Contribution" - means Covered Software of a particular Contributor. - -1.4. "Covered Software" - means Source Code Form to which the initial Contributor has attached - the notice in Exhibit A, the Executable Form of such Source Code - Form, and Modifications of such Source Code Form, in each case - including portions thereof. - -1.5. "Incompatible With Secondary Licenses" - means - - (a) that the initial Contributor has attached the notice described - in Exhibit B to the Covered Software; or - - (b) that the Covered Software was made available under the terms of - version 1.1 or earlier of the License, but not also under the - terms of a Secondary License. - -1.6. "Executable Form" - means any form of the work other than Source Code Form. - -1.7. "Larger Work" - means a work that combines Covered Software with other material, in - a separate file or files, that is not Covered Software. - -1.8. "License" - means this document. - -1.9. "Licensable" - means having the right to grant, to the maximum extent possible, - whether at the time of the initial grant or subsequently, any and - all of the rights conveyed by this License. - -1.10. 
"Modifications" - means any of the following: - - (a) any file in Source Code Form that results from an addition to, - deletion from, or modification of the contents of Covered - Software; or - - (b) any new file in Source Code Form that contains any Covered - Software. - -1.11. "Patent Claims" of a Contributor - means any patent claim(s), including without limitation, method, - process, and apparatus claims, in any patent Licensable by such - Contributor that would be infringed, but for the grant of the - License, by the making, using, selling, offering for sale, having - made, import, or transfer of either its Contributions or its - Contributor Version. - -1.12. "Secondary License" - means either the GNU General Public License, Version 2.0, the GNU - Lesser General Public License, Version 2.1, the GNU Affero General - Public License, Version 3.0, or any later versions of those - licenses. - -1.13. "Source Code Form" - means the form of the work preferred for making modifications. - -1.14. "You" (or "Your") - means an individual or a legal entity exercising rights under this - License. For legal entities, "You" includes any entity that - controls, is controlled by, or is under common control with You. For - purposes of this definition, "control" means (a) the power, direct - or indirect, to cause the direction or management of such entity, - whether by contract or otherwise, or (b) ownership of more than - fifty percent (50%) of the outstanding shares or beneficial - ownership of such entity. - -2. License Grants and Conditions --------------------------------- - -2.1. Grants - -Each Contributor hereby grants You a world-wide, royalty-free, -non-exclusive license: - -(a) under intellectual property rights (other than patent or trademark) - Licensable by such Contributor to use, reproduce, make available, - modify, display, perform, distribute, and otherwise exploit its - Contributions, either on an unmodified basis, with Modifications, or - as part of a Larger Work; and - -(b) under Patent Claims of such Contributor to make, use, sell, offer - for sale, have made, import, and otherwise transfer either its - Contributions or its Contributor Version. - -2.2. Effective Date - -The licenses granted in Section 2.1 with respect to any Contribution -become effective for each Contribution on the date the Contributor first -distributes such Contribution. - -2.3. Limitations on Grant Scope - -The licenses granted in this Section 2 are the only rights granted under -this License. No additional rights or licenses will be implied from the -distribution or licensing of Covered Software under this License. -Notwithstanding Section 2.1(b) above, no patent license is granted by a -Contributor: - -(a) for any code that a Contributor has removed from Covered Software; - or - -(b) for infringements caused by: (i) Your and any other third party's - modifications of Covered Software, or (ii) the combination of its - Contributions with other software (except as part of its Contributor - Version); or - -(c) under Patent Claims infringed by Covered Software in the absence of - its Contributions. - -This License does not grant any rights in the trademarks, service marks, -or logos of any Contributor (except as may be necessary to comply with -the notice requirements in Section 3.4). - -2.4. 
Subsequent Licenses - -No Contributor makes additional grants as a result of Your choice to -distribute the Covered Software under a subsequent version of this -License (see Section 10.2) or under the terms of a Secondary License (if -permitted under the terms of Section 3.3). - -2.5. Representation - -Each Contributor represents that the Contributor believes its -Contributions are its original creation(s) or it has sufficient rights -to grant the rights to its Contributions conveyed by this License. - -2.6. Fair Use - -This License is not intended to limit any rights You have under -applicable copyright doctrines of fair use, fair dealing, or other -equivalents. - -2.7. Conditions - -Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted -in Section 2.1. - -3. Responsibilities -------------------- - -3.1. Distribution of Source Form - -All distribution of Covered Software in Source Code Form, including any -Modifications that You create or to which You contribute, must be under -the terms of this License. You must inform recipients that the Source -Code Form of the Covered Software is governed by the terms of this -License, and how they can obtain a copy of this License. You may not -attempt to alter or restrict the recipients' rights in the Source Code -Form. - -3.2. Distribution of Executable Form - -If You distribute Covered Software in Executable Form then: - -(a) such Covered Software must also be made available in Source Code - Form, as described in Section 3.1, and You must inform recipients of - the Executable Form how they can obtain a copy of such Source Code - Form by reasonable means in a timely manner, at a charge no more - than the cost of distribution to the recipient; and - -(b) You may distribute such Executable Form under the terms of this - License, or sublicense it under different terms, provided that the - license for the Executable Form does not attempt to limit or alter - the recipients' rights in the Source Code Form under this License. - -3.3. Distribution of a Larger Work - -You may create and distribute a Larger Work under terms of Your choice, -provided that You also comply with the requirements of this License for -the Covered Software. If the Larger Work is a combination of Covered -Software with a work governed by one or more Secondary Licenses, and the -Covered Software is not Incompatible With Secondary Licenses, this -License permits You to additionally distribute such Covered Software -under the terms of such Secondary License(s), so that the recipient of -the Larger Work may, at their option, further distribute the Covered -Software under the terms of either this License or such Secondary -License(s). - -3.4. Notices - -You may not remove or alter the substance of any license notices -(including copyright notices, patent notices, disclaimers of warranty, -or limitations of liability) contained within the Source Code Form of -the Covered Software, except that You may alter any license notices to -the extent required to remedy known factual inaccuracies. - -3.5. Application of Additional Terms - -You may choose to offer, and to charge a fee for, warranty, support, -indemnity or liability obligations to one or more recipients of Covered -Software. However, You may do so only on Your own behalf, and not on -behalf of any Contributor. 
You must make it absolutely clear that any -such warranty, support, indemnity, or liability obligation is offered by -You alone, and You hereby agree to indemnify every Contributor for any -liability incurred by such Contributor as a result of warranty, support, -indemnity or liability terms You offer. You may include additional -disclaimers of warranty and limitations of liability specific to any -jurisdiction. - -4. Inability to Comply Due to Statute or Regulation ---------------------------------------------------- - -If it is impossible for You to comply with any of the terms of this -License with respect to some or all of the Covered Software due to -statute, judicial order, or regulation then You must: (a) comply with -the terms of this License to the maximum extent possible; and (b) -describe the limitations and the code they affect. Such description must -be placed in a text file included with all distributions of the Covered -Software under this License. Except to the extent prohibited by statute -or regulation, such description must be sufficiently detailed for a -recipient of ordinary skill to be able to understand it. - -5. Termination --------------- - -5.1. The rights granted under this License will terminate automatically -if You fail to comply with any of its terms. However, if You become -compliant, then the rights granted under this License from a particular -Contributor are reinstated (a) provisionally, unless and until such -Contributor explicitly and finally terminates Your grants, and (b) on an -ongoing basis, if such Contributor fails to notify You of the -non-compliance by some reasonable means prior to 60 days after You have -come back into compliance. Moreover, Your grants from a particular -Contributor are reinstated on an ongoing basis if such Contributor -notifies You of the non-compliance by some reasonable means, this is the -first time You have received notice of non-compliance with this License -from such Contributor, and You become compliant prior to 30 days after -Your receipt of the notice. - -5.2. If You initiate litigation against any entity by asserting a patent -infringement claim (excluding declaratory judgment actions, -counter-claims, and cross-claims) alleging that a Contributor Version -directly or indirectly infringes any patent, then the rights granted to -You by any and all Contributors for the Covered Software under Section -2.1 of this License shall terminate. - -5.3. In the event of termination under Sections 5.1 or 5.2 above, all -end user license agreements (excluding distributors and resellers) which -have been validly granted by You or Your distributors under this License -prior to termination shall survive termination. - -************************************************************************ -* * -* 6. Disclaimer of Warranty * -* ------------------------- * -* * -* Covered Software is provided under this License on an "as is" * -* basis, without warranty of any kind, either expressed, implied, or * -* statutory, including, without limitation, warranties that the * -* Covered Software is free of defects, merchantable, fit for a * -* particular purpose or non-infringing. The entire risk as to the * -* quality and performance of the Covered Software is with You. * -* Should any Covered Software prove defective in any respect, You * -* (not any Contributor) assume the cost of any necessary servicing, * -* repair, or correction. This disclaimer of warranty constitutes an * -* essential part of this License. 
No use of any Covered Software is * -* authorized under this License except under this disclaimer. * -* * -************************************************************************ - -************************************************************************ -* * -* 7. Limitation of Liability * -* -------------------------- * -* * -* Under no circumstances and under no legal theory, whether tort * -* (including negligence), contract, or otherwise, shall any * -* Contributor, or anyone who distributes Covered Software as * -* permitted above, be liable to You for any direct, indirect, * -* special, incidental, or consequential damages of any character * -* including, without limitation, damages for lost profits, loss of * -* goodwill, work stoppage, computer failure or malfunction, or any * -* and all other commercial damages or losses, even if such party * -* shall have been informed of the possibility of such damages. This * -* limitation of liability shall not apply to liability for death or * -* personal injury resulting from such party's negligence to the * -* extent applicable law prohibits such limitation. Some * -* jurisdictions do not allow the exclusion or limitation of * -* incidental or consequential damages, so this exclusion and * -* limitation may not apply to You. * -* * -************************************************************************ - -8. Litigation -------------- - -Any litigation relating to this License may be brought only in the -courts of a jurisdiction where the defendant maintains its principal -place of business and such litigation shall be governed by laws of that -jurisdiction, without reference to its conflict-of-law provisions. -Nothing in this Section shall prevent a party's ability to bring -cross-claims or counter-claims. - -9. Miscellaneous ----------------- - -This License represents the complete agreement concerning the subject -matter hereof. If any provision of this License is held to be -unenforceable, such provision shall be reformed only to the extent -necessary to make it enforceable. Any law or regulation which provides -that the language of a contract shall be construed against the drafter -shall not be used to construe this License against a Contributor. - -10. Versions of the License ---------------------------- - -10.1. New Versions - -Mozilla Foundation is the license steward. Except as provided in Section -10.3, no one other than the license steward has the right to modify or -publish new versions of this License. Each version will be given a -distinguishing version number. - -10.2. Effect of New Versions - -You may distribute the Covered Software under the terms of the version -of the License under which You originally received the Covered Software, -or under the terms of any subsequent version published by the license -steward. - -10.3. Modified Versions - -If you create software not governed by this License, and you want to -create a new license for such software, you may create and use a -modified version of this License if you rename the license and remove -any references to the name of the license steward (except to note that -such modified license differs from this License). - -10.4. Distributing Source Code Form that is Incompatible With Secondary -Licenses - -If You choose to distribute Source Code Form that is Incompatible With -Secondary Licenses under the terms of this version of the License, the -notice described in Exhibit B of this License must be attached. 
- -Exhibit A - Source Code Form License Notice -------------------------------------------- - - This Source Code Form is subject to the terms of the Mozilla Public - License, v. 2.0. If a copy of the MPL was not distributed with this - file, You can obtain one at http://mozilla.org/MPL/2.0/. - -If it is not possible or desirable to put the notice in a particular -file, then You may include the notice in a location (such as a LICENSE -file in a relevant directory) where a recipient would be likely to look -for such a notice. - -You may add additional accurate notices of copyright ownership. - -Exhibit B - "Incompatible With Secondary Licenses" Notice ---------------------------------------------------------- - - This Source Code Form is "Incompatible With Secondary Licenses", as - defined by the Mozilla Public License, v. 2.0. diff --git a/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.3.0.dist-info/METADATA b/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.3.0.dist-info/METADATA deleted file mode 100644 index 536b4274f6..0000000000 --- a/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.3.0.dist-info/METADATA +++ /dev/null @@ -1,28 +0,0 @@ -Metadata-Version: 2.1 -Name: taskcluster-taskgraph -Version: 6.3.0 -Summary: Build taskcluster taskgraphs -Home-page: https://github.com/taskcluster/taskgraph -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0) -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Topic :: Software Development -License-File: LICENSE -Requires-Dist: appdirs (>=1.4) -Requires-Dist: cookiecutter (~=2.1) -Requires-Dist: json-e (>=2.7) -Requires-Dist: mozilla-repo-urls -Requires-Dist: PyYAML (>=5.3.1) -Requires-Dist: redo (>=2.0) -Requires-Dist: requests (>=2.25) -Requires-Dist: slugid (>=2.0) -Requires-Dist: taskcluster-urls (>=11.0) -Requires-Dist: voluptuous (>=0.12.1) -Provides-Extra: load-image -Requires-Dist: zstandard ; extra == 'load-image' - diff --git a/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.3.0.dist-info/RECORD b/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.3.0.dist-info/RECORD deleted file mode 100644 index 3a6dfdfc35..0000000000 --- a/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.3.0.dist-info/RECORD +++ /dev/null @@ -1,80 +0,0 @@ -taskgraph/__init__.py,sha256=ILqRnb_Cy7WBFggPsK8BML-nmWySW-capstDs3pWb-c,729 -taskgraph/config.py,sha256=XJYKaA9Egn7aiyZ0v70VCq3Kc-XkK08CK2LDsDfsDR8,4822 -taskgraph/create.py,sha256=MeWVr5gKJefjwK_3_xZUcDDu2NVH97gbUuu1dw_I9hA,5184 -taskgraph/decision.py,sha256=qARBTlLYJ7NVw3aflrspRn_hFmvKcrXJ058yao_4b7A,12882 -taskgraph/docker.py,sha256=6tdGVrKFNonznRJSP4IDZEhKnjV-wYKsR0nXnoDOvZk,7924 -taskgraph/files_changed.py,sha256=W3_gEgUT-mVH9DaaU_8X6gYpftrqBU3kgveGbzPLziU,2793 -taskgraph/filter_tasks.py,sha256=R7tYXiaVPGIkQ6O1c9-QJrKZ59m9pFXCloUlPraVnZU,866 -taskgraph/generator.py,sha256=AmkMCVNmj5spJhRfpSx7-zP3v8OU7i8zAbGMROLLEG8,15668 -taskgraph/graph.py,sha256=bHUsv2pPa2SSaWgBY-ItIj7REPd0o4fFYrwoQbwFKTY,4680 -taskgraph/main.py,sha256=UHSywURHwD56w2vGHgjA8O7K1yaCltgMXlJuuFfFjvY,26802 -taskgraph/morph.py,sha256=Q6weAi-xpJM4XoKA2mM6gVXQYLnE1YSws53vTZygMkY,9192 
-taskgraph/parameters.py,sha256=xaEUElvdKhxHeJNRMF-6JBFDFiVO1Es2fm38PJQ1JA4,12134 -taskgraph/target_tasks.py,sha256=41BIVwiATy8DCQujPduTtnFmgHlKOfw6RPGL4b20WO8,3324 -taskgraph/task.py,sha256=tRr7WhJ2qjYXi-77wva17CpfK53m6W_cl-xzks_GGaQ,3240 -taskgraph/taskgraph.py,sha256=Fh5cX8LrgYmkpVP_uhpfRgHSKHfZjO-VGSmnFUjEru0,2434 -taskgraph/actions/__init__.py,sha256=lVP1e0YyELg7-_42MWWDbT0cKv_p53BApVE6vWOiPww,416 -taskgraph/actions/add_new_jobs.py,sha256=HAfuRDzFti_YmeudxqVl6hgrEbm-ki5-jSCDMC0HBDE,1836 -taskgraph/actions/cancel.py,sha256=UQSt_6y3S6PXNmUo_mNaUOuDvK2bixWjzdjTKXieEEg,1309 -taskgraph/actions/cancel_all.py,sha256=zrKgnW63gMGS5yldJieDt-GAR_XTiGRgybWAipIUCqQ,1941 -taskgraph/actions/rebuild_cached_tasks.py,sha256=UrVAvTmkkF4TAB5vNSpK1kJqMhMkKAMGmrifxH9kQJQ,1086 -taskgraph/actions/registry.py,sha256=xmhoEGMyYj6TTRFwMowZAUp0aqvtLvdVfmRWM7Yh7xo,13122 -taskgraph/actions/retrigger.py,sha256=wF08p_CgsfqraYelc3JLmPcqBFcO-Yt8gZZLlJZBixQ,9387 -taskgraph/actions/util.py,sha256=TxWxMWiKZeuKRwqiUawzjzpa5VF5AWgAKCLy7YaKG80,10661 -taskgraph/loader/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -taskgraph/loader/default.py,sha256=ND_Sx7yx7io1B-6rWEGkg3UIy7iO3DvSLMXwcEqF1N8,1185 -taskgraph/loader/transform.py,sha256=olUBPjxk3eEIg25sduxlcyqhjoig4ts5kPlT_zs6g9g,2147 -taskgraph/optimize/__init__.py,sha256=Oqpq1RW8QzOcu7zaMlNQ3BHT9ws9e_93FWfCqzNcQps,123 -taskgraph/optimize/base.py,sha256=WvoDNewyHG46IQbG3th-aau9OxSKegsYNfvdOEmunbA,18341 -taskgraph/optimize/strategies.py,sha256=IifMlxppVrIABsvn6UBwQYBFUdxkmyZz_FOtK6yNPps,2380 -taskgraph/run-task/fetch-content,sha256=G1aAvZlTg0yWHqxhSxi4RvfxW-KBJ5JwnGtWRqfH_bg,29990 -taskgraph/run-task/hgrc,sha256=BybWLDR89bWi3pE5T05UqmDHs02CbLypE-omLZWU6Uk,896 -taskgraph/run-task/robustcheckout.py,sha256=vPKvHb3fIIJli9ZVZG88XYoa8Sohy2JrpmH6pDgBDHI,30813 -taskgraph/run-task/run-task,sha256=Mpr195iq9eOh6B4MBpPzEDlxeNyJq0Fa2yrtlJunlXE,45434 -taskgraph/transforms/__init__.py,sha256=aw1dz2sRWZcbTILl6SVDuqIEw0mDdjSYu3LCVs-RLXE,110 -taskgraph/transforms/base.py,sha256=LFw2NwhrSriI3vbcCttArTFb7uHxckQpHeFZmatofvM,5146 -taskgraph/transforms/cached_tasks.py,sha256=Z10VD1kEBVXJvj8qSsNTq2mYpklh0V1EN8OT6QK3v_E,2607 -taskgraph/transforms/chunking.py,sha256=7z9oXiA2dDguYwJPaZYCi-fEzbc--O9avZAFS3vP_kg,2592 -taskgraph/transforms/code_review.py,sha256=eE2xrDtdD_n3HT3caQ2HGAkPm6Uutdm4hDCpCoFjEps,707 -taskgraph/transforms/docker_image.py,sha256=AUuWMx43FcQfgbXy4_2Sjae0cWrh5XWMMcJ3ItcoKes,7606 -taskgraph/transforms/fetch.py,sha256=ORnxpVidOQtI1q1xeHl1c1jlShXD8R_jTGC2CX3lLM4,10479 -taskgraph/transforms/from_deps.py,sha256=1mdjIWYshVI2zBywzB3JEqOyvqgVjFvarcQt9PLDSc4,8950 -taskgraph/transforms/notify.py,sha256=0sga-Ls9dhWLAsL0FBjXmVbbduee8LAZp_1pHBQR0iI,6019 -taskgraph/transforms/task.py,sha256=0oQYH7Upjus0-gzCrYbE0tUKZQUEv6Uq1adGBqiNM60,52254 -taskgraph/transforms/task_context.py,sha256=FxZwT69ozierogtlCTNvk7zCW52d0HdhCaJN7EDmI1s,4272 -taskgraph/transforms/job/__init__.py,sha256=JbNpqdoJRId24QVGe821r6u7Zvm2fTNvME_PMGunaoU,17706 -taskgraph/transforms/job/common.py,sha256=ldlbRI8sdEd-eUcre4GtXMerUg0RQZ_XSe9GwAkfI3I,5897 -taskgraph/transforms/job/index_search.py,sha256=Ngh9FFu1bx2kHVTChW2vcrbnb3SzMneRHopXk18RfB4,1220 -taskgraph/transforms/job/run_task.py,sha256=s9gq1bPdzBB0j2OguXJpWn1-S5Ctltqo4aLsB4kzpUc,8385 -taskgraph/transforms/job/toolchain.py,sha256=GOqIvp1MgtV-6whi2ofgSCFB7GolikZbfLXz0C1h0vc,6015 -taskgraph/util/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -taskgraph/util/archive.py,sha256=nzYn8cQ3NfLAeV-2SuTNoeQ6hg8m40f6FQcSTyVIKwQ,2855 
-taskgraph/util/attributes.py,sha256=pPOFmwkDQQ-IqfDpVghZ10YI_qXRY4Bi5JP3xr6XVvc,2964 -taskgraph/util/cached_tasks.py,sha256=o-yJ91wlWbzoDB2GvKPpGcDE27_IEMgczp_figEBjV8,3406 -taskgraph/util/decision.py,sha256=uTC143FpTKQkGff5jIz3voWRYXBCHgx-XAm7FMW53hE,2433 -taskgraph/util/dependencies.py,sha256=3Qba3zI87JYR5fk5FndGzEVW-5NIzzZrBf9rVYcnLD0,2734 -taskgraph/util/docker.py,sha256=rTbzUt8S6s3N1r8gmwHrqsIY9VZ7TDWBM-jZQ5w0P_U,7762 -taskgraph/util/hash.py,sha256=31sQmDwQOavA5hWsmzWDNFoFTaTp5a7qLSQLNTEALD8,1661 -taskgraph/util/keyed_by.py,sha256=cgBH4tG8eH5UUrm5q4ODG7A4fzkGAOI7feVoZy3V8Ho,3419 -taskgraph/util/memoize.py,sha256=XDlwc-56gzoY8QTwOoiCOYL-igX7JoMcY-9Ih80Euc8,1331 -taskgraph/util/parameterization.py,sha256=dzxh8Bc8MBKoDMwj2V2AQab9UrC-JcM3tg0hDVTWpjc,3184 -taskgraph/util/path.py,sha256=e-JloOQV2-Oua_pe335bv4xWAB07vb82TKpu_zCOl0w,4466 -taskgraph/util/python_path.py,sha256=ed4F5z2mId56LauVczgxm_LGxgQi8XlxlYDgXOPZyII,1576 -taskgraph/util/readonlydict.py,sha256=XzTG-gqGqWVlSkDxSyOL6Ur7Z0ONhIJ9DVLWV3q4q1w,787 -taskgraph/util/schema.py,sha256=JGd0Imjfv6JKCY_tjJtOYwI6uwKUaNgzAcvcZj5WE6A,8323 -taskgraph/util/shell.py,sha256=MB9zHVSvxgOuszgmKr2rWUDahANZkbHHNkjjagZG_3I,1317 -taskgraph/util/taskcluster.py,sha256=cGUGvkrefRHngjyZm_iQRYKRlGi4jMIr7ky0fi_YBrg,12445 -taskgraph/util/taskgraph.py,sha256=ecKEvTfmLVvEKLPO_0g34CqVvc0iCzuNMh3064BZNrE,1969 -taskgraph/util/templates.py,sha256=HGTaIKCpAwEzBDHq0cDai1HJjPJrdnHsjJz6N4LVpKI,2139 -taskgraph/util/time.py,sha256=pNFcTH-iYRfm2-okm1lMATc4B5wO-_FXbOFXEtXD27g,3390 -taskgraph/util/treeherder.py,sha256=A3rpPUQB60Gn1Yx-OZgKuWWGJ8x0-6tcdeeslzco9ag,2687 -taskgraph/util/vcs.py,sha256=54Haq2XyC5CmPnjrPRQZY5wUeoFsaV9pWTYvBjPcVMA,18917 -taskgraph/util/verify.py,sha256=cSd7EeP9hUvp-5WOvKDHrvpFAGb_LuiNPxPp0-YmNEA,8947 -taskgraph/util/workertypes.py,sha256=1wgM6vLrlgtyv8854anVIs0Bx11kV8JJJaKcOHJc2j0,2498 -taskgraph/util/yaml.py,sha256=hfKI_D8Q7dimq4_VvO3WEh8CJsTrsIMwN6set7HIQbY,990 -taskcluster_taskgraph-6.3.0.dist-info/LICENSE,sha256=HyVuytGSiAUQ6ErWBHTqt1iSGHhLmlC8fO7jTCuR8dU,16725 -taskcluster_taskgraph-6.3.0.dist-info/METADATA,sha256=MgIgtvNBRjc0CjnoD-7KHLPpz3sGlja2CZU3GzUMW84,1046 -taskcluster_taskgraph-6.3.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 -taskcluster_taskgraph-6.3.0.dist-info/entry_points.txt,sha256=2hxDzE3qq_sHh-J3ROqwpxgQgxO-196phWAQREl2-XA,50 -taskcluster_taskgraph-6.3.0.dist-info/top_level.txt,sha256=3JNeYn_hNiNXC7DrdH_vcv-WYSE7QdgGjdvUYvSjVp0,10 -taskcluster_taskgraph-6.3.0.dist-info/RECORD,, diff --git a/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.3.0.dist-info/WHEEL b/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.3.0.dist-info/WHEEL deleted file mode 100644 index becc9a66ea..0000000000 --- a/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.3.0.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.37.1) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.3.0.dist-info/entry_points.txt b/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.3.0.dist-info/entry_points.txt deleted file mode 100644 index dec40df69f..0000000000 --- a/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.3.0.dist-info/entry_points.txt +++ /dev/null @@ -1,2 +0,0 @@ -[console_scripts] -taskgraph = taskgraph.main:main diff --git a/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.3.0.dist-info/top_level.txt 
b/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.3.0.dist-info/top_level.txt deleted file mode 100644 index f3840b68ef..0000000000 --- a/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-6.3.0.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -taskgraph diff --git a/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-8.0.1.dist-info/LICENSE b/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-8.0.1.dist-info/LICENSE new file mode 100644 index 0000000000..a612ad9813 --- /dev/null +++ b/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-8.0.1.dist-info/LICENSE @@ -0,0 +1,373 @@ +Mozilla Public License Version 2.0 +================================== + +1. Definitions +-------------- + +1.1. "Contributor" + means each individual or legal entity that creates, contributes to + the creation of, or owns Covered Software. + +1.2. "Contributor Version" + means the combination of the Contributions of others (if any) used + by a Contributor and that particular Contributor's Contribution. + +1.3. "Contribution" + means Covered Software of a particular Contributor. + +1.4. "Covered Software" + means Source Code Form to which the initial Contributor has attached + the notice in Exhibit A, the Executable Form of such Source Code + Form, and Modifications of such Source Code Form, in each case + including portions thereof. + +1.5. "Incompatible With Secondary Licenses" + means + + (a) that the initial Contributor has attached the notice described + in Exhibit B to the Covered Software; or + + (b) that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the + terms of a Secondary License. + +1.6. "Executable Form" + means any form of the work other than Source Code Form. + +1.7. "Larger Work" + means a work that combines Covered Software with other material, in + a separate file or files, that is not Covered Software. + +1.8. "License" + means this document. + +1.9. "Licensable" + means having the right to grant, to the maximum extent possible, + whether at the time of the initial grant or subsequently, any and + all of the rights conveyed by this License. + +1.10. "Modifications" + means any of the following: + + (a) any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered + Software; or + + (b) any new file in Source Code Form that contains any Covered + Software. + +1.11. "Patent Claims" of a Contributor + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the + License, by the making, using, selling, offering for sale, having + made, import, or transfer of either its Contributions or its + Contributor Version. + +1.12. "Secondary License" + means either the GNU General Public License, Version 2.0, the GNU + Lesser General Public License, Version 2.1, the GNU Affero General + Public License, Version 3.0, or any later versions of those + licenses. + +1.13. "Source Code Form" + means the form of the work preferred for making modifications. + +1.14. "You" (or "Your") + means an individual or a legal entity exercising rights under this + License. For legal entities, "You" includes any entity that + controls, is controlled by, or is under common control with You. 
For + purposes of this definition, "control" means (a) the power, direct + or indirect, to cause the direction or management of such entity, + whether by contract or otherwise, or (b) ownership of more than + fifty percent (50%) of the outstanding shares or beneficial + ownership of such entity. + +2. License Grants and Conditions +-------------------------------- + +2.1. Grants + +Each Contributor hereby grants You a world-wide, royalty-free, +non-exclusive license: + +(a) under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + +(b) under Patent Claims of such Contributor to make, use, sell, offer + for sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + +2.2. Effective Date + +The licenses granted in Section 2.1 with respect to any Contribution +become effective for each Contribution on the date the Contributor first +distributes such Contribution. + +2.3. Limitations on Grant Scope + +The licenses granted in this Section 2 are the only rights granted under +this License. No additional rights or licenses will be implied from the +distribution or licensing of Covered Software under this License. +Notwithstanding Section 2.1(b) above, no patent license is granted by a +Contributor: + +(a) for any code that a Contributor has removed from Covered Software; + or + +(b) for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + +(c) under Patent Claims infringed by Covered Software in the absence of + its Contributions. + +This License does not grant any rights in the trademarks, service marks, +or logos of any Contributor (except as may be necessary to comply with +the notice requirements in Section 3.4). + +2.4. Subsequent Licenses + +No Contributor makes additional grants as a result of Your choice to +distribute the Covered Software under a subsequent version of this +License (see Section 10.2) or under the terms of a Secondary License (if +permitted under the terms of Section 3.3). + +2.5. Representation + +Each Contributor represents that the Contributor believes its +Contributions are its original creation(s) or it has sufficient rights +to grant the rights to its Contributions conveyed by this License. + +2.6. Fair Use + +This License is not intended to limit any rights You have under +applicable copyright doctrines of fair use, fair dealing, or other +equivalents. + +2.7. Conditions + +Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted +in Section 2.1. + +3. Responsibilities +------------------- + +3.1. Distribution of Source Form + +All distribution of Covered Software in Source Code Form, including any +Modifications that You create or to which You contribute, must be under +the terms of this License. You must inform recipients that the Source +Code Form of the Covered Software is governed by the terms of this +License, and how they can obtain a copy of this License. You may not +attempt to alter or restrict the recipients' rights in the Source Code +Form. + +3.2. 
Distribution of Executable Form + +If You distribute Covered Software in Executable Form then: + +(a) such Covered Software must also be made available in Source Code + Form, as described in Section 3.1, and You must inform recipients of + the Executable Form how they can obtain a copy of such Source Code + Form by reasonable means in a timely manner, at a charge no more + than the cost of distribution to the recipient; and + +(b) You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter + the recipients' rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + +You may create and distribute a Larger Work under terms of Your choice, +provided that You also comply with the requirements of this License for +the Covered Software. If the Larger Work is a combination of Covered +Software with a work governed by one or more Secondary Licenses, and the +Covered Software is not Incompatible With Secondary Licenses, this +License permits You to additionally distribute such Covered Software +under the terms of such Secondary License(s), so that the recipient of +the Larger Work may, at their option, further distribute the Covered +Software under the terms of either this License or such Secondary +License(s). + +3.4. Notices + +You may not remove or alter the substance of any license notices +(including copyright notices, patent notices, disclaimers of warranty, +or limitations of liability) contained within the Source Code Form of +the Covered Software, except that You may alter any license notices to +the extent required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + +You may choose to offer, and to charge a fee for, warranty, support, +indemnity or liability obligations to one or more recipients of Covered +Software. However, You may do so only on Your own behalf, and not on +behalf of any Contributor. You must make it absolutely clear that any +such warranty, support, indemnity, or liability obligation is offered by +You alone, and You hereby agree to indemnify every Contributor for any +liability incurred by such Contributor as a result of warranty, support, +indemnity or liability terms You offer. You may include additional +disclaimers of warranty and limitations of liability specific to any +jurisdiction. + +4. Inability to Comply Due to Statute or Regulation +--------------------------------------------------- + +If it is impossible for You to comply with any of the terms of this +License with respect to some or all of the Covered Software due to +statute, judicial order, or regulation then You must: (a) comply with +the terms of this License to the maximum extent possible; and (b) +describe the limitations and the code they affect. Such description must +be placed in a text file included with all distributions of the Covered +Software under this License. Except to the extent prohibited by statute +or regulation, such description must be sufficiently detailed for a +recipient of ordinary skill to be able to understand it. + +5. Termination +-------------- + +5.1. The rights granted under this License will terminate automatically +if You fail to comply with any of its terms. 
However, if You become +compliant, then the rights granted under this License from a particular +Contributor are reinstated (a) provisionally, unless and until such +Contributor explicitly and finally terminates Your grants, and (b) on an +ongoing basis, if such Contributor fails to notify You of the +non-compliance by some reasonable means prior to 60 days after You have +come back into compliance. Moreover, Your grants from a particular +Contributor are reinstated on an ongoing basis if such Contributor +notifies You of the non-compliance by some reasonable means, this is the +first time You have received notice of non-compliance with this License +from such Contributor, and You become compliant prior to 30 days after +Your receipt of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent +infringement claim (excluding declaratory judgment actions, +counter-claims, and cross-claims) alleging that a Contributor Version +directly or indirectly infringes any patent, then the rights granted to +You by any and all Contributors for the Covered Software under Section +2.1 of this License shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all +end user license agreements (excluding distributors and resellers) which +have been validly granted by You or Your distributors under this License +prior to termination shall survive termination. + +************************************************************************ +* * +* 6. Disclaimer of Warranty * +* ------------------------- * +* * +* Covered Software is provided under this License on an "as is" * +* basis, without warranty of any kind, either expressed, implied, or * +* statutory, including, without limitation, warranties that the * +* Covered Software is free of defects, merchantable, fit for a * +* particular purpose or non-infringing. The entire risk as to the * +* quality and performance of the Covered Software is with You. * +* Should any Covered Software prove defective in any respect, You * +* (not any Contributor) assume the cost of any necessary servicing, * +* repair, or correction. This disclaimer of warranty constitutes an * +* essential part of this License. No use of any Covered Software is * +* authorized under this License except under this disclaimer. * +* * +************************************************************************ + +************************************************************************ +* * +* 7. Limitation of Liability * +* -------------------------- * +* * +* Under no circumstances and under no legal theory, whether tort * +* (including negligence), contract, or otherwise, shall any * +* Contributor, or anyone who distributes Covered Software as * +* permitted above, be liable to You for any direct, indirect, * +* special, incidental, or consequential damages of any character * +* including, without limitation, damages for lost profits, loss of * +* goodwill, work stoppage, computer failure or malfunction, or any * +* and all other commercial damages or losses, even if such party * +* shall have been informed of the possibility of such damages. This * +* limitation of liability shall not apply to liability for death or * +* personal injury resulting from such party's negligence to the * +* extent applicable law prohibits such limitation. Some * +* jurisdictions do not allow the exclusion or limitation of * +* incidental or consequential damages, so this exclusion and * +* limitation may not apply to You. 
* +* * +************************************************************************ + +8. Litigation +------------- + +Any litigation relating to this License may be brought only in the +courts of a jurisdiction where the defendant maintains its principal +place of business and such litigation shall be governed by laws of that +jurisdiction, without reference to its conflict-of-law provisions. +Nothing in this Section shall prevent a party's ability to bring +cross-claims or counter-claims. + +9. Miscellaneous +---------------- + +This License represents the complete agreement concerning the subject +matter hereof. If any provision of this License is held to be +unenforceable, such provision shall be reformed only to the extent +necessary to make it enforceable. Any law or regulation which provides +that the language of a contract shall be construed against the drafter +shall not be used to construe this License against a Contributor. + +10. Versions of the License +--------------------------- + +10.1. New Versions + +Mozilla Foundation is the license steward. Except as provided in Section +10.3, no one other than the license steward has the right to modify or +publish new versions of this License. Each version will be given a +distinguishing version number. + +10.2. Effect of New Versions + +You may distribute the Covered Software under the terms of the version +of the License under which You originally received the Covered Software, +or under the terms of any subsequent version published by the license +steward. + +10.3. Modified Versions + +If you create software not governed by this License, and you want to +create a new license for such software, you may create and use a +modified version of this License if you rename the license and remove +any references to the name of the license steward (except to note that +such modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary +Licenses + +If You choose to distribute Source Code Form that is Incompatible With +Secondary Licenses under the terms of this version of the License, the +notice described in Exhibit B of this License must be attached. + +Exhibit A - Source Code Form License Notice +------------------------------------------- + + This Source Code Form is subject to the terms of the Mozilla Public + License, v. 2.0. If a copy of the MPL was not distributed with this + file, You can obtain one at http://mozilla.org/MPL/2.0/. + +If it is not possible or desirable to put the notice in a particular +file, then You may include the notice in a location (such as a LICENSE +file in a relevant directory) where a recipient would be likely to look +for such a notice. + +You may add additional accurate notices of copyright ownership. + +Exhibit B - "Incompatible With Secondary Licenses" Notice +--------------------------------------------------------- + + This Source Code Form is "Incompatible With Secondary Licenses", as + defined by the Mozilla Public License, v. 2.0. 
diff --git a/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-8.0.1.dist-info/METADATA b/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-8.0.1.dist-info/METADATA
new file mode 100644
index 0000000000..e549db9aa3
--- /dev/null
+++ b/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-8.0.1.dist-info/METADATA
@@ -0,0 +1,123 @@
+Metadata-Version: 2.1
+Name: taskcluster-taskgraph
+Version: 8.0.1
+Summary: Build taskcluster taskgraphs
+Home-page: https://github.com/taskcluster/taskgraph
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Console
+Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Topic :: Software Development
+License-File: LICENSE
+Requires-Dist: appdirs >=1.4
+Requires-Dist: cookiecutter ~=2.1
+Requires-Dist: json-e >=2.7
+Requires-Dist: mozilla-repo-urls
+Requires-Dist: PyYAML >=5.3.1
+Requires-Dist: redo >=2.0
+Requires-Dist: requests >=2.25
+Requires-Dist: slugid >=2.0
+Requires-Dist: taskcluster-urls >=11.0
+Requires-Dist: voluptuous >=0.12.1
+Provides-Extra: load-image
+Requires-Dist: zstandard ; extra == 'load-image'
+
+
+.. image:: https://firefox-ci-tc.services.mozilla.com/api/github/v1/repository/taskcluster/taskgraph/main/badge.svg
+   :target: https://firefox-ci-tc.services.mozilla.com/api/github/v1/repository/taskcluster/taskgraph/main/latest
+   :alt: Task Status
+
+.. image:: https://results.pre-commit.ci/badge/github/taskcluster/taskgraph/main.svg
+   :target: https://results.pre-commit.ci/latest/github/taskcluster/taskgraph/main
+   :alt: pre-commit.ci status
+
+.. image:: https://codecov.io/gh/taskcluster/taskgraph/branch/main/graph/badge.svg?token=GJIV52ZQNP
+   :target: https://codecov.io/gh/taskcluster/taskgraph
+   :alt: Code Coverage
+
+.. image:: https://badge.fury.io/py/taskcluster-taskgraph.svg
+   :target: https://badge.fury.io/py/taskcluster-taskgraph
+   :alt: PyPI Version
+
+.. image:: https://readthedocs.org/projects/taskcluster-taskgraph/badge/?version=latest
+   :target: https://taskcluster-taskgraph.readthedocs.io/en/latest/?badge=latest
+   :alt: Documentation Status
+
+.. image:: https://img.shields.io/badge/license-MPL%202.0-orange.svg
+   :target: http://mozilla.org/MPL/2.0
+   :alt: License
+
+Taskgraph
+=========
+
+Taskgraph is a Python library to generate graphs of tasks for the `Taskcluster
+CI`_ service. It is the recommended approach for configuring tasks once your
+project outgrows a single `.taskcluster.yml`_ file, and it is what powers the
+more than 30,000 tasks that make up Firefox's CI.
+
+For more information and usage instructions, `see the docs`_.
+
+How It Works
+------------
+
+Taskgraph leverages the fact that Taskcluster is a generic task execution
+platform. This means that tasks can be scheduled via its `comprehensive API`_,
+and aren't limited to being triggered in response to supported events.
+
+Taskgraph builds on this execution platform to allow CI systems to scale to
+any size or complexity.
+
+1. A *decision task* is created via Taskcluster's normal `.taskcluster.yml`_
+   file. This task invokes ``taskgraph``.
+2. Taskgraph evaluates a series of YAML-based task definitions (similar to
+   those other CI offerings provide).
+3. Taskgraph applies transforms on top of these task definitions. Transforms
+   are Python functions that can programmatically alter or even clone a task
+   definition.
+4. Taskgraph applies some optional optimization logic to remove unnecessary
+   tasks.
+5. Taskgraph submits the resulting *task graph* to Taskcluster via its API.
+
+Taskgraph's combination of declarative task configuration and
+programmatic alteration is what allows it to support CI systems of any scale.
+Taskgraph is the library that powers the 30,000+ tasks making up `Firefox's
+CI`_.
+
+.. _Taskcluster CI: https://taskcluster.net/
+.. _comprehensive API: https://docs.taskcluster.net/docs/reference/platform/queue/api
+.. _.taskcluster.yml: https://docs.taskcluster.net/docs/reference/integrations/github/taskcluster-yml-v1
+.. _Firefox's CI: https://treeherder.mozilla.org/jobs?repo=mozilla-central
+.. _see the docs: https://taskcluster-taskgraph.readthedocs.io
+
+Installation
+------------
+
+Taskgraph supports Python 3.8 and up, and can be installed from PyPI:
+
+.. code-block::
+
+   pip install taskcluster-taskgraph
+
+
+Alternatively, the repo can be cloned and installed directly:
+
+.. code-block::
+
+   git clone https://github.com/taskcluster/taskgraph
+   cd taskgraph
+   python setup.py install
+
+In both cases, it's recommended to use a Python `virtual environment`_.
+
+.. _virtual environment: https://docs.python.org/3/tutorial/venv.html
+
+Get Involved
+------------
+
+If you'd like to get involved, please see our `contributing docs`_!
+
+.. _contributing docs: https://github.com/taskcluster/taskgraph/blob/main/CONTRIBUTING.rst
diff --git a/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-8.0.1.dist-info/RECORD b/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-8.0.1.dist-info/RECORD
new file mode 100644
index 0000000000..c04e803ff2
--- /dev/null
+++ b/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-8.0.1.dist-info/RECORD
@@ -0,0 +1,79 @@
+taskgraph/__init__.py,sha256=hCl3NLzC-cVXlKhuzf0-_0wd0gYmNA3oshXfTaa9DNQ,729
+taskgraph/config.py,sha256=8vntWUrPwGds22mFKYAgcsD4Mr8hoONTv2ssGBcClLw,5108
+taskgraph/create.py,sha256=_zokjSM3ZaO04l2LiMhenE8qXDZVfYvueIIu5hGUhzc,5185
+taskgraph/decision.py,sha256=sG0CIj9OSOdfN65LSt6dRYFWbns9_JraVC5fQU1_7oc,13012
+taskgraph/docker.py,sha256=rk-tAMycHnapFyR2Q-XJXzC2A4uv0i-VykLZfwl-pRo,8417
+taskgraph/filter_tasks.py,sha256=R7tYXiaVPGIkQ6O1c9-QJrKZ59m9pFXCloUlPraVnZU,866
+taskgraph/generator.py,sha256=zrH1zfy-8akksKTSOf6e4FEsdOd5y7-h1Jne_2Jabcc,15703
+taskgraph/graph.py,sha256=bHUsv2pPa2SSaWgBY-ItIj7REPd0o4fFYrwoQbwFKTY,4680
+taskgraph/main.py,sha256=tgfAEcNUJfmADteL24yJR5u7tzU4v3mzmxiogVSCK8Y,29072
+taskgraph/morph.py,sha256=bwkaSGdTZLcK_rhF2st2mCGv9EHN5WdbnDeuZcqp9UA,9208
+taskgraph/parameters.py,sha256=hrwUHHu4PS79w-fQ3qNnLSyjRto1EDlidE8e1GzIy8U,12272
+taskgraph/target_tasks.py,sha256=9_v66bzmQFELPsfIDGITXrqzsmEiLq1EeuJFhycKL0M,3356
+taskgraph/task.py,sha256=tRr7WhJ2qjYXi-77wva17CpfK53m6W_cl-xzks_GGaQ,3240
+taskgraph/taskgraph.py,sha256=Fh5cX8LrgYmkpVP_uhpfRgHSKHfZjO-VGSmnFUjEru0,2434
+taskgraph/actions/__init__.py,sha256=lVP1e0YyELg7-_42MWWDbT0cKv_p53BApVE6vWOiPww,416
+taskgraph/actions/add_new_jobs.py,sha256=c8vGWGXMr4qqW2Axz9rbBrDopabZB3gf3SVFLBZH8ak,1865
+taskgraph/actions/cancel.py,sha256=xrIzlB5KzcnQ4_HultoIcnlxtbQhUi7723g5K2iQoY0,1263
+taskgraph/actions/cancel_all.py,sha256=zNiHtOiSQQxLyNJYtaW0JKPazHXSgZrq1C6o8DGYxG8,1887
+taskgraph/actions/rebuild_cached_tasks.py,sha256=r1QTri2ey30TdEztUgc-nkiHdJPe8Sbn7FvKeR_kt0Y,1115
+taskgraph/actions/registry.py,sha256=hubblOhL3fbWDRtKv7_6HiD0P94hzQrpjdMkj23CGCg,13564 +taskgraph/actions/retrigger.py,sha256=MKkoZDAe0SKIq6fHqwAc1Ici_wIGRd7MxeBNhwoDEGE,9388 +taskgraph/actions/util.py,sha256=gB8MZb8juP1S7EsLHJivr6BBY2bf5IUiIpN7Mq9-kXo,10964 +taskgraph/loader/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +taskgraph/loader/default.py,sha256=_bBJG6l04v44Jm5HSIEnVndC05NpNmq5L28QfJHk0wo,1185 +taskgraph/loader/transform.py,sha256=olUBPjxk3eEIg25sduxlcyqhjoig4ts5kPlT_zs6g9g,2147 +taskgraph/optimize/__init__.py,sha256=Oqpq1RW8QzOcu7zaMlNQ3BHT9ws9e_93FWfCqzNcQps,123 +taskgraph/optimize/base.py,sha256=wTViUwVmY9sZvlzSuGwkVrETCo0v2OfyNxFFgzJrDNc,18982 +taskgraph/optimize/strategies.py,sha256=UryFI5TizzEF_2NO8MyuKwqVektHfJeG_t0_zZwxEds,2577 +taskgraph/run-task/fetch-content,sha256=G1aAvZlTg0yWHqxhSxi4RvfxW-KBJ5JwnGtWRqfH_bg,29990 +taskgraph/run-task/hgrc,sha256=BybWLDR89bWi3pE5T05UqmDHs02CbLypE-omLZWU6Uk,896 +taskgraph/run-task/robustcheckout.py,sha256=vPKvHb3fIIJli9ZVZG88XYoa8Sohy2JrpmH6pDgBDHI,30813 +taskgraph/run-task/run-task,sha256=ev64Ud2X3482B05aurUcWD93_sZS1aW2N-eVutRHF5k,45753 +taskgraph/transforms/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +taskgraph/transforms/base.py,sha256=z20Yh619srbwuQJWASRtG2_j6NUbWlCujTTCHWLa0GY,5147 +taskgraph/transforms/cached_tasks.py,sha256=Z10VD1kEBVXJvj8qSsNTq2mYpklh0V1EN8OT6QK3v_E,2607 +taskgraph/transforms/chunking.py,sha256=7z9oXiA2dDguYwJPaZYCi-fEzbc--O9avZAFS3vP_kg,2592 +taskgraph/transforms/code_review.py,sha256=tevRFQli3MkzW_0Zhr-hwlVti8hFaXEz94llwhBu_ns,713 +taskgraph/transforms/docker_image.py,sha256=GScS7Lld3YcS57eC30wp3DJM_ATLrmmVfZzINKgC1fM,7546 +taskgraph/transforms/fetch.py,sha256=u1M57LQOi0kHz6FFP1qah3yJh15eXYqQCF_F6r5qjh0,10662 +taskgraph/transforms/from_deps.py,sha256=_cdIefdRkZYWaFJaWpsglivvG8bBGWd4beg7QgNl0Jc,8885 +taskgraph/transforms/notify.py,sha256=0sga-Ls9dhWLAsL0FBjXmVbbduee8LAZp_1pHBQR0iI,6019 +taskgraph/transforms/task.py,sha256=nRzNAxLjA6BsFktZAA9Upqb_pSFNhjoCzKm0QDxvVgM,52586 +taskgraph/transforms/task_context.py,sha256=9v3ke967atAYCtQxIblSFucJA1tum9Q8QpXQeTwNIzU,4278 +taskgraph/transforms/run/__init__.py,sha256=gVJ4eNquKNlygX18OtWTDnl6FFsZlA12bxfvB3kZz14,17761 +taskgraph/transforms/run/common.py,sha256=G3WdMHU5YWUfk1uR6xsxWY7MQKjU9tnqtRDmGttUqt4,5626 +taskgraph/transforms/run/index_search.py,sha256=ABIaX2FFx02o1StZgNAB_ZDXc1lTFO2aUIBH5BuUjtA,1224 +taskgraph/transforms/run/run_task.py,sha256=0GI8syzGtRDT07g_6SXG99JtxDBe09zsW5ltL-aUhYU,8403 +taskgraph/transforms/run/toolchain.py,sha256=KiuBfJ6CShwGYIIljy4i7iYSHFFXF_A_zSvRGUgYboA,6033 +taskgraph/util/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +taskgraph/util/archive.py,sha256=NIqg2Su5PUqOv4JM60uFixsMsOXN26u5paB1Uh6foSI,4707 +taskgraph/util/attributes.py,sha256=pPOFmwkDQQ-IqfDpVghZ10YI_qXRY4Bi5JP3xr6XVvc,2964 +taskgraph/util/cached_tasks.py,sha256=-AqmOjrkI5PMAlAlQl1wShCrs0HA5lWLBgkxTcFstFM,4150 +taskgraph/util/dependencies.py,sha256=3Qba3zI87JYR5fk5FndGzEVW-5NIzzZrBf9rVYcnLD0,2734 +taskgraph/util/docker.py,sha256=ffQ6KloQNz_kwYemSZEkh4xUMVMeotnnwphWZth1PqQ,8112 +taskgraph/util/hash.py,sha256=U5h6WwC3zs0ooX8odc7AjgPQKKFpDXL7PemoyENPJYo,1644 +taskgraph/util/keyed_by.py,sha256=EMWNRRqYB0AS7A4Y4lthYf2HB7G2ercGFf4hN9zwyaY,3348 +taskgraph/util/memoize.py,sha256=CvCGl-_qft062b3GZC4aHbPfEOPtqR9oOUEqvk9aojQ,294 +taskgraph/util/parameterization.py,sha256=DiPE-4jappGMPljDhhZI52BP7dLBGZHu5EI1cW4aRYg,3392 +taskgraph/util/path.py,sha256=e-JloOQV2-Oua_pe335bv4xWAB07vb82TKpu_zCOl0w,4466 
+taskgraph/util/python_path.py,sha256=ed4F5z2mId56LauVczgxm_LGxgQi8XlxlYDgXOPZyII,1576 +taskgraph/util/readonlydict.py,sha256=XzTG-gqGqWVlSkDxSyOL6Ur7Z0ONhIJ9DVLWV3q4q1w,787 +taskgraph/util/schema.py,sha256=HmbbJ_i5uxZZHZSJ8sVWaD-VMhZI4ymx0STNcjO5t2M,8260 +taskgraph/util/set_name.py,sha256=cha9awo2nMQ9jfSEcbyNkZkCq_1Yg_kKJTfvDzabHSc,1134 +taskgraph/util/shell.py,sha256=nf__ly0Ikhj92AiEBCQtvyyckm8UfO_3DSgz0SU-7QA,1321 +taskgraph/util/taskcluster.py,sha256=LScpZknMycOOneIcRMf236rCTMRHHGxFTc9Lh7mRKaI,13057 +taskgraph/util/taskgraph.py,sha256=ecKEvTfmLVvEKLPO_0g34CqVvc0iCzuNMh3064BZNrE,1969 +taskgraph/util/templates.py,sha256=HGTaIKCpAwEzBDHq0cDai1HJjPJrdnHsjJz6N4LVpKI,2139 +taskgraph/util/time.py,sha256=XauJ0DbU0fyFvHLzJLG4ehHv9KaKixxETro89GPC1yk,3350 +taskgraph/util/treeherder.py,sha256=kc8jCy_lYduBxVMYOQzWpmI_6i2bRmkQLKq5DGmbiDI,2721 +taskgraph/util/vcs.py,sha256=FjS82fiTsoQ_ArjTCDOtDGfNdVUp_8zvVKB9SoAG3Rs,18019 +taskgraph/util/verify.py,sha256=htrNX7aXMMDzxymsFVcs0kaO5gErFHd62g9cQsZI_WE,8518 +taskgraph/util/workertypes.py,sha256=1wgM6vLrlgtyv8854anVIs0Bx11kV8JJJaKcOHJc2j0,2498 +taskgraph/util/yaml.py,sha256=-LaIf3RROuaSWckOOGN5Iviu-DHWxIChgHn9a7n6ec4,1059 +taskcluster_taskgraph-8.0.1.dist-info/LICENSE,sha256=HyVuytGSiAUQ6ErWBHTqt1iSGHhLmlC8fO7jTCuR8dU,16725 +taskcluster_taskgraph-8.0.1.dist-info/METADATA,sha256=qg-m62f4BGLh2jBAr_-OQZhraOSciTrv5EyNY0Wwq8I,4688 +taskcluster_taskgraph-8.0.1.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92 +taskcluster_taskgraph-8.0.1.dist-info/entry_points.txt,sha256=2hxDzE3qq_sHh-J3ROqwpxgQgxO-196phWAQREl2-XA,50 +taskcluster_taskgraph-8.0.1.dist-info/top_level.txt,sha256=3JNeYn_hNiNXC7DrdH_vcv-WYSE7QdgGjdvUYvSjVp0,10 +taskcluster_taskgraph-8.0.1.dist-info/RECORD,, diff --git a/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-8.0.1.dist-info/WHEEL b/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-8.0.1.dist-info/WHEEL new file mode 100644 index 0000000000..bab98d6758 --- /dev/null +++ b/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-8.0.1.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.43.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-8.0.1.dist-info/entry_points.txt b/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-8.0.1.dist-info/entry_points.txt new file mode 100644 index 0000000000..dec40df69f --- /dev/null +++ b/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-8.0.1.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[console_scripts] +taskgraph = taskgraph.main:main diff --git a/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-8.0.1.dist-info/top_level.txt b/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-8.0.1.dist-info/top_level.txt new file mode 100644 index 0000000000..f3840b68ef --- /dev/null +++ b/third_party/python/taskcluster_taskgraph/taskcluster_taskgraph-8.0.1.dist-info/top_level.txt @@ -0,0 +1 @@ +taskgraph diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/__init__.py b/third_party/python/taskcluster_taskgraph/taskgraph/__init__.py index 81cc763230..0bd794101c 100644 --- a/third_party/python/taskcluster_taskgraph/taskgraph/__init__.py +++ b/third_party/python/taskcluster_taskgraph/taskgraph/__init__.py @@ -2,7 +2,7 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-__version__ = "6.3.0" +__version__ = "8.0.1" # Maximum number of dependencies a single task can have # https://docs.taskcluster.net/reference/platform/taskcluster-queue/references/api#createTask diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/actions/add_new_jobs.py b/third_party/python/taskcluster_taskgraph/taskgraph/actions/add_new_jobs.py index c5e1821546..f635250086 100644 --- a/third_party/python/taskcluster_taskgraph/taskgraph/actions/add_new_jobs.py +++ b/third_party/python/taskcluster_taskgraph/taskgraph/actions/add_new_jobs.py @@ -40,7 +40,7 @@ from taskgraph.actions.util import ( ) def add_new_jobs_action(parameters, graph_config, input, task_group_id, task_id): decision_task_id, full_task_graph, label_to_taskid = fetch_graph_and_labels( - parameters, graph_config + parameters, graph_config, task_group_id=task_group_id ) to_run = [] diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/actions/cancel.py b/third_party/python/taskcluster_taskgraph/taskgraph/actions/cancel.py index 03788c6538..33a5394e68 100644 --- a/third_party/python/taskcluster_taskgraph/taskgraph/actions/cancel.py +++ b/third_party/python/taskcluster_taskgraph/taskgraph/actions/cancel.py @@ -34,9 +34,7 @@ def cancel_action(parameters, graph_config, input, task_group_id, task_id): # cannot be cancelled at this time, but it's also not running # anymore, so we can ignore this error. logger.info( - 'Task "{}" is past its deadline and cannot be cancelled.'.format( - task_id - ) + f'Task "{task_id}" is past its deadline and cannot be cancelled.' ) return raise diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/actions/cancel_all.py b/third_party/python/taskcluster_taskgraph/taskgraph/actions/cancel_all.py index d3e0440839..55453b7624 100644 --- a/third_party/python/taskcluster_taskgraph/taskgraph/actions/cancel_all.py +++ b/third_party/python/taskcluster_taskgraph/taskgraph/actions/cancel_all.py @@ -43,9 +43,7 @@ def cancel_all_action(parameters, graph_config, input, task_group_id, task_id): # cannot be cancelled at this time, but it's also not running # anymore, so we can ignore this error. logger.info( - "Task {} is past its deadline and cannot be cancelled.".format( - task_id - ) + f"Task {task_id} is past its deadline and cannot be cancelled." 
) return raise diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/actions/rebuild_cached_tasks.py b/third_party/python/taskcluster_taskgraph/taskgraph/actions/rebuild_cached_tasks.py index 2b88e6a698..8ea2e37150 100644 --- a/third_party/python/taskcluster_taskgraph/taskgraph/actions/rebuild_cached_tasks.py +++ b/third_party/python/taskcluster_taskgraph/taskgraph/actions/rebuild_cached_tasks.py @@ -18,7 +18,7 @@ def rebuild_cached_tasks_action( parameters, graph_config, input, task_group_id, task_id ): decision_task_id, full_task_graph, label_to_taskid = fetch_graph_and_labels( - parameters, graph_config + parameters, graph_config, task_group_id=task_group_id ) cached_tasks = [ label diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/actions/registry.py b/third_party/python/taskcluster_taskgraph/taskgraph/actions/registry.py index 1e909d30c7..20955bd3f2 100644 --- a/third_party/python/taskcluster_taskgraph/taskgraph/actions/registry.py +++ b/third_party/python/taskcluster_taskgraph/taskgraph/actions/registry.py @@ -154,9 +154,7 @@ def register_callback_action( ], "register_callback_action must be used as decorator" if not cb_name: cb_name = name - assert cb_name not in callbacks, "callback name {} is not unique".format( - cb_name - ) + assert cb_name not in callbacks, f"callback name {cb_name} is not unique" def action_builder(parameters, graph_config, decision_task_id): if not available(parameters): @@ -165,11 +163,11 @@ def register_callback_action( actionPerm = "generic" if generic else cb_name # gather up the common decision-task-supplied data for this action - repo_param = "head_repository" repository = { - "url": parameters[repo_param], + "url": parameters["head_repository"], "project": parameters["project"], "level": parameters["level"], + "base_url": parameters["base_repository"], } revision = parameters["head_rev"] @@ -181,6 +179,9 @@ def register_callback_action( branch = parameters.get("head_ref") if branch: push["branch"] = branch + base_branch = parameters.get("base_ref") + if base_branch and branch != base_branch: + push["base_branch"] = base_branch action = { "name": name, @@ -215,13 +216,16 @@ def register_callback_action( if "/" in actionPerm: raise Exception("`/` is not allowed in action names; use `-`") + if parameters["tasks_for"].startswith("github-pull-request"): + hookId = f"in-tree-pr-action-{level}-{actionPerm}/{tcyml_hash}" + else: + hookId = f"in-tree-action-{level}-{actionPerm}/{tcyml_hash}" + rv.update( { "kind": "hook", "hookGroupId": f"project-{trustDomain}", - "hookId": "in-tree-action-{}-{}/{}".format( - level, actionPerm, tcyml_hash - ), + "hookId": hookId, "hookPayload": { # provide the decision-task parameters as context for triggerHook "decision": { @@ -297,16 +301,20 @@ def sanity_check_task_scope(callback, parameters, graph_config): actionPerm = "generic" if action.generic else action.cb_name - repo_param = "head_repository" - raw_url = parameters[repo_param] + raw_url = parameters["base_repository"] parsed_url = parse(raw_url) - expected_scope = f"assume:{parsed_url.taskcluster_role_prefix}:action:{actionPerm}" + action_scope = f"assume:{parsed_url.taskcluster_role_prefix}:action:{actionPerm}" + pr_action_scope = ( + f"assume:{parsed_url.taskcluster_role_prefix}:pr-action:{actionPerm}" + ) # the scope should appear literally; no need for a satisfaction check. The use of # get_current_scopes here calls the auth service through the Taskcluster Proxy, giving # the precise scopes available to this task. 
- if expected_scope not in taskcluster.get_current_scopes(): - raise ValueError(f"Expected task scope {expected_scope} for this action") + if not set((action_scope, pr_action_scope)) & set(taskcluster.get_current_scopes()): + raise ValueError( + f"Expected task scope {action_scope} or {pr_action_scope} for this action" + ) def trigger_action_callback( diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/actions/retrigger.py b/third_party/python/taskcluster_taskgraph/taskgraph/actions/retrigger.py index fd488b35fc..6c6091a47a 100644 --- a/third_party/python/taskcluster_taskgraph/taskgraph/actions/retrigger.py +++ b/third_party/python/taskcluster_taskgraph/taskgraph/actions/retrigger.py @@ -33,9 +33,7 @@ def _should_retrigger(task_graph, label): """ if label not in task_graph: logger.info( - "Task {} not in full taskgraph, assuming task should not be retriggered.".format( - label - ) + f"Task {label} not in full taskgraph, assuming task should not be retriggered." ) return False return task_graph[label].attributes.get("retrigger", False) @@ -67,7 +65,9 @@ def retrigger_decision_action(parameters, graph_config, input, task_group_id, ta # absolute timestamps relative to the current time. task = taskcluster.get_task_definition(task_id) task = relativize_datestamps(task) - create_task_from_def(slugid(), task, parameters["level"]) + create_task_from_def( + slugid(), task, parameters["level"], graph_config["trust-domain"] + ) @register_callback_action( @@ -144,7 +144,7 @@ def retrigger_decision_action(parameters, graph_config, input, task_group_id, ta ) def retrigger_action(parameters, graph_config, input, task_group_id, task_id): decision_task_id, full_task_graph, label_to_taskid = fetch_graph_and_labels( - parameters, graph_config + parameters, graph_config, task_group_id=task_group_id ) task = taskcluster.get_task_definition(task_id) @@ -155,8 +155,8 @@ def retrigger_action(parameters, graph_config, input, task_group_id, task_id): if not input.get("force", None) and not _should_retrigger(full_task_graph, label): logger.info( - "Not retriggering task {}, task should not be retrigged " - "and force not specified.".format(label) + f"Not retriggering task {label}, task should not be retrigged " + "and force not specified." ) sys.exit(1) @@ -201,14 +201,12 @@ def rerun_action(parameters, graph_config, input, task_group_id, task_id): task = taskcluster.get_task_definition(task_id) parameters = dict(parameters) decision_task_id, full_task_graph, label_to_taskid = fetch_graph_and_labels( - parameters, graph_config + parameters, graph_config, task_group_id=task_group_id ) label = task["metadata"]["name"] if task_id not in label_to_taskid.values(): logger.error( - "Refusing to rerun {}: taskId {} not in decision task {} label_to_taskid!".format( - label, task_id, decision_task_id - ) + f"Refusing to rerun {label}: taskId {task_id} not in decision task {decision_task_id} label_to_taskid!" ) _rerun_task(task_id, label) @@ -218,9 +216,7 @@ def _rerun_task(task_id, label): state = taskcluster.state_task(task_id) if state not in RERUN_STATES: logger.warning( - "No need to rerun {}: state '{}' not in {}!".format( - label, state, RERUN_STATES - ) + f"No need to rerun {label}: state '{state}' not in {RERUN_STATES}!" 
) return taskcluster.rerun_task(task_id) @@ -261,7 +257,7 @@ def _rerun_task(task_id, label): ) def retrigger_multiple(parameters, graph_config, input, task_group_id, task_id): decision_task_id, full_task_graph, label_to_taskid = fetch_graph_and_labels( - parameters, graph_config + parameters, graph_config, task_group_id=task_group_id ) suffixes = [] diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/actions/util.py b/third_party/python/taskcluster_taskgraph/taskgraph/actions/util.py index cf81029da2..41e3b035de 100644 --- a/third_party/python/taskcluster_taskgraph/taskgraph/actions/util.py +++ b/third_party/python/taskcluster_taskgraph/taskgraph/actions/util.py @@ -32,8 +32,15 @@ def get_parameters(decision_task_id): return get_artifact(decision_task_id, "public/parameters.yml") -def fetch_graph_and_labels(parameters, graph_config): - decision_task_id = find_decision_task(parameters, graph_config) +def fetch_graph_and_labels(parameters, graph_config, task_group_id=None): + try: + # Look up the decision_task id in the index + decision_task_id = find_decision_task(parameters, graph_config) + except KeyError: + if not task_group_id: + raise + # Not found (e.g. from github-pull-request), fall back to the task group id. + decision_task_id = task_group_id # First grab the graph and labels generated during the initial decision task full_task_graph = get_artifact(decision_task_id, "public/full-task-graph.json") @@ -90,7 +97,7 @@ def fetch_graph_and_labels(parameters, graph_config): return (decision_task_id, full_task_graph, label_to_taskid) -def create_task_from_def(task_id, task_def, level): +def create_task_from_def(task_id, task_def, level, trust_domain): """Create a new task from a definition rather than from a label that is already in the full-task-graph. The task definition will have {relative-datestamp': '..'} rendered just like in a decision task. @@ -98,7 +105,7 @@ def create_task_from_def(task_id, task_def, level): It is useful if you want to "edit" the full_task_graph and then hand it to this function. No dependencies will be scheduled. You must handle this yourself. Seeing how create_tasks handles it might prove helpful.""" - task_def["schedulerId"] = f"gecko-level-{level}" + task_def["schedulerId"] = f"{trust_domain}-level-{level}" label = task_def["metadata"]["name"] session = get_session() create.create_task(session, task_id, label, task_def) diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/config.py b/third_party/python/taskcluster_taskgraph/taskgraph/config.py index 7ea7dc7b33..ac384eab86 100644 --- a/third_party/python/taskcluster_taskgraph/taskgraph/config.py +++ b/third_party/python/taskcluster_taskgraph/taskgraph/config.py @@ -40,6 +40,11 @@ graph_config_schema = Schema( description="Default 'deadline' for tasks, in relative date format. " "Eg: '1 week'", ): optionally_keyed_by("project", str), + Optional( + "task-expires-after", + description="Default 'expires-after' for level 1 tasks, in relative date format. " + "Eg: '90 days'", + ): str, Required("workers"): { Required("aliases"): { str: { @@ -61,6 +66,10 @@ graph_config_schema = Schema( description="The taskcluster index prefix to use for caching tasks. 
" "Defaults to `trust-domain`.", ): str, + Optional( + "cache-pull-requests", + description="Should tasks from pull requests populate the cache", + ): bool, Optional( "index-path-regexes", description="Regular expressions matching index paths to be summarized.", @@ -102,28 +111,27 @@ class GraphConfig: Add the project's taskgraph directory to the python path, and register any extensions present. """ - modify_path = os.path.dirname(self.root_dir) if GraphConfig._PATH_MODIFIED: - if GraphConfig._PATH_MODIFIED == modify_path: + if GraphConfig._PATH_MODIFIED == self.root_dir: # Already modified path with the same root_dir. # We currently need to do this to enable actions to call # taskgraph_decision, e.g. relpro. return raise Exception("Can't register multiple directories on python path.") - GraphConfig._PATH_MODIFIED = modify_path - sys.path.insert(0, modify_path) + GraphConfig._PATH_MODIFIED = self.root_dir + sys.path.insert(0, self.root_dir) register_path = self["taskgraph"].get("register") if register_path: find_object(register_path)(self) @property def vcs_root(self): - if path.split(self.root_dir)[-2:] != ["taskcluster", "ci"]: + if path.split(self.root_dir)[-1:] != ["taskcluster"]: raise Exception( "Not guessing path to vcs root. " "Graph config in non-standard location." ) - return os.path.dirname(os.path.dirname(self.root_dir)) + return os.path.dirname(self.root_dir) @property def taskcluster_yml(self): diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/create.py b/third_party/python/taskcluster_taskgraph/taskgraph/create.py index deb1ac5348..e8baabb8a8 100644 --- a/third_party/python/taskcluster_taskgraph/taskgraph/create.py +++ b/third_party/python/taskcluster_taskgraph/taskgraph/create.py @@ -104,7 +104,7 @@ def create_tasks(graph_config, taskgraph, label_to_taskid, params, decision_task def create_task(session, task_id, label, task_def): # create the task using 'http://taskcluster/queue', which is proxied to the queue service - # with credentials appropriate to this job. + # with credentials appropriate to this task. 
# Resolve timestamps now = current_json_time(datetime_format=True) diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/decision.py b/third_party/python/taskcluster_taskgraph/taskgraph/decision.py index ed412f4473..d9eb9f3e90 100644 --- a/third_party/python/taskcluster_taskgraph/taskgraph/decision.py +++ b/third_party/python/taskcluster_taskgraph/taskgraph/decision.py @@ -46,21 +46,21 @@ try_task_config_schema_v2 = Schema( ) -def full_task_graph_to_runnable_jobs(full_task_json): - runnable_jobs = {} +def full_task_graph_to_runnable_tasks(full_task_json): + runnable_tasks = {} for label, node in full_task_json.items(): if not ("extra" in node["task"] and "treeherder" in node["task"]["extra"]): continue th = node["task"]["extra"]["treeherder"] - runnable_jobs[label] = {"symbol": th["symbol"]} + runnable_tasks[label] = {"symbol": th["symbol"]} for i in ("groupName", "groupSymbol", "collection"): if i in th: - runnable_jobs[label][i] = th[i] + runnable_tasks[label][i] = th[i] if th.get("machine", {}).get("platform"): - runnable_jobs[label]["platform"] = th["machine"]["platform"] - return runnable_jobs + runnable_tasks[label]["platform"] = th["machine"]["platform"] + return runnable_tasks def taskgraph_decision(options, parameters=None): @@ -104,7 +104,7 @@ def taskgraph_decision(options, parameters=None): # write out the public/runnable-jobs.json file write_artifact( - "runnable-jobs.json", full_task_graph_to_runnable_jobs(full_task_json) + "runnable-jobs.json", full_task_graph_to_runnable_tasks(full_task_json) ) # this is just a test to check whether the from_json() function is working @@ -185,6 +185,9 @@ def get_decision_parameters(graph_config, options): # Define default filter list, as most configurations shouldn't need # custom filters. + parameters["files_changed"] = repo.get_changed_files( + rev=parameters["head_rev"], base_rev=parameters["base_rev"] + ) parameters["filters"] = [ "target_tasks_method", ] @@ -214,9 +217,9 @@ def get_decision_parameters(graph_config, options): parameters.update(PER_PROJECT_PARAMETERS[project]) except KeyError: logger.warning( - "using default project parameters; add {} to " - "PER_PROJECT_PARAMETERS in {} to customize behavior " - "for this project".format(project, __file__) + f"using default project parameters; add {project} to " + f"PER_PROJECT_PARAMETERS in {__file__} to customize behavior " + "for this project" ) parameters.update(PER_PROJECT_PARAMETERS["default"]) diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/docker.py b/third_party/python/taskcluster_taskgraph/taskgraph/docker.py index 23897cbbee..9f849525fc 100644 --- a/third_party/python/taskcluster_taskgraph/taskgraph/docker.py +++ b/third_party/python/taskcluster_taskgraph/taskgraph/docker.py @@ -18,6 +18,22 @@ except ImportError as e: from taskgraph.util import docker from taskgraph.util.taskcluster import get_artifact_url, get_session +DEPLOY_WARNING = """ +***************************************************************** +WARNING: Image is not suitable for deploying/pushing. + +To automatically tag the image the following files are required: +- {image_dir}/REGISTRY +- {image_dir}/VERSION + +The REGISTRY file contains the Docker registry hosting the image. +A default REGISTRY file may also be defined in the parent docker +directory. + +The VERSION file contains the version of the image. 
+***************************************************************** +""" + def get_image_digest(image_name): from taskgraph.generator import load_tasks_for_kind @@ -34,7 +50,7 @@ def get_image_digest(image_name): def load_image_by_name(image_name, tag=None): from taskgraph.generator import load_tasks_for_kind - from taskgraph.optimize import IndexSearch + from taskgraph.optimize.strategies import IndexSearch from taskgraph.parameters import Parameters params = Parameters( @@ -43,8 +59,9 @@ def load_image_by_name(image_name, tag=None): ) tasks = load_tasks_for_kind(params, "docker-image") task = tasks[f"build-docker-image-{image_name}"] + deadline = None task_id = IndexSearch().should_replace_task( - task, {}, task.optimization.get("index-search", []) + task, {}, deadline, task.optimization.get("index-search", []) ) if task_id in (True, False): @@ -52,8 +69,10 @@ def load_image_by_name(image_name, tag=None): "Could not find artifacts for a docker image " "named `{image_name}`. Local commits and other changes " "in your checkout may cause this error. Try " - "updating to a fresh checkout of mozilla-central " - "to download image.".format(image_name=image_name) + "updating to a fresh checkout of {project} " + "to download image.".format( + image_name=image_name, project=params["project"] + ) ) return False @@ -102,19 +121,18 @@ def build_image(name, tag, args=None): buf = BytesIO() docker.stream_context_tar(".", image_dir, buf, "", args) - subprocess.run( - ["docker", "image", "build", "--no-cache", "-t", tag, "-"], input=buf.getvalue() - ) + cmdargs = ["docker", "image", "build", "--no-cache", "-"] + if tag: + cmdargs.insert(-1, f"-t={tag}") + subprocess.run(cmdargs, input=buf.getvalue()) - print(f"Successfully built {name} and tagged with {tag}") + msg = f"Successfully built {name}" + if tag: + msg += f" and tagged with {tag}" + print(msg) - if tag.endswith(":latest"): - print("*" * 50) - print("WARNING: no VERSION file found in image directory.") - print("Image is not suitable for deploying/pushing.") - print("Create an image suitable for deploying/pushing by creating") - print("a VERSION file in the image directory.") - print("*" * 50) + if not tag or tag.endswith(":latest"): + print(DEPLOY_WARNING.format(image_dir=os.path.relpath(image_dir), image=name)) def load_image(url, imageName=None, imageTag=None): diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/files_changed.py b/third_party/python/taskcluster_taskgraph/taskgraph/files_changed.py deleted file mode 100644 index 6be6e5eeee..0000000000 --- a/third_party/python/taskcluster_taskgraph/taskgraph/files_changed.py +++ /dev/null @@ -1,91 +0,0 @@ -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - -""" -Support for optimizing tasks based on the set of files that have changed. -""" - - -import logging -import os - -import requests -from redo import retry - -from .util.memoize import memoize -from .util.path import match as match_path -from .util.vcs import get_repository - -logger = logging.getLogger(__name__) - - -@memoize -def get_changed_files(head_repository_url, head_rev, base_rev=None): - """ - Get the set of files changed between revisions. - Responses are cached, so multiple calls with the same arguments are OK. 
- """ - repo_path = os.getcwd() - repository = get_repository(repo_path) - - if repository.tool == "hg": - # TODO Use VCS version once tested enough - return _get_changed_files_json_automationrelevance( - head_repository_url, head_rev - ) - - return repository.get_changed_files(rev=head_rev, base_rev=base_rev) - - -def _get_changed_files_json_automationrelevance(head_repository_url, head_rev): - """ - Get the set of files changed in the push headed by the given revision. - """ - url = "{}/json-automationrelevance/{}".format( - head_repository_url.rstrip("/"), head_rev - ) - logger.debug("Querying version control for metadata: %s", url) - - def get_automationrelevance(): - response = requests.get(url, timeout=30) - return response.json() - - contents = retry(get_automationrelevance, attempts=10, sleeptime=10) - - logger.debug( - "{} commits influencing task scheduling:".format(len(contents["changesets"])) - ) - changed_files = set() - for c in contents["changesets"]: - desc = "" # Support empty desc - if c["desc"]: - desc = c["desc"].splitlines()[0].encode("ascii", "ignore") - logger.debug(" {cset} {desc}".format(cset=c["node"][0:12], desc=desc)) - changed_files |= set(c["files"]) - - return changed_files - - -def check(params, file_patterns): - """Determine whether any of the files changed between 2 revisions - match any of the given file patterns.""" - - head_repository_url = params.get("head_repository") - head_rev = params.get("head_rev") - if not head_repository_url or not head_rev: - logger.warning( - "Missing `head_repository` or `head_rev` parameters; " - "assuming all files have changed" - ) - return True - - base_rev = params.get("base_rev") - changed_files = get_changed_files(head_repository_url, head_rev, base_rev) - - for pattern in file_patterns: - for path in changed_files: - if match_path(path, pattern): - return True - - return False diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/generator.py b/third_party/python/taskcluster_taskgraph/taskgraph/generator.py index 4ed2a41520..d649b91706 100644 --- a/third_party/python/taskcluster_taskgraph/taskgraph/generator.py +++ b/third_party/python/taskcluster_taskgraph/taskgraph/generator.py @@ -91,7 +91,7 @@ class Kind: @classmethod def load(cls, root_dir, graph_config, kind_name): - path = os.path.join(root_dir, kind_name) + path = os.path.join(root_dir, "kinds", kind_name) kind_yml = os.path.join(path, "kind.yml") if not os.path.exists(kind_yml): raise KindNotFound(kind_yml) @@ -125,13 +125,13 @@ class TaskGraphGenerator: write_artifacts=False, ): """ - @param root_dir: root directory, with subdirectories for each kind + @param root_dir: root directory containing the Taskgraph config.yml file @param parameters: parameters for this task-graph generation, or callable taking a `GraphConfig` and returning parameters @type parameters: Union[Parameters, Callable[[GraphConfig], Parameters]] """ if root_dir is None: - root_dir = "taskcluster/ci" + root_dir = "taskcluster" self.root_dir = root_dir self._parameters = parameters self._decision_task_id = decision_task_id @@ -243,7 +243,7 @@ class TaskGraphGenerator: yield kind queue.extend(kind.config.get("kind-dependencies", [])) else: - for kind_name in os.listdir(self.root_dir): + for kind_name in os.listdir(os.path.join(self.root_dir, "kinds")): try: yield Kind.load(self.root_dir, graph_config, kind_name) except KindNotFound: diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/loader/default.py 
b/third_party/python/taskcluster_taskgraph/taskgraph/loader/default.py index 5b2c258917..f060a1d92d 100644 --- a/third_party/python/taskcluster_taskgraph/taskgraph/loader/default.py +++ b/third_party/python/taskcluster_taskgraph/taskgraph/loader/default.py @@ -11,7 +11,7 @@ logger = logging.getLogger(__name__) DEFAULT_TRANSFORMS = [ - "taskgraph.transforms.job:transforms", + "taskgraph.transforms.run:transforms", "taskgraph.transforms.task:transforms", ] @@ -20,7 +20,7 @@ def loader(kind, path, config, params, loaded_tasks): """ This default loader builds on the `transform` loader by providing sensible default transforms that the majority of simple tasks will need. - Specifically, `job` and `task` transforms will be appended to the end of the + Specifically, `run` and `task` transforms will be appended to the end of the list of transforms in the kind being loaded. """ transform_refs = config.setdefault("transforms", []) diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/main.py b/third_party/python/taskcluster_taskgraph/taskgraph/main.py index 88a4e2539b..e68cd5a787 100644 --- a/third_party/python/taskcluster_taskgraph/taskgraph/main.py +++ b/third_party/python/taskcluster_taskgraph/taskgraph/main.py @@ -18,6 +18,7 @@ from concurrent.futures import ProcessPoolExecutor, as_completed from pathlib import Path from textwrap import dedent from typing import Any, List +from urllib.parse import urlparse import appdirs import yaml @@ -95,7 +96,7 @@ def get_filtered_taskgraph(taskgraph, tasksregex, exclude_keys): for key in exclude_keys: obj = task attrs = key.split(".") - while attrs[0] in obj: + while obj and attrs[0] in obj: if len(attrs) == 1: del obj[attrs[0]] break @@ -120,7 +121,7 @@ def get_taskgraph_generator(root, parameters): return TaskGraphGenerator(root_dir=root, parameters=parameters) -def format_taskgraph(options, parameters, logfile=None): +def format_taskgraph(options, parameters, overrides, logfile=None): import taskgraph from taskgraph.parameters import parameters_loader @@ -138,7 +139,7 @@ def format_taskgraph(options, parameters, logfile=None): if isinstance(parameters, str): parameters = parameters_loader( parameters, - overrides={"target-kinds": options.get("target_kinds")}, + overrides=overrides, strict=False, ) @@ -172,7 +173,7 @@ def dump_output(out, path=None, params_spec=None): print(out + "\n", file=fh) -def generate_taskgraph(options, parameters, logdir): +def generate_taskgraph(options, parameters, overrides, logdir): from taskgraph.parameters import Parameters def logfile(spec): @@ -188,14 +189,16 @@ def generate_taskgraph(options, parameters, logdir): # tracebacks a little more readable and avoids additional process overhead. 
if len(parameters) == 1: spec = parameters[0] - out = format_taskgraph(options, spec, logfile(spec)) + out = format_taskgraph(options, spec, overrides, logfile(spec)) dump_output(out, options["output_file"]) return 0 futures = {} with ProcessPoolExecutor(max_workers=options["max_workers"]) as executor: for spec in parameters: - f = executor.submit(format_taskgraph, options, spec, logfile(spec)) + f = executor.submit( + format_taskgraph, options, spec, overrides, logfile(spec) + ) futures[f] = spec returncode = 0 @@ -292,6 +295,15 @@ def generate_taskgraph(options, parameters, logdir): "generations will happen from the same invocation (one per parameters " "specified).", ) +@argument( + "--force-local-files-changed", + default=False, + action="store_true", + help="Compute the 'files-changed' parameter from local version control, " + "even when explicitly using a parameter set that already has it defined. " + "Note that this is already the default behaviour when no parameters are " + "specified.", +) @argument( "--no-optimize", dest="optimize", @@ -366,9 +378,11 @@ def show_taskgraph(options): diffdir = None output_file = options["output_file"] - if options["diff"]: + if options["diff"] or options["force_local_files_changed"]: repo = get_repository(os.getcwd()) + if options["diff"]: + assert repo is not None if not repo.working_directory_clean(): print( "abort: can't diff taskgraph with dirty working directory", @@ -392,15 +406,22 @@ def show_taskgraph(options): ) print(f"Generating {options['graph_attr']} @ {cur_rev}", file=sys.stderr) + overrides = { + "target-kinds": options.get("target_kinds"), + } parameters: List[Any[str, Parameters]] = options.pop("parameters") if not parameters: - overrides = { - "target-kinds": options.get("target_kinds"), - } parameters = [ parameters_loader(None, strict=False, overrides=overrides) ] # will use default values + # This is the default behaviour anyway, so no need to re-compute. + options["force_local_files_changed"] = False + + elif options["force_local_files_changed"]: + assert repo is not None + overrides["files-changed"] = sorted(repo.get_changed_files("AM")) + for param in parameters[:]: if isinstance(param, str) and os.path.isdir(param): parameters.remove(param) @@ -426,7 +447,7 @@ def show_taskgraph(options): # to setup its `mach` based logging. setup_logging() - ret = generate_taskgraph(options, parameters, logdir) + ret = generate_taskgraph(options, parameters, overrides, logdir) if options["diff"]: assert diffdir is not None @@ -450,7 +471,7 @@ def show_taskgraph(options): diffdir, f"{options['graph_attr']}_{base_rev_file}" ) print(f"Generating {options['graph_attr']} @ {base_rev}", file=sys.stderr) - ret |= generate_taskgraph(options, parameters, logdir) + ret |= generate_taskgraph(options, parameters, overrides, logdir) finally: repo.update(cur_rev) @@ -463,6 +484,8 @@ def show_taskgraph(options): f"--label={options['graph_attr']}@{cur_rev}", ] + non_fatal_failures = [] + for spec in parameters: base_path = os.path.join( diffdir, f"{options['graph_attr']}_{base_rev_file}" @@ -475,7 +498,20 @@ def show_taskgraph(options): base_path += f"_{params_name}" cur_path += f"_{params_name}" + # If the base or cur files are missing it means that generation + # failed. If one of them failed but not the other, the failure is + # likely due to the patch making changes to taskgraph in modules + # that don't get reloaded (safe to ignore). If both generations + # failed, there's likely a real issue. 
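The new --force-local-files-changed flag recomputes files_changed from the working copy and threads it through the new overrides argument, which now reaches every format_taskgraph call, including those dispatched to the process pool. The override itself boils down to the following sketch, reusing the get_repository and get_changed_files("AM") helpers the hunk above calls ("AM" selects added and modified files; the wrapper function is illustrative):

    import os

    from taskgraph.util.vcs import get_repository

    def files_changed_override() -> dict:
        repo = get_repository(os.getcwd())
        return {"files-changed": sorted(repo.get_changed_files("AM"))}

Passed as overrides to parameters_loader, this replaces whatever a stored parameter set recorded at decision-task time.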
+ base_missing = not os.path.isfile(base_path) + cur_missing = not os.path.isfile(cur_path) + if base_missing != cur_missing: # != is equivalent to XOR for booleans + non_fatal_failures.append(os.path.basename(base_path)) + continue + try: + # If the output file(s) are missing, this command will raise + # CalledProcessError with a returncode > 1. proc = subprocess.run( diffcmd + [base_path, cur_path], capture_output=True, @@ -500,6 +536,16 @@ def show_taskgraph(options): params_spec=spec if len(parameters) > 1 else None, ) + if non_fatal_failures: + failstr = "\n ".join(sorted(non_fatal_failures)) + print( + "WARNING: Diff skipped for the following generation{s} " + "due to failures:\n {failstr}".format( + s="s" if len(non_fatal_failures) > 1 else "", failstr=failstr + ), + file=sys.stderr, + ) + if options["format"] != "json": print( "If you were expecting differences in task bodies " @@ -661,7 +707,7 @@ def decision(options): @argument( "--root", "-r", - default="taskcluster/ci", + default="taskcluster", help="root of the taskgraph definition relative to topsrcdir", ) def action_callback(options): @@ -697,7 +743,7 @@ def action_callback(options): @argument( "--root", "-r", - default="taskcluster/ci", + default="taskcluster", help="root of the taskgraph definition relative to topsrcdir", ) @argument( @@ -835,6 +881,10 @@ def init_taskgraph(options): ) return 1 + context["repo_name"] = urlparse(repo_url).path.rsplit("/", 1)[-1] + if context["repo_name"].endswith(".git"): + context["repo_name"] = context["repo_name"][: -len(".git")] + # Generate the project. cookiecutter( options["template"], @@ -867,6 +917,11 @@ def setup_logging(): def main(args=sys.argv[1:]): setup_logging() parser = create_parser() + + if not args: + parser.print_help() + sys.exit(1) + args = parser.parse_args(args) try: return args.command(vars(args)) diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/morph.py b/third_party/python/taskcluster_taskgraph/taskgraph/morph.py index bfa1560270..e4bb268ab8 100644 --- a/third_party/python/taskcluster_taskgraph/taskgraph/morph.py +++ b/third_party/python/taskcluster_taskgraph/taskgraph/morph.py @@ -38,6 +38,7 @@ registered_morphs = [] def register_morph(func): registered_morphs.append(func) + return func def amend_taskgraph(taskgraph, label_to_taskid, to_add): diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/optimize/base.py b/third_party/python/taskcluster_taskgraph/taskgraph/optimize/base.py index 367b94e1de..e5477d35b7 100644 --- a/third_party/python/taskcluster_taskgraph/taskgraph/optimize/base.py +++ b/third_party/python/taskcluster_taskgraph/taskgraph/optimize/base.py @@ -271,14 +271,19 @@ def replace_tasks( dependencies_of = target_task_graph.graph.links_dict() for label in target_task_graph.graph.visit_postorder(): + logger.debug(f"replace_tasks: {label}") # if we're not allowed to optimize, that's easy.. 
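init_taskgraph now derives the cookiecutter repo_name from the clone URL rather than requiring it as input. The same derivation in isolation:

    from urllib.parse import urlparse

    def repo_name_from_url(repo_url: str) -> str:
        name = urlparse(repo_url).path.rsplit("/", 1)[-1]
        if name.endswith(".git"):
            name = name[: -len(".git")]
        return name

    assert repo_name_from_url("https://github.com/taskcluster/taskgraph.git") == "taskgraph"

The "base_missing != cur_missing" test in the diff-handling hunk above is a boolean XOR: a diff is attempted only when both generations produced output, and a single-sided failure is downgraded to the non-fatal warning printed afterwards.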
if label in do_not_optimize: + logger.debug(f"replace_tasks: {label} is in do_not_optimize") continue # if this task depends on un-replaced, un-removed tasks, do not replace if any( l not in replaced and l not in removed_tasks for l in dependencies_of[label] ): + logger.debug( + f"replace_tasks: {label} depends on an unreplaced or unremoved task" + ) continue # if the task already exists, that's an easy replacement @@ -287,6 +292,7 @@ def replace_tasks( label_to_taskid[label] = repl replaced.add(label) opt_counts["existing_tasks"] += 1 + logger.debug(f"replace_tasks: {label} replaced from existing_tasks") continue # call the optimization strategy @@ -304,14 +310,20 @@ def replace_tasks( repl = opt.should_replace_task(task, params, deadline, arg) if repl: if repl is True: + logger.debug(f"replace_tasks: {label} removed by optimization strategy") # True means remove this task; get_subgraph will catch any # problems with removed tasks being depended on removed_tasks.add(label) else: + logger.debug( + f"replace_tasks: {label} replaced by optimization strategy" + ) label_to_taskid[label] = repl replaced.add(label) opt_counts[opt_by] += 1 continue + else: + logger.debug(f"replace_tasks: {label} kept by optimization strategy") _log_optimization("replaced", opt_counts) return replaced diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/optimize/strategies.py b/third_party/python/taskcluster_taskgraph/taskgraph/optimize/strategies.py index 973b550632..5baecfe645 100644 --- a/third_party/python/taskcluster_taskgraph/taskgraph/optimize/strategies.py +++ b/third_party/python/taskcluster_taskgraph/taskgraph/optimize/strategies.py @@ -1,8 +1,8 @@ import logging from datetime import datetime -from taskgraph import files_changed from taskgraph.optimize.base import OptimizationStrategy, register_strategy +from taskgraph.util.path import match as match_path from taskgraph.util.taskcluster import find_task_id, status_task logger = logging.getLogger(__name__) @@ -48,17 +48,23 @@ class IndexSearch(OptimizationStrategy): @register_strategy("skip-unless-changed") class SkipUnlessChanged(OptimizationStrategy): + + def check(self, files_changed, patterns): + for pattern in patterns: + for path in files_changed: + if match_path(path, pattern): + return True + return False + def should_remove_task(self, task, params, file_patterns): # pushlog_id == -1 - this is the case when run from a cron.yml job or on a git repository if params.get("repository_type") == "hg" and params.get("pushlog_id") == -1: return False - changed = files_changed.check(params, file_patterns) + changed = self.check(params["files_changed"], file_patterns) if not changed: logger.debug( - 'no files found matching a pattern in `skip-unless-changed` for "{}"'.format( - task.label - ) + f'no files found matching a pattern in `skip-unless-changed` for "{task.label}"' ) return True return False diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/parameters.py b/third_party/python/taskcluster_taskgraph/taskgraph/parameters.py index 48571d97ad..c69b201e34 100644 --- a/third_party/python/taskcluster_taskgraph/taskgraph/parameters.py +++ b/third_party/python/taskcluster_taskgraph/taskgraph/parameters.py @@ -40,6 +40,7 @@ base_schema = Schema( Required("do_not_optimize"): [str], Required("enable_always_target"): Any(bool, [str]), Required("existing_tasks"): {str: str}, + Required("files_changed"): [str], Required("filters"): [str], Required("head_ref"): str, Required("head_repository"): str, @@ -86,6 +87,7 @@ def 
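skip-unless-changed no longer shells out to version control through the deleted files_changed module; it matches its patterns directly against the new files_changed parameter. A trimmed-down sketch of the strategy's new check, reusing the same taskgraph.util.path.match the hunk above imports:

    from taskgraph.util.path import match as match_path

    def any_file_matches(files_changed: list, patterns: list) -> bool:
        # True if any changed file matches any mozpack-style pattern.
        return any(
            match_path(path, pattern)
            for pattern in patterns
            for path in files_changed
        )

A task whose kind sets skip-unless-changed: ["src/**"] is therefore dropped whenever params["files_changed"] contains nothing under src/.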
_get_defaults(repo_root=None): # Use fake values if no repo is detected. repo = Mock(branch="", head_rev="", tool="git") repo.get_url.return_value = "" + repo.get_changed_files.return_value = [] try: repo_url = repo.get_url() @@ -108,6 +110,7 @@ def _get_defaults(repo_root=None): "do_not_optimize": [], "enable_always_target": True, "existing_tasks": {}, + "files_changed": repo.get_changed_files("AM"), "filters": ["target_tasks_method"], "head_ref": repo.branch or repo.head_rev, "head_repository": repo_url, @@ -284,7 +287,7 @@ class Parameters(ReadOnlyDict): else: raise ParameterMismatch( "Don't know how to determine file URL for non-github" - "repo: {}".format(repo) + f"repo: {repo}" ) else: raise RuntimeError( diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/run-task/run-task b/third_party/python/taskcluster_taskgraph/taskgraph/run-task/run-task index 267b5283ea..f3a343de33 100755 --- a/third_party/python/taskcluster_taskgraph/taskgraph/run-task/run-task +++ b/third_party/python/taskcluster_taskgraph/taskgraph/run-task/run-task @@ -1,4 +1,4 @@ -#!/usr/bin/python3 -u +#!/usr/bin/env -S python3 -u # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. @@ -624,6 +624,11 @@ def git_checkout( "Must specify both ssh_key_file and ssh_known_hosts_file, if either are specified", ) + # Bypass Git's "safe directory" feature as the destination could be + # coming from a cache and therefore cloned by a different user. + args = ["git", "config", "--global", "--add", "safe.directory", Path(destination_path).as_posix()] + retry_required_command(b"vcs", args, extra_env=env) + if not os.path.exists(destination_path): # Repository doesn't already exist, needs to be cloned args = [ @@ -782,9 +787,7 @@ def hg_checkout( branch: Optional[str], revision: Optional[str], ): - if IS_MACOSX: - hg_bin = "/tools/python27-mercurial/bin/hg" - elif IS_POSIX: + if IS_MACOSX or IS_POSIX: hg_bin = "hg" elif IS_WINDOWS: # This is where OCC installs it in the AMIs. 
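The run-task hunk above registers the checkout destination as a Git safe.directory before cloning, since a cached checkout may have been created under a different UID and Git would otherwise refuse to operate on it. The equivalent standalone invocation (the real script routes this through its retry wrapper; this sketch does not):

    import subprocess
    from pathlib import Path

    def mark_safe_directory(destination_path: str) -> None:
        subprocess.run(
            ["git", "config", "--global", "--add",
             "safe.directory", Path(destination_path).as_posix()],
            check=True,
        )

as_posix() keeps the recorded path in forward-slash form, which Git accepts on all platforms.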
@@ -1007,7 +1010,8 @@ def install_pip_requirements(repositories): if not requirements: return - cmd = [sys.executable, "-mpip", "install"] + # TODO: Stop using system Python (#381) + cmd = [sys.executable, "-mpip", "install", "--break-system-packages"] if os.environ.get("PIP_DISABLE_REQUIRE_HASHES") != "1": cmd.append("--require-hashes") diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/target_tasks.py b/third_party/python/taskcluster_taskgraph/taskgraph/target_tasks.py index 1119a1c960..7f44b6ab60 100644 --- a/third_party/python/taskcluster_taskgraph/taskgraph/target_tasks.py +++ b/third_party/python/taskcluster_taskgraph/taskgraph/target_tasks.py @@ -14,7 +14,7 @@ _target_task_methods = {} _GIT_REFS_HEADS_PREFIX = "refs/heads/" -def _target_task(name): +def register_target_task(name): def wrap(func): _target_task_methods[name] = func return func @@ -81,7 +81,7 @@ def standard_filter(task, parameters): ) -@_target_task("default") +@register_target_task("default") def target_tasks_default(full_task_graph, parameters, graph_config): """Target the tasks which have indicated they should be run on this project via the `run_on_projects` attributes.""" @@ -90,7 +90,7 @@ def target_tasks_default(full_task_graph, parameters, graph_config): ] -@_target_task("codereview") +@register_target_task("codereview") def target_tasks_codereview(full_task_graph, parameters, graph_config): """Target the tasks which have indicated they should be run on this project via the `run_on_projects` attributes.""" @@ -101,7 +101,7 @@ def target_tasks_codereview(full_task_graph, parameters, graph_config): ] -@_target_task("nothing") +@register_target_task("nothing") def target_tasks_nothing(full_task_graph, parameters, graph_config): """Select nothing, for DONTBUILD pushes""" return [] diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/__init__.py b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/__init__.py index 4fa7b5fc0c..e69de29bb2 100644 --- a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/__init__.py +++ b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/__init__.py @@ -1,3 +0,0 @@ -from taskgraph.transforms import ( # noqa: Added for backwards compat - notify as release_notifications, -) diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/base.py b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/base.py index e6fcd2400c..fda0c584fc 100644 --- a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/base.py +++ b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/base.py @@ -147,7 +147,7 @@ class ValidateSchema: kind=config.kind, name=task["name"] ) elif "label" in task: - error = "In job {label!r}:".format(label=task["label"]) + error = "In task {label!r}:".format(label=task["label"]) elif "primary-dependency" in task: error = "In {kind} kind task for {dependency!r}:".format( kind=config.kind, dependency=task["primary-dependency"].label diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/code_review.py b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/code_review.py index bdb655b97d..2c859c36f6 100644 --- a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/code_review.py +++ b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/code_review.py @@ -12,12 +12,12 @@ transforms = TransformSequence() @transforms.add -def add_dependencies(config, jobs): - for job in jobs: - job.setdefault("soft-dependencies", []) - 
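Renaming _target_task to register_target_task in the target_tasks.py hunk above promotes the decorator to public API, so consuming projects can register their own selection methods without touching a private name. A hedged usage sketch; the selector body and the "nightly" attribute are illustrative only:

    from taskgraph.target_tasks import register_target_task

    @register_target_task("nightly")
    def target_tasks_nightly(full_task_graph, parameters, graph_config):
        # Select tasks that opt in via a (hypothetical) `nightly` attribute.
        return [
            label
            for label, task in full_task_graph.tasks.items()
            if task.attributes.get("nightly", False)
        ]

The registered name then becomes usable as the target_tasks_method parameter.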
job["soft-dependencies"] += [ +def add_dependencies(config, tasks): + for task in tasks: + task.setdefault("soft-dependencies", []) + task["soft-dependencies"] += [ dep_task.label for dep_task in config.kind_dependencies_tasks.values() if dep_task.attributes.get("code-review") is True ] - yield job + yield task diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/docker_image.py b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/docker_image.py index d0c5b9c97b..b58320092b 100644 --- a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/docker_image.py +++ b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/docker_image.py @@ -92,9 +92,7 @@ def fill_template(config, tasks): for p in packages: if p not in available_packages: raise Exception( - "Missing package job for {}-{}: {}".format( - config.kind, image_name, p - ) + f"Missing package job for {config.kind}-{image_name}: {p}" ) if not taskgraph.fast: @@ -119,9 +117,7 @@ def fill_template(config, tasks): digest_data += [json.dumps(args, sort_keys=True)] context_hashes[image_name] = context_hash - description = "Build the docker image {} for use by dependent tasks".format( - image_name - ) + description = f"Build the docker image {image_name} for use by dependent tasks" args["DOCKER_IMAGE_PACKAGES"] = " ".join(f"<{p}>" for p in packages) @@ -132,6 +128,8 @@ def fill_template(config, tasks): # burn more CPU once to reduce image size. zstd_level = "3" if int(config.params["level"]) == 1 else "10" + expires = config.graph_config._config.get("task-expires-after", "28 days") + # include some information that is useful in reconstructing this task # from JSON taskdesc = { @@ -142,7 +140,7 @@ def fill_template(config, tasks): "artifact_prefix": "public", }, "always-target": True, - "expires-after": "28 days" if config.params.is_try() else "1 year", + "expires-after": expires if config.params.is_try() else "1 year", "scopes": [], "run-on-projects": [], "worker-type": "images", @@ -158,9 +156,7 @@ def fill_template(config, tasks): ], "env": { "CONTEXT_TASK_ID": {"task-reference": ""}, - "CONTEXT_PATH": "public/docker-contexts/{}.tar.gz".format( - image_name - ), + "CONTEXT_PATH": f"public/docker-contexts/{image_name}.tar.gz", "HASH": context_hash, "PROJECT": config.params["project"], "IMAGE_NAME": image_name, diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/fetch.py b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/fetch.py index bcb8ff38a6..0e1b739677 100644 --- a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/fetch.py +++ b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/fetch.py @@ -32,11 +32,12 @@ FETCH_SCHEMA = Schema( Optional("task-from"): str, # Description of the task. Required("description"): str, + Optional("expires-after"): str, Optional("docker-image"): object, Optional( "fetch-alias", - description="An alias that can be used instead of the real fetch job name in " - "fetch stanzas for jobs.", + description="An alias that can be used instead of the real fetch task name in " + "fetch stanzas for tasks.", ): str, Optional( "artifact-prefix", @@ -78,20 +79,20 @@ transforms.add_validate(FETCH_SCHEMA) @transforms.add -def process_fetch_job(config, jobs): - # Converts fetch-url entries to the job schema. - for job in jobs: - typ = job["fetch"]["type"] - name = job["name"] - fetch = job.pop("fetch") +def process_fetch_task(config, tasks): + # Converts fetch-url entries to the run schema. 
+ for task in tasks: + typ = task["fetch"]["type"] + name = task["name"] + fetch = task.pop("fetch") if typ not in fetch_builders: raise Exception(f"Unknown fetch type {typ} in fetch {name}") validate_schema(fetch_builders[typ].schema, fetch, f"In task.fetch {name!r}:") - job.update(configure_fetch(config, typ, name, fetch)) + task.update(configure_fetch(config, typ, name, fetch)) - yield job + yield task def configure_fetch(config, typ, name, fetch): @@ -103,41 +104,41 @@ def configure_fetch(config, typ, name, fetch): @transforms.add -def make_task(config, jobs): +def make_task(config, tasks): # Fetch tasks are idempotent and immutable. Have them live for # essentially forever. if config.params["level"] == "3": expires = "1000 years" else: - expires = "28 days" + expires = config.graph_config._config.get("task-expires-after", "28 days") - for job in jobs: - name = job["name"] - artifact_prefix = job.get("artifact-prefix", "public") - env = job.get("env", {}) + for task in tasks: + name = task["name"] + artifact_prefix = task.get("artifact-prefix", "public") + env = task.get("env", {}) env.update({"UPLOAD_DIR": "/builds/worker/artifacts"}) - attributes = job.get("attributes", {}) - attributes["fetch-artifact"] = path.join(artifact_prefix, job["artifact_name"]) - alias = job.get("fetch-alias") + attributes = task.get("attributes", {}) + attributes["fetch-artifact"] = path.join(artifact_prefix, task["artifact_name"]) + alias = task.get("fetch-alias") if alias: attributes["fetch-alias"] = alias - task = { + task_desc = { "attributes": attributes, "name": name, - "description": job["description"], - "expires-after": expires, + "description": task["description"], + "expires-after": task.get("expires-after", expires), "label": "fetch-%s" % name, "run-on-projects": [], "run": { "using": "run-task", "checkout": False, - "command": job["command"], + "command": task["command"], }, "worker-type": "images", "worker": { "chain-of-trust": True, - "docker-image": job.get("docker-image", {"in-tree": "fetch"}), + "docker-image": task.get("docker-image", {"in-tree": "fetch"}), "env": env, "max-run-time": 900, "artifacts": [ @@ -151,29 +152,29 @@ def make_task(config, jobs): } if "treeherder" in config.graph_config: - task["treeherder"] = { + task_desc["treeherder"] = { "symbol": join_symbol("Fetch", name), "kind": "build", "platform": "fetch/opt", "tier": 1, } - if job.get("secret", None): - task["scopes"] = ["secrets:get:" + job.get("secret")] - task["worker"]["taskcluster-proxy"] = True + if task.get("secret", None): + task_desc["scopes"] = ["secrets:get:" + task.get("secret")] + task_desc["worker"]["taskcluster-proxy"] = True if not taskgraph.fast: - cache_name = task["label"].replace(f"{config.kind}-", "", 1) + cache_name = task_desc["label"].replace(f"{config.kind}-", "", 1) # This adds the level to the index path automatically. 
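Fetch-task expiry is now resolved in three steps rather than hard-coded: level-3 graphs keep the effectively immortal "1000 years", other levels read the graph config's task-expires-after key (falling back to the previous "28 days"), and an explicit per-task expires-after always wins. The docker-image transform above gained the same config knob for try pushes. Condensed sketch, with a plain dict standing in for the graph_config._config access in the diff:

    def fetch_expires(graph_config: dict, task: dict, level: str) -> str:
        if level == "3":
            default = "1000 years"  # fetch tasks are idempotent and immutable
        else:
            default = graph_config.get("task-expires-after", "28 days")
        return task.get("expires-after", default)  # per-task value still wins

    print(fetch_expires({}, {}, "1"))                                 # 28 days
    print(fetch_expires({"task-expires-after": "90 days"}, {}, "1"))  # 90 days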
add_optimization( config, - task, + task_desc, cache_type=CACHE_TYPE, cache_name=cache_name, - digest_data=job["digest_data"], + digest_data=task["digest_data"], ) - yield task + yield task_desc @fetch_builder( diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/from_deps.py b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/from_deps.py index 337d68e4ba..191ef7d56a 100644 --- a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/from_deps.py +++ b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/from_deps.py @@ -16,10 +16,11 @@ from textwrap import dedent from voluptuous import Any, Extra, Optional, Required from taskgraph.transforms.base import TransformSequence -from taskgraph.transforms.job import fetches_schema +from taskgraph.transforms.run import fetches_schema from taskgraph.util.attributes import attrmatch from taskgraph.util.dependencies import GROUP_BY_MAP, get_dependencies from taskgraph.util.schema import Schema, validate_schema +from taskgraph.util.set_name import SET_NAME_MAP FROM_DEPS_SCHEMA = Schema( { @@ -41,12 +42,14 @@ FROM_DEPS_SCHEMA = Schema( "set-name", description=dedent( """ - When True, `from_deps` will derive a name for the generated - tasks from the name of the primary dependency. Defaults to - True. + UPDATE ME AND DOCS """.lstrip() ), - ): bool, + ): Any( + None, + *SET_NAME_MAP, + {Any(*SET_NAME_MAP): object}, + ), Optional( "with-attributes", description=dedent( @@ -170,7 +173,7 @@ def from_deps(config, tasks): groups = func(config, deps) # Split the task, one per group. - set_name = from_deps.get("set-name", True) + set_name = from_deps.get("set-name", "strip-kind") copy_attributes = from_deps.get("copy-attributes", False) unique_kinds = from_deps.get("unique-kinds", True) fetches = from_deps.get("fetches", []) @@ -203,10 +206,8 @@ def from_deps(config, tasks): primary_dep = [dep for dep in group if dep.kind == primary_kind][0] if set_name: - if primary_dep.label.startswith(primary_kind): - new_task["name"] = primary_dep.label[len(primary_kind) + 1 :] - else: - new_task["name"] = primary_dep.label + func = SET_NAME_MAP[set_name] + new_task["name"] = func(config, deps, primary_dep, primary_kind) if copy_attributes: attrs = new_task.setdefault("attributes", {}) diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/job/__init__.py b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/job/__init__.py deleted file mode 100644 index 06978ff46d..0000000000 --- a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/job/__init__.py +++ /dev/null @@ -1,453 +0,0 @@ -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -""" -Convert a job description into a task description. - -Jobs descriptions are similar to task descriptions, but they specify how to run -the job at a higher level, using a "run" field that can be interpreted by -run-using handlers in `taskcluster/taskgraph/transforms/job`. 
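In from_deps, set-name changes from a boolean into a strategy name looked up in the new SET_NAME_MAP (from taskgraph.util.set_name), defaulting to "strip-kind". That default reproduces the inline logic it replaces above; a standalone sketch with the same signature shape:

    from types import SimpleNamespace

    def strip_kind(config, deps, primary_dep, primary_kind) -> str:
        # Name the generated task after its primary dep, minus the kind prefix.
        if primary_dep.label.startswith(primary_kind):
            return primary_dep.label[len(primary_kind) + 1 :]
        return primary_dep.label

    dep = SimpleNamespace(label="build-linux64/opt")
    print(strip_kind(None, None, dep, "build"))  # -> linux64/opt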
-""" - - -import copy -import json -import logging - -from voluptuous import Any, Exclusive, Extra, Optional, Required - -from taskgraph.transforms.base import TransformSequence -from taskgraph.transforms.cached_tasks import order_tasks -from taskgraph.transforms.task import task_description_schema -from taskgraph.util import path as mozpath -from taskgraph.util.python_path import import_sibling_modules -from taskgraph.util.schema import Schema, validate_schema -from taskgraph.util.taskcluster import get_artifact_prefix -from taskgraph.util.workertypes import worker_type_implementation - -logger = logging.getLogger(__name__) - -# Fetches may be accepted in other transforms and eventually passed along -# to a `job` (eg: from_deps). Defining this here allows them to re-use -# the schema and avoid duplication. -fetches_schema = { - Required("artifact"): str, - Optional("dest"): str, - Optional("extract"): bool, - Optional("verify-hash"): bool, -} - -# Schema for a build description -job_description_schema = Schema( - { - # The name of the job and the job's label. At least one must be specified, - # and the label will be generated from the name if necessary, by prepending - # the kind. - Optional("name"): str, - Optional("label"): str, - # the following fields are passed directly through to the task description, - # possibly modified by the run implementation. See - # taskcluster/taskgraph/transforms/task.py for the schema details. - Required("description"): task_description_schema["description"], - Optional("attributes"): task_description_schema["attributes"], - Optional("task-from"): task_description_schema["task-from"], - Optional("dependencies"): task_description_schema["dependencies"], - Optional("soft-dependencies"): task_description_schema["soft-dependencies"], - Optional("if-dependencies"): task_description_schema["if-dependencies"], - Optional("requires"): task_description_schema["requires"], - Optional("expires-after"): task_description_schema["expires-after"], - Optional("routes"): task_description_schema["routes"], - Optional("scopes"): task_description_schema["scopes"], - Optional("tags"): task_description_schema["tags"], - Optional("extra"): task_description_schema["extra"], - Optional("treeherder"): task_description_schema["treeherder"], - Optional("index"): task_description_schema["index"], - Optional("run-on-projects"): task_description_schema["run-on-projects"], - Optional("run-on-tasks-for"): task_description_schema["run-on-tasks-for"], - Optional("run-on-git-branches"): task_description_schema["run-on-git-branches"], - Optional("shipping-phase"): task_description_schema["shipping-phase"], - Optional("always-target"): task_description_schema["always-target"], - Exclusive("optimization", "optimization"): task_description_schema[ - "optimization" - ], - Optional("needs-sccache"): task_description_schema["needs-sccache"], - # The "when" section contains descriptions of the circumstances under which - # this task should be included in the task graph. This will be converted - # into an optimization, so it cannot be specified in a job description that - # also gives 'optimization'. - Exclusive("when", "optimization"): { - # This task only needs to be run if a file matching one of the given - # patterns has changed in the push. The patterns use the mozpack - # match function (python/mozbuild/mozpack/path.py). - Optional("files-changed"): [str], - }, - # A list of artifacts to install from 'fetch' tasks. 
- Optional("fetches"): { - Any("toolchain", "fetch"): [str], - str: [ - str, - fetches_schema, - ], - }, - # A description of how to run this job. - "run": { - # The key to a job implementation in a peer module to this one - "using": str, - # Base work directory used to set up the task. - Optional("workdir"): str, - # Any remaining content is verified against that job implementation's - # own schema. - Extra: object, - }, - Required("worker-type"): task_description_schema["worker-type"], - # This object will be passed through to the task description, with additions - # provided by the job's run-using function - Optional("worker"): dict, - } -) - -transforms = TransformSequence() -transforms.add_validate(job_description_schema) - - -@transforms.add -def rewrite_when_to_optimization(config, jobs): - for job in jobs: - when = job.pop("when", {}) - if not when: - yield job - continue - - files_changed = when.get("files-changed") - - # implicitly add task config directory. - files_changed.append(f"{config.path}/**") - - # "only when files changed" implies "skip if files have not changed" - job["optimization"] = {"skip-unless-changed": files_changed} - - assert "when" not in job - yield job - - -@transforms.add -def set_implementation(config, jobs): - for job in jobs: - impl, os = worker_type_implementation(config.graph_config, job["worker-type"]) - if os: - job.setdefault("tags", {})["os"] = os - if impl: - job.setdefault("tags", {})["worker-implementation"] = impl - worker = job.setdefault("worker", {}) - assert "implementation" not in worker - worker["implementation"] = impl - if os: - worker["os"] = os - yield job - - -@transforms.add -def set_label(config, jobs): - for job in jobs: - if "label" not in job: - if "name" not in job: - raise Exception("job has neither a name nor a label") - job["label"] = "{}-{}".format(config.kind, job["name"]) - if job.get("name"): - del job["name"] - yield job - - -@transforms.add -def add_resource_monitor(config, jobs): - for job in jobs: - if job.get("attributes", {}).get("resource-monitor"): - worker_implementation, worker_os = worker_type_implementation( - config.graph_config, job["worker-type"] - ) - # Normalise worker os so that linux-bitbar and similar use linux tools. 
- worker_os = worker_os.split("-")[0] - if "win7" in job["worker-type"]: - arch = "32" - else: - arch = "64" - job.setdefault("fetches", {}) - job["fetches"].setdefault("toolchain", []) - job["fetches"]["toolchain"].append(f"{worker_os}{arch}-resource-monitor") - - if worker_implementation == "docker-worker": - artifact_source = "/builds/worker/monitoring/resource-monitor.json" - else: - artifact_source = "monitoring/resource-monitor.json" - job["worker"].setdefault("artifacts", []) - job["worker"]["artifacts"].append( - { - "name": "public/monitoring/resource-monitor.json", - "type": "file", - "path": artifact_source, - } - ) - # Set env for output file - job["worker"].setdefault("env", {}) - job["worker"]["env"]["RESOURCE_MONITOR_OUTPUT"] = artifact_source - - yield job - - -def get_attribute(dict, key, attributes, attribute_name): - """Get `attribute_name` from the given `attributes` dict, and if there - is a corresponding value, set `key` in `dict` to that value.""" - value = attributes.get(attribute_name) - if value: - dict[key] = value - - -@transforms.add -def use_fetches(config, jobs): - artifact_names = {} - aliases = {} - extra_env = {} - - if config.kind in ("toolchain", "fetch"): - jobs = list(jobs) - for job in jobs: - run = job.get("run", {}) - label = job["label"] - get_attribute(artifact_names, label, run, "toolchain-artifact") - value = run.get(f"{config.kind}-alias") - if value: - aliases[f"{config.kind}-{value}"] = label - - for task in config.kind_dependencies_tasks.values(): - if task.kind in ("fetch", "toolchain"): - get_attribute( - artifact_names, - task.label, - task.attributes, - f"{task.kind}-artifact", - ) - get_attribute(extra_env, task.label, task.attributes, f"{task.kind}-env") - value = task.attributes.get(f"{task.kind}-alias") - if value: - aliases[f"{task.kind}-{value}"] = task.label - - artifact_prefixes = {} - for job in order_tasks(config, jobs): - artifact_prefixes[job["label"]] = get_artifact_prefix(job) - - fetches = job.pop("fetches", None) - if not fetches: - yield job - continue - - job_fetches = [] - name = job.get("name", job.get("label")) - dependencies = job.setdefault("dependencies", {}) - worker = job.setdefault("worker", {}) - env = worker.setdefault("env", {}) - prefix = get_artifact_prefix(job) - for kind in sorted(fetches): - artifacts = fetches[kind] - if kind in ("fetch", "toolchain"): - for fetch_name in sorted(artifacts): - label = f"{kind}-{fetch_name}" - label = aliases.get(label, label) - if label not in artifact_names: - raise Exception( - "Missing fetch job for {kind}-{name}: {fetch}".format( - kind=config.kind, name=name, fetch=fetch_name - ) - ) - if label in extra_env: - env.update(extra_env[label]) - - path = artifact_names[label] - - dependencies[label] = label - job_fetches.append( - { - "artifact": path, - "task": f"<{label}>", - "extract": True, - } - ) - else: - if kind not in dependencies: - raise Exception( - "{name} can't fetch {kind} artifacts because " - "it has no {kind} dependencies!".format(name=name, kind=kind) - ) - dep_label = dependencies[kind] - if dep_label in artifact_prefixes: - prefix = artifact_prefixes[dep_label] - else: - dep_tasks = [ - task - for label, task in config.kind_dependencies_tasks.items() - if label == dep_label - ] - if len(dep_tasks) != 1: - raise Exception( - "{name} can't fetch {kind} artifacts because " - "there are {tasks} with label {label} in kind dependencies!".format( - name=name, - kind=kind, - label=dependencies[kind], - tasks="no tasks" - if len(dep_tasks) == 0 - else 
"multiple tasks", - ) - ) - - prefix = get_artifact_prefix(dep_tasks[0]) - - def cmp_artifacts(a): - if isinstance(a, str): - return a - else: - return a["artifact"] - - for artifact in sorted(artifacts, key=cmp_artifacts): - if isinstance(artifact, str): - path = artifact - dest = None - extract = True - verify_hash = False - else: - path = artifact["artifact"] - dest = artifact.get("dest") - extract = artifact.get("extract", True) - verify_hash = artifact.get("verify-hash", False) - - fetch = { - "artifact": f"{prefix}/{path}", - "task": f"<{kind}>", - "extract": extract, - } - if dest is not None: - fetch["dest"] = dest - if verify_hash: - fetch["verify-hash"] = verify_hash - job_fetches.append(fetch) - - job_artifact_prefixes = { - mozpath.dirname(fetch["artifact"]) - for fetch in job_fetches - if not fetch["artifact"].startswith("public/") - } - if job_artifact_prefixes: - # Use taskcluster-proxy and request appropriate scope. For example, add - # 'scopes: [queue:get-artifact:path/to/*]' for 'path/to/artifact.tar.xz'. - worker["taskcluster-proxy"] = True - for prefix in sorted(job_artifact_prefixes): - scope = f"queue:get-artifact:{prefix}/*" - if scope not in job.setdefault("scopes", []): - job["scopes"].append(scope) - - env["MOZ_FETCHES"] = {"task-reference": json.dumps(job_fetches, sort_keys=True)} - - env.setdefault("MOZ_FETCHES_DIR", "fetches") - - yield job - - -@transforms.add -def make_task_description(config, jobs): - """Given a build description, create a task description""" - # import plugin modules first, before iterating over jobs - import_sibling_modules(exceptions=("common.py",)) - - for job in jobs: - # always-optimized tasks never execute, so have no workdir - if job["worker"]["implementation"] in ("docker-worker", "generic-worker"): - job["run"].setdefault("workdir", "/builds/worker") - - taskdesc = copy.deepcopy(job) - - # fill in some empty defaults to make run implementations easier - taskdesc.setdefault("attributes", {}) - taskdesc.setdefault("dependencies", {}) - taskdesc.setdefault("soft-dependencies", []) - taskdesc.setdefault("routes", []) - taskdesc.setdefault("scopes", []) - taskdesc.setdefault("extra", {}) - - # give the function for job.run.using on this worker implementation a - # chance to set up the task description. - configure_taskdesc_for_run( - config, job, taskdesc, job["worker"]["implementation"] - ) - del taskdesc["run"] - - # yield only the task description, discarding the job description - yield taskdesc - - -# A registry of all functions decorated with run_job_using -registry = {} - - -def run_job_using(worker_implementation, run_using, schema=None, defaults={}): - """Register the decorated function as able to set up a task description for - jobs with the given worker implementation and `run.using` property. If - `schema` is given, the job's run field will be verified to match it. - - The decorated function should have the signature `using_foo(config, job, taskdesc)` - and should modify the task description in-place. 
The skeleton of - the task description is already set up, but without a payload.""" - - def wrap(func): - for_run_using = registry.setdefault(run_using, {}) - if worker_implementation in for_run_using: - raise Exception( - "run_job_using({!r}, {!r}) already exists: {!r}".format( - run_using, - worker_implementation, - for_run_using[worker_implementation], - ) - ) - for_run_using[worker_implementation] = (func, schema, defaults) - return func - - return wrap - - -@run_job_using( - "always-optimized", "always-optimized", Schema({"using": "always-optimized"}) -) -def always_optimized(config, job, taskdesc): - pass - - -def configure_taskdesc_for_run(config, job, taskdesc, worker_implementation): - """ - Run the appropriate function for this job against the given task - description. - - This will raise an appropriate error if no function exists, or if the job's - run is not valid according to the schema. - """ - run_using = job["run"]["using"] - if run_using not in registry: - raise Exception(f"no functions for run.using {run_using!r}") - - if worker_implementation not in registry[run_using]: - raise Exception( - "no functions for run.using {!r} on {!r}".format( - run_using, worker_implementation - ) - ) - - func, schema, defaults = registry[run_using][worker_implementation] - for k, v in defaults.items(): - job["run"].setdefault(k, v) - - if schema: - validate_schema( - schema, - job["run"], - "In job.run using {!r}/{!r} for job {!r}:".format( - job["run"]["using"], worker_implementation, job["label"] - ), - ) - func(config, job, taskdesc) diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/job/common.py b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/job/common.py deleted file mode 100644 index 04708daf81..0000000000 --- a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/job/common.py +++ /dev/null @@ -1,171 +0,0 @@ -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -""" -Common support for various job types. These functions are all named after the -worker implementation they operate on, and take the same three parameters, for -consistency. -""" - - -import hashlib -import json - -from taskgraph.util.taskcluster import get_artifact_prefix - - -def get_vcsdir_name(os): - if os == "windows": - return "src" - else: - return "vcs" - - -def add_cache(job, taskdesc, name, mount_point, skip_untrusted=False): - """Adds a cache based on the worker's implementation. - - Args: - job (dict): Task's job description. - taskdesc (dict): Target task description to modify. - name (str): Name of the cache. - mount_point (path): Path on the host to mount the cache. - skip_untrusted (bool): Whether cache is used in untrusted environments - (default: False). Only applies to docker-worker. 
- """ - if not job["run"].get("use-caches", True): - return - - worker = job["worker"] - - if worker["implementation"] == "docker-worker": - taskdesc["worker"].setdefault("caches", []).append( - { - "type": "persistent", - "name": name, - "mount-point": mount_point, - "skip-untrusted": skip_untrusted, - } - ) - - elif worker["implementation"] == "generic-worker": - taskdesc["worker"].setdefault("mounts", []).append( - { - "cache-name": name, - "directory": mount_point, - } - ) - - else: - # Caches not implemented - pass - - -def add_artifacts(config, job, taskdesc, path): - taskdesc["worker"].setdefault("artifacts", []).append( - { - "name": get_artifact_prefix(taskdesc), - "path": path, - "type": "directory", - } - ) - - -def docker_worker_add_artifacts(config, job, taskdesc): - """Adds an artifact directory to the task""" - path = "{workdir}/artifacts/".format(**job["run"]) - taskdesc["worker"]["env"]["UPLOAD_DIR"] = path - add_artifacts(config, job, taskdesc, path) - - -def generic_worker_add_artifacts(config, job, taskdesc): - """Adds an artifact directory to the task""" - # The path is the location on disk; it doesn't necessarily - # mean the artifacts will be public or private; that is set via the name - # attribute in add_artifacts. - add_artifacts(config, job, taskdesc, path=get_artifact_prefix(taskdesc)) - - -def support_vcs_checkout(config, job, taskdesc, repo_configs, sparse=False): - """Update a job/task with parameters to enable a VCS checkout. - - This can only be used with ``run-task`` tasks, as the cache name is - reserved for ``run-task`` tasks. - """ - worker = job["worker"] - is_mac = worker["os"] == "macosx" - is_win = worker["os"] == "windows" - is_linux = worker["os"] == "linux" - is_docker = worker["implementation"] == "docker-worker" - assert is_mac or is_win or is_linux - - if is_win: - checkoutdir = "./build" - hgstore = "y:/hg-shared" - elif is_docker: - checkoutdir = "{workdir}/checkouts".format(**job["run"]) - hgstore = f"{checkoutdir}/hg-store" - else: - checkoutdir = "./checkouts" - hgstore = f"{checkoutdir}/hg-shared" - - vcsdir = checkoutdir + "/" + get_vcsdir_name(worker["os"]) - cache_name = "checkouts" - - # Robust checkout does not clean up subrepositories, so ensure that tasks - # that checkout different sets of paths have separate caches. - # See https://bugzilla.mozilla.org/show_bug.cgi?id=1631610 - if len(repo_configs) > 1: - checkout_paths = { - "\t".join([repo_config.path, repo_config.prefix]) - for repo_config in sorted( - repo_configs.values(), key=lambda repo_config: repo_config.path - ) - } - checkout_paths_str = "\n".join(checkout_paths).encode("utf-8") - digest = hashlib.sha256(checkout_paths_str).hexdigest() - cache_name += f"-repos-{digest}" - - # Sparse checkouts need their own cache because they can interfere - # with clients that aren't sparse aware. - if sparse: - cache_name += "-sparse" - - # Workers using Mercurial >= 5.8 will enable revlog-compression-zstd, which - # workers using older versions can't understand, so they can't share cache. - # At the moment, only docker workers use the newer version. 
- if is_docker: - cache_name += "-hg58" - - add_cache(job, taskdesc, cache_name, checkoutdir) - - env = taskdesc["worker"].setdefault("env", {}) - env.update( - { - "HG_STORE_PATH": hgstore, - "REPOSITORIES": json.dumps( - {repo.prefix: repo.name for repo in repo_configs.values()} - ), - "VCS_PATH": vcsdir, - } - ) - for repo_config in repo_configs.values(): - env.update( - { - f"{repo_config.prefix.upper()}_{key}": value - for key, value in { - "BASE_REPOSITORY": repo_config.base_repository, - "HEAD_REPOSITORY": repo_config.head_repository, - "HEAD_REV": repo_config.head_rev, - "HEAD_REF": repo_config.head_ref, - "REPOSITORY_TYPE": repo_config.type, - "SSH_SECRET_NAME": repo_config.ssh_secret_name, - }.items() - if value is not None - } - ) - if repo_config.ssh_secret_name: - taskdesc["scopes"].append(f"secrets:get:{repo_config.ssh_secret_name}") - - # only some worker platforms have taskcluster-proxy enabled - if job["worker"]["implementation"] in ("docker-worker",): - taskdesc["worker"]["taskcluster-proxy"] = True diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/job/index_search.py b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/job/index_search.py deleted file mode 100644 index 09b48fe594..0000000000 --- a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/job/index_search.py +++ /dev/null @@ -1,37 +0,0 @@ -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - -""" -This transform allows including indexed tasks from other projects in the -current taskgraph. The transform takes a list of indexes, and the optimization -phase will replace the task with the task from the other graph. -""" - - -from voluptuous import Required - -from taskgraph.transforms.base import TransformSequence -from taskgraph.transforms.job import run_job_using -from taskgraph.util.schema import Schema - -transforms = TransformSequence() - -run_task_schema = Schema( - { - Required("using"): "index-search", - Required( - "index-search", - "A list of indexes in decreasing order of priority at which to lookup for this " - "task. This is interpolated with the graph parameters.", - ): [str], - } -) - - -@run_job_using("always-optimized", "index-search", schema=run_task_schema) -def fill_template(config, job, taskdesc): - run = job["run"] - taskdesc["optimization"] = { - "index-search": [index.format(**config.params) for index in run["index-search"]] - } diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/job/run_task.py b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/job/run_task.py deleted file mode 100644 index 6337673611..0000000000 --- a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/job/run_task.py +++ /dev/null @@ -1,231 +0,0 @@ -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -""" -Support for running jobs that are invoked via the `run-task` script. 
-""" - -import dataclasses -import os - -from voluptuous import Any, Optional, Required - -from taskgraph.transforms.job import run_job_using -from taskgraph.transforms.job.common import support_vcs_checkout -from taskgraph.transforms.task import taskref_or_string -from taskgraph.util import path, taskcluster -from taskgraph.util.schema import Schema - -EXEC_COMMANDS = { - "bash": ["bash", "-cx"], - "powershell": ["powershell.exe", "-ExecutionPolicy", "Bypass"], -} - -run_task_schema = Schema( - { - Required("using"): "run-task", - # if true, add a cache at ~worker/.cache, which is where things like pip - # tend to hide their caches. This cache is never added for level-1 jobs. - # TODO Once bug 1526028 is fixed, this and 'use-caches' should be merged. - Required("cache-dotcache"): bool, - # Whether or not to use caches. - Optional("use-caches"): bool, - # if true (the default), perform a checkout on the worker - Required("checkout"): Any(bool, {str: dict}), - Optional( - "cwd", - description="Path to run command in. If a checkout is present, the path " - "to the checkout will be interpolated with the key `checkout`", - ): str, - # The sparse checkout profile to use. Value is the filename relative to the - # directory where sparse profiles are defined (build/sparse-profiles/). - Required("sparse-profile"): Any(str, None), - # The command arguments to pass to the `run-task` script, after the - # checkout arguments. If a list, it will be passed directly; otherwise - # it will be included in a single argument to the command specified by - # `exec-with`. - Required("command"): Any([taskref_or_string], taskref_or_string), - # What to execute the command with in the event command is a string. - Optional("exec-with"): Any(*list(EXEC_COMMANDS)), - # Command used to invoke the `run-task` script. Can be used if the script - # or Python installation is in a non-standard location on the workers. - Optional("run-task-command"): list, - # Base work directory used to set up the task. - Required("workdir"): str, - # Whether to run as root. 
(defaults to False) - Optional("run-as-root"): bool, - } -) - - -def common_setup(config, job, taskdesc, command): - run = job["run"] - if run["checkout"]: - repo_configs = config.repo_configs - if len(repo_configs) > 1 and run["checkout"] is True: - raise Exception("Must explicitly specify checkouts with multiple repos.") - elif run["checkout"] is not True: - repo_configs = { - repo: dataclasses.replace(repo_configs[repo], **config) - for (repo, config) in run["checkout"].items() - } - - support_vcs_checkout( - config, - job, - taskdesc, - repo_configs=repo_configs, - sparse=bool(run["sparse-profile"]), - ) - - vcs_path = taskdesc["worker"]["env"]["VCS_PATH"] - for repo_config in repo_configs.values(): - checkout_path = path.join(vcs_path, repo_config.path) - command.append(f"--{repo_config.prefix}-checkout={checkout_path}") - - if run["sparse-profile"]: - command.append( - "--{}-sparse-profile=build/sparse-profiles/{}".format( - repo_config.prefix, - run["sparse-profile"], - ) - ) - - if "cwd" in run: - run["cwd"] = path.normpath(run["cwd"].format(checkout=vcs_path)) - elif "cwd" in run and "{checkout}" in run["cwd"]: - raise Exception( - "Found `{{checkout}}` interpolation in `cwd` for task {name} " - "but the task doesn't have a checkout: {cwd}".format( - cwd=run["cwd"], name=job.get("name", job.get("label")) - ) - ) - - if "cwd" in run: - command.extend(("--task-cwd", run["cwd"])) - - taskdesc["worker"].setdefault("env", {})["MOZ_SCM_LEVEL"] = config.params["level"] - - -worker_defaults = { - "cache-dotcache": False, - "checkout": True, - "sparse-profile": None, - "run-as-root": False, -} - - -def script_url(config, script): - if "MOZ_AUTOMATION" in os.environ and "TASK_ID" not in os.environ: - raise Exception("TASK_ID must be defined to use run-task on generic-worker") - task_id = os.environ.get("TASK_ID", "") - # use_proxy = False to avoid having all generic-workers turn on proxy - # Assumes the cluster allows anonymous downloads of public artifacts - tc_url = taskcluster.get_root_url(False) - # TODO: Use util/taskcluster.py:get_artifact_url once hack for Bug 1405889 is removed - return f"{tc_url}/api/queue/v1/task/{task_id}/artifacts/public/{script}" - - -@run_job_using( - "docker-worker", "run-task", schema=run_task_schema, defaults=worker_defaults -) -def docker_worker_run_task(config, job, taskdesc): - run = job["run"] - worker = taskdesc["worker"] = job["worker"] - command = run.pop("run-task-command", ["/usr/local/bin/run-task"]) - common_setup(config, job, taskdesc, command) - - if run.get("cache-dotcache"): - worker["caches"].append( - { - "type": "persistent", - "name": "{project}-dotcache".format(**config.params), - "mount-point": "{workdir}/.cache".format(**run), - "skip-untrusted": True, - } - ) - - run_command = run["command"] - - # dict is for the case of `{'task-reference': str}`. 
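String commands (and {'task-reference': ...} dicts) are wrapped in an interpreter selected by exec-with, bash by default, before being appended after the "--" separator. The wrapping in isolation:

    EXEC_COMMANDS = {
        "bash": ["bash", "-cx"],
        "powershell": ["powershell.exe", "-ExecutionPolicy", "Bypass"],
    }

    def wrap_run_command(run_command, exec_with: str = "bash") -> list:
        # Lists pass through untouched; strings/dicts get an interpreter prefix.
        if isinstance(run_command, (str, dict)):
            return EXEC_COMMANDS[exec_with] + [run_command]
        return list(run_command)

    print(wrap_run_command("make check"))  # ['bash', '-cx', 'make check']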
- if isinstance(run_command, str) or isinstance(run_command, dict): - exec_cmd = EXEC_COMMANDS[run.pop("exec-with", "bash")] - run_command = exec_cmd + [run_command] - if run["run-as-root"]: - command.extend(("--user", "root", "--group", "root")) - command.append("--") - command.extend(run_command) - worker["command"] = command - - -@run_job_using( - "generic-worker", "run-task", schema=run_task_schema, defaults=worker_defaults -) -def generic_worker_run_task(config, job, taskdesc): - run = job["run"] - worker = taskdesc["worker"] = job["worker"] - is_win = worker["os"] == "windows" - is_mac = worker["os"] == "macosx" - is_bitbar = worker["os"] == "linux-bitbar" - - command = run.pop("run-task-command", None) - if not command: - if is_win: - command = ["C:/mozilla-build/python3/python3.exe", "run-task"] - elif is_mac: - command = ["/tools/python36/bin/python3", "run-task"] - else: - command = ["./run-task"] - - common_setup(config, job, taskdesc, command) - - worker.setdefault("mounts", []) - if run.get("cache-dotcache"): - worker["mounts"].append( - { - "cache-name": "{project}-dotcache".format(**config.params), - "directory": "{workdir}/.cache".format(**run), - } - ) - worker["mounts"].append( - { - "content": { - "url": script_url(config, "run-task"), - }, - "file": "./run-task", - } - ) - if worker.get("env", {}).get("MOZ_FETCHES"): - worker["mounts"].append( - { - "content": { - "url": script_url(config, "fetch-content"), - }, - "file": "./fetch-content", - } - ) - - run_command = run["command"] - - if isinstance(run_command, str): - if is_win: - run_command = f'"{run_command}"' - exec_cmd = EXEC_COMMANDS[run.pop("exec-with", "bash")] - run_command = exec_cmd + [run_command] - - if run["run-as-root"]: - command.extend(("--user", "root", "--group", "root")) - command.append("--") - if is_bitbar: - # Use the bitbar wrapper script which sets up the device and adb - # environment variables - command.append("/builds/taskcluster/script.py") - command.extend(run_command) - - if is_win: - worker["command"] = [" ".join(command)] - else: - worker["command"] = [ - ["chmod", "+x", "run-task"], - command, - ] diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/job/toolchain.py b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/job/toolchain.py deleted file mode 100644 index c9c09542ff..0000000000 --- a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/job/toolchain.py +++ /dev/null @@ -1,175 +0,0 @@ -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. -""" -Support for running toolchain-building jobs via dedicated scripts -""" - -from voluptuous import ALLOW_EXTRA, Any, Optional, Required - -import taskgraph -from taskgraph.transforms.job import configure_taskdesc_for_run, run_job_using -from taskgraph.transforms.job.common import ( - docker_worker_add_artifacts, - generic_worker_add_artifacts, - get_vcsdir_name, -) -from taskgraph.util.hash import hash_paths -from taskgraph.util.schema import Schema -from taskgraph.util.shell import quote as shell_quote - -CACHE_TYPE = "toolchains.v3" - -toolchain_run_schema = Schema( - { - Required("using"): "toolchain-script", - # The script (in taskcluster/scripts/misc) to run. - Required("script"): str, - # Arguments to pass to the script. - Optional("arguments"): [str], - # Sparse profile to give to checkout using `run-task`. 
If given, - # a filename in `build/sparse-profiles`. Defaults to - # "toolchain-build", i.e., to - # `build/sparse-profiles/toolchain-build`. If `None`, instructs - # `run-task` to not use a sparse profile at all. - Required("sparse-profile"): Any(str, None), - # Paths/patterns pointing to files that influence the outcome of a - # toolchain build. - Optional("resources"): [str], - # Path to the artifact produced by the toolchain job - Required("toolchain-artifact"): str, - Optional( - "toolchain-alias", - description="An alias that can be used instead of the real toolchain job name in " - "fetch stanzas for jobs.", - ): Any(str, [str]), - Optional( - "toolchain-env", - description="Additional env variables to add to the worker when using this toolchain", - ): {str: object}, - # Base work directory used to set up the task. - Required("workdir"): str, - }, - extra=ALLOW_EXTRA, -) - - -def get_digest_data(config, run, taskdesc): - files = list(run.pop("resources", [])) - # The script - files.append("taskcluster/scripts/toolchain/{}".format(run["script"])) - - # Accumulate dependency hashes for index generation. - data = [hash_paths(config.graph_config.vcs_root, files)] - - data.append(taskdesc["attributes"]["toolchain-artifact"]) - - # If the task uses an in-tree docker image, we want it to influence - # the index path as well. Ideally, the content of the docker image itself - # should have an influence, but at the moment, we can't get that - # information here. So use the docker image name as a proxy. Not a lot of - # changes to docker images actually have an impact on the resulting - # toolchain artifact, so we'll just rely on such important changes to be - # accompanied with a docker image name change. - image = taskdesc["worker"].get("docker-image", {}).get("in-tree") - if image: - data.append(image) - - # Likewise script arguments should influence the index. - args = run.get("arguments") - if args: - data.extend(args) - return data - - -def common_toolchain(config, job, taskdesc, is_docker): - run = job["run"] - - worker = taskdesc["worker"] = job["worker"] - worker["chain-of-trust"] = True - - srcdir = get_vcsdir_name(worker["os"]) - - if is_docker: - # If the task doesn't have a docker-image, set a default - worker.setdefault("docker-image", {"in-tree": "toolchain-build"}) - - # Allow the job to specify where artifacts come from, but add - # public/build if it's not there already. - artifacts = worker.setdefault("artifacts", []) - if not any(artifact.get("name") == "public/build" for artifact in artifacts): - if is_docker: - docker_worker_add_artifacts(config, job, taskdesc) - else: - generic_worker_add_artifacts(config, job, taskdesc) - - env = worker["env"] - env.update( - { - "MOZ_BUILD_DATE": config.params["moz_build_date"], - "MOZ_SCM_LEVEL": config.params["level"], - } - ) - - attributes = taskdesc.setdefault("attributes", {}) - attributes["toolchain-artifact"] = run.pop("toolchain-artifact") - if "toolchain-alias" in run: - attributes["toolchain-alias"] = run.pop("toolchain-alias") - if "toolchain-env" in run: - attributes["toolchain-env"] = run.pop("toolchain-env") - - if not taskgraph.fast: - name = taskdesc["label"].replace(f"{config.kind}-", "", 1) - taskdesc["cache"] = { - "type": CACHE_TYPE, - "name": name, - "digest-data": get_digest_data(config, run, taskdesc), - } - - script = run.pop("script") - run["using"] = "run-task" - run["cwd"] = "{checkout}/.." 
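Toolchain cache digests are fed by everything that can change the artifact: the hashed contents of the script plus any declared resources, the toolchain-artifact path, the in-tree docker image name (a proxy for its content, as the comment above notes), and the script arguments. Condensed sketch using the same hash_paths helper the removed code calls:

    from taskgraph.util.hash import hash_paths

    def toolchain_digest_data(vcs_root: str, run: dict, artifact: str, image=None) -> list:
        files = list(run.get("resources", []))
        files.append("taskcluster/scripts/toolchain/{}".format(run["script"]))
        data = [hash_paths(vcs_root, files), artifact]
        if image:
            data.append(image)  # image name stands in for image content
        data.extend(run.get("arguments", []))
        return data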
- - if script.endswith(".ps1"): - run["exec-with"] = "powershell" - - command = [f"{srcdir}/taskcluster/scripts/toolchain/{script}"] + run.pop( - "arguments", [] - ) - - if not is_docker: - # Don't quote the first item in the command because it purposely contains - # an environment variable that is not meant to be quoted. - if len(command) > 1: - command = command[0] + " " + shell_quote(*command[1:]) - else: - command = command[0] - - run["command"] = command - - configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"]) - - -toolchain_defaults = { - "sparse-profile": "toolchain-build", -} - - -@run_job_using( - "docker-worker", - "toolchain-script", - schema=toolchain_run_schema, - defaults=toolchain_defaults, -) -def docker_worker_toolchain(config, job, taskdesc): - common_toolchain(config, job, taskdesc, is_docker=True) - - -@run_job_using( - "generic-worker", - "toolchain-script", - schema=toolchain_run_schema, - defaults=toolchain_defaults, -) -def generic_worker_toolchain(config, job, taskdesc): - common_toolchain(config, job, taskdesc, is_docker=False) diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/run/__init__.py b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/run/__init__.py new file mode 100644 index 0000000000..a783a0dc13 --- /dev/null +++ b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/run/__init__.py @@ -0,0 +1,451 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Convert a run description into a task description. + +Run descriptions are similar to task descriptions, but they specify how to run +the task at a higher level, using a "run" field that can be interpreted by +run-using handlers in `taskcluster/taskgraph/transforms/run`. +""" + + +import copy +import json +import logging + +from voluptuous import Any, Exclusive, Extra, Optional, Required + +from taskgraph.transforms.base import TransformSequence +from taskgraph.transforms.cached_tasks import order_tasks +from taskgraph.transforms.task import task_description_schema +from taskgraph.util import path as mozpath +from taskgraph.util.python_path import import_sibling_modules +from taskgraph.util.schema import Schema, validate_schema +from taskgraph.util.taskcluster import get_artifact_prefix +from taskgraph.util.workertypes import worker_type_implementation + +logger = logging.getLogger(__name__) + +# Fetches may be accepted in other transforms and eventually passed along +# to a `task` (eg: from_deps). Defining this here allows them to reuse +# the schema and avoid duplication. +fetches_schema = { + Required("artifact"): str, + Optional("dest"): str, + Optional("extract"): bool, + Optional("verify-hash"): bool, +} + +# Schema for a build description +run_description_schema = Schema( + { + # The name of the task and the task's label. At least one must be specified, + # and the label will be generated from the name if necessary, by prepending + # the kind. + Optional("name"): str, + Optional("label"): str, + # the following fields are passed directly through to the task description, + # possibly modified by the run implementation. See + # taskcluster/taskgraph/transforms/task.py for the schema details. 
+ Required("description"): task_description_schema["description"], + Optional("attributes"): task_description_schema["attributes"], + Optional("task-from"): task_description_schema["task-from"], + Optional("dependencies"): task_description_schema["dependencies"], + Optional("soft-dependencies"): task_description_schema["soft-dependencies"], + Optional("if-dependencies"): task_description_schema["if-dependencies"], + Optional("requires"): task_description_schema["requires"], + Optional("deadline-after"): task_description_schema["deadline-after"], + Optional("expires-after"): task_description_schema["expires-after"], + Optional("routes"): task_description_schema["routes"], + Optional("scopes"): task_description_schema["scopes"], + Optional("tags"): task_description_schema["tags"], + Optional("extra"): task_description_schema["extra"], + Optional("treeherder"): task_description_schema["treeherder"], + Optional("index"): task_description_schema["index"], + Optional("run-on-projects"): task_description_schema["run-on-projects"], + Optional("run-on-tasks-for"): task_description_schema["run-on-tasks-for"], + Optional("run-on-git-branches"): task_description_schema["run-on-git-branches"], + Optional("shipping-phase"): task_description_schema["shipping-phase"], + Optional("always-target"): task_description_schema["always-target"], + Exclusive("optimization", "optimization"): task_description_schema[ + "optimization" + ], + Optional("needs-sccache"): task_description_schema["needs-sccache"], + # The "when" section contains descriptions of the circumstances under which + # this task should be included in the task graph. This will be converted + # into an optimization, so it cannot be specified in a run description that + # also gives 'optimization'. + Exclusive("when", "optimization"): { + # This task only needs to be run if a file matching one of the given + # patterns has changed in the push. The patterns use the mozpack + # match function (python/mozbuild/mozpack/path.py). + Optional("files-changed"): [str], + }, + # A list of artifacts to install from 'fetch' tasks. + Optional("fetches"): { + Any("toolchain", "fetch"): [str], + str: [ + str, + fetches_schema, + ], + }, + # A description of how to run this task. + "run": { + # The key to a run implementation in a peer module to this one + "using": str, + # Base work directory used to set up the task. + Optional("workdir"): str, + # Any remaining content is verified against that run implementation's + # own schema. + Extra: object, + }, + Required("worker-type"): task_description_schema["worker-type"], + # This object will be passed through to the task description, with additions + # provided by the task's run-using function + Optional("worker"): dict, + } +) + +transforms = TransformSequence() +transforms.add_validate(run_description_schema) + + +@transforms.add +def rewrite_when_to_optimization(config, tasks): + for task in tasks: + when = task.pop("when", {}) + if not when: + yield task + continue + + files_changed = when.get("files-changed") + + # implicitly add task config directory. 
+ files_changed.append(f"{config.path}/**") + + # "only when files changed" implies "skip if files have not changed" + task["optimization"] = {"skip-unless-changed": files_changed} + + assert "when" not in task + yield task + + +@transforms.add +def set_implementation(config, tasks): + for task in tasks: + impl, os = worker_type_implementation(config.graph_config, task["worker-type"]) + if os: + task.setdefault("tags", {})["os"] = os + if impl: + task.setdefault("tags", {})["worker-implementation"] = impl + worker = task.setdefault("worker", {}) + assert "implementation" not in worker + worker["implementation"] = impl + if os: + worker["os"] = os + yield task + + +@transforms.add +def set_label(config, tasks): + for task in tasks: + if "label" not in task: + if "name" not in task: + raise Exception("task has neither a name nor a label") + task["label"] = "{}-{}".format(config.kind, task["name"]) + if task.get("name"): + del task["name"] + yield task + + +@transforms.add +def add_resource_monitor(config, tasks): + for task in tasks: + if task.get("attributes", {}).get("resource-monitor"): + worker_implementation, worker_os = worker_type_implementation( + config.graph_config, task["worker-type"] + ) + # Normalise worker os so that linux-bitbar and similar use linux tools. + if worker_os: + worker_os = worker_os.split("-")[0] + if "win7" in task["worker-type"]: + arch = "32" + else: + arch = "64" + task.setdefault("fetches", {}) + task["fetches"].setdefault("toolchain", []) + task["fetches"]["toolchain"].append(f"{worker_os}{arch}-resource-monitor") + + if worker_implementation == "docker-worker": + artifact_source = "/builds/worker/monitoring/resource-monitor.json" + else: + artifact_source = "monitoring/resource-monitor.json" + task["worker"].setdefault("artifacts", []) + task["worker"]["artifacts"].append( + { + "name": "public/monitoring/resource-monitor.json", + "type": "file", + "path": artifact_source, + } + ) + # Set env for output file + task["worker"].setdefault("env", {}) + task["worker"]["env"]["RESOURCE_MONITOR_OUTPUT"] = artifact_source + + yield task + + +def get_attribute(dict, key, attributes, attribute_name): + """Get `attribute_name` from the given `attributes` dict, and if there + is a corresponding value, set `key` in `dict` to that value.""" + value = attributes.get(attribute_name) + if value: + dict[key] = value + + +@transforms.add +def use_fetches(config, tasks): + artifact_names = {} + aliases = {} + extra_env = {} + + if config.kind in ("toolchain", "fetch"): + tasks = list(tasks) + for task in tasks: + run = task.get("run", {}) + label = task["label"] + get_attribute(artifact_names, label, run, "toolchain-artifact") + value = run.get(f"{config.kind}-alias") + if value: + aliases[f"{config.kind}-{value}"] = label + + for task in config.kind_dependencies_tasks.values(): + if task.kind in ("fetch", "toolchain"): + get_attribute( + artifact_names, + task.label, + task.attributes, + f"{task.kind}-artifact", + ) + get_attribute(extra_env, task.label, task.attributes, f"{task.kind}-env") + value = task.attributes.get(f"{task.kind}-alias") + if value: + aliases[f"{task.kind}-{value}"] = task.label + + artifact_prefixes = {} + for task in order_tasks(config, tasks): + artifact_prefixes[task["label"]] = get_artifact_prefix(task) + + fetches = task.pop("fetches", None) + if not fetches: + yield task + continue + + task_fetches = [] + name = task.get("name", task.get("label")) + dependencies = task.setdefault("dependencies", {}) + worker = task.setdefault("worker", {}) + env = 
worker.setdefault("env", {}) + prefix = get_artifact_prefix(task) + for kind in sorted(fetches): + artifacts = fetches[kind] + if kind in ("fetch", "toolchain"): + for fetch_name in sorted(artifacts): + label = f"{kind}-{fetch_name}" + label = aliases.get(label, label) + if label not in artifact_names: + raise Exception( + f"Missing fetch task for {config.kind}-{name}: {fetch_name}" + ) + if label in extra_env: + env.update(extra_env[label]) + + path = artifact_names[label] + + dependencies[label] = label + task_fetches.append( + { + "artifact": path, + "task": f"<{label}>", + "extract": True, + } + ) + else: + if kind not in dependencies: + raise Exception( + f"{name} can't fetch {kind} artifacts because " + f"it has no {kind} dependencies!" + ) + dep_label = dependencies[kind] + if dep_label in artifact_prefixes: + prefix = artifact_prefixes[dep_label] + else: + dep_tasks = [ + task + for label, task in config.kind_dependencies_tasks.items() + if label == dep_label + ] + if len(dep_tasks) != 1: + raise Exception( + "{name} can't fetch {kind} artifacts because " + "there are {tasks} with label {label} in kind dependencies!".format( + name=name, + kind=kind, + label=dependencies[kind], + tasks=( + "no tasks" + if len(dep_tasks) == 0 + else "multiple tasks" + ), + ) + ) + + prefix = get_artifact_prefix(dep_tasks[0]) + + def cmp_artifacts(a): + if isinstance(a, str): + return a + else: + return a["artifact"] + + for artifact in sorted(artifacts, key=cmp_artifacts): + if isinstance(artifact, str): + path = artifact + dest = None + extract = True + verify_hash = False + else: + path = artifact["artifact"] + dest = artifact.get("dest") + extract = artifact.get("extract", True) + verify_hash = artifact.get("verify-hash", False) + + fetch = { + "artifact": f"{prefix}/{path}", + "task": f"<{kind}>", + "extract": extract, + } + if dest is not None: + fetch["dest"] = dest + if verify_hash: + fetch["verify-hash"] = verify_hash + task_fetches.append(fetch) + + task_artifact_prefixes = { + mozpath.dirname(fetch["artifact"]) + for fetch in task_fetches + if not fetch["artifact"].startswith("public/") + } + if task_artifact_prefixes: + # Use taskcluster-proxy and request appropriate scope. For example, add + # 'scopes: [queue:get-artifact:path/to/*]' for 'path/to/artifact.tar.xz'. 
+ worker["taskcluster-proxy"] = True + for prefix in sorted(task_artifact_prefixes): + scope = f"queue:get-artifact:{prefix}/*" + if scope not in task.setdefault("scopes", []): + task["scopes"].append(scope) + + env["MOZ_FETCHES"] = { + "task-reference": json.dumps(task_fetches, sort_keys=True) + } + + env.setdefault("MOZ_FETCHES_DIR", "fetches") + + yield task + + +@transforms.add +def make_task_description(config, tasks): + """Given a build description, create a task description""" + # import plugin modules first, before iterating over tasks + import_sibling_modules(exceptions=("common.py",)) + + for task in tasks: + # always-optimized tasks never execute, so have no workdir + if task["worker"]["implementation"] in ("docker-worker", "generic-worker"): + task["run"].setdefault("workdir", "/builds/worker") + + taskdesc = copy.deepcopy(task) + + # fill in some empty defaults to make run implementations easier + taskdesc.setdefault("attributes", {}) + taskdesc.setdefault("dependencies", {}) + taskdesc.setdefault("soft-dependencies", []) + taskdesc.setdefault("routes", []) + taskdesc.setdefault("scopes", []) + taskdesc.setdefault("extra", {}) + + # give the function for task.run.using on this worker implementation a + # chance to set up the task description. + configure_taskdesc_for_run( + config, task, taskdesc, task["worker"]["implementation"] + ) + del taskdesc["run"] + + # yield only the task description, discarding the task description + yield taskdesc + + +# A registry of all functions decorated with run_task_using +registry = {} + + +def run_task_using(worker_implementation, run_using, schema=None, defaults={}): + """Register the decorated function as able to set up a task description for + tasks with the given worker implementation and `run.using` property. If + `schema` is given, the task's run field will be verified to match it. + + The decorated function should have the signature `using_foo(config, task, taskdesc)` + and should modify the task description in-place. The skeleton of + the task description is already set up, but without a payload.""" + + def wrap(func): + for_run_using = registry.setdefault(run_using, {}) + if worker_implementation in for_run_using: + raise Exception( + f"run_task_using({run_using!r}, {worker_implementation!r}) already exists: {for_run_using[worker_implementation]!r}" + ) + for_run_using[worker_implementation] = (func, schema, defaults) + return func + + return wrap + + +@run_task_using( + "always-optimized", "always-optimized", Schema({"using": "always-optimized"}) +) +def always_optimized(config, task, taskdesc): + pass + + +def configure_taskdesc_for_run(config, task, taskdesc, worker_implementation): + """ + Run the appropriate function for this task against the given task + description. + + This will raise an appropriate error if no function exists, or if the task's + run is not valid according to the schema. 
+ """ + run_using = task["run"]["using"] + if run_using not in registry: + raise Exception(f"no functions for run.using {run_using!r}") + + if worker_implementation not in registry[run_using]: + raise Exception( + f"no functions for run.using {run_using!r} on {worker_implementation!r}" + ) + + func, schema, defaults = registry[run_using][worker_implementation] + for k, v in defaults.items(): + task["run"].setdefault(k, v) + + if schema: + validate_schema( + schema, + task["run"], + "In task.run using {!r}/{!r} for task {!r}:".format( + task["run"]["using"], worker_implementation, task["label"] + ), + ) + func(config, task, taskdesc) diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/run/common.py b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/run/common.py new file mode 100644 index 0000000000..66466bc5f9 --- /dev/null +++ b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/run/common.py @@ -0,0 +1,165 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Common support for various task types. These functions are all named after the +worker implementation they operate on, and take the same three parameters, for +consistency. +""" + + +import hashlib +import json + +from taskgraph.util.taskcluster import get_artifact_prefix + + +def get_vcsdir_name(os): + if os == "windows": + return "src" + else: + return "vcs" + + +def add_cache(task, taskdesc, name, mount_point, skip_untrusted=False): + """Adds a cache based on the worker's implementation. + + Args: + task (dict): Tasks object. + taskdesc (dict): Target task description to modify. + name (str): Name of the cache. + mount_point (path): Path on the host to mount the cache. + skip_untrusted (bool): Whether cache is used in untrusted environments + (default: False). Only applies to docker-worker. + """ + if not task["run"].get("use-caches", True): + return + + worker = task["worker"] + + if worker["implementation"] == "docker-worker": + taskdesc["worker"].setdefault("caches", []).append( + { + "type": "persistent", + "name": name, + "mount-point": mount_point, + "skip-untrusted": skip_untrusted, + } + ) + + elif worker["implementation"] == "generic-worker": + taskdesc["worker"].setdefault("mounts", []).append( + { + "cache-name": name, + "directory": mount_point, + } + ) + + else: + # Caches not implemented + pass + + +def add_artifacts(config, task, taskdesc, path): + taskdesc["worker"].setdefault("artifacts", []).append( + { + "name": get_artifact_prefix(taskdesc), + "path": path, + "type": "directory", + } + ) + + +def docker_worker_add_artifacts(config, task, taskdesc): + """Adds an artifact directory to the task""" + path = "{workdir}/artifacts/".format(**task["run"]) + taskdesc["worker"]["env"]["UPLOAD_DIR"] = path + add_artifacts(config, task, taskdesc, path) + + +def generic_worker_add_artifacts(config, task, taskdesc): + """Adds an artifact directory to the task""" + # The path is the location on disk; it doesn't necessarily + # mean the artifacts will be public or private; that is set via the name + # attribute in add_artifacts. + add_artifacts(config, task, taskdesc, path=get_artifact_prefix(taskdesc)) + + +def support_vcs_checkout(config, task, taskdesc, repo_configs, sparse=False): + """Update a task with parameters to enable a VCS checkout. 
+ + This can only be used with ``run-task`` tasks, as the cache name is + reserved for ``run-task`` tasks. + """ + worker = task["worker"] + is_mac = worker["os"] == "macosx" + is_win = worker["os"] == "windows" + is_linux = worker["os"] == "linux" + is_docker = worker["implementation"] == "docker-worker" + assert is_mac or is_win or is_linux + + if is_win: + checkoutdir = "./build" + hgstore = "y:/hg-shared" + elif is_docker: + checkoutdir = "{workdir}/checkouts".format(**task["run"]) + hgstore = f"{checkoutdir}/hg-store" + else: + checkoutdir = "./checkouts" + hgstore = f"{checkoutdir}/hg-shared" + + vcsdir = checkoutdir + "/" + get_vcsdir_name(worker["os"]) + cache_name = "checkouts" + + # Robust checkout does not clean up subrepositories, so ensure that tasks + # that checkout different sets of paths have separate caches. + # See https://bugzilla.mozilla.org/show_bug.cgi?id=1631610 + if len(repo_configs) > 1: + checkout_paths = { + "\t".join([repo_config.path, repo_config.prefix]) + for repo_config in sorted( + repo_configs.values(), key=lambda repo_config: repo_config.path + ) + } + checkout_paths_str = "\n".join(checkout_paths).encode("utf-8") + digest = hashlib.sha256(checkout_paths_str).hexdigest() + cache_name += f"-repos-{digest}" + + # Sparse checkouts need their own cache because they can interfere + # with clients that aren't sparse aware. + if sparse: + cache_name += "-sparse" + + add_cache(task, taskdesc, cache_name, checkoutdir) + + env = taskdesc["worker"].setdefault("env", {}) + env.update( + { + "HG_STORE_PATH": hgstore, + "REPOSITORIES": json.dumps( + {repo.prefix: repo.name for repo in repo_configs.values()} + ), + "VCS_PATH": vcsdir, + } + ) + for repo_config in repo_configs.values(): + env.update( + { + f"{repo_config.prefix.upper()}_{key}": value + for key, value in { + "BASE_REPOSITORY": repo_config.base_repository, + "HEAD_REPOSITORY": repo_config.head_repository, + "HEAD_REV": repo_config.head_rev, + "HEAD_REF": repo_config.head_ref, + "REPOSITORY_TYPE": repo_config.type, + "SSH_SECRET_NAME": repo_config.ssh_secret_name, + }.items() + if value is not None + } + ) + if repo_config.ssh_secret_name: + taskdesc["scopes"].append(f"secrets:get:{repo_config.ssh_secret_name}") + + # only some worker platforms have taskcluster-proxy enabled + if task["worker"]["implementation"] in ("docker-worker",): + taskdesc["worker"]["taskcluster-proxy"] = True diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/run/index_search.py b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/run/index_search.py new file mode 100644 index 0000000000..c25946980e --- /dev/null +++ b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/run/index_search.py @@ -0,0 +1,37 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +""" +This transform allows including indexed tasks from other projects in the +current taskgraph. The transform takes a list of indexes, and the optimization +phase will replace the task with the task from the other graph. 
+""" + + +from voluptuous import Required + +from taskgraph.transforms.base import TransformSequence +from taskgraph.transforms.run import run_task_using +from taskgraph.util.schema import Schema + +transforms = TransformSequence() + +run_task_schema = Schema( + { + Required("using"): "index-search", + Required( + "index-search", + "A list of indexes in decreasing order of priority at which to lookup for this " + "task. This is interpolated with the graph parameters.", + ): [str], + } +) + + +@run_task_using("always-optimized", "index-search", schema=run_task_schema) +def fill_template(config, task, taskdesc): + run = task["run"] + taskdesc["optimization"] = { + "index-search": [index.format(**config.params) for index in run["index-search"]] + } diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/run/run_task.py b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/run/run_task.py new file mode 100644 index 0000000000..c2fbef83b0 --- /dev/null +++ b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/run/run_task.py @@ -0,0 +1,231 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Support for running tasks that are invoked via the `run-task` script. +""" + +import dataclasses +import os + +from voluptuous import Any, Optional, Required + +from taskgraph.transforms.run import run_task_using +from taskgraph.transforms.run.common import support_vcs_checkout +from taskgraph.transforms.task import taskref_or_string +from taskgraph.util import path, taskcluster +from taskgraph.util.schema import Schema + +EXEC_COMMANDS = { + "bash": ["bash", "-cx"], + "powershell": ["powershell.exe", "-ExecutionPolicy", "Bypass"], +} + +run_task_schema = Schema( + { + Required("using"): "run-task", + # if true, add a cache at ~worker/.cache, which is where things like pip + # tend to hide their caches. This cache is never added for level-1 tasks. + # TODO Once bug 1526028 is fixed, this and 'use-caches' should be merged. + Required("cache-dotcache"): bool, + # Whether or not to use caches. + Optional("use-caches"): bool, + # if true (the default), perform a checkout on the worker + Required("checkout"): Any(bool, {str: dict}), + Optional( + "cwd", + description="Path to run command in. If a checkout is present, the path " + "to the checkout will be interpolated with the key `checkout`", + ): str, + # The sparse checkout profile to use. Value is the filename relative to the + # directory where sparse profiles are defined (build/sparse-profiles/). + Required("sparse-profile"): Any(str, None), + # The command arguments to pass to the `run-task` script, after the + # checkout arguments. If a list, it will be passed directly; otherwise + # it will be included in a single argument to the command specified by + # `exec-with`. + Required("command"): Any([taskref_or_string], taskref_or_string), + # What to execute the command with in the event command is a string. + Optional("exec-with"): Any(*list(EXEC_COMMANDS)), + # Command used to invoke the `run-task` script. Can be used if the script + # or Python installation is in a non-standard location on the workers. + Optional("run-task-command"): list, + # Base work directory used to set up the task. + Required("workdir"): str, + # Whether to run as root. 
(defaults to False) + Optional("run-as-root"): bool, + } +) + + +def common_setup(config, task, taskdesc, command): + run = task["run"] + if run["checkout"]: + repo_configs = config.repo_configs + if len(repo_configs) > 1 and run["checkout"] is True: + raise Exception("Must explicitly specify checkouts with multiple repos.") + elif run["checkout"] is not True: + repo_configs = { + repo: dataclasses.replace(repo_configs[repo], **config) + for (repo, config) in run["checkout"].items() + } + + support_vcs_checkout( + config, + task, + taskdesc, + repo_configs=repo_configs, + sparse=bool(run["sparse-profile"]), + ) + + vcs_path = taskdesc["worker"]["env"]["VCS_PATH"] + for repo_config in repo_configs.values(): + checkout_path = path.join(vcs_path, repo_config.path) + command.append(f"--{repo_config.prefix}-checkout={checkout_path}") + + if run["sparse-profile"]: + command.append( + "--{}-sparse-profile=build/sparse-profiles/{}".format( + repo_config.prefix, + run["sparse-profile"], + ) + ) + + if "cwd" in run: + run["cwd"] = path.normpath(run["cwd"].format(checkout=vcs_path)) + elif "cwd" in run and "{checkout}" in run["cwd"]: + raise Exception( + "Found `{{checkout}}` interpolation in `cwd` for task {name} " + "but the task doesn't have a checkout: {cwd}".format( + cwd=run["cwd"], name=task.get("name", task.get("label")) + ) + ) + + if "cwd" in run: + command.extend(("--task-cwd", run["cwd"])) + + taskdesc["worker"].setdefault("env", {})["MOZ_SCM_LEVEL"] = config.params["level"] + + +worker_defaults = { + "cache-dotcache": False, + "checkout": True, + "sparse-profile": None, + "run-as-root": False, +} + + +def script_url(config, script): + if "MOZ_AUTOMATION" in os.environ and "TASK_ID" not in os.environ: + raise Exception("TASK_ID must be defined to use run-task on generic-worker") + task_id = os.environ.get("TASK_ID", "") + # use_proxy = False to avoid having all generic-workers turn on proxy + # Assumes the cluster allows anonymous downloads of public artifacts + tc_url = taskcluster.get_root_url(False) + # TODO: Use util/taskcluster.py:get_artifact_url once hack for Bug 1405889 is removed + return f"{tc_url}/api/queue/v1/task/{task_id}/artifacts/public/{script}" + + +@run_task_using( + "docker-worker", "run-task", schema=run_task_schema, defaults=worker_defaults +) +def docker_worker_run_task(config, task, taskdesc): + run = task["run"] + worker = taskdesc["worker"] = task["worker"] + command = run.pop("run-task-command", ["/usr/local/bin/run-task"]) + common_setup(config, task, taskdesc, command) + + if run.get("cache-dotcache"): + worker["caches"].append( + { + "type": "persistent", + "name": "{project}-dotcache".format(**config.params), + "mount-point": "{workdir}/.cache".format(**run), + "skip-untrusted": True, + } + ) + + run_command = run["command"] + + # dict is for the case of `{'task-reference': str}`. 
+ if isinstance(run_command, str) or isinstance(run_command, dict): + exec_cmd = EXEC_COMMANDS[run.pop("exec-with", "bash")] + run_command = exec_cmd + [run_command] + if run["run-as-root"]: + command.extend(("--user", "root", "--group", "root")) + command.append("--") + command.extend(run_command) + worker["command"] = command + + +@run_task_using( + "generic-worker", "run-task", schema=run_task_schema, defaults=worker_defaults +) +def generic_worker_run_task(config, task, taskdesc): + run = task["run"] + worker = taskdesc["worker"] = task["worker"] + is_win = worker["os"] == "windows" + is_mac = worker["os"] == "macosx" + is_bitbar = worker["os"] == "linux-bitbar" + + command = run.pop("run-task-command", None) + if not command: + if is_win: + command = ["C:/mozilla-build/python3/python3.exe", "run-task"] + elif is_mac: + command = ["/tools/python36/bin/python3", "run-task"] + else: + command = ["./run-task"] + + common_setup(config, task, taskdesc, command) + + worker.setdefault("mounts", []) + if run.get("cache-dotcache"): + worker["mounts"].append( + { + "cache-name": "{project}-dotcache".format(**config.params), + "directory": "{workdir}/.cache".format(**run), + } + ) + worker["mounts"].append( + { + "content": { + "url": script_url(config, "run-task"), + }, + "file": "./run-task", + } + ) + if worker.get("env", {}).get("MOZ_FETCHES"): + worker["mounts"].append( + { + "content": { + "url": script_url(config, "fetch-content"), + }, + "file": "./fetch-content", + } + ) + + run_command = run["command"] + + if isinstance(run_command, str): + if is_win: + run_command = f'"{run_command}"' + exec_cmd = EXEC_COMMANDS[run.pop("exec-with", "bash")] + run_command = exec_cmd + [run_command] + + if run["run-as-root"]: + command.extend(("--user", "root", "--group", "root")) + command.append("--") + if is_bitbar: + # Use the bitbar wrapper script which sets up the device and adb + # environment variables + command.append("/builds/taskcluster/script.py") + command.extend(run_command) + + if is_win: + worker["command"] = [" ".join(command)] + else: + worker["command"] = [ + ["chmod", "+x", "run-task"], + command, + ] diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/run/toolchain.py b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/run/toolchain.py new file mode 100644 index 0000000000..59e66cb973 --- /dev/null +++ b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/run/toolchain.py @@ -0,0 +1,175 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +""" +Support for running toolchain-building tasks via dedicated scripts +""" + +from voluptuous import ALLOW_EXTRA, Any, Optional, Required + +import taskgraph +from taskgraph.transforms.run import configure_taskdesc_for_run, run_task_using +from taskgraph.transforms.run.common import ( + docker_worker_add_artifacts, + generic_worker_add_artifacts, + get_vcsdir_name, +) +from taskgraph.util.hash import hash_paths +from taskgraph.util.schema import Schema +from taskgraph.util.shell import quote as shell_quote + +CACHE_TYPE = "toolchains.v3" + +toolchain_run_schema = Schema( + { + Required("using"): "toolchain-script", + # The script (in taskcluster/scripts/misc) to run. + Required("script"): str, + # Arguments to pass to the script. + Optional("arguments"): [str], + # Sparse profile to give to checkout using `run-task`. 
If given, + # a filename in `build/sparse-profiles`. Defaults to + # "toolchain-build", i.e., to + # `build/sparse-profiles/toolchain-build`. If `None`, instructs + # `run-task` to not use a sparse profile at all. + Required("sparse-profile"): Any(str, None), + # Paths/patterns pointing to files that influence the outcome of a + # toolchain build. + Optional("resources"): [str], + # Path to the artifact produced by the toolchain task + Required("toolchain-artifact"): str, + Optional( + "toolchain-alias", + description="An alias that can be used instead of the real toolchain task name in " + "fetch stanzas for tasks.", + ): Any(str, [str]), + Optional( + "toolchain-env", + description="Additional env variables to add to the worker when using this toolchain", + ): {str: object}, + # Base work directory used to set up the task. + Required("workdir"): str, + }, + extra=ALLOW_EXTRA, +) + + +def get_digest_data(config, run, taskdesc): + files = list(run.pop("resources", [])) + # The script + files.append("taskcluster/scripts/toolchain/{}".format(run["script"])) + + # Accumulate dependency hashes for index generation. + data = [hash_paths(config.graph_config.vcs_root, files)] + + data.append(taskdesc["attributes"]["toolchain-artifact"]) + + # If the task uses an in-tree docker image, we want it to influence + # the index path as well. Ideally, the content of the docker image itself + # should have an influence, but at the moment, we can't get that + # information here. So use the docker image name as a proxy. Not a lot of + # changes to docker images actually have an impact on the resulting + # toolchain artifact, so we'll just rely on such important changes to be + # accompanied with a docker image name change. + image = taskdesc["worker"].get("docker-image", {}).get("in-tree") + if image: + data.append(image) + + # Likewise script arguments should influence the index. + args = run.get("arguments") + if args: + data.extend(args) + return data + + +def common_toolchain(config, task, taskdesc, is_docker): + run = task["run"] + + worker = taskdesc["worker"] = task["worker"] + worker["chain-of-trust"] = True + + srcdir = get_vcsdir_name(worker["os"]) + + if is_docker: + # If the task doesn't have a docker-image, set a default + worker.setdefault("docker-image", {"in-tree": "toolchain-build"}) + + # Allow the task to specify where artifacts come from, but add + # public/build if it's not there already. + artifacts = worker.setdefault("artifacts", []) + if not any(artifact.get("name") == "public/build" for artifact in artifacts): + if is_docker: + docker_worker_add_artifacts(config, task, taskdesc) + else: + generic_worker_add_artifacts(config, task, taskdesc) + + env = worker["env"] + env.update( + { + "MOZ_BUILD_DATE": config.params["moz_build_date"], + "MOZ_SCM_LEVEL": config.params["level"], + } + ) + + attributes = taskdesc.setdefault("attributes", {}) + attributes["toolchain-artifact"] = run.pop("toolchain-artifact") + if "toolchain-alias" in run: + attributes["toolchain-alias"] = run.pop("toolchain-alias") + if "toolchain-env" in run: + attributes["toolchain-env"] = run.pop("toolchain-env") + + if not taskgraph.fast: + name = taskdesc["label"].replace(f"{config.kind}-", "", 1) + taskdesc["cache"] = { + "type": CACHE_TYPE, + "name": name, + "digest-data": get_digest_data(config, run, taskdesc), + } + + script = run.pop("script") + run["using"] = "run-task" + run["cwd"] = "{checkout}/.." 
+ + if script.endswith(".ps1"): + run["exec-with"] = "powershell" + + command = [f"{srcdir}/taskcluster/scripts/toolchain/{script}"] + run.pop( + "arguments", [] + ) + + if not is_docker: + # Don't quote the first item in the command because it purposely contains + # an environment variable that is not meant to be quoted. + if len(command) > 1: + command = command[0] + " " + shell_quote(*command[1:]) + else: + command = command[0] + + run["command"] = command + + configure_taskdesc_for_run(config, task, taskdesc, worker["implementation"]) + + +toolchain_defaults = { + "sparse-profile": "toolchain-build", +} + + +@run_task_using( + "docker-worker", + "toolchain-script", + schema=toolchain_run_schema, + defaults=toolchain_defaults, +) +def docker_worker_toolchain(config, task, taskdesc): + common_toolchain(config, task, taskdesc, is_docker=True) + + +@run_task_using( + "generic-worker", + "toolchain-script", + schema=toolchain_run_schema, + defaults=toolchain_defaults, +) +def generic_worker_toolchain(config, task, taskdesc): + common_toolchain(config, task, taskdesc, is_docker=False) diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/task.py b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/task.py index c55de78513..168b8c00c9 100644 --- a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/task.py +++ b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/task.py @@ -110,7 +110,7 @@ task_description_schema = Schema( # section of the kind (delimited by "-") all smooshed together. # Eg: "test" becomes "T", "docker-image" becomes "DI", etc. "symbol": Optional(str), - # the job kind + # the task kind # If "build" or "test" is found in the kind name, this defaults # to the appropriate value. Otherwise, defaults to "other" "kind": Optional(Any("build", "test", "other")), @@ -129,7 +129,7 @@ task_description_schema = Schema( Optional("index"): { # the name of the product this build produces "product": str, - # the names to use for this job in the TaskCluster index + # the names to use for this task in the TaskCluster index "job-name": str, # Type of gecko v2 index to use "type": str, @@ -179,7 +179,7 @@ task_description_schema = Schema( # be substituted in this string: # {level} -- the scm level of this push "worker-type": str, - # Whether the job should use sccache compiler caching. + # Whether the task should use sccache compiler caching. Required("needs-sccache"): bool, # information specific to the worker implementation that will run this task Optional("worker"): { @@ -196,7 +196,7 @@ TC_TREEHERDER_SCHEMA_URL = ( UNKNOWN_GROUP_NAME = ( - "Treeherder group {} (from {}) has no name; " "add it to taskcluster/ci/config.yml" + "Treeherder group {} (from {}) has no name; " "add it to taskcluster/config.yml" ) V2_ROUTE_TEMPLATES = [ @@ -266,7 +266,7 @@ def index_builder(name): UNSUPPORTED_INDEX_PRODUCT_ERROR = """\ The index product {product} is not in the list of configured products in -`taskcluster/ci/config.yml'. +`taskcluster/config.yml'. 
""" @@ -317,7 +317,7 @@ def verify_index(config, index): { # only one type is supported by any of the workers right now "type": "persistent", - # name of the cache, allowing re-use by subsequent tasks naming the + # name of the cache, allowing reuse by subsequent tasks naming the # same cache "name": str, # location in the task image where the cache will be mounted @@ -364,6 +364,9 @@ def build_docker_worker_payload(config, task, task_def): if "in-tree" in image: name = image["in-tree"] docker_image_task = "build-docker-image-" + image["in-tree"] + assert "docker-image" not in task.get( + "dependencies", () + ), "docker-image key in dependencies object is reserved" task.setdefault("dependencies", {})["docker-image"] = docker_image_task image = { @@ -487,19 +490,19 @@ def build_docker_worker_payload(config, task, task_def): # run-task knows how to validate caches. # - # To help ensure new run-task features and bug fixes don't interfere - # with existing caches, we seed the hash of run-task into cache names. - # So, any time run-task changes, we should get a fresh set of caches. - # This means run-task can make changes to cache interaction at any time - # without regards for backwards or future compatibility. + # To help ensure new run-task features and bug fixes, as well as the + # versions of tools such as mercurial or git, don't interfere with + # existing caches, we seed the underlying docker-image task id into + # cache names, for tasks using in-tree Docker images. # # But this mechanism only works for in-tree Docker images that are built # with the current run-task! For out-of-tree Docker images, we have no # way of knowing their content of run-task. So, in addition to varying # cache names by the contents of run-task, we also take the Docker image - # name into consideration. This means that different Docker images will - # never share the same cache. This is a bit unfortunate. But it is the - # safest thing to do. Fortunately, most images are defined in-tree. + # name into consideration. + # + # This means that different Docker images will never share the same + # cache. This is a bit unfortunate, but is the safest thing to do. # # For out-of-tree Docker images, we don't strictly need to incorporate # the run-task content into the cache name. However, doing so preserves @@ -520,6 +523,8 @@ def build_docker_worker_payload(config, task, task_def): out_of_tree_image.encode("utf-8") ).hexdigest() suffix += name_hash[0:12] + else: + suffix += "-" else: suffix = cache_version @@ -539,13 +544,15 @@ def build_docker_worker_payload(config, task, task_def): suffix=suffix, ) caches[name] = cache["mount-point"] - task_def["scopes"].append("docker-worker:cache:%s" % name) + task_def["scopes"].append( + {"task-reference": "docker-worker:cache:%s" % name} + ) # Assertion: only run-task is interested in this. if run_task: payload["env"]["TASKCLUSTER_CACHES"] = ";".join(sorted(caches.values())) - payload["cache"] = caches + payload["cache"] = {"task-reference": caches} # And send down volumes information to run-task as well. 
if run_task and worker.get("volumes"): @@ -752,7 +759,7 @@ def build_generic_worker_payload(config, task, task_def): schema={ # the maximum time to run, in seconds Required("max-run-time"): int, - # locale key, if this is a locale beetmover job + # locale key, if this is a locale beetmover task Optional("locale"): str, Optional("partner-public"): bool, Required("release-properties"): { @@ -1075,7 +1082,11 @@ def build_task(config, tasks): extra["parent"] = os.environ.get("TASK_ID", "") if "expires-after" not in task: - task["expires-after"] = "28 days" if config.params.is_try() else "1 year" + task["expires-after"] = ( + config.graph_config._config.get("task-expires-after", "28 days") + if config.params.is_try() + else "1 year" + ) if "deadline-after" not in task: if "task-deadline-after" in config.graph_config: @@ -1142,9 +1153,9 @@ def build_task(config, tasks): config.params["project"] + th_project_suffix, branch_rev ) ) - task_def["metadata"]["description"] += " ([Treeherder push]({}))".format( - th_push_link - ) + task_def["metadata"][ + "description" + ] += f" ([Treeherder push]({th_push_link}))" # add the payload and adjust anything else as required (e.g., scopes) payload_builders[task["worker"]["implementation"]].builder( @@ -1288,7 +1299,7 @@ def check_caches_are_volumes(task): Caches and volumes are the only filesystem locations whose content isn't defined by the Docker image itself. Some caches are optional - depending on the job environment. We want paths that are potentially + depending on the task environment. We want paths that are potentially caches to have as similar behavior regardless of whether a cache is used. To help enforce this, we require that all paths used as caches to be declared as Docker volumes. This check won't catch all offenders. @@ -1343,7 +1354,9 @@ def check_run_task_caches(config, tasks): main_command = command[0] if isinstance(command[0], str) else "" run_task = main_command.endswith("run-task") - for cache in payload.get("cache", {}): + for cache in payload.get("cache", {}).get( + "task-reference", payload.get("cache", {}) + ): if not cache.startswith(cache_prefix): raise Exception( "{} is using a cache ({}) which is not appropriate " @@ -1364,7 +1377,7 @@ def check_run_task_caches(config, tasks): "cache name" ) - if not cache.endswith(suffix): + if suffix not in cache: raise Exception( f"{task['label']} is using a cache ({cache}) reserved for run-task " "but the cache name is not dependent on the contents " diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/task_context.py b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/task_context.py index 5c7ed6af80..bd36d827aa 100644 --- a/third_party/python/taskcluster_taskgraph/taskgraph/transforms/task_context.py +++ b/third_party/python/taskcluster_taskgraph/taskgraph/transforms/task_context.py @@ -81,9 +81,9 @@ transforms.add_validate(SCHEMA) @transforms.add -def render_task(config, jobs): - for job in jobs: - sub_config = job.pop("task-context") +def render_task(config, tasks): + for task in tasks: + sub_config = task.pop("task-context") params_context = {} for var, path in sub_config.pop("from-parameters", {}).items(): if isinstance(path, str): @@ -111,11 +111,11 @@ def render_task(config, jobs): # Now that we have our combined context, we can substitute. for field in fields: - container, subfield = job, field + container, subfield = task, field while "." 
in subfield: f, subfield = subfield.split(".", 1) container = container[f] container[subfield] = substitute(container[subfield], **subs) - yield job + yield task diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/util/archive.py b/third_party/python/taskcluster_taskgraph/taskgraph/util/archive.py index ee59ba4548..261a031038 100644 --- a/third_party/python/taskcluster_taskgraph/taskgraph/util/archive.py +++ b/third_party/python/taskcluster_taskgraph/taskgraph/util/archive.py @@ -12,6 +12,40 @@ import tarfile DEFAULT_MTIME = 1451606400 +# Python 3.9 contains this change: +# https://github.com/python/cpython/commit/674935b8caf33e47c78f1b8e197b1b77a04992d2 +# which changes the output of tar creation compared to earlier versions. +# As this code is used to generate tar files that are meant to be deterministic +# across versions of python (specifically, it's used as part of computing the hash +# of docker images, which needs to be identical between CI (which uses python 3.8), +# and developer environments (using arbitrary versions of python, at this point, +# most probably more recent than 3.9)). +# What we do is subblass TarInfo so that if used on python >= 3.9, it reproduces the +# behavior from python < 3.9. +# Here's how it goes: +# - the behavior in python >= 3.9 is the same as python < 3.9 when the type encoded +# in the tarinfo is CHRTYPE or BLKTYPE. +# - the value of the type is only compared in the context of choosing which behavior +# to take +# - we replace the type with the same value (so that using the value has no changes) +# but that pretends to be the same as CHRTYPE so that the condition that enables the +# old behavior is taken. +class HackedType(bytes): + def __eq__(self, other): + if other == tarfile.CHRTYPE: + return True + return self == other + + +class TarInfo(tarfile.TarInfo): + @staticmethod + def _create_header(info, format, encoding, errors): + info["type"] = HackedType(info["type"]) + # ignore type checking because it looks like pyright complains because we're calling a + # non-public method + return tarfile.TarInfo._create_header(info, format, encoding, errors) # type: ignore + + def create_tar_from_files(fp, files): """Create a tar file deterministically. @@ -25,15 +59,23 @@ def create_tar_from_files(fp, files): FUTURE accept a filename argument (or create APIs to write files) """ - with tarfile.open(name="", mode="w", fileobj=fp, dereference=True) as tf: + # The format is explicitly set to tarfile.GNU_FORMAT, because this default format + # has been changed in Python 3.8. + with tarfile.open( + name="", mode="w", fileobj=fp, dereference=True, format=tarfile.GNU_FORMAT + ) as tf: for archive_path, f in sorted(files.items()): if isinstance(f, str): - mode = os.stat(f).st_mode + s = os.stat(f) + mode = s.st_mode + size = s.st_size f = open(f, "rb") else: mode = 0o0644 + size = len(f.read()) + f.seek(0) - ti = tarfile.TarInfo(archive_path) + ti = TarInfo(archive_path) ti.mode = mode ti.type = tarfile.REGTYPE @@ -56,9 +98,7 @@ def create_tar_from_files(fp, files): # Set mtime to a constant value. ti.mtime = DEFAULT_MTIME - f.seek(0, 2) - ti.size = f.tell() - f.seek(0, 0) + ti.size = size # tarfile wants to pass a size argument to read(). So just # wrap/buffer in a proper file object interface. 
tf.addfile(ti, f) diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/util/cached_tasks.py b/third_party/python/taskcluster_taskgraph/taskgraph/util/cached_tasks.py index 974b114902..1a3baad5be 100644 --- a/third_party/python/taskcluster_taskgraph/taskgraph/util/cached_tasks.py +++ b/third_party/python/taskcluster_taskgraph/taskgraph/util/cached_tasks.py @@ -7,6 +7,7 @@ import hashlib import time TARGET_CACHE_INDEX = "{cache_prefix}.cache.level-{level}.{type}.{name}.hash.{digest}" +TARGET_PR_CACHE_INDEX = "{cache_prefix}.cache.pr.{type}.{name}.hash.{digest}" EXTRA_CACHE_INDEXES = [ "{cache_prefix}.cache.level-{level}.{type}.{name}.latest", "{cache_prefix}.cache.level-{level}.{type}.{name}.pushdate.{build_date_long}", @@ -53,31 +54,45 @@ def add_optimization( # We'll try to find a cached version of the toolchain at levels above and # including the current level, starting at the highest level. - # Chain-of-trust doesn't handle tasks not built on the tip of a - # pull-request, so don't look for level-1 tasks if building a pull-request. index_routes = [] min_level = int(config.params["level"]) - if config.params["tasks_for"] == "github-pull-request": - min_level = max(min_level, 3) for level in reversed(range(min_level, 4)): subs["level"] = level index_routes.append(TARGET_CACHE_INDEX.format(**subs)) - taskdesc["optimization"] = {"index-search": index_routes} + # Pull requests use a different target cache index route. This way we can + # be confident they won't be used by anything other than the pull request + # that created the cache in the first place. + if config.params["tasks_for"].startswith( + "github-pull-request" + ) and config.graph_config["taskgraph"].get("cache-pull-requests", True): + subs["head_ref"] = config.params["head_ref"] + if subs["head_ref"].startswith("refs/heads/"): + subs["head_ref"] = subs["head_ref"][11:] + index_routes.append(TARGET_PR_CACHE_INDEX.format(**subs)) + + taskdesc["optimization"] = {"index-search": index_routes} # ... and cache at the lowest level. subs["level"] = config.params["level"] - taskdesc.setdefault("routes", []).append( - f"index.{TARGET_CACHE_INDEX.format(**subs)}" - ) - # ... and add some extra routes for humans - subs["build_date_long"] = time.strftime( - "%Y.%m.%d.%Y%m%d%H%M%S", time.gmtime(config.params["build_date"]) - ) - taskdesc["routes"].extend( - [f"index.{route.format(**subs)}" for route in EXTRA_CACHE_INDEXES] - ) + if config.params["tasks_for"].startswith("github-pull-request"): + if config.graph_config["taskgraph"].get("cache-pull-requests", True): + taskdesc.setdefault("routes", []).append( + f"index.{TARGET_PR_CACHE_INDEX.format(**subs)}" + ) + else: + taskdesc.setdefault("routes", []).append( + f"index.{TARGET_CACHE_INDEX.format(**subs)}" + ) + + # ... and add some extra routes for humans + subs["build_date_long"] = time.strftime( + "%Y.%m.%d.%Y%m%d%H%M%S", time.gmtime(config.params["build_date"]) + ) + taskdesc["routes"].extend( + [f"index.{route.format(**subs)}" for route in EXTRA_CACHE_INDEXES] + ) taskdesc["attributes"]["cached_task"] = { "type": cache_type, diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/util/decision.py b/third_party/python/taskcluster_taskgraph/taskgraph/util/decision.py deleted file mode 100644 index d0e1e1079f..0000000000 --- a/third_party/python/taskcluster_taskgraph/taskgraph/util/decision.py +++ /dev/null @@ -1,79 +0,0 @@ -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. 
If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - -""" -Utilities for generating a decision task from :file:`.taskcluster.yml`. -""" - - -import os - -import jsone -import slugid -import yaml - -from .templates import merge -from .time import current_json_time -from .vcs import find_hg_revision_push_info - - -def make_decision_task(params, root, context, head_rev=None): - """Generate a basic decision task, based on the root .taskcluster.yml""" - with open(os.path.join(root, ".taskcluster.yml"), "rb") as f: - taskcluster_yml = yaml.safe_load(f) - - if not head_rev: - head_rev = params["head_rev"] - - if params["repository_type"] == "hg": - pushlog = find_hg_revision_push_info(params["repository_url"], head_rev) - - hg_push_context = { - "pushlog_id": pushlog["pushid"], - "pushdate": pushlog["pushdate"], - "owner": pushlog["user"], - } - else: - hg_push_context = {} - - slugids = {} - - def as_slugid(name): - # https://github.com/taskcluster/json-e/issues/164 - name = name[0] - if name not in slugids: - slugids[name] = slugid.nice() - return slugids[name] - - # provide a similar JSON-e context to what mozilla-taskcluster provides: - # https://docs.taskcluster.net/reference/integrations/mozilla-taskcluster/docs/taskcluster-yml - # but with a different tasks_for and an extra `cron` section - context = merge( - { - "repository": { - "url": params["repository_url"], - "project": params["project"], - "level": params["level"], - }, - "push": merge( - { - "revision": params["head_rev"], - # remainder are fake values, but the decision task expects them anyway - "comment": " ", - }, - hg_push_context, - ), - "now": current_json_time(), - "as_slugid": as_slugid, - }, - context, - ) - - rendered = jsone.render(taskcluster_yml, context) - if len(rendered["tasks"]) != 1: - raise Exception("Expected .taskcluster.yml to only produce one cron task") - task = rendered["tasks"][0] - - task_id = task.pop("taskId") - return (task_id, task) diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/util/docker.py b/third_party/python/taskcluster_taskgraph/taskgraph/util/docker.py index c37a69f98f..13815381ed 100644 --- a/third_party/python/taskcluster_taskgraph/taskgraph/util/docker.py +++ b/third_party/python/taskcluster_taskgraph/taskgraph/util/docker.py @@ -7,6 +7,7 @@ import hashlib import io import os import re +from typing import Optional from taskgraph.util.archive import create_tar_gz_from_files from taskgraph.util.memoize import memoize @@ -16,17 +17,27 @@ IMAGE_DIR = os.path.join(".", "taskcluster", "docker") from .yaml import load_yaml -def docker_image(name, by_tag=False): +def docker_image(name: str, by_tag: bool = False) -> Optional[str]: """ Resolve in-tree prebuilt docker image to ``/@sha256:``, or ``/:`` if `by_tag` is `True`. + + Args: + name (str): The image to build. + by_tag (bool): If True, will apply a tag based on VERSION file. + Otherwise will apply a hash based on HASH file. + Returns: + Optional[str]: Image if it can be resolved, otherwise None. 
""" try: with open(os.path.join(IMAGE_DIR, name, "REGISTRY")) as f: registry = f.read().strip() except OSError: - with open(os.path.join(IMAGE_DIR, "REGISTRY")) as f: - registry = f.read().strip() + try: + with open(os.path.join(IMAGE_DIR, "REGISTRY")) as f: + registry = f.read().strip() + except OSError: + return None if not by_tag: hashfile = os.path.join(IMAGE_DIR, name, "HASH") @@ -34,7 +45,7 @@ def docker_image(name, by_tag=False): with open(hashfile) as f: return f"{registry}/{name}@{f.read().strip()}" except OSError: - raise Exception(f"Failed to read HASH file {hashfile}") + return None try: with open(os.path.join(IMAGE_DIR, name, "VERSION")) as f: @@ -197,7 +208,7 @@ def stream_context_tar(topsrcdir, context_dir, out_file, image_name=None, args=N @memoize def image_paths(): """Return a map of image name to paths containing their Dockerfile.""" - config = load_yaml("taskcluster", "ci", "docker-image", "kind.yml") + config = load_yaml("taskcluster", "kinds", "docker-image", "kind.yml") return { k: os.path.join(IMAGE_DIR, v.get("definition", k)) for k, v in config["tasks"].items() diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/util/hash.py b/third_party/python/taskcluster_taskgraph/taskgraph/util/hash.py index 5d884fc318..d42b2ecef9 100644 --- a/third_party/python/taskcluster_taskgraph/taskgraph/util/hash.py +++ b/third_party/python/taskcluster_taskgraph/taskgraph/util/hash.py @@ -39,10 +39,7 @@ def hash_paths(base_path, patterns): raise Exception("%s did not match anything" % pattern) for path in sorted(found): h.update( - "{} {}\n".format( - hash_path(mozpath.abspath(mozpath.join(base_path, path))), - mozpath.normsep(path), - ).encode("utf-8") + f"{hash_path(mozpath.abspath(mozpath.join(base_path, path)))} {mozpath.normsep(path)}\n".encode() ) return h.hexdigest() @@ -55,4 +52,8 @@ def _find_matching_files(base_path, pattern): @memoize def _get_all_files(base_path): - return [str(path) for path in Path(base_path).rglob("*") if path.is_file()] + return [ + mozpath.normsep(str(path)) + for path in Path(base_path).rglob("*") + if path.is_file() + ] diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/util/keyed_by.py b/third_party/python/taskcluster_taskgraph/taskgraph/util/keyed_by.py index 9b0c5a44fb..00c84ba980 100644 --- a/third_party/python/taskcluster_taskgraph/taskgraph/util/keyed_by.py +++ b/third_party/python/taskcluster_taskgraph/taskgraph/util/keyed_by.py @@ -66,8 +66,8 @@ def evaluate_keyed_by( # Error out when only 'default' is specified as only alternatives, # because we don't need to by-{keyed_by} there. 
raise Exception( - "Keyed-by '{}' unnecessary with only value 'default' " - "found, when determining item {}".format(keyed_by, item_name) + f"Keyed-by '{keyed_by}' unnecessary with only value 'default' " + f"found, when determining item {item_name}" ) if key is None: @@ -76,22 +76,20 @@ def evaluate_keyed_by( continue else: raise Exception( - "No attribute {} and no value for 'default' found " - "while determining item {}".format(keyed_by, item_name) + f"No attribute {keyed_by} and no value for 'default' found " + f"while determining item {item_name}" ) matches = keymatch(alternatives, key) if enforce_single_match and len(matches) > 1: raise Exception( - "Multiple matching values for {} {!r} found while " - "determining item {}".format(keyed_by, key, item_name) + f"Multiple matching values for {keyed_by} {key!r} found while " + f"determining item {item_name}" ) elif matches: value = matches[0] continue raise Exception( - "No {} matching {!r} nor 'default' found while determining item {}".format( - keyed_by, key, item_name - ) + f"No {keyed_by} matching {key!r} nor 'default' found while determining item {item_name}" ) diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/util/memoize.py b/third_party/python/taskcluster_taskgraph/taskgraph/util/memoize.py index 56b513e74c..a4bc50cc26 100644 --- a/third_party/python/taskcluster_taskgraph/taskgraph/util/memoize.py +++ b/third_party/python/taskcluster_taskgraph/taskgraph/util/memoize.py @@ -2,39 +2,6 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this file, # You can obtain one at http://mozilla.org/MPL/2.0/. -# Imported from -# https://searchfox.org/mozilla-central/rev/c3ebaf6de2d481c262c04bb9657eaf76bf47e2ac/python/mozbuild/mozbuild/util.py#923-949 - - import functools - -class memoize(dict): - """A decorator to memoize the results of function calls depending - on its arguments. - Both functions and instance methods are handled, although in the - instance method case, the results are cache in the instance itself. 
- """ - - def __init__(self, func): - self.func = func - functools.update_wrapper(self, func) - - def __call__(self, *args): - if args not in self: - self[args] = self.func(*args) - return self[args] - - def method_call(self, instance, *args): - name = "_%s" % self.func.__name__ - if not hasattr(instance, name): - setattr(instance, name, {}) - cache = getattr(instance, name) - if args not in cache: - cache[args] = self.func(instance, *args) - return cache[args] - - def __get__(self, instance, cls): - return functools.update_wrapper( - functools.partial(self.method_call, instance), self.func - ) +memoize = functools.lru_cache(maxsize=None) # backwards compatibility shim diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/util/parameterization.py b/third_party/python/taskcluster_taskgraph/taskgraph/util/parameterization.py index 6233a98a40..1973f6f7df 100644 --- a/third_party/python/taskcluster_taskgraph/taskgraph/util/parameterization.py +++ b/third_party/python/taskcluster_taskgraph/taskgraph/util/parameterization.py @@ -20,6 +20,12 @@ def _recurse(val, param_fns): if len(val) == 1: for param_key, param_fn in param_fns.items(): if set(val.keys()) == {param_key}: + if isinstance(val[param_key], dict): + # handle `{"task-reference": {"": "bar"}}` + return { + param_fn(key): recurse(v) + for key, v in val[param_key].items() + } return param_fn(val[param_key]) return {k: recurse(v) for k, v in val.items()} else: @@ -74,17 +80,14 @@ def resolve_task_references(label, task_def, task_id, decision_task_id, dependen task_id = dependencies[dependency] except KeyError: raise KeyError( - "task '{}' has no dependency named '{}'".format( - label, dependency - ) + f"task '{label}' has no dependency named '{dependency}'" ) - assert artifact_name.startswith( - "public/" - ), "artifact-reference only supports public artifacts, not `{}`".format( - artifact_name - ) - return get_artifact_url(task_id, artifact_name) + use_proxy = False + if not artifact_name.startswith("public/"): + use_proxy = True + + return get_artifact_url(task_id, artifact_name, use_proxy=use_proxy) return ARTIFACT_REFERENCE_PATTERN.sub(repl, val) diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/util/schema.py b/third_party/python/taskcluster_taskgraph/taskgraph/util/schema.py index 3989f71182..02e79a3a27 100644 --- a/third_party/python/taskcluster_taskgraph/taskgraph/util/schema.py +++ b/third_party/python/taskcluster_taskgraph/taskgraph/util/schema.py @@ -74,7 +74,7 @@ def resolve_keyed_by( For example, given item:: - job: + task: test-platform: linux128 chunks: by-test-platform: @@ -82,10 +82,10 @@ def resolve_keyed_by( win.*: 6 default: 12 - a call to `resolve_keyed_by(item, 'job.chunks', item['thing-name'])` + a call to `resolve_keyed_by(item, 'task.chunks', item['thing-name'])` would mutate item in-place to:: - job: + task: test-platform: linux128 chunks: 12 @@ -182,7 +182,7 @@ def check_schema(schema): if not identifier_re.match(k) and not excepted(path): raise RuntimeError( "YAML schemas should use dashed lower-case identifiers, " - "not {!r} @ {}".format(k, path) + f"not {k!r} @ {path}" ) elif isinstance(k, (voluptuous.Optional, voluptuous.Required)): check_identifier(path, k.schema) @@ -191,9 +191,7 @@ def check_schema(schema): check_identifier(path, v) elif not excepted(path): raise RuntimeError( - "Unexpected type in YAML schema: {} @ {}".format( - type(k).__name__, path - ) + f"Unexpected type in YAML schema: {type(k).__name__} @ {path}" ) if isinstance(sch, collections.abc.Mapping): diff --git 
a/third_party/python/taskcluster_taskgraph/taskgraph/util/set_name.py b/third_party/python/taskcluster_taskgraph/taskgraph/util/set_name.py new file mode 100644 index 0000000000..4c27a9cca1 --- /dev/null +++ b/third_party/python/taskcluster_taskgraph/taskgraph/util/set_name.py @@ -0,0 +1,34 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# Define a collection of set_name functions +# Note: this is stored here instead of where it is used in the `from_deps` +# transform to give consumers a chance to register their own `set_name` +# handlers before the `from_deps` schema is created. +SET_NAME_MAP = {} + + +def set_name(name, schema=None): + def wrapper(func): + assert ( + name not in SET_NAME_MAP + ), f"duplicate set_name function name {name} ({func} and {SET_NAME_MAP[name]})" + SET_NAME_MAP[name] = func + func.schema = schema + return func + + return wrapper + + +@set_name("strip-kind") +def set_name_strip_kind(config, tasks, primary_dep, primary_kind): + if primary_dep.label.startswith(primary_kind): + return primary_dep.label[len(primary_kind) + 1 :] + else: + return primary_dep.label + + +@set_name("retain-kind") +def set_name_retain_kind(config, tasks, primary_dep, primary_kind): + return primary_dep.label diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/util/shell.py b/third_party/python/taskcluster_taskgraph/taskgraph/util/shell.py index d695767f05..16b71b7d6a 100644 --- a/third_party/python/taskcluster_taskgraph/taskgraph/util/shell.py +++ b/third_party/python/taskcluster_taskgraph/taskgraph/util/shell.py @@ -14,7 +14,7 @@ def _quote(s): As a special case, if given an int, returns a string containing the int, not enclosed in quotes. """ - if type(s) == int: + if isinstance(s, int): return "%d" % s # Empty strings need to be quoted to have any significance diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/util/taskcluster.py b/third_party/python/taskcluster_taskgraph/taskgraph/util/taskcluster.py index a830a473b3..b467e98a97 100644 --- a/third_party/python/taskcluster_taskgraph/taskgraph/util/taskcluster.py +++ b/third_party/python/taskcluster_taskgraph/taskgraph/util/taskcluster.py @@ -3,10 +3,12 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. +import copy import datetime import functools import logging import os +from typing import Dict, List, Union import requests import taskcluster_urls as liburls @@ -53,9 +55,11 @@ def get_root_url(use_proxy): logger.debug( "Running in Taskcluster instance {}{}".format( os.environ["TASKCLUSTER_ROOT_URL"], - " with taskcluster-proxy" - if "TASKCLUSTER_PROXY_URL" in os.environ - else "", + ( + " with taskcluster-proxy" + if "TASKCLUSTER_PROXY_URL" in os.environ + else "" + ), ) ) return liburls.normalize_root_url(os.environ["TASKCLUSTER_ROOT_URL"]) @@ -136,22 +140,9 @@ def _handle_artifact(path, response): def get_artifact_url(task_id, path, use_proxy=False): artifact_tmpl = liburls.api( - get_root_url(False), "queue", "v1", "task/{}/artifacts/{}" + get_root_url(use_proxy), "queue", "v1", "task/{}/artifacts/{}" ) - data = artifact_tmpl.format(task_id, path) - if use_proxy: - # Until Bug 1405889 is deployed, we can't download directly - # from the taskcluster-proxy. Work around by using the /bewit - # endpoint instead. - # The bewit URL is the body of a 303 redirect, which we don't - # want to follow (which fetches a potentially large resource). 
- response = _do_request( - os.environ["TASKCLUSTER_PROXY_URL"] + "/bewit", - data=data, - allow_redirects=False, - ) - return response.text - return data + return artifact_tmpl.format(task_id, path) def get_artifact(task_id, path, use_proxy=False): @@ -244,6 +235,7 @@ def get_task_url(task_id, use_proxy=False): return task_tmpl.format(task_id) +@memoize def get_task_definition(task_id, use_proxy=False): response = _do_request(get_task_url(task_id, use_proxy)) return response.json() @@ -327,11 +319,7 @@ def get_purge_cache_url(provisioner_id, worker_type, use_proxy=False): def purge_cache(provisioner_id, worker_type, cache_name, use_proxy=False): """Requests a cache purge from the purge-caches service.""" if testing: - logger.info( - "Would have purged {}/{}/{}.".format( - provisioner_id, worker_type, cache_name - ) - ) + logger.info(f"Would have purged {provisioner_id}/{worker_type}/{cache_name}.") else: logger.info(f"Purging {provisioner_id}/{worker_type}/{cache_name}.") purge_cache_url = get_purge_cache_url(provisioner_id, worker_type, use_proxy) @@ -371,3 +359,40 @@ def list_task_group_incomplete_tasks(task_group_id): params = {"continuationToken": resp.get("continuationToken")} else: break + + +@memoize +def _get_deps(task_ids, use_proxy): + upstream_tasks = {} + for task_id in task_ids: + task_def = get_task_definition(task_id, use_proxy) + upstream_tasks[task_def["metadata"]["name"]] = task_id + + upstream_tasks.update(_get_deps(tuple(task_def["dependencies"]), use_proxy)) + + return upstream_tasks + + +def get_ancestors( + task_ids: Union[List[str], str], use_proxy: bool = False +) -> Dict[str, str]: + """Gets the ancestor tasks of the given task_ids as a dictionary of label -> taskid. + + Args: + task_ids (str or [str]): A single task id or a list of task ids to find the ancestors of. + use_proxy (bool): See get_root_url. + + Returns: + dict: A dict whose keys are task labels and values are task ids. 
+ """ + upstream_tasks: Dict[str, str] = {} + + if isinstance(task_ids, str): + task_ids = [task_ids] + + for task_id in task_ids: + task_def = get_task_definition(task_id, use_proxy) + + upstream_tasks.update(_get_deps(tuple(task_def["dependencies"]), use_proxy)) + + return copy.deepcopy(upstream_tasks) diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/util/time.py b/third_party/python/taskcluster_taskgraph/taskgraph/util/time.py index e511978b5f..6639e5dddd 100644 --- a/third_party/python/taskcluster_taskgraph/taskgraph/util/time.py +++ b/third_party/python/taskcluster_taskgraph/taskgraph/util/time.py @@ -73,9 +73,7 @@ def value_of(input_str): if unit not in ALIASES: raise UnknownTimeMeasurement( - "{} is not a valid time measure use one of {}".format( - unit, sorted(ALIASES.keys()) - ) + f"{unit} is not a valid time measure use one of {sorted(ALIASES.keys())}" ) return ALIASES[unit](value) diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/util/treeherder.py b/third_party/python/taskcluster_taskgraph/taskgraph/util/treeherder.py index cff5f286cc..6bb6dbd137 100644 --- a/third_party/python/taskcluster_taskgraph/taskgraph/util/treeherder.py +++ b/third_party/python/taskcluster_taskgraph/taskgraph/util/treeherder.py @@ -42,22 +42,25 @@ def replace_group(treeherder_symbol, new_group): return join_symbol(new_group, symbol) -def inherit_treeherder_from_dep(job, dep_job): - """Inherit treeherder defaults from dep_job""" - treeherder = job.get("treeherder", {}) +def inherit_treeherder_from_dep(task, dep_task): + """Inherit treeherder defaults from dep_task""" + treeherder = task.get("treeherder", {}) dep_th_platform = ( - dep_job.task.get("extra", {}) + dep_task.task.get("extra", {}) .get("treeherder", {}) .get("machine", {}) .get("platform", "") ) dep_th_collection = list( - dep_job.task.get("extra", {}).get("treeherder", {}).get("collection", {}).keys() + dep_task.task.get("extra", {}) + .get("treeherder", {}) + .get("collection", {}) + .keys() )[0] treeherder.setdefault("platform", f"{dep_th_platform}/{dep_th_collection}") treeherder.setdefault( - "tier", dep_job.task.get("extra", {}).get("treeherder", {}).get("tier", 1) + "tier", dep_task.task.get("extra", {}).get("treeherder", {}).get("tier", 1) ) # Does not set symbol treeherder.setdefault("kind", "build") diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/util/vcs.py b/third_party/python/taskcluster_taskgraph/taskgraph/util/vcs.py index 2d967d2645..c2fd0d3236 100644 --- a/third_party/python/taskcluster_taskgraph/taskgraph/util/vcs.py +++ b/third_party/python/taskcluster_taskgraph/taskgraph/util/vcs.py @@ -10,9 +10,6 @@ import subprocess from abc import ABC, abstractmethod, abstractproperty from shutil import which -import requests -from redo import retry - from taskgraph.util.path import ancestors PUSHLOG_TMPL = "{}/json-pushes?version=2&changeset={}&tipsonly=1&full=1" @@ -21,7 +18,7 @@ logger = logging.getLogger(__name__) class Repository(ABC): - # Both mercurial and git use sha1 as revision idenfiers. Luckily, both define + # Both mercurial and git use sha1 as revision identifiers. Luckily, both define # the same value as the null revision. 
# # https://github.com/git/git/blob/dc04167d378fb29d30e1647ff6ff51dd182bc9a3/t/oid-info/hash-info#L7 @@ -519,34 +516,3 @@ def get_repository(path): return GitRepository(path) raise RuntimeError("Current directory is neither a git or hg repository") - - -def find_hg_revision_push_info(repository, revision): - """Given the parameters for this action and a revision, find the - pushlog_id of the revision.""" - pushlog_url = PUSHLOG_TMPL.format(repository, revision) - - def query_pushlog(url): - r = requests.get(pushlog_url, timeout=60) - r.raise_for_status() - return r - - r = retry( - query_pushlog, - args=(pushlog_url,), - attempts=5, - sleeptime=10, - ) - pushes = r.json()["pushes"] - if len(pushes) != 1: - raise RuntimeError( - "Unable to find a single pushlog_id for {} revision {}: {}".format( - repository, revision, pushes - ) - ) - pushid = list(pushes.keys())[0] - return { - "pushdate": pushes[pushid]["date"], - "pushid": pushid, - "user": pushes[pushid]["user"], - } diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/util/verify.py b/third_party/python/taskcluster_taskgraph/taskgraph/util/verify.py index e6705c16cf..b5bb0889ae 100644 --- a/third_party/python/taskcluster_taskgraph/taskgraph/util/verify.py +++ b/third_party/python/taskcluster_taskgraph/taskgraph/util/verify.py @@ -134,10 +134,8 @@ def verify_task_graph_symbol(task, taskgraph, scratch_pad, graph_config, paramet collection_keys = tuple(sorted(treeherder.get("collection", {}).keys())) if len(collection_keys) != 1: raise Exception( - "Task {} can't be in multiple treeherder collections " - "(the part of the platform after `/`): {}".format( - task.label, collection_keys - ) + f"Task {task.label} can't be in multiple treeherder collections " + f"(the part of the platform after `/`): {collection_keys}" ) platform = treeherder.get("machine", {}).get("platform") group_symbol = treeherder.get("groupSymbol") @@ -175,9 +173,7 @@ def verify_trust_domain_v2_routes( if route.startswith(route_prefix): if route in scratch_pad: raise Exception( - "conflict between {}:{} for route: {}".format( - task.label, scratch_pad[route], route - ) + f"conflict between {task.label}:{scratch_pad[route]} for route: {route}" ) else: scratch_pad[route] = task.label @@ -206,9 +202,7 @@ def verify_routes_notification_filters( route_filter = route.split(".")[-1] if route_filter not in valid_filters: raise Exception( - "{} has invalid notification filter ({})".format( - task.label, route_filter - ) + f"{task.label} has invalid notification filter ({route_filter})" ) @@ -235,12 +229,7 @@ def verify_dependency_tiers(task, taskgraph, scratch_pad, graph_config, paramete continue if tier < tiers[d]: raise Exception( - "{} (tier {}) cannot depend on {} (tier {})".format( - task.label, - printable_tier(tier), - d, - printable_tier(tiers[d]), - ) + f"{task.label} (tier {printable_tier(tier)}) cannot depend on {d} (tier {printable_tier(tiers[d])})" ) @@ -262,11 +251,7 @@ def verify_toolchain_alias(task, taskgraph, scratch_pad, graph_config, parameter if key in scratch_pad: raise Exception( "Duplicate toolchain-alias in tasks " - "`{}`and `{}`: {}".format( - task.label, - scratch_pad[key], - key, - ) + f"`{task.label}`and `{scratch_pad[key]}`: {key}" ) else: scratch_pad[key] = task.label diff --git a/third_party/python/taskcluster_taskgraph/taskgraph/util/yaml.py b/third_party/python/taskcluster_taskgraph/taskgraph/util/yaml.py index 141c7a16d3..a733521527 100644 --- a/third_party/python/taskcluster_taskgraph/taskgraph/util/yaml.py +++ 
b/third_party/python/taskcluster_taskgraph/taskgraph/util/yaml.py @@ -5,7 +5,10 @@ import os -from yaml.loader import SafeLoader +try: + from yaml import CSafeLoader as SafeLoader +except ImportError: + from yaml import SafeLoader class UnicodeLoader(SafeLoader): -- cgit v1.2.3