author     Daniel Baumann <daniel.baumann@progress-linux.org>   2024-04-07 19:33:14 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>   2024-04-07 19:33:14 +0000
commit     36d22d82aa202bb199967e9512281e9a53db42c9 (patch)
tree       105e8c98ddea1c1e4784a60a5a6410fa416be2de /third_party/rust/syn
parent     Initial commit. (diff)
Adding upstream version 115.7.0esr.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat
145 files changed, 70602 insertions(+), 0 deletions(-)
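Editorial note, not part of the commit itself: the crate vendored by this change is syn 1.0.107, dtolnay's parser for turning streams of Rust tokens into a syntax tree, most commonly consumed from procedural macros. As orientation before the raw diff, here is a minimal, hedged sketch of that consumer pattern using the public syn 1.x API that the README included below documents; the trait name `MyTrait` and the generated `field_names` method are illustrative placeholders, not anything defined by this commit.

```rust
// Illustrative sketch only (editor's example, not part of this commit):
// how a derive macro built on syn 1.x typically inspects the fields of the
// struct it was applied to. Generics are ignored for brevity.
use proc_macro2::TokenStream;
use quote::quote;
use syn::{parse2, Data, DeriveInput, Fields};

fn expand_my_trait(input: TokenStream) -> syn::Result<TokenStream> {
    let input: DeriveInput = parse2(input)?;
    let name = input.ident;

    // Collect the named fields of a struct; anything else gets a spanned
    // error that points back at the user's code, as the README describes.
    let fields = match input.data {
        Data::Struct(data) => match data.fields {
            Fields::Named(named) => named.named,
            _ => return Err(syn::Error::new_spanned(&name, "expected named fields")),
        },
        _ => return Err(syn::Error::new_spanned(&name, "expected a struct")),
    };

    let field_names = fields.iter().map(|f| &f.ident);
    Ok(quote! {
        impl MyTrait for #name {
            fn field_names(&self) -> &'static [&'static str] {
                &[#(stringify!(#field_names)),*]
            }
        }
    })
}
```

A real derive macro would wrap this in a `#[proc_macro_derive(...)]` entry point that converts between `proc_macro::TokenStream` and `proc_macro2::TokenStream`, as the README example further down shows.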
diff --git a/third_party/rust/syn/.cargo-checksum.json b/third_party/rust/syn/.cargo-checksum.json new file mode 100644 index 0000000000..edd04e5cd1 --- /dev/null +++ b/third_party/rust/syn/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{"Cargo.toml":"1ff565970239963d56cbfdd20476fd265fcf2d050fc5ed92a298686321985230","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"ea9f2b6340b302b5608d2bedcda7e2d707f3eaebf4cc983c02d55071ead7096f","benches/file.rs":"3d737ef3878f6e242b003af9bd539e565f98439a12ee44d9548d84e3fdd7af0c","benches/rust.rs":"11ac9fe898a7bf1bd63e8a8cc9c08bd795b01f0248215cff99afaaf28ce87fab","build.rs":"b815649fd2929d3debd93a58f5da2fb8eba506047a6a5ba538347305828a87b0","src/attr.rs":"234d9cebe2c5e92cd0f5e1117bf5755037e2e905788a337000a65d4bd82b63aa","src/await.rs":"8aa22e3c201cb2bdb6b4817fa00901f308ab06817607aa7b884c58c957705969","src/bigint.rs":"efc7f64959980653d73fe4f8bc2a3a2904dc05f45b02c6dc15cd316fa3d7c338","src/buffer.rs":"4d15f35273d485261be4f1a765ae03abc1daee9fc9dac5fb4f9b624d6b22cb58","src/custom_keyword.rs":"5c706fc3611e73d16b8c019d7ecb848a86b1ccfcd9e556f80bb6e6a4abe058a8","src/custom_punctuation.rs":"8a666298e774b0d326642f0f73284f6677d0d0a7c9e4a712c9c98d010b4d8a2c","src/data.rs":"75d2c2b5d6a01bf8a6fa2845e41663d8045a78b4b191f1a1bd7c93619d20017a","src/derive.rs":"ee24a202be2d36ccdff576dd9cd765e94b33ef2286946e6725d75b08e777d462","src/discouraged.rs":"6c6a9298f8d24f578da119557bc588f3bd928f7b79fca27d6bdfe3e786dd005f","src/drops.rs":"013385f1dd95663f1afab41abc1e2eea04181998644828935ca564c74d6462ae","src/error.rs":"b30e738fdab7d10e126350e09f7ad907bf4dc14e684b9ed9eeea001c7ee356e0","src/export.rs":"0cf50d70c32d5fddba8b1193032df62e560237c113df3e86ba26b565cc82838e","src/expr.rs":"5eea3828f3291b0ce5463ed5f0c23fc8a39aeceae68a3247ae02ae467dd35a98","src/ext.rs":"1f648cff1d705a1cea64b32b77482b97a82d2fe0aaf63b40cade91e5c02dc969","src/file.rs":"f86697655222ae294215114f4eae8e6b0b5e2a935d6c479ff8f8f889c4efd2e2","src/gen/clone.rs":"76e89fe155fedf43bc4a252af7e35319b82ce455f584bad8698fdc3f9b7f5d4e","src/gen/debug.rs":"4b05e474e864ce6bf1a5a6ab48ee6c0ecdf41a0d750237990cf2e31963bc1208","src/gen/eq.rs":"79f84836fdcd5cfa352f38055dab7c3246c7757650946c1c701234b11021652a","src/gen/fold.rs":"fcd6a05c8c8e0c36e7ede8593002528b553c8b648fbed452106fd6a8a8c9212a","src/gen/hash.rs":"575e8beae303c1eabda12bf76cbd82672268c502a8ebb8517aab18b40fdbc44e","src/gen/visit.rs":"ced9f6c17d2b3eb3553faab710cb2b3d44d6bca7d1862c8c5da09c3d45debecb","src/gen/visit_mut.rs":"966ea340c53461bf8a1c6bed3c882e4ab8b8907fd18ac35531266f7891ae5f46","src/gen_helper.rs":"ea6c66388365971db6a2fc86cbb208f7eacde77e245bc8623f27a3642a3d7741","src/generics.rs":"b81ce0d3ea0f7accef4590d5181cecc4589395865abaea60b0470da727f17340","src/group.rs":"166f0fbb365471ffa3e4f554b72c2b460cbf7e3a1f9bec6c01ef6bbbcd751041","src/ident.rs":"2443e43561abea7eea577b141422258237a663499c839923d8a5ca6fea2470db","src/item.rs":"419c4d6135a7ca7b8f94b5ba038b6af8fcb3939ae807153a19e3c82e9b01e0b7","src/lib.rs":"8c152481907905472fc3e4aae63f82ed78d4d16cf8cc286675727668760c7f2e","src/lifetime.rs":"b18862ef1e690037a4f308ea897debad7bc5038584e3b26c6d8809752ea0e3c2","src/lit.rs":"fc06ddd523f7f9971d8abdb4c8d5d51030ffb3d6810615d5575ae210a7800695","src/lookahead.rs":"e2c2b6d55906421e83dab51463b58bc6dcb582f1bff9303c8b62afefb8d71e5f","src/mac.rs":"004cb89f9697564f6c9ee837e08ead68463ef946fb4c13c6c105adf2ba364b2b","src/macros.rs":"936f503c2fcde602f05220954ecaf87625c6138d0af13
d33d56c7b6530110084","src/op.rs":"9d499022902743a6a0a19223b356449a979b90e60552d0446497d72750e646a4","src/parse.rs":"7b2f8caddf25a5734cbcdf7cbf043cbf9afbc07b484966cd59ddfcec9f970fb3","src/parse_macro_input.rs":"a5d16859b782bb6a2754c1066468a2f1ea05b57390caa32175bb84064973be7b","src/parse_quote.rs":"d7d996f1382c68b5fbfd4b7327ce1d389cd43c3bb3c4f382a35994d0bb79d8ab","src/pat.rs":"b2de04ae6c01df50eab9d1c3908287aca8424adc2007b926c7bcf74d1f64d40a","src/path.rs":"58a4fb3b1ff76d32cfd84a3914f8cadbf55b363c1929222b362b7465385520ac","src/print.rs":"da6529c1d9d21aaf6c835f66b4e67eacb7cf91a10eb5e9a2143b49bf99b3b5e1","src/punctuated.rs":"44c29523dee76605be2531674fe21ed2f1bbd02559aac8b7a49c70af23129ca1","src/reserved.rs":"e70e028bd55cfa43e23cab4ba29e4dc53a3d91eff685ef2b6e57efc2b87a3428","src/sealed.rs":"896a495a5340eec898527f18bd4ddca408ea03ea0ee3af30074ff48deace778d","src/span.rs":"748c51c6feb223c26d3b1701f5bb98aee823666c775c98106cfa24fe29d8cec1","src/spanned.rs":"3ca016a943637653ab98e373dfb826a120f3c159867346fa38a844439944eb39","src/stmt.rs":"601a6914f1e0bf97ae0d31d474a531d195b8c251a4ded11aa8746ac0018d367b","src/thread.rs":"815eca6bd64f4eef7c447f0809e84108f5428ff50225224b373efd8fbb696874","src/token.rs":"5e423a696f80e281c322f37c87577f9fdc28607e9c007e24896a2b12da62d5ad","src/tt.rs":"32402645b6e82ef1e882945721b59b5fb7b0ee337d1972876362ecacef643d0f","src/ty.rs":"9befd22f8c8ac731b7f68008552a1335797a3ef19184190eec0e103e4ebe18a7","src/verbatim.rs":"96d4280e4556a1841b8dcb306bc35a94d18f71dceb63f3c27a4fe7f776191760","src/whitespace.rs":"e63dd0aa3d34029f17766a8b09c1a6e4479e36c552c8b7023d710a399333aace","tests/common/eq.rs":"e930fb0bdcec3e787986b56785b1db580e5a26a5131df2f2b91a6da37069de15","tests/common/mod.rs":"432ad35577f836a20b517d8c26ed994ac25fe73ef2f461c67688b61b99762015","tests/common/parse.rs":"81580f23583723f7a2a337c4d13ebc021057cd825562fb4e474caa7cc641fed9","tests/debug/gen.rs":"1b7f875344cb04a7dd3df62deac2f410a9d107c097986e68006d87465f5f5306","tests/debug/mod.rs":"3a6bb799f478101f71c84c6f1a854a58afe2f9db43c39017909346ca20262d94","tests/macros/mod.rs":"aff805b35cfd55aef6a1359ff747e4023afcb08d69d86aff4c19465d29dda088","tests/regression.rs":"86731134bfb9bb693d9a4fc62393027de80a8bf031109ea6c7ea475b1ebdde8d","tests/regression/issue1108.rs":"adcc55a42239d344da74216ed85fc14153ddd6ca4dec4872d8339604ba78c185","tests/regression/issue1235.rs":"a2266b10c3f7c7af5734817ab0a3e8b309b51e7d177b63f26e67e6b744d280b0","tests/repo/mod.rs":"159c2c4b6416d26ac42ffc35f6cb587c4c1e2b0f24de9aa42b0337a534d7d86d","tests/repo/progress.rs":"c08d0314a7f3ecf760d471f27da3cd2a500aeb9f1c8331bffb2aa648f9fabf3f","tests/test_asyncness.rs":"cff01db49d28ab23b0b258bc6c0a5cc4071be4fe7248eef344a5d79d2fb649b7","tests/test_attribute.rs":"0ffd99384e1a52ae17d9fed5c4053e411e8f9018decef07ffa621d1faa7329d8","tests/test_derive_input.rs":"62bb86aaaaf730187a46ff700a8e3b2d1a163039b109b6a483aa44ed2b6806fe","tests/test_expr.rs":"41eb343829ad36cdea40cd06d45a90765e7fe6f1e47dd550daf1b6096c3a7b44","tests/test_generics.rs":"54b7d2afc19aa6e9049585f4c8f7d3f0c29ac3bd11a2c769e9df76f18a4f5ecb","tests/test_grouping.rs":"6276c3c73bba649dec5c97904ad2492879f918bc887a2c425d095c654ca0d925","tests/test_ident.rs":"9eb53d1e21edf23e7c9e14dc74dcc2b2538e9221e19dbcc0a44e3acc2e90f3f6","tests/test_item.rs":"a3642c80066f1e7787becfd0278af90a6b7968d6c1249e25e81663aa454cfb2a","tests/test_iterators.rs":"9cf6fde17853ce7d5617e1de9ef901c47ca35c0f1c2dd668c0d0604d7b48598c","tests/test_lit.rs":"19740ea9cd4a980bcab9b0dcaa4b032bb6ebb137fa5e4237140b97da1d9679fa","tests/test_meta.rs":"65d4586d131f6cac6669
4ca5e936748ec4e7f7423af6d8da509240e6be14800b","tests/test_parse_buffer.rs":"68d857f776396d064fcc0023c37093c2fbf75ee68e8241d4014d00d1423c18e9","tests/test_parse_stream.rs":"bf1db6fab7ac396fa61012faccbe6ffbc9c3d795ed2900be75e91c5b09b0c62f","tests/test_pat.rs":"d4465f4fc3fd5d6e534ba8efabe1e0ed6da89de4ac7c96effa6bfb880c4287cf","tests/test_path.rs":"71092a5ae2c9143b92a8fe15a92d39958b3c28bd4d4275cfb2d22cbdd53ada07","tests/test_precedence.rs":"736eee861c4c7a3d7d4387d2fb1b5eced1541790d34974f72b0a5532797e73c3","tests/test_receiver.rs":"084eca59984b9a18651da52f2c4407355da3de1335916a12477652999e2d01cc","tests/test_round_trip.rs":"c3c415413d5177a728c7cbbfb7ef44aebbc6a2c821dd56695156e9e33636fd57","tests/test_shebang.rs":"f5772cadad5b56e3112cb16308b779f92bce1c3a48091fc9933deb2276a69331","tests/test_should_parse.rs":"1d3535698a446e2755bfc360676bdb161841a1f454cdef6e7556c6d06a95c89d","tests/test_size.rs":"6720d55569808244ab011364c39931f06aa509cd05f98ab908b0670e8501b3c8","tests/test_stmt.rs":"0601fc32131b5501dfcdc4b4248d46bf21e0a98a49eb19439e1a46869dfb30b7","tests/test_token_trees.rs":"43e56a701817e3c3bfd0cae54a457dd7a38ccb3ca19da41e2b995fdf20e6ed18","tests/test_ty.rs":"f71d7f7f1c038aaabea8dd4c03c0d5752c76d570f8b4885a81659825bbb4d576","tests/test_visibility.rs":"7456fcb3a6634db509748aededff9c2d8b242d511a3e5ee3022e40b232892704","tests/zzz_stable.rs":"2a862e59cb446235ed99aec0e6ada8e16d3ecc30229b29d825b7c0bbc2602989"},"package":"1f4064b5b16e03ae50984a5a8ed5d4f8803e6bc1fd170a3cda91a1be4b18e3f5"}
\ No newline at end of file diff --git a/third_party/rust/syn/Cargo.toml b/third_party/rust/syn/Cargo.toml new file mode 100644 index 0000000000..c2a36013ec --- /dev/null +++ b/third_party/rust/syn/Cargo.toml @@ -0,0 +1,147 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g., crates.io) dependencies. +# +# If you are reading this file be aware that the original Cargo.toml +# will likely look very different (and much more reasonable). +# See Cargo.toml.orig for the original contents. + +[package] +edition = "2018" +rust-version = "1.31" +name = "syn" +version = "1.0.107" +authors = ["David Tolnay <dtolnay@gmail.com>"] +include = [ + "/benches/**", + "/build.rs", + "/Cargo.toml", + "/LICENSE-APACHE", + "/LICENSE-MIT", + "/README.md", + "/src/**", + "/tests/**", +] +description = "Parser for Rust source code" +documentation = "https://docs.rs/syn" +readme = "README.md" +keywords = [ + "macros", + "syn", +] +categories = [ + "development-tools::procedural-macro-helpers", + "parser-implementations", +] +license = "MIT OR Apache-2.0" +repository = "https://github.com/dtolnay/syn" + +[package.metadata.docs.rs] +all-features = true +targets = ["x86_64-unknown-linux-gnu"] +rustdoc-args = [ + "--cfg", + "doc_cfg", +] + +[package.metadata.playground] +features = [ + "full", + "visit", + "visit-mut", + "fold", + "extra-traits", +] + +[lib] +doc-scrape-examples = false + +[[bench]] +name = "rust" +harness = false +required-features = [ + "full", + "parsing", +] + +[[bench]] +name = "file" +required-features = [ + "full", + "parsing", +] + +[dependencies.proc-macro2] +version = "1.0.46" +default-features = false + +[dependencies.quote] +version = "1.0" +optional = true +default-features = false + +[dependencies.unicode-ident] +version = "1.0" + +[dev-dependencies.anyhow] +version = "1.0" + +[dev-dependencies.automod] +version = "1.0" + +[dev-dependencies.flate2] +version = "1.0" + +[dev-dependencies.insta] +version = "1.0" + +[dev-dependencies.rayon] +version = "1.0" + +[dev-dependencies.ref-cast] +version = "1.0" + +[dev-dependencies.regex] +version = "1.0" + +[dev-dependencies.reqwest] +version = "0.11" +features = ["blocking"] + +[dev-dependencies.syn-test-suite] +version = "0" + +[dev-dependencies.tar] +version = "0.4.16" + +[dev-dependencies.termcolor] +version = "1.0" + +[dev-dependencies.walkdir] +version = "2.1" + +[features] +clone-impls = [] +default = [ + "derive", + "parsing", + "printing", + "clone-impls", + "proc-macro", +] +derive = [] +extra-traits = [] +fold = [] +full = [] +parsing = [] +printing = ["quote"] +proc-macro = [ + "proc-macro2/proc-macro", + "quote/proc-macro", +] +test = ["syn-test-suite/all-features"] +visit = [] +visit-mut = [] diff --git a/third_party/rust/syn/LICENSE-APACHE b/third_party/rust/syn/LICENSE-APACHE new file mode 100644 index 0000000000..16fe87b06e --- /dev/null +++ b/third_party/rust/syn/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
diff --git a/third_party/rust/syn/LICENSE-MIT b/third_party/rust/syn/LICENSE-MIT new file mode 100644 index 0000000000..31aa79387f --- /dev/null +++ b/third_party/rust/syn/LICENSE-MIT @@ -0,0 +1,23 @@ +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/third_party/rust/syn/README.md b/third_party/rust/syn/README.md new file mode 100644 index 0000000000..eeef83dd58 --- /dev/null +++ b/third_party/rust/syn/README.md @@ -0,0 +1,285 @@ +Parser for Rust source code +=========================== + +[<img alt="github" src="https://img.shields.io/badge/github-dtolnay/syn-8da0cb?style=for-the-badge&labelColor=555555&logo=github" height="20">](https://github.com/dtolnay/syn) +[<img alt="crates.io" src="https://img.shields.io/crates/v/syn.svg?style=for-the-badge&color=fc8d62&logo=rust" height="20">](https://crates.io/crates/syn) +[<img alt="docs.rs" src="https://img.shields.io/badge/docs.rs-syn-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs" height="20">](https://docs.rs/syn) +[<img alt="build status" src="https://img.shields.io/github/actions/workflow/status/dtolnay/syn/ci.yml?branch=master&style=for-the-badge" height="20">](https://github.com/dtolnay/syn/actions?query=branch%3Amaster) + +Syn is a parsing library for parsing a stream of Rust tokens into a syntax tree +of Rust source code. + +Currently this library is geared toward use in Rust procedural macros, but +contains some APIs that may be useful more generally. + +- **Data structures** — Syn provides a complete syntax tree that can represent + any valid Rust source code. The syntax tree is rooted at [`syn::File`] which + represents a full source file, but there are other entry points that may be + useful to procedural macros including [`syn::Item`], [`syn::Expr`] and + [`syn::Type`]. + +- **Derives** — Of particular interest to derive macros is [`syn::DeriveInput`] + which is any of the three legal input items to a derive macro. An example + below shows using this type in a library that can derive implementations of a + user-defined trait. + +- **Parsing** — Parsing in Syn is built around [parser functions] with the + signature `fn(ParseStream) -> Result<T>`. Every syntax tree node defined by + Syn is individually parsable and may be used as a building block for custom + syntaxes, or you may dream up your own brand new syntax without involving any + of our syntax tree types. + +- **Location information** — Every token parsed by Syn is associated with a + `Span` that tracks line and column information back to the source of that + token. 
These spans allow a procedural macro to display detailed error messages + pointing to all the right places in the user's code. There is an example of + this below. + +- **Feature flags** — Functionality is aggressively feature gated so your + procedural macros enable only what they need, and do not pay in compile time + for all the rest. + +[`syn::File`]: https://docs.rs/syn/1.0/syn/struct.File.html +[`syn::Item`]: https://docs.rs/syn/1.0/syn/enum.Item.html +[`syn::Expr`]: https://docs.rs/syn/1.0/syn/enum.Expr.html +[`syn::Type`]: https://docs.rs/syn/1.0/syn/enum.Type.html +[`syn::DeriveInput`]: https://docs.rs/syn/1.0/syn/struct.DeriveInput.html +[parser functions]: https://docs.rs/syn/1.0/syn/parse/index.html + +*Version requirement: Syn supports rustc 1.31 and up.* + +[*Release notes*](https://github.com/dtolnay/syn/releases) + +<br> + +## Resources + +The best way to learn about procedural macros is by writing some. Consider +working through [this procedural macro workshop][workshop] to get familiar with +the different types of procedural macros. The workshop contains relevant links +into the Syn documentation as you work through each project. + +[workshop]: https://github.com/dtolnay/proc-macro-workshop + +<br> + +## Example of a derive macro + +The canonical derive macro using Syn looks like this. We write an ordinary Rust +function tagged with a `proc_macro_derive` attribute and the name of the trait +we are deriving. Any time that derive appears in the user's code, the Rust +compiler passes their data structure as tokens into our macro. We get to execute +arbitrary Rust code to figure out what to do with those tokens, then hand some +tokens back to the compiler to compile into the user's crate. + +[`TokenStream`]: https://doc.rust-lang.org/proc_macro/struct.TokenStream.html + +```toml +[dependencies] +syn = "1.0" +quote = "1.0" + +[lib] +proc-macro = true +``` + +```rust +use proc_macro::TokenStream; +use quote::quote; +use syn::{parse_macro_input, DeriveInput}; + +#[proc_macro_derive(MyMacro)] +pub fn my_macro(input: TokenStream) -> TokenStream { + // Parse the input tokens into a syntax tree + let input = parse_macro_input!(input as DeriveInput); + + // Build the output, possibly using quasi-quotation + let expanded = quote! { + // ... + }; + + // Hand the output tokens back to the compiler + TokenStream::from(expanded) +} +``` + +The [`heapsize`] example directory shows a complete working implementation of a +derive macro. It works on any Rust compiler 1.31+. The example derives a +`HeapSize` trait which computes an estimate of the amount of heap memory owned +by a value. + +[`heapsize`]: examples/heapsize + +```rust +pub trait HeapSize { + /// Total number of bytes of heap memory owned by `self`. + fn heap_size_of_children(&self) -> usize; +} +``` + +The derive macro allows users to write `#[derive(HeapSize)]` on data structures +in their program. + +```rust +#[derive(HeapSize)] +struct Demo<'a, T: ?Sized> { + a: Box<T>, + b: u8, + c: &'a str, + d: String, +} +``` + +<br> + +## Spans and error reporting + +The token-based procedural macro API provides great control over where the +compiler's error messages are displayed in user code. Consider the error the +user sees if one of their field types does not implement `HeapSize`. 
+ +```rust +#[derive(HeapSize)] +struct Broken { + ok: String, + bad: std::thread::Thread, +} +``` + +By tracking span information all the way through the expansion of a procedural +macro as shown in the `heapsize` example, token-based macros in Syn are able to +trigger errors that directly pinpoint the source of the problem. + +```console +error[E0277]: the trait bound `std::thread::Thread: HeapSize` is not satisfied + --> src/main.rs:7:5 + | +7 | bad: std::thread::Thread, + | ^^^^^^^^^^^^^^^^^^^^^^^^ the trait `HeapSize` is not implemented for `std::thread::Thread` +``` + +<br> + +## Parsing a custom syntax + +The [`lazy-static`] example directory shows the implementation of a +`functionlike!(...)` procedural macro in which the input tokens are parsed using +Syn's parsing API. + +[`lazy-static`]: examples/lazy-static + +The example reimplements the popular `lazy_static` crate from crates.io as a +procedural macro. + +```rust +lazy_static! { + static ref USERNAME: Regex = Regex::new("^[a-z0-9_-]{3,16}$").unwrap(); +} +``` + +The implementation shows how to trigger custom warnings and error messages on +the macro input. + +```console +warning: come on, pick a more creative name + --> src/main.rs:10:16 + | +10 | static ref FOO: String = "lazy_static".to_owned(); + | ^^^ +``` + +<br> + +## Testing + +When testing macros, we often care not just that the macro can be used +successfully but also that when the macro is provided with invalid input it +produces maximally helpful error messages. Consider using the [`trybuild`] crate +to write tests for errors that are emitted by your macro or errors detected by +the Rust compiler in the expanded code following misuse of the macro. Such tests +help avoid regressions from later refactors that mistakenly make an error no +longer trigger or be less helpful than it used to be. + +[`trybuild`]: https://github.com/dtolnay/trybuild + +<br> + +## Debugging + +When developing a procedural macro it can be helpful to look at what the +generated code looks like. Use `cargo rustc -- -Zunstable-options +--pretty=expanded` or the [`cargo expand`] subcommand. + +[`cargo expand`]: https://github.com/dtolnay/cargo-expand + +To show the expanded code for some crate that uses your procedural macro, run +`cargo expand` from that crate. To show the expanded code for one of your own +test cases, run `cargo expand --test the_test_case` where the last argument is +the name of the test file without the `.rs` extension. + +This write-up by Brandon W Maister discusses debugging in more detail: +[Debugging Rust's new Custom Derive system][debugging]. + +[debugging]: https://quodlibetor.github.io/posts/debugging-rusts-new-custom-derive-system/ + +<br> + +## Optional features + +Syn puts a lot of functionality behind optional features in order to optimize +compile time for the most common use cases. The following features are +available. + +- **`derive`** *(enabled by default)* — Data structures for representing the + possible input to a derive macro, including structs and enums and types. +- **`full`** — Data structures for representing the syntax tree of all valid + Rust source code, including items and expressions. +- **`parsing`** *(enabled by default)* — Ability to parse input tokens into a + syntax tree node of a chosen type. +- **`printing`** *(enabled by default)* — Ability to print a syntax tree node as + tokens of Rust source code. +- **`visit`** — Trait for traversing a syntax tree. +- **`visit-mut`** — Trait for traversing and mutating in place a syntax tree. 
+- **`fold`** — Trait for transforming an owned syntax tree. +- **`clone-impls`** *(enabled by default)* — Clone impls for all syntax tree + types. +- **`extra-traits`** — Debug, Eq, PartialEq, Hash impls for all syntax tree + types. +- **`proc-macro`** *(enabled by default)* — Runtime dependency on the dynamic + library libproc_macro from rustc toolchain. + +<br> + +## Proc macro shim + +Syn operates on the token representation provided by the [proc-macro2] crate +from crates.io rather than using the compiler's built in proc-macro crate +directly. This enables code using Syn to execute outside of the context of a +procedural macro, such as in unit tests or build.rs, and we avoid needing +incompatible ecosystems for proc macros vs non-macro use cases. + +In general all of your code should be written against proc-macro2 rather than +proc-macro. The one exception is in the signatures of procedural macro entry +points, which are required by the language to use `proc_macro::TokenStream`. + +The proc-macro2 crate will automatically detect and use the compiler's data +structures when a procedural macro is active. + +[proc-macro2]: https://docs.rs/proc-macro2/1.0/proc_macro2/ + +<br> + +#### License + +<sup> +Licensed under either of <a href="LICENSE-APACHE">Apache License, Version +2.0</a> or <a href="LICENSE-MIT">MIT license</a> at your option. +</sup> + +<br> + +<sub> +Unless you explicitly state otherwise, any contribution intentionally submitted +for inclusion in this crate by you, as defined in the Apache-2.0 license, shall +be dual licensed as above, without any additional terms or conditions. +</sub> diff --git a/third_party/rust/syn/benches/file.rs b/third_party/rust/syn/benches/file.rs new file mode 100644 index 0000000000..bd4a247df6 --- /dev/null +++ b/third_party/rust/syn/benches/file.rs @@ -0,0 +1,55 @@ +// $ cargo bench --features full,test --bench file + +#![feature(rustc_private, test)] +#![recursion_limit = "1024"] +#![allow( + clippy::items_after_statements, + clippy::missing_panics_doc, + clippy::must_use_candidate +)] + +extern crate test; + +#[macro_use] +#[path = "../tests/macros/mod.rs"] +mod macros; + +#[path = "../tests/common/mod.rs"] +mod common; +#[path = "../tests/repo/mod.rs"] +pub mod repo; + +use proc_macro2::{Span, TokenStream}; +use std::fs; +use std::str::FromStr; +use syn::parse::{ParseStream, Parser}; +use test::Bencher; + +const FILE: &str = "tests/rust/library/core/src/str/mod.rs"; + +fn get_tokens() -> TokenStream { + repo::clone_rust(); + let content = fs::read_to_string(FILE).unwrap(); + TokenStream::from_str(&content).unwrap() +} + +#[bench] +fn baseline(b: &mut Bencher) { + let tokens = get_tokens(); + b.iter(|| drop(tokens.clone())); +} + +#[bench] +fn create_token_buffer(b: &mut Bencher) { + let tokens = get_tokens(); + fn immediate_fail(_input: ParseStream) -> syn::Result<()> { + Err(syn::Error::new(Span::call_site(), "")) + } + b.iter(|| immediate_fail.parse2(tokens.clone())); +} + +#[bench] +fn parse_file(b: &mut Bencher) { + let tokens = get_tokens(); + b.iter(|| syn::parse2::<syn::File>(tokens.clone())); +} diff --git a/third_party/rust/syn/benches/rust.rs b/third_party/rust/syn/benches/rust.rs new file mode 100644 index 0000000000..e3f8f550ab --- /dev/null +++ b/third_party/rust/syn/benches/rust.rs @@ -0,0 +1,170 @@ +// $ cargo bench --features full,test --bench rust +// +// Syn only, useful for profiling: +// $ RUSTFLAGS='--cfg syn_only' cargo build --release --features full,test --bench rust + +#![cfg_attr(not(syn_only), 
feature(rustc_private))] +#![recursion_limit = "1024"] +#![allow(clippy::cast_lossless, clippy::unnecessary_wraps)] + +#[macro_use] +#[path = "../tests/macros/mod.rs"] +mod macros; + +#[path = "../tests/common/mod.rs"] +mod common; +#[path = "../tests/repo/mod.rs"] +mod repo; + +use std::fs; +use std::time::{Duration, Instant}; + +#[cfg(not(syn_only))] +mod tokenstream_parse { + use proc_macro2::TokenStream; + use std::str::FromStr; + + pub fn bench(content: &str) -> Result<(), ()> { + TokenStream::from_str(content).map(drop).map_err(drop) + } +} + +mod syn_parse { + pub fn bench(content: &str) -> Result<(), ()> { + syn::parse_file(content).map(drop).map_err(drop) + } +} + +#[cfg(not(syn_only))] +mod librustc_parse { + extern crate rustc_data_structures; + extern crate rustc_error_messages; + extern crate rustc_errors; + extern crate rustc_parse; + extern crate rustc_session; + extern crate rustc_span; + + use rustc_data_structures::sync::Lrc; + use rustc_error_messages::FluentBundle; + use rustc_errors::{emitter::Emitter, translation::Translate, Diagnostic, Handler}; + use rustc_session::parse::ParseSess; + use rustc_span::source_map::{FilePathMapping, SourceMap}; + use rustc_span::{edition::Edition, FileName}; + + pub fn bench(content: &str) -> Result<(), ()> { + struct SilentEmitter; + + impl Emitter for SilentEmitter { + fn emit_diagnostic(&mut self, _diag: &Diagnostic) {} + fn source_map(&self) -> Option<&Lrc<SourceMap>> { + None + } + } + + impl Translate for SilentEmitter { + fn fluent_bundle(&self) -> Option<&Lrc<FluentBundle>> { + None + } + fn fallback_fluent_bundle(&self) -> &FluentBundle { + panic!("silent emitter attempted to translate a diagnostic"); + } + } + + rustc_span::create_session_if_not_set_then(Edition::Edition2018, |_| { + let cm = Lrc::new(SourceMap::new(FilePathMapping::empty())); + let emitter = Box::new(SilentEmitter); + let handler = Handler::with_emitter(false, None, emitter); + let sess = ParseSess::with_span_handler(handler, cm); + if let Err(diagnostic) = rustc_parse::parse_crate_from_source_str( + FileName::Custom("bench".to_owned()), + content.to_owned(), + &sess, + ) { + diagnostic.cancel(); + return Err(()); + }; + Ok(()) + }) + } +} + +#[cfg(not(syn_only))] +mod read_from_disk { + pub fn bench(content: &str) -> Result<(), ()> { + _ = content; + Ok(()) + } +} + +fn exec(mut codepath: impl FnMut(&str) -> Result<(), ()>) -> Duration { + let begin = Instant::now(); + let mut success = 0; + let mut total = 0; + + walkdir::WalkDir::new("tests/rust/src") + .into_iter() + .filter_entry(repo::base_dir_filter) + .for_each(|entry| { + let entry = entry.unwrap(); + let path = entry.path(); + if path.is_dir() { + return; + } + let content = fs::read_to_string(path).unwrap(); + let ok = codepath(&content).is_ok(); + success += ok as usize; + total += 1; + if !ok { + eprintln!("FAIL {}", path.display()); + } + }); + + assert_eq!(success, total); + begin.elapsed() +} + +fn main() { + repo::clone_rust(); + + macro_rules! 
testcases { + ($($(#[$cfg:meta])* $name:ident,)*) => { + [ + $( + $(#[$cfg])* + (stringify!($name), $name::bench as fn(&str) -> Result<(), ()>), + )* + ] + }; + } + + #[cfg(not(syn_only))] + { + let mut lines = 0; + let mut files = 0; + exec(|content| { + lines += content.lines().count(); + files += 1; + Ok(()) + }); + eprintln!("\n{} lines in {} files", lines, files); + } + + for (name, f) in testcases!( + #[cfg(not(syn_only))] + read_from_disk, + #[cfg(not(syn_only))] + tokenstream_parse, + syn_parse, + #[cfg(not(syn_only))] + librustc_parse, + ) { + eprint!("{:20}", format!("{}:", name)); + let elapsed = exec(f); + eprintln!( + "elapsed={}.{:03}s", + elapsed.as_secs(), + elapsed.subsec_millis(), + ); + } + eprintln!(); +} diff --git a/third_party/rust/syn/build.rs b/third_party/rust/syn/build.rs new file mode 100644 index 0000000000..1a2c077bf0 --- /dev/null +++ b/third_party/rust/syn/build.rs @@ -0,0 +1,51 @@ +use std::env; +use std::process::Command; +use std::str; + +// The rustc-cfg strings below are *not* public API. Please let us know by +// opening a GitHub issue if your build environment requires some way to enable +// these cfgs other than by executing our build script. +fn main() { + let compiler = match rustc_version() { + Some(compiler) => compiler, + None => return, + }; + + if compiler.minor < 36 { + println!("cargo:rustc-cfg=syn_omit_await_from_token_macro"); + } + + if compiler.minor < 39 { + println!("cargo:rustc-cfg=syn_no_const_vec_new"); + } + + if compiler.minor < 40 { + println!("cargo:rustc-cfg=syn_no_non_exhaustive"); + } + + if compiler.minor < 56 { + println!("cargo:rustc-cfg=syn_no_negative_literal_parse"); + } + + if !compiler.nightly { + println!("cargo:rustc-cfg=syn_disable_nightly_tests"); + } +} + +struct Compiler { + minor: u32, + nightly: bool, +} + +fn rustc_version() -> Option<Compiler> { + let rustc = env::var_os("RUSTC")?; + let output = Command::new(rustc).arg("--version").output().ok()?; + let version = str::from_utf8(&output.stdout).ok()?; + let mut pieces = version.split('.'); + if pieces.next() != Some("rustc 1") { + return None; + } + let minor = pieces.next()?.parse().ok()?; + let nightly = version.contains("nightly") || version.ends_with("-dev"); + Some(Compiler { minor, nightly }) +} diff --git a/third_party/rust/syn/src/attr.rs b/third_party/rust/syn/src/attr.rs new file mode 100644 index 0000000000..bace94f43c --- /dev/null +++ b/third_party/rust/syn/src/attr.rs @@ -0,0 +1,662 @@ +use super::*; +use crate::punctuated::Punctuated; +use proc_macro2::TokenStream; +use std::iter; +use std::slice; + +#[cfg(feature = "parsing")] +use crate::parse::{Parse, ParseBuffer, ParseStream, Parser, Result}; +#[cfg(feature = "parsing")] +use crate::punctuated::Pair; + +ast_struct! { + /// An attribute like `#[repr(transparent)]`. + /// + /// *This type is available only if Syn is built with the `"derive"` or `"full"` + /// feature.* + /// + /// <br> + /// + /// # Syntax + /// + /// Rust has six types of attributes. + /// + /// - Outer attributes like `#[repr(transparent)]`. These appear outside or + /// in front of the item they describe. + /// - Inner attributes like `#![feature(proc_macro)]`. These appear inside + /// of the item they describe, usually a module. + /// - Outer doc comments like `/// # Example`. + /// - Inner doc comments like `//! Please file an issue`. + /// - Outer block comments `/** # Example */`. + /// - Inner block comments `/*! Please file an issue */`. 
+ /// + /// The `style` field of type `AttrStyle` distinguishes whether an attribute + /// is outer or inner. Doc comments and block comments are promoted to + /// attributes, as this is how they are processed by the compiler and by + /// `macro_rules!` macros. + /// + /// The `path` field gives the possibly colon-delimited path against which + /// the attribute is resolved. It is equal to `"doc"` for desugared doc + /// comments. The `tokens` field contains the rest of the attribute body as + /// tokens. + /// + /// ```text + /// #[derive(Copy)] #[crate::precondition x < 5] + /// ^^^^^^~~~~~~ ^^^^^^^^^^^^^^^^^^^ ~~~~~ + /// path tokens path tokens + /// ``` + /// + /// <br> + /// + /// # Parsing from tokens to Attribute + /// + /// This type does not implement the [`Parse`] trait and thus cannot be + /// parsed directly by [`ParseStream::parse`]. Instead use + /// [`ParseStream::call`] with one of the two parser functions + /// [`Attribute::parse_outer`] or [`Attribute::parse_inner`] depending on + /// which you intend to parse. + /// + /// [`Parse`]: parse::Parse + /// [`ParseStream::parse`]: parse::ParseBuffer::parse + /// [`ParseStream::call`]: parse::ParseBuffer::call + /// + /// ``` + /// use syn::{Attribute, Ident, Result, Token}; + /// use syn::parse::{Parse, ParseStream}; + /// + /// // Parses a unit struct with attributes. + /// // + /// // #[path = "s.tmpl"] + /// // struct S; + /// struct UnitStruct { + /// attrs: Vec<Attribute>, + /// struct_token: Token![struct], + /// name: Ident, + /// semi_token: Token![;], + /// } + /// + /// impl Parse for UnitStruct { + /// fn parse(input: ParseStream) -> Result<Self> { + /// Ok(UnitStruct { + /// attrs: input.call(Attribute::parse_outer)?, + /// struct_token: input.parse()?, + /// name: input.parse()?, + /// semi_token: input.parse()?, + /// }) + /// } + /// } + /// ``` + /// + /// <p><br></p> + /// + /// # Parsing from Attribute to structured arguments + /// + /// The grammar of attributes in Rust is very flexible, which makes the + /// syntax tree not that useful on its own. In particular, arguments of the + /// attribute are held in an arbitrary `tokens: TokenStream`. Macros are + /// expected to check the `path` of the attribute, decide whether they + /// recognize it, and then parse the remaining tokens according to whatever + /// grammar they wish to require for that kind of attribute. + /// + /// If the attribute you are parsing is expected to conform to the + /// conventional structured form of attribute, use [`parse_meta()`] to + /// obtain that structured representation. If the attribute follows some + /// other grammar of its own, use [`parse_args()`] to parse that into the + /// expected data structure. + /// + /// [`parse_meta()`]: Attribute::parse_meta + /// [`parse_args()`]: Attribute::parse_args + /// + /// <p><br></p> + /// + /// # Doc comments + /// + /// The compiler transforms doc comments, such as `/// comment` and `/*! + /// comment */`, into attributes before macros are expanded. Each comment is + /// expanded into an attribute of the form `#[doc = r"comment"]`. + /// + /// As an example, the following `mod` items are expanded identically: + /// + /// ``` + /// # use syn::{ItemMod, parse_quote}; + /// let doc: ItemMod = parse_quote! { + /// /// Single line doc comments + /// /// We write so many! + /// /** + /// * Multi-line comments... + /// * May span many lines + /// */ + /// mod example { + /// //! Of course, they can be inner too + /// /*! 
And fit in a single line */ + /// } + /// }; + /// let attr: ItemMod = parse_quote! { + /// #[doc = r" Single line doc comments"] + /// #[doc = r" We write so many!"] + /// #[doc = r" + /// * Multi-line comments... + /// * May span many lines + /// "] + /// mod example { + /// #![doc = r" Of course, they can be inner too"] + /// #![doc = r" And fit in a single line "] + /// } + /// }; + /// assert_eq!(doc, attr); + /// ``` + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct Attribute { + pub pound_token: Token![#], + pub style: AttrStyle, + pub bracket_token: token::Bracket, + pub path: Path, + pub tokens: TokenStream, + } +} + +impl Attribute { + /// Parses the content of the attribute, consisting of the path and tokens, + /// as a [`Meta`] if possible. + /// + /// *This function is available only if Syn is built with the `"parsing"` + /// feature.* + #[cfg(feature = "parsing")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + pub fn parse_meta(&self) -> Result<Meta> { + fn clone_ident_segment(segment: &PathSegment) -> PathSegment { + PathSegment { + ident: segment.ident.clone(), + arguments: PathArguments::None, + } + } + + let path = Path { + leading_colon: self + .path + .leading_colon + .as_ref() + .map(|colon| Token![::](colon.spans)), + segments: self + .path + .segments + .pairs() + .map(|pair| match pair { + Pair::Punctuated(seg, punct) => { + Pair::Punctuated(clone_ident_segment(seg), Token![::](punct.spans)) + } + Pair::End(seg) => Pair::End(clone_ident_segment(seg)), + }) + .collect(), + }; + + let parser = |input: ParseStream| parsing::parse_meta_after_path(path, input); + parse::Parser::parse2(parser, self.tokens.clone()) + } + + /// Parse the arguments to the attribute as a syntax tree. + /// + /// This is similar to `syn::parse2::<T>(attr.tokens)` except that: + /// + /// - the surrounding delimiters are *not* included in the input to the + /// parser; and + /// - the error message has a more useful span when `tokens` is empty. + /// + /// ```text + /// #[my_attr(value < 5)] + /// ^^^^^^^^^ what gets parsed + /// ``` + /// + /// *This function is available only if Syn is built with the `"parsing"` + /// feature.* + #[cfg(feature = "parsing")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + pub fn parse_args<T: Parse>(&self) -> Result<T> { + self.parse_args_with(T::parse) + } + + /// Parse the arguments to the attribute using the given parser. + /// + /// *This function is available only if Syn is built with the `"parsing"` + /// feature.* + #[cfg(feature = "parsing")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + pub fn parse_args_with<F: Parser>(&self, parser: F) -> Result<F::Output> { + let parser = |input: ParseStream| { + let args = enter_args(self, input)?; + parse::parse_stream(parser, &args) + }; + parser.parse2(self.tokens.clone()) + } + + /// Parses zero or more outer attributes from the stream. + /// + /// *This function is available only if Syn is built with the `"parsing"` + /// feature.* + #[cfg(feature = "parsing")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + pub fn parse_outer(input: ParseStream) -> Result<Vec<Self>> { + let mut attrs = Vec::new(); + while input.peek(Token![#]) { + attrs.push(input.call(parsing::single_parse_outer)?); + } + Ok(attrs) + } + + /// Parses zero or more inner attributes from the stream. 
+ /// + /// *This function is available only if Syn is built with the `"parsing"` + /// feature.* + #[cfg(feature = "parsing")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + pub fn parse_inner(input: ParseStream) -> Result<Vec<Self>> { + let mut attrs = Vec::new(); + parsing::parse_inner(input, &mut attrs)?; + Ok(attrs) + } +} + +#[cfg(feature = "parsing")] +fn expected_parentheses(attr: &Attribute) -> String { + let style = match attr.style { + AttrStyle::Outer => "#", + AttrStyle::Inner(_) => "#!", + }; + + let mut path = String::new(); + for segment in &attr.path.segments { + if !path.is_empty() || attr.path.leading_colon.is_some() { + path += "::"; + } + path += &segment.ident.to_string(); + } + + format!("{}[{}(...)]", style, path) +} + +#[cfg(feature = "parsing")] +fn enter_args<'a>(attr: &Attribute, input: ParseStream<'a>) -> Result<ParseBuffer<'a>> { + if input.is_empty() { + let expected = expected_parentheses(attr); + let msg = format!("expected attribute arguments in parentheses: {}", expected); + return Err(crate::error::new2( + attr.pound_token.span, + attr.bracket_token.span, + msg, + )); + } else if input.peek(Token![=]) { + let expected = expected_parentheses(attr); + let msg = format!("expected parentheses: {}", expected); + return Err(input.error(msg)); + }; + + let content; + if input.peek(token::Paren) { + parenthesized!(content in input); + } else if input.peek(token::Bracket) { + bracketed!(content in input); + } else if input.peek(token::Brace) { + braced!(content in input); + } else { + return Err(input.error("unexpected token in attribute arguments")); + } + + if input.is_empty() { + Ok(content) + } else { + Err(input.error("unexpected token in attribute arguments")) + } +} + +ast_enum! { + /// Distinguishes between attributes that decorate an item and attributes + /// that are contained within an item. + /// + /// *This type is available only if Syn is built with the `"derive"` or `"full"` + /// feature.* + /// + /// # Outer attributes + /// + /// - `#[repr(transparent)]` + /// - `/// # Example` + /// - `/** Please file an issue */` + /// + /// # Inner attributes + /// + /// - `#![feature(proc_macro)]` + /// - `//! # Example` + /// - `/*! Please file an issue */` + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub enum AttrStyle { + Outer, + Inner(Token![!]), + } +} + +ast_enum_of_structs! { + /// Content of a compile-time structured attribute. + /// + /// *This type is available only if Syn is built with the `"derive"` or `"full"` + /// feature.* + /// + /// ## Path + /// + /// A meta path is like the `test` in `#[test]`. + /// + /// ## List + /// + /// A meta list is like the `derive(Copy)` in `#[derive(Copy)]`. + /// + /// ## NameValue + /// + /// A name-value meta is like the `path = "..."` in `#[path = + /// "sys/windows.rs"]`. + /// + /// # Syntax tree enum + /// + /// This type is a [syntax tree enum]. + /// + /// [syntax tree enum]: Expr#syntax-tree-enums + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub enum Meta { + Path(Path), + + /// A structured list within an attribute, like `derive(Copy, Clone)`. + List(MetaList), + + /// A name-value pair within an attribute, like `feature = "nightly"`. + NameValue(MetaNameValue), + } +} + +ast_struct! { + /// A structured list within an attribute, like `derive(Copy, Clone)`. 
+ /// + /// *This type is available only if Syn is built with the `"derive"` or + /// `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct MetaList { + pub path: Path, + pub paren_token: token::Paren, + pub nested: Punctuated<NestedMeta, Token![,]>, + } +} + +ast_struct! { + /// A name-value pair within an attribute, like `feature = "nightly"`. + /// + /// *This type is available only if Syn is built with the `"derive"` or + /// `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct MetaNameValue { + pub path: Path, + pub eq_token: Token![=], + pub lit: Lit, + } +} + +impl Meta { + /// Returns the identifier that begins this structured meta item. + /// + /// For example this would return the `test` in `#[test]`, the `derive` in + /// `#[derive(Copy)]`, and the `path` in `#[path = "sys/windows.rs"]`. + pub fn path(&self) -> &Path { + match self { + Meta::Path(path) => path, + Meta::List(meta) => &meta.path, + Meta::NameValue(meta) => &meta.path, + } + } +} + +ast_enum_of_structs! { + /// Element of a compile-time attribute list. + /// + /// *This type is available only if Syn is built with the `"derive"` or `"full"` + /// feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub enum NestedMeta { + /// A structured meta item, like the `Copy` in `#[derive(Copy)]` which + /// would be a nested `Meta::Path`. + Meta(Meta), + + /// A Rust literal, like the `"new_name"` in `#[rename("new_name")]`. + Lit(Lit), + } +} + +/// Conventional argument type associated with an invocation of an attribute +/// macro. +/// +/// For example if we are developing an attribute macro that is intended to be +/// invoked on function items as follows: +/// +/// ``` +/// # const IGNORE: &str = stringify! { +/// #[my_attribute(path = "/v1/refresh")] +/// # }; +/// pub fn refresh() { +/// /* ... */ +/// } +/// ``` +/// +/// The implementation of this macro would want to parse its attribute arguments +/// as type `AttributeArgs`. +/// +/// ``` +/// # extern crate proc_macro; +/// # +/// use proc_macro::TokenStream; +/// use syn::{parse_macro_input, AttributeArgs, ItemFn}; +/// +/// # const IGNORE: &str = stringify! { +/// #[proc_macro_attribute] +/// # }; +/// pub fn my_attribute(args: TokenStream, input: TokenStream) -> TokenStream { +/// let args = parse_macro_input!(args as AttributeArgs); +/// let input = parse_macro_input!(input as ItemFn); +/// +/// /* ... 
*/ +/// # "".parse().unwrap() +/// } +/// ``` +#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] +pub type AttributeArgs = Vec<NestedMeta>; + +pub trait FilterAttrs<'a> { + type Ret: Iterator<Item = &'a Attribute>; + + fn outer(self) -> Self::Ret; + fn inner(self) -> Self::Ret; +} + +impl<'a> FilterAttrs<'a> for &'a [Attribute] { + type Ret = iter::Filter<slice::Iter<'a, Attribute>, fn(&&Attribute) -> bool>; + + fn outer(self) -> Self::Ret { + fn is_outer(attr: &&Attribute) -> bool { + match attr.style { + AttrStyle::Outer => true, + AttrStyle::Inner(_) => false, + } + } + self.iter().filter(is_outer) + } + + fn inner(self) -> Self::Ret { + fn is_inner(attr: &&Attribute) -> bool { + match attr.style { + AttrStyle::Inner(_) => true, + AttrStyle::Outer => false, + } + } + self.iter().filter(is_inner) + } +} + +#[cfg(feature = "parsing")] +pub mod parsing { + use super::*; + use crate::ext::IdentExt; + use crate::parse::{Parse, ParseStream, Result}; + + pub fn parse_inner(input: ParseStream, attrs: &mut Vec<Attribute>) -> Result<()> { + while input.peek(Token![#]) && input.peek2(Token![!]) { + attrs.push(input.call(parsing::single_parse_inner)?); + } + Ok(()) + } + + pub fn single_parse_inner(input: ParseStream) -> Result<Attribute> { + let content; + Ok(Attribute { + pound_token: input.parse()?, + style: AttrStyle::Inner(input.parse()?), + bracket_token: bracketed!(content in input), + path: content.call(Path::parse_mod_style)?, + tokens: content.parse()?, + }) + } + + pub fn single_parse_outer(input: ParseStream) -> Result<Attribute> { + let content; + Ok(Attribute { + pound_token: input.parse()?, + style: AttrStyle::Outer, + bracket_token: bracketed!(content in input), + path: content.call(Path::parse_mod_style)?, + tokens: content.parse()?, + }) + } + + // Like Path::parse_mod_style but accepts keywords in the path. 
+ fn parse_meta_path(input: ParseStream) -> Result<Path> { + Ok(Path { + leading_colon: input.parse()?, + segments: { + let mut segments = Punctuated::new(); + while input.peek(Ident::peek_any) { + let ident = Ident::parse_any(input)?; + segments.push_value(PathSegment::from(ident)); + if !input.peek(Token![::]) { + break; + } + let punct = input.parse()?; + segments.push_punct(punct); + } + if segments.is_empty() { + return Err(input.error("expected path")); + } else if segments.trailing_punct() { + return Err(input.error("expected path segment")); + } + segments + }, + }) + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for Meta { + fn parse(input: ParseStream) -> Result<Self> { + let path = input.call(parse_meta_path)?; + parse_meta_after_path(path, input) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for MetaList { + fn parse(input: ParseStream) -> Result<Self> { + let path = input.call(parse_meta_path)?; + parse_meta_list_after_path(path, input) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for MetaNameValue { + fn parse(input: ParseStream) -> Result<Self> { + let path = input.call(parse_meta_path)?; + parse_meta_name_value_after_path(path, input) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for NestedMeta { + fn parse(input: ParseStream) -> Result<Self> { + if input.peek(Lit) && !(input.peek(LitBool) && input.peek2(Token![=])) { + input.parse().map(NestedMeta::Lit) + } else if input.peek(Ident::peek_any) + || input.peek(Token![::]) && input.peek3(Ident::peek_any) + { + input.parse().map(NestedMeta::Meta) + } else { + Err(input.error("expected identifier or literal")) + } + } + } + + pub fn parse_meta_after_path(path: Path, input: ParseStream) -> Result<Meta> { + if input.peek(token::Paren) { + parse_meta_list_after_path(path, input).map(Meta::List) + } else if input.peek(Token![=]) { + parse_meta_name_value_after_path(path, input).map(Meta::NameValue) + } else { + Ok(Meta::Path(path)) + } + } + + fn parse_meta_list_after_path(path: Path, input: ParseStream) -> Result<MetaList> { + let content; + Ok(MetaList { + path, + paren_token: parenthesized!(content in input), + nested: content.parse_terminated(NestedMeta::parse)?, + }) + } + + fn parse_meta_name_value_after_path(path: Path, input: ParseStream) -> Result<MetaNameValue> { + Ok(MetaNameValue { + path, + eq_token: input.parse()?, + lit: input.parse()?, + }) + } +} + +#[cfg(feature = "printing")] +mod printing { + use super::*; + use proc_macro2::TokenStream; + use quote::ToTokens; + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for Attribute { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.pound_token.to_tokens(tokens); + if let AttrStyle::Inner(b) = &self.style { + b.to_tokens(tokens); + } + self.bracket_token.surround(tokens, |tokens| { + self.path.to_tokens(tokens); + self.tokens.to_tokens(tokens); + }); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for MetaList { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.path.to_tokens(tokens); + self.paren_token.surround(tokens, |tokens| { + self.nested.to_tokens(tokens); + }); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for MetaNameValue { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.path.to_tokens(tokens); + self.eq_token.to_tokens(tokens); + self.lit.to_tokens(tokens); + } + } +} diff --git a/third_party/rust/syn/src/await.rs 
b/third_party/rust/syn/src/await.rs new file mode 100644 index 0000000000..038c6a5d12 --- /dev/null +++ b/third_party/rust/syn/src/await.rs @@ -0,0 +1,2 @@ +// See include!("await.rs") in token.rs. +export_token_macro! {[await]} diff --git a/third_party/rust/syn/src/bigint.rs b/third_party/rust/syn/src/bigint.rs new file mode 100644 index 0000000000..5397d6beee --- /dev/null +++ b/third_party/rust/syn/src/bigint.rs @@ -0,0 +1,66 @@ +use std::ops::{AddAssign, MulAssign}; + +// For implementing base10_digits() accessor on LitInt. +pub struct BigInt { + digits: Vec<u8>, +} + +impl BigInt { + pub fn new() -> Self { + BigInt { digits: Vec::new() } + } + + pub fn to_string(&self) -> String { + let mut repr = String::with_capacity(self.digits.len()); + + let mut has_nonzero = false; + for digit in self.digits.iter().rev() { + has_nonzero |= *digit != 0; + if has_nonzero { + repr.push((*digit + b'0') as char); + } + } + + if repr.is_empty() { + repr.push('0'); + } + + repr + } + + fn reserve_two_digits(&mut self) { + let len = self.digits.len(); + let desired = + len + !self.digits.ends_with(&[0, 0]) as usize + !self.digits.ends_with(&[0]) as usize; + self.digits.resize(desired, 0); + } +} + +impl AddAssign<u8> for BigInt { + // Assumes increment <16. + fn add_assign(&mut self, mut increment: u8) { + self.reserve_two_digits(); + + let mut i = 0; + while increment > 0 { + let sum = self.digits[i] + increment; + self.digits[i] = sum % 10; + increment = sum / 10; + i += 1; + } + } +} + +impl MulAssign<u8> for BigInt { + // Assumes base <=16. + fn mul_assign(&mut self, base: u8) { + self.reserve_two_digits(); + + let mut carry = 0; + for digit in &mut self.digits { + let prod = *digit * base + carry; + *digit = prod % 10; + carry = prod / 10; + } + } +} diff --git a/third_party/rust/syn/src/buffer.rs b/third_party/rust/syn/src/buffer.rs new file mode 100644 index 0000000000..0d5cf30d57 --- /dev/null +++ b/third_party/rust/syn/src/buffer.rs @@ -0,0 +1,398 @@ +//! A stably addressed token buffer supporting efficient traversal based on a +//! cheaply copyable cursor. +//! +//! *This module is available only if Syn is built with the `"parsing"` feature.* + +// This module is heavily commented as it contains most of the unsafe code in +// Syn, and caution should be used when editing it. The public-facing interface +// is 100% safe but the implementation is fragile internally. + +#[cfg(all( + not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))), + feature = "proc-macro" +))] +use crate::proc_macro as pm; +use crate::Lifetime; +use proc_macro2::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree}; +use std::cmp::Ordering; +use std::marker::PhantomData; + +/// Internal type which is used instead of `TokenTree` to represent a token tree +/// within a `TokenBuffer`. +enum Entry { + // Mimicking types from proc-macro. + // Group entries contain the offset to the matching End entry. + Group(Group, usize), + Ident(Ident), + Punct(Punct), + Literal(Literal), + // End entries contain the offset (negative) to the start of the buffer. + End(isize), +} + +/// A buffer that can be efficiently traversed multiple times, unlike +/// `TokenStream` which requires a deep copy in order to traverse more than +/// once. 
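+///
+/// A minimal usage sketch (assuming `stream` is a `proc_macro2::TokenStream`
+/// already in hand): build the buffer once, then walk it with cheaply copied
+/// cursors.
+///
+/// ```ignore
+/// let buffer = TokenBuffer::new2(stream);
+/// let mut cursor = buffer.begin();
+/// while let Some((tt, next)) = cursor.token_tree() {
+///     // inspect `tt`, then continue from the cursor returned alongside it
+///     cursor = next;
+/// }
+/// ```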
+/// +/// *This type is available only if Syn is built with the `"parsing"` feature.* +pub struct TokenBuffer { + // NOTE: Do not implement clone on this - while the current design could be + // cloned, other designs which could be desirable may not be cloneable. + entries: Box<[Entry]>, +} + +impl TokenBuffer { + fn recursive_new(entries: &mut Vec<Entry>, stream: TokenStream) { + for tt in stream { + match tt { + TokenTree::Ident(ident) => entries.push(Entry::Ident(ident)), + TokenTree::Punct(punct) => entries.push(Entry::Punct(punct)), + TokenTree::Literal(literal) => entries.push(Entry::Literal(literal)), + TokenTree::Group(group) => { + let group_start_index = entries.len(); + entries.push(Entry::End(0)); // we replace this below + Self::recursive_new(entries, group.stream()); + let group_end_index = entries.len(); + entries.push(Entry::End(-(group_end_index as isize))); + let group_end_offset = group_end_index - group_start_index; + entries[group_start_index] = Entry::Group(group, group_end_offset); + } + } + } + } + + /// Creates a `TokenBuffer` containing all the tokens from the input + /// `proc_macro::TokenStream`. + /// + /// *This method is available only if Syn is built with both the `"parsing"` and + /// `"proc-macro"` features.* + #[cfg(all( + not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))), + feature = "proc-macro" + ))] + pub fn new(stream: pm::TokenStream) -> Self { + Self::new2(stream.into()) + } + + /// Creates a `TokenBuffer` containing all the tokens from the input + /// `proc_macro2::TokenStream`. + pub fn new2(stream: TokenStream) -> Self { + let mut entries = Vec::new(); + Self::recursive_new(&mut entries, stream); + entries.push(Entry::End(-(entries.len() as isize))); + Self { + entries: entries.into_boxed_slice(), + } + } + + /// Creates a cursor referencing the first token in the buffer and able to + /// traverse until the end of the buffer. + pub fn begin(&self) -> Cursor { + let ptr = self.entries.as_ptr(); + unsafe { Cursor::create(ptr, ptr.add(self.entries.len() - 1)) } + } +} + +/// A cheaply copyable cursor into a `TokenBuffer`. +/// +/// This cursor holds a shared reference into the immutable data which is used +/// internally to represent a `TokenStream`, and can be efficiently manipulated +/// and copied around. +/// +/// An empty `Cursor` can be created directly, or one may create a `TokenBuffer` +/// object and get a cursor to its first token with `begin()`. +/// +/// Two cursors are equal if they have the same location in the same input +/// stream, and have the same scope. +/// +/// *This type is available only if Syn is built with the `"parsing"` feature.* +pub struct Cursor<'a> { + // The current entry which the `Cursor` is pointing at. + ptr: *const Entry, + // This is the only `Entry::End` object which this cursor is allowed to + // point at. All other `End` objects are skipped over in `Cursor::create`. + scope: *const Entry, + // Cursor is covariant in 'a. This field ensures that our pointers are still + // valid. + marker: PhantomData<&'a Entry>, +} + +impl<'a> Cursor<'a> { + /// Creates a cursor referencing a static empty TokenStream. + pub fn empty() -> Self { + // It's safe in this situation for us to put an `Entry` object in global + // storage, despite it not actually being safe to send across threads + // (`Ident` is a reference into a thread-local table). This is because + // this entry never includes a `Ident` object. 
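+        // (The static below holds only `Entry::End(0)`, which contains no
+        // identifier or span data.)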
+ // + // This wrapper struct allows us to break the rules and put a `Sync` + // object in global storage. + struct UnsafeSyncEntry(Entry); + unsafe impl Sync for UnsafeSyncEntry {} + static EMPTY_ENTRY: UnsafeSyncEntry = UnsafeSyncEntry(Entry::End(0)); + + Cursor { + ptr: &EMPTY_ENTRY.0, + scope: &EMPTY_ENTRY.0, + marker: PhantomData, + } + } + + /// This create method intelligently exits non-explicitly-entered + /// `None`-delimited scopes when the cursor reaches the end of them, + /// allowing for them to be treated transparently. + unsafe fn create(mut ptr: *const Entry, scope: *const Entry) -> Self { + // NOTE: If we're looking at a `End`, we want to advance the cursor + // past it, unless `ptr == scope`, which means that we're at the edge of + // our cursor's scope. We should only have `ptr != scope` at the exit + // from None-delimited groups entered with `ignore_none`. + while let Entry::End(_) = *ptr { + if ptr == scope { + break; + } + ptr = ptr.add(1); + } + + Cursor { + ptr, + scope, + marker: PhantomData, + } + } + + /// Get the current entry. + fn entry(self) -> &'a Entry { + unsafe { &*self.ptr } + } + + /// Bump the cursor to point at the next token after the current one. This + /// is undefined behavior if the cursor is currently looking at an + /// `Entry::End`. + /// + /// If the cursor is looking at an `Entry::Group`, the bumped cursor will + /// point at the first token in the group (with the same scope end). + unsafe fn bump_ignore_group(self) -> Cursor<'a> { + Cursor::create(self.ptr.offset(1), self.scope) + } + + /// While the cursor is looking at a `None`-delimited group, move it to look + /// at the first token inside instead. If the group is empty, this will move + /// the cursor past the `None`-delimited group. + /// + /// WARNING: This mutates its argument. + fn ignore_none(&mut self) { + while let Entry::Group(group, _) = self.entry() { + if group.delimiter() == Delimiter::None { + unsafe { *self = self.bump_ignore_group() }; + } else { + break; + } + } + } + + /// Checks whether the cursor is currently pointing at the end of its valid + /// scope. + pub fn eof(self) -> bool { + // We're at eof if we're at the end of our scope. + self.ptr == self.scope + } + + /// If the cursor is pointing at a `Group` with the given delimiter, returns + /// a cursor into that group and one pointing to the next `TokenTree`. + pub fn group(mut self, delim: Delimiter) -> Option<(Cursor<'a>, Span, Cursor<'a>)> { + // If we're not trying to enter a none-delimited group, we want to + // ignore them. We have to make sure to _not_ ignore them when we want + // to enter them, of course. For obvious reasons. + if delim != Delimiter::None { + self.ignore_none(); + } + + if let Entry::Group(group, end_offset) = self.entry() { + if group.delimiter() == delim { + let end_of_group = unsafe { self.ptr.add(*end_offset) }; + let inside_of_group = unsafe { Cursor::create(self.ptr.add(1), end_of_group) }; + let after_group = unsafe { Cursor::create(end_of_group, self.scope) }; + return Some((inside_of_group, group.span(), after_group)); + } + } + + None + } + + /// If the cursor is pointing at a `Ident`, returns it along with a cursor + /// pointing at the next `TokenTree`. + pub fn ident(mut self) -> Option<(Ident, Cursor<'a>)> { + self.ignore_none(); + match self.entry() { + Entry::Ident(ident) => Some((ident.clone(), unsafe { self.bump_ignore_group() })), + _ => None, + } + } + + /// If the cursor is pointing at a `Punct`, returns it along with a cursor + /// pointing at the next `TokenTree`. 
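+    ///
+    /// An apostrophe punct is deliberately excluded here so that `'lifetime`
+    /// tokens can be recognized by the `lifetime` method instead.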
+ pub fn punct(mut self) -> Option<(Punct, Cursor<'a>)> { + self.ignore_none(); + match self.entry() { + Entry::Punct(punct) if punct.as_char() != '\'' => { + Some((punct.clone(), unsafe { self.bump_ignore_group() })) + } + _ => None, + } + } + + /// If the cursor is pointing at a `Literal`, return it along with a cursor + /// pointing at the next `TokenTree`. + pub fn literal(mut self) -> Option<(Literal, Cursor<'a>)> { + self.ignore_none(); + match self.entry() { + Entry::Literal(literal) => Some((literal.clone(), unsafe { self.bump_ignore_group() })), + _ => None, + } + } + + /// If the cursor is pointing at a `Lifetime`, returns it along with a + /// cursor pointing at the next `TokenTree`. + pub fn lifetime(mut self) -> Option<(Lifetime, Cursor<'a>)> { + self.ignore_none(); + match self.entry() { + Entry::Punct(punct) if punct.as_char() == '\'' && punct.spacing() == Spacing::Joint => { + let next = unsafe { self.bump_ignore_group() }; + let (ident, rest) = next.ident()?; + let lifetime = Lifetime { + apostrophe: punct.span(), + ident, + }; + Some((lifetime, rest)) + } + _ => None, + } + } + + /// Copies all remaining tokens visible from this cursor into a + /// `TokenStream`. + pub fn token_stream(self) -> TokenStream { + let mut tts = Vec::new(); + let mut cursor = self; + while let Some((tt, rest)) = cursor.token_tree() { + tts.push(tt); + cursor = rest; + } + tts.into_iter().collect() + } + + /// If the cursor is pointing at a `TokenTree`, returns it along with a + /// cursor pointing at the next `TokenTree`. + /// + /// Returns `None` if the cursor has reached the end of its stream. + /// + /// This method does not treat `None`-delimited groups as transparent, and + /// will return a `Group(None, ..)` if the cursor is looking at one. + pub fn token_tree(self) -> Option<(TokenTree, Cursor<'a>)> { + let (tree, len) = match self.entry() { + Entry::Group(group, end_offset) => (group.clone().into(), *end_offset), + Entry::Literal(literal) => (literal.clone().into(), 1), + Entry::Ident(ident) => (ident.clone().into(), 1), + Entry::Punct(punct) => (punct.clone().into(), 1), + Entry::End(_) => return None, + }; + + let rest = unsafe { Cursor::create(self.ptr.add(len), self.scope) }; + Some((tree, rest)) + } + + /// Returns the `Span` of the current token, or `Span::call_site()` if this + /// cursor points to eof. + pub fn span(self) -> Span { + match self.entry() { + Entry::Group(group, _) => group.span(), + Entry::Literal(literal) => literal.span(), + Entry::Ident(ident) => ident.span(), + Entry::Punct(punct) => punct.span(), + Entry::End(_) => Span::call_site(), + } + } + + /// Skip over the next token without cloning it. Returns `None` if this + /// cursor points to eof. + /// + /// This method treats `'lifetimes` as a single token. + pub(crate) fn skip(self) -> Option<Cursor<'a>> { + let len = match self.entry() { + Entry::End(_) => return None, + + // Treat lifetimes as a single tt for the purposes of 'skip'. 
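+            // A lifetime such as `'a` is stored as a Joint `'` punct entry
+            // followed by an ident entry, so skipping it consumes two entries.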
+ Entry::Punct(punct) if punct.as_char() == '\'' && punct.spacing() == Spacing::Joint => { + match unsafe { &*self.ptr.add(1) } { + Entry::Ident(_) => 2, + _ => 1, + } + } + + Entry::Group(_, end_offset) => *end_offset, + _ => 1, + }; + + Some(unsafe { Cursor::create(self.ptr.add(len), self.scope) }) + } +} + +impl<'a> Copy for Cursor<'a> {} + +impl<'a> Clone for Cursor<'a> { + fn clone(&self) -> Self { + *self + } +} + +impl<'a> Eq for Cursor<'a> {} + +impl<'a> PartialEq for Cursor<'a> { + fn eq(&self, other: &Self) -> bool { + self.ptr == other.ptr + } +} + +impl<'a> PartialOrd for Cursor<'a> { + fn partial_cmp(&self, other: &Self) -> Option<Ordering> { + if same_buffer(*self, *other) { + Some(self.ptr.cmp(&other.ptr)) + } else { + None + } + } +} + +pub(crate) fn same_scope(a: Cursor, b: Cursor) -> bool { + a.scope == b.scope +} + +pub(crate) fn same_buffer(a: Cursor, b: Cursor) -> bool { + unsafe { + match (&*a.scope, &*b.scope) { + (Entry::End(a_offset), Entry::End(b_offset)) => { + a.scope.offset(*a_offset) == b.scope.offset(*b_offset) + } + _ => unreachable!(), + } + } +} + +#[cfg(any(feature = "full", feature = "derive"))] +pub(crate) fn cmp_assuming_same_buffer(a: Cursor, b: Cursor) -> Ordering { + a.ptr.cmp(&b.ptr) +} + +pub(crate) fn open_span_of_group(cursor: Cursor) -> Span { + match cursor.entry() { + Entry::Group(group, _) => group.span_open(), + _ => cursor.span(), + } +} + +pub(crate) fn close_span_of_group(cursor: Cursor) -> Span { + match cursor.entry() { + Entry::Group(group, _) => group.span_close(), + _ => cursor.span(), + } +} diff --git a/third_party/rust/syn/src/custom_keyword.rs b/third_party/rust/syn/src/custom_keyword.rs new file mode 100644 index 0000000000..a3ec9d4cb7 --- /dev/null +++ b/third_party/rust/syn/src/custom_keyword.rs @@ -0,0 +1,253 @@ +/// Define a type that supports parsing and printing a given identifier as if it +/// were a keyword. +/// +/// # Usage +/// +/// As a convention, it is recommended that this macro be invoked within a +/// module called `kw` or `keyword` and that the resulting parser be invoked +/// with a `kw::` or `keyword::` prefix. +/// +/// ``` +/// mod kw { +/// syn::custom_keyword!(whatever); +/// } +/// ``` +/// +/// The generated syntax tree node supports the following operations just like +/// any built-in keyword token. +/// +/// - [Peeking] — `input.peek(kw::whatever)` +/// +/// - [Parsing] — `input.parse::<kw::whatever>()?` +/// +/// - [Printing] — `quote!( ... #whatever_token ... )` +/// +/// - Construction from a [`Span`] — `let whatever_token = kw::whatever(sp)` +/// +/// - Field access to its span — `let sp = whatever_token.span` +/// +/// [Peeking]: crate::parse::ParseBuffer::peek +/// [Parsing]: crate::parse::ParseBuffer::parse +/// [Printing]: quote::ToTokens +/// [`Span`]: proc_macro2::Span +/// +/// # Example +/// +/// This example parses input that looks like `bool = true` or `str = "value"`. +/// The key must be either the identifier `bool` or the identifier `str`. If +/// `bool`, the value may be either `true` or `false`. If `str`, the value may +/// be any string literal. +/// +/// The symbols `bool` and `str` are not reserved keywords in Rust so these are +/// not considered keywords in the `syn::token` module. Like any other +/// identifier that is not a keyword, these can be declared as custom keywords +/// by crates that need to use them as such. 
+/// +/// ``` +/// use syn::{LitBool, LitStr, Result, Token}; +/// use syn::parse::{Parse, ParseStream}; +/// +/// mod kw { +/// syn::custom_keyword!(bool); +/// syn::custom_keyword!(str); +/// } +/// +/// enum Argument { +/// Bool { +/// bool_token: kw::bool, +/// eq_token: Token![=], +/// value: LitBool, +/// }, +/// Str { +/// str_token: kw::str, +/// eq_token: Token![=], +/// value: LitStr, +/// }, +/// } +/// +/// impl Parse for Argument { +/// fn parse(input: ParseStream) -> Result<Self> { +/// let lookahead = input.lookahead1(); +/// if lookahead.peek(kw::bool) { +/// Ok(Argument::Bool { +/// bool_token: input.parse::<kw::bool>()?, +/// eq_token: input.parse()?, +/// value: input.parse()?, +/// }) +/// } else if lookahead.peek(kw::str) { +/// Ok(Argument::Str { +/// str_token: input.parse::<kw::str>()?, +/// eq_token: input.parse()?, +/// value: input.parse()?, +/// }) +/// } else { +/// Err(lookahead.error()) +/// } +/// } +/// } +/// ``` +#[macro_export] +macro_rules! custom_keyword { + ($ident:ident) => { + #[allow(non_camel_case_types)] + pub struct $ident { + pub span: $crate::__private::Span, + } + + #[doc(hidden)] + #[allow(dead_code, non_snake_case)] + pub fn $ident<__S: $crate::__private::IntoSpans<[$crate::__private::Span; 1]>>( + span: __S, + ) -> $ident { + $ident { + span: $crate::__private::IntoSpans::into_spans(span)[0], + } + } + + impl $crate::__private::Default for $ident { + fn default() -> Self { + $ident { + span: $crate::__private::Span::call_site(), + } + } + } + + $crate::impl_parse_for_custom_keyword!($ident); + $crate::impl_to_tokens_for_custom_keyword!($ident); + $crate::impl_clone_for_custom_keyword!($ident); + $crate::impl_extra_traits_for_custom_keyword!($ident); + }; +} + +// Not public API. +#[cfg(feature = "parsing")] +#[doc(hidden)] +#[macro_export] +macro_rules! impl_parse_for_custom_keyword { + ($ident:ident) => { + // For peek. + impl $crate::token::CustomToken for $ident { + fn peek(cursor: $crate::buffer::Cursor) -> $crate::__private::bool { + if let $crate::__private::Some((ident, _rest)) = cursor.ident() { + ident == stringify!($ident) + } else { + false + } + } + + fn display() -> &'static $crate::__private::str { + concat!("`", stringify!($ident), "`") + } + } + + impl $crate::parse::Parse for $ident { + fn parse(input: $crate::parse::ParseStream) -> $crate::parse::Result<$ident> { + input.step(|cursor| { + if let $crate::__private::Some((ident, rest)) = cursor.ident() { + if ident == stringify!($ident) { + return $crate::__private::Ok(($ident { span: ident.span() }, rest)); + } + } + $crate::__private::Err(cursor.error(concat!( + "expected `", + stringify!($ident), + "`" + ))) + }) + } + } + }; +} + +// Not public API. +#[cfg(not(feature = "parsing"))] +#[doc(hidden)] +#[macro_export] +macro_rules! impl_parse_for_custom_keyword { + ($ident:ident) => {}; +} + +// Not public API. +#[cfg(feature = "printing")] +#[doc(hidden)] +#[macro_export] +macro_rules! impl_to_tokens_for_custom_keyword { + ($ident:ident) => { + impl $crate::__private::ToTokens for $ident { + fn to_tokens(&self, tokens: &mut $crate::__private::TokenStream2) { + let ident = $crate::Ident::new(stringify!($ident), self.span); + $crate::__private::TokenStreamExt::append(tokens, ident); + } + } + }; +} + +// Not public API. +#[cfg(not(feature = "printing"))] +#[doc(hidden)] +#[macro_export] +macro_rules! impl_to_tokens_for_custom_keyword { + ($ident:ident) => {}; +} + +// Not public API. +#[cfg(feature = "clone-impls")] +#[doc(hidden)] +#[macro_export] +macro_rules! 
impl_clone_for_custom_keyword { + ($ident:ident) => { + impl $crate::__private::Copy for $ident {} + + #[allow(clippy::expl_impl_clone_on_copy)] + impl $crate::__private::Clone for $ident { + fn clone(&self) -> Self { + *self + } + } + }; +} + +// Not public API. +#[cfg(not(feature = "clone-impls"))] +#[doc(hidden)] +#[macro_export] +macro_rules! impl_clone_for_custom_keyword { + ($ident:ident) => {}; +} + +// Not public API. +#[cfg(feature = "extra-traits")] +#[doc(hidden)] +#[macro_export] +macro_rules! impl_extra_traits_for_custom_keyword { + ($ident:ident) => { + impl $crate::__private::Debug for $ident { + fn fmt(&self, f: &mut $crate::__private::Formatter) -> $crate::__private::fmt::Result { + $crate::__private::Formatter::write_str( + f, + concat!("Keyword [", stringify!($ident), "]"), + ) + } + } + + impl $crate::__private::Eq for $ident {} + + impl $crate::__private::PartialEq for $ident { + fn eq(&self, _other: &Self) -> $crate::__private::bool { + true + } + } + + impl $crate::__private::Hash for $ident { + fn hash<__H: $crate::__private::Hasher>(&self, _state: &mut __H) {} + } + }; +} + +// Not public API. +#[cfg(not(feature = "extra-traits"))] +#[doc(hidden)] +#[macro_export] +macro_rules! impl_extra_traits_for_custom_keyword { + ($ident:ident) => {}; +} diff --git a/third_party/rust/syn/src/custom_punctuation.rs b/third_party/rust/syn/src/custom_punctuation.rs new file mode 100644 index 0000000000..118a8453da --- /dev/null +++ b/third_party/rust/syn/src/custom_punctuation.rs @@ -0,0 +1,300 @@ +/// Define a type that supports parsing and printing a multi-character symbol +/// as if it were a punctuation token. +/// +/// # Usage +/// +/// ``` +/// syn::custom_punctuation!(LeftRightArrow, <=>); +/// ``` +/// +/// The generated syntax tree node supports the following operations just like +/// any built-in punctuation token. +/// +/// - [Peeking] — `input.peek(LeftRightArrow)` +/// +/// - [Parsing] — `input.parse::<LeftRightArrow>()?` +/// +/// - [Printing] — `quote!( ... #lrarrow ... )` +/// +/// - Construction from a [`Span`] — `let lrarrow = LeftRightArrow(sp)` +/// +/// - Construction from multiple [`Span`] — `let lrarrow = LeftRightArrow([sp, sp, sp])` +/// +/// - Field access to its spans — `let spans = lrarrow.spans` +/// +/// [Peeking]: crate::parse::ParseBuffer::peek +/// [Parsing]: crate::parse::ParseBuffer::parse +/// [Printing]: quote::ToTokens +/// [`Span`]: proc_macro2::Span +/// +/// # Example +/// +/// ``` +/// use proc_macro2::{TokenStream, TokenTree}; +/// use syn::parse::{Parse, ParseStream, Peek, Result}; +/// use syn::punctuated::Punctuated; +/// use syn::Expr; +/// +/// syn::custom_punctuation!(PathSeparator, </>); +/// +/// // expr </> expr </> expr ... 
+/// struct PathSegments { +/// segments: Punctuated<Expr, PathSeparator>, +/// } +/// +/// impl Parse for PathSegments { +/// fn parse(input: ParseStream) -> Result<Self> { +/// let mut segments = Punctuated::new(); +/// +/// let first = parse_until(input, PathSeparator)?; +/// segments.push_value(syn::parse2(first)?); +/// +/// while input.peek(PathSeparator) { +/// segments.push_punct(input.parse()?); +/// +/// let next = parse_until(input, PathSeparator)?; +/// segments.push_value(syn::parse2(next)?); +/// } +/// +/// Ok(PathSegments { segments }) +/// } +/// } +/// +/// fn parse_until<E: Peek>(input: ParseStream, end: E) -> Result<TokenStream> { +/// let mut tokens = TokenStream::new(); +/// while !input.is_empty() && !input.peek(end) { +/// let next: TokenTree = input.parse()?; +/// tokens.extend(Some(next)); +/// } +/// Ok(tokens) +/// } +/// +/// fn main() { +/// let input = r#" a::b </> c::d::e "#; +/// let _: PathSegments = syn::parse_str(input).unwrap(); +/// } +/// ``` +#[macro_export] +macro_rules! custom_punctuation { + ($ident:ident, $($tt:tt)+) => { + pub struct $ident { + pub spans: $crate::custom_punctuation_repr!($($tt)+), + } + + #[doc(hidden)] + #[allow(dead_code, non_snake_case)] + pub fn $ident<__S: $crate::__private::IntoSpans<$crate::custom_punctuation_repr!($($tt)+)>>( + spans: __S, + ) -> $ident { + let _validate_len = 0 $(+ $crate::custom_punctuation_len!(strict, $tt))*; + $ident { + spans: $crate::__private::IntoSpans::into_spans(spans) + } + } + + impl $crate::__private::Default for $ident { + fn default() -> Self { + $ident($crate::__private::Span::call_site()) + } + } + + $crate::impl_parse_for_custom_punctuation!($ident, $($tt)+); + $crate::impl_to_tokens_for_custom_punctuation!($ident, $($tt)+); + $crate::impl_clone_for_custom_punctuation!($ident, $($tt)+); + $crate::impl_extra_traits_for_custom_punctuation!($ident, $($tt)+); + }; +} + +// Not public API. +#[cfg(feature = "parsing")] +#[doc(hidden)] +#[macro_export] +macro_rules! impl_parse_for_custom_punctuation { + ($ident:ident, $($tt:tt)+) => { + impl $crate::token::CustomToken for $ident { + fn peek(cursor: $crate::buffer::Cursor) -> bool { + $crate::token::parsing::peek_punct(cursor, $crate::stringify_punct!($($tt)+)) + } + + fn display() -> &'static $crate::__private::str { + concat!("`", $crate::stringify_punct!($($tt)+), "`") + } + } + + impl $crate::parse::Parse for $ident { + fn parse(input: $crate::parse::ParseStream) -> $crate::parse::Result<$ident> { + let spans: $crate::custom_punctuation_repr!($($tt)+) = + $crate::token::parsing::punct(input, $crate::stringify_punct!($($tt)+))?; + Ok($ident(spans)) + } + } + }; +} + +// Not public API. +#[cfg(not(feature = "parsing"))] +#[doc(hidden)] +#[macro_export] +macro_rules! impl_parse_for_custom_punctuation { + ($ident:ident, $($tt:tt)+) => {}; +} + +// Not public API. +#[cfg(feature = "printing")] +#[doc(hidden)] +#[macro_export] +macro_rules! impl_to_tokens_for_custom_punctuation { + ($ident:ident, $($tt:tt)+) => { + impl $crate::__private::ToTokens for $ident { + fn to_tokens(&self, tokens: &mut $crate::__private::TokenStream2) { + $crate::token::printing::punct($crate::stringify_punct!($($tt)+), &self.spans, tokens) + } + } + }; +} + +// Not public API. +#[cfg(not(feature = "printing"))] +#[doc(hidden)] +#[macro_export] +macro_rules! impl_to_tokens_for_custom_punctuation { + ($ident:ident, $($tt:tt)+) => {}; +} + +// Not public API. +#[cfg(feature = "clone-impls")] +#[doc(hidden)] +#[macro_export] +macro_rules! 
impl_clone_for_custom_punctuation { + ($ident:ident, $($tt:tt)+) => { + impl $crate::__private::Copy for $ident {} + + #[allow(clippy::expl_impl_clone_on_copy)] + impl $crate::__private::Clone for $ident { + fn clone(&self) -> Self { + *self + } + } + }; +} + +// Not public API. +#[cfg(not(feature = "clone-impls"))] +#[doc(hidden)] +#[macro_export] +macro_rules! impl_clone_for_custom_punctuation { + ($ident:ident, $($tt:tt)+) => {}; +} + +// Not public API. +#[cfg(feature = "extra-traits")] +#[doc(hidden)] +#[macro_export] +macro_rules! impl_extra_traits_for_custom_punctuation { + ($ident:ident, $($tt:tt)+) => { + impl $crate::__private::Debug for $ident { + fn fmt(&self, f: &mut $crate::__private::Formatter) -> $crate::__private::fmt::Result { + $crate::__private::Formatter::write_str(f, stringify!($ident)) + } + } + + impl $crate::__private::Eq for $ident {} + + impl $crate::__private::PartialEq for $ident { + fn eq(&self, _other: &Self) -> $crate::__private::bool { + true + } + } + + impl $crate::__private::Hash for $ident { + fn hash<__H: $crate::__private::Hasher>(&self, _state: &mut __H) {} + } + }; +} + +// Not public API. +#[cfg(not(feature = "extra-traits"))] +#[doc(hidden)] +#[macro_export] +macro_rules! impl_extra_traits_for_custom_punctuation { + ($ident:ident, $($tt:tt)+) => {}; +} + +// Not public API. +#[doc(hidden)] +#[macro_export] +macro_rules! custom_punctuation_repr { + ($($tt:tt)+) => { + [$crate::__private::Span; 0 $(+ $crate::custom_punctuation_len!(lenient, $tt))+] + }; +} + +// Not public API. +#[doc(hidden)] +#[macro_export] +#[rustfmt::skip] +macro_rules! custom_punctuation_len { + ($mode:ident, +) => { 1 }; + ($mode:ident, +=) => { 2 }; + ($mode:ident, &) => { 1 }; + ($mode:ident, &&) => { 2 }; + ($mode:ident, &=) => { 2 }; + ($mode:ident, @) => { 1 }; + ($mode:ident, !) => { 1 }; + ($mode:ident, ^) => { 1 }; + ($mode:ident, ^=) => { 2 }; + ($mode:ident, :) => { 1 }; + ($mode:ident, ::) => { 2 }; + ($mode:ident, ,) => { 1 }; + ($mode:ident, /) => { 1 }; + ($mode:ident, /=) => { 2 }; + ($mode:ident, .) => { 1 }; + ($mode:ident, ..) => { 2 }; + ($mode:ident, ...) => { 3 }; + ($mode:ident, ..=) => { 3 }; + ($mode:ident, =) => { 1 }; + ($mode:ident, ==) => { 2 }; + ($mode:ident, >=) => { 2 }; + ($mode:ident, >) => { 1 }; + ($mode:ident, <=) => { 2 }; + ($mode:ident, <) => { 1 }; + ($mode:ident, *=) => { 2 }; + ($mode:ident, !=) => { 2 }; + ($mode:ident, |) => { 1 }; + ($mode:ident, |=) => { 2 }; + ($mode:ident, ||) => { 2 }; + ($mode:ident, #) => { 1 }; + ($mode:ident, ?) => { 1 }; + ($mode:ident, ->) => { 2 }; + ($mode:ident, <-) => { 2 }; + ($mode:ident, %) => { 1 }; + ($mode:ident, %=) => { 2 }; + ($mode:ident, =>) => { 2 }; + ($mode:ident, ;) => { 1 }; + ($mode:ident, <<) => { 2 }; + ($mode:ident, <<=) => { 3 }; + ($mode:ident, >>) => { 2 }; + ($mode:ident, >>=) => { 3 }; + ($mode:ident, *) => { 1 }; + ($mode:ident, -) => { 1 }; + ($mode:ident, -=) => { 2 }; + ($mode:ident, ~) => { 1 }; + (lenient, $tt:tt) => { 0 }; + (strict, $tt:tt) => {{ $crate::custom_punctuation_unexpected!($tt); 0 }}; +} + +// Not public API. +#[doc(hidden)] +#[macro_export] +macro_rules! custom_punctuation_unexpected { + () => {}; +} + +// Not public API. +#[doc(hidden)] +#[macro_export] +macro_rules! 
stringify_punct { + ($($tt:tt)+) => { + concat!($(stringify!($tt)),+) + }; +} diff --git a/third_party/rust/syn/src/data.rs b/third_party/rust/syn/src/data.rs new file mode 100644 index 0000000000..3b466618f8 --- /dev/null +++ b/third_party/rust/syn/src/data.rs @@ -0,0 +1,493 @@ +use super::*; +use crate::punctuated::Punctuated; + +ast_struct! { + /// An enum variant. + /// + /// *This type is available only if Syn is built with the `"derive"` or `"full"` + /// feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct Variant { + /// Attributes tagged on the variant. + pub attrs: Vec<Attribute>, + + /// Name of the variant. + pub ident: Ident, + + /// Content stored in the variant. + pub fields: Fields, + + /// Explicit discriminant: `Variant = 1` + pub discriminant: Option<(Token![=], Expr)>, + } +} + +ast_enum_of_structs! { + /// Data stored within an enum variant or struct. + /// + /// *This type is available only if Syn is built with the `"derive"` or `"full"` + /// feature.* + /// + /// # Syntax tree enum + /// + /// This type is a [syntax tree enum]. + /// + /// [syntax tree enum]: Expr#syntax-tree-enums + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub enum Fields { + /// Named fields of a struct or struct variant such as `Point { x: f64, + /// y: f64 }`. + Named(FieldsNamed), + + /// Unnamed fields of a tuple struct or tuple variant such as `Some(T)`. + Unnamed(FieldsUnnamed), + + /// Unit struct or unit variant such as `None`. + Unit, + } +} + +ast_struct! { + /// Named fields of a struct or struct variant such as `Point { x: f64, + /// y: f64 }`. + /// + /// *This type is available only if Syn is built with the `"derive"` or + /// `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct FieldsNamed { + pub brace_token: token::Brace, + pub named: Punctuated<Field, Token![,]>, + } +} + +ast_struct! { + /// Unnamed fields of a tuple struct or tuple variant such as `Some(T)`. + /// + /// *This type is available only if Syn is built with the `"derive"` or + /// `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct FieldsUnnamed { + pub paren_token: token::Paren, + pub unnamed: Punctuated<Field, Token![,]>, + } +} + +impl Fields { + /// Get an iterator over the borrowed [`Field`] items in this object. This + /// iterator can be used to iterate over a named or unnamed struct or + /// variant's fields uniformly. + pub fn iter(&self) -> punctuated::Iter<Field> { + match self { + Fields::Unit => crate::punctuated::empty_punctuated_iter(), + Fields::Named(f) => f.named.iter(), + Fields::Unnamed(f) => f.unnamed.iter(), + } + } + + /// Get an iterator over the mutably borrowed [`Field`] items in this + /// object. This iterator can be used to iterate over a named or unnamed + /// struct or variant's fields uniformly. + pub fn iter_mut(&mut self) -> punctuated::IterMut<Field> { + match self { + Fields::Unit => crate::punctuated::empty_punctuated_iter_mut(), + Fields::Named(f) => f.named.iter_mut(), + Fields::Unnamed(f) => f.unnamed.iter_mut(), + } + } + + /// Returns the number of fields. + pub fn len(&self) -> usize { + match self { + Fields::Unit => 0, + Fields::Named(f) => f.named.len(), + Fields::Unnamed(f) => f.unnamed.len(), + } + } + + /// Returns `true` if there are zero fields. 
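+    ///
+    /// A unit struct or unit variant (`Fields::Unit`) has no fields, so it is
+    /// always reported as empty.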
+ pub fn is_empty(&self) -> bool { + match self { + Fields::Unit => true, + Fields::Named(f) => f.named.is_empty(), + Fields::Unnamed(f) => f.unnamed.is_empty(), + } + } +} + +impl IntoIterator for Fields { + type Item = Field; + type IntoIter = punctuated::IntoIter<Field>; + + fn into_iter(self) -> Self::IntoIter { + match self { + Fields::Unit => Punctuated::<Field, ()>::new().into_iter(), + Fields::Named(f) => f.named.into_iter(), + Fields::Unnamed(f) => f.unnamed.into_iter(), + } + } +} + +impl<'a> IntoIterator for &'a Fields { + type Item = &'a Field; + type IntoIter = punctuated::Iter<'a, Field>; + + fn into_iter(self) -> Self::IntoIter { + self.iter() + } +} + +impl<'a> IntoIterator for &'a mut Fields { + type Item = &'a mut Field; + type IntoIter = punctuated::IterMut<'a, Field>; + + fn into_iter(self) -> Self::IntoIter { + self.iter_mut() + } +} + +ast_struct! { + /// A field of a struct or enum variant. + /// + /// *This type is available only if Syn is built with the `"derive"` or `"full"` + /// feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct Field { + /// Attributes tagged on the field. + pub attrs: Vec<Attribute>, + + /// Visibility of the field. + pub vis: Visibility, + + /// Name of the field, if any. + /// + /// Fields of tuple structs have no names. + pub ident: Option<Ident>, + + pub colon_token: Option<Token![:]>, + + /// Type of the field. + pub ty: Type, + } +} + +ast_enum_of_structs! { + /// The visibility level of an item: inherited or `pub` or + /// `pub(restricted)`. + /// + /// *This type is available only if Syn is built with the `"derive"` or `"full"` + /// feature.* + /// + /// # Syntax tree enum + /// + /// This type is a [syntax tree enum]. + /// + /// [syntax tree enum]: Expr#syntax-tree-enums + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub enum Visibility { + /// A public visibility level: `pub`. + Public(VisPublic), + + /// A crate-level visibility: `crate`. + Crate(VisCrate), + + /// A visibility level restricted to some path: `pub(self)` or + /// `pub(super)` or `pub(crate)` or `pub(in some::module)`. + Restricted(VisRestricted), + + /// An inherited visibility, which usually means private. + Inherited, + } +} + +ast_struct! { + /// A public visibility level: `pub`. + /// + /// *This type is available only if Syn is built with the `"derive"` or + /// `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct VisPublic { + pub pub_token: Token![pub], + } +} + +ast_struct! { + /// A crate-level visibility: `crate`. + /// + /// *This type is available only if Syn is built with the `"derive"` or + /// `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct VisCrate { + pub crate_token: Token![crate], + } +} + +ast_struct! { + /// A visibility level restricted to some path: `pub(self)` or + /// `pub(super)` or `pub(crate)` or `pub(in some::module)`. 
+ /// + /// *This type is available only if Syn is built with the `"derive"` or + /// `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct VisRestricted { + pub pub_token: Token![pub], + pub paren_token: token::Paren, + pub in_token: Option<Token![in]>, + pub path: Box<Path>, + } +} + +#[cfg(feature = "parsing")] +pub mod parsing { + use super::*; + use crate::ext::IdentExt; + use crate::parse::discouraged::Speculative; + use crate::parse::{Parse, ParseStream, Result}; + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for Variant { + fn parse(input: ParseStream) -> Result<Self> { + let attrs = input.call(Attribute::parse_outer)?; + let _visibility: Visibility = input.parse()?; + let ident: Ident = input.parse()?; + let fields = if input.peek(token::Brace) { + Fields::Named(input.parse()?) + } else if input.peek(token::Paren) { + Fields::Unnamed(input.parse()?) + } else { + Fields::Unit + }; + let discriminant = if input.peek(Token![=]) { + let eq_token: Token![=] = input.parse()?; + let discriminant: Expr = input.parse()?; + Some((eq_token, discriminant)) + } else { + None + }; + Ok(Variant { + attrs, + ident, + fields, + discriminant, + }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for FieldsNamed { + fn parse(input: ParseStream) -> Result<Self> { + let content; + Ok(FieldsNamed { + brace_token: braced!(content in input), + named: content.parse_terminated(Field::parse_named)?, + }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for FieldsUnnamed { + fn parse(input: ParseStream) -> Result<Self> { + let content; + Ok(FieldsUnnamed { + paren_token: parenthesized!(content in input), + unnamed: content.parse_terminated(Field::parse_unnamed)?, + }) + } + } + + impl Field { + /// Parses a named (braced struct) field. + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + pub fn parse_named(input: ParseStream) -> Result<Self> { + Ok(Field { + attrs: input.call(Attribute::parse_outer)?, + vis: input.parse()?, + ident: Some(if input.peek(Token![_]) { + input.call(Ident::parse_any) + } else { + input.parse() + }?), + colon_token: Some(input.parse()?), + ty: input.parse()?, + }) + } + + /// Parses an unnamed (tuple struct) field. + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + pub fn parse_unnamed(input: ParseStream) -> Result<Self> { + Ok(Field { + attrs: input.call(Attribute::parse_outer)?, + vis: input.parse()?, + ident: None, + colon_token: None, + ty: input.parse()?, + }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for Visibility { + fn parse(input: ParseStream) -> Result<Self> { + // Recognize an empty None-delimited group, as produced by a $:vis + // matcher that matched no tokens. 
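+            // (An empty visibility interpolated through macro_rules! reaches a
+            // proc macro as a None-delimited group containing no tokens.)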
+ if input.peek(token::Group) { + let ahead = input.fork(); + let group = crate::group::parse_group(&ahead)?; + if group.content.is_empty() { + input.advance_to(&ahead); + return Ok(Visibility::Inherited); + } + } + + if input.peek(Token![pub]) { + Self::parse_pub(input) + } else if input.peek(Token![crate]) { + Self::parse_crate(input) + } else { + Ok(Visibility::Inherited) + } + } + } + + impl Visibility { + fn parse_pub(input: ParseStream) -> Result<Self> { + let pub_token = input.parse::<Token![pub]>()?; + + if input.peek(token::Paren) { + let ahead = input.fork(); + + let content; + let paren_token = parenthesized!(content in ahead); + if content.peek(Token![crate]) + || content.peek(Token![self]) + || content.peek(Token![super]) + { + let path = content.call(Ident::parse_any)?; + + // Ensure there are no additional tokens within `content`. + // Without explicitly checking, we may misinterpret a tuple + // field as a restricted visibility, causing a parse error. + // e.g. `pub (crate::A, crate::B)` (Issue #720). + if content.is_empty() { + input.advance_to(&ahead); + return Ok(Visibility::Restricted(VisRestricted { + pub_token, + paren_token, + in_token: None, + path: Box::new(Path::from(path)), + })); + } + } else if content.peek(Token![in]) { + let in_token: Token![in] = content.parse()?; + let path = content.call(Path::parse_mod_style)?; + + input.advance_to(&ahead); + return Ok(Visibility::Restricted(VisRestricted { + pub_token, + paren_token, + in_token: Some(in_token), + path: Box::new(path), + })); + } + } + + Ok(Visibility::Public(VisPublic { pub_token })) + } + + fn parse_crate(input: ParseStream) -> Result<Self> { + if input.peek2(Token![::]) { + Ok(Visibility::Inherited) + } else { + Ok(Visibility::Crate(VisCrate { + crate_token: input.parse()?, + })) + } + } + + #[cfg(feature = "full")] + pub(crate) fn is_some(&self) -> bool { + match self { + Visibility::Inherited => false, + _ => true, + } + } + } +} + +#[cfg(feature = "printing")] +mod printing { + use super::*; + use crate::print::TokensOrDefault; + use proc_macro2::TokenStream; + use quote::{ToTokens, TokenStreamExt}; + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for Variant { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(&self.attrs); + self.ident.to_tokens(tokens); + self.fields.to_tokens(tokens); + if let Some((eq_token, disc)) = &self.discriminant { + eq_token.to_tokens(tokens); + disc.to_tokens(tokens); + } + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for FieldsNamed { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.brace_token.surround(tokens, |tokens| { + self.named.to_tokens(tokens); + }); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for FieldsUnnamed { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.paren_token.surround(tokens, |tokens| { + self.unnamed.to_tokens(tokens); + }); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for Field { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(&self.attrs); + self.vis.to_tokens(tokens); + if let Some(ident) = &self.ident { + ident.to_tokens(tokens); + TokensOrDefault(&self.colon_token).to_tokens(tokens); + } + self.ty.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for VisPublic { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.pub_token.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = 
"printing")))] + impl ToTokens for VisCrate { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.crate_token.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for VisRestricted { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.pub_token.to_tokens(tokens); + self.paren_token.surround(tokens, |tokens| { + // TODO: If we have a path which is not "self" or "super" or + // "crate", automatically add the "in" token. + self.in_token.to_tokens(tokens); + self.path.to_tokens(tokens); + }); + } + } +} diff --git a/third_party/rust/syn/src/derive.rs b/third_party/rust/syn/src/derive.rs new file mode 100644 index 0000000000..af9bb91b7a --- /dev/null +++ b/third_party/rust/syn/src/derive.rs @@ -0,0 +1,274 @@ +use super::*; +use crate::punctuated::Punctuated; + +ast_struct! { + /// Data structure sent to a `proc_macro_derive` macro. + /// + /// *This type is available only if Syn is built with the `"derive"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))] + pub struct DeriveInput { + /// Attributes tagged on the whole struct or enum. + pub attrs: Vec<Attribute>, + + /// Visibility of the struct or enum. + pub vis: Visibility, + + /// Name of the struct or enum. + pub ident: Ident, + + /// Generics required to complete the definition. + pub generics: Generics, + + /// Data within the struct or enum. + pub data: Data, + } +} + +ast_enum_of_structs! { + /// The storage of a struct, enum or union data structure. + /// + /// *This type is available only if Syn is built with the `"derive"` feature.* + /// + /// # Syntax tree enum + /// + /// This type is a [syntax tree enum]. + /// + /// [syntax tree enum]: Expr#syntax-tree-enums + #[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))] + pub enum Data { + /// A struct input to a `proc_macro_derive` macro. + Struct(DataStruct), + + /// An enum input to a `proc_macro_derive` macro. + Enum(DataEnum), + + /// An untagged union input to a `proc_macro_derive` macro. + Union(DataUnion), + } + + do_not_generate_to_tokens +} + +ast_struct! { + /// A struct input to a `proc_macro_derive` macro. + /// + /// *This type is available only if Syn is built with the `"derive"` + /// feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))] + pub struct DataStruct { + pub struct_token: Token![struct], + pub fields: Fields, + pub semi_token: Option<Token![;]>, + } +} + +ast_struct! { + /// An enum input to a `proc_macro_derive` macro. + /// + /// *This type is available only if Syn is built with the `"derive"` + /// feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))] + pub struct DataEnum { + pub enum_token: Token![enum], + pub brace_token: token::Brace, + pub variants: Punctuated<Variant, Token![,]>, + } +} + +ast_struct! { + /// An untagged union input to a `proc_macro_derive` macro. 
+ /// + /// *This type is available only if Syn is built with the `"derive"` + /// feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))] + pub struct DataUnion { + pub union_token: Token![union], + pub fields: FieldsNamed, + } +} + +#[cfg(feature = "parsing")] +pub mod parsing { + use super::*; + use crate::parse::{Parse, ParseStream, Result}; + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for DeriveInput { + fn parse(input: ParseStream) -> Result<Self> { + let attrs = input.call(Attribute::parse_outer)?; + let vis = input.parse::<Visibility>()?; + + let lookahead = input.lookahead1(); + if lookahead.peek(Token![struct]) { + let struct_token = input.parse::<Token![struct]>()?; + let ident = input.parse::<Ident>()?; + let generics = input.parse::<Generics>()?; + let (where_clause, fields, semi) = data_struct(input)?; + Ok(DeriveInput { + attrs, + vis, + ident, + generics: Generics { + where_clause, + ..generics + }, + data: Data::Struct(DataStruct { + struct_token, + fields, + semi_token: semi, + }), + }) + } else if lookahead.peek(Token![enum]) { + let enum_token = input.parse::<Token![enum]>()?; + let ident = input.parse::<Ident>()?; + let generics = input.parse::<Generics>()?; + let (where_clause, brace, variants) = data_enum(input)?; + Ok(DeriveInput { + attrs, + vis, + ident, + generics: Generics { + where_clause, + ..generics + }, + data: Data::Enum(DataEnum { + enum_token, + brace_token: brace, + variants, + }), + }) + } else if lookahead.peek(Token![union]) { + let union_token = input.parse::<Token![union]>()?; + let ident = input.parse::<Ident>()?; + let generics = input.parse::<Generics>()?; + let (where_clause, fields) = data_union(input)?; + Ok(DeriveInput { + attrs, + vis, + ident, + generics: Generics { + where_clause, + ..generics + }, + data: Data::Union(DataUnion { + union_token, + fields, + }), + }) + } else { + Err(lookahead.error()) + } + } + } + + pub fn data_struct( + input: ParseStream, + ) -> Result<(Option<WhereClause>, Fields, Option<Token![;]>)> { + let mut lookahead = input.lookahead1(); + let mut where_clause = None; + if lookahead.peek(Token![where]) { + where_clause = Some(input.parse()?); + lookahead = input.lookahead1(); + } + + if where_clause.is_none() && lookahead.peek(token::Paren) { + let fields = input.parse()?; + + lookahead = input.lookahead1(); + if lookahead.peek(Token![where]) { + where_clause = Some(input.parse()?); + lookahead = input.lookahead1(); + } + + if lookahead.peek(Token![;]) { + let semi = input.parse()?; + Ok((where_clause, Fields::Unnamed(fields), Some(semi))) + } else { + Err(lookahead.error()) + } + } else if lookahead.peek(token::Brace) { + let fields = input.parse()?; + Ok((where_clause, Fields::Named(fields), None)) + } else if lookahead.peek(Token![;]) { + let semi = input.parse()?; + Ok((where_clause, Fields::Unit, Some(semi))) + } else { + Err(lookahead.error()) + } + } + + pub fn data_enum( + input: ParseStream, + ) -> Result<( + Option<WhereClause>, + token::Brace, + Punctuated<Variant, Token![,]>, + )> { + let where_clause = input.parse()?; + + let content; + let brace = braced!(content in input); + let variants = content.parse_terminated(Variant::parse)?; + + Ok((where_clause, brace, variants)) + } + + pub fn data_union(input: ParseStream) -> Result<(Option<WhereClause>, FieldsNamed)> { + let where_clause = input.parse()?; + let fields = input.parse()?; + Ok((where_clause, fields)) + } +} + +#[cfg(feature = "printing")] +mod printing { + use super::*; + use crate::attr::FilterAttrs; + use 
crate::print::TokensOrDefault; + use proc_macro2::TokenStream; + use quote::ToTokens; + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for DeriveInput { + fn to_tokens(&self, tokens: &mut TokenStream) { + for attr in self.attrs.outer() { + attr.to_tokens(tokens); + } + self.vis.to_tokens(tokens); + match &self.data { + Data::Struct(d) => d.struct_token.to_tokens(tokens), + Data::Enum(d) => d.enum_token.to_tokens(tokens), + Data::Union(d) => d.union_token.to_tokens(tokens), + } + self.ident.to_tokens(tokens); + self.generics.to_tokens(tokens); + match &self.data { + Data::Struct(data) => match &data.fields { + Fields::Named(fields) => { + self.generics.where_clause.to_tokens(tokens); + fields.to_tokens(tokens); + } + Fields::Unnamed(fields) => { + fields.to_tokens(tokens); + self.generics.where_clause.to_tokens(tokens); + TokensOrDefault(&data.semi_token).to_tokens(tokens); + } + Fields::Unit => { + self.generics.where_clause.to_tokens(tokens); + TokensOrDefault(&data.semi_token).to_tokens(tokens); + } + }, + Data::Enum(data) => { + self.generics.where_clause.to_tokens(tokens); + data.brace_token.surround(tokens, |tokens| { + data.variants.to_tokens(tokens); + }); + } + Data::Union(data) => { + self.generics.where_clause.to_tokens(tokens); + data.fields.to_tokens(tokens); + } + } + } + } +} diff --git a/third_party/rust/syn/src/discouraged.rs b/third_party/rust/syn/src/discouraged.rs new file mode 100644 index 0000000000..a46129b6a1 --- /dev/null +++ b/third_party/rust/syn/src/discouraged.rs @@ -0,0 +1,194 @@ +//! Extensions to the parsing API with niche applicability. + +use super::*; + +/// Extensions to the `ParseStream` API to support speculative parsing. +pub trait Speculative { + /// Advance this parse stream to the position of a forked parse stream. + /// + /// This is the opposite operation to [`ParseStream::fork`]. You can fork a + /// parse stream, perform some speculative parsing, then join the original + /// stream to the fork to "commit" the parsing from the fork to the main + /// stream. + /// + /// If you can avoid doing this, you should, as it limits the ability to + /// generate useful errors. That said, it is often the only way to parse + /// syntax of the form `A* B*` for arbitrary syntax `A` and `B`. The problem + /// is that when the fork fails to parse an `A`, it's impossible to tell + /// whether that was because of a syntax error and the user meant to provide + /// an `A`, or that the `A`s are finished and it's time to start parsing + /// `B`s. Use with care. + /// + /// Also note that if `A` is a subset of `B`, `A* B*` can be parsed by + /// parsing `B*` and removing the leading members of `A` from the + /// repetition, bypassing the need to involve the downsides associated with + /// speculative parsing. + /// + /// [`ParseStream::fork`]: ParseBuffer::fork + /// + /// # Example + /// + /// There has been chatter about the possibility of making the colons in the + /// turbofish syntax like `path::to::<T>` no longer required by accepting + /// `path::to<T>` in expression position. Specifically, according to [RFC + /// 2544], [`PathSegment`] parsing should always try to consume a following + /// `<` token as the start of generic arguments, and reset to the `<` if + /// that fails (e.g. the token is acting as a less-than operator). + /// + /// This is the exact kind of parsing behavior which requires the "fork, + /// try, commit" behavior that [`ParseStream::fork`] discourages. 
With + /// `advance_to`, we can avoid having to parse the speculatively parsed + /// content a second time. + /// + /// This change in behavior can be implemented in syn by replacing just the + /// `Parse` implementation for `PathSegment`: + /// + /// ``` + /// # use syn::ext::IdentExt; + /// use syn::parse::discouraged::Speculative; + /// # use syn::parse::{Parse, ParseStream}; + /// # use syn::{Ident, PathArguments, Result, Token}; + /// + /// pub struct PathSegment { + /// pub ident: Ident, + /// pub arguments: PathArguments, + /// } + /// # + /// # impl<T> From<T> for PathSegment + /// # where + /// # T: Into<Ident>, + /// # { + /// # fn from(ident: T) -> Self { + /// # PathSegment { + /// # ident: ident.into(), + /// # arguments: PathArguments::None, + /// # } + /// # } + /// # } + /// + /// impl Parse for PathSegment { + /// fn parse(input: ParseStream) -> Result<Self> { + /// if input.peek(Token![super]) + /// || input.peek(Token![self]) + /// || input.peek(Token![Self]) + /// || input.peek(Token![crate]) + /// { + /// let ident = input.call(Ident::parse_any)?; + /// return Ok(PathSegment::from(ident)); + /// } + /// + /// let ident = input.parse()?; + /// if input.peek(Token![::]) && input.peek3(Token![<]) { + /// return Ok(PathSegment { + /// ident, + /// arguments: PathArguments::AngleBracketed(input.parse()?), + /// }); + /// } + /// if input.peek(Token![<]) && !input.peek(Token![<=]) { + /// let fork = input.fork(); + /// if let Ok(arguments) = fork.parse() { + /// input.advance_to(&fork); + /// return Ok(PathSegment { + /// ident, + /// arguments: PathArguments::AngleBracketed(arguments), + /// }); + /// } + /// } + /// Ok(PathSegment::from(ident)) + /// } + /// } + /// + /// # syn::parse_str::<PathSegment>("a<b,c>").unwrap(); + /// ``` + /// + /// # Drawbacks + /// + /// The main drawback of this style of speculative parsing is in error + /// presentation. Even if the lookahead is the "correct" parse, the error + /// that is shown is that of the "fallback" parse. To use the same example + /// as the turbofish above, take the following unfinished "turbofish": + /// + /// ```text + /// let _ = f<&'a fn(), for<'a> serde::>(); + /// ``` + /// + /// If this is parsed as generic arguments, we can provide the error message + /// + /// ```text + /// error: expected identifier + /// --> src.rs:L:C + /// | + /// L | let _ = f<&'a fn(), for<'a> serde::>(); + /// | ^ + /// ``` + /// + /// but if parsed using the above speculative parsing, it falls back to + /// assuming that the `<` is a less-than when it fails to parse the generic + /// arguments, and tries to interpret the `&'a` as the start of a labelled + /// loop, resulting in the much less helpful error + /// + /// ```text + /// error: expected `:` + /// --> src.rs:L:C + /// | + /// L | let _ = f<&'a fn(), for<'a> serde::>(); + /// | ^^ + /// ``` + /// + /// This can be mitigated with various heuristics (two examples: show both + /// forks' parse errors, or show the one that consumed more tokens), but + /// when you can control the grammar, sticking to something that can be + /// parsed LL(3) and without the LL(*) speculative parsing this makes + /// possible, displaying reasonable errors becomes much more simple. + /// + /// [RFC 2544]: https://github.com/rust-lang/rfcs/pull/2544 + /// [`PathSegment`]: crate::PathSegment + /// + /// # Performance + /// + /// This method performs a cheap fixed amount of work that does not depend + /// on how far apart the two streams are positioned. 
+ /// + /// # Panics + /// + /// The forked stream in the argument of `advance_to` must have been + /// obtained by forking `self`. Attempting to advance to any other stream + /// will cause a panic. + fn advance_to(&self, fork: &Self); +} + +impl<'a> Speculative for ParseBuffer<'a> { + fn advance_to(&self, fork: &Self) { + if !crate::buffer::same_scope(self.cursor(), fork.cursor()) { + panic!("Fork was not derived from the advancing parse stream"); + } + + let (self_unexp, self_sp) = inner_unexpected(self); + let (fork_unexp, fork_sp) = inner_unexpected(fork); + if !Rc::ptr_eq(&self_unexp, &fork_unexp) { + match (fork_sp, self_sp) { + // Unexpected set on the fork, but not on `self`, copy it over. + (Some(span), None) => { + self_unexp.set(Unexpected::Some(span)); + } + // Unexpected unset. Use chain to propagate errors from fork. + (None, None) => { + fork_unexp.set(Unexpected::Chain(self_unexp)); + + // Ensure toplevel 'unexpected' tokens from the fork don't + // bubble up the chain by replacing the root `unexpected` + // pointer, only 'unexpected' tokens from existing group + // parsers should bubble. + fork.unexpected + .set(Some(Rc::new(Cell::new(Unexpected::None)))); + } + // Unexpected has been set on `self`. No changes needed. + (_, Some(_)) => {} + } + } + + // See comment on `cell` in the struct definition. + self.cell + .set(unsafe { mem::transmute::<Cursor, Cursor<'static>>(fork.cursor()) }); + } +} diff --git a/third_party/rust/syn/src/drops.rs b/third_party/rust/syn/src/drops.rs new file mode 100644 index 0000000000..89b42d82ef --- /dev/null +++ b/third_party/rust/syn/src/drops.rs @@ -0,0 +1,58 @@ +use std::iter; +use std::mem::ManuallyDrop; +use std::ops::{Deref, DerefMut}; +use std::option; +use std::slice; + +#[repr(transparent)] +pub(crate) struct NoDrop<T: ?Sized>(ManuallyDrop<T>); + +impl<T> NoDrop<T> { + pub(crate) fn new(value: T) -> Self + where + T: TrivialDrop, + { + NoDrop(ManuallyDrop::new(value)) + } +} + +impl<T: ?Sized> Deref for NoDrop<T> { + type Target = T; + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl<T: ?Sized> DerefMut for NoDrop<T> { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} + +pub(crate) trait TrivialDrop {} + +impl<T> TrivialDrop for iter::Empty<T> {} +impl<'a, T> TrivialDrop for slice::Iter<'a, T> {} +impl<'a, T> TrivialDrop for slice::IterMut<'a, T> {} +impl<'a, T> TrivialDrop for option::IntoIter<&'a T> {} +impl<'a, T> TrivialDrop for option::IntoIter<&'a mut T> {} + +#[test] +fn test_needs_drop() { + use std::mem::needs_drop; + + struct NeedsDrop; + + impl Drop for NeedsDrop { + fn drop(&mut self) {} + } + + assert!(needs_drop::<NeedsDrop>()); + + // Test each of the types with a handwritten TrivialDrop impl above. 
+ assert!(!needs_drop::<iter::Empty<NeedsDrop>>()); + assert!(!needs_drop::<slice::Iter<NeedsDrop>>()); + assert!(!needs_drop::<slice::IterMut<NeedsDrop>>()); + assert!(!needs_drop::<option::IntoIter<&NeedsDrop>>()); + assert!(!needs_drop::<option::IntoIter<&mut NeedsDrop>>()); +} diff --git a/third_party/rust/syn/src/error.rs b/third_party/rust/syn/src/error.rs new file mode 100644 index 0000000000..e301367d5e --- /dev/null +++ b/third_party/rust/syn/src/error.rs @@ -0,0 +1,428 @@ +#[cfg(feature = "parsing")] +use crate::buffer::Cursor; +use crate::thread::ThreadBound; +use proc_macro2::{ + Delimiter, Group, Ident, LexError, Literal, Punct, Spacing, Span, TokenStream, TokenTree, +}; +#[cfg(feature = "printing")] +use quote::ToTokens; +use std::fmt::{self, Debug, Display}; +use std::iter::FromIterator; +use std::slice; +use std::vec; + +/// The result of a Syn parser. +pub type Result<T> = std::result::Result<T, Error>; + +/// Error returned when a Syn parser cannot parse the input tokens. +/// +/// # Error reporting in proc macros +/// +/// The correct way to report errors back to the compiler from a procedural +/// macro is by emitting an appropriately spanned invocation of +/// [`compile_error!`] in the generated code. This produces a better diagnostic +/// message than simply panicking the macro. +/// +/// [`compile_error!`]: std::compile_error! +/// +/// When parsing macro input, the [`parse_macro_input!`] macro handles the +/// conversion to `compile_error!` automatically. +/// +/// [`parse_macro_input!`]: crate::parse_macro_input! +/// +/// ``` +/// # extern crate proc_macro; +/// # +/// use proc_macro::TokenStream; +/// use syn::{parse_macro_input, AttributeArgs, ItemFn}; +/// +/// # const IGNORE: &str = stringify! { +/// #[proc_macro_attribute] +/// # }; +/// pub fn my_attr(args: TokenStream, input: TokenStream) -> TokenStream { +/// let args = parse_macro_input!(args as AttributeArgs); +/// let input = parse_macro_input!(input as ItemFn); +/// +/// /* ... */ +/// # TokenStream::new() +/// } +/// ``` +/// +/// For errors that arise later than the initial parsing stage, the +/// [`.to_compile_error()`] or [`.into_compile_error()`] methods can be used to +/// perform an explicit conversion to `compile_error!`. +/// +/// [`.to_compile_error()`]: Error::to_compile_error +/// [`.into_compile_error()`]: Error::into_compile_error +/// +/// ``` +/// # extern crate proc_macro; +/// # +/// # use proc_macro::TokenStream; +/// # use syn::{parse_macro_input, DeriveInput}; +/// # +/// # const IGNORE: &str = stringify! { +/// #[proc_macro_derive(MyDerive)] +/// # }; +/// pub fn my_derive(input: TokenStream) -> TokenStream { +/// let input = parse_macro_input!(input as DeriveInput); +/// +/// // fn(DeriveInput) -> syn::Result<proc_macro2::TokenStream> +/// expand::my_derive(input) +/// .unwrap_or_else(syn::Error::into_compile_error) +/// .into() +/// } +/// # +/// # mod expand { +/// # use proc_macro2::TokenStream; +/// # use syn::{DeriveInput, Result}; +/// # +/// # pub fn my_derive(input: DeriveInput) -> Result<TokenStream> { +/// # unimplemented!() +/// # } +/// # } +/// ``` +pub struct Error { + messages: Vec<ErrorMessage>, +} + +struct ErrorMessage { + // Span is implemented as an index into a thread-local interner to keep the + // size small. It is not safe to access from a different thread. 
We want + // errors to be Send and Sync to play nicely with the Failure crate, so pin + // the span we're given to its original thread and assume it is + // Span::call_site if accessed from any other thread. + start_span: ThreadBound<Span>, + end_span: ThreadBound<Span>, + message: String, +} + +#[cfg(test)] +struct _Test +where + Error: Send + Sync; + +impl Error { + /// Usually the [`ParseStream::error`] method will be used instead, which + /// automatically uses the correct span from the current position of the + /// parse stream. + /// + /// Use `Error::new` when the error needs to be triggered on some span other + /// than where the parse stream is currently positioned. + /// + /// [`ParseStream::error`]: crate::parse::ParseBuffer::error + /// + /// # Example + /// + /// ``` + /// use syn::{Error, Ident, LitStr, Result, Token}; + /// use syn::parse::ParseStream; + /// + /// // Parses input that looks like `name = "string"` where the key must be + /// // the identifier `name` and the value may be any string literal. + /// // Returns the string literal. + /// fn parse_name(input: ParseStream) -> Result<LitStr> { + /// let name_token: Ident = input.parse()?; + /// if name_token != "name" { + /// // Trigger an error not on the current position of the stream, + /// // but on the position of the unexpected identifier. + /// return Err(Error::new(name_token.span(), "expected `name`")); + /// } + /// input.parse::<Token![=]>()?; + /// let s: LitStr = input.parse()?; + /// Ok(s) + /// } + /// ``` + pub fn new<T: Display>(span: Span, message: T) -> Self { + return new(span, message.to_string()); + + fn new(span: Span, message: String) -> Error { + Error { + messages: vec![ErrorMessage { + start_span: ThreadBound::new(span), + end_span: ThreadBound::new(span), + message, + }], + } + } + } + + /// Creates an error with the specified message spanning the given syntax + /// tree node. + /// + /// Unlike the `Error::new` constructor, this constructor takes an argument + /// `tokens` which is a syntax tree node. This allows the resulting `Error` + /// to attempt to span all tokens inside of `tokens`. While you would + /// typically be able to use the `Spanned` trait with the above `Error::new` + /// constructor, implementation limitations today mean that + /// `Error::new_spanned` may provide a higher-quality error message on + /// stable Rust. + /// + /// When in doubt it's recommended to stick to `Error::new` (or + /// `ParseStream::error`)! + #[cfg(feature = "printing")] + pub fn new_spanned<T: ToTokens, U: Display>(tokens: T, message: U) -> Self { + return new_spanned(tokens.into_token_stream(), message.to_string()); + + fn new_spanned(tokens: TokenStream, message: String) -> Error { + let mut iter = tokens.into_iter(); + let start = iter.next().map_or_else(Span::call_site, |t| t.span()); + let end = iter.last().map_or(start, |t| t.span()); + Error { + messages: vec![ErrorMessage { + start_span: ThreadBound::new(start), + end_span: ThreadBound::new(end), + message, + }], + } + } + } + + /// The source location of the error. + /// + /// Spans are not thread-safe so this function returns `Span::call_site()` + /// if called from a different thread than the one on which the `Error` was + /// originally created. 
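A short illustration of `Error::new_spanned` as described above; the `require_named` check is hypothetical and would live inside a derive macro. Passing the syntax tree node itself lets the resulting diagnostic underline the whole field declaration on stable Rust.

```
use syn::{Error, Field, Result};

// Hypothetical check used inside a derive macro: the field supplies the span,
// so the compile_error! diagnostic covers the entire declaration.
fn require_named(field: &Field) -> Result<()> {
    if field.ident.is_none() {
        return Err(Error::new_spanned(field, "unnamed fields are not supported"));
    }
    Ok(())
}
```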
+ pub fn span(&self) -> Span { + let start = match self.messages[0].start_span.get() { + Some(span) => *span, + None => return Span::call_site(), + }; + let end = match self.messages[0].end_span.get() { + Some(span) => *span, + None => return Span::call_site(), + }; + start.join(end).unwrap_or(start) + } + + /// Render the error as an invocation of [`compile_error!`]. + /// + /// The [`parse_macro_input!`] macro provides a convenient way to invoke + /// this method correctly in a procedural macro. + /// + /// [`compile_error!`]: std::compile_error! + /// [`parse_macro_input!`]: crate::parse_macro_input! + pub fn to_compile_error(&self) -> TokenStream { + self.messages + .iter() + .map(ErrorMessage::to_compile_error) + .collect() + } + + /// Render the error as an invocation of [`compile_error!`]. + /// + /// [`compile_error!`]: std::compile_error! + /// + /// # Example + /// + /// ``` + /// # extern crate proc_macro; + /// # + /// use proc_macro::TokenStream; + /// use syn::{parse_macro_input, DeriveInput, Error}; + /// + /// # const _: &str = stringify! { + /// #[proc_macro_derive(MyTrait)] + /// # }; + /// pub fn derive_my_trait(input: TokenStream) -> TokenStream { + /// let input = parse_macro_input!(input as DeriveInput); + /// my_trait::expand(input) + /// .unwrap_or_else(Error::into_compile_error) + /// .into() + /// } + /// + /// mod my_trait { + /// use proc_macro2::TokenStream; + /// use syn::{DeriveInput, Result}; + /// + /// pub(crate) fn expand(input: DeriveInput) -> Result<TokenStream> { + /// /* ... */ + /// # unimplemented!() + /// } + /// } + /// ``` + pub fn into_compile_error(self) -> TokenStream { + self.to_compile_error() + } + + /// Add another error message to self such that when `to_compile_error()` is + /// called, both errors will be emitted together. 
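A sketch of the accumulation pattern that `combine` enables, assuming a hypothetical `validate_fields` pass with a made-up naming rule: keep validating after the first failure, then return a single `Error` whose rendered `compile_error!` output reports every offending field at once.

```
use syn::FieldsNamed;

// Hypothetical validation pass: reject reserved field names, but keep going so
// that one compiler invocation reports every offending field.
fn validate_fields(fields: &FieldsNamed) -> syn::Result<()> {
    let mut all_errors: Option<syn::Error> = None;
    for field in &fields.named {
        if let Some(ident) = &field.ident {
            if ident.to_string().starts_with("__") {
                let err = syn::Error::new(ident.span(), "field names starting with `__` are reserved");
                match &mut all_errors {
                    Some(errors) => errors.combine(err),
                    None => all_errors = Some(err),
                }
            }
        }
    }
    match all_errors {
        Some(errors) => Err(errors),
        None => Ok(()),
    }
}
```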
+ pub fn combine(&mut self, another: Error) { + self.messages.extend(another.messages); + } +} + +impl ErrorMessage { + fn to_compile_error(&self) -> TokenStream { + let start = self + .start_span + .get() + .cloned() + .unwrap_or_else(Span::call_site); + let end = self.end_span.get().cloned().unwrap_or_else(Span::call_site); + + // compile_error!($message) + TokenStream::from_iter(vec![ + TokenTree::Ident(Ident::new("compile_error", start)), + TokenTree::Punct({ + let mut punct = Punct::new('!', Spacing::Alone); + punct.set_span(start); + punct + }), + TokenTree::Group({ + let mut group = Group::new(Delimiter::Brace, { + TokenStream::from_iter(vec![TokenTree::Literal({ + let mut string = Literal::string(&self.message); + string.set_span(end); + string + })]) + }); + group.set_span(end); + group + }), + ]) + } +} + +#[cfg(feature = "parsing")] +pub fn new_at<T: Display>(scope: Span, cursor: Cursor, message: T) -> Error { + if cursor.eof() { + Error::new(scope, format!("unexpected end of input, {}", message)) + } else { + let span = crate::buffer::open_span_of_group(cursor); + Error::new(span, message) + } +} + +#[cfg(all(feature = "parsing", any(feature = "full", feature = "derive")))] +pub fn new2<T: Display>(start: Span, end: Span, message: T) -> Error { + return new2(start, end, message.to_string()); + + fn new2(start: Span, end: Span, message: String) -> Error { + Error { + messages: vec![ErrorMessage { + start_span: ThreadBound::new(start), + end_span: ThreadBound::new(end), + message, + }], + } + } +} + +impl Debug for Error { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + if self.messages.len() == 1 { + formatter + .debug_tuple("Error") + .field(&self.messages[0]) + .finish() + } else { + formatter + .debug_tuple("Error") + .field(&self.messages) + .finish() + } + } +} + +impl Debug for ErrorMessage { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + Debug::fmt(&self.message, formatter) + } +} + +impl Display for Error { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str(&self.messages[0].message) + } +} + +impl Clone for Error { + fn clone(&self) -> Self { + Error { + messages: self.messages.clone(), + } + } +} + +impl Clone for ErrorMessage { + fn clone(&self) -> Self { + let start = self + .start_span + .get() + .cloned() + .unwrap_or_else(Span::call_site); + let end = self.end_span.get().cloned().unwrap_or_else(Span::call_site); + ErrorMessage { + start_span: ThreadBound::new(start), + end_span: ThreadBound::new(end), + message: self.message.clone(), + } + } +} + +impl std::error::Error for Error {} + +impl From<LexError> for Error { + fn from(err: LexError) -> Self { + Error::new(err.span(), "lex error") + } +} + +impl IntoIterator for Error { + type Item = Error; + type IntoIter = IntoIter; + + fn into_iter(self) -> Self::IntoIter { + IntoIter { + messages: self.messages.into_iter(), + } + } +} + +pub struct IntoIter { + messages: vec::IntoIter<ErrorMessage>, +} + +impl Iterator for IntoIter { + type Item = Error; + + fn next(&mut self) -> Option<Self::Item> { + Some(Error { + messages: vec![self.messages.next()?], + }) + } +} + +impl<'a> IntoIterator for &'a Error { + type Item = Error; + type IntoIter = Iter<'a>; + + fn into_iter(self) -> Self::IntoIter { + Iter { + messages: self.messages.iter(), + } + } +} + +pub struct Iter<'a> { + messages: slice::Iter<'a, ErrorMessage>, +} + +impl<'a> Iterator for Iter<'a> { + type Item = Error; + + fn next(&mut self) -> Option<Self::Item> { + Some(Error { + 
messages: vec![self.messages.next()?.clone()], + }) + } +} + +impl Extend<Error> for Error { + fn extend<T: IntoIterator<Item = Error>>(&mut self, iter: T) { + for err in iter { + self.combine(err); + } + } +} diff --git a/third_party/rust/syn/src/export.rs b/third_party/rust/syn/src/export.rs new file mode 100644 index 0000000000..f478d091ea --- /dev/null +++ b/third_party/rust/syn/src/export.rs @@ -0,0 +1,39 @@ +pub use std::clone::Clone; +pub use std::cmp::{Eq, PartialEq}; +pub use std::default::Default; +pub use std::fmt::{self, Debug, Formatter}; +pub use std::hash::{Hash, Hasher}; +pub use std::marker::Copy; +pub use std::option::Option::{None, Some}; +pub use std::result::Result::{Err, Ok}; + +#[cfg(feature = "printing")] +pub extern crate quote; + +pub use proc_macro2::{Span, TokenStream as TokenStream2}; + +#[cfg(feature = "parsing")] +pub use crate::group::{parse_braces, parse_brackets, parse_parens}; + +pub use crate::span::IntoSpans; + +#[cfg(all( + not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))), + feature = "proc-macro" +))] +pub use proc_macro::TokenStream; + +#[cfg(feature = "printing")] +pub use quote::{ToTokens, TokenStreamExt}; + +#[allow(non_camel_case_types)] +pub type bool = help::Bool; +#[allow(non_camel_case_types)] +pub type str = help::Str; + +mod help { + pub type Bool = bool; + pub type Str = str; +} + +pub struct private(pub(crate) ()); diff --git a/third_party/rust/syn/src/expr.rs b/third_party/rust/syn/src/expr.rs new file mode 100644 index 0000000000..93a59b0e20 --- /dev/null +++ b/third_party/rust/syn/src/expr.rs @@ -0,0 +1,3558 @@ +use super::*; +use crate::punctuated::Punctuated; +#[cfg(feature = "full")] +use crate::reserved::Reserved; +use proc_macro2::{Span, TokenStream}; +#[cfg(feature = "printing")] +use quote::IdentFragment; +#[cfg(feature = "printing")] +use std::fmt::{self, Display}; +use std::hash::{Hash, Hasher}; +#[cfg(feature = "parsing")] +use std::mem; + +ast_enum_of_structs! { + /// A Rust expression. + /// + /// *This type is available only if Syn is built with the `"derive"` or `"full"` + /// feature, but most of the variants are not available unless "full" is enabled.* + /// + /// # Syntax tree enums + /// + /// This type is a syntax tree enum. In Syn this and other syntax tree enums + /// are designed to be traversed using the following rebinding idiom. + /// + /// ``` + /// # use syn::Expr; + /// # + /// # fn example(expr: Expr) { + /// # const IGNORE: &str = stringify! { + /// let expr: Expr = /* ... */; + /// # }; + /// match expr { + /// Expr::MethodCall(expr) => { + /// /* ... */ + /// } + /// Expr::Cast(expr) => { + /// /* ... */ + /// } + /// Expr::If(expr) => { + /// /* ... */ + /// } + /// + /// /* ... */ + /// # _ => {} + /// # } + /// # } + /// ``` + /// + /// We begin with a variable `expr` of type `Expr` that has no fields + /// (because it is an enum), and by matching on it and rebinding a variable + /// with the same name `expr` we effectively imbue our variable with all of + /// the data fields provided by the variant that it turned out to be. So for + /// example above if we ended up in the `MethodCall` case then we get to use + /// `expr.receiver`, `expr.args` etc; if we ended up in the `If` case we get + /// to use `expr.cond`, `expr.then_branch`, `expr.else_branch`. + /// + /// This approach avoids repeating the variant names twice on every line. 
+ /// + /// ``` + /// # use syn::{Expr, ExprMethodCall}; + /// # + /// # fn example(expr: Expr) { + /// // Repetitive; recommend not doing this. + /// match expr { + /// Expr::MethodCall(ExprMethodCall { method, args, .. }) => { + /// # } + /// # _ => {} + /// # } + /// # } + /// ``` + /// + /// In general, the name to which a syntax tree enum variant is bound should + /// be a suitable name for the complete syntax tree enum type. + /// + /// ``` + /// # use syn::{Expr, ExprField}; + /// # + /// # fn example(discriminant: ExprField) { + /// // Binding is called `base` which is the name I would use if I were + /// // assigning `*discriminant.base` without an `if let`. + /// if let Expr::Tuple(base) = *discriminant.base { + /// # } + /// # } + /// ``` + /// + /// A sign that you may not be choosing the right variable names is if you + /// see names getting repeated in your code, like accessing + /// `receiver.receiver` or `pat.pat` or `cond.cond`. + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(not(syn_no_non_exhaustive), non_exhaustive)] + pub enum Expr { + /// A slice literal expression: `[a, b, c, d]`. + Array(ExprArray), + + /// An assignment expression: `a = compute()`. + Assign(ExprAssign), + + /// A compound assignment expression: `counter += 1`. + AssignOp(ExprAssignOp), + + /// An async block: `async { ... }`. + Async(ExprAsync), + + /// An await expression: `fut.await`. + Await(ExprAwait), + + /// A binary operation: `a + b`, `a * b`. + Binary(ExprBinary), + + /// A blocked scope: `{ ... }`. + Block(ExprBlock), + + /// A box expression: `box f`. + Box(ExprBox), + + /// A `break`, with an optional label to break and an optional + /// expression. + Break(ExprBreak), + + /// A function call expression: `invoke(a, b)`. + Call(ExprCall), + + /// A cast expression: `foo as f64`. + Cast(ExprCast), + + /// A closure expression: `|a, b| a + b`. + Closure(ExprClosure), + + /// A `continue`, with an optional label. + Continue(ExprContinue), + + /// Access of a named struct field (`obj.k`) or unnamed tuple struct + /// field (`obj.0`). + Field(ExprField), + + /// A for loop: `for pat in expr { ... }`. + ForLoop(ExprForLoop), + + /// An expression contained within invisible delimiters. + /// + /// This variant is important for faithfully representing the precedence + /// of expressions and is related to `None`-delimited spans in a + /// `TokenStream`. + Group(ExprGroup), + + /// An `if` expression with an optional `else` block: `if expr { ... } + /// else { ... }`. + /// + /// The `else` branch expression may only be an `If` or `Block` + /// expression, not any of the other types of expression. + If(ExprIf), + + /// A square bracketed indexing expression: `vector[2]`. + Index(ExprIndex), + + /// A `let` guard: `let Some(x) = opt`. + Let(ExprLet), + + /// A literal in place of an expression: `1`, `"foo"`. + Lit(ExprLit), + + /// Conditionless loop: `loop { ... }`. + Loop(ExprLoop), + + /// A macro invocation expression: `format!("{}", q)`. + Macro(ExprMacro), + + /// A `match` expression: `match n { Some(n) => {}, None => {} }`. + Match(ExprMatch), + + /// A method call expression: `x.foo::<T>(a, b)`. + MethodCall(ExprMethodCall), + + /// A parenthesized expression: `(a + b)`. + Paren(ExprParen), + + /// A path like `std::mem::replace` possibly containing generic + /// parameters and a qualified self-type. + /// + /// A plain identifier like `x` is a path of length 1. + Path(ExprPath), + + /// A range expression: `1..2`, `1..`, `..2`, `1..=2`, `..=2`. 
+ Range(ExprRange), + + /// A referencing operation: `&a` or `&mut a`. + Reference(ExprReference), + + /// An array literal constructed from one repeated element: `[0u8; N]`. + Repeat(ExprRepeat), + + /// A `return`, with an optional value to be returned. + Return(ExprReturn), + + /// A struct literal expression: `Point { x: 1, y: 1 }`. + /// + /// The `rest` provides the value of the remaining fields as in `S { a: + /// 1, b: 1, ..rest }`. + Struct(ExprStruct), + + /// A try-expression: `expr?`. + Try(ExprTry), + + /// A try block: `try { ... }`. + TryBlock(ExprTryBlock), + + /// A tuple expression: `(a, b, c, d)`. + Tuple(ExprTuple), + + /// A type ascription expression: `foo: f64`. + Type(ExprType), + + /// A unary operation: `!x`, `*x`. + Unary(ExprUnary), + + /// An unsafe block: `unsafe { ... }`. + Unsafe(ExprUnsafe), + + /// Tokens in expression position not interpreted by Syn. + Verbatim(TokenStream), + + /// A while loop: `while expr { ... }`. + While(ExprWhile), + + /// A yield expression: `yield expr`. + Yield(ExprYield), + + // Not public API. + // + // For testing exhaustiveness in downstream code, use the following idiom: + // + // match expr { + // Expr::Array(expr) => {...} + // Expr::Assign(expr) => {...} + // ... + // Expr::Yield(expr) => {...} + // + // #[cfg_attr(test, deny(non_exhaustive_omitted_patterns))] + // _ => { /* some sane fallback */ } + // } + // + // This way we fail your tests but don't break your library when adding + // a variant. You will be notified by a test failure when a variant is + // added, so that you can add code to handle it, but your library will + // continue to compile and work for downstream users in the interim. + #[cfg(syn_no_non_exhaustive)] + #[doc(hidden)] + __NonExhaustive, + } +} + +ast_struct! { + /// A slice literal expression: `[a, b, c, d]`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ExprArray #full { + pub attrs: Vec<Attribute>, + pub bracket_token: token::Bracket, + pub elems: Punctuated<Expr, Token![,]>, + } +} + +ast_struct! { + /// An assignment expression: `a = compute()`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ExprAssign #full { + pub attrs: Vec<Attribute>, + pub left: Box<Expr>, + pub eq_token: Token![=], + pub right: Box<Expr>, + } +} + +ast_struct! { + /// A compound assignment expression: `counter += 1`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ExprAssignOp #full { + pub attrs: Vec<Attribute>, + pub left: Box<Expr>, + pub op: BinOp, + pub right: Box<Expr>, + } +} + +ast_struct! { + /// An async block: `async { ... }`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ExprAsync #full { + pub attrs: Vec<Attribute>, + pub async_token: Token![async], + pub capture: Option<Token![move]>, + pub block: Block, + } +} + +ast_struct! { + /// An await expression: `fut.await`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ExprAwait #full { + pub attrs: Vec<Attribute>, + pub base: Box<Expr>, + pub dot_token: Token![.], + pub await_token: token::Await, + } +} + +ast_struct! 
{ + /// A binary operation: `a + b`, `a * b`. + /// + /// *This type is available only if Syn is built with the `"derive"` or + /// `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct ExprBinary { + pub attrs: Vec<Attribute>, + pub left: Box<Expr>, + pub op: BinOp, + pub right: Box<Expr>, + } +} + +ast_struct! { + /// A blocked scope: `{ ... }`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ExprBlock #full { + pub attrs: Vec<Attribute>, + pub label: Option<Label>, + pub block: Block, + } +} + +ast_struct! { + /// A box expression: `box f`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ExprBox #full { + pub attrs: Vec<Attribute>, + pub box_token: Token![box], + pub expr: Box<Expr>, + } +} + +ast_struct! { + /// A `break`, with an optional label to break and an optional + /// expression. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ExprBreak #full { + pub attrs: Vec<Attribute>, + pub break_token: Token![break], + pub label: Option<Lifetime>, + pub expr: Option<Box<Expr>>, + } +} + +ast_struct! { + /// A function call expression: `invoke(a, b)`. + /// + /// *This type is available only if Syn is built with the `"derive"` or + /// `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct ExprCall { + pub attrs: Vec<Attribute>, + pub func: Box<Expr>, + pub paren_token: token::Paren, + pub args: Punctuated<Expr, Token![,]>, + } +} + +ast_struct! { + /// A cast expression: `foo as f64`. + /// + /// *This type is available only if Syn is built with the `"derive"` or + /// `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct ExprCast { + pub attrs: Vec<Attribute>, + pub expr: Box<Expr>, + pub as_token: Token![as], + pub ty: Box<Type>, + } +} + +ast_struct! { + /// A closure expression: `|a, b| a + b`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ExprClosure #full { + pub attrs: Vec<Attribute>, + pub movability: Option<Token![static]>, + pub asyncness: Option<Token![async]>, + pub capture: Option<Token![move]>, + pub or1_token: Token![|], + pub inputs: Punctuated<Pat, Token![,]>, + pub or2_token: Token![|], + pub output: ReturnType, + pub body: Box<Expr>, + } +} + +ast_struct! { + /// A `continue`, with an optional label. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ExprContinue #full { + pub attrs: Vec<Attribute>, + pub continue_token: Token![continue], + pub label: Option<Lifetime>, + } +} + +ast_struct! { + /// Access of a named struct field (`obj.k`) or unnamed tuple struct + /// field (`obj.0`). + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct ExprField { + pub attrs: Vec<Attribute>, + pub base: Box<Expr>, + pub dot_token: Token![.], + pub member: Member, + } +} + +ast_struct! { + /// A for loop: `for pat in expr { ... }`. 
+ /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ExprForLoop #full { + pub attrs: Vec<Attribute>, + pub label: Option<Label>, + pub for_token: Token![for], + pub pat: Pat, + pub in_token: Token![in], + pub expr: Box<Expr>, + pub body: Block, + } +} + +ast_struct! { + /// An expression contained within invisible delimiters. + /// + /// This variant is important for faithfully representing the precedence + /// of expressions and is related to `None`-delimited spans in a + /// `TokenStream`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ExprGroup #full { + pub attrs: Vec<Attribute>, + pub group_token: token::Group, + pub expr: Box<Expr>, + } +} + +ast_struct! { + /// An `if` expression with an optional `else` block: `if expr { ... } + /// else { ... }`. + /// + /// The `else` branch expression may only be an `If` or `Block` + /// expression, not any of the other types of expression. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ExprIf #full { + pub attrs: Vec<Attribute>, + pub if_token: Token![if], + pub cond: Box<Expr>, + pub then_branch: Block, + pub else_branch: Option<(Token![else], Box<Expr>)>, + } +} + +ast_struct! { + /// A square bracketed indexing expression: `vector[2]`. + /// + /// *This type is available only if Syn is built with the `"derive"` or + /// `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct ExprIndex { + pub attrs: Vec<Attribute>, + pub expr: Box<Expr>, + pub bracket_token: token::Bracket, + pub index: Box<Expr>, + } +} + +ast_struct! { + /// A `let` guard: `let Some(x) = opt`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ExprLet #full { + pub attrs: Vec<Attribute>, + pub let_token: Token![let], + pub pat: Pat, + pub eq_token: Token![=], + pub expr: Box<Expr>, + } +} + +ast_struct! { + /// A literal in place of an expression: `1`, `"foo"`. + /// + /// *This type is available only if Syn is built with the `"derive"` or + /// `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct ExprLit { + pub attrs: Vec<Attribute>, + pub lit: Lit, + } +} + +ast_struct! { + /// Conditionless loop: `loop { ... }`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ExprLoop #full { + pub attrs: Vec<Attribute>, + pub label: Option<Label>, + pub loop_token: Token![loop], + pub body: Block, + } +} + +ast_struct! { + /// A macro invocation expression: `format!("{}", q)`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ExprMacro #full { + pub attrs: Vec<Attribute>, + pub mac: Macro, + } +} + +ast_struct! { + /// A `match` expression: `match n { Some(n) => {}, None => {} }`. 
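As a small usage sketch for the `ExprMatch` node documented here, assuming syn's default features and a hypothetical `arm_count` helper, a parsed `match` can be inspected through its `arms` vector.

```
use syn::{Expr, ExprMatch};

// Hypothetical helper: count the arms of a parsed `match` expression.
fn arm_count(src: &str) -> syn::Result<usize> {
    match syn::parse_str::<Expr>(src)? {
        Expr::Match(ExprMatch { arms, .. }) => Ok(arms.len()),
        other => Err(syn::Error::new_spanned(other, "expected a match expression")),
    }
}

// arm_count("match n { 0 => {} _ => {} }") returns Ok(2).
```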
+ /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ExprMatch #full { + pub attrs: Vec<Attribute>, + pub match_token: Token![match], + pub expr: Box<Expr>, + pub brace_token: token::Brace, + pub arms: Vec<Arm>, + } +} + +ast_struct! { + /// A method call expression: `x.foo::<T>(a, b)`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ExprMethodCall #full { + pub attrs: Vec<Attribute>, + pub receiver: Box<Expr>, + pub dot_token: Token![.], + pub method: Ident, + pub turbofish: Option<MethodTurbofish>, + pub paren_token: token::Paren, + pub args: Punctuated<Expr, Token![,]>, + } +} + +ast_struct! { + /// A parenthesized expression: `(a + b)`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct ExprParen { + pub attrs: Vec<Attribute>, + pub paren_token: token::Paren, + pub expr: Box<Expr>, + } +} + +ast_struct! { + /// A path like `std::mem::replace` possibly containing generic + /// parameters and a qualified self-type. + /// + /// A plain identifier like `x` is a path of length 1. + /// + /// *This type is available only if Syn is built with the `"derive"` or + /// `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct ExprPath { + pub attrs: Vec<Attribute>, + pub qself: Option<QSelf>, + pub path: Path, + } +} + +ast_struct! { + /// A range expression: `1..2`, `1..`, `..2`, `1..=2`, `..=2`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ExprRange #full { + pub attrs: Vec<Attribute>, + pub from: Option<Box<Expr>>, + pub limits: RangeLimits, + pub to: Option<Box<Expr>>, + } +} + +ast_struct! { + /// A referencing operation: `&a` or `&mut a`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ExprReference #full { + pub attrs: Vec<Attribute>, + pub and_token: Token![&], + pub raw: Reserved, + pub mutability: Option<Token![mut]>, + pub expr: Box<Expr>, + } +} + +ast_struct! { + /// An array literal constructed from one repeated element: `[0u8; N]`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ExprRepeat #full { + pub attrs: Vec<Attribute>, + pub bracket_token: token::Bracket, + pub expr: Box<Expr>, + pub semi_token: Token![;], + pub len: Box<Expr>, + } +} + +ast_struct! { + /// A `return`, with an optional value to be returned. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ExprReturn #full { + pub attrs: Vec<Attribute>, + pub return_token: Token![return], + pub expr: Option<Box<Expr>>, + } +} + +ast_struct! { + /// A struct literal expression: `Point { x: 1, y: 1 }`. + /// + /// The `rest` provides the value of the remaining fields as in `S { a: + /// 1, b: 1, ..rest }`. 
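Struct literal nodes like this are usually built from tokens rather than by filling in every field of `ExprStruct` by hand. A minimal sketch, assuming a hypothetical `point_literal` helper, a `Point` type in the target crate, and syn's default features; the `..Default::default()` tail ends up in the `rest` field described above.

```
use syn::{parse_quote, Expr};

// Hypothetical helper: build `Point { x: .., y: .., ..Default::default() }`
// as an expression node from plain values.
fn point_literal(x: u64, y: u64) -> Expr {
    parse_quote!(Point { x: #x, y: #y, ..Default::default() })
}
```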
+ /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ExprStruct #full { + pub attrs: Vec<Attribute>, + pub path: Path, + pub brace_token: token::Brace, + pub fields: Punctuated<FieldValue, Token![,]>, + pub dot2_token: Option<Token![..]>, + pub rest: Option<Box<Expr>>, + } +} + +ast_struct! { + /// A try-expression: `expr?`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ExprTry #full { + pub attrs: Vec<Attribute>, + pub expr: Box<Expr>, + pub question_token: Token![?], + } +} + +ast_struct! { + /// A try block: `try { ... }`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ExprTryBlock #full { + pub attrs: Vec<Attribute>, + pub try_token: Token![try], + pub block: Block, + } +} + +ast_struct! { + /// A tuple expression: `(a, b, c, d)`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ExprTuple #full { + pub attrs: Vec<Attribute>, + pub paren_token: token::Paren, + pub elems: Punctuated<Expr, Token![,]>, + } +} + +ast_struct! { + /// A type ascription expression: `foo: f64`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ExprType #full { + pub attrs: Vec<Attribute>, + pub expr: Box<Expr>, + pub colon_token: Token![:], + pub ty: Box<Type>, + } +} + +ast_struct! { + /// A unary operation: `!x`, `*x`. + /// + /// *This type is available only if Syn is built with the `"derive"` or + /// `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct ExprUnary { + pub attrs: Vec<Attribute>, + pub op: UnOp, + pub expr: Box<Expr>, + } +} + +ast_struct! { + /// An unsafe block: `unsafe { ... }`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ExprUnsafe #full { + pub attrs: Vec<Attribute>, + pub unsafe_token: Token![unsafe], + pub block: Block, + } +} + +ast_struct! { + /// A while loop: `while expr { ... }`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ExprWhile #full { + pub attrs: Vec<Attribute>, + pub label: Option<Label>, + pub while_token: Token![while], + pub cond: Box<Expr>, + pub body: Block, + } +} + +ast_struct! { + /// A yield expression: `yield expr`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ExprYield #full { + pub attrs: Vec<Attribute>, + pub yield_token: Token![yield], + pub expr: Option<Box<Expr>>, + } +} + +impl Expr { + #[cfg(all(feature = "parsing", not(syn_no_const_vec_new)))] + const DUMMY: Self = Expr::Path(ExprPath { + attrs: Vec::new(), + qself: None, + path: Path { + leading_colon: None, + segments: Punctuated::new(), + }, + }); + + #[cfg(all(feature = "parsing", feature = "full"))] + pub(crate) fn replace_attrs(&mut self, new: Vec<Attribute>) -> Vec<Attribute> { + match self { + Expr::Box(ExprBox { attrs, .. }) + | Expr::Array(ExprArray { attrs, .. }) + | Expr::Call(ExprCall { attrs, .. 
}) + | Expr::MethodCall(ExprMethodCall { attrs, .. }) + | Expr::Tuple(ExprTuple { attrs, .. }) + | Expr::Binary(ExprBinary { attrs, .. }) + | Expr::Unary(ExprUnary { attrs, .. }) + | Expr::Lit(ExprLit { attrs, .. }) + | Expr::Cast(ExprCast { attrs, .. }) + | Expr::Type(ExprType { attrs, .. }) + | Expr::Let(ExprLet { attrs, .. }) + | Expr::If(ExprIf { attrs, .. }) + | Expr::While(ExprWhile { attrs, .. }) + | Expr::ForLoop(ExprForLoop { attrs, .. }) + | Expr::Loop(ExprLoop { attrs, .. }) + | Expr::Match(ExprMatch { attrs, .. }) + | Expr::Closure(ExprClosure { attrs, .. }) + | Expr::Unsafe(ExprUnsafe { attrs, .. }) + | Expr::Block(ExprBlock { attrs, .. }) + | Expr::Assign(ExprAssign { attrs, .. }) + | Expr::AssignOp(ExprAssignOp { attrs, .. }) + | Expr::Field(ExprField { attrs, .. }) + | Expr::Index(ExprIndex { attrs, .. }) + | Expr::Range(ExprRange { attrs, .. }) + | Expr::Path(ExprPath { attrs, .. }) + | Expr::Reference(ExprReference { attrs, .. }) + | Expr::Break(ExprBreak { attrs, .. }) + | Expr::Continue(ExprContinue { attrs, .. }) + | Expr::Return(ExprReturn { attrs, .. }) + | Expr::Macro(ExprMacro { attrs, .. }) + | Expr::Struct(ExprStruct { attrs, .. }) + | Expr::Repeat(ExprRepeat { attrs, .. }) + | Expr::Paren(ExprParen { attrs, .. }) + | Expr::Group(ExprGroup { attrs, .. }) + | Expr::Try(ExprTry { attrs, .. }) + | Expr::Async(ExprAsync { attrs, .. }) + | Expr::Await(ExprAwait { attrs, .. }) + | Expr::TryBlock(ExprTryBlock { attrs, .. }) + | Expr::Yield(ExprYield { attrs, .. }) => mem::replace(attrs, new), + Expr::Verbatim(_) => Vec::new(), + + #[cfg(syn_no_non_exhaustive)] + _ => unreachable!(), + } + } +} + +ast_enum! { + /// A struct or tuple struct field accessed in a struct literal or field + /// expression. + /// + /// *This type is available only if Syn is built with the `"derive"` or `"full"` + /// feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub enum Member { + /// A named field like `self.x`. + Named(Ident), + /// An unnamed field like `self.0`. + Unnamed(Index), + } +} + +impl From<Ident> for Member { + fn from(ident: Ident) -> Member { + Member::Named(ident) + } +} + +impl From<Index> for Member { + fn from(index: Index) -> Member { + Member::Unnamed(index) + } +} + +impl From<usize> for Member { + fn from(index: usize) -> Member { + Member::Unnamed(Index::from(index)) + } +} + +impl Eq for Member {} + +impl PartialEq for Member { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (Member::Named(this), Member::Named(other)) => this == other, + (Member::Unnamed(this), Member::Unnamed(other)) => this == other, + _ => false, + } + } +} + +impl Hash for Member { + fn hash<H: Hasher>(&self, state: &mut H) { + match self { + Member::Named(m) => m.hash(state), + Member::Unnamed(m) => m.hash(state), + } + } +} + +#[cfg(feature = "printing")] +impl IdentFragment for Member { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + match self { + Member::Named(m) => Display::fmt(m, formatter), + Member::Unnamed(m) => Display::fmt(&m.index, formatter), + } + } + + fn span(&self) -> Option<Span> { + match self { + Member::Named(m) => Some(m.span()), + Member::Unnamed(m) => Some(m.span), + } + } +} + +ast_struct! { + /// The index of an unnamed tuple struct field. 
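A sketch of why the `Index` type defined just below exists, assuming a hypothetical derive helper and the `quote` crate as a dependency: interpolating a plain `usize` would print a suffixed literal such as `0usize`, which is not valid after `self.`, while `Index` prints the bare number.

```
use proc_macro2::TokenStream;
use quote::quote;
use syn::Index;

// Hypothetical derive helper: generate `0 + self.0 + self.1 + ...` for a
// tuple struct with `n_fields` fields. `Index` prints `0`, `1`, ... with no
// integer suffix, as field access syntax requires.
fn sum_tuple_fields(n_fields: usize) -> TokenStream {
    let indices = (0..n_fields).map(Index::from);
    quote! { 0 #( + self.#indices )* }
}
```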
+ /// + /// *This type is available only if Syn is built with the `"derive"` or `"full"` + /// feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct Index { + pub index: u32, + pub span: Span, + } +} + +impl From<usize> for Index { + fn from(index: usize) -> Index { + assert!(index < u32::max_value() as usize); + Index { + index: index as u32, + span: Span::call_site(), + } + } +} + +impl Eq for Index {} + +impl PartialEq for Index { + fn eq(&self, other: &Self) -> bool { + self.index == other.index + } +} + +impl Hash for Index { + fn hash<H: Hasher>(&self, state: &mut H) { + self.index.hash(state); + } +} + +#[cfg(feature = "printing")] +impl IdentFragment for Index { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + Display::fmt(&self.index, formatter) + } + + fn span(&self) -> Option<Span> { + Some(self.span) + } +} + +#[cfg(feature = "full")] +ast_struct! { + /// The `::<>` explicit type parameters passed to a method call: + /// `parse::<u64>()`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct MethodTurbofish { + pub colon2_token: Token![::], + pub lt_token: Token![<], + pub args: Punctuated<GenericMethodArgument, Token![,]>, + pub gt_token: Token![>], + } +} + +#[cfg(feature = "full")] +ast_enum! { + /// An individual generic argument to a method, like `T`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub enum GenericMethodArgument { + /// A type argument. + Type(Type), + /// A const expression. Must be inside of a block. + /// + /// NOTE: Identity expressions are represented as Type arguments, as + /// they are indistinguishable syntactically. + Const(Expr), + } +} + +#[cfg(feature = "full")] +ast_struct! { + /// A field-value pair in a struct literal. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct FieldValue { + /// Attributes tagged on the field. + pub attrs: Vec<Attribute>, + + /// Name or index of the field. + pub member: Member, + + /// The colon in `Struct { x: x }`. If written in shorthand like + /// `Struct { x }`, there is no colon. + pub colon_token: Option<Token![:]>, + + /// Value of the field. + pub expr: Expr, + } +} + +#[cfg(feature = "full")] +ast_struct! { + /// A lifetime labeling a `for`, `while`, or `loop`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct Label { + pub name: Lifetime, + pub colon_token: Token![:], + } +} + +#[cfg(feature = "full")] +ast_struct! { + /// One arm of a `match` expression: `0...10 => { return true; }`. + /// + /// As in: + /// + /// ``` + /// # fn f() -> bool { + /// # let n = 0; + /// match n { + /// 0...10 => { + /// return true; + /// } + /// // ... + /// # _ => {} + /// } + /// # false + /// # } + /// ``` + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct Arm { + pub attrs: Vec<Attribute>, + pub pat: Pat, + pub guard: Option<(Token![if], Box<Expr>)>, + pub fat_arrow_token: Token![=>], + pub body: Box<Expr>, + pub comma: Option<Token![,]>, + } +} + +#[cfg(feature = "full")] +ast_enum! { + /// Limit types of a range, inclusive or exclusive. 
+ /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub enum RangeLimits { + /// Inclusive at the beginning, exclusive at the end. + HalfOpen(Token![..]), + /// Inclusive at the beginning and end. + Closed(Token![..=]), + } +} + +#[cfg(any(feature = "parsing", feature = "printing"))] +#[cfg(feature = "full")] +pub(crate) fn requires_terminator(expr: &Expr) -> bool { + // see https://github.com/rust-lang/rust/blob/2679c38fc/src/librustc_ast/util/classify.rs#L7-L25 + match *expr { + Expr::Unsafe(..) + | Expr::Block(..) + | Expr::If(..) + | Expr::Match(..) + | Expr::While(..) + | Expr::Loop(..) + | Expr::ForLoop(..) + | Expr::Async(..) + | Expr::TryBlock(..) => false, + _ => true, + } +} + +#[cfg(feature = "parsing")] +pub(crate) mod parsing { + use super::*; + #[cfg(feature = "full")] + use crate::parse::ParseBuffer; + use crate::parse::{Parse, ParseStream, Result}; + use crate::path; + #[cfg(feature = "full")] + use proc_macro2::TokenTree; + use std::cmp::Ordering; + + crate::custom_keyword!(raw); + + // When we're parsing expressions which occur before blocks, like in an if + // statement's condition, we cannot parse a struct literal. + // + // Struct literals are ambiguous in certain positions + // https://github.com/rust-lang/rfcs/pull/92 + pub struct AllowStruct(bool); + + enum Precedence { + Any, + Assign, + Range, + Or, + And, + Compare, + BitOr, + BitXor, + BitAnd, + Shift, + Arithmetic, + Term, + Cast, + } + + impl Precedence { + fn of(op: &BinOp) -> Self { + match *op { + BinOp::Add(_) | BinOp::Sub(_) => Precedence::Arithmetic, + BinOp::Mul(_) | BinOp::Div(_) | BinOp::Rem(_) => Precedence::Term, + BinOp::And(_) => Precedence::And, + BinOp::Or(_) => Precedence::Or, + BinOp::BitXor(_) => Precedence::BitXor, + BinOp::BitAnd(_) => Precedence::BitAnd, + BinOp::BitOr(_) => Precedence::BitOr, + BinOp::Shl(_) | BinOp::Shr(_) => Precedence::Shift, + BinOp::Eq(_) + | BinOp::Lt(_) + | BinOp::Le(_) + | BinOp::Ne(_) + | BinOp::Ge(_) + | BinOp::Gt(_) => Precedence::Compare, + BinOp::AddEq(_) + | BinOp::SubEq(_) + | BinOp::MulEq(_) + | BinOp::DivEq(_) + | BinOp::RemEq(_) + | BinOp::BitXorEq(_) + | BinOp::BitAndEq(_) + | BinOp::BitOrEq(_) + | BinOp::ShlEq(_) + | BinOp::ShrEq(_) => Precedence::Assign, + } + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for Expr { + fn parse(input: ParseStream) -> Result<Self> { + ambiguous_expr(input, AllowStruct(true)) + } + } + + impl Expr { + /// An alternative to the primary `Expr::parse` parser (from the + /// [`Parse`] trait) for ambiguous syntactic positions in which a + /// trailing brace should not be taken as part of the expression. + /// + /// Rust grammar has an ambiguity where braces sometimes turn a path + /// expression into a struct initialization and sometimes do not. In the + /// following code, the expression `S {}` is one expression. Presumably + /// there is an empty struct `struct S {}` defined somewhere which it is + /// instantiating. + /// + /// ``` + /// # struct S; + /// # impl std::ops::Deref for S { + /// # type Target = bool; + /// # fn deref(&self) -> &Self::Target { + /// # &true + /// # } + /// # } + /// let _ = *S {}; + /// + /// // parsed by rustc as: `*(S {})` + /// ``` + /// + /// We would want to parse the above using `Expr::parse` after the `=` + /// token. + /// + /// But in the following, `S {}` is *not* a struct init expression. 
+ /// + /// ``` + /// # const S: &bool = &true; + /// if *S {} {} + /// + /// // parsed by rustc as: + /// // + /// // if (*S) { + /// // /* empty block */ + /// // } + /// // { + /// // /* another empty block */ + /// // } + /// ``` + /// + /// For that reason we would want to parse if-conditions using + /// `Expr::parse_without_eager_brace` after the `if` token. Same for + /// similar syntactic positions such as the condition expr after a + /// `while` token or the expr at the top of a `match`. + /// + /// The Rust grammar's choices around which way this ambiguity is + /// resolved at various syntactic positions is fairly arbitrary. Really + /// either parse behavior could work in most positions, and language + /// designers just decide each case based on which is more likely to be + /// what the programmer had in mind most of the time. + /// + /// ``` + /// # struct S; + /// # fn doc() -> S { + /// if return S {} {} + /// # unreachable!() + /// # } + /// + /// // parsed by rustc as: + /// // + /// // if (return (S {})) { + /// // } + /// // + /// // but could equally well have been this other arbitrary choice: + /// // + /// // if (return S) { + /// // } + /// // {} + /// ``` + /// + /// Note the grammar ambiguity on trailing braces is distinct from + /// precedence and is not captured by assigning a precedence level to + /// the braced struct init expr in relation to other operators. This can + /// be illustrated by `return 0..S {}` vs `match 0..S {}`. The former + /// parses as `return (0..(S {}))` implying tighter precedence for + /// struct init than `..`, while the latter parses as `match (0..S) {}` + /// implying tighter precedence for `..` than struct init, a + /// contradiction. + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(all(feature = "full", feature = "parsing"))))] + pub fn parse_without_eager_brace(input: ParseStream) -> Result<Expr> { + ambiguous_expr(input, AllowStruct(false)) + } + } + + impl Copy for AllowStruct {} + + impl Clone for AllowStruct { + fn clone(&self) -> Self { + *self + } + } + + impl Copy for Precedence {} + + impl Clone for Precedence { + fn clone(&self) -> Self { + *self + } + } + + impl PartialEq for Precedence { + fn eq(&self, other: &Self) -> bool { + *self as u8 == *other as u8 + } + } + + impl PartialOrd for Precedence { + fn partial_cmp(&self, other: &Self) -> Option<Ordering> { + let this = *self as u8; + let other = *other as u8; + Some(this.cmp(&other)) + } + } + + #[cfg(feature = "full")] + fn parse_expr( + input: ParseStream, + mut lhs: Expr, + allow_struct: AllowStruct, + base: Precedence, + ) -> Result<Expr> { + loop { + if input + .fork() + .parse::<BinOp>() + .ok() + .map_or(false, |op| Precedence::of(&op) >= base) + { + let op: BinOp = input.parse()?; + let precedence = Precedence::of(&op); + let mut rhs = unary_expr(input, allow_struct)?; + loop { + let next = peek_precedence(input); + if next > precedence || next == precedence && precedence == Precedence::Assign { + rhs = parse_expr(input, rhs, allow_struct, next)?; + } else { + break; + } + } + lhs = if precedence == Precedence::Assign { + Expr::AssignOp(ExprAssignOp { + attrs: Vec::new(), + left: Box::new(lhs), + op, + right: Box::new(rhs), + }) + } else { + Expr::Binary(ExprBinary { + attrs: Vec::new(), + left: Box::new(lhs), + op, + right: Box::new(rhs), + }) + }; + } else if Precedence::Assign >= base + && input.peek(Token![=]) + && !input.peek(Token![==]) + && !input.peek(Token![=>]) + { + let eq_token: Token![=] = input.parse()?; + let mut rhs = unary_expr(input, 
allow_struct)?; + loop { + let next = peek_precedence(input); + if next >= Precedence::Assign { + rhs = parse_expr(input, rhs, allow_struct, next)?; + } else { + break; + } + } + lhs = Expr::Assign(ExprAssign { + attrs: Vec::new(), + left: Box::new(lhs), + eq_token, + right: Box::new(rhs), + }); + } else if Precedence::Range >= base && input.peek(Token![..]) { + let limits: RangeLimits = input.parse()?; + let rhs = if input.is_empty() + || input.peek(Token![,]) + || input.peek(Token![;]) + || input.peek(Token![.]) && !input.peek(Token![..]) + || !allow_struct.0 && input.peek(token::Brace) + { + None + } else { + let mut rhs = unary_expr(input, allow_struct)?; + loop { + let next = peek_precedence(input); + if next > Precedence::Range { + rhs = parse_expr(input, rhs, allow_struct, next)?; + } else { + break; + } + } + Some(rhs) + }; + lhs = Expr::Range(ExprRange { + attrs: Vec::new(), + from: Some(Box::new(lhs)), + limits, + to: rhs.map(Box::new), + }); + } else if Precedence::Cast >= base && input.peek(Token![as]) { + let as_token: Token![as] = input.parse()?; + let allow_plus = false; + let allow_group_generic = false; + let ty = ty::parsing::ambig_ty(input, allow_plus, allow_group_generic)?; + check_cast(input)?; + lhs = Expr::Cast(ExprCast { + attrs: Vec::new(), + expr: Box::new(lhs), + as_token, + ty: Box::new(ty), + }); + } else if Precedence::Cast >= base && input.peek(Token![:]) && !input.peek(Token![::]) { + let colon_token: Token![:] = input.parse()?; + let allow_plus = false; + let allow_group_generic = false; + let ty = ty::parsing::ambig_ty(input, allow_plus, allow_group_generic)?; + check_cast(input)?; + lhs = Expr::Type(ExprType { + attrs: Vec::new(), + expr: Box::new(lhs), + colon_token, + ty: Box::new(ty), + }); + } else { + break; + } + } + Ok(lhs) + } + + #[cfg(not(feature = "full"))] + fn parse_expr( + input: ParseStream, + mut lhs: Expr, + allow_struct: AllowStruct, + base: Precedence, + ) -> Result<Expr> { + loop { + if input + .fork() + .parse::<BinOp>() + .ok() + .map_or(false, |op| Precedence::of(&op) >= base) + { + let op: BinOp = input.parse()?; + let precedence = Precedence::of(&op); + let mut rhs = unary_expr(input, allow_struct)?; + loop { + let next = peek_precedence(input); + if next > precedence || next == precedence && precedence == Precedence::Assign { + rhs = parse_expr(input, rhs, allow_struct, next)?; + } else { + break; + } + } + lhs = Expr::Binary(ExprBinary { + attrs: Vec::new(), + left: Box::new(lhs), + op, + right: Box::new(rhs), + }); + } else if Precedence::Cast >= base && input.peek(Token![as]) { + let as_token: Token![as] = input.parse()?; + let allow_plus = false; + let allow_group_generic = false; + let ty = ty::parsing::ambig_ty(input, allow_plus, allow_group_generic)?; + check_cast(input)?; + lhs = Expr::Cast(ExprCast { + attrs: Vec::new(), + expr: Box::new(lhs), + as_token, + ty: Box::new(ty), + }); + } else { + break; + } + } + Ok(lhs) + } + + fn peek_precedence(input: ParseStream) -> Precedence { + if let Ok(op) = input.fork().parse() { + Precedence::of(&op) + } else if input.peek(Token![=]) && !input.peek(Token![=>]) { + Precedence::Assign + } else if input.peek(Token![..]) { + Precedence::Range + } else if input.peek(Token![as]) + || cfg!(feature = "full") && input.peek(Token![:]) && !input.peek(Token![::]) + { + Precedence::Cast + } else { + Precedence::Any + } + } + + // Parse an arbitrary expression. 
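A quick way to see the precedence climbing above in action, assuming syn's default features: parse a string outside of any proc macro and check the shape of the resulting tree.

```
use syn::{BinOp, Expr, ExprBinary};

fn main() -> syn::Result<()> {
    // `*` binds tighter than `+`, so the tree comes out as `1 + (2 * 3)`.
    let expr: Expr = syn::parse_str("1 + 2 * 3")?;
    let top = match expr {
        Expr::Binary(binary) => binary,
        _ => panic!("`+` is the lowest-precedence operator here"),
    };
    assert!(matches!(top.op, BinOp::Add(_)));
    assert!(matches!(&*top.right, Expr::Binary(ExprBinary { op: BinOp::Mul(_), .. })));
    Ok(())
}
```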
+ fn ambiguous_expr(input: ParseStream, allow_struct: AllowStruct) -> Result<Expr> { + let lhs = unary_expr(input, allow_struct)?; + parse_expr(input, lhs, allow_struct, Precedence::Any) + } + + #[cfg(feature = "full")] + fn expr_attrs(input: ParseStream) -> Result<Vec<Attribute>> { + let mut attrs = Vec::new(); + loop { + if input.peek(token::Group) { + let ahead = input.fork(); + let group = crate::group::parse_group(&ahead)?; + if !group.content.peek(Token![#]) || group.content.peek2(Token![!]) { + break; + } + let attr = group.content.call(attr::parsing::single_parse_outer)?; + if !group.content.is_empty() { + break; + } + attrs.push(attr); + } else if input.peek(Token![#]) { + attrs.push(input.call(attr::parsing::single_parse_outer)?); + } else { + break; + } + } + Ok(attrs) + } + + // <UnOp> <trailer> + // & <trailer> + // &mut <trailer> + // box <trailer> + #[cfg(feature = "full")] + fn unary_expr(input: ParseStream, allow_struct: AllowStruct) -> Result<Expr> { + let begin = input.fork(); + let attrs = input.call(expr_attrs)?; + if input.peek(Token![&]) { + let and_token: Token![&] = input.parse()?; + let raw: Option<raw> = + if input.peek(raw) && (input.peek2(Token![mut]) || input.peek2(Token![const])) { + Some(input.parse()?) + } else { + None + }; + let mutability: Option<Token![mut]> = input.parse()?; + if raw.is_some() && mutability.is_none() { + input.parse::<Token![const]>()?; + } + let expr = Box::new(unary_expr(input, allow_struct)?); + if raw.is_some() { + Ok(Expr::Verbatim(verbatim::between(begin, input))) + } else { + Ok(Expr::Reference(ExprReference { + attrs, + and_token, + raw: Reserved::default(), + mutability, + expr, + })) + } + } else if input.peek(Token![box]) { + expr_box(input, attrs, allow_struct).map(Expr::Box) + } else if input.peek(Token![*]) || input.peek(Token![!]) || input.peek(Token![-]) { + expr_unary(input, attrs, allow_struct).map(Expr::Unary) + } else { + trailer_expr(begin, attrs, input, allow_struct) + } + } + + #[cfg(not(feature = "full"))] + fn unary_expr(input: ParseStream, allow_struct: AllowStruct) -> Result<Expr> { + if input.peek(Token![*]) || input.peek(Token![!]) || input.peek(Token![-]) { + Ok(Expr::Unary(ExprUnary { + attrs: Vec::new(), + op: input.parse()?, + expr: Box::new(unary_expr(input, allow_struct)?), + })) + } else { + trailer_expr(input, allow_struct) + } + } + + // <atom> (..<args>) ... + // <atom> . <ident> (..<args>) ... + // <atom> . <ident> ... + // <atom> . <lit> ... + // <atom> [ <expr> ] ... + // <atom> ? ... + #[cfg(feature = "full")] + fn trailer_expr( + begin: ParseBuffer, + mut attrs: Vec<Attribute>, + input: ParseStream, + allow_struct: AllowStruct, + ) -> Result<Expr> { + let atom = atom_expr(input, allow_struct)?; + let mut e = trailer_helper(input, atom)?; + + if let Expr::Verbatim(tokens) = &mut e { + *tokens = verbatim::between(begin, input); + } else { + let inner_attrs = e.replace_attrs(Vec::new()); + attrs.extend(inner_attrs); + e.replace_attrs(attrs); + } + + Ok(e) + } + + #[cfg(feature = "full")] + fn trailer_helper(input: ParseStream, mut e: Expr) -> Result<Expr> { + loop { + if input.peek(token::Paren) { + let content; + e = Expr::Call(ExprCall { + attrs: Vec::new(), + func: Box::new(e), + paren_token: parenthesized!(content in input), + args: content.parse_terminated(Expr::parse)?, + }); + } else if input.peek(Token![.]) + && !input.peek(Token![..]) + && match e { + Expr::Range(_) => false, + _ => true, + } + { + let mut dot_token: Token![.] 
= input.parse()?; + + let await_token: Option<token::Await> = input.parse()?; + if let Some(await_token) = await_token { + e = Expr::Await(ExprAwait { + attrs: Vec::new(), + base: Box::new(e), + dot_token, + await_token, + }); + continue; + } + + let float_token: Option<LitFloat> = input.parse()?; + if let Some(float_token) = float_token { + if multi_index(&mut e, &mut dot_token, float_token)? { + continue; + } + } + + let member: Member = input.parse()?; + let turbofish = if member.is_named() && input.peek(Token![::]) { + Some(input.parse::<MethodTurbofish>()?) + } else { + None + }; + + if turbofish.is_some() || input.peek(token::Paren) { + if let Member::Named(method) = member { + let content; + e = Expr::MethodCall(ExprMethodCall { + attrs: Vec::new(), + receiver: Box::new(e), + dot_token, + method, + turbofish, + paren_token: parenthesized!(content in input), + args: content.parse_terminated(Expr::parse)?, + }); + continue; + } + } + + e = Expr::Field(ExprField { + attrs: Vec::new(), + base: Box::new(e), + dot_token, + member, + }); + } else if input.peek(token::Bracket) { + let content; + e = Expr::Index(ExprIndex { + attrs: Vec::new(), + expr: Box::new(e), + bracket_token: bracketed!(content in input), + index: content.parse()?, + }); + } else if input.peek(Token![?]) { + e = Expr::Try(ExprTry { + attrs: Vec::new(), + expr: Box::new(e), + question_token: input.parse()?, + }); + } else { + break; + } + } + Ok(e) + } + + #[cfg(not(feature = "full"))] + fn trailer_expr(input: ParseStream, allow_struct: AllowStruct) -> Result<Expr> { + let mut e = atom_expr(input, allow_struct)?; + + loop { + if input.peek(token::Paren) { + let content; + e = Expr::Call(ExprCall { + attrs: Vec::new(), + func: Box::new(e), + paren_token: parenthesized!(content in input), + args: content.parse_terminated(Expr::parse)?, + }); + } else if input.peek(Token![.]) && !input.peek(Token![..]) && !input.peek2(token::Await) + { + let mut dot_token: Token![.] = input.parse()?; + let float_token: Option<LitFloat> = input.parse()?; + if let Some(float_token) = float_token { + if multi_index(&mut e, &mut dot_token, float_token)? { + continue; + } + } + e = Expr::Field(ExprField { + attrs: Vec::new(), + base: Box::new(e), + dot_token, + member: input.parse()?, + }); + } else if input.peek(token::Bracket) { + let content; + e = Expr::Index(ExprIndex { + attrs: Vec::new(), + expr: Box::new(e), + bracket_token: bracketed!(content in input), + index: content.parse()?, + }); + } else { + break; + } + } + + Ok(e) + } + + // Parse all atomic expressions which don't have to worry about precedence + // interactions, as they are fully contained. 
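The trailer loops above stack postfix operators (calls, field and method access, `.await`, indexing, `?`) onto whatever atom was parsed first. A sketch of the resulting nesting, not part of the vendored source and assuming a crate that enables syn's "full" feature:

    use syn::Expr;

    fn main() {
        // Trailers are applied left to right: atom `a`, then `.b(1)`, then `?`,
        // then `[2]`, so the indexing ends up as the outermost node.
        let e: Expr = syn::parse_str("a.b(1)?[2]").unwrap();
        if let Expr::Index(index) = e {
            assert!(matches!(*index.expr, Expr::Try(_)));
        } else {
            panic!("expected an index expression at the root");
        }
    }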
+ #[cfg(feature = "full")] + fn atom_expr(input: ParseStream, allow_struct: AllowStruct) -> Result<Expr> { + if input.peek(token::Group) + && !input.peek2(Token![::]) + && !input.peek2(Token![!]) + && !input.peek2(token::Brace) + { + input.call(expr_group).map(Expr::Group) + } else if input.peek(Lit) { + input.parse().map(Expr::Lit) + } else if input.peek(Token![async]) + && (input.peek2(token::Brace) || input.peek2(Token![move]) && input.peek3(token::Brace)) + { + input.parse().map(Expr::Async) + } else if input.peek(Token![try]) && input.peek2(token::Brace) { + input.parse().map(Expr::TryBlock) + } else if input.peek(Token![|]) + || input.peek(Token![async]) && (input.peek2(Token![|]) || input.peek2(Token![move])) + || input.peek(Token![static]) + || input.peek(Token![move]) + { + expr_closure(input, allow_struct).map(Expr::Closure) + } else if input.peek(Token![for]) + && input.peek2(Token![<]) + && (input.peek3(Lifetime) || input.peek3(Token![>])) + { + let begin = input.fork(); + input.parse::<BoundLifetimes>()?; + expr_closure(input, allow_struct)?; + let verbatim = verbatim::between(begin, input); + Ok(Expr::Verbatim(verbatim)) + } else if input.peek(Ident) + || input.peek(Token![::]) + || input.peek(Token![<]) + || input.peek(Token![self]) + || input.peek(Token![Self]) + || input.peek(Token![super]) + || input.peek(Token![crate]) + { + path_or_macro_or_struct(input, allow_struct) + } else if input.peek(token::Paren) { + paren_or_tuple(input) + } else if input.peek(Token![break]) { + expr_break(input, allow_struct).map(Expr::Break) + } else if input.peek(Token![continue]) { + input.parse().map(Expr::Continue) + } else if input.peek(Token![return]) { + expr_ret(input, allow_struct).map(Expr::Return) + } else if input.peek(token::Bracket) { + array_or_repeat(input) + } else if input.peek(Token![let]) { + input.parse().map(Expr::Let) + } else if input.peek(Token![if]) { + input.parse().map(Expr::If) + } else if input.peek(Token![while]) { + input.parse().map(Expr::While) + } else if input.peek(Token![for]) { + input.parse().map(Expr::ForLoop) + } else if input.peek(Token![loop]) { + input.parse().map(Expr::Loop) + } else if input.peek(Token![match]) { + input.parse().map(Expr::Match) + } else if input.peek(Token![yield]) { + input.parse().map(Expr::Yield) + } else if input.peek(Token![unsafe]) { + input.parse().map(Expr::Unsafe) + } else if input.peek(Token![const]) { + input.call(expr_const).map(Expr::Verbatim) + } else if input.peek(token::Brace) { + input.parse().map(Expr::Block) + } else if input.peek(Token![..]) { + expr_range(input, allow_struct).map(Expr::Range) + } else if input.peek(Token![_]) { + Ok(Expr::Verbatim(TokenStream::from( + input.parse::<TokenTree>()?, + ))) + } else if input.peek(Lifetime) { + let the_label: Label = input.parse()?; + let mut expr = if input.peek(Token![while]) { + Expr::While(input.parse()?) + } else if input.peek(Token![for]) { + Expr::ForLoop(input.parse()?) + } else if input.peek(Token![loop]) { + Expr::Loop(input.parse()?) + } else if input.peek(token::Brace) { + Expr::Block(input.parse()?) + } else { + return Err(input.error("expected loop or block expression")); + }; + match &mut expr { + Expr::While(ExprWhile { label, .. }) + | Expr::ForLoop(ExprForLoop { label, .. }) + | Expr::Loop(ExprLoop { label, .. }) + | Expr::Block(ExprBlock { label, .. 
}) => *label = Some(the_label), + _ => unreachable!(), + } + Ok(expr) + } else { + Err(input.error("expected expression")) + } + } + + #[cfg(not(feature = "full"))] + fn atom_expr(input: ParseStream, _allow_struct: AllowStruct) -> Result<Expr> { + if input.peek(Lit) { + input.parse().map(Expr::Lit) + } else if input.peek(token::Paren) { + input.call(expr_paren).map(Expr::Paren) + } else if input.peek(Ident) + || input.peek(Token![::]) + || input.peek(Token![<]) + || input.peek(Token![self]) + || input.peek(Token![Self]) + || input.peek(Token![super]) + || input.peek(Token![crate]) + { + input.parse().map(Expr::Path) + } else { + Err(input.error("unsupported expression; enable syn's features=[\"full\"]")) + } + } + + #[cfg(feature = "full")] + fn path_or_macro_or_struct(input: ParseStream, allow_struct: AllowStruct) -> Result<Expr> { + let begin = input.fork(); + let expr: ExprPath = input.parse()?; + + if expr.qself.is_none() && input.peek(Token![!]) && !input.peek(Token![!=]) { + let mut contains_arguments = false; + for segment in &expr.path.segments { + match segment.arguments { + PathArguments::None => {} + PathArguments::AngleBracketed(_) | PathArguments::Parenthesized(_) => { + contains_arguments = true; + } + } + } + + if !contains_arguments { + let bang_token: Token![!] = input.parse()?; + let (delimiter, tokens) = mac::parse_delimiter(input)?; + return Ok(Expr::Macro(ExprMacro { + attrs: Vec::new(), + mac: Macro { + path: expr.path, + bang_token, + delimiter, + tokens, + }, + })); + } + } + + if allow_struct.0 && input.peek(token::Brace) { + let expr_struct = expr_struct_helper(input, expr.path)?; + if expr.qself.is_some() { + Ok(Expr::Verbatim(verbatim::between(begin, input))) + } else { + Ok(Expr::Struct(expr_struct)) + } + } else { + Ok(Expr::Path(expr)) + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ExprMacro { + fn parse(input: ParseStream) -> Result<Self> { + Ok(ExprMacro { + attrs: Vec::new(), + mac: input.parse()?, + }) + } + } + + #[cfg(feature = "full")] + fn paren_or_tuple(input: ParseStream) -> Result<Expr> { + let content; + let paren_token = parenthesized!(content in input); + if content.is_empty() { + return Ok(Expr::Tuple(ExprTuple { + attrs: Vec::new(), + paren_token, + elems: Punctuated::new(), + })); + } + + let first: Expr = content.parse()?; + if content.is_empty() { + return Ok(Expr::Paren(ExprParen { + attrs: Vec::new(), + paren_token, + expr: Box::new(first), + })); + } + + let mut elems = Punctuated::new(); + elems.push_value(first); + while !content.is_empty() { + let punct = content.parse()?; + elems.push_punct(punct); + if content.is_empty() { + break; + } + let value = content.parse()?; + elems.push_value(value); + } + Ok(Expr::Tuple(ExprTuple { + attrs: Vec::new(), + paren_token, + elems, + })) + } + + #[cfg(feature = "full")] + fn array_or_repeat(input: ParseStream) -> Result<Expr> { + let content; + let bracket_token = bracketed!(content in input); + if content.is_empty() { + return Ok(Expr::Array(ExprArray { + attrs: Vec::new(), + bracket_token, + elems: Punctuated::new(), + })); + } + + let first: Expr = content.parse()?; + if content.is_empty() || content.peek(Token![,]) { + let mut elems = Punctuated::new(); + elems.push_value(first); + while !content.is_empty() { + let punct = content.parse()?; + elems.push_punct(punct); + if content.is_empty() { + break; + } + let value = content.parse()?; + elems.push_value(value); + } + Ok(Expr::Array(ExprArray { + attrs: Vec::new(), + 
bracket_token, + elems, + })) + } else if content.peek(Token![;]) { + let semi_token: Token![;] = content.parse()?; + let len: Expr = content.parse()?; + Ok(Expr::Repeat(ExprRepeat { + attrs: Vec::new(), + bracket_token, + expr: Box::new(first), + semi_token, + len: Box::new(len), + })) + } else { + Err(content.error("expected `,` or `;`")) + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ExprArray { + fn parse(input: ParseStream) -> Result<Self> { + let content; + let bracket_token = bracketed!(content in input); + let mut elems = Punctuated::new(); + + while !content.is_empty() { + let first: Expr = content.parse()?; + elems.push_value(first); + if content.is_empty() { + break; + } + let punct = content.parse()?; + elems.push_punct(punct); + } + + Ok(ExprArray { + attrs: Vec::new(), + bracket_token, + elems, + }) + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ExprRepeat { + fn parse(input: ParseStream) -> Result<Self> { + let content; + Ok(ExprRepeat { + bracket_token: bracketed!(content in input), + attrs: Vec::new(), + expr: content.parse()?, + semi_token: content.parse()?, + len: content.parse()?, + }) + } + } + + #[cfg(feature = "full")] + pub(crate) fn expr_early(input: ParseStream) -> Result<Expr> { + let mut attrs = input.call(expr_attrs)?; + let mut expr = if input.peek(Token![if]) { + Expr::If(input.parse()?) + } else if input.peek(Token![while]) { + Expr::While(input.parse()?) + } else if input.peek(Token![for]) + && !(input.peek2(Token![<]) && (input.peek3(Lifetime) || input.peek3(Token![>]))) + { + Expr::ForLoop(input.parse()?) + } else if input.peek(Token![loop]) { + Expr::Loop(input.parse()?) + } else if input.peek(Token![match]) { + Expr::Match(input.parse()?) + } else if input.peek(Token![try]) && input.peek2(token::Brace) { + Expr::TryBlock(input.parse()?) + } else if input.peek(Token![unsafe]) { + Expr::Unsafe(input.parse()?) + } else if input.peek(Token![const]) { + Expr::Verbatim(input.call(expr_const)?) + } else if input.peek(token::Brace) { + Expr::Block(input.parse()?) 
+ } else { + let allow_struct = AllowStruct(true); + let mut expr = unary_expr(input, allow_struct)?; + + attrs.extend(expr.replace_attrs(Vec::new())); + expr.replace_attrs(attrs); + + return parse_expr(input, expr, allow_struct, Precedence::Any); + }; + + if input.peek(Token![.]) && !input.peek(Token![..]) || input.peek(Token![?]) { + expr = trailer_helper(input, expr)?; + + attrs.extend(expr.replace_attrs(Vec::new())); + expr.replace_attrs(attrs); + + let allow_struct = AllowStruct(true); + return parse_expr(input, expr, allow_struct, Precedence::Any); + } + + attrs.extend(expr.replace_attrs(Vec::new())); + expr.replace_attrs(attrs); + Ok(expr) + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ExprLit { + fn parse(input: ParseStream) -> Result<Self> { + Ok(ExprLit { + attrs: Vec::new(), + lit: input.parse()?, + }) + } + } + + #[cfg(feature = "full")] + fn expr_group(input: ParseStream) -> Result<ExprGroup> { + let group = crate::group::parse_group(input)?; + Ok(ExprGroup { + attrs: Vec::new(), + group_token: group.token, + expr: group.content.parse()?, + }) + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ExprParen { + fn parse(input: ParseStream) -> Result<Self> { + expr_paren(input) + } + } + + fn expr_paren(input: ParseStream) -> Result<ExprParen> { + let content; + Ok(ExprParen { + attrs: Vec::new(), + paren_token: parenthesized!(content in input), + expr: content.parse()?, + }) + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for GenericMethodArgument { + fn parse(input: ParseStream) -> Result<Self> { + if input.peek(Lit) { + let lit = input.parse()?; + return Ok(GenericMethodArgument::Const(Expr::Lit(lit))); + } + + if input.peek(token::Brace) { + let block: ExprBlock = input.parse()?; + return Ok(GenericMethodArgument::Const(Expr::Block(block))); + } + + input.parse().map(GenericMethodArgument::Type) + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for MethodTurbofish { + fn parse(input: ParseStream) -> Result<Self> { + Ok(MethodTurbofish { + colon2_token: input.parse()?, + lt_token: input.parse()?, + args: { + let mut args = Punctuated::new(); + loop { + if input.peek(Token![>]) { + break; + } + let value: GenericMethodArgument = input.parse()?; + args.push_value(value); + if input.peek(Token![>]) { + break; + } + let punct = input.parse()?; + args.push_punct(punct); + } + args + }, + gt_token: input.parse()?, + }) + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ExprLet { + fn parse(input: ParseStream) -> Result<Self> { + Ok(ExprLet { + attrs: Vec::new(), + let_token: input.parse()?, + pat: pat::parsing::multi_pat_with_leading_vert(input)?, + eq_token: input.parse()?, + expr: Box::new({ + let allow_struct = AllowStruct(false); + let lhs = unary_expr(input, allow_struct)?; + parse_expr(input, lhs, allow_struct, Precedence::Compare)? + }), + }) + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ExprIf { + fn parse(input: ParseStream) -> Result<Self> { + let attrs = input.call(Attribute::parse_outer)?; + Ok(ExprIf { + attrs, + if_token: input.parse()?, + cond: Box::new(input.call(Expr::parse_without_eager_brace)?), + then_branch: input.parse()?, + else_branch: { + if input.peek(Token![else]) { + Some(input.call(else_block)?) 
+ } else { + None + } + }, + }) + } + } + + #[cfg(feature = "full")] + fn else_block(input: ParseStream) -> Result<(Token![else], Box<Expr>)> { + let else_token: Token![else] = input.parse()?; + + let lookahead = input.lookahead1(); + let else_branch = if input.peek(Token![if]) { + input.parse().map(Expr::If)? + } else if input.peek(token::Brace) { + Expr::Block(ExprBlock { + attrs: Vec::new(), + label: None, + block: input.parse()?, + }) + } else { + return Err(lookahead.error()); + }; + + Ok((else_token, Box::new(else_branch))) + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ExprForLoop { + fn parse(input: ParseStream) -> Result<Self> { + let mut attrs = input.call(Attribute::parse_outer)?; + let label: Option<Label> = input.parse()?; + let for_token: Token![for] = input.parse()?; + + let pat = pat::parsing::multi_pat_with_leading_vert(input)?; + + let in_token: Token![in] = input.parse()?; + let expr: Expr = input.call(Expr::parse_without_eager_brace)?; + + let content; + let brace_token = braced!(content in input); + attr::parsing::parse_inner(&content, &mut attrs)?; + let stmts = content.call(Block::parse_within)?; + + Ok(ExprForLoop { + attrs, + label, + for_token, + pat, + in_token, + expr: Box::new(expr), + body: Block { brace_token, stmts }, + }) + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ExprLoop { + fn parse(input: ParseStream) -> Result<Self> { + let mut attrs = input.call(Attribute::parse_outer)?; + let label: Option<Label> = input.parse()?; + let loop_token: Token![loop] = input.parse()?; + + let content; + let brace_token = braced!(content in input); + attr::parsing::parse_inner(&content, &mut attrs)?; + let stmts = content.call(Block::parse_within)?; + + Ok(ExprLoop { + attrs, + label, + loop_token, + body: Block { brace_token, stmts }, + }) + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ExprMatch { + fn parse(input: ParseStream) -> Result<Self> { + let mut attrs = input.call(Attribute::parse_outer)?; + let match_token: Token![match] = input.parse()?; + let expr = Expr::parse_without_eager_brace(input)?; + + let content; + let brace_token = braced!(content in input); + attr::parsing::parse_inner(&content, &mut attrs)?; + + let mut arms = Vec::new(); + while !content.is_empty() { + arms.push(content.call(Arm::parse)?); + } + + Ok(ExprMatch { + attrs, + match_token, + expr: Box::new(expr), + brace_token, + arms, + }) + } + } + + macro_rules! impl_by_parsing_expr { + ( + $( + $expr_type:ty, $variant:ident, $msg:expr, + )* + ) => { + $( + #[cfg(all(feature = "full", feature = "printing"))] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for $expr_type { + fn parse(input: ParseStream) -> Result<Self> { + let mut expr: Expr = input.parse()?; + loop { + match expr { + Expr::$variant(inner) => return Ok(inner), + Expr::Group(next) => expr = *next.expr, + _ => return Err(Error::new_spanned(expr, $msg)), + } + } + } + } + )* + }; + } + + impl_by_parsing_expr! 
{ + ExprAssign, Assign, "expected assignment expression", + ExprAssignOp, AssignOp, "expected compound assignment expression", + ExprAwait, Await, "expected await expression", + ExprBinary, Binary, "expected binary operation", + ExprCall, Call, "expected function call expression", + ExprCast, Cast, "expected cast expression", + ExprField, Field, "expected struct field access", + ExprIndex, Index, "expected indexing expression", + ExprMethodCall, MethodCall, "expected method call expression", + ExprRange, Range, "expected range expression", + ExprTry, Try, "expected try expression", + ExprTuple, Tuple, "expected tuple expression", + ExprType, Type, "expected type ascription expression", + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ExprBox { + fn parse(input: ParseStream) -> Result<Self> { + let attrs = Vec::new(); + let allow_struct = AllowStruct(true); + expr_box(input, attrs, allow_struct) + } + } + + #[cfg(feature = "full")] + fn expr_box( + input: ParseStream, + attrs: Vec<Attribute>, + allow_struct: AllowStruct, + ) -> Result<ExprBox> { + Ok(ExprBox { + attrs, + box_token: input.parse()?, + expr: Box::new(unary_expr(input, allow_struct)?), + }) + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ExprUnary { + fn parse(input: ParseStream) -> Result<Self> { + let attrs = Vec::new(); + let allow_struct = AllowStruct(true); + expr_unary(input, attrs, allow_struct) + } + } + + #[cfg(feature = "full")] + fn expr_unary( + input: ParseStream, + attrs: Vec<Attribute>, + allow_struct: AllowStruct, + ) -> Result<ExprUnary> { + Ok(ExprUnary { + attrs, + op: input.parse()?, + expr: Box::new(unary_expr(input, allow_struct)?), + }) + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ExprClosure { + fn parse(input: ParseStream) -> Result<Self> { + let allow_struct = AllowStruct(true); + expr_closure(input, allow_struct) + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ExprReference { + fn parse(input: ParseStream) -> Result<Self> { + let allow_struct = AllowStruct(true); + Ok(ExprReference { + attrs: Vec::new(), + and_token: input.parse()?, + raw: Reserved::default(), + mutability: input.parse()?, + expr: Box::new(unary_expr(input, allow_struct)?), + }) + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ExprBreak { + fn parse(input: ParseStream) -> Result<Self> { + let allow_struct = AllowStruct(true); + expr_break(input, allow_struct) + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ExprReturn { + fn parse(input: ParseStream) -> Result<Self> { + let allow_struct = AllowStruct(true); + expr_ret(input, allow_struct) + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ExprTryBlock { + fn parse(input: ParseStream) -> Result<Self> { + Ok(ExprTryBlock { + attrs: Vec::new(), + try_token: input.parse()?, + block: input.parse()?, + }) + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ExprYield { + fn parse(input: ParseStream) -> Result<Self> { + Ok(ExprYield { + attrs: Vec::new(), + yield_token: input.parse()?, + expr: { + if !input.is_empty() && !input.peek(Token![,]) && !input.peek(Token![;]) { + Some(input.parse()?) 
+ } else { + None + } + }, + }) + } + } + + #[cfg(feature = "full")] + fn expr_closure(input: ParseStream, allow_struct: AllowStruct) -> Result<ExprClosure> { + let movability: Option<Token![static]> = input.parse()?; + let asyncness: Option<Token![async]> = input.parse()?; + let capture: Option<Token![move]> = input.parse()?; + let or1_token: Token![|] = input.parse()?; + + let mut inputs = Punctuated::new(); + loop { + if input.peek(Token![|]) { + break; + } + let value = closure_arg(input)?; + inputs.push_value(value); + if input.peek(Token![|]) { + break; + } + let punct: Token![,] = input.parse()?; + inputs.push_punct(punct); + } + + let or2_token: Token![|] = input.parse()?; + + let (output, body) = if input.peek(Token![->]) { + let arrow_token: Token![->] = input.parse()?; + let ty: Type = input.parse()?; + let body: Block = input.parse()?; + let output = ReturnType::Type(arrow_token, Box::new(ty)); + let block = Expr::Block(ExprBlock { + attrs: Vec::new(), + label: None, + block: body, + }); + (output, block) + } else { + let body = ambiguous_expr(input, allow_struct)?; + (ReturnType::Default, body) + }; + + Ok(ExprClosure { + attrs: Vec::new(), + movability, + asyncness, + capture, + or1_token, + inputs, + or2_token, + output, + body: Box::new(body), + }) + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ExprAsync { + fn parse(input: ParseStream) -> Result<Self> { + Ok(ExprAsync { + attrs: Vec::new(), + async_token: input.parse()?, + capture: input.parse()?, + block: input.parse()?, + }) + } + } + + #[cfg(feature = "full")] + fn closure_arg(input: ParseStream) -> Result<Pat> { + let attrs = input.call(Attribute::parse_outer)?; + let mut pat: Pat = input.parse()?; + + if input.peek(Token![:]) { + Ok(Pat::Type(PatType { + attrs, + pat: Box::new(pat), + colon_token: input.parse()?, + ty: input.parse()?, + })) + } else { + match &mut pat { + Pat::Box(pat) => pat.attrs = attrs, + Pat::Ident(pat) => pat.attrs = attrs, + Pat::Lit(pat) => pat.attrs = attrs, + Pat::Macro(pat) => pat.attrs = attrs, + Pat::Or(pat) => pat.attrs = attrs, + Pat::Path(pat) => pat.attrs = attrs, + Pat::Range(pat) => pat.attrs = attrs, + Pat::Reference(pat) => pat.attrs = attrs, + Pat::Rest(pat) => pat.attrs = attrs, + Pat::Slice(pat) => pat.attrs = attrs, + Pat::Struct(pat) => pat.attrs = attrs, + Pat::Tuple(pat) => pat.attrs = attrs, + Pat::TupleStruct(pat) => pat.attrs = attrs, + Pat::Type(_) => unreachable!(), + Pat::Verbatim(_) => {} + Pat::Wild(pat) => pat.attrs = attrs, + + #[cfg(syn_no_non_exhaustive)] + _ => unreachable!(), + } + Ok(pat) + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ExprWhile { + fn parse(input: ParseStream) -> Result<Self> { + let mut attrs = input.call(Attribute::parse_outer)?; + let label: Option<Label> = input.parse()?; + let while_token: Token![while] = input.parse()?; + let cond = Expr::parse_without_eager_brace(input)?; + + let content; + let brace_token = braced!(content in input); + attr::parsing::parse_inner(&content, &mut attrs)?; + let stmts = content.call(Block::parse_within)?; + + Ok(ExprWhile { + attrs, + label, + while_token, + cond: Box::new(cond), + body: Block { brace_token, stmts }, + }) + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for Label { + fn parse(input: ParseStream) -> Result<Self> { + Ok(Label { + name: input.parse()?, + colon_token: input.parse()?, + }) + } + } + + #[cfg(feature = 
"full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for Option<Label> { + fn parse(input: ParseStream) -> Result<Self> { + if input.peek(Lifetime) { + input.parse().map(Some) + } else { + Ok(None) + } + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ExprContinue { + fn parse(input: ParseStream) -> Result<Self> { + Ok(ExprContinue { + attrs: Vec::new(), + continue_token: input.parse()?, + label: input.parse()?, + }) + } + } + + #[cfg(feature = "full")] + fn expr_break(input: ParseStream, allow_struct: AllowStruct) -> Result<ExprBreak> { + Ok(ExprBreak { + attrs: Vec::new(), + break_token: input.parse()?, + label: input.parse()?, + expr: { + if input.is_empty() + || input.peek(Token![,]) + || input.peek(Token![;]) + || !allow_struct.0 && input.peek(token::Brace) + { + None + } else { + let expr = ambiguous_expr(input, allow_struct)?; + Some(Box::new(expr)) + } + }, + }) + } + + #[cfg(feature = "full")] + fn expr_ret(input: ParseStream, allow_struct: AllowStruct) -> Result<ExprReturn> { + Ok(ExprReturn { + attrs: Vec::new(), + return_token: input.parse()?, + expr: { + if input.is_empty() || input.peek(Token![,]) || input.peek(Token![;]) { + None + } else { + // NOTE: return is greedy and eats blocks after it even when in a + // position where structs are not allowed, such as in if statement + // conditions. For example: + // + // if return { println!("A") } {} // Prints "A" + let expr = ambiguous_expr(input, allow_struct)?; + Some(Box::new(expr)) + } + }, + }) + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for FieldValue { + fn parse(input: ParseStream) -> Result<Self> { + let attrs = input.call(Attribute::parse_outer)?; + let member: Member = input.parse()?; + let (colon_token, value) = if input.peek(Token![:]) || !member.is_named() { + let colon_token: Token![:] = input.parse()?; + let value: Expr = input.parse()?; + (Some(colon_token), value) + } else if let Member::Named(ident) = &member { + let value = Expr::Path(ExprPath { + attrs: Vec::new(), + qself: None, + path: Path::from(ident.clone()), + }); + (None, value) + } else { + unreachable!() + }; + + Ok(FieldValue { + attrs, + member, + colon_token, + expr: value, + }) + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ExprStruct { + fn parse(input: ParseStream) -> Result<Self> { + let path: Path = input.parse()?; + expr_struct_helper(input, path) + } + } + + #[cfg(feature = "full")] + fn expr_struct_helper(input: ParseStream, path: Path) -> Result<ExprStruct> { + let content; + let brace_token = braced!(content in input); + + let mut fields = Punctuated::new(); + while !content.is_empty() { + if content.peek(Token![..]) { + return Ok(ExprStruct { + attrs: Vec::new(), + brace_token, + path, + fields, + dot2_token: Some(content.parse()?), + rest: if content.is_empty() { + None + } else { + Some(Box::new(content.parse()?)) + }, + }); + } + + fields.push(content.parse()?); + if content.is_empty() { + break; + } + let punct: Token![,] = content.parse()?; + fields.push_punct(punct); + } + + Ok(ExprStruct { + attrs: Vec::new(), + brace_token, + path, + fields, + dot2_token: None, + rest: None, + }) + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ExprUnsafe { + fn parse(input: ParseStream) -> Result<Self> { + let unsafe_token: Token![unsafe] = input.parse()?; + + let content; + let brace_token = 
braced!(content in input); + let inner_attrs = content.call(Attribute::parse_inner)?; + let stmts = content.call(Block::parse_within)?; + + Ok(ExprUnsafe { + attrs: inner_attrs, + unsafe_token, + block: Block { brace_token, stmts }, + }) + } + } + + #[cfg(feature = "full")] + pub(crate) fn expr_const(input: ParseStream) -> Result<TokenStream> { + let begin = input.fork(); + input.parse::<Token![const]>()?; + + let content; + braced!(content in input); + content.call(Attribute::parse_inner)?; + content.call(Block::parse_within)?; + + Ok(verbatim::between(begin, input)) + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ExprBlock { + fn parse(input: ParseStream) -> Result<Self> { + let mut attrs = input.call(Attribute::parse_outer)?; + let label: Option<Label> = input.parse()?; + + let content; + let brace_token = braced!(content in input); + attr::parsing::parse_inner(&content, &mut attrs)?; + let stmts = content.call(Block::parse_within)?; + + Ok(ExprBlock { + attrs, + label, + block: Block { brace_token, stmts }, + }) + } + } + + #[cfg(feature = "full")] + fn expr_range(input: ParseStream, allow_struct: AllowStruct) -> Result<ExprRange> { + Ok(ExprRange { + attrs: Vec::new(), + from: None, + limits: input.parse()?, + to: { + if input.is_empty() + || input.peek(Token![,]) + || input.peek(Token![;]) + || input.peek(Token![.]) && !input.peek(Token![..]) + || !allow_struct.0 && input.peek(token::Brace) + { + None + } else { + let to = ambiguous_expr(input, allow_struct)?; + Some(Box::new(to)) + } + }, + }) + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for RangeLimits { + fn parse(input: ParseStream) -> Result<Self> { + let lookahead = input.lookahead1(); + if lookahead.peek(Token![..=]) { + input.parse().map(RangeLimits::Closed) + } else if lookahead.peek(Token![...]) { + let dot3: Token![...] = input.parse()?; + Ok(RangeLimits::Closed(Token![..=](dot3.spans))) + } else if lookahead.peek(Token![..]) { + input.parse().map(RangeLimits::HalfOpen) + } else { + Err(lookahead.error()) + } + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ExprPath { + fn parse(input: ParseStream) -> Result<Self> { + #[cfg(not(feature = "full"))] + let attrs = Vec::new(); + #[cfg(feature = "full")] + let attrs = input.call(Attribute::parse_outer)?; + + let (qself, path) = path::parsing::qpath(input, true)?; + + Ok(ExprPath { attrs, qself, path }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for Member { + fn parse(input: ParseStream) -> Result<Self> { + if input.peek(Ident) { + input.parse().map(Member::Named) + } else if input.peek(LitInt) { + input.parse().map(Member::Unnamed) + } else { + Err(input.error("expected identifier or integer")) + } + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for Arm { + fn parse(input: ParseStream) -> Result<Arm> { + let requires_comma; + Ok(Arm { + attrs: input.call(Attribute::parse_outer)?, + pat: pat::parsing::multi_pat_with_leading_vert(input)?, + guard: { + if input.peek(Token![if]) { + let if_token: Token![if] = input.parse()?; + let guard: Expr = input.parse()?; + Some((if_token, Box::new(guard))) + } else { + None + } + }, + fat_arrow_token: input.parse()?, + body: { + let body = input.call(expr_early)?; + requires_comma = requires_terminator(&body); + Box::new(body) + }, + comma: { + if requires_comma && !input.is_empty() { + Some(input.parse()?) 
+ } else { + input.parse()? + } + }, + }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for Index { + fn parse(input: ParseStream) -> Result<Self> { + let lit: LitInt = input.parse()?; + if lit.suffix().is_empty() { + Ok(Index { + index: lit + .base10_digits() + .parse() + .map_err(|err| Error::new(lit.span(), err))?, + span: lit.span(), + }) + } else { + Err(Error::new(lit.span(), "expected unsuffixed integer")) + } + } + } + + fn multi_index(e: &mut Expr, dot_token: &mut Token![.], float: LitFloat) -> Result<bool> { + let mut float_repr = float.to_string(); + let trailing_dot = float_repr.ends_with('.'); + if trailing_dot { + float_repr.truncate(float_repr.len() - 1); + } + for part in float_repr.split('.') { + let index = crate::parse_str(part).map_err(|err| Error::new(float.span(), err))?; + #[cfg(not(syn_no_const_vec_new))] + let base = mem::replace(e, Expr::DUMMY); + #[cfg(syn_no_const_vec_new)] + let base = mem::replace(e, Expr::Verbatim(TokenStream::new())); + *e = Expr::Field(ExprField { + attrs: Vec::new(), + base: Box::new(base), + dot_token: Token![.](dot_token.span), + member: Member::Unnamed(index), + }); + *dot_token = Token![.](float.span()); + } + Ok(!trailing_dot) + } + + #[cfg(feature = "full")] + impl Member { + fn is_named(&self) -> bool { + match *self { + Member::Named(_) => true, + Member::Unnamed(_) => false, + } + } + } + + fn check_cast(input: ParseStream) -> Result<()> { + let kind = if input.peek(Token![.]) && !input.peek(Token![..]) { + if input.peek2(token::Await) { + "`.await`" + } else if input.peek2(Ident) && (input.peek3(token::Paren) || input.peek3(Token![::])) { + "a method call" + } else { + "a field access" + } + } else if input.peek(Token![?]) { + "`?`" + } else if input.peek(token::Bracket) { + "indexing" + } else if input.peek(token::Paren) { + "a function call" + } else { + return Ok(()); + }; + let msg = format!("casts cannot be followed by {}", kind); + Err(input.error(msg)) + } +} + +#[cfg(feature = "printing")] +pub(crate) mod printing { + use super::*; + #[cfg(feature = "full")] + use crate::attr::FilterAttrs; + use proc_macro2::{Literal, TokenStream}; + use quote::{ToTokens, TokenStreamExt}; + + // If the given expression is a bare `ExprStruct`, wraps it in parenthesis + // before appending it to `TokenStream`. 
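The parenthesis-wrapping described above exists because a bare struct literal in, say, an `if` condition would not re-parse: its brace would be taken as the start of the `if` body. A sketch of the effect, not part of the vendored source; it assumes syn with the "full" and "printing" features plus the quote crate, and the `Config { debug: true }` literal is only a made-up example:

    use quote::ToTokens;
    use syn::{parse_quote, Expr, ExprIf};

    fn main() {
        // Build an `if` whose condition is a struct literal. This tree cannot
        // be written directly in source, but it can be built programmatically.
        let cond: Expr = parse_quote!(Config { debug: true });
        let then_branch: syn::Block = parse_quote!({});
        let e = ExprIf {
            attrs: Vec::new(),
            if_token: Default::default(),
            cond: Box::new(cond),
            then_branch,
            else_branch: None,
        };
        // Prints roughly `if (Config { debug : true }) { }`; the parentheses
        // come from wrap_bare_struct so the output re-parses unambiguously.
        println!("{}", e.to_token_stream());
    }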
+ #[cfg(feature = "full")] + fn wrap_bare_struct(tokens: &mut TokenStream, e: &Expr) { + if let Expr::Struct(_) = *e { + token::Paren::default().surround(tokens, |tokens| { + e.to_tokens(tokens); + }); + } else { + e.to_tokens(tokens); + } + } + + #[cfg(feature = "full")] + pub(crate) fn outer_attrs_to_tokens(attrs: &[Attribute], tokens: &mut TokenStream) { + tokens.append_all(attrs.outer()); + } + + #[cfg(feature = "full")] + fn inner_attrs_to_tokens(attrs: &[Attribute], tokens: &mut TokenStream) { + tokens.append_all(attrs.inner()); + } + + #[cfg(not(feature = "full"))] + pub(crate) fn outer_attrs_to_tokens(_attrs: &[Attribute], _tokens: &mut TokenStream) {} + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ExprBox { + fn to_tokens(&self, tokens: &mut TokenStream) { + outer_attrs_to_tokens(&self.attrs, tokens); + self.box_token.to_tokens(tokens); + self.expr.to_tokens(tokens); + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ExprArray { + fn to_tokens(&self, tokens: &mut TokenStream) { + outer_attrs_to_tokens(&self.attrs, tokens); + self.bracket_token.surround(tokens, |tokens| { + self.elems.to_tokens(tokens); + }); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ExprCall { + fn to_tokens(&self, tokens: &mut TokenStream) { + outer_attrs_to_tokens(&self.attrs, tokens); + self.func.to_tokens(tokens); + self.paren_token.surround(tokens, |tokens| { + self.args.to_tokens(tokens); + }); + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ExprMethodCall { + fn to_tokens(&self, tokens: &mut TokenStream) { + outer_attrs_to_tokens(&self.attrs, tokens); + self.receiver.to_tokens(tokens); + self.dot_token.to_tokens(tokens); + self.method.to_tokens(tokens); + self.turbofish.to_tokens(tokens); + self.paren_token.surround(tokens, |tokens| { + self.args.to_tokens(tokens); + }); + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for MethodTurbofish { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.colon2_token.to_tokens(tokens); + self.lt_token.to_tokens(tokens); + self.args.to_tokens(tokens); + self.gt_token.to_tokens(tokens); + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for GenericMethodArgument { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + GenericMethodArgument::Type(t) => t.to_tokens(tokens), + GenericMethodArgument::Const(c) => c.to_tokens(tokens), + } + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ExprTuple { + fn to_tokens(&self, tokens: &mut TokenStream) { + outer_attrs_to_tokens(&self.attrs, tokens); + self.paren_token.surround(tokens, |tokens| { + self.elems.to_tokens(tokens); + // If we only have one argument, we need a trailing comma to + // distinguish ExprTuple from ExprParen. 
+ if self.elems.len() == 1 && !self.elems.trailing_punct() { + <Token![,]>::default().to_tokens(tokens); + } + }); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ExprBinary { + fn to_tokens(&self, tokens: &mut TokenStream) { + outer_attrs_to_tokens(&self.attrs, tokens); + self.left.to_tokens(tokens); + self.op.to_tokens(tokens); + self.right.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ExprUnary { + fn to_tokens(&self, tokens: &mut TokenStream) { + outer_attrs_to_tokens(&self.attrs, tokens); + self.op.to_tokens(tokens); + self.expr.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ExprLit { + fn to_tokens(&self, tokens: &mut TokenStream) { + outer_attrs_to_tokens(&self.attrs, tokens); + self.lit.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ExprCast { + fn to_tokens(&self, tokens: &mut TokenStream) { + outer_attrs_to_tokens(&self.attrs, tokens); + self.expr.to_tokens(tokens); + self.as_token.to_tokens(tokens); + self.ty.to_tokens(tokens); + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ExprType { + fn to_tokens(&self, tokens: &mut TokenStream) { + outer_attrs_to_tokens(&self.attrs, tokens); + self.expr.to_tokens(tokens); + self.colon_token.to_tokens(tokens); + self.ty.to_tokens(tokens); + } + } + + #[cfg(feature = "full")] + fn maybe_wrap_else(tokens: &mut TokenStream, else_: &Option<(Token![else], Box<Expr>)>) { + if let Some((else_token, else_)) = else_ { + else_token.to_tokens(tokens); + + // If we are not one of the valid expressions to exist in an else + // clause, wrap ourselves in a block. 
+ match **else_ { + Expr::If(_) | Expr::Block(_) => { + else_.to_tokens(tokens); + } + _ => { + token::Brace::default().surround(tokens, |tokens| { + else_.to_tokens(tokens); + }); + } + } + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ExprLet { + fn to_tokens(&self, tokens: &mut TokenStream) { + outer_attrs_to_tokens(&self.attrs, tokens); + self.let_token.to_tokens(tokens); + self.pat.to_tokens(tokens); + self.eq_token.to_tokens(tokens); + wrap_bare_struct(tokens, &self.expr); + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ExprIf { + fn to_tokens(&self, tokens: &mut TokenStream) { + outer_attrs_to_tokens(&self.attrs, tokens); + self.if_token.to_tokens(tokens); + wrap_bare_struct(tokens, &self.cond); + self.then_branch.to_tokens(tokens); + maybe_wrap_else(tokens, &self.else_branch); + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ExprWhile { + fn to_tokens(&self, tokens: &mut TokenStream) { + outer_attrs_to_tokens(&self.attrs, tokens); + self.label.to_tokens(tokens); + self.while_token.to_tokens(tokens); + wrap_bare_struct(tokens, &self.cond); + self.body.brace_token.surround(tokens, |tokens| { + inner_attrs_to_tokens(&self.attrs, tokens); + tokens.append_all(&self.body.stmts); + }); + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ExprForLoop { + fn to_tokens(&self, tokens: &mut TokenStream) { + outer_attrs_to_tokens(&self.attrs, tokens); + self.label.to_tokens(tokens); + self.for_token.to_tokens(tokens); + self.pat.to_tokens(tokens); + self.in_token.to_tokens(tokens); + wrap_bare_struct(tokens, &self.expr); + self.body.brace_token.surround(tokens, |tokens| { + inner_attrs_to_tokens(&self.attrs, tokens); + tokens.append_all(&self.body.stmts); + }); + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ExprLoop { + fn to_tokens(&self, tokens: &mut TokenStream) { + outer_attrs_to_tokens(&self.attrs, tokens); + self.label.to_tokens(tokens); + self.loop_token.to_tokens(tokens); + self.body.brace_token.surround(tokens, |tokens| { + inner_attrs_to_tokens(&self.attrs, tokens); + tokens.append_all(&self.body.stmts); + }); + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ExprMatch { + fn to_tokens(&self, tokens: &mut TokenStream) { + outer_attrs_to_tokens(&self.attrs, tokens); + self.match_token.to_tokens(tokens); + wrap_bare_struct(tokens, &self.expr); + self.brace_token.surround(tokens, |tokens| { + inner_attrs_to_tokens(&self.attrs, tokens); + for (i, arm) in self.arms.iter().enumerate() { + arm.to_tokens(tokens); + // Ensure that we have a comma after a non-block arm, except + // for the last one. 
+ let is_last = i == self.arms.len() - 1; + if !is_last && requires_terminator(&arm.body) && arm.comma.is_none() { + <Token![,]>::default().to_tokens(tokens); + } + } + }); + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ExprAsync { + fn to_tokens(&self, tokens: &mut TokenStream) { + outer_attrs_to_tokens(&self.attrs, tokens); + self.async_token.to_tokens(tokens); + self.capture.to_tokens(tokens); + self.block.to_tokens(tokens); + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ExprAwait { + fn to_tokens(&self, tokens: &mut TokenStream) { + outer_attrs_to_tokens(&self.attrs, tokens); + self.base.to_tokens(tokens); + self.dot_token.to_tokens(tokens); + self.await_token.to_tokens(tokens); + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ExprTryBlock { + fn to_tokens(&self, tokens: &mut TokenStream) { + outer_attrs_to_tokens(&self.attrs, tokens); + self.try_token.to_tokens(tokens); + self.block.to_tokens(tokens); + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ExprYield { + fn to_tokens(&self, tokens: &mut TokenStream) { + outer_attrs_to_tokens(&self.attrs, tokens); + self.yield_token.to_tokens(tokens); + self.expr.to_tokens(tokens); + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ExprClosure { + fn to_tokens(&self, tokens: &mut TokenStream) { + outer_attrs_to_tokens(&self.attrs, tokens); + self.movability.to_tokens(tokens); + self.asyncness.to_tokens(tokens); + self.capture.to_tokens(tokens); + self.or1_token.to_tokens(tokens); + self.inputs.to_tokens(tokens); + self.or2_token.to_tokens(tokens); + self.output.to_tokens(tokens); + self.body.to_tokens(tokens); + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ExprUnsafe { + fn to_tokens(&self, tokens: &mut TokenStream) { + outer_attrs_to_tokens(&self.attrs, tokens); + self.unsafe_token.to_tokens(tokens); + self.block.brace_token.surround(tokens, |tokens| { + inner_attrs_to_tokens(&self.attrs, tokens); + tokens.append_all(&self.block.stmts); + }); + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ExprBlock { + fn to_tokens(&self, tokens: &mut TokenStream) { + outer_attrs_to_tokens(&self.attrs, tokens); + self.label.to_tokens(tokens); + self.block.brace_token.surround(tokens, |tokens| { + inner_attrs_to_tokens(&self.attrs, tokens); + tokens.append_all(&self.block.stmts); + }); + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ExprAssign { + fn to_tokens(&self, tokens: &mut TokenStream) { + outer_attrs_to_tokens(&self.attrs, tokens); + self.left.to_tokens(tokens); + self.eq_token.to_tokens(tokens); + self.right.to_tokens(tokens); + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ExprAssignOp { + fn to_tokens(&self, tokens: &mut TokenStream) { + outer_attrs_to_tokens(&self.attrs, tokens); + self.left.to_tokens(tokens); + self.op.to_tokens(tokens); + self.right.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ExprField { + fn to_tokens(&self, tokens: &mut TokenStream) { + outer_attrs_to_tokens(&self.attrs, tokens); + 
self.base.to_tokens(tokens); + self.dot_token.to_tokens(tokens); + self.member.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for Member { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + Member::Named(ident) => ident.to_tokens(tokens), + Member::Unnamed(index) => index.to_tokens(tokens), + } + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for Index { + fn to_tokens(&self, tokens: &mut TokenStream) { + let mut lit = Literal::i64_unsuffixed(i64::from(self.index)); + lit.set_span(self.span); + tokens.append(lit); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ExprIndex { + fn to_tokens(&self, tokens: &mut TokenStream) { + outer_attrs_to_tokens(&self.attrs, tokens); + self.expr.to_tokens(tokens); + self.bracket_token.surround(tokens, |tokens| { + self.index.to_tokens(tokens); + }); + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for RangeLimits { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + RangeLimits::HalfOpen(t) => t.to_tokens(tokens), + RangeLimits::Closed(t) => t.to_tokens(tokens), + } + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ExprRange { + fn to_tokens(&self, tokens: &mut TokenStream) { + outer_attrs_to_tokens(&self.attrs, tokens); + self.from.to_tokens(tokens); + self.limits.to_tokens(tokens); + self.to.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ExprPath { + fn to_tokens(&self, tokens: &mut TokenStream) { + outer_attrs_to_tokens(&self.attrs, tokens); + path::printing::print_path(tokens, &self.qself, &self.path); + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ExprReference { + fn to_tokens(&self, tokens: &mut TokenStream) { + outer_attrs_to_tokens(&self.attrs, tokens); + self.and_token.to_tokens(tokens); + self.mutability.to_tokens(tokens); + self.expr.to_tokens(tokens); + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ExprBreak { + fn to_tokens(&self, tokens: &mut TokenStream) { + outer_attrs_to_tokens(&self.attrs, tokens); + self.break_token.to_tokens(tokens); + self.label.to_tokens(tokens); + self.expr.to_tokens(tokens); + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ExprContinue { + fn to_tokens(&self, tokens: &mut TokenStream) { + outer_attrs_to_tokens(&self.attrs, tokens); + self.continue_token.to_tokens(tokens); + self.label.to_tokens(tokens); + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ExprReturn { + fn to_tokens(&self, tokens: &mut TokenStream) { + outer_attrs_to_tokens(&self.attrs, tokens); + self.return_token.to_tokens(tokens); + self.expr.to_tokens(tokens); + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ExprMacro { + fn to_tokens(&self, tokens: &mut TokenStream) { + outer_attrs_to_tokens(&self.attrs, tokens); + self.mac.to_tokens(tokens); + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ExprStruct { + fn to_tokens(&self, tokens: &mut TokenStream) { + outer_attrs_to_tokens(&self.attrs, tokens); + self.path.to_tokens(tokens); + 
self.brace_token.surround(tokens, |tokens| { + self.fields.to_tokens(tokens); + if let Some(dot2_token) = &self.dot2_token { + dot2_token.to_tokens(tokens); + } else if self.rest.is_some() { + Token![..](Span::call_site()).to_tokens(tokens); + } + self.rest.to_tokens(tokens); + }); + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ExprRepeat { + fn to_tokens(&self, tokens: &mut TokenStream) { + outer_attrs_to_tokens(&self.attrs, tokens); + self.bracket_token.surround(tokens, |tokens| { + self.expr.to_tokens(tokens); + self.semi_token.to_tokens(tokens); + self.len.to_tokens(tokens); + }); + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ExprGroup { + fn to_tokens(&self, tokens: &mut TokenStream) { + outer_attrs_to_tokens(&self.attrs, tokens); + self.group_token.surround(tokens, |tokens| { + self.expr.to_tokens(tokens); + }); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ExprParen { + fn to_tokens(&self, tokens: &mut TokenStream) { + outer_attrs_to_tokens(&self.attrs, tokens); + self.paren_token.surround(tokens, |tokens| { + self.expr.to_tokens(tokens); + }); + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ExprTry { + fn to_tokens(&self, tokens: &mut TokenStream) { + outer_attrs_to_tokens(&self.attrs, tokens); + self.expr.to_tokens(tokens); + self.question_token.to_tokens(tokens); + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for Label { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.name.to_tokens(tokens); + self.colon_token.to_tokens(tokens); + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for FieldValue { + fn to_tokens(&self, tokens: &mut TokenStream) { + outer_attrs_to_tokens(&self.attrs, tokens); + self.member.to_tokens(tokens); + if let Some(colon_token) = &self.colon_token { + colon_token.to_tokens(tokens); + self.expr.to_tokens(tokens); + } + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for Arm { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(&self.attrs); + self.pat.to_tokens(tokens); + if let Some((if_token, guard)) = &self.guard { + if_token.to_tokens(tokens); + guard.to_tokens(tokens); + } + self.fat_arrow_token.to_tokens(tokens); + self.body.to_tokens(tokens); + self.comma.to_tokens(tokens); + } + } +} diff --git a/third_party/rust/syn/src/ext.rs b/third_party/rust/syn/src/ext.rs new file mode 100644 index 0000000000..98d5550f48 --- /dev/null +++ b/third_party/rust/syn/src/ext.rs @@ -0,0 +1,139 @@ +//! Extension traits to provide parsing methods on foreign types. +//! +//! *This module is available only if Syn is built with the `"parsing"` feature.* + +use crate::buffer::Cursor; +use crate::parse::Peek; +use crate::parse::{ParseStream, Result}; +use crate::sealed::lookahead; +use crate::token::CustomToken; +use proc_macro2::Ident; + +/// Additional methods for `Ident` not provided by proc-macro2 or libproc_macro. +/// +/// This trait is sealed and cannot be implemented for types outside of Syn. It +/// is implemented only for `proc_macro2::Ident`. +/// +/// *This trait is available only if Syn is built with the `"parsing"` feature.* +pub trait IdentExt: Sized + private::Sealed { + /// Parses any identifier including keywords. 
+ /// + /// This is useful when parsing macro input which allows Rust keywords as + /// identifiers. + /// + /// # Example + /// + /// ``` + /// use syn::{Error, Ident, Result, Token}; + /// use syn::ext::IdentExt; + /// use syn::parse::ParseStream; + /// + /// mod kw { + /// syn::custom_keyword!(name); + /// } + /// + /// // Parses input that looks like `name = NAME` where `NAME` can be + /// // any identifier. + /// // + /// // Examples: + /// // + /// // name = anything + /// // name = impl + /// fn parse_dsl(input: ParseStream) -> Result<Ident> { + /// input.parse::<kw::name>()?; + /// input.parse::<Token![=]>()?; + /// let name = input.call(Ident::parse_any)?; + /// Ok(name) + /// } + /// ``` + fn parse_any(input: ParseStream) -> Result<Self>; + + /// Peeks any identifier including keywords. Usage: + /// `input.peek(Ident::peek_any)` + /// + /// This is different from `input.peek(Ident)` which only returns true in + /// the case of an ident which is not a Rust keyword. + #[allow(non_upper_case_globals)] + const peek_any: private::PeekFn = private::PeekFn; + + /// Strips the raw marker `r#`, if any, from the beginning of an ident. + /// + /// - unraw(`x`) = `x` + /// - unraw(`move`) = `move` + /// - unraw(`r#move`) = `move` + /// + /// # Example + /// + /// In the case of interop with other languages like Python that have a + /// different set of keywords than Rust, we might come across macro input + /// that involves raw identifiers to refer to ordinary variables in the + /// other language with a name that happens to be a Rust keyword. + /// + /// The function below appends an identifier from the caller's input onto a + /// fixed prefix. Without using `unraw()`, this would tend to produce + /// invalid identifiers like `__pyo3_get_r#move`. + /// + /// ``` + /// use proc_macro2::Span; + /// use syn::Ident; + /// use syn::ext::IdentExt; + /// + /// fn ident_for_getter(variable: &Ident) -> Ident { + /// let getter = format!("__pyo3_get_{}", variable.unraw()); + /// Ident::new(&getter, Span::call_site()) + /// } + /// ``` + fn unraw(&self) -> Ident; +} + +impl IdentExt for Ident { + fn parse_any(input: ParseStream) -> Result<Self> { + input.step(|cursor| match cursor.ident() { + Some((ident, rest)) => Ok((ident, rest)), + None => Err(cursor.error("expected ident")), + }) + } + + fn unraw(&self) -> Ident { + let string = self.to_string(); + if string.starts_with("r#") { + Ident::new(&string[2..], self.span()) + } else { + self.clone() + } + } +} + +impl Peek for private::PeekFn { + type Token = private::IdentAny; +} + +impl CustomToken for private::IdentAny { + fn peek(cursor: Cursor) -> bool { + cursor.ident().is_some() + } + + fn display() -> &'static str { + "identifier" + } +} + +impl lookahead::Sealed for private::PeekFn {} + +mod private { + use proc_macro2::Ident; + + pub trait Sealed {} + + impl Sealed for Ident {} + + pub struct PeekFn; + pub struct IdentAny; + + impl Copy for PeekFn {} + impl Clone for PeekFn { + fn clone(&self) -> Self { + *self + } + } +} diff --git a/third_party/rust/syn/src/file.rs b/third_party/rust/syn/src/file.rs new file mode 100644 index 0000000000..280484f980 --- /dev/null +++ b/third_party/rust/syn/src/file.rs @@ -0,0 +1,125 @@ +use super::*; + +ast_struct! { + /// A complete file of Rust source code. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + /// + /// # Example + /// + /// Parse a Rust source file into a `syn::File` and print out a debug + /// representation of the syntax tree. 
+ /// + /// ``` + /// use std::env; + /// use std::fs::File; + /// use std::io::Read; + /// use std::process; + /// + /// fn main() { + /// # } + /// # + /// # fn fake_main() { + /// let mut args = env::args(); + /// let _ = args.next(); // executable name + /// + /// let filename = match (args.next(), args.next()) { + /// (Some(filename), None) => filename, + /// _ => { + /// eprintln!("Usage: dump-syntax path/to/filename.rs"); + /// process::exit(1); + /// } + /// }; + /// + /// let mut file = File::open(&filename).expect("Unable to open file"); + /// + /// let mut src = String::new(); + /// file.read_to_string(&mut src).expect("Unable to read file"); + /// + /// let syntax = syn::parse_file(&src).expect("Unable to parse file"); + /// + /// // Debug impl is available if Syn is built with "extra-traits" feature. + /// println!("{:#?}", syntax); + /// } + /// ``` + /// + /// Running with its own source code as input, this program prints output + /// that begins with: + /// + /// ```text + /// File { + /// shebang: None, + /// attrs: [], + /// items: [ + /// Use( + /// ItemUse { + /// attrs: [], + /// vis: Inherited, + /// use_token: Use, + /// leading_colon: None, + /// tree: Path( + /// UsePath { + /// ident: Ident( + /// std, + /// ), + /// colon2_token: Colon2, + /// tree: Name( + /// UseName { + /// ident: Ident( + /// env, + /// ), + /// }, + /// ), + /// }, + /// ), + /// semi_token: Semi, + /// }, + /// ), + /// ... + /// ``` + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct File { + pub shebang: Option<String>, + pub attrs: Vec<Attribute>, + pub items: Vec<Item>, + } +} + +#[cfg(feature = "parsing")] +pub mod parsing { + use super::*; + use crate::parse::{Parse, ParseStream, Result}; + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for File { + fn parse(input: ParseStream) -> Result<Self> { + Ok(File { + shebang: None, + attrs: input.call(Attribute::parse_inner)?, + items: { + let mut items = Vec::new(); + while !input.is_empty() { + items.push(input.parse()?); + } + items + }, + }) + } + } +} + +#[cfg(feature = "printing")] +mod printing { + use super::*; + use crate::attr::FilterAttrs; + use proc_macro2::TokenStream; + use quote::{ToTokens, TokenStreamExt}; + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for File { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.inner()); + tokens.append_all(&self.items); + } + } +} diff --git a/third_party/rust/syn/src/gen/clone.rs b/third_party/rust/syn/src/gen/clone.rs new file mode 100644 index 0000000000..a413e3ec70 --- /dev/null +++ b/third_party/rust/syn/src/gen/clone.rs @@ -0,0 +1,2241 @@ +// This file is @generated by syn-internal-codegen. +// It is not intended for manual editing. 
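These machine-generated impls are what back `.clone()` on every syntax-tree node when the "clone-impls" feature is enabled (it is part of syn's default feature set). A minimal sketch, not part of the generated file, relying on the `impl Clone for Expr` that appears further down in it:

    use syn::Expr;

    fn main() {
        let parsed: Expr = syn::parse_str("x + 1").unwrap();
        // Deep-copies the whole tree via the generated Clone impls.
        let duplicate = parsed.clone();
        let _ = (parsed, duplicate);
    }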
+ +#![allow(clippy::clone_on_copy, clippy::expl_impl_clone_on_copy)] +use crate::*; +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for Abi { + fn clone(&self) -> Self { + Abi { + extern_token: self.extern_token.clone(), + name: self.name.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for AngleBracketedGenericArguments { + fn clone(&self) -> Self { + AngleBracketedGenericArguments { + colon2_token: self.colon2_token.clone(), + lt_token: self.lt_token.clone(), + args: self.args.clone(), + gt_token: self.gt_token.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for Arm { + fn clone(&self) -> Self { + Arm { + attrs: self.attrs.clone(), + pat: self.pat.clone(), + guard: self.guard.clone(), + fat_arrow_token: self.fat_arrow_token.clone(), + body: self.body.clone(), + comma: self.comma.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Copy for AttrStyle {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for AttrStyle { + fn clone(&self) -> Self { + *self + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for Attribute { + fn clone(&self) -> Self { + Attribute { + pound_token: self.pound_token.clone(), + style: self.style.clone(), + bracket_token: self.bracket_token.clone(), + path: self.path.clone(), + tokens: self.tokens.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for BareFnArg { + fn clone(&self) -> Self { + BareFnArg { + attrs: self.attrs.clone(), + name: self.name.clone(), + ty: self.ty.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Copy for BinOp {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for BinOp { + fn clone(&self) -> Self { + *self + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for Binding { + fn clone(&self) -> Self { + Binding { + ident: self.ident.clone(), + eq_token: self.eq_token.clone(), + ty: self.ty.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for Block { + fn clone(&self) -> Self { + Block { + brace_token: self.brace_token.clone(), + stmts: self.stmts.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for BoundLifetimes { + fn clone(&self) -> Self { + BoundLifetimes { + for_token: self.for_token.clone(), + lt_token: self.lt_token.clone(), + lifetimes: self.lifetimes.clone(), + gt_token: self.gt_token.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ConstParam { + fn clone(&self) -> Self { + ConstParam { + attrs: self.attrs.clone(), + const_token: self.const_token.clone(), + ident: self.ident.clone(), + colon_token: self.colon_token.clone(), + ty: self.ty.clone(), + eq_token: self.eq_token.clone(), + default: self.default.clone(), + } + } 
+} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for Constraint { + fn clone(&self) -> Self { + Constraint { + ident: self.ident.clone(), + colon_token: self.colon_token.clone(), + bounds: self.bounds.clone(), + } + } +} +#[cfg(feature = "derive")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for Data { + fn clone(&self) -> Self { + match self { + Data::Struct(v0) => Data::Struct(v0.clone()), + Data::Enum(v0) => Data::Enum(v0.clone()), + Data::Union(v0) => Data::Union(v0.clone()), + } + } +} +#[cfg(feature = "derive")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for DataEnum { + fn clone(&self) -> Self { + DataEnum { + enum_token: self.enum_token.clone(), + brace_token: self.brace_token.clone(), + variants: self.variants.clone(), + } + } +} +#[cfg(feature = "derive")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for DataStruct { + fn clone(&self) -> Self { + DataStruct { + struct_token: self.struct_token.clone(), + fields: self.fields.clone(), + semi_token: self.semi_token.clone(), + } + } +} +#[cfg(feature = "derive")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for DataUnion { + fn clone(&self) -> Self { + DataUnion { + union_token: self.union_token.clone(), + fields: self.fields.clone(), + } + } +} +#[cfg(feature = "derive")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for DeriveInput { + fn clone(&self) -> Self { + DeriveInput { + attrs: self.attrs.clone(), + vis: self.vis.clone(), + ident: self.ident.clone(), + generics: self.generics.clone(), + data: self.data.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for Expr { + fn clone(&self) -> Self { + match self { + #[cfg(feature = "full")] + Expr::Array(v0) => Expr::Array(v0.clone()), + #[cfg(feature = "full")] + Expr::Assign(v0) => Expr::Assign(v0.clone()), + #[cfg(feature = "full")] + Expr::AssignOp(v0) => Expr::AssignOp(v0.clone()), + #[cfg(feature = "full")] + Expr::Async(v0) => Expr::Async(v0.clone()), + #[cfg(feature = "full")] + Expr::Await(v0) => Expr::Await(v0.clone()), + Expr::Binary(v0) => Expr::Binary(v0.clone()), + #[cfg(feature = "full")] + Expr::Block(v0) => Expr::Block(v0.clone()), + #[cfg(feature = "full")] + Expr::Box(v0) => Expr::Box(v0.clone()), + #[cfg(feature = "full")] + Expr::Break(v0) => Expr::Break(v0.clone()), + Expr::Call(v0) => Expr::Call(v0.clone()), + Expr::Cast(v0) => Expr::Cast(v0.clone()), + #[cfg(feature = "full")] + Expr::Closure(v0) => Expr::Closure(v0.clone()), + #[cfg(feature = "full")] + Expr::Continue(v0) => Expr::Continue(v0.clone()), + Expr::Field(v0) => Expr::Field(v0.clone()), + #[cfg(feature = "full")] + Expr::ForLoop(v0) => Expr::ForLoop(v0.clone()), + #[cfg(feature = "full")] + Expr::Group(v0) => Expr::Group(v0.clone()), + #[cfg(feature = "full")] + Expr::If(v0) => Expr::If(v0.clone()), + Expr::Index(v0) => Expr::Index(v0.clone()), + #[cfg(feature = "full")] + Expr::Let(v0) => Expr::Let(v0.clone()), + Expr::Lit(v0) => Expr::Lit(v0.clone()), + #[cfg(feature = "full")] + Expr::Loop(v0) => Expr::Loop(v0.clone()), + #[cfg(feature = "full")] + Expr::Macro(v0) => Expr::Macro(v0.clone()), + #[cfg(feature = "full")] + Expr::Match(v0) => Expr::Match(v0.clone()), + #[cfg(feature = "full")] + Expr::MethodCall(v0) => Expr::MethodCall(v0.clone()), + Expr::Paren(v0) => Expr::Paren(v0.clone()), + Expr::Path(v0) => 
Expr::Path(v0.clone()), + #[cfg(feature = "full")] + Expr::Range(v0) => Expr::Range(v0.clone()), + #[cfg(feature = "full")] + Expr::Reference(v0) => Expr::Reference(v0.clone()), + #[cfg(feature = "full")] + Expr::Repeat(v0) => Expr::Repeat(v0.clone()), + #[cfg(feature = "full")] + Expr::Return(v0) => Expr::Return(v0.clone()), + #[cfg(feature = "full")] + Expr::Struct(v0) => Expr::Struct(v0.clone()), + #[cfg(feature = "full")] + Expr::Try(v0) => Expr::Try(v0.clone()), + #[cfg(feature = "full")] + Expr::TryBlock(v0) => Expr::TryBlock(v0.clone()), + #[cfg(feature = "full")] + Expr::Tuple(v0) => Expr::Tuple(v0.clone()), + #[cfg(feature = "full")] + Expr::Type(v0) => Expr::Type(v0.clone()), + Expr::Unary(v0) => Expr::Unary(v0.clone()), + #[cfg(feature = "full")] + Expr::Unsafe(v0) => Expr::Unsafe(v0.clone()), + Expr::Verbatim(v0) => Expr::Verbatim(v0.clone()), + #[cfg(feature = "full")] + Expr::While(v0) => Expr::While(v0.clone()), + #[cfg(feature = "full")] + Expr::Yield(v0) => Expr::Yield(v0.clone()), + #[cfg(any(syn_no_non_exhaustive, not(feature = "full")))] + _ => unreachable!(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ExprArray { + fn clone(&self) -> Self { + ExprArray { + attrs: self.attrs.clone(), + bracket_token: self.bracket_token.clone(), + elems: self.elems.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ExprAssign { + fn clone(&self) -> Self { + ExprAssign { + attrs: self.attrs.clone(), + left: self.left.clone(), + eq_token: self.eq_token.clone(), + right: self.right.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ExprAssignOp { + fn clone(&self) -> Self { + ExprAssignOp { + attrs: self.attrs.clone(), + left: self.left.clone(), + op: self.op.clone(), + right: self.right.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ExprAsync { + fn clone(&self) -> Self { + ExprAsync { + attrs: self.attrs.clone(), + async_token: self.async_token.clone(), + capture: self.capture.clone(), + block: self.block.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ExprAwait { + fn clone(&self) -> Self { + ExprAwait { + attrs: self.attrs.clone(), + base: self.base.clone(), + dot_token: self.dot_token.clone(), + await_token: self.await_token.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ExprBinary { + fn clone(&self) -> Self { + ExprBinary { + attrs: self.attrs.clone(), + left: self.left.clone(), + op: self.op.clone(), + right: self.right.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ExprBlock { + fn clone(&self) -> Self { + ExprBlock { + attrs: self.attrs.clone(), + label: self.label.clone(), + block: self.block.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ExprBox { + fn clone(&self) -> Self { + ExprBox { + attrs: self.attrs.clone(), + box_token: self.box_token.clone(), + expr: self.expr.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ExprBreak { + fn clone(&self) -> Self { + ExprBreak { + attrs: self.attrs.clone(), + break_token: 
self.break_token.clone(), + label: self.label.clone(), + expr: self.expr.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ExprCall { + fn clone(&self) -> Self { + ExprCall { + attrs: self.attrs.clone(), + func: self.func.clone(), + paren_token: self.paren_token.clone(), + args: self.args.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ExprCast { + fn clone(&self) -> Self { + ExprCast { + attrs: self.attrs.clone(), + expr: self.expr.clone(), + as_token: self.as_token.clone(), + ty: self.ty.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ExprClosure { + fn clone(&self) -> Self { + ExprClosure { + attrs: self.attrs.clone(), + movability: self.movability.clone(), + asyncness: self.asyncness.clone(), + capture: self.capture.clone(), + or1_token: self.or1_token.clone(), + inputs: self.inputs.clone(), + or2_token: self.or2_token.clone(), + output: self.output.clone(), + body: self.body.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ExprContinue { + fn clone(&self) -> Self { + ExprContinue { + attrs: self.attrs.clone(), + continue_token: self.continue_token.clone(), + label: self.label.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ExprField { + fn clone(&self) -> Self { + ExprField { + attrs: self.attrs.clone(), + base: self.base.clone(), + dot_token: self.dot_token.clone(), + member: self.member.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ExprForLoop { + fn clone(&self) -> Self { + ExprForLoop { + attrs: self.attrs.clone(), + label: self.label.clone(), + for_token: self.for_token.clone(), + pat: self.pat.clone(), + in_token: self.in_token.clone(), + expr: self.expr.clone(), + body: self.body.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ExprGroup { + fn clone(&self) -> Self { + ExprGroup { + attrs: self.attrs.clone(), + group_token: self.group_token.clone(), + expr: self.expr.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ExprIf { + fn clone(&self) -> Self { + ExprIf { + attrs: self.attrs.clone(), + if_token: self.if_token.clone(), + cond: self.cond.clone(), + then_branch: self.then_branch.clone(), + else_branch: self.else_branch.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ExprIndex { + fn clone(&self) -> Self { + ExprIndex { + attrs: self.attrs.clone(), + expr: self.expr.clone(), + bracket_token: self.bracket_token.clone(), + index: self.index.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ExprLet { + fn clone(&self) -> Self { + ExprLet { + attrs: self.attrs.clone(), + let_token: self.let_token.clone(), + pat: self.pat.clone(), + eq_token: self.eq_token.clone(), + expr: self.expr.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ExprLit { + fn clone(&self) -> Self { + ExprLit { + attrs: 
self.attrs.clone(), + lit: self.lit.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ExprLoop { + fn clone(&self) -> Self { + ExprLoop { + attrs: self.attrs.clone(), + label: self.label.clone(), + loop_token: self.loop_token.clone(), + body: self.body.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ExprMacro { + fn clone(&self) -> Self { + ExprMacro { + attrs: self.attrs.clone(), + mac: self.mac.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ExprMatch { + fn clone(&self) -> Self { + ExprMatch { + attrs: self.attrs.clone(), + match_token: self.match_token.clone(), + expr: self.expr.clone(), + brace_token: self.brace_token.clone(), + arms: self.arms.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ExprMethodCall { + fn clone(&self) -> Self { + ExprMethodCall { + attrs: self.attrs.clone(), + receiver: self.receiver.clone(), + dot_token: self.dot_token.clone(), + method: self.method.clone(), + turbofish: self.turbofish.clone(), + paren_token: self.paren_token.clone(), + args: self.args.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ExprParen { + fn clone(&self) -> Self { + ExprParen { + attrs: self.attrs.clone(), + paren_token: self.paren_token.clone(), + expr: self.expr.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ExprPath { + fn clone(&self) -> Self { + ExprPath { + attrs: self.attrs.clone(), + qself: self.qself.clone(), + path: self.path.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ExprRange { + fn clone(&self) -> Self { + ExprRange { + attrs: self.attrs.clone(), + from: self.from.clone(), + limits: self.limits.clone(), + to: self.to.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ExprReference { + fn clone(&self) -> Self { + ExprReference { + attrs: self.attrs.clone(), + and_token: self.and_token.clone(), + raw: self.raw.clone(), + mutability: self.mutability.clone(), + expr: self.expr.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ExprRepeat { + fn clone(&self) -> Self { + ExprRepeat { + attrs: self.attrs.clone(), + bracket_token: self.bracket_token.clone(), + expr: self.expr.clone(), + semi_token: self.semi_token.clone(), + len: self.len.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ExprReturn { + fn clone(&self) -> Self { + ExprReturn { + attrs: self.attrs.clone(), + return_token: self.return_token.clone(), + expr: self.expr.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ExprStruct { + fn clone(&self) -> Self { + ExprStruct { + attrs: self.attrs.clone(), + path: self.path.clone(), + brace_token: self.brace_token.clone(), + fields: self.fields.clone(), + dot2_token: self.dot2_token.clone(), + rest: self.rest.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ExprTry { + fn clone(&self) -> Self { 
+ ExprTry { + attrs: self.attrs.clone(), + expr: self.expr.clone(), + question_token: self.question_token.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ExprTryBlock { + fn clone(&self) -> Self { + ExprTryBlock { + attrs: self.attrs.clone(), + try_token: self.try_token.clone(), + block: self.block.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ExprTuple { + fn clone(&self) -> Self { + ExprTuple { + attrs: self.attrs.clone(), + paren_token: self.paren_token.clone(), + elems: self.elems.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ExprType { + fn clone(&self) -> Self { + ExprType { + attrs: self.attrs.clone(), + expr: self.expr.clone(), + colon_token: self.colon_token.clone(), + ty: self.ty.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ExprUnary { + fn clone(&self) -> Self { + ExprUnary { + attrs: self.attrs.clone(), + op: self.op.clone(), + expr: self.expr.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ExprUnsafe { + fn clone(&self) -> Self { + ExprUnsafe { + attrs: self.attrs.clone(), + unsafe_token: self.unsafe_token.clone(), + block: self.block.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ExprWhile { + fn clone(&self) -> Self { + ExprWhile { + attrs: self.attrs.clone(), + label: self.label.clone(), + while_token: self.while_token.clone(), + cond: self.cond.clone(), + body: self.body.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ExprYield { + fn clone(&self) -> Self { + ExprYield { + attrs: self.attrs.clone(), + yield_token: self.yield_token.clone(), + expr: self.expr.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for Field { + fn clone(&self) -> Self { + Field { + attrs: self.attrs.clone(), + vis: self.vis.clone(), + ident: self.ident.clone(), + colon_token: self.colon_token.clone(), + ty: self.ty.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for FieldPat { + fn clone(&self) -> Self { + FieldPat { + attrs: self.attrs.clone(), + member: self.member.clone(), + colon_token: self.colon_token.clone(), + pat: self.pat.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for FieldValue { + fn clone(&self) -> Self { + FieldValue { + attrs: self.attrs.clone(), + member: self.member.clone(), + colon_token: self.colon_token.clone(), + expr: self.expr.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for Fields { + fn clone(&self) -> Self { + match self { + Fields::Named(v0) => Fields::Named(v0.clone()), + Fields::Unnamed(v0) => Fields::Unnamed(v0.clone()), + Fields::Unit => Fields::Unit, + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for FieldsNamed { + fn clone(&self) -> Self { + FieldsNamed { + brace_token: self.brace_token.clone(), + named: self.named.clone(), + } + } +} 
+#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for FieldsUnnamed { + fn clone(&self) -> Self { + FieldsUnnamed { + paren_token: self.paren_token.clone(), + unnamed: self.unnamed.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for File { + fn clone(&self) -> Self { + File { + shebang: self.shebang.clone(), + attrs: self.attrs.clone(), + items: self.items.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for FnArg { + fn clone(&self) -> Self { + match self { + FnArg::Receiver(v0) => FnArg::Receiver(v0.clone()), + FnArg::Typed(v0) => FnArg::Typed(v0.clone()), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ForeignItem { + fn clone(&self) -> Self { + match self { + ForeignItem::Fn(v0) => ForeignItem::Fn(v0.clone()), + ForeignItem::Static(v0) => ForeignItem::Static(v0.clone()), + ForeignItem::Type(v0) => ForeignItem::Type(v0.clone()), + ForeignItem::Macro(v0) => ForeignItem::Macro(v0.clone()), + ForeignItem::Verbatim(v0) => ForeignItem::Verbatim(v0.clone()), + #[cfg(syn_no_non_exhaustive)] + _ => unreachable!(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ForeignItemFn { + fn clone(&self) -> Self { + ForeignItemFn { + attrs: self.attrs.clone(), + vis: self.vis.clone(), + sig: self.sig.clone(), + semi_token: self.semi_token.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ForeignItemMacro { + fn clone(&self) -> Self { + ForeignItemMacro { + attrs: self.attrs.clone(), + mac: self.mac.clone(), + semi_token: self.semi_token.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ForeignItemStatic { + fn clone(&self) -> Self { + ForeignItemStatic { + attrs: self.attrs.clone(), + vis: self.vis.clone(), + static_token: self.static_token.clone(), + mutability: self.mutability.clone(), + ident: self.ident.clone(), + colon_token: self.colon_token.clone(), + ty: self.ty.clone(), + semi_token: self.semi_token.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ForeignItemType { + fn clone(&self) -> Self { + ForeignItemType { + attrs: self.attrs.clone(), + vis: self.vis.clone(), + type_token: self.type_token.clone(), + ident: self.ident.clone(), + semi_token: self.semi_token.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for GenericArgument { + fn clone(&self) -> Self { + match self { + GenericArgument::Lifetime(v0) => GenericArgument::Lifetime(v0.clone()), + GenericArgument::Type(v0) => GenericArgument::Type(v0.clone()), + GenericArgument::Const(v0) => GenericArgument::Const(v0.clone()), + GenericArgument::Binding(v0) => GenericArgument::Binding(v0.clone()), + GenericArgument::Constraint(v0) => GenericArgument::Constraint(v0.clone()), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for GenericMethodArgument { + fn clone(&self) -> Self { + match self { + GenericMethodArgument::Type(v0) => GenericMethodArgument::Type(v0.clone()), + GenericMethodArgument::Const(v0) => GenericMethodArgument::Const(v0.clone()), + } + } +} +#[cfg(any(feature 
= "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for GenericParam { + fn clone(&self) -> Self { + match self { + GenericParam::Type(v0) => GenericParam::Type(v0.clone()), + GenericParam::Lifetime(v0) => GenericParam::Lifetime(v0.clone()), + GenericParam::Const(v0) => GenericParam::Const(v0.clone()), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for Generics { + fn clone(&self) -> Self { + Generics { + lt_token: self.lt_token.clone(), + params: self.params.clone(), + gt_token: self.gt_token.clone(), + where_clause: self.where_clause.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ImplItem { + fn clone(&self) -> Self { + match self { + ImplItem::Const(v0) => ImplItem::Const(v0.clone()), + ImplItem::Method(v0) => ImplItem::Method(v0.clone()), + ImplItem::Type(v0) => ImplItem::Type(v0.clone()), + ImplItem::Macro(v0) => ImplItem::Macro(v0.clone()), + ImplItem::Verbatim(v0) => ImplItem::Verbatim(v0.clone()), + #[cfg(syn_no_non_exhaustive)] + _ => unreachable!(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ImplItemConst { + fn clone(&self) -> Self { + ImplItemConst { + attrs: self.attrs.clone(), + vis: self.vis.clone(), + defaultness: self.defaultness.clone(), + const_token: self.const_token.clone(), + ident: self.ident.clone(), + colon_token: self.colon_token.clone(), + ty: self.ty.clone(), + eq_token: self.eq_token.clone(), + expr: self.expr.clone(), + semi_token: self.semi_token.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ImplItemMacro { + fn clone(&self) -> Self { + ImplItemMacro { + attrs: self.attrs.clone(), + mac: self.mac.clone(), + semi_token: self.semi_token.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ImplItemMethod { + fn clone(&self) -> Self { + ImplItemMethod { + attrs: self.attrs.clone(), + vis: self.vis.clone(), + defaultness: self.defaultness.clone(), + sig: self.sig.clone(), + block: self.block.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ImplItemType { + fn clone(&self) -> Self { + ImplItemType { + attrs: self.attrs.clone(), + vis: self.vis.clone(), + defaultness: self.defaultness.clone(), + type_token: self.type_token.clone(), + ident: self.ident.clone(), + generics: self.generics.clone(), + eq_token: self.eq_token.clone(), + ty: self.ty.clone(), + semi_token: self.semi_token.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for Index { + fn clone(&self) -> Self { + Index { + index: self.index.clone(), + span: self.span.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for Item { + fn clone(&self) -> Self { + match self { + Item::Const(v0) => Item::Const(v0.clone()), + Item::Enum(v0) => Item::Enum(v0.clone()), + Item::ExternCrate(v0) => Item::ExternCrate(v0.clone()), + Item::Fn(v0) => Item::Fn(v0.clone()), + Item::ForeignMod(v0) => Item::ForeignMod(v0.clone()), + Item::Impl(v0) => Item::Impl(v0.clone()), + Item::Macro(v0) => Item::Macro(v0.clone()), + Item::Macro2(v0) => Item::Macro2(v0.clone()), + Item::Mod(v0) => Item::Mod(v0.clone()), 
+ Item::Static(v0) => Item::Static(v0.clone()), + Item::Struct(v0) => Item::Struct(v0.clone()), + Item::Trait(v0) => Item::Trait(v0.clone()), + Item::TraitAlias(v0) => Item::TraitAlias(v0.clone()), + Item::Type(v0) => Item::Type(v0.clone()), + Item::Union(v0) => Item::Union(v0.clone()), + Item::Use(v0) => Item::Use(v0.clone()), + Item::Verbatim(v0) => Item::Verbatim(v0.clone()), + #[cfg(syn_no_non_exhaustive)] + _ => unreachable!(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ItemConst { + fn clone(&self) -> Self { + ItemConst { + attrs: self.attrs.clone(), + vis: self.vis.clone(), + const_token: self.const_token.clone(), + ident: self.ident.clone(), + colon_token: self.colon_token.clone(), + ty: self.ty.clone(), + eq_token: self.eq_token.clone(), + expr: self.expr.clone(), + semi_token: self.semi_token.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ItemEnum { + fn clone(&self) -> Self { + ItemEnum { + attrs: self.attrs.clone(), + vis: self.vis.clone(), + enum_token: self.enum_token.clone(), + ident: self.ident.clone(), + generics: self.generics.clone(), + brace_token: self.brace_token.clone(), + variants: self.variants.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ItemExternCrate { + fn clone(&self) -> Self { + ItemExternCrate { + attrs: self.attrs.clone(), + vis: self.vis.clone(), + extern_token: self.extern_token.clone(), + crate_token: self.crate_token.clone(), + ident: self.ident.clone(), + rename: self.rename.clone(), + semi_token: self.semi_token.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ItemFn { + fn clone(&self) -> Self { + ItemFn { + attrs: self.attrs.clone(), + vis: self.vis.clone(), + sig: self.sig.clone(), + block: self.block.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ItemForeignMod { + fn clone(&self) -> Self { + ItemForeignMod { + attrs: self.attrs.clone(), + abi: self.abi.clone(), + brace_token: self.brace_token.clone(), + items: self.items.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ItemImpl { + fn clone(&self) -> Self { + ItemImpl { + attrs: self.attrs.clone(), + defaultness: self.defaultness.clone(), + unsafety: self.unsafety.clone(), + impl_token: self.impl_token.clone(), + generics: self.generics.clone(), + trait_: self.trait_.clone(), + self_ty: self.self_ty.clone(), + brace_token: self.brace_token.clone(), + items: self.items.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ItemMacro { + fn clone(&self) -> Self { + ItemMacro { + attrs: self.attrs.clone(), + ident: self.ident.clone(), + mac: self.mac.clone(), + semi_token: self.semi_token.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ItemMacro2 { + fn clone(&self) -> Self { + ItemMacro2 { + attrs: self.attrs.clone(), + vis: self.vis.clone(), + macro_token: self.macro_token.clone(), + ident: self.ident.clone(), + rules: self.rules.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ItemMod { + fn clone(&self) -> Self { + ItemMod { + attrs: self.attrs.clone(), + vis: 
self.vis.clone(), + mod_token: self.mod_token.clone(), + ident: self.ident.clone(), + content: self.content.clone(), + semi: self.semi.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ItemStatic { + fn clone(&self) -> Self { + ItemStatic { + attrs: self.attrs.clone(), + vis: self.vis.clone(), + static_token: self.static_token.clone(), + mutability: self.mutability.clone(), + ident: self.ident.clone(), + colon_token: self.colon_token.clone(), + ty: self.ty.clone(), + eq_token: self.eq_token.clone(), + expr: self.expr.clone(), + semi_token: self.semi_token.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ItemStruct { + fn clone(&self) -> Self { + ItemStruct { + attrs: self.attrs.clone(), + vis: self.vis.clone(), + struct_token: self.struct_token.clone(), + ident: self.ident.clone(), + generics: self.generics.clone(), + fields: self.fields.clone(), + semi_token: self.semi_token.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ItemTrait { + fn clone(&self) -> Self { + ItemTrait { + attrs: self.attrs.clone(), + vis: self.vis.clone(), + unsafety: self.unsafety.clone(), + auto_token: self.auto_token.clone(), + trait_token: self.trait_token.clone(), + ident: self.ident.clone(), + generics: self.generics.clone(), + colon_token: self.colon_token.clone(), + supertraits: self.supertraits.clone(), + brace_token: self.brace_token.clone(), + items: self.items.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ItemTraitAlias { + fn clone(&self) -> Self { + ItemTraitAlias { + attrs: self.attrs.clone(), + vis: self.vis.clone(), + trait_token: self.trait_token.clone(), + ident: self.ident.clone(), + generics: self.generics.clone(), + eq_token: self.eq_token.clone(), + bounds: self.bounds.clone(), + semi_token: self.semi_token.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ItemType { + fn clone(&self) -> Self { + ItemType { + attrs: self.attrs.clone(), + vis: self.vis.clone(), + type_token: self.type_token.clone(), + ident: self.ident.clone(), + generics: self.generics.clone(), + eq_token: self.eq_token.clone(), + ty: self.ty.clone(), + semi_token: self.semi_token.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ItemUnion { + fn clone(&self) -> Self { + ItemUnion { + attrs: self.attrs.clone(), + vis: self.vis.clone(), + union_token: self.union_token.clone(), + ident: self.ident.clone(), + generics: self.generics.clone(), + fields: self.fields.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ItemUse { + fn clone(&self) -> Self { + ItemUse { + attrs: self.attrs.clone(), + vis: self.vis.clone(), + use_token: self.use_token.clone(), + leading_colon: self.leading_colon.clone(), + tree: self.tree.clone(), + semi_token: self.semi_token.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for Label { + fn clone(&self) -> Self { + Label { + name: self.name.clone(), + colon_token: self.colon_token.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for LifetimeDef { + fn clone(&self) -> Self 
{ + LifetimeDef { + attrs: self.attrs.clone(), + lifetime: self.lifetime.clone(), + colon_token: self.colon_token.clone(), + bounds: self.bounds.clone(), + } + } +} +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for Lit { + fn clone(&self) -> Self { + match self { + Lit::Str(v0) => Lit::Str(v0.clone()), + Lit::ByteStr(v0) => Lit::ByteStr(v0.clone()), + Lit::Byte(v0) => Lit::Byte(v0.clone()), + Lit::Char(v0) => Lit::Char(v0.clone()), + Lit::Int(v0) => Lit::Int(v0.clone()), + Lit::Float(v0) => Lit::Float(v0.clone()), + Lit::Bool(v0) => Lit::Bool(v0.clone()), + Lit::Verbatim(v0) => Lit::Verbatim(v0.clone()), + } + } +} +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for LitBool { + fn clone(&self) -> Self { + LitBool { + value: self.value.clone(), + span: self.span.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for Local { + fn clone(&self) -> Self { + Local { + attrs: self.attrs.clone(), + let_token: self.let_token.clone(), + pat: self.pat.clone(), + init: self.init.clone(), + semi_token: self.semi_token.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for Macro { + fn clone(&self) -> Self { + Macro { + path: self.path.clone(), + bang_token: self.bang_token.clone(), + delimiter: self.delimiter.clone(), + tokens: self.tokens.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for MacroDelimiter { + fn clone(&self) -> Self { + match self { + MacroDelimiter::Paren(v0) => MacroDelimiter::Paren(v0.clone()), + MacroDelimiter::Brace(v0) => MacroDelimiter::Brace(v0.clone()), + MacroDelimiter::Bracket(v0) => MacroDelimiter::Bracket(v0.clone()), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for Member { + fn clone(&self) -> Self { + match self { + Member::Named(v0) => Member::Named(v0.clone()), + Member::Unnamed(v0) => Member::Unnamed(v0.clone()), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for Meta { + fn clone(&self) -> Self { + match self { + Meta::Path(v0) => Meta::Path(v0.clone()), + Meta::List(v0) => Meta::List(v0.clone()), + Meta::NameValue(v0) => Meta::NameValue(v0.clone()), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for MetaList { + fn clone(&self) -> Self { + MetaList { + path: self.path.clone(), + paren_token: self.paren_token.clone(), + nested: self.nested.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for MetaNameValue { + fn clone(&self) -> Self { + MetaNameValue { + path: self.path.clone(), + eq_token: self.eq_token.clone(), + lit: self.lit.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for MethodTurbofish { + fn clone(&self) -> Self { + MethodTurbofish { + colon2_token: self.colon2_token.clone(), + lt_token: self.lt_token.clone(), + args: self.args.clone(), + gt_token: self.gt_token.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for NestedMeta { + fn clone(&self) -> Self { + match self { + 
NestedMeta::Meta(v0) => NestedMeta::Meta(v0.clone()), + NestedMeta::Lit(v0) => NestedMeta::Lit(v0.clone()), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ParenthesizedGenericArguments { + fn clone(&self) -> Self { + ParenthesizedGenericArguments { + paren_token: self.paren_token.clone(), + inputs: self.inputs.clone(), + output: self.output.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for Pat { + fn clone(&self) -> Self { + match self { + Pat::Box(v0) => Pat::Box(v0.clone()), + Pat::Ident(v0) => Pat::Ident(v0.clone()), + Pat::Lit(v0) => Pat::Lit(v0.clone()), + Pat::Macro(v0) => Pat::Macro(v0.clone()), + Pat::Or(v0) => Pat::Or(v0.clone()), + Pat::Path(v0) => Pat::Path(v0.clone()), + Pat::Range(v0) => Pat::Range(v0.clone()), + Pat::Reference(v0) => Pat::Reference(v0.clone()), + Pat::Rest(v0) => Pat::Rest(v0.clone()), + Pat::Slice(v0) => Pat::Slice(v0.clone()), + Pat::Struct(v0) => Pat::Struct(v0.clone()), + Pat::Tuple(v0) => Pat::Tuple(v0.clone()), + Pat::TupleStruct(v0) => Pat::TupleStruct(v0.clone()), + Pat::Type(v0) => Pat::Type(v0.clone()), + Pat::Verbatim(v0) => Pat::Verbatim(v0.clone()), + Pat::Wild(v0) => Pat::Wild(v0.clone()), + #[cfg(syn_no_non_exhaustive)] + _ => unreachable!(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for PatBox { + fn clone(&self) -> Self { + PatBox { + attrs: self.attrs.clone(), + box_token: self.box_token.clone(), + pat: self.pat.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for PatIdent { + fn clone(&self) -> Self { + PatIdent { + attrs: self.attrs.clone(), + by_ref: self.by_ref.clone(), + mutability: self.mutability.clone(), + ident: self.ident.clone(), + subpat: self.subpat.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for PatLit { + fn clone(&self) -> Self { + PatLit { + attrs: self.attrs.clone(), + expr: self.expr.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for PatMacro { + fn clone(&self) -> Self { + PatMacro { + attrs: self.attrs.clone(), + mac: self.mac.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for PatOr { + fn clone(&self) -> Self { + PatOr { + attrs: self.attrs.clone(), + leading_vert: self.leading_vert.clone(), + cases: self.cases.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for PatPath { + fn clone(&self) -> Self { + PatPath { + attrs: self.attrs.clone(), + qself: self.qself.clone(), + path: self.path.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for PatRange { + fn clone(&self) -> Self { + PatRange { + attrs: self.attrs.clone(), + lo: self.lo.clone(), + limits: self.limits.clone(), + hi: self.hi.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for PatReference { + fn clone(&self) -> Self { + PatReference { + attrs: self.attrs.clone(), + and_token: self.and_token.clone(), + mutability: self.mutability.clone(), + pat: self.pat.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for PatRest { 
+ fn clone(&self) -> Self { + PatRest { + attrs: self.attrs.clone(), + dot2_token: self.dot2_token.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for PatSlice { + fn clone(&self) -> Self { + PatSlice { + attrs: self.attrs.clone(), + bracket_token: self.bracket_token.clone(), + elems: self.elems.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for PatStruct { + fn clone(&self) -> Self { + PatStruct { + attrs: self.attrs.clone(), + path: self.path.clone(), + brace_token: self.brace_token.clone(), + fields: self.fields.clone(), + dot2_token: self.dot2_token.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for PatTuple { + fn clone(&self) -> Self { + PatTuple { + attrs: self.attrs.clone(), + paren_token: self.paren_token.clone(), + elems: self.elems.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for PatTupleStruct { + fn clone(&self) -> Self { + PatTupleStruct { + attrs: self.attrs.clone(), + path: self.path.clone(), + pat: self.pat.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for PatType { + fn clone(&self) -> Self { + PatType { + attrs: self.attrs.clone(), + pat: self.pat.clone(), + colon_token: self.colon_token.clone(), + ty: self.ty.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for PatWild { + fn clone(&self) -> Self { + PatWild { + attrs: self.attrs.clone(), + underscore_token: self.underscore_token.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for Path { + fn clone(&self) -> Self { + Path { + leading_colon: self.leading_colon.clone(), + segments: self.segments.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for PathArguments { + fn clone(&self) -> Self { + match self { + PathArguments::None => PathArguments::None, + PathArguments::AngleBracketed(v0) => { + PathArguments::AngleBracketed(v0.clone()) + } + PathArguments::Parenthesized(v0) => PathArguments::Parenthesized(v0.clone()), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for PathSegment { + fn clone(&self) -> Self { + PathSegment { + ident: self.ident.clone(), + arguments: self.arguments.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for PredicateEq { + fn clone(&self) -> Self { + PredicateEq { + lhs_ty: self.lhs_ty.clone(), + eq_token: self.eq_token.clone(), + rhs_ty: self.rhs_ty.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for PredicateLifetime { + fn clone(&self) -> Self { + PredicateLifetime { + lifetime: self.lifetime.clone(), + colon_token: self.colon_token.clone(), + bounds: self.bounds.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for PredicateType { + fn clone(&self) -> Self { + PredicateType { + lifetimes: self.lifetimes.clone(), + bounded_ty: self.bounded_ty.clone(), + colon_token: 
self.colon_token.clone(), + bounds: self.bounds.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for QSelf { + fn clone(&self) -> Self { + QSelf { + lt_token: self.lt_token.clone(), + ty: self.ty.clone(), + position: self.position.clone(), + as_token: self.as_token.clone(), + gt_token: self.gt_token.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Copy for RangeLimits {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for RangeLimits { + fn clone(&self) -> Self { + *self + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for Receiver { + fn clone(&self) -> Self { + Receiver { + attrs: self.attrs.clone(), + reference: self.reference.clone(), + mutability: self.mutability.clone(), + self_token: self.self_token.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for ReturnType { + fn clone(&self) -> Self { + match self { + ReturnType::Default => ReturnType::Default, + ReturnType::Type(v0, v1) => ReturnType::Type(v0.clone(), v1.clone()), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for Signature { + fn clone(&self) -> Self { + Signature { + constness: self.constness.clone(), + asyncness: self.asyncness.clone(), + unsafety: self.unsafety.clone(), + abi: self.abi.clone(), + fn_token: self.fn_token.clone(), + ident: self.ident.clone(), + generics: self.generics.clone(), + paren_token: self.paren_token.clone(), + inputs: self.inputs.clone(), + variadic: self.variadic.clone(), + output: self.output.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for Stmt { + fn clone(&self) -> Self { + match self { + Stmt::Local(v0) => Stmt::Local(v0.clone()), + Stmt::Item(v0) => Stmt::Item(v0.clone()), + Stmt::Expr(v0) => Stmt::Expr(v0.clone()), + Stmt::Semi(v0, v1) => Stmt::Semi(v0.clone(), v1.clone()), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for TraitBound { + fn clone(&self) -> Self { + TraitBound { + paren_token: self.paren_token.clone(), + modifier: self.modifier.clone(), + lifetimes: self.lifetimes.clone(), + path: self.path.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Copy for TraitBoundModifier {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for TraitBoundModifier { + fn clone(&self) -> Self { + *self + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for TraitItem { + fn clone(&self) -> Self { + match self { + TraitItem::Const(v0) => TraitItem::Const(v0.clone()), + TraitItem::Method(v0) => TraitItem::Method(v0.clone()), + TraitItem::Type(v0) => TraitItem::Type(v0.clone()), + TraitItem::Macro(v0) => TraitItem::Macro(v0.clone()), + TraitItem::Verbatim(v0) => TraitItem::Verbatim(v0.clone()), + #[cfg(syn_no_non_exhaustive)] + _ => unreachable!(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for TraitItemConst { + fn clone(&self) -> Self { + TraitItemConst { + attrs: self.attrs.clone(), + 
const_token: self.const_token.clone(), + ident: self.ident.clone(), + colon_token: self.colon_token.clone(), + ty: self.ty.clone(), + default: self.default.clone(), + semi_token: self.semi_token.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for TraitItemMacro { + fn clone(&self) -> Self { + TraitItemMacro { + attrs: self.attrs.clone(), + mac: self.mac.clone(), + semi_token: self.semi_token.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for TraitItemMethod { + fn clone(&self) -> Self { + TraitItemMethod { + attrs: self.attrs.clone(), + sig: self.sig.clone(), + default: self.default.clone(), + semi_token: self.semi_token.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for TraitItemType { + fn clone(&self) -> Self { + TraitItemType { + attrs: self.attrs.clone(), + type_token: self.type_token.clone(), + ident: self.ident.clone(), + generics: self.generics.clone(), + colon_token: self.colon_token.clone(), + bounds: self.bounds.clone(), + default: self.default.clone(), + semi_token: self.semi_token.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for Type { + fn clone(&self) -> Self { + match self { + Type::Array(v0) => Type::Array(v0.clone()), + Type::BareFn(v0) => Type::BareFn(v0.clone()), + Type::Group(v0) => Type::Group(v0.clone()), + Type::ImplTrait(v0) => Type::ImplTrait(v0.clone()), + Type::Infer(v0) => Type::Infer(v0.clone()), + Type::Macro(v0) => Type::Macro(v0.clone()), + Type::Never(v0) => Type::Never(v0.clone()), + Type::Paren(v0) => Type::Paren(v0.clone()), + Type::Path(v0) => Type::Path(v0.clone()), + Type::Ptr(v0) => Type::Ptr(v0.clone()), + Type::Reference(v0) => Type::Reference(v0.clone()), + Type::Slice(v0) => Type::Slice(v0.clone()), + Type::TraitObject(v0) => Type::TraitObject(v0.clone()), + Type::Tuple(v0) => Type::Tuple(v0.clone()), + Type::Verbatim(v0) => Type::Verbatim(v0.clone()), + #[cfg(syn_no_non_exhaustive)] + _ => unreachable!(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for TypeArray { + fn clone(&self) -> Self { + TypeArray { + bracket_token: self.bracket_token.clone(), + elem: self.elem.clone(), + semi_token: self.semi_token.clone(), + len: self.len.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for TypeBareFn { + fn clone(&self) -> Self { + TypeBareFn { + lifetimes: self.lifetimes.clone(), + unsafety: self.unsafety.clone(), + abi: self.abi.clone(), + fn_token: self.fn_token.clone(), + paren_token: self.paren_token.clone(), + inputs: self.inputs.clone(), + variadic: self.variadic.clone(), + output: self.output.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for TypeGroup { + fn clone(&self) -> Self { + TypeGroup { + group_token: self.group_token.clone(), + elem: self.elem.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for TypeImplTrait { + fn clone(&self) -> Self { + TypeImplTrait { + impl_token: self.impl_token.clone(), + bounds: self.bounds.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] 
+#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for TypeInfer { + fn clone(&self) -> Self { + TypeInfer { + underscore_token: self.underscore_token.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for TypeMacro { + fn clone(&self) -> Self { + TypeMacro { mac: self.mac.clone() } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for TypeNever { + fn clone(&self) -> Self { + TypeNever { + bang_token: self.bang_token.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for TypeParam { + fn clone(&self) -> Self { + TypeParam { + attrs: self.attrs.clone(), + ident: self.ident.clone(), + colon_token: self.colon_token.clone(), + bounds: self.bounds.clone(), + eq_token: self.eq_token.clone(), + default: self.default.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for TypeParamBound { + fn clone(&self) -> Self { + match self { + TypeParamBound::Trait(v0) => TypeParamBound::Trait(v0.clone()), + TypeParamBound::Lifetime(v0) => TypeParamBound::Lifetime(v0.clone()), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for TypeParen { + fn clone(&self) -> Self { + TypeParen { + paren_token: self.paren_token.clone(), + elem: self.elem.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for TypePath { + fn clone(&self) -> Self { + TypePath { + qself: self.qself.clone(), + path: self.path.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for TypePtr { + fn clone(&self) -> Self { + TypePtr { + star_token: self.star_token.clone(), + const_token: self.const_token.clone(), + mutability: self.mutability.clone(), + elem: self.elem.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for TypeReference { + fn clone(&self) -> Self { + TypeReference { + and_token: self.and_token.clone(), + lifetime: self.lifetime.clone(), + mutability: self.mutability.clone(), + elem: self.elem.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for TypeSlice { + fn clone(&self) -> Self { + TypeSlice { + bracket_token: self.bracket_token.clone(), + elem: self.elem.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for TypeTraitObject { + fn clone(&self) -> Self { + TypeTraitObject { + dyn_token: self.dyn_token.clone(), + bounds: self.bounds.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for TypeTuple { + fn clone(&self) -> Self { + TypeTuple { + paren_token: self.paren_token.clone(), + elems: self.elems.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Copy for UnOp {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone 
for UnOp { + fn clone(&self) -> Self { + *self + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for UseGlob { + fn clone(&self) -> Self { + UseGlob { + star_token: self.star_token.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for UseGroup { + fn clone(&self) -> Self { + UseGroup { + brace_token: self.brace_token.clone(), + items: self.items.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for UseName { + fn clone(&self) -> Self { + UseName { + ident: self.ident.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for UsePath { + fn clone(&self) -> Self { + UsePath { + ident: self.ident.clone(), + colon2_token: self.colon2_token.clone(), + tree: self.tree.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for UseRename { + fn clone(&self) -> Self { + UseRename { + ident: self.ident.clone(), + as_token: self.as_token.clone(), + rename: self.rename.clone(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for UseTree { + fn clone(&self) -> Self { + match self { + UseTree::Path(v0) => UseTree::Path(v0.clone()), + UseTree::Name(v0) => UseTree::Name(v0.clone()), + UseTree::Rename(v0) => UseTree::Rename(v0.clone()), + UseTree::Glob(v0) => UseTree::Glob(v0.clone()), + UseTree::Group(v0) => UseTree::Group(v0.clone()), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for Variadic { + fn clone(&self) -> Self { + Variadic { + attrs: self.attrs.clone(), + dots: self.dots.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for Variant { + fn clone(&self) -> Self { + Variant { + attrs: self.attrs.clone(), + ident: self.ident.clone(), + fields: self.fields.clone(), + discriminant: self.discriminant.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for VisCrate { + fn clone(&self) -> Self { + VisCrate { + crate_token: self.crate_token.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for VisPublic { + fn clone(&self) -> Self { + VisPublic { + pub_token: self.pub_token.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for VisRestricted { + fn clone(&self) -> Self { + VisRestricted { + pub_token: self.pub_token.clone(), + paren_token: self.paren_token.clone(), + in_token: self.in_token.clone(), + path: self.path.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for Visibility { + fn clone(&self) -> Self { + match self { + Visibility::Public(v0) => Visibility::Public(v0.clone()), + Visibility::Crate(v0) => Visibility::Crate(v0.clone()), + Visibility::Restricted(v0) => Visibility::Restricted(v0.clone()), + Visibility::Inherited => Visibility::Inherited, + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for WhereClause { + fn clone(&self) -> Self { 
+ WhereClause { + where_token: self.where_token.clone(), + predicates: self.predicates.clone(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for WherePredicate { + fn clone(&self) -> Self { + match self { + WherePredicate::Type(v0) => WherePredicate::Type(v0.clone()), + WherePredicate::Lifetime(v0) => WherePredicate::Lifetime(v0.clone()), + WherePredicate::Eq(v0) => WherePredicate::Eq(v0.clone()), + } + } +} diff --git a/third_party/rust/syn/src/gen/debug.rs b/third_party/rust/syn/src/gen/debug.rs new file mode 100644 index 0000000000..a1f0afa790 --- /dev/null +++ b/third_party/rust/syn/src/gen/debug.rs @@ -0,0 +1,3042 @@ +// This file is @generated by syn-internal-codegen. +// It is not intended for manual editing. + +use crate::*; +use std::fmt::{self, Debug}; +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for Abi { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("Abi"); + formatter.field("extern_token", &self.extern_token); + formatter.field("name", &self.name); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for AngleBracketedGenericArguments { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("AngleBracketedGenericArguments"); + formatter.field("colon2_token", &self.colon2_token); + formatter.field("lt_token", &self.lt_token); + formatter.field("args", &self.args); + formatter.field("gt_token", &self.gt_token); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for Arm { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("Arm"); + formatter.field("attrs", &self.attrs); + formatter.field("pat", &self.pat); + formatter.field("guard", &self.guard); + formatter.field("fat_arrow_token", &self.fat_arrow_token); + formatter.field("body", &self.body); + formatter.field("comma", &self.comma); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for AttrStyle { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + match self { + AttrStyle::Outer => formatter.write_str("Outer"), + AttrStyle::Inner(v0) => { + let mut formatter = formatter.debug_tuple("Inner"); + formatter.field(v0); + formatter.finish() + } + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for Attribute { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("Attribute"); + formatter.field("pound_token", &self.pound_token); + formatter.field("style", &self.style); + formatter.field("bracket_token", &self.bracket_token); + formatter.field("path", &self.path); + formatter.field("tokens", &self.tokens); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for BareFnArg { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("BareFnArg"); + formatter.field("attrs", &self.attrs); + formatter.field("name", &self.name); + 
formatter.field("ty", &self.ty); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for BinOp { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + match self { + BinOp::Add(v0) => { + let mut formatter = formatter.debug_tuple("Add"); + formatter.field(v0); + formatter.finish() + } + BinOp::Sub(v0) => { + let mut formatter = formatter.debug_tuple("Sub"); + formatter.field(v0); + formatter.finish() + } + BinOp::Mul(v0) => { + let mut formatter = formatter.debug_tuple("Mul"); + formatter.field(v0); + formatter.finish() + } + BinOp::Div(v0) => { + let mut formatter = formatter.debug_tuple("Div"); + formatter.field(v0); + formatter.finish() + } + BinOp::Rem(v0) => { + let mut formatter = formatter.debug_tuple("Rem"); + formatter.field(v0); + formatter.finish() + } + BinOp::And(v0) => { + let mut formatter = formatter.debug_tuple("And"); + formatter.field(v0); + formatter.finish() + } + BinOp::Or(v0) => { + let mut formatter = formatter.debug_tuple("Or"); + formatter.field(v0); + formatter.finish() + } + BinOp::BitXor(v0) => { + let mut formatter = formatter.debug_tuple("BitXor"); + formatter.field(v0); + formatter.finish() + } + BinOp::BitAnd(v0) => { + let mut formatter = formatter.debug_tuple("BitAnd"); + formatter.field(v0); + formatter.finish() + } + BinOp::BitOr(v0) => { + let mut formatter = formatter.debug_tuple("BitOr"); + formatter.field(v0); + formatter.finish() + } + BinOp::Shl(v0) => { + let mut formatter = formatter.debug_tuple("Shl"); + formatter.field(v0); + formatter.finish() + } + BinOp::Shr(v0) => { + let mut formatter = formatter.debug_tuple("Shr"); + formatter.field(v0); + formatter.finish() + } + BinOp::Eq(v0) => { + let mut formatter = formatter.debug_tuple("Eq"); + formatter.field(v0); + formatter.finish() + } + BinOp::Lt(v0) => { + let mut formatter = formatter.debug_tuple("Lt"); + formatter.field(v0); + formatter.finish() + } + BinOp::Le(v0) => { + let mut formatter = formatter.debug_tuple("Le"); + formatter.field(v0); + formatter.finish() + } + BinOp::Ne(v0) => { + let mut formatter = formatter.debug_tuple("Ne"); + formatter.field(v0); + formatter.finish() + } + BinOp::Ge(v0) => { + let mut formatter = formatter.debug_tuple("Ge"); + formatter.field(v0); + formatter.finish() + } + BinOp::Gt(v0) => { + let mut formatter = formatter.debug_tuple("Gt"); + formatter.field(v0); + formatter.finish() + } + BinOp::AddEq(v0) => { + let mut formatter = formatter.debug_tuple("AddEq"); + formatter.field(v0); + formatter.finish() + } + BinOp::SubEq(v0) => { + let mut formatter = formatter.debug_tuple("SubEq"); + formatter.field(v0); + formatter.finish() + } + BinOp::MulEq(v0) => { + let mut formatter = formatter.debug_tuple("MulEq"); + formatter.field(v0); + formatter.finish() + } + BinOp::DivEq(v0) => { + let mut formatter = formatter.debug_tuple("DivEq"); + formatter.field(v0); + formatter.finish() + } + BinOp::RemEq(v0) => { + let mut formatter = formatter.debug_tuple("RemEq"); + formatter.field(v0); + formatter.finish() + } + BinOp::BitXorEq(v0) => { + let mut formatter = formatter.debug_tuple("BitXorEq"); + formatter.field(v0); + formatter.finish() + } + BinOp::BitAndEq(v0) => { + let mut formatter = formatter.debug_tuple("BitAndEq"); + formatter.field(v0); + formatter.finish() + } + BinOp::BitOrEq(v0) => { + let mut formatter = formatter.debug_tuple("BitOrEq"); + formatter.field(v0); + formatter.finish() + } + BinOp::ShlEq(v0) => { + let mut formatter = 
formatter.debug_tuple("ShlEq"); + formatter.field(v0); + formatter.finish() + } + BinOp::ShrEq(v0) => { + let mut formatter = formatter.debug_tuple("ShrEq"); + formatter.field(v0); + formatter.finish() + } + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for Binding { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("Binding"); + formatter.field("ident", &self.ident); + formatter.field("eq_token", &self.eq_token); + formatter.field("ty", &self.ty); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for Block { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("Block"); + formatter.field("brace_token", &self.brace_token); + formatter.field("stmts", &self.stmts); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for BoundLifetimes { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("BoundLifetimes"); + formatter.field("for_token", &self.for_token); + formatter.field("lt_token", &self.lt_token); + formatter.field("lifetimes", &self.lifetimes); + formatter.field("gt_token", &self.gt_token); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ConstParam { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ConstParam"); + formatter.field("attrs", &self.attrs); + formatter.field("const_token", &self.const_token); + formatter.field("ident", &self.ident); + formatter.field("colon_token", &self.colon_token); + formatter.field("ty", &self.ty); + formatter.field("eq_token", &self.eq_token); + formatter.field("default", &self.default); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for Constraint { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("Constraint"); + formatter.field("ident", &self.ident); + formatter.field("colon_token", &self.colon_token); + formatter.field("bounds", &self.bounds); + formatter.finish() + } +} +#[cfg(feature = "derive")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for Data { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + match self { + Data::Struct(v0) => { + let mut formatter = formatter.debug_tuple("Struct"); + formatter.field(v0); + formatter.finish() + } + Data::Enum(v0) => { + let mut formatter = formatter.debug_tuple("Enum"); + formatter.field(v0); + formatter.finish() + } + Data::Union(v0) => { + let mut formatter = formatter.debug_tuple("Union"); + formatter.field(v0); + formatter.finish() + } + } + } +} +#[cfg(feature = "derive")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for DataEnum { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("DataEnum"); + formatter.field("enum_token", &self.enum_token); + formatter.field("brace_token", &self.brace_token); + formatter.field("variants", &self.variants); + formatter.finish() + } +} +#[cfg(feature = "derive")] +#[cfg_attr(doc_cfg, doc(cfg(feature 
= "extra-traits")))] +impl Debug for DataStruct { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("DataStruct"); + formatter.field("struct_token", &self.struct_token); + formatter.field("fields", &self.fields); + formatter.field("semi_token", &self.semi_token); + formatter.finish() + } +} +#[cfg(feature = "derive")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for DataUnion { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("DataUnion"); + formatter.field("union_token", &self.union_token); + formatter.field("fields", &self.fields); + formatter.finish() + } +} +#[cfg(feature = "derive")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for DeriveInput { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("DeriveInput"); + formatter.field("attrs", &self.attrs); + formatter.field("vis", &self.vis); + formatter.field("ident", &self.ident); + formatter.field("generics", &self.generics); + formatter.field("data", &self.data); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for Expr { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + match self { + #[cfg(feature = "full")] + Expr::Array(v0) => { + let mut formatter = formatter.debug_tuple("Array"); + formatter.field(v0); + formatter.finish() + } + #[cfg(feature = "full")] + Expr::Assign(v0) => { + let mut formatter = formatter.debug_tuple("Assign"); + formatter.field(v0); + formatter.finish() + } + #[cfg(feature = "full")] + Expr::AssignOp(v0) => { + let mut formatter = formatter.debug_tuple("AssignOp"); + formatter.field(v0); + formatter.finish() + } + #[cfg(feature = "full")] + Expr::Async(v0) => { + let mut formatter = formatter.debug_tuple("Async"); + formatter.field(v0); + formatter.finish() + } + #[cfg(feature = "full")] + Expr::Await(v0) => { + let mut formatter = formatter.debug_tuple("Await"); + formatter.field(v0); + formatter.finish() + } + Expr::Binary(v0) => { + let mut formatter = formatter.debug_tuple("Binary"); + formatter.field(v0); + formatter.finish() + } + #[cfg(feature = "full")] + Expr::Block(v0) => { + let mut formatter = formatter.debug_tuple("Block"); + formatter.field(v0); + formatter.finish() + } + #[cfg(feature = "full")] + Expr::Box(v0) => { + let mut formatter = formatter.debug_tuple("Box"); + formatter.field(v0); + formatter.finish() + } + #[cfg(feature = "full")] + Expr::Break(v0) => { + let mut formatter = formatter.debug_tuple("Break"); + formatter.field(v0); + formatter.finish() + } + Expr::Call(v0) => { + let mut formatter = formatter.debug_tuple("Call"); + formatter.field(v0); + formatter.finish() + } + Expr::Cast(v0) => { + let mut formatter = formatter.debug_tuple("Cast"); + formatter.field(v0); + formatter.finish() + } + #[cfg(feature = "full")] + Expr::Closure(v0) => { + let mut formatter = formatter.debug_tuple("Closure"); + formatter.field(v0); + formatter.finish() + } + #[cfg(feature = "full")] + Expr::Continue(v0) => { + let mut formatter = formatter.debug_tuple("Continue"); + formatter.field(v0); + formatter.finish() + } + Expr::Field(v0) => { + let mut formatter = formatter.debug_tuple("Field"); + formatter.field(v0); + formatter.finish() + } + #[cfg(feature = "full")] + Expr::ForLoop(v0) => { + let mut formatter = formatter.debug_tuple("ForLoop"); + 
formatter.field(v0); + formatter.finish() + } + #[cfg(feature = "full")] + Expr::Group(v0) => { + let mut formatter = formatter.debug_tuple("Group"); + formatter.field(v0); + formatter.finish() + } + #[cfg(feature = "full")] + Expr::If(v0) => { + let mut formatter = formatter.debug_tuple("If"); + formatter.field(v0); + formatter.finish() + } + Expr::Index(v0) => { + let mut formatter = formatter.debug_tuple("Index"); + formatter.field(v0); + formatter.finish() + } + #[cfg(feature = "full")] + Expr::Let(v0) => { + let mut formatter = formatter.debug_tuple("Let"); + formatter.field(v0); + formatter.finish() + } + Expr::Lit(v0) => { + let mut formatter = formatter.debug_tuple("Lit"); + formatter.field(v0); + formatter.finish() + } + #[cfg(feature = "full")] + Expr::Loop(v0) => { + let mut formatter = formatter.debug_tuple("Loop"); + formatter.field(v0); + formatter.finish() + } + #[cfg(feature = "full")] + Expr::Macro(v0) => { + let mut formatter = formatter.debug_tuple("Macro"); + formatter.field(v0); + formatter.finish() + } + #[cfg(feature = "full")] + Expr::Match(v0) => { + let mut formatter = formatter.debug_tuple("Match"); + formatter.field(v0); + formatter.finish() + } + #[cfg(feature = "full")] + Expr::MethodCall(v0) => { + let mut formatter = formatter.debug_tuple("MethodCall"); + formatter.field(v0); + formatter.finish() + } + Expr::Paren(v0) => { + let mut formatter = formatter.debug_tuple("Paren"); + formatter.field(v0); + formatter.finish() + } + Expr::Path(v0) => { + let mut formatter = formatter.debug_tuple("Path"); + formatter.field(v0); + formatter.finish() + } + #[cfg(feature = "full")] + Expr::Range(v0) => { + let mut formatter = formatter.debug_tuple("Range"); + formatter.field(v0); + formatter.finish() + } + #[cfg(feature = "full")] + Expr::Reference(v0) => { + let mut formatter = formatter.debug_tuple("Reference"); + formatter.field(v0); + formatter.finish() + } + #[cfg(feature = "full")] + Expr::Repeat(v0) => { + let mut formatter = formatter.debug_tuple("Repeat"); + formatter.field(v0); + formatter.finish() + } + #[cfg(feature = "full")] + Expr::Return(v0) => { + let mut formatter = formatter.debug_tuple("Return"); + formatter.field(v0); + formatter.finish() + } + #[cfg(feature = "full")] + Expr::Struct(v0) => { + let mut formatter = formatter.debug_tuple("Struct"); + formatter.field(v0); + formatter.finish() + } + #[cfg(feature = "full")] + Expr::Try(v0) => { + let mut formatter = formatter.debug_tuple("Try"); + formatter.field(v0); + formatter.finish() + } + #[cfg(feature = "full")] + Expr::TryBlock(v0) => { + let mut formatter = formatter.debug_tuple("TryBlock"); + formatter.field(v0); + formatter.finish() + } + #[cfg(feature = "full")] + Expr::Tuple(v0) => { + let mut formatter = formatter.debug_tuple("Tuple"); + formatter.field(v0); + formatter.finish() + } + #[cfg(feature = "full")] + Expr::Type(v0) => { + let mut formatter = formatter.debug_tuple("Type"); + formatter.field(v0); + formatter.finish() + } + Expr::Unary(v0) => { + let mut formatter = formatter.debug_tuple("Unary"); + formatter.field(v0); + formatter.finish() + } + #[cfg(feature = "full")] + Expr::Unsafe(v0) => { + let mut formatter = formatter.debug_tuple("Unsafe"); + formatter.field(v0); + formatter.finish() + } + Expr::Verbatim(v0) => { + let mut formatter = formatter.debug_tuple("Verbatim"); + formatter.field(v0); + formatter.finish() + } + #[cfg(feature = "full")] + Expr::While(v0) => { + let mut formatter = formatter.debug_tuple("While"); + formatter.field(v0); + formatter.finish() + } + 
#[cfg(feature = "full")] + Expr::Yield(v0) => { + let mut formatter = formatter.debug_tuple("Yield"); + formatter.field(v0); + formatter.finish() + } + #[cfg(any(syn_no_non_exhaustive, not(feature = "full")))] + _ => unreachable!(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ExprArray { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ExprArray"); + formatter.field("attrs", &self.attrs); + formatter.field("bracket_token", &self.bracket_token); + formatter.field("elems", &self.elems); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ExprAssign { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ExprAssign"); + formatter.field("attrs", &self.attrs); + formatter.field("left", &self.left); + formatter.field("eq_token", &self.eq_token); + formatter.field("right", &self.right); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ExprAssignOp { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ExprAssignOp"); + formatter.field("attrs", &self.attrs); + formatter.field("left", &self.left); + formatter.field("op", &self.op); + formatter.field("right", &self.right); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ExprAsync { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ExprAsync"); + formatter.field("attrs", &self.attrs); + formatter.field("async_token", &self.async_token); + formatter.field("capture", &self.capture); + formatter.field("block", &self.block); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ExprAwait { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ExprAwait"); + formatter.field("attrs", &self.attrs); + formatter.field("base", &self.base); + formatter.field("dot_token", &self.dot_token); + formatter.field("await_token", &self.await_token); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ExprBinary { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ExprBinary"); + formatter.field("attrs", &self.attrs); + formatter.field("left", &self.left); + formatter.field("op", &self.op); + formatter.field("right", &self.right); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ExprBlock { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ExprBlock"); + formatter.field("attrs", &self.attrs); + formatter.field("label", &self.label); + formatter.field("block", &self.block); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ExprBox { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ExprBox"); + formatter.field("attrs", &self.attrs); + formatter.field("box_token", &self.box_token); + 
formatter.field("expr", &self.expr); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ExprBreak { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ExprBreak"); + formatter.field("attrs", &self.attrs); + formatter.field("break_token", &self.break_token); + formatter.field("label", &self.label); + formatter.field("expr", &self.expr); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ExprCall { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ExprCall"); + formatter.field("attrs", &self.attrs); + formatter.field("func", &self.func); + formatter.field("paren_token", &self.paren_token); + formatter.field("args", &self.args); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ExprCast { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ExprCast"); + formatter.field("attrs", &self.attrs); + formatter.field("expr", &self.expr); + formatter.field("as_token", &self.as_token); + formatter.field("ty", &self.ty); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ExprClosure { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ExprClosure"); + formatter.field("attrs", &self.attrs); + formatter.field("movability", &self.movability); + formatter.field("asyncness", &self.asyncness); + formatter.field("capture", &self.capture); + formatter.field("or1_token", &self.or1_token); + formatter.field("inputs", &self.inputs); + formatter.field("or2_token", &self.or2_token); + formatter.field("output", &self.output); + formatter.field("body", &self.body); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ExprContinue { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ExprContinue"); + formatter.field("attrs", &self.attrs); + formatter.field("continue_token", &self.continue_token); + formatter.field("label", &self.label); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ExprField { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ExprField"); + formatter.field("attrs", &self.attrs); + formatter.field("base", &self.base); + formatter.field("dot_token", &self.dot_token); + formatter.field("member", &self.member); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ExprForLoop { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ExprForLoop"); + formatter.field("attrs", &self.attrs); + formatter.field("label", &self.label); + formatter.field("for_token", &self.for_token); + formatter.field("pat", &self.pat); + formatter.field("in_token", &self.in_token); + formatter.field("expr", &self.expr); + formatter.field("body", &self.body); + formatter.finish() + } +} +#[cfg(feature = "full")] 
+#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ExprGroup { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ExprGroup"); + formatter.field("attrs", &self.attrs); + formatter.field("group_token", &self.group_token); + formatter.field("expr", &self.expr); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ExprIf { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ExprIf"); + formatter.field("attrs", &self.attrs); + formatter.field("if_token", &self.if_token); + formatter.field("cond", &self.cond); + formatter.field("then_branch", &self.then_branch); + formatter.field("else_branch", &self.else_branch); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ExprIndex { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ExprIndex"); + formatter.field("attrs", &self.attrs); + formatter.field("expr", &self.expr); + formatter.field("bracket_token", &self.bracket_token); + formatter.field("index", &self.index); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ExprLet { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ExprLet"); + formatter.field("attrs", &self.attrs); + formatter.field("let_token", &self.let_token); + formatter.field("pat", &self.pat); + formatter.field("eq_token", &self.eq_token); + formatter.field("expr", &self.expr); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ExprLit { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ExprLit"); + formatter.field("attrs", &self.attrs); + formatter.field("lit", &self.lit); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ExprLoop { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ExprLoop"); + formatter.field("attrs", &self.attrs); + formatter.field("label", &self.label); + formatter.field("loop_token", &self.loop_token); + formatter.field("body", &self.body); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ExprMacro { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ExprMacro"); + formatter.field("attrs", &self.attrs); + formatter.field("mac", &self.mac); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ExprMatch { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ExprMatch"); + formatter.field("attrs", &self.attrs); + formatter.field("match_token", &self.match_token); + formatter.field("expr", &self.expr); + formatter.field("brace_token", &self.brace_token); + formatter.field("arms", &self.arms); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ExprMethodCall { + fn 
fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ExprMethodCall"); + formatter.field("attrs", &self.attrs); + formatter.field("receiver", &self.receiver); + formatter.field("dot_token", &self.dot_token); + formatter.field("method", &self.method); + formatter.field("turbofish", &self.turbofish); + formatter.field("paren_token", &self.paren_token); + formatter.field("args", &self.args); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ExprParen { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ExprParen"); + formatter.field("attrs", &self.attrs); + formatter.field("paren_token", &self.paren_token); + formatter.field("expr", &self.expr); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ExprPath { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ExprPath"); + formatter.field("attrs", &self.attrs); + formatter.field("qself", &self.qself); + formatter.field("path", &self.path); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ExprRange { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ExprRange"); + formatter.field("attrs", &self.attrs); + formatter.field("from", &self.from); + formatter.field("limits", &self.limits); + formatter.field("to", &self.to); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ExprReference { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ExprReference"); + formatter.field("attrs", &self.attrs); + formatter.field("and_token", &self.and_token); + formatter.field("raw", &self.raw); + formatter.field("mutability", &self.mutability); + formatter.field("expr", &self.expr); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ExprRepeat { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ExprRepeat"); + formatter.field("attrs", &self.attrs); + formatter.field("bracket_token", &self.bracket_token); + formatter.field("expr", &self.expr); + formatter.field("semi_token", &self.semi_token); + formatter.field("len", &self.len); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ExprReturn { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ExprReturn"); + formatter.field("attrs", &self.attrs); + formatter.field("return_token", &self.return_token); + formatter.field("expr", &self.expr); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ExprStruct { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ExprStruct"); + formatter.field("attrs", &self.attrs); + formatter.field("path", &self.path); + formatter.field("brace_token", &self.brace_token); + formatter.field("fields", &self.fields); + formatter.field("dot2_token", 
&self.dot2_token); + formatter.field("rest", &self.rest); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ExprTry { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ExprTry"); + formatter.field("attrs", &self.attrs); + formatter.field("expr", &self.expr); + formatter.field("question_token", &self.question_token); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ExprTryBlock { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ExprTryBlock"); + formatter.field("attrs", &self.attrs); + formatter.field("try_token", &self.try_token); + formatter.field("block", &self.block); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ExprTuple { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ExprTuple"); + formatter.field("attrs", &self.attrs); + formatter.field("paren_token", &self.paren_token); + formatter.field("elems", &self.elems); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ExprType { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ExprType"); + formatter.field("attrs", &self.attrs); + formatter.field("expr", &self.expr); + formatter.field("colon_token", &self.colon_token); + formatter.field("ty", &self.ty); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ExprUnary { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ExprUnary"); + formatter.field("attrs", &self.attrs); + formatter.field("op", &self.op); + formatter.field("expr", &self.expr); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ExprUnsafe { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ExprUnsafe"); + formatter.field("attrs", &self.attrs); + formatter.field("unsafe_token", &self.unsafe_token); + formatter.field("block", &self.block); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ExprWhile { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ExprWhile"); + formatter.field("attrs", &self.attrs); + formatter.field("label", &self.label); + formatter.field("while_token", &self.while_token); + formatter.field("cond", &self.cond); + formatter.field("body", &self.body); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ExprYield { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ExprYield"); + formatter.field("attrs", &self.attrs); + formatter.field("yield_token", &self.yield_token); + formatter.field("expr", &self.expr); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for Field { + fn fmt(&self, 
formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("Field"); + formatter.field("attrs", &self.attrs); + formatter.field("vis", &self.vis); + formatter.field("ident", &self.ident); + formatter.field("colon_token", &self.colon_token); + formatter.field("ty", &self.ty); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for FieldPat { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("FieldPat"); + formatter.field("attrs", &self.attrs); + formatter.field("member", &self.member); + formatter.field("colon_token", &self.colon_token); + formatter.field("pat", &self.pat); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for FieldValue { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("FieldValue"); + formatter.field("attrs", &self.attrs); + formatter.field("member", &self.member); + formatter.field("colon_token", &self.colon_token); + formatter.field("expr", &self.expr); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for Fields { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + match self { + Fields::Named(v0) => { + let mut formatter = formatter.debug_tuple("Named"); + formatter.field(v0); + formatter.finish() + } + Fields::Unnamed(v0) => { + let mut formatter = formatter.debug_tuple("Unnamed"); + formatter.field(v0); + formatter.finish() + } + Fields::Unit => formatter.write_str("Unit"), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for FieldsNamed { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("FieldsNamed"); + formatter.field("brace_token", &self.brace_token); + formatter.field("named", &self.named); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for FieldsUnnamed { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("FieldsUnnamed"); + formatter.field("paren_token", &self.paren_token); + formatter.field("unnamed", &self.unnamed); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for File { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("File"); + formatter.field("shebang", &self.shebang); + formatter.field("attrs", &self.attrs); + formatter.field("items", &self.items); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for FnArg { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + match self { + FnArg::Receiver(v0) => { + let mut formatter = formatter.debug_tuple("Receiver"); + formatter.field(v0); + formatter.finish() + } + FnArg::Typed(v0) => { + let mut formatter = formatter.debug_tuple("Typed"); + formatter.field(v0); + formatter.finish() + } + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ForeignItem { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + match 
self { + ForeignItem::Fn(v0) => { + let mut formatter = formatter.debug_tuple("Fn"); + formatter.field(v0); + formatter.finish() + } + ForeignItem::Static(v0) => { + let mut formatter = formatter.debug_tuple("Static"); + formatter.field(v0); + formatter.finish() + } + ForeignItem::Type(v0) => { + let mut formatter = formatter.debug_tuple("Type"); + formatter.field(v0); + formatter.finish() + } + ForeignItem::Macro(v0) => { + let mut formatter = formatter.debug_tuple("Macro"); + formatter.field(v0); + formatter.finish() + } + ForeignItem::Verbatim(v0) => { + let mut formatter = formatter.debug_tuple("Verbatim"); + formatter.field(v0); + formatter.finish() + } + #[cfg(syn_no_non_exhaustive)] + _ => unreachable!(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ForeignItemFn { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ForeignItemFn"); + formatter.field("attrs", &self.attrs); + formatter.field("vis", &self.vis); + formatter.field("sig", &self.sig); + formatter.field("semi_token", &self.semi_token); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ForeignItemMacro { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ForeignItemMacro"); + formatter.field("attrs", &self.attrs); + formatter.field("mac", &self.mac); + formatter.field("semi_token", &self.semi_token); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ForeignItemStatic { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ForeignItemStatic"); + formatter.field("attrs", &self.attrs); + formatter.field("vis", &self.vis); + formatter.field("static_token", &self.static_token); + formatter.field("mutability", &self.mutability); + formatter.field("ident", &self.ident); + formatter.field("colon_token", &self.colon_token); + formatter.field("ty", &self.ty); + formatter.field("semi_token", &self.semi_token); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ForeignItemType { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ForeignItemType"); + formatter.field("attrs", &self.attrs); + formatter.field("vis", &self.vis); + formatter.field("type_token", &self.type_token); + formatter.field("ident", &self.ident); + formatter.field("semi_token", &self.semi_token); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for GenericArgument { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + match self { + GenericArgument::Lifetime(v0) => { + let mut formatter = formatter.debug_tuple("Lifetime"); + formatter.field(v0); + formatter.finish() + } + GenericArgument::Type(v0) => { + let mut formatter = formatter.debug_tuple("Type"); + formatter.field(v0); + formatter.finish() + } + GenericArgument::Const(v0) => { + let mut formatter = formatter.debug_tuple("Const"); + formatter.field(v0); + formatter.finish() + } + GenericArgument::Binding(v0) => { + let mut formatter = formatter.debug_tuple("Binding"); + formatter.field(v0); + formatter.finish() + } + GenericArgument::Constraint(v0) => { + let mut formatter = 
formatter.debug_tuple("Constraint"); + formatter.field(v0); + formatter.finish() + } + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for GenericMethodArgument { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + match self { + GenericMethodArgument::Type(v0) => { + let mut formatter = formatter.debug_tuple("Type"); + formatter.field(v0); + formatter.finish() + } + GenericMethodArgument::Const(v0) => { + let mut formatter = formatter.debug_tuple("Const"); + formatter.field(v0); + formatter.finish() + } + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for GenericParam { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + match self { + GenericParam::Type(v0) => { + let mut formatter = formatter.debug_tuple("Type"); + formatter.field(v0); + formatter.finish() + } + GenericParam::Lifetime(v0) => { + let mut formatter = formatter.debug_tuple("Lifetime"); + formatter.field(v0); + formatter.finish() + } + GenericParam::Const(v0) => { + let mut formatter = formatter.debug_tuple("Const"); + formatter.field(v0); + formatter.finish() + } + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for Generics { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("Generics"); + formatter.field("lt_token", &self.lt_token); + formatter.field("params", &self.params); + formatter.field("gt_token", &self.gt_token); + formatter.field("where_clause", &self.where_clause); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ImplItem { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + match self { + ImplItem::Const(v0) => { + let mut formatter = formatter.debug_tuple("Const"); + formatter.field(v0); + formatter.finish() + } + ImplItem::Method(v0) => { + let mut formatter = formatter.debug_tuple("Method"); + formatter.field(v0); + formatter.finish() + } + ImplItem::Type(v0) => { + let mut formatter = formatter.debug_tuple("Type"); + formatter.field(v0); + formatter.finish() + } + ImplItem::Macro(v0) => { + let mut formatter = formatter.debug_tuple("Macro"); + formatter.field(v0); + formatter.finish() + } + ImplItem::Verbatim(v0) => { + let mut formatter = formatter.debug_tuple("Verbatim"); + formatter.field(v0); + formatter.finish() + } + #[cfg(syn_no_non_exhaustive)] + _ => unreachable!(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ImplItemConst { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ImplItemConst"); + formatter.field("attrs", &self.attrs); + formatter.field("vis", &self.vis); + formatter.field("defaultness", &self.defaultness); + formatter.field("const_token", &self.const_token); + formatter.field("ident", &self.ident); + formatter.field("colon_token", &self.colon_token); + formatter.field("ty", &self.ty); + formatter.field("eq_token", &self.eq_token); + formatter.field("expr", &self.expr); + formatter.field("semi_token", &self.semi_token); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ImplItemMacro { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = 
formatter.debug_struct("ImplItemMacro"); + formatter.field("attrs", &self.attrs); + formatter.field("mac", &self.mac); + formatter.field("semi_token", &self.semi_token); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ImplItemMethod { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ImplItemMethod"); + formatter.field("attrs", &self.attrs); + formatter.field("vis", &self.vis); + formatter.field("defaultness", &self.defaultness); + formatter.field("sig", &self.sig); + formatter.field("block", &self.block); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ImplItemType { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ImplItemType"); + formatter.field("attrs", &self.attrs); + formatter.field("vis", &self.vis); + formatter.field("defaultness", &self.defaultness); + formatter.field("type_token", &self.type_token); + formatter.field("ident", &self.ident); + formatter.field("generics", &self.generics); + formatter.field("eq_token", &self.eq_token); + formatter.field("ty", &self.ty); + formatter.field("semi_token", &self.semi_token); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for Index { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("Index"); + formatter.field("index", &self.index); + formatter.field("span", &self.span); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for Item { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + match self { + Item::Const(v0) => { + let mut formatter = formatter.debug_tuple("Const"); + formatter.field(v0); + formatter.finish() + } + Item::Enum(v0) => { + let mut formatter = formatter.debug_tuple("Enum"); + formatter.field(v0); + formatter.finish() + } + Item::ExternCrate(v0) => { + let mut formatter = formatter.debug_tuple("ExternCrate"); + formatter.field(v0); + formatter.finish() + } + Item::Fn(v0) => { + let mut formatter = formatter.debug_tuple("Fn"); + formatter.field(v0); + formatter.finish() + } + Item::ForeignMod(v0) => { + let mut formatter = formatter.debug_tuple("ForeignMod"); + formatter.field(v0); + formatter.finish() + } + Item::Impl(v0) => { + let mut formatter = formatter.debug_tuple("Impl"); + formatter.field(v0); + formatter.finish() + } + Item::Macro(v0) => { + let mut formatter = formatter.debug_tuple("Macro"); + formatter.field(v0); + formatter.finish() + } + Item::Macro2(v0) => { + let mut formatter = formatter.debug_tuple("Macro2"); + formatter.field(v0); + formatter.finish() + } + Item::Mod(v0) => { + let mut formatter = formatter.debug_tuple("Mod"); + formatter.field(v0); + formatter.finish() + } + Item::Static(v0) => { + let mut formatter = formatter.debug_tuple("Static"); + formatter.field(v0); + formatter.finish() + } + Item::Struct(v0) => { + let mut formatter = formatter.debug_tuple("Struct"); + formatter.field(v0); + formatter.finish() + } + Item::Trait(v0) => { + let mut formatter = formatter.debug_tuple("Trait"); + formatter.field(v0); + formatter.finish() + } + Item::TraitAlias(v0) => { + let mut formatter = formatter.debug_tuple("TraitAlias"); + formatter.field(v0); + formatter.finish() + } + 
Item::Type(v0) => { + let mut formatter = formatter.debug_tuple("Type"); + formatter.field(v0); + formatter.finish() + } + Item::Union(v0) => { + let mut formatter = formatter.debug_tuple("Union"); + formatter.field(v0); + formatter.finish() + } + Item::Use(v0) => { + let mut formatter = formatter.debug_tuple("Use"); + formatter.field(v0); + formatter.finish() + } + Item::Verbatim(v0) => { + let mut formatter = formatter.debug_tuple("Verbatim"); + formatter.field(v0); + formatter.finish() + } + #[cfg(syn_no_non_exhaustive)] + _ => unreachable!(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ItemConst { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ItemConst"); + formatter.field("attrs", &self.attrs); + formatter.field("vis", &self.vis); + formatter.field("const_token", &self.const_token); + formatter.field("ident", &self.ident); + formatter.field("colon_token", &self.colon_token); + formatter.field("ty", &self.ty); + formatter.field("eq_token", &self.eq_token); + formatter.field("expr", &self.expr); + formatter.field("semi_token", &self.semi_token); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ItemEnum { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ItemEnum"); + formatter.field("attrs", &self.attrs); + formatter.field("vis", &self.vis); + formatter.field("enum_token", &self.enum_token); + formatter.field("ident", &self.ident); + formatter.field("generics", &self.generics); + formatter.field("brace_token", &self.brace_token); + formatter.field("variants", &self.variants); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ItemExternCrate { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ItemExternCrate"); + formatter.field("attrs", &self.attrs); + formatter.field("vis", &self.vis); + formatter.field("extern_token", &self.extern_token); + formatter.field("crate_token", &self.crate_token); + formatter.field("ident", &self.ident); + formatter.field("rename", &self.rename); + formatter.field("semi_token", &self.semi_token); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ItemFn { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ItemFn"); + formatter.field("attrs", &self.attrs); + formatter.field("vis", &self.vis); + formatter.field("sig", &self.sig); + formatter.field("block", &self.block); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ItemForeignMod { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ItemForeignMod"); + formatter.field("attrs", &self.attrs); + formatter.field("abi", &self.abi); + formatter.field("brace_token", &self.brace_token); + formatter.field("items", &self.items); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ItemImpl { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ItemImpl"); + formatter.field("attrs", &self.attrs); + 
formatter.field("defaultness", &self.defaultness); + formatter.field("unsafety", &self.unsafety); + formatter.field("impl_token", &self.impl_token); + formatter.field("generics", &self.generics); + formatter.field("trait_", &self.trait_); + formatter.field("self_ty", &self.self_ty); + formatter.field("brace_token", &self.brace_token); + formatter.field("items", &self.items); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ItemMacro { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ItemMacro"); + formatter.field("attrs", &self.attrs); + formatter.field("ident", &self.ident); + formatter.field("mac", &self.mac); + formatter.field("semi_token", &self.semi_token); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ItemMacro2 { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ItemMacro2"); + formatter.field("attrs", &self.attrs); + formatter.field("vis", &self.vis); + formatter.field("macro_token", &self.macro_token); + formatter.field("ident", &self.ident); + formatter.field("rules", &self.rules); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ItemMod { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ItemMod"); + formatter.field("attrs", &self.attrs); + formatter.field("vis", &self.vis); + formatter.field("mod_token", &self.mod_token); + formatter.field("ident", &self.ident); + formatter.field("content", &self.content); + formatter.field("semi", &self.semi); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ItemStatic { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ItemStatic"); + formatter.field("attrs", &self.attrs); + formatter.field("vis", &self.vis); + formatter.field("static_token", &self.static_token); + formatter.field("mutability", &self.mutability); + formatter.field("ident", &self.ident); + formatter.field("colon_token", &self.colon_token); + formatter.field("ty", &self.ty); + formatter.field("eq_token", &self.eq_token); + formatter.field("expr", &self.expr); + formatter.field("semi_token", &self.semi_token); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ItemStruct { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ItemStruct"); + formatter.field("attrs", &self.attrs); + formatter.field("vis", &self.vis); + formatter.field("struct_token", &self.struct_token); + formatter.field("ident", &self.ident); + formatter.field("generics", &self.generics); + formatter.field("fields", &self.fields); + formatter.field("semi_token", &self.semi_token); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ItemTrait { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ItemTrait"); + formatter.field("attrs", &self.attrs); + formatter.field("vis", &self.vis); + formatter.field("unsafety", &self.unsafety); + formatter.field("auto_token", &self.auto_token); + 
formatter.field("trait_token", &self.trait_token); + formatter.field("ident", &self.ident); + formatter.field("generics", &self.generics); + formatter.field("colon_token", &self.colon_token); + formatter.field("supertraits", &self.supertraits); + formatter.field("brace_token", &self.brace_token); + formatter.field("items", &self.items); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ItemTraitAlias { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ItemTraitAlias"); + formatter.field("attrs", &self.attrs); + formatter.field("vis", &self.vis); + formatter.field("trait_token", &self.trait_token); + formatter.field("ident", &self.ident); + formatter.field("generics", &self.generics); + formatter.field("eq_token", &self.eq_token); + formatter.field("bounds", &self.bounds); + formatter.field("semi_token", &self.semi_token); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ItemType { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ItemType"); + formatter.field("attrs", &self.attrs); + formatter.field("vis", &self.vis); + formatter.field("type_token", &self.type_token); + formatter.field("ident", &self.ident); + formatter.field("generics", &self.generics); + formatter.field("eq_token", &self.eq_token); + formatter.field("ty", &self.ty); + formatter.field("semi_token", &self.semi_token); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ItemUnion { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ItemUnion"); + formatter.field("attrs", &self.attrs); + formatter.field("vis", &self.vis); + formatter.field("union_token", &self.union_token); + formatter.field("ident", &self.ident); + formatter.field("generics", &self.generics); + formatter.field("fields", &self.fields); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ItemUse { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ItemUse"); + formatter.field("attrs", &self.attrs); + formatter.field("vis", &self.vis); + formatter.field("use_token", &self.use_token); + formatter.field("leading_colon", &self.leading_colon); + formatter.field("tree", &self.tree); + formatter.field("semi_token", &self.semi_token); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for Label { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("Label"); + formatter.field("name", &self.name); + formatter.field("colon_token", &self.colon_token); + formatter.finish() + } +} +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for Lifetime { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("Lifetime"); + formatter.field("apostrophe", &self.apostrophe); + formatter.field("ident", &self.ident); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for LifetimeDef { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + 
let mut formatter = formatter.debug_struct("LifetimeDef"); + formatter.field("attrs", &self.attrs); + formatter.field("lifetime", &self.lifetime); + formatter.field("colon_token", &self.colon_token); + formatter.field("bounds", &self.bounds); + formatter.finish() + } +} +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for Lit { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + match self { + Lit::Str(v0) => { + let mut formatter = formatter.debug_tuple("Str"); + formatter.field(v0); + formatter.finish() + } + Lit::ByteStr(v0) => { + let mut formatter = formatter.debug_tuple("ByteStr"); + formatter.field(v0); + formatter.finish() + } + Lit::Byte(v0) => { + let mut formatter = formatter.debug_tuple("Byte"); + formatter.field(v0); + formatter.finish() + } + Lit::Char(v0) => { + let mut formatter = formatter.debug_tuple("Char"); + formatter.field(v0); + formatter.finish() + } + Lit::Int(v0) => { + let mut formatter = formatter.debug_tuple("Int"); + formatter.field(v0); + formatter.finish() + } + Lit::Float(v0) => { + let mut formatter = formatter.debug_tuple("Float"); + formatter.field(v0); + formatter.finish() + } + Lit::Bool(v0) => { + let mut formatter = formatter.debug_tuple("Bool"); + formatter.field(v0); + formatter.finish() + } + Lit::Verbatim(v0) => { + let mut formatter = formatter.debug_tuple("Verbatim"); + formatter.field(v0); + formatter.finish() + } + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for Local { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("Local"); + formatter.field("attrs", &self.attrs); + formatter.field("let_token", &self.let_token); + formatter.field("pat", &self.pat); + formatter.field("init", &self.init); + formatter.field("semi_token", &self.semi_token); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for Macro { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("Macro"); + formatter.field("path", &self.path); + formatter.field("bang_token", &self.bang_token); + formatter.field("delimiter", &self.delimiter); + formatter.field("tokens", &self.tokens); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for MacroDelimiter { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + match self { + MacroDelimiter::Paren(v0) => { + let mut formatter = formatter.debug_tuple("Paren"); + formatter.field(v0); + formatter.finish() + } + MacroDelimiter::Brace(v0) => { + let mut formatter = formatter.debug_tuple("Brace"); + formatter.field(v0); + formatter.finish() + } + MacroDelimiter::Bracket(v0) => { + let mut formatter = formatter.debug_tuple("Bracket"); + formatter.field(v0); + formatter.finish() + } + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for Member { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + match self { + Member::Named(v0) => { + let mut formatter = formatter.debug_tuple("Named"); + formatter.field(v0); + formatter.finish() + } + Member::Unnamed(v0) => { + let mut formatter = formatter.debug_tuple("Unnamed"); + formatter.field(v0); + formatter.finish() + } + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] 
+#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for Meta { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + match self { + Meta::Path(v0) => { + let mut formatter = formatter.debug_tuple("Path"); + formatter.field(v0); + formatter.finish() + } + Meta::List(v0) => { + let mut formatter = formatter.debug_tuple("List"); + formatter.field(v0); + formatter.finish() + } + Meta::NameValue(v0) => { + let mut formatter = formatter.debug_tuple("NameValue"); + formatter.field(v0); + formatter.finish() + } + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for MetaList { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("MetaList"); + formatter.field("path", &self.path); + formatter.field("paren_token", &self.paren_token); + formatter.field("nested", &self.nested); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for MetaNameValue { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("MetaNameValue"); + formatter.field("path", &self.path); + formatter.field("eq_token", &self.eq_token); + formatter.field("lit", &self.lit); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for MethodTurbofish { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("MethodTurbofish"); + formatter.field("colon2_token", &self.colon2_token); + formatter.field("lt_token", &self.lt_token); + formatter.field("args", &self.args); + formatter.field("gt_token", &self.gt_token); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for NestedMeta { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + match self { + NestedMeta::Meta(v0) => { + let mut formatter = formatter.debug_tuple("Meta"); + formatter.field(v0); + formatter.finish() + } + NestedMeta::Lit(v0) => { + let mut formatter = formatter.debug_tuple("Lit"); + formatter.field(v0); + formatter.finish() + } + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ParenthesizedGenericArguments { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("ParenthesizedGenericArguments"); + formatter.field("paren_token", &self.paren_token); + formatter.field("inputs", &self.inputs); + formatter.field("output", &self.output); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for Pat { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + match self { + Pat::Box(v0) => { + let mut formatter = formatter.debug_tuple("Box"); + formatter.field(v0); + formatter.finish() + } + Pat::Ident(v0) => { + let mut formatter = formatter.debug_tuple("Ident"); + formatter.field(v0); + formatter.finish() + } + Pat::Lit(v0) => { + let mut formatter = formatter.debug_tuple("Lit"); + formatter.field(v0); + formatter.finish() + } + Pat::Macro(v0) => { + let mut formatter = formatter.debug_tuple("Macro"); + formatter.field(v0); + formatter.finish() + } + Pat::Or(v0) => { + let mut formatter = formatter.debug_tuple("Or"); 
+ formatter.field(v0); + formatter.finish() + } + Pat::Path(v0) => { + let mut formatter = formatter.debug_tuple("Path"); + formatter.field(v0); + formatter.finish() + } + Pat::Range(v0) => { + let mut formatter = formatter.debug_tuple("Range"); + formatter.field(v0); + formatter.finish() + } + Pat::Reference(v0) => { + let mut formatter = formatter.debug_tuple("Reference"); + formatter.field(v0); + formatter.finish() + } + Pat::Rest(v0) => { + let mut formatter = formatter.debug_tuple("Rest"); + formatter.field(v0); + formatter.finish() + } + Pat::Slice(v0) => { + let mut formatter = formatter.debug_tuple("Slice"); + formatter.field(v0); + formatter.finish() + } + Pat::Struct(v0) => { + let mut formatter = formatter.debug_tuple("Struct"); + formatter.field(v0); + formatter.finish() + } + Pat::Tuple(v0) => { + let mut formatter = formatter.debug_tuple("Tuple"); + formatter.field(v0); + formatter.finish() + } + Pat::TupleStruct(v0) => { + let mut formatter = formatter.debug_tuple("TupleStruct"); + formatter.field(v0); + formatter.finish() + } + Pat::Type(v0) => { + let mut formatter = formatter.debug_tuple("Type"); + formatter.field(v0); + formatter.finish() + } + Pat::Verbatim(v0) => { + let mut formatter = formatter.debug_tuple("Verbatim"); + formatter.field(v0); + formatter.finish() + } + Pat::Wild(v0) => { + let mut formatter = formatter.debug_tuple("Wild"); + formatter.field(v0); + formatter.finish() + } + #[cfg(syn_no_non_exhaustive)] + _ => unreachable!(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for PatBox { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("PatBox"); + formatter.field("attrs", &self.attrs); + formatter.field("box_token", &self.box_token); + formatter.field("pat", &self.pat); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for PatIdent { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("PatIdent"); + formatter.field("attrs", &self.attrs); + formatter.field("by_ref", &self.by_ref); + formatter.field("mutability", &self.mutability); + formatter.field("ident", &self.ident); + formatter.field("subpat", &self.subpat); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for PatLit { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("PatLit"); + formatter.field("attrs", &self.attrs); + formatter.field("expr", &self.expr); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for PatMacro { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("PatMacro"); + formatter.field("attrs", &self.attrs); + formatter.field("mac", &self.mac); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for PatOr { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("PatOr"); + formatter.field("attrs", &self.attrs); + formatter.field("leading_vert", &self.leading_vert); + formatter.field("cases", &self.cases); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for PatPath { + fn 
fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("PatPath"); + formatter.field("attrs", &self.attrs); + formatter.field("qself", &self.qself); + formatter.field("path", &self.path); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for PatRange { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("PatRange"); + formatter.field("attrs", &self.attrs); + formatter.field("lo", &self.lo); + formatter.field("limits", &self.limits); + formatter.field("hi", &self.hi); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for PatReference { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("PatReference"); + formatter.field("attrs", &self.attrs); + formatter.field("and_token", &self.and_token); + formatter.field("mutability", &self.mutability); + formatter.field("pat", &self.pat); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for PatRest { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("PatRest"); + formatter.field("attrs", &self.attrs); + formatter.field("dot2_token", &self.dot2_token); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for PatSlice { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("PatSlice"); + formatter.field("attrs", &self.attrs); + formatter.field("bracket_token", &self.bracket_token); + formatter.field("elems", &self.elems); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for PatStruct { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("PatStruct"); + formatter.field("attrs", &self.attrs); + formatter.field("path", &self.path); + formatter.field("brace_token", &self.brace_token); + formatter.field("fields", &self.fields); + formatter.field("dot2_token", &self.dot2_token); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for PatTuple { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("PatTuple"); + formatter.field("attrs", &self.attrs); + formatter.field("paren_token", &self.paren_token); + formatter.field("elems", &self.elems); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for PatTupleStruct { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("PatTupleStruct"); + formatter.field("attrs", &self.attrs); + formatter.field("path", &self.path); + formatter.field("pat", &self.pat); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for PatType { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("PatType"); + formatter.field("attrs", &self.attrs); + formatter.field("pat", &self.pat); + formatter.field("colon_token", &self.colon_token); + formatter.field("ty", 
&self.ty); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for PatWild { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("PatWild"); + formatter.field("attrs", &self.attrs); + formatter.field("underscore_token", &self.underscore_token); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for Path { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("Path"); + formatter.field("leading_colon", &self.leading_colon); + formatter.field("segments", &self.segments); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for PathArguments { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + match self { + PathArguments::None => formatter.write_str("None"), + PathArguments::AngleBracketed(v0) => { + let mut formatter = formatter.debug_tuple("AngleBracketed"); + formatter.field(v0); + formatter.finish() + } + PathArguments::Parenthesized(v0) => { + let mut formatter = formatter.debug_tuple("Parenthesized"); + formatter.field(v0); + formatter.finish() + } + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for PathSegment { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("PathSegment"); + formatter.field("ident", &self.ident); + formatter.field("arguments", &self.arguments); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for PredicateEq { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("PredicateEq"); + formatter.field("lhs_ty", &self.lhs_ty); + formatter.field("eq_token", &self.eq_token); + formatter.field("rhs_ty", &self.rhs_ty); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for PredicateLifetime { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("PredicateLifetime"); + formatter.field("lifetime", &self.lifetime); + formatter.field("colon_token", &self.colon_token); + formatter.field("bounds", &self.bounds); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for PredicateType { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("PredicateType"); + formatter.field("lifetimes", &self.lifetimes); + formatter.field("bounded_ty", &self.bounded_ty); + formatter.field("colon_token", &self.colon_token); + formatter.field("bounds", &self.bounds); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for QSelf { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("QSelf"); + formatter.field("lt_token", &self.lt_token); + formatter.field("ty", &self.ty); + formatter.field("position", &self.position); + formatter.field("as_token", &self.as_token); + 
formatter.field("gt_token", &self.gt_token); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for RangeLimits { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + match self { + RangeLimits::HalfOpen(v0) => { + let mut formatter = formatter.debug_tuple("HalfOpen"); + formatter.field(v0); + formatter.finish() + } + RangeLimits::Closed(v0) => { + let mut formatter = formatter.debug_tuple("Closed"); + formatter.field(v0); + formatter.finish() + } + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for Receiver { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("Receiver"); + formatter.field("attrs", &self.attrs); + formatter.field("reference", &self.reference); + formatter.field("mutability", &self.mutability); + formatter.field("self_token", &self.self_token); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for ReturnType { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + match self { + ReturnType::Default => formatter.write_str("Default"), + ReturnType::Type(v0, v1) => { + let mut formatter = formatter.debug_tuple("Type"); + formatter.field(v0); + formatter.field(v1); + formatter.finish() + } + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for Signature { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("Signature"); + formatter.field("constness", &self.constness); + formatter.field("asyncness", &self.asyncness); + formatter.field("unsafety", &self.unsafety); + formatter.field("abi", &self.abi); + formatter.field("fn_token", &self.fn_token); + formatter.field("ident", &self.ident); + formatter.field("generics", &self.generics); + formatter.field("paren_token", &self.paren_token); + formatter.field("inputs", &self.inputs); + formatter.field("variadic", &self.variadic); + formatter.field("output", &self.output); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for Stmt { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + match self { + Stmt::Local(v0) => { + let mut formatter = formatter.debug_tuple("Local"); + formatter.field(v0); + formatter.finish() + } + Stmt::Item(v0) => { + let mut formatter = formatter.debug_tuple("Item"); + formatter.field(v0); + formatter.finish() + } + Stmt::Expr(v0) => { + let mut formatter = formatter.debug_tuple("Expr"); + formatter.field(v0); + formatter.finish() + } + Stmt::Semi(v0, v1) => { + let mut formatter = formatter.debug_tuple("Semi"); + formatter.field(v0); + formatter.field(v1); + formatter.finish() + } + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for TraitBound { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("TraitBound"); + formatter.field("paren_token", &self.paren_token); + formatter.field("modifier", &self.modifier); + formatter.field("lifetimes", &self.lifetimes); + formatter.field("path", &self.path); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for TraitBoundModifier 
{ + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + match self { + TraitBoundModifier::None => formatter.write_str("None"), + TraitBoundModifier::Maybe(v0) => { + let mut formatter = formatter.debug_tuple("Maybe"); + formatter.field(v0); + formatter.finish() + } + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for TraitItem { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + match self { + TraitItem::Const(v0) => { + let mut formatter = formatter.debug_tuple("Const"); + formatter.field(v0); + formatter.finish() + } + TraitItem::Method(v0) => { + let mut formatter = formatter.debug_tuple("Method"); + formatter.field(v0); + formatter.finish() + } + TraitItem::Type(v0) => { + let mut formatter = formatter.debug_tuple("Type"); + formatter.field(v0); + formatter.finish() + } + TraitItem::Macro(v0) => { + let mut formatter = formatter.debug_tuple("Macro"); + formatter.field(v0); + formatter.finish() + } + TraitItem::Verbatim(v0) => { + let mut formatter = formatter.debug_tuple("Verbatim"); + formatter.field(v0); + formatter.finish() + } + #[cfg(syn_no_non_exhaustive)] + _ => unreachable!(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for TraitItemConst { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("TraitItemConst"); + formatter.field("attrs", &self.attrs); + formatter.field("const_token", &self.const_token); + formatter.field("ident", &self.ident); + formatter.field("colon_token", &self.colon_token); + formatter.field("ty", &self.ty); + formatter.field("default", &self.default); + formatter.field("semi_token", &self.semi_token); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for TraitItemMacro { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("TraitItemMacro"); + formatter.field("attrs", &self.attrs); + formatter.field("mac", &self.mac); + formatter.field("semi_token", &self.semi_token); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for TraitItemMethod { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("TraitItemMethod"); + formatter.field("attrs", &self.attrs); + formatter.field("sig", &self.sig); + formatter.field("default", &self.default); + formatter.field("semi_token", &self.semi_token); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for TraitItemType { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("TraitItemType"); + formatter.field("attrs", &self.attrs); + formatter.field("type_token", &self.type_token); + formatter.field("ident", &self.ident); + formatter.field("generics", &self.generics); + formatter.field("colon_token", &self.colon_token); + formatter.field("bounds", &self.bounds); + formatter.field("default", &self.default); + formatter.field("semi_token", &self.semi_token); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for Type { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + match self { + Type::Array(v0) => { + let mut formatter = 
formatter.debug_tuple("Array"); + formatter.field(v0); + formatter.finish() + } + Type::BareFn(v0) => { + let mut formatter = formatter.debug_tuple("BareFn"); + formatter.field(v0); + formatter.finish() + } + Type::Group(v0) => { + let mut formatter = formatter.debug_tuple("Group"); + formatter.field(v0); + formatter.finish() + } + Type::ImplTrait(v0) => { + let mut formatter = formatter.debug_tuple("ImplTrait"); + formatter.field(v0); + formatter.finish() + } + Type::Infer(v0) => { + let mut formatter = formatter.debug_tuple("Infer"); + formatter.field(v0); + formatter.finish() + } + Type::Macro(v0) => { + let mut formatter = formatter.debug_tuple("Macro"); + formatter.field(v0); + formatter.finish() + } + Type::Never(v0) => { + let mut formatter = formatter.debug_tuple("Never"); + formatter.field(v0); + formatter.finish() + } + Type::Paren(v0) => { + let mut formatter = formatter.debug_tuple("Paren"); + formatter.field(v0); + formatter.finish() + } + Type::Path(v0) => { + let mut formatter = formatter.debug_tuple("Path"); + formatter.field(v0); + formatter.finish() + } + Type::Ptr(v0) => { + let mut formatter = formatter.debug_tuple("Ptr"); + formatter.field(v0); + formatter.finish() + } + Type::Reference(v0) => { + let mut formatter = formatter.debug_tuple("Reference"); + formatter.field(v0); + formatter.finish() + } + Type::Slice(v0) => { + let mut formatter = formatter.debug_tuple("Slice"); + formatter.field(v0); + formatter.finish() + } + Type::TraitObject(v0) => { + let mut formatter = formatter.debug_tuple("TraitObject"); + formatter.field(v0); + formatter.finish() + } + Type::Tuple(v0) => { + let mut formatter = formatter.debug_tuple("Tuple"); + formatter.field(v0); + formatter.finish() + } + Type::Verbatim(v0) => { + let mut formatter = formatter.debug_tuple("Verbatim"); + formatter.field(v0); + formatter.finish() + } + #[cfg(syn_no_non_exhaustive)] + _ => unreachable!(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for TypeArray { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("TypeArray"); + formatter.field("bracket_token", &self.bracket_token); + formatter.field("elem", &self.elem); + formatter.field("semi_token", &self.semi_token); + formatter.field("len", &self.len); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for TypeBareFn { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("TypeBareFn"); + formatter.field("lifetimes", &self.lifetimes); + formatter.field("unsafety", &self.unsafety); + formatter.field("abi", &self.abi); + formatter.field("fn_token", &self.fn_token); + formatter.field("paren_token", &self.paren_token); + formatter.field("inputs", &self.inputs); + formatter.field("variadic", &self.variadic); + formatter.field("output", &self.output); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for TypeGroup { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("TypeGroup"); + formatter.field("group_token", &self.group_token); + formatter.field("elem", &self.elem); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl 
Debug for TypeImplTrait { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("TypeImplTrait"); + formatter.field("impl_token", &self.impl_token); + formatter.field("bounds", &self.bounds); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for TypeInfer { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("TypeInfer"); + formatter.field("underscore_token", &self.underscore_token); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for TypeMacro { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("TypeMacro"); + formatter.field("mac", &self.mac); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for TypeNever { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("TypeNever"); + formatter.field("bang_token", &self.bang_token); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for TypeParam { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("TypeParam"); + formatter.field("attrs", &self.attrs); + formatter.field("ident", &self.ident); + formatter.field("colon_token", &self.colon_token); + formatter.field("bounds", &self.bounds); + formatter.field("eq_token", &self.eq_token); + formatter.field("default", &self.default); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for TypeParamBound { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + match self { + TypeParamBound::Trait(v0) => { + let mut formatter = formatter.debug_tuple("Trait"); + formatter.field(v0); + formatter.finish() + } + TypeParamBound::Lifetime(v0) => { + let mut formatter = formatter.debug_tuple("Lifetime"); + formatter.field(v0); + formatter.finish() + } + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for TypeParen { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("TypeParen"); + formatter.field("paren_token", &self.paren_token); + formatter.field("elem", &self.elem); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for TypePath { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("TypePath"); + formatter.field("qself", &self.qself); + formatter.field("path", &self.path); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for TypePtr { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("TypePtr"); + formatter.field("star_token", &self.star_token); + formatter.field("const_token", &self.const_token); + formatter.field("mutability", &self.mutability); + formatter.field("elem", 
&self.elem); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for TypeReference { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("TypeReference"); + formatter.field("and_token", &self.and_token); + formatter.field("lifetime", &self.lifetime); + formatter.field("mutability", &self.mutability); + formatter.field("elem", &self.elem); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for TypeSlice { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("TypeSlice"); + formatter.field("bracket_token", &self.bracket_token); + formatter.field("elem", &self.elem); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for TypeTraitObject { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("TypeTraitObject"); + formatter.field("dyn_token", &self.dyn_token); + formatter.field("bounds", &self.bounds); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for TypeTuple { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("TypeTuple"); + formatter.field("paren_token", &self.paren_token); + formatter.field("elems", &self.elems); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for UnOp { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + match self { + UnOp::Deref(v0) => { + let mut formatter = formatter.debug_tuple("Deref"); + formatter.field(v0); + formatter.finish() + } + UnOp::Not(v0) => { + let mut formatter = formatter.debug_tuple("Not"); + formatter.field(v0); + formatter.finish() + } + UnOp::Neg(v0) => { + let mut formatter = formatter.debug_tuple("Neg"); + formatter.field(v0); + formatter.finish() + } + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for UseGlob { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("UseGlob"); + formatter.field("star_token", &self.star_token); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for UseGroup { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("UseGroup"); + formatter.field("brace_token", &self.brace_token); + formatter.field("items", &self.items); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for UseName { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("UseName"); + formatter.field("ident", &self.ident); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for UsePath { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("UsePath"); + formatter.field("ident", &self.ident); + formatter.field("colon2_token", 
&self.colon2_token); + formatter.field("tree", &self.tree); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for UseRename { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("UseRename"); + formatter.field("ident", &self.ident); + formatter.field("as_token", &self.as_token); + formatter.field("rename", &self.rename); + formatter.finish() + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for UseTree { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + match self { + UseTree::Path(v0) => { + let mut formatter = formatter.debug_tuple("Path"); + formatter.field(v0); + formatter.finish() + } + UseTree::Name(v0) => { + let mut formatter = formatter.debug_tuple("Name"); + formatter.field(v0); + formatter.finish() + } + UseTree::Rename(v0) => { + let mut formatter = formatter.debug_tuple("Rename"); + formatter.field(v0); + formatter.finish() + } + UseTree::Glob(v0) => { + let mut formatter = formatter.debug_tuple("Glob"); + formatter.field(v0); + formatter.finish() + } + UseTree::Group(v0) => { + let mut formatter = formatter.debug_tuple("Group"); + formatter.field(v0); + formatter.finish() + } + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for Variadic { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("Variadic"); + formatter.field("attrs", &self.attrs); + formatter.field("dots", &self.dots); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for Variant { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("Variant"); + formatter.field("attrs", &self.attrs); + formatter.field("ident", &self.ident); + formatter.field("fields", &self.fields); + formatter.field("discriminant", &self.discriminant); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for VisCrate { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("VisCrate"); + formatter.field("crate_token", &self.crate_token); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for VisPublic { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("VisPublic"); + formatter.field("pub_token", &self.pub_token); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for VisRestricted { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("VisRestricted"); + formatter.field("pub_token", &self.pub_token); + formatter.field("paren_token", &self.paren_token); + formatter.field("in_token", &self.in_token); + formatter.field("path", &self.path); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for Visibility { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + match self { + 
Visibility::Public(v0) => { + let mut formatter = formatter.debug_tuple("Public"); + formatter.field(v0); + formatter.finish() + } + Visibility::Crate(v0) => { + let mut formatter = formatter.debug_tuple("Crate"); + formatter.field(v0); + formatter.finish() + } + Visibility::Restricted(v0) => { + let mut formatter = formatter.debug_tuple("Restricted"); + formatter.field(v0); + formatter.finish() + } + Visibility::Inherited => formatter.write_str("Inherited"), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for WhereClause { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = formatter.debug_struct("WhereClause"); + formatter.field("where_token", &self.where_token); + formatter.field("predicates", &self.predicates); + formatter.finish() + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for WherePredicate { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + match self { + WherePredicate::Type(v0) => { + let mut formatter = formatter.debug_tuple("Type"); + formatter.field(v0); + formatter.finish() + } + WherePredicate::Lifetime(v0) => { + let mut formatter = formatter.debug_tuple("Lifetime"); + formatter.field(v0); + formatter.finish() + } + WherePredicate::Eq(v0) => { + let mut formatter = formatter.debug_tuple("Eq"); + formatter.field(v0); + formatter.finish() + } + } + } +} diff --git a/third_party/rust/syn/src/gen/eq.rs b/third_party/rust/syn/src/gen/eq.rs new file mode 100644 index 0000000000..20acb809d8 --- /dev/null +++ b/third_party/rust/syn/src/gen/eq.rs @@ -0,0 +1,2195 @@ +// This file is @generated by syn-internal-codegen. +// It is not intended for manual editing. 
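The generated files above and below back syn's optional `extra-traits` feature: `gen/debug.rs` supplies the `Debug` impls and `gen/eq.rs` the `Eq`/`PartialEq` impls for the syntax-tree types, each gated on the same `derive`/`full` cfgs as the type it covers. As a rough illustration of what that enables for a consumer of this vendored crate, here is a minimal sketch; it is not part of the diff, and it assumes a syn 1.x dependency built with the `full` and `extra-traits` features (plus the default `parsing` feature).

// Illustrative sketch only; not part of the vendored diff.
// Assumes a manifest entry along the lines of:
//   syn = { version = "1", features = ["full", "extra-traits"] }
use syn::{Expr, Type};

fn main() -> syn::Result<()> {
    // The Debug impls generated in gen/debug.rs make parsed trees printable,
    // which is handy when inspecting what syn produced for a given input.
    let ty: Type = syn::parse_str("Vec<u8>")?;
    println!("{:#?}", ty);

    // The PartialEq/Eq impls generated in gen/eq.rs compare trees field by
    // field, so two independently parsed copies of the same source are equal.
    let a: Expr = syn::parse_str("1 + 1")?;
    let b: Expr = syn::parse_str("1 + 1")?;
    assert_eq!(a, b);
    Ok(())
}

The `assert_eq!` relies on exactly the kind of field-by-field comparison spelled out in `gen/eq.rs`: span information is not part of the comparison, so independently parsed copies of the same source text compare equal.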
+ +#[cfg(any(feature = "derive", feature = "full"))] +use crate::tt::TokenStreamHelper; +use crate::*; +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for Abi {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for Abi { + fn eq(&self, other: &Self) -> bool { + self.name == other.name + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for AngleBracketedGenericArguments {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for AngleBracketedGenericArguments { + fn eq(&self, other: &Self) -> bool { + self.colon2_token == other.colon2_token && self.args == other.args + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for Arm {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for Arm { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.pat == other.pat && self.guard == other.guard + && self.body == other.body && self.comma == other.comma + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for AttrStyle {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for AttrStyle { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (AttrStyle::Outer, AttrStyle::Outer) => true, + (AttrStyle::Inner(_), AttrStyle::Inner(_)) => true, + _ => false, + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for Attribute {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for Attribute { + fn eq(&self, other: &Self) -> bool { + self.style == other.style && self.path == other.path + && TokenStreamHelper(&self.tokens) == TokenStreamHelper(&other.tokens) + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for BareFnArg {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for BareFnArg { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.name == other.name && self.ty == other.ty + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for BinOp {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for BinOp { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (BinOp::Add(_), BinOp::Add(_)) => true, + (BinOp::Sub(_), BinOp::Sub(_)) => true, + (BinOp::Mul(_), BinOp::Mul(_)) => true, + (BinOp::Div(_), BinOp::Div(_)) => true, + (BinOp::Rem(_), BinOp::Rem(_)) => true, + (BinOp::And(_), BinOp::And(_)) => true, + (BinOp::Or(_), BinOp::Or(_)) => true, + (BinOp::BitXor(_), BinOp::BitXor(_)) => true, + (BinOp::BitAnd(_), BinOp::BitAnd(_)) => true, + (BinOp::BitOr(_), BinOp::BitOr(_)) => true, + (BinOp::Shl(_), BinOp::Shl(_)) => true, + (BinOp::Shr(_), BinOp::Shr(_)) => true, + (BinOp::Eq(_), BinOp::Eq(_)) => true, + (BinOp::Lt(_), BinOp::Lt(_)) => true, + (BinOp::Le(_), BinOp::Le(_)) => 
true, + (BinOp::Ne(_), BinOp::Ne(_)) => true, + (BinOp::Ge(_), BinOp::Ge(_)) => true, + (BinOp::Gt(_), BinOp::Gt(_)) => true, + (BinOp::AddEq(_), BinOp::AddEq(_)) => true, + (BinOp::SubEq(_), BinOp::SubEq(_)) => true, + (BinOp::MulEq(_), BinOp::MulEq(_)) => true, + (BinOp::DivEq(_), BinOp::DivEq(_)) => true, + (BinOp::RemEq(_), BinOp::RemEq(_)) => true, + (BinOp::BitXorEq(_), BinOp::BitXorEq(_)) => true, + (BinOp::BitAndEq(_), BinOp::BitAndEq(_)) => true, + (BinOp::BitOrEq(_), BinOp::BitOrEq(_)) => true, + (BinOp::ShlEq(_), BinOp::ShlEq(_)) => true, + (BinOp::ShrEq(_), BinOp::ShrEq(_)) => true, + _ => false, + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for Binding {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for Binding { + fn eq(&self, other: &Self) -> bool { + self.ident == other.ident && self.ty == other.ty + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for Block {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for Block { + fn eq(&self, other: &Self) -> bool { + self.stmts == other.stmts + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for BoundLifetimes {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for BoundLifetimes { + fn eq(&self, other: &Self) -> bool { + self.lifetimes == other.lifetimes + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ConstParam {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ConstParam { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.ident == other.ident && self.ty == other.ty + && self.eq_token == other.eq_token && self.default == other.default + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for Constraint {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for Constraint { + fn eq(&self, other: &Self) -> bool { + self.ident == other.ident && self.bounds == other.bounds + } +} +#[cfg(feature = "derive")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for Data {} +#[cfg(feature = "derive")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for Data { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (Data::Struct(self0), Data::Struct(other0)) => self0 == other0, + (Data::Enum(self0), Data::Enum(other0)) => self0 == other0, + (Data::Union(self0), Data::Union(other0)) => self0 == other0, + _ => false, + } + } +} +#[cfg(feature = "derive")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for DataEnum {} +#[cfg(feature = "derive")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for DataEnum { + fn eq(&self, other: &Self) -> bool { + self.variants == other.variants + } +} +#[cfg(feature = "derive")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for DataStruct {} +#[cfg(feature = "derive")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for DataStruct { + 
fn eq(&self, other: &Self) -> bool { + self.fields == other.fields && self.semi_token == other.semi_token + } +} +#[cfg(feature = "derive")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for DataUnion {} +#[cfg(feature = "derive")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for DataUnion { + fn eq(&self, other: &Self) -> bool { + self.fields == other.fields + } +} +#[cfg(feature = "derive")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for DeriveInput {} +#[cfg(feature = "derive")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for DeriveInput { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.vis == other.vis && self.ident == other.ident + && self.generics == other.generics && self.data == other.data + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for Expr {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for Expr { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + #[cfg(feature = "full")] + (Expr::Array(self0), Expr::Array(other0)) => self0 == other0, + #[cfg(feature = "full")] + (Expr::Assign(self0), Expr::Assign(other0)) => self0 == other0, + #[cfg(feature = "full")] + (Expr::AssignOp(self0), Expr::AssignOp(other0)) => self0 == other0, + #[cfg(feature = "full")] + (Expr::Async(self0), Expr::Async(other0)) => self0 == other0, + #[cfg(feature = "full")] + (Expr::Await(self0), Expr::Await(other0)) => self0 == other0, + (Expr::Binary(self0), Expr::Binary(other0)) => self0 == other0, + #[cfg(feature = "full")] + (Expr::Block(self0), Expr::Block(other0)) => self0 == other0, + #[cfg(feature = "full")] + (Expr::Box(self0), Expr::Box(other0)) => self0 == other0, + #[cfg(feature = "full")] + (Expr::Break(self0), Expr::Break(other0)) => self0 == other0, + (Expr::Call(self0), Expr::Call(other0)) => self0 == other0, + (Expr::Cast(self0), Expr::Cast(other0)) => self0 == other0, + #[cfg(feature = "full")] + (Expr::Closure(self0), Expr::Closure(other0)) => self0 == other0, + #[cfg(feature = "full")] + (Expr::Continue(self0), Expr::Continue(other0)) => self0 == other0, + (Expr::Field(self0), Expr::Field(other0)) => self0 == other0, + #[cfg(feature = "full")] + (Expr::ForLoop(self0), Expr::ForLoop(other0)) => self0 == other0, + #[cfg(feature = "full")] + (Expr::Group(self0), Expr::Group(other0)) => self0 == other0, + #[cfg(feature = "full")] + (Expr::If(self0), Expr::If(other0)) => self0 == other0, + (Expr::Index(self0), Expr::Index(other0)) => self0 == other0, + #[cfg(feature = "full")] + (Expr::Let(self0), Expr::Let(other0)) => self0 == other0, + (Expr::Lit(self0), Expr::Lit(other0)) => self0 == other0, + #[cfg(feature = "full")] + (Expr::Loop(self0), Expr::Loop(other0)) => self0 == other0, + #[cfg(feature = "full")] + (Expr::Macro(self0), Expr::Macro(other0)) => self0 == other0, + #[cfg(feature = "full")] + (Expr::Match(self0), Expr::Match(other0)) => self0 == other0, + #[cfg(feature = "full")] + (Expr::MethodCall(self0), Expr::MethodCall(other0)) => self0 == other0, + (Expr::Paren(self0), Expr::Paren(other0)) => self0 == other0, + (Expr::Path(self0), Expr::Path(other0)) => self0 == other0, + #[cfg(feature = "full")] + (Expr::Range(self0), Expr::Range(other0)) => self0 == other0, + #[cfg(feature = "full")] + (Expr::Reference(self0), Expr::Reference(other0)) => self0 == other0, + #[cfg(feature = "full")] + 
(Expr::Repeat(self0), Expr::Repeat(other0)) => self0 == other0, + #[cfg(feature = "full")] + (Expr::Return(self0), Expr::Return(other0)) => self0 == other0, + #[cfg(feature = "full")] + (Expr::Struct(self0), Expr::Struct(other0)) => self0 == other0, + #[cfg(feature = "full")] + (Expr::Try(self0), Expr::Try(other0)) => self0 == other0, + #[cfg(feature = "full")] + (Expr::TryBlock(self0), Expr::TryBlock(other0)) => self0 == other0, + #[cfg(feature = "full")] + (Expr::Tuple(self0), Expr::Tuple(other0)) => self0 == other0, + #[cfg(feature = "full")] + (Expr::Type(self0), Expr::Type(other0)) => self0 == other0, + (Expr::Unary(self0), Expr::Unary(other0)) => self0 == other0, + #[cfg(feature = "full")] + (Expr::Unsafe(self0), Expr::Unsafe(other0)) => self0 == other0, + (Expr::Verbatim(self0), Expr::Verbatim(other0)) => { + TokenStreamHelper(self0) == TokenStreamHelper(other0) + } + #[cfg(feature = "full")] + (Expr::While(self0), Expr::While(other0)) => self0 == other0, + #[cfg(feature = "full")] + (Expr::Yield(self0), Expr::Yield(other0)) => self0 == other0, + _ => false, + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ExprArray {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ExprArray { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.elems == other.elems + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ExprAssign {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ExprAssign { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.left == other.left && self.right == other.right + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ExprAssignOp {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ExprAssignOp { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.left == other.left && self.op == other.op + && self.right == other.right + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ExprAsync {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ExprAsync { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.capture == other.capture + && self.block == other.block + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ExprAwait {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ExprAwait { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.base == other.base + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ExprBinary {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ExprBinary { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.left == other.left && self.op == other.op + && self.right == other.right + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ExprBlock {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ExprBlock { + fn eq(&self, other: &Self) -> bool 
{ + self.attrs == other.attrs && self.label == other.label + && self.block == other.block + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ExprBox {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ExprBox { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.expr == other.expr + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ExprBreak {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ExprBreak { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.label == other.label && self.expr == other.expr + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ExprCall {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ExprCall { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.func == other.func && self.args == other.args + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ExprCast {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ExprCast { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.expr == other.expr && self.ty == other.ty + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ExprClosure {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ExprClosure { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.movability == other.movability + && self.asyncness == other.asyncness && self.capture == other.capture + && self.inputs == other.inputs && self.output == other.output + && self.body == other.body + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ExprContinue {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ExprContinue { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.label == other.label + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ExprField {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ExprField { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.base == other.base + && self.member == other.member + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ExprForLoop {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ExprForLoop { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.label == other.label && self.pat == other.pat + && self.expr == other.expr && self.body == other.body + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ExprGroup {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ExprGroup { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.expr == 
other.expr + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ExprIf {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ExprIf { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.cond == other.cond + && self.then_branch == other.then_branch + && self.else_branch == other.else_branch + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ExprIndex {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ExprIndex { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.expr == other.expr && self.index == other.index + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ExprLet {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ExprLet { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.pat == other.pat && self.expr == other.expr + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ExprLit {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ExprLit { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.lit == other.lit + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ExprLoop {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ExprLoop { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.label == other.label && self.body == other.body + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ExprMacro {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ExprMacro { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.mac == other.mac + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ExprMatch {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ExprMatch { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.expr == other.expr && self.arms == other.arms + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ExprMethodCall {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ExprMethodCall { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.receiver == other.receiver + && self.method == other.method && self.turbofish == other.turbofish + && self.args == other.args + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ExprParen {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ExprParen { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.expr == other.expr + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ExprPath {} 
+#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ExprPath { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.qself == other.qself && self.path == other.path + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ExprRange {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ExprRange { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.from == other.from + && self.limits == other.limits && self.to == other.to + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ExprReference {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ExprReference { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.mutability == other.mutability + && self.expr == other.expr + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ExprRepeat {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ExprRepeat { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.expr == other.expr && self.len == other.len + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ExprReturn {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ExprReturn { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.expr == other.expr + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ExprStruct {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ExprStruct { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.path == other.path + && self.fields == other.fields && self.dot2_token == other.dot2_token + && self.rest == other.rest + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ExprTry {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ExprTry { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.expr == other.expr + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ExprTryBlock {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ExprTryBlock { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.block == other.block + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ExprTuple {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ExprTuple { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.elems == other.elems + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ExprType {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ExprType { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.expr == other.expr && self.ty == other.ty + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, 
doc(cfg(feature = "extra-traits")))] +impl Eq for ExprUnary {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ExprUnary { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.op == other.op && self.expr == other.expr + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ExprUnsafe {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ExprUnsafe { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.block == other.block + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ExprWhile {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ExprWhile { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.label == other.label && self.cond == other.cond + && self.body == other.body + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ExprYield {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ExprYield { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.expr == other.expr + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for Field {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for Field { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.vis == other.vis && self.ident == other.ident + && self.colon_token == other.colon_token && self.ty == other.ty + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for FieldPat {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for FieldPat { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.member == other.member + && self.colon_token == other.colon_token && self.pat == other.pat + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for FieldValue {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for FieldValue { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.member == other.member + && self.colon_token == other.colon_token && self.expr == other.expr + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for Fields {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for Fields { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (Fields::Named(self0), Fields::Named(other0)) => self0 == other0, + (Fields::Unnamed(self0), Fields::Unnamed(other0)) => self0 == other0, + (Fields::Unit, Fields::Unit) => true, + _ => false, + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for FieldsNamed {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for FieldsNamed { + fn eq(&self, other: &Self) -> bool { + self.named == other.named + } +} +#[cfg(any(feature = 
"derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for FieldsUnnamed {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for FieldsUnnamed { + fn eq(&self, other: &Self) -> bool { + self.unnamed == other.unnamed + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for File {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for File { + fn eq(&self, other: &Self) -> bool { + self.shebang == other.shebang && self.attrs == other.attrs + && self.items == other.items + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for FnArg {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for FnArg { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (FnArg::Receiver(self0), FnArg::Receiver(other0)) => self0 == other0, + (FnArg::Typed(self0), FnArg::Typed(other0)) => self0 == other0, + _ => false, + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ForeignItem {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ForeignItem { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (ForeignItem::Fn(self0), ForeignItem::Fn(other0)) => self0 == other0, + (ForeignItem::Static(self0), ForeignItem::Static(other0)) => self0 == other0, + (ForeignItem::Type(self0), ForeignItem::Type(other0)) => self0 == other0, + (ForeignItem::Macro(self0), ForeignItem::Macro(other0)) => self0 == other0, + (ForeignItem::Verbatim(self0), ForeignItem::Verbatim(other0)) => { + TokenStreamHelper(self0) == TokenStreamHelper(other0) + } + _ => false, + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ForeignItemFn {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ForeignItemFn { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.vis == other.vis && self.sig == other.sig + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ForeignItemMacro {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ForeignItemMacro { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.mac == other.mac + && self.semi_token == other.semi_token + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ForeignItemStatic {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ForeignItemStatic { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.vis == other.vis + && self.mutability == other.mutability && self.ident == other.ident + && self.ty == other.ty + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ForeignItemType {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ForeignItemType { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.vis == other.vis && self.ident == other.ident + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for 
GenericArgument {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for GenericArgument { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (GenericArgument::Lifetime(self0), GenericArgument::Lifetime(other0)) => { + self0 == other0 + } + (GenericArgument::Type(self0), GenericArgument::Type(other0)) => { + self0 == other0 + } + (GenericArgument::Const(self0), GenericArgument::Const(other0)) => { + self0 == other0 + } + (GenericArgument::Binding(self0), GenericArgument::Binding(other0)) => { + self0 == other0 + } + (GenericArgument::Constraint(self0), GenericArgument::Constraint(other0)) => { + self0 == other0 + } + _ => false, + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for GenericMethodArgument {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for GenericMethodArgument { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (GenericMethodArgument::Type(self0), GenericMethodArgument::Type(other0)) => { + self0 == other0 + } + ( + GenericMethodArgument::Const(self0), + GenericMethodArgument::Const(other0), + ) => self0 == other0, + _ => false, + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for GenericParam {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for GenericParam { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (GenericParam::Type(self0), GenericParam::Type(other0)) => self0 == other0, + (GenericParam::Lifetime(self0), GenericParam::Lifetime(other0)) => { + self0 == other0 + } + (GenericParam::Const(self0), GenericParam::Const(other0)) => self0 == other0, + _ => false, + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for Generics {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for Generics { + fn eq(&self, other: &Self) -> bool { + self.lt_token == other.lt_token && self.params == other.params + && self.gt_token == other.gt_token && self.where_clause == other.where_clause + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ImplItem {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ImplItem { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (ImplItem::Const(self0), ImplItem::Const(other0)) => self0 == other0, + (ImplItem::Method(self0), ImplItem::Method(other0)) => self0 == other0, + (ImplItem::Type(self0), ImplItem::Type(other0)) => self0 == other0, + (ImplItem::Macro(self0), ImplItem::Macro(other0)) => self0 == other0, + (ImplItem::Verbatim(self0), ImplItem::Verbatim(other0)) => { + TokenStreamHelper(self0) == TokenStreamHelper(other0) + } + _ => false, + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ImplItemConst {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ImplItemConst { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.vis == other.vis + && self.defaultness == other.defaultness && self.ident == other.ident + && self.ty == other.ty && self.expr == other.expr + } +} 
+#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ImplItemMacro {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ImplItemMacro { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.mac == other.mac + && self.semi_token == other.semi_token + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ImplItemMethod {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ImplItemMethod { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.vis == other.vis + && self.defaultness == other.defaultness && self.sig == other.sig + && self.block == other.block + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ImplItemType {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ImplItemType { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.vis == other.vis + && self.defaultness == other.defaultness && self.ident == other.ident + && self.generics == other.generics && self.ty == other.ty + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for Item {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for Item { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (Item::Const(self0), Item::Const(other0)) => self0 == other0, + (Item::Enum(self0), Item::Enum(other0)) => self0 == other0, + (Item::ExternCrate(self0), Item::ExternCrate(other0)) => self0 == other0, + (Item::Fn(self0), Item::Fn(other0)) => self0 == other0, + (Item::ForeignMod(self0), Item::ForeignMod(other0)) => self0 == other0, + (Item::Impl(self0), Item::Impl(other0)) => self0 == other0, + (Item::Macro(self0), Item::Macro(other0)) => self0 == other0, + (Item::Macro2(self0), Item::Macro2(other0)) => self0 == other0, + (Item::Mod(self0), Item::Mod(other0)) => self0 == other0, + (Item::Static(self0), Item::Static(other0)) => self0 == other0, + (Item::Struct(self0), Item::Struct(other0)) => self0 == other0, + (Item::Trait(self0), Item::Trait(other0)) => self0 == other0, + (Item::TraitAlias(self0), Item::TraitAlias(other0)) => self0 == other0, + (Item::Type(self0), Item::Type(other0)) => self0 == other0, + (Item::Union(self0), Item::Union(other0)) => self0 == other0, + (Item::Use(self0), Item::Use(other0)) => self0 == other0, + (Item::Verbatim(self0), Item::Verbatim(other0)) => { + TokenStreamHelper(self0) == TokenStreamHelper(other0) + } + _ => false, + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ItemConst {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ItemConst { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.vis == other.vis && self.ident == other.ident + && self.ty == other.ty && self.expr == other.expr + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ItemEnum {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ItemEnum { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.vis == other.vis && self.ident == other.ident + && self.generics == other.generics && self.variants == other.variants 
+ } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ItemExternCrate {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ItemExternCrate { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.vis == other.vis && self.ident == other.ident + && self.rename == other.rename + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ItemFn {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ItemFn { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.vis == other.vis && self.sig == other.sig + && self.block == other.block + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ItemForeignMod {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ItemForeignMod { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.abi == other.abi && self.items == other.items + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ItemImpl {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ItemImpl { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.defaultness == other.defaultness + && self.unsafety == other.unsafety && self.generics == other.generics + && self.trait_ == other.trait_ && self.self_ty == other.self_ty + && self.items == other.items + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ItemMacro {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ItemMacro { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.ident == other.ident && self.mac == other.mac + && self.semi_token == other.semi_token + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ItemMacro2 {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ItemMacro2 { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.vis == other.vis && self.ident == other.ident + && TokenStreamHelper(&self.rules) == TokenStreamHelper(&other.rules) + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ItemMod {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ItemMod { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.vis == other.vis && self.ident == other.ident + && self.content == other.content && self.semi == other.semi + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ItemStatic {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ItemStatic { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.vis == other.vis + && self.mutability == other.mutability && self.ident == other.ident + && self.ty == other.ty && self.expr == other.expr + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ItemStruct {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for 
ItemStruct { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.vis == other.vis && self.ident == other.ident + && self.generics == other.generics && self.fields == other.fields + && self.semi_token == other.semi_token + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ItemTrait {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ItemTrait { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.vis == other.vis + && self.unsafety == other.unsafety && self.auto_token == other.auto_token + && self.ident == other.ident && self.generics == other.generics + && self.colon_token == other.colon_token + && self.supertraits == other.supertraits && self.items == other.items + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ItemTraitAlias {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ItemTraitAlias { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.vis == other.vis && self.ident == other.ident + && self.generics == other.generics && self.bounds == other.bounds + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ItemType {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ItemType { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.vis == other.vis && self.ident == other.ident + && self.generics == other.generics && self.ty == other.ty + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ItemUnion {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ItemUnion { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.vis == other.vis && self.ident == other.ident + && self.generics == other.generics && self.fields == other.fields + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ItemUse {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ItemUse { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.vis == other.vis + && self.leading_colon == other.leading_colon && self.tree == other.tree + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for Label {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for Label { + fn eq(&self, other: &Self) -> bool { + self.name == other.name + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for LifetimeDef {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for LifetimeDef { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.lifetime == other.lifetime + && self.colon_token == other.colon_token && self.bounds == other.bounds + } +} +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for Lit {} +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for Lit { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (Lit::Str(self0), Lit::Str(other0)) => self0 == other0, + (Lit::ByteStr(self0), 
Lit::ByteStr(other0)) => self0 == other0, + (Lit::Byte(self0), Lit::Byte(other0)) => self0 == other0, + (Lit::Char(self0), Lit::Char(other0)) => self0 == other0, + (Lit::Int(self0), Lit::Int(other0)) => self0 == other0, + (Lit::Float(self0), Lit::Float(other0)) => self0 == other0, + (Lit::Bool(self0), Lit::Bool(other0)) => self0 == other0, + (Lit::Verbatim(self0), Lit::Verbatim(other0)) => { + self0.to_string() == other0.to_string() + } + _ => false, + } + } +} +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for LitBool {} +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for LitBool { + fn eq(&self, other: &Self) -> bool { + self.value == other.value + } +} +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for LitByte {} +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for LitByteStr {} +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for LitChar {} +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for LitFloat {} +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for LitInt {} +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for LitStr {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for Local {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for Local { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.pat == other.pat && self.init == other.init + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for Macro {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for Macro { + fn eq(&self, other: &Self) -> bool { + self.path == other.path && self.delimiter == other.delimiter + && TokenStreamHelper(&self.tokens) == TokenStreamHelper(&other.tokens) + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for MacroDelimiter {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for MacroDelimiter { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (MacroDelimiter::Paren(_), MacroDelimiter::Paren(_)) => true, + (MacroDelimiter::Brace(_), MacroDelimiter::Brace(_)) => true, + (MacroDelimiter::Bracket(_), MacroDelimiter::Bracket(_)) => true, + _ => false, + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for Meta {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for Meta { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (Meta::Path(self0), Meta::Path(other0)) => self0 == other0, + (Meta::List(self0), Meta::List(other0)) => self0 == other0, + (Meta::NameValue(self0), Meta::NameValue(other0)) => self0 == other0, + _ => false, + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for MetaList {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for MetaList { + fn eq(&self, other: &Self) -> bool { + self.path == other.path && self.nested == other.nested + } +} +#[cfg(any(feature = "derive", feature = "full"))] 
+#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for MetaNameValue {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for MetaNameValue { + fn eq(&self, other: &Self) -> bool { + self.path == other.path && self.lit == other.lit + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for MethodTurbofish {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for MethodTurbofish { + fn eq(&self, other: &Self) -> bool { + self.args == other.args + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for NestedMeta {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for NestedMeta { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (NestedMeta::Meta(self0), NestedMeta::Meta(other0)) => self0 == other0, + (NestedMeta::Lit(self0), NestedMeta::Lit(other0)) => self0 == other0, + _ => false, + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ParenthesizedGenericArguments {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ParenthesizedGenericArguments { + fn eq(&self, other: &Self) -> bool { + self.inputs == other.inputs && self.output == other.output + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for Pat {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for Pat { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (Pat::Box(self0), Pat::Box(other0)) => self0 == other0, + (Pat::Ident(self0), Pat::Ident(other0)) => self0 == other0, + (Pat::Lit(self0), Pat::Lit(other0)) => self0 == other0, + (Pat::Macro(self0), Pat::Macro(other0)) => self0 == other0, + (Pat::Or(self0), Pat::Or(other0)) => self0 == other0, + (Pat::Path(self0), Pat::Path(other0)) => self0 == other0, + (Pat::Range(self0), Pat::Range(other0)) => self0 == other0, + (Pat::Reference(self0), Pat::Reference(other0)) => self0 == other0, + (Pat::Rest(self0), Pat::Rest(other0)) => self0 == other0, + (Pat::Slice(self0), Pat::Slice(other0)) => self0 == other0, + (Pat::Struct(self0), Pat::Struct(other0)) => self0 == other0, + (Pat::Tuple(self0), Pat::Tuple(other0)) => self0 == other0, + (Pat::TupleStruct(self0), Pat::TupleStruct(other0)) => self0 == other0, + (Pat::Type(self0), Pat::Type(other0)) => self0 == other0, + (Pat::Verbatim(self0), Pat::Verbatim(other0)) => { + TokenStreamHelper(self0) == TokenStreamHelper(other0) + } + (Pat::Wild(self0), Pat::Wild(other0)) => self0 == other0, + _ => false, + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for PatBox {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for PatBox { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.pat == other.pat + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for PatIdent {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for PatIdent { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.by_ref == 
other.by_ref + && self.mutability == other.mutability && self.ident == other.ident + && self.subpat == other.subpat + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for PatLit {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for PatLit { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.expr == other.expr + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for PatMacro {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for PatMacro { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.mac == other.mac + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for PatOr {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for PatOr { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.leading_vert == other.leading_vert + && self.cases == other.cases + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for PatPath {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for PatPath { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.qself == other.qself && self.path == other.path + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for PatRange {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for PatRange { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.lo == other.lo && self.limits == other.limits + && self.hi == other.hi + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for PatReference {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for PatReference { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.mutability == other.mutability + && self.pat == other.pat + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for PatRest {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for PatRest { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for PatSlice {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for PatSlice { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.elems == other.elems + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for PatStruct {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for PatStruct { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.path == other.path + && self.fields == other.fields && self.dot2_token == other.dot2_token + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for PatTuple {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for PatTuple { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && 
self.elems == other.elems + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for PatTupleStruct {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for PatTupleStruct { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.path == other.path && self.pat == other.pat + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for PatType {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for PatType { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.pat == other.pat && self.ty == other.ty + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for PatWild {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for PatWild { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for Path {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for Path { + fn eq(&self, other: &Self) -> bool { + self.leading_colon == other.leading_colon && self.segments == other.segments + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for PathArguments {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for PathArguments { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (PathArguments::None, PathArguments::None) => true, + ( + PathArguments::AngleBracketed(self0), + PathArguments::AngleBracketed(other0), + ) => self0 == other0, + ( + PathArguments::Parenthesized(self0), + PathArguments::Parenthesized(other0), + ) => self0 == other0, + _ => false, + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for PathSegment {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for PathSegment { + fn eq(&self, other: &Self) -> bool { + self.ident == other.ident && self.arguments == other.arguments + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for PredicateEq {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for PredicateEq { + fn eq(&self, other: &Self) -> bool { + self.lhs_ty == other.lhs_ty && self.rhs_ty == other.rhs_ty + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for PredicateLifetime {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for PredicateLifetime { + fn eq(&self, other: &Self) -> bool { + self.lifetime == other.lifetime && self.bounds == other.bounds + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for PredicateType {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for PredicateType { + fn eq(&self, other: 
&Self) -> bool { + self.lifetimes == other.lifetimes && self.bounded_ty == other.bounded_ty + && self.bounds == other.bounds + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for QSelf {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for QSelf { + fn eq(&self, other: &Self) -> bool { + self.ty == other.ty && self.position == other.position + && self.as_token == other.as_token + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for RangeLimits {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for RangeLimits { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (RangeLimits::HalfOpen(_), RangeLimits::HalfOpen(_)) => true, + (RangeLimits::Closed(_), RangeLimits::Closed(_)) => true, + _ => false, + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for Receiver {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for Receiver { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.reference == other.reference + && self.mutability == other.mutability + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for ReturnType {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for ReturnType { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (ReturnType::Default, ReturnType::Default) => true, + (ReturnType::Type(_, self1), ReturnType::Type(_, other1)) => self1 == other1, + _ => false, + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for Signature {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for Signature { + fn eq(&self, other: &Self) -> bool { + self.constness == other.constness && self.asyncness == other.asyncness + && self.unsafety == other.unsafety && self.abi == other.abi + && self.ident == other.ident && self.generics == other.generics + && self.inputs == other.inputs && self.variadic == other.variadic + && self.output == other.output + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for Stmt {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for Stmt { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (Stmt::Local(self0), Stmt::Local(other0)) => self0 == other0, + (Stmt::Item(self0), Stmt::Item(other0)) => self0 == other0, + (Stmt::Expr(self0), Stmt::Expr(other0)) => self0 == other0, + (Stmt::Semi(self0, _), Stmt::Semi(other0, _)) => self0 == other0, + _ => false, + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for TraitBound {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for TraitBound { + fn eq(&self, other: &Self) -> bool { + self.paren_token == other.paren_token && self.modifier == other.modifier + && self.lifetimes == other.lifetimes && self.path == other.path + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = 
"extra-traits")))] +impl Eq for TraitBoundModifier {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for TraitBoundModifier { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (TraitBoundModifier::None, TraitBoundModifier::None) => true, + (TraitBoundModifier::Maybe(_), TraitBoundModifier::Maybe(_)) => true, + _ => false, + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for TraitItem {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for TraitItem { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (TraitItem::Const(self0), TraitItem::Const(other0)) => self0 == other0, + (TraitItem::Method(self0), TraitItem::Method(other0)) => self0 == other0, + (TraitItem::Type(self0), TraitItem::Type(other0)) => self0 == other0, + (TraitItem::Macro(self0), TraitItem::Macro(other0)) => self0 == other0, + (TraitItem::Verbatim(self0), TraitItem::Verbatim(other0)) => { + TokenStreamHelper(self0) == TokenStreamHelper(other0) + } + _ => false, + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for TraitItemConst {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for TraitItemConst { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.ident == other.ident && self.ty == other.ty + && self.default == other.default + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for TraitItemMacro {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for TraitItemMacro { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.mac == other.mac + && self.semi_token == other.semi_token + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for TraitItemMethod {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for TraitItemMethod { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.sig == other.sig + && self.default == other.default && self.semi_token == other.semi_token + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for TraitItemType {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for TraitItemType { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.ident == other.ident + && self.generics == other.generics && self.colon_token == other.colon_token + && self.bounds == other.bounds && self.default == other.default + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for Type {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for Type { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (Type::Array(self0), Type::Array(other0)) => self0 == other0, + (Type::BareFn(self0), Type::BareFn(other0)) => self0 == other0, + (Type::Group(self0), Type::Group(other0)) => self0 == other0, + (Type::ImplTrait(self0), Type::ImplTrait(other0)) => self0 == other0, + (Type::Infer(self0), Type::Infer(other0)) => self0 == other0, + (Type::Macro(self0), Type::Macro(other0)) => self0 == other0, + 
(Type::Never(self0), Type::Never(other0)) => self0 == other0, + (Type::Paren(self0), Type::Paren(other0)) => self0 == other0, + (Type::Path(self0), Type::Path(other0)) => self0 == other0, + (Type::Ptr(self0), Type::Ptr(other0)) => self0 == other0, + (Type::Reference(self0), Type::Reference(other0)) => self0 == other0, + (Type::Slice(self0), Type::Slice(other0)) => self0 == other0, + (Type::TraitObject(self0), Type::TraitObject(other0)) => self0 == other0, + (Type::Tuple(self0), Type::Tuple(other0)) => self0 == other0, + (Type::Verbatim(self0), Type::Verbatim(other0)) => { + TokenStreamHelper(self0) == TokenStreamHelper(other0) + } + _ => false, + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for TypeArray {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for TypeArray { + fn eq(&self, other: &Self) -> bool { + self.elem == other.elem && self.len == other.len + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for TypeBareFn {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for TypeBareFn { + fn eq(&self, other: &Self) -> bool { + self.lifetimes == other.lifetimes && self.unsafety == other.unsafety + && self.abi == other.abi && self.inputs == other.inputs + && self.variadic == other.variadic && self.output == other.output + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for TypeGroup {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for TypeGroup { + fn eq(&self, other: &Self) -> bool { + self.elem == other.elem + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for TypeImplTrait {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for TypeImplTrait { + fn eq(&self, other: &Self) -> bool { + self.bounds == other.bounds + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for TypeInfer {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for TypeInfer { + fn eq(&self, _other: &Self) -> bool { + true + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for TypeMacro {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for TypeMacro { + fn eq(&self, other: &Self) -> bool { + self.mac == other.mac + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for TypeNever {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for TypeNever { + fn eq(&self, _other: &Self) -> bool { + true + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for TypeParam {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for TypeParam { + fn 
eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.ident == other.ident + && self.colon_token == other.colon_token && self.bounds == other.bounds + && self.eq_token == other.eq_token && self.default == other.default + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for TypeParamBound {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for TypeParamBound { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (TypeParamBound::Trait(self0), TypeParamBound::Trait(other0)) => { + self0 == other0 + } + (TypeParamBound::Lifetime(self0), TypeParamBound::Lifetime(other0)) => { + self0 == other0 + } + _ => false, + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for TypeParen {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for TypeParen { + fn eq(&self, other: &Self) -> bool { + self.elem == other.elem + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for TypePath {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for TypePath { + fn eq(&self, other: &Self) -> bool { + self.qself == other.qself && self.path == other.path + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for TypePtr {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for TypePtr { + fn eq(&self, other: &Self) -> bool { + self.const_token == other.const_token && self.mutability == other.mutability + && self.elem == other.elem + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for TypeReference {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for TypeReference { + fn eq(&self, other: &Self) -> bool { + self.lifetime == other.lifetime && self.mutability == other.mutability + && self.elem == other.elem + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for TypeSlice {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for TypeSlice { + fn eq(&self, other: &Self) -> bool { + self.elem == other.elem + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for TypeTraitObject {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for TypeTraitObject { + fn eq(&self, other: &Self) -> bool { + self.dyn_token == other.dyn_token && self.bounds == other.bounds + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for TypeTuple {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for TypeTuple { + fn eq(&self, other: &Self) -> bool { + self.elems == other.elems + } +} +#[cfg(any(feature = "derive", feature = "full"))] 
+#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for UnOp {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for UnOp { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (UnOp::Deref(_), UnOp::Deref(_)) => true, + (UnOp::Not(_), UnOp::Not(_)) => true, + (UnOp::Neg(_), UnOp::Neg(_)) => true, + _ => false, + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for UseGlob {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for UseGlob { + fn eq(&self, _other: &Self) -> bool { + true + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for UseGroup {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for UseGroup { + fn eq(&self, other: &Self) -> bool { + self.items == other.items + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for UseName {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for UseName { + fn eq(&self, other: &Self) -> bool { + self.ident == other.ident + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for UsePath {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for UsePath { + fn eq(&self, other: &Self) -> bool { + self.ident == other.ident && self.tree == other.tree + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for UseRename {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for UseRename { + fn eq(&self, other: &Self) -> bool { + self.ident == other.ident && self.rename == other.rename + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for UseTree {} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for UseTree { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (UseTree::Path(self0), UseTree::Path(other0)) => self0 == other0, + (UseTree::Name(self0), UseTree::Name(other0)) => self0 == other0, + (UseTree::Rename(self0), UseTree::Rename(other0)) => self0 == other0, + (UseTree::Glob(self0), UseTree::Glob(other0)) => self0 == other0, + (UseTree::Group(self0), UseTree::Group(other0)) => self0 == other0, + _ => false, + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for Variadic {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for Variadic { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for Variant {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for Variant { + fn eq(&self, other: &Self) -> bool { + self.attrs == other.attrs && self.ident == other.ident + && self.fields == other.fields && self.discriminant == other.discriminant + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for VisCrate {} 
+#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for VisCrate { + fn eq(&self, _other: &Self) -> bool { + true + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for VisPublic {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for VisPublic { + fn eq(&self, _other: &Self) -> bool { + true + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for VisRestricted {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for VisRestricted { + fn eq(&self, other: &Self) -> bool { + self.in_token == other.in_token && self.path == other.path + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for Visibility {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for Visibility { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (Visibility::Public(self0), Visibility::Public(other0)) => self0 == other0, + (Visibility::Crate(self0), Visibility::Crate(other0)) => self0 == other0, + (Visibility::Restricted(self0), Visibility::Restricted(other0)) => { + self0 == other0 + } + (Visibility::Inherited, Visibility::Inherited) => true, + _ => false, + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for WhereClause {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for WhereClause { + fn eq(&self, other: &Self) -> bool { + self.predicates == other.predicates + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for WherePredicate {} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for WherePredicate { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (WherePredicate::Type(self0), WherePredicate::Type(other0)) => { + self0 == other0 + } + (WherePredicate::Lifetime(self0), WherePredicate::Lifetime(other0)) => { + self0 == other0 + } + (WherePredicate::Eq(self0), WherePredicate::Eq(other0)) => self0 == other0, + _ => false, + } + } +} diff --git a/third_party/rust/syn/src/gen/fold.rs b/third_party/rust/syn/src/gen/fold.rs new file mode 100644 index 0000000000..98bb5794aa --- /dev/null +++ b/third_party/rust/syn/src/gen/fold.rs @@ -0,0 +1,3341 @@ +// This file is @generated by syn-internal-codegen. +// It is not intended for manual editing. + +#![allow(unreachable_code, unused_variables)] +#![allow(clippy::match_wildcard_for_single_variants)] +#[cfg(any(feature = "full", feature = "derive"))] +use crate::gen::helper::fold::*; +#[cfg(any(feature = "full", feature = "derive"))] +use crate::token::{Brace, Bracket, Group, Paren}; +use crate::*; +use proc_macro2::Span; +#[cfg(feature = "full")] +macro_rules! full { + ($e:expr) => { + $e + }; +} +#[cfg(all(feature = "derive", not(feature = "full")))] +macro_rules! full { + ($e:expr) => { + unreachable!() + }; +} +/// Syntax tree traversal to transform the nodes of an owned syntax tree. +/// +/// See the [module documentation] for details. 
+/// +/// [module documentation]: self +/// +/// *This trait is available only if Syn is built with the `"fold"` feature.* +pub trait Fold { + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_abi(&mut self, i: Abi) -> Abi { + fold_abi(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_angle_bracketed_generic_arguments( + &mut self, + i: AngleBracketedGenericArguments, + ) -> AngleBracketedGenericArguments { + fold_angle_bracketed_generic_arguments(self, i) + } + #[cfg(feature = "full")] + fn fold_arm(&mut self, i: Arm) -> Arm { + fold_arm(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_attr_style(&mut self, i: AttrStyle) -> AttrStyle { + fold_attr_style(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_attribute(&mut self, i: Attribute) -> Attribute { + fold_attribute(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_bare_fn_arg(&mut self, i: BareFnArg) -> BareFnArg { + fold_bare_fn_arg(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_bin_op(&mut self, i: BinOp) -> BinOp { + fold_bin_op(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_binding(&mut self, i: Binding) -> Binding { + fold_binding(self, i) + } + #[cfg(feature = "full")] + fn fold_block(&mut self, i: Block) -> Block { + fold_block(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_bound_lifetimes(&mut self, i: BoundLifetimes) -> BoundLifetimes { + fold_bound_lifetimes(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_const_param(&mut self, i: ConstParam) -> ConstParam { + fold_const_param(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_constraint(&mut self, i: Constraint) -> Constraint { + fold_constraint(self, i) + } + #[cfg(feature = "derive")] + fn fold_data(&mut self, i: Data) -> Data { + fold_data(self, i) + } + #[cfg(feature = "derive")] + fn fold_data_enum(&mut self, i: DataEnum) -> DataEnum { + fold_data_enum(self, i) + } + #[cfg(feature = "derive")] + fn fold_data_struct(&mut self, i: DataStruct) -> DataStruct { + fold_data_struct(self, i) + } + #[cfg(feature = "derive")] + fn fold_data_union(&mut self, i: DataUnion) -> DataUnion { + fold_data_union(self, i) + } + #[cfg(feature = "derive")] + fn fold_derive_input(&mut self, i: DeriveInput) -> DeriveInput { + fold_derive_input(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_expr(&mut self, i: Expr) -> Expr { + fold_expr(self, i) + } + #[cfg(feature = "full")] + fn fold_expr_array(&mut self, i: ExprArray) -> ExprArray { + fold_expr_array(self, i) + } + #[cfg(feature = "full")] + fn fold_expr_assign(&mut self, i: ExprAssign) -> ExprAssign { + fold_expr_assign(self, i) + } + #[cfg(feature = "full")] + fn fold_expr_assign_op(&mut self, i: ExprAssignOp) -> ExprAssignOp { + fold_expr_assign_op(self, i) + } + #[cfg(feature = "full")] + fn fold_expr_async(&mut self, i: ExprAsync) -> ExprAsync { + fold_expr_async(self, i) + } + #[cfg(feature = "full")] + fn fold_expr_await(&mut self, i: ExprAwait) -> ExprAwait { + fold_expr_await(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_expr_binary(&mut self, i: ExprBinary) -> ExprBinary { + fold_expr_binary(self, i) + } + #[cfg(feature = "full")] + fn fold_expr_block(&mut self, i: ExprBlock) -> ExprBlock { + fold_expr_block(self, i) + } + #[cfg(feature = "full")] + fn fold_expr_box(&mut self, i: ExprBox) -> ExprBox { + fold_expr_box(self, i) + } + 
#[cfg(feature = "full")] + fn fold_expr_break(&mut self, i: ExprBreak) -> ExprBreak { + fold_expr_break(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_expr_call(&mut self, i: ExprCall) -> ExprCall { + fold_expr_call(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_expr_cast(&mut self, i: ExprCast) -> ExprCast { + fold_expr_cast(self, i) + } + #[cfg(feature = "full")] + fn fold_expr_closure(&mut self, i: ExprClosure) -> ExprClosure { + fold_expr_closure(self, i) + } + #[cfg(feature = "full")] + fn fold_expr_continue(&mut self, i: ExprContinue) -> ExprContinue { + fold_expr_continue(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_expr_field(&mut self, i: ExprField) -> ExprField { + fold_expr_field(self, i) + } + #[cfg(feature = "full")] + fn fold_expr_for_loop(&mut self, i: ExprForLoop) -> ExprForLoop { + fold_expr_for_loop(self, i) + } + #[cfg(feature = "full")] + fn fold_expr_group(&mut self, i: ExprGroup) -> ExprGroup { + fold_expr_group(self, i) + } + #[cfg(feature = "full")] + fn fold_expr_if(&mut self, i: ExprIf) -> ExprIf { + fold_expr_if(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_expr_index(&mut self, i: ExprIndex) -> ExprIndex { + fold_expr_index(self, i) + } + #[cfg(feature = "full")] + fn fold_expr_let(&mut self, i: ExprLet) -> ExprLet { + fold_expr_let(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_expr_lit(&mut self, i: ExprLit) -> ExprLit { + fold_expr_lit(self, i) + } + #[cfg(feature = "full")] + fn fold_expr_loop(&mut self, i: ExprLoop) -> ExprLoop { + fold_expr_loop(self, i) + } + #[cfg(feature = "full")] + fn fold_expr_macro(&mut self, i: ExprMacro) -> ExprMacro { + fold_expr_macro(self, i) + } + #[cfg(feature = "full")] + fn fold_expr_match(&mut self, i: ExprMatch) -> ExprMatch { + fold_expr_match(self, i) + } + #[cfg(feature = "full")] + fn fold_expr_method_call(&mut self, i: ExprMethodCall) -> ExprMethodCall { + fold_expr_method_call(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_expr_paren(&mut self, i: ExprParen) -> ExprParen { + fold_expr_paren(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_expr_path(&mut self, i: ExprPath) -> ExprPath { + fold_expr_path(self, i) + } + #[cfg(feature = "full")] + fn fold_expr_range(&mut self, i: ExprRange) -> ExprRange { + fold_expr_range(self, i) + } + #[cfg(feature = "full")] + fn fold_expr_reference(&mut self, i: ExprReference) -> ExprReference { + fold_expr_reference(self, i) + } + #[cfg(feature = "full")] + fn fold_expr_repeat(&mut self, i: ExprRepeat) -> ExprRepeat { + fold_expr_repeat(self, i) + } + #[cfg(feature = "full")] + fn fold_expr_return(&mut self, i: ExprReturn) -> ExprReturn { + fold_expr_return(self, i) + } + #[cfg(feature = "full")] + fn fold_expr_struct(&mut self, i: ExprStruct) -> ExprStruct { + fold_expr_struct(self, i) + } + #[cfg(feature = "full")] + fn fold_expr_try(&mut self, i: ExprTry) -> ExprTry { + fold_expr_try(self, i) + } + #[cfg(feature = "full")] + fn fold_expr_try_block(&mut self, i: ExprTryBlock) -> ExprTryBlock { + fold_expr_try_block(self, i) + } + #[cfg(feature = "full")] + fn fold_expr_tuple(&mut self, i: ExprTuple) -> ExprTuple { + fold_expr_tuple(self, i) + } + #[cfg(feature = "full")] + fn fold_expr_type(&mut self, i: ExprType) -> ExprType { + fold_expr_type(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_expr_unary(&mut self, i: ExprUnary) -> ExprUnary { + 
fold_expr_unary(self, i) + } + #[cfg(feature = "full")] + fn fold_expr_unsafe(&mut self, i: ExprUnsafe) -> ExprUnsafe { + fold_expr_unsafe(self, i) + } + #[cfg(feature = "full")] + fn fold_expr_while(&mut self, i: ExprWhile) -> ExprWhile { + fold_expr_while(self, i) + } + #[cfg(feature = "full")] + fn fold_expr_yield(&mut self, i: ExprYield) -> ExprYield { + fold_expr_yield(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_field(&mut self, i: Field) -> Field { + fold_field(self, i) + } + #[cfg(feature = "full")] + fn fold_field_pat(&mut self, i: FieldPat) -> FieldPat { + fold_field_pat(self, i) + } + #[cfg(feature = "full")] + fn fold_field_value(&mut self, i: FieldValue) -> FieldValue { + fold_field_value(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_fields(&mut self, i: Fields) -> Fields { + fold_fields(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_fields_named(&mut self, i: FieldsNamed) -> FieldsNamed { + fold_fields_named(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_fields_unnamed(&mut self, i: FieldsUnnamed) -> FieldsUnnamed { + fold_fields_unnamed(self, i) + } + #[cfg(feature = "full")] + fn fold_file(&mut self, i: File) -> File { + fold_file(self, i) + } + #[cfg(feature = "full")] + fn fold_fn_arg(&mut self, i: FnArg) -> FnArg { + fold_fn_arg(self, i) + } + #[cfg(feature = "full")] + fn fold_foreign_item(&mut self, i: ForeignItem) -> ForeignItem { + fold_foreign_item(self, i) + } + #[cfg(feature = "full")] + fn fold_foreign_item_fn(&mut self, i: ForeignItemFn) -> ForeignItemFn { + fold_foreign_item_fn(self, i) + } + #[cfg(feature = "full")] + fn fold_foreign_item_macro(&mut self, i: ForeignItemMacro) -> ForeignItemMacro { + fold_foreign_item_macro(self, i) + } + #[cfg(feature = "full")] + fn fold_foreign_item_static(&mut self, i: ForeignItemStatic) -> ForeignItemStatic { + fold_foreign_item_static(self, i) + } + #[cfg(feature = "full")] + fn fold_foreign_item_type(&mut self, i: ForeignItemType) -> ForeignItemType { + fold_foreign_item_type(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_generic_argument(&mut self, i: GenericArgument) -> GenericArgument { + fold_generic_argument(self, i) + } + #[cfg(feature = "full")] + fn fold_generic_method_argument( + &mut self, + i: GenericMethodArgument, + ) -> GenericMethodArgument { + fold_generic_method_argument(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_generic_param(&mut self, i: GenericParam) -> GenericParam { + fold_generic_param(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_generics(&mut self, i: Generics) -> Generics { + fold_generics(self, i) + } + fn fold_ident(&mut self, i: Ident) -> Ident { + fold_ident(self, i) + } + #[cfg(feature = "full")] + fn fold_impl_item(&mut self, i: ImplItem) -> ImplItem { + fold_impl_item(self, i) + } + #[cfg(feature = "full")] + fn fold_impl_item_const(&mut self, i: ImplItemConst) -> ImplItemConst { + fold_impl_item_const(self, i) + } + #[cfg(feature = "full")] + fn fold_impl_item_macro(&mut self, i: ImplItemMacro) -> ImplItemMacro { + fold_impl_item_macro(self, i) + } + #[cfg(feature = "full")] + fn fold_impl_item_method(&mut self, i: ImplItemMethod) -> ImplItemMethod { + fold_impl_item_method(self, i) + } + #[cfg(feature = "full")] + fn fold_impl_item_type(&mut self, i: ImplItemType) -> ImplItemType { + fold_impl_item_type(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn 
fold_index(&mut self, i: Index) -> Index { + fold_index(self, i) + } + #[cfg(feature = "full")] + fn fold_item(&mut self, i: Item) -> Item { + fold_item(self, i) + } + #[cfg(feature = "full")] + fn fold_item_const(&mut self, i: ItemConst) -> ItemConst { + fold_item_const(self, i) + } + #[cfg(feature = "full")] + fn fold_item_enum(&mut self, i: ItemEnum) -> ItemEnum { + fold_item_enum(self, i) + } + #[cfg(feature = "full")] + fn fold_item_extern_crate(&mut self, i: ItemExternCrate) -> ItemExternCrate { + fold_item_extern_crate(self, i) + } + #[cfg(feature = "full")] + fn fold_item_fn(&mut self, i: ItemFn) -> ItemFn { + fold_item_fn(self, i) + } + #[cfg(feature = "full")] + fn fold_item_foreign_mod(&mut self, i: ItemForeignMod) -> ItemForeignMod { + fold_item_foreign_mod(self, i) + } + #[cfg(feature = "full")] + fn fold_item_impl(&mut self, i: ItemImpl) -> ItemImpl { + fold_item_impl(self, i) + } + #[cfg(feature = "full")] + fn fold_item_macro(&mut self, i: ItemMacro) -> ItemMacro { + fold_item_macro(self, i) + } + #[cfg(feature = "full")] + fn fold_item_macro2(&mut self, i: ItemMacro2) -> ItemMacro2 { + fold_item_macro2(self, i) + } + #[cfg(feature = "full")] + fn fold_item_mod(&mut self, i: ItemMod) -> ItemMod { + fold_item_mod(self, i) + } + #[cfg(feature = "full")] + fn fold_item_static(&mut self, i: ItemStatic) -> ItemStatic { + fold_item_static(self, i) + } + #[cfg(feature = "full")] + fn fold_item_struct(&mut self, i: ItemStruct) -> ItemStruct { + fold_item_struct(self, i) + } + #[cfg(feature = "full")] + fn fold_item_trait(&mut self, i: ItemTrait) -> ItemTrait { + fold_item_trait(self, i) + } + #[cfg(feature = "full")] + fn fold_item_trait_alias(&mut self, i: ItemTraitAlias) -> ItemTraitAlias { + fold_item_trait_alias(self, i) + } + #[cfg(feature = "full")] + fn fold_item_type(&mut self, i: ItemType) -> ItemType { + fold_item_type(self, i) + } + #[cfg(feature = "full")] + fn fold_item_union(&mut self, i: ItemUnion) -> ItemUnion { + fold_item_union(self, i) + } + #[cfg(feature = "full")] + fn fold_item_use(&mut self, i: ItemUse) -> ItemUse { + fold_item_use(self, i) + } + #[cfg(feature = "full")] + fn fold_label(&mut self, i: Label) -> Label { + fold_label(self, i) + } + fn fold_lifetime(&mut self, i: Lifetime) -> Lifetime { + fold_lifetime(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_lifetime_def(&mut self, i: LifetimeDef) -> LifetimeDef { + fold_lifetime_def(self, i) + } + fn fold_lit(&mut self, i: Lit) -> Lit { + fold_lit(self, i) + } + fn fold_lit_bool(&mut self, i: LitBool) -> LitBool { + fold_lit_bool(self, i) + } + fn fold_lit_byte(&mut self, i: LitByte) -> LitByte { + fold_lit_byte(self, i) + } + fn fold_lit_byte_str(&mut self, i: LitByteStr) -> LitByteStr { + fold_lit_byte_str(self, i) + } + fn fold_lit_char(&mut self, i: LitChar) -> LitChar { + fold_lit_char(self, i) + } + fn fold_lit_float(&mut self, i: LitFloat) -> LitFloat { + fold_lit_float(self, i) + } + fn fold_lit_int(&mut self, i: LitInt) -> LitInt { + fold_lit_int(self, i) + } + fn fold_lit_str(&mut self, i: LitStr) -> LitStr { + fold_lit_str(self, i) + } + #[cfg(feature = "full")] + fn fold_local(&mut self, i: Local) -> Local { + fold_local(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_macro(&mut self, i: Macro) -> Macro { + fold_macro(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_macro_delimiter(&mut self, i: MacroDelimiter) -> MacroDelimiter { + fold_macro_delimiter(self, i) + } + #[cfg(any(feature = "derive", feature = 
"full"))] + fn fold_member(&mut self, i: Member) -> Member { + fold_member(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_meta(&mut self, i: Meta) -> Meta { + fold_meta(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_meta_list(&mut self, i: MetaList) -> MetaList { + fold_meta_list(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_meta_name_value(&mut self, i: MetaNameValue) -> MetaNameValue { + fold_meta_name_value(self, i) + } + #[cfg(feature = "full")] + fn fold_method_turbofish(&mut self, i: MethodTurbofish) -> MethodTurbofish { + fold_method_turbofish(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_nested_meta(&mut self, i: NestedMeta) -> NestedMeta { + fold_nested_meta(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_parenthesized_generic_arguments( + &mut self, + i: ParenthesizedGenericArguments, + ) -> ParenthesizedGenericArguments { + fold_parenthesized_generic_arguments(self, i) + } + #[cfg(feature = "full")] + fn fold_pat(&mut self, i: Pat) -> Pat { + fold_pat(self, i) + } + #[cfg(feature = "full")] + fn fold_pat_box(&mut self, i: PatBox) -> PatBox { + fold_pat_box(self, i) + } + #[cfg(feature = "full")] + fn fold_pat_ident(&mut self, i: PatIdent) -> PatIdent { + fold_pat_ident(self, i) + } + #[cfg(feature = "full")] + fn fold_pat_lit(&mut self, i: PatLit) -> PatLit { + fold_pat_lit(self, i) + } + #[cfg(feature = "full")] + fn fold_pat_macro(&mut self, i: PatMacro) -> PatMacro { + fold_pat_macro(self, i) + } + #[cfg(feature = "full")] + fn fold_pat_or(&mut self, i: PatOr) -> PatOr { + fold_pat_or(self, i) + } + #[cfg(feature = "full")] + fn fold_pat_path(&mut self, i: PatPath) -> PatPath { + fold_pat_path(self, i) + } + #[cfg(feature = "full")] + fn fold_pat_range(&mut self, i: PatRange) -> PatRange { + fold_pat_range(self, i) + } + #[cfg(feature = "full")] + fn fold_pat_reference(&mut self, i: PatReference) -> PatReference { + fold_pat_reference(self, i) + } + #[cfg(feature = "full")] + fn fold_pat_rest(&mut self, i: PatRest) -> PatRest { + fold_pat_rest(self, i) + } + #[cfg(feature = "full")] + fn fold_pat_slice(&mut self, i: PatSlice) -> PatSlice { + fold_pat_slice(self, i) + } + #[cfg(feature = "full")] + fn fold_pat_struct(&mut self, i: PatStruct) -> PatStruct { + fold_pat_struct(self, i) + } + #[cfg(feature = "full")] + fn fold_pat_tuple(&mut self, i: PatTuple) -> PatTuple { + fold_pat_tuple(self, i) + } + #[cfg(feature = "full")] + fn fold_pat_tuple_struct(&mut self, i: PatTupleStruct) -> PatTupleStruct { + fold_pat_tuple_struct(self, i) + } + #[cfg(feature = "full")] + fn fold_pat_type(&mut self, i: PatType) -> PatType { + fold_pat_type(self, i) + } + #[cfg(feature = "full")] + fn fold_pat_wild(&mut self, i: PatWild) -> PatWild { + fold_pat_wild(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_path(&mut self, i: Path) -> Path { + fold_path(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_path_arguments(&mut self, i: PathArguments) -> PathArguments { + fold_path_arguments(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_path_segment(&mut self, i: PathSegment) -> PathSegment { + fold_path_segment(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_predicate_eq(&mut self, i: PredicateEq) -> PredicateEq { + fold_predicate_eq(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_predicate_lifetime(&mut self, i: 
PredicateLifetime) -> PredicateLifetime { + fold_predicate_lifetime(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_predicate_type(&mut self, i: PredicateType) -> PredicateType { + fold_predicate_type(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_qself(&mut self, i: QSelf) -> QSelf { + fold_qself(self, i) + } + #[cfg(feature = "full")] + fn fold_range_limits(&mut self, i: RangeLimits) -> RangeLimits { + fold_range_limits(self, i) + } + #[cfg(feature = "full")] + fn fold_receiver(&mut self, i: Receiver) -> Receiver { + fold_receiver(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_return_type(&mut self, i: ReturnType) -> ReturnType { + fold_return_type(self, i) + } + #[cfg(feature = "full")] + fn fold_signature(&mut self, i: Signature) -> Signature { + fold_signature(self, i) + } + fn fold_span(&mut self, i: Span) -> Span { + fold_span(self, i) + } + #[cfg(feature = "full")] + fn fold_stmt(&mut self, i: Stmt) -> Stmt { + fold_stmt(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_trait_bound(&mut self, i: TraitBound) -> TraitBound { + fold_trait_bound(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_trait_bound_modifier( + &mut self, + i: TraitBoundModifier, + ) -> TraitBoundModifier { + fold_trait_bound_modifier(self, i) + } + #[cfg(feature = "full")] + fn fold_trait_item(&mut self, i: TraitItem) -> TraitItem { + fold_trait_item(self, i) + } + #[cfg(feature = "full")] + fn fold_trait_item_const(&mut self, i: TraitItemConst) -> TraitItemConst { + fold_trait_item_const(self, i) + } + #[cfg(feature = "full")] + fn fold_trait_item_macro(&mut self, i: TraitItemMacro) -> TraitItemMacro { + fold_trait_item_macro(self, i) + } + #[cfg(feature = "full")] + fn fold_trait_item_method(&mut self, i: TraitItemMethod) -> TraitItemMethod { + fold_trait_item_method(self, i) + } + #[cfg(feature = "full")] + fn fold_trait_item_type(&mut self, i: TraitItemType) -> TraitItemType { + fold_trait_item_type(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_type(&mut self, i: Type) -> Type { + fold_type(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_type_array(&mut self, i: TypeArray) -> TypeArray { + fold_type_array(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_type_bare_fn(&mut self, i: TypeBareFn) -> TypeBareFn { + fold_type_bare_fn(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_type_group(&mut self, i: TypeGroup) -> TypeGroup { + fold_type_group(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_type_impl_trait(&mut self, i: TypeImplTrait) -> TypeImplTrait { + fold_type_impl_trait(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_type_infer(&mut self, i: TypeInfer) -> TypeInfer { + fold_type_infer(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_type_macro(&mut self, i: TypeMacro) -> TypeMacro { + fold_type_macro(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_type_never(&mut self, i: TypeNever) -> TypeNever { + fold_type_never(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_type_param(&mut self, i: TypeParam) -> TypeParam { + fold_type_param(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_type_param_bound(&mut self, i: TypeParamBound) -> TypeParamBound { + fold_type_param_bound(self, i) + } + #[cfg(any(feature = 
"derive", feature = "full"))] + fn fold_type_paren(&mut self, i: TypeParen) -> TypeParen { + fold_type_paren(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_type_path(&mut self, i: TypePath) -> TypePath { + fold_type_path(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_type_ptr(&mut self, i: TypePtr) -> TypePtr { + fold_type_ptr(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_type_reference(&mut self, i: TypeReference) -> TypeReference { + fold_type_reference(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_type_slice(&mut self, i: TypeSlice) -> TypeSlice { + fold_type_slice(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_type_trait_object(&mut self, i: TypeTraitObject) -> TypeTraitObject { + fold_type_trait_object(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_type_tuple(&mut self, i: TypeTuple) -> TypeTuple { + fold_type_tuple(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_un_op(&mut self, i: UnOp) -> UnOp { + fold_un_op(self, i) + } + #[cfg(feature = "full")] + fn fold_use_glob(&mut self, i: UseGlob) -> UseGlob { + fold_use_glob(self, i) + } + #[cfg(feature = "full")] + fn fold_use_group(&mut self, i: UseGroup) -> UseGroup { + fold_use_group(self, i) + } + #[cfg(feature = "full")] + fn fold_use_name(&mut self, i: UseName) -> UseName { + fold_use_name(self, i) + } + #[cfg(feature = "full")] + fn fold_use_path(&mut self, i: UsePath) -> UsePath { + fold_use_path(self, i) + } + #[cfg(feature = "full")] + fn fold_use_rename(&mut self, i: UseRename) -> UseRename { + fold_use_rename(self, i) + } + #[cfg(feature = "full")] + fn fold_use_tree(&mut self, i: UseTree) -> UseTree { + fold_use_tree(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_variadic(&mut self, i: Variadic) -> Variadic { + fold_variadic(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_variant(&mut self, i: Variant) -> Variant { + fold_variant(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_vis_crate(&mut self, i: VisCrate) -> VisCrate { + fold_vis_crate(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_vis_public(&mut self, i: VisPublic) -> VisPublic { + fold_vis_public(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_vis_restricted(&mut self, i: VisRestricted) -> VisRestricted { + fold_vis_restricted(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_visibility(&mut self, i: Visibility) -> Visibility { + fold_visibility(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_where_clause(&mut self, i: WhereClause) -> WhereClause { + fold_where_clause(self, i) + } + #[cfg(any(feature = "derive", feature = "full"))] + fn fold_where_predicate(&mut self, i: WherePredicate) -> WherePredicate { + fold_where_predicate(self, i) + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_abi<F>(f: &mut F, node: Abi) -> Abi +where + F: Fold + ?Sized, +{ + Abi { + extern_token: Token![extern](tokens_helper(f, &node.extern_token.span)), + name: (node.name).map(|it| f.fold_lit_str(it)), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_angle_bracketed_generic_arguments<F>( + f: &mut F, + node: AngleBracketedGenericArguments, +) -> AngleBracketedGenericArguments +where + F: Fold + ?Sized, +{ + AngleBracketedGenericArguments { + colon2_token: 
(node.colon2_token) + .map(|it| Token![::](tokens_helper(f, &it.spans))), + lt_token: Token![<](tokens_helper(f, &node.lt_token.spans)), + args: FoldHelper::lift(node.args, |it| f.fold_generic_argument(it)), + gt_token: Token![>](tokens_helper(f, &node.gt_token.spans)), + } +} +#[cfg(feature = "full")] +pub fn fold_arm<F>(f: &mut F, node: Arm) -> Arm +where + F: Fold + ?Sized, +{ + Arm { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + pat: f.fold_pat(node.pat), + guard: (node.guard) + .map(|it| ( + Token![if](tokens_helper(f, &(it).0.span)), + Box::new(f.fold_expr(*(it).1)), + )), + fat_arrow_token: Token![=>](tokens_helper(f, &node.fat_arrow_token.spans)), + body: Box::new(f.fold_expr(*node.body)), + comma: (node.comma).map(|it| Token![,](tokens_helper(f, &it.spans))), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_attr_style<F>(f: &mut F, node: AttrStyle) -> AttrStyle +where + F: Fold + ?Sized, +{ + match node { + AttrStyle::Outer => AttrStyle::Outer, + AttrStyle::Inner(_binding_0) => { + AttrStyle::Inner(Token![!](tokens_helper(f, &_binding_0.spans))) + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_attribute<F>(f: &mut F, node: Attribute) -> Attribute +where + F: Fold + ?Sized, +{ + Attribute { + pound_token: Token![#](tokens_helper(f, &node.pound_token.spans)), + style: f.fold_attr_style(node.style), + bracket_token: Bracket(tokens_helper(f, &node.bracket_token.span)), + path: f.fold_path(node.path), + tokens: node.tokens, + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_bare_fn_arg<F>(f: &mut F, node: BareFnArg) -> BareFnArg +where + F: Fold + ?Sized, +{ + BareFnArg { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + name: (node.name) + .map(|it| ( + f.fold_ident((it).0), + Token![:](tokens_helper(f, &(it).1.spans)), + )), + ty: f.fold_type(node.ty), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_bin_op<F>(f: &mut F, node: BinOp) -> BinOp +where + F: Fold + ?Sized, +{ + match node { + BinOp::Add(_binding_0) => { + BinOp::Add(Token![+](tokens_helper(f, &_binding_0.spans))) + } + BinOp::Sub(_binding_0) => { + BinOp::Sub(Token![-](tokens_helper(f, &_binding_0.spans))) + } + BinOp::Mul(_binding_0) => { + BinOp::Mul(Token![*](tokens_helper(f, &_binding_0.spans))) + } + BinOp::Div(_binding_0) => { + BinOp::Div(Token![/](tokens_helper(f, &_binding_0.spans))) + } + BinOp::Rem(_binding_0) => { + BinOp::Rem(Token![%](tokens_helper(f, &_binding_0.spans))) + } + BinOp::And(_binding_0) => { + BinOp::And(Token![&&](tokens_helper(f, &_binding_0.spans))) + } + BinOp::Or(_binding_0) => { + BinOp::Or(Token![||](tokens_helper(f, &_binding_0.spans))) + } + BinOp::BitXor(_binding_0) => { + BinOp::BitXor(Token![^](tokens_helper(f, &_binding_0.spans))) + } + BinOp::BitAnd(_binding_0) => { + BinOp::BitAnd(Token![&](tokens_helper(f, &_binding_0.spans))) + } + BinOp::BitOr(_binding_0) => { + BinOp::BitOr(Token![|](tokens_helper(f, &_binding_0.spans))) + } + BinOp::Shl(_binding_0) => { + BinOp::Shl(Token![<<](tokens_helper(f, &_binding_0.spans))) + } + BinOp::Shr(_binding_0) => { + BinOp::Shr(Token![>>](tokens_helper(f, &_binding_0.spans))) + } + BinOp::Eq(_binding_0) => { + BinOp::Eq(Token![==](tokens_helper(f, &_binding_0.spans))) + } + BinOp::Lt(_binding_0) => { + BinOp::Lt(Token![<](tokens_helper(f, &_binding_0.spans))) + } + BinOp::Le(_binding_0) => { + BinOp::Le(Token![<=](tokens_helper(f, &_binding_0.spans))) + } + BinOp::Ne(_binding_0) => { + 
BinOp::Ne(Token![!=](tokens_helper(f, &_binding_0.spans))) + } + BinOp::Ge(_binding_0) => { + BinOp::Ge(Token![>=](tokens_helper(f, &_binding_0.spans))) + } + BinOp::Gt(_binding_0) => { + BinOp::Gt(Token![>](tokens_helper(f, &_binding_0.spans))) + } + BinOp::AddEq(_binding_0) => { + BinOp::AddEq(Token![+=](tokens_helper(f, &_binding_0.spans))) + } + BinOp::SubEq(_binding_0) => { + BinOp::SubEq(Token![-=](tokens_helper(f, &_binding_0.spans))) + } + BinOp::MulEq(_binding_0) => { + BinOp::MulEq(Token![*=](tokens_helper(f, &_binding_0.spans))) + } + BinOp::DivEq(_binding_0) => { + BinOp::DivEq(Token![/=](tokens_helper(f, &_binding_0.spans))) + } + BinOp::RemEq(_binding_0) => { + BinOp::RemEq(Token![%=](tokens_helper(f, &_binding_0.spans))) + } + BinOp::BitXorEq(_binding_0) => { + BinOp::BitXorEq(Token![^=](tokens_helper(f, &_binding_0.spans))) + } + BinOp::BitAndEq(_binding_0) => { + BinOp::BitAndEq(Token![&=](tokens_helper(f, &_binding_0.spans))) + } + BinOp::BitOrEq(_binding_0) => { + BinOp::BitOrEq(Token![|=](tokens_helper(f, &_binding_0.spans))) + } + BinOp::ShlEq(_binding_0) => { + BinOp::ShlEq(Token![<<=](tokens_helper(f, &_binding_0.spans))) + } + BinOp::ShrEq(_binding_0) => { + BinOp::ShrEq(Token![>>=](tokens_helper(f, &_binding_0.spans))) + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_binding<F>(f: &mut F, node: Binding) -> Binding +where + F: Fold + ?Sized, +{ + Binding { + ident: f.fold_ident(node.ident), + eq_token: Token![=](tokens_helper(f, &node.eq_token.spans)), + ty: f.fold_type(node.ty), + } +} +#[cfg(feature = "full")] +pub fn fold_block<F>(f: &mut F, node: Block) -> Block +where + F: Fold + ?Sized, +{ + Block { + brace_token: Brace(tokens_helper(f, &node.brace_token.span)), + stmts: FoldHelper::lift(node.stmts, |it| f.fold_stmt(it)), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_bound_lifetimes<F>(f: &mut F, node: BoundLifetimes) -> BoundLifetimes +where + F: Fold + ?Sized, +{ + BoundLifetimes { + for_token: Token![for](tokens_helper(f, &node.for_token.span)), + lt_token: Token![<](tokens_helper(f, &node.lt_token.spans)), + lifetimes: FoldHelper::lift(node.lifetimes, |it| f.fold_lifetime_def(it)), + gt_token: Token![>](tokens_helper(f, &node.gt_token.spans)), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_const_param<F>(f: &mut F, node: ConstParam) -> ConstParam +where + F: Fold + ?Sized, +{ + ConstParam { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + const_token: Token![const](tokens_helper(f, &node.const_token.span)), + ident: f.fold_ident(node.ident), + colon_token: Token![:](tokens_helper(f, &node.colon_token.spans)), + ty: f.fold_type(node.ty), + eq_token: (node.eq_token).map(|it| Token![=](tokens_helper(f, &it.spans))), + default: (node.default).map(|it| f.fold_expr(it)), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_constraint<F>(f: &mut F, node: Constraint) -> Constraint +where + F: Fold + ?Sized, +{ + Constraint { + ident: f.fold_ident(node.ident), + colon_token: Token![:](tokens_helper(f, &node.colon_token.spans)), + bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)), + } +} +#[cfg(feature = "derive")] +pub fn fold_data<F>(f: &mut F, node: Data) -> Data +where + F: Fold + ?Sized, +{ + match node { + Data::Struct(_binding_0) => Data::Struct(f.fold_data_struct(_binding_0)), + Data::Enum(_binding_0) => Data::Enum(f.fold_data_enum(_binding_0)), + Data::Union(_binding_0) => Data::Union(f.fold_data_union(_binding_0)), + } +} 
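(Illustrative aside, not part of the vendored diff.) The Fold trait declared above exposes one fold_* method per syntax tree node, and every default method delegates to the free fold_* functions that follow, so an implementor overrides only the nodes it wants to transform while the rest of the tree is rebuilt unchanged. A minimal hypothetical sketch, assuming syn is built with the "fold", "full", "parsing", and "printing" features:

use syn::fold::Fold;
use syn::{parse_quote, Ident, ItemFn};

// Hypothetical folder that prefixes every identifier it visits.
struct Renamer;

impl Fold for Renamer {
    fn fold_ident(&mut self, i: Ident) -> Ident {
        // All other nodes fall through to the default trait methods, which
        // call the generated free fold_* functions below and recurse into children.
        Ident::new(&format!("renamed_{}", i), i.span())
    }
}

fn main() {
    let item: ItemFn = parse_quote! {
        fn demo() {
            let x = 1;
        }
    };
    let mut renamer = Renamer;
    let folded = renamer.fold_item_fn(item);
    assert_eq!(folded.sig.ident.to_string(), "renamed_demo");
}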
+#[cfg(feature = "derive")] +pub fn fold_data_enum<F>(f: &mut F, node: DataEnum) -> DataEnum +where + F: Fold + ?Sized, +{ + DataEnum { + enum_token: Token![enum](tokens_helper(f, &node.enum_token.span)), + brace_token: Brace(tokens_helper(f, &node.brace_token.span)), + variants: FoldHelper::lift(node.variants, |it| f.fold_variant(it)), + } +} +#[cfg(feature = "derive")] +pub fn fold_data_struct<F>(f: &mut F, node: DataStruct) -> DataStruct +where + F: Fold + ?Sized, +{ + DataStruct { + struct_token: Token![struct](tokens_helper(f, &node.struct_token.span)), + fields: f.fold_fields(node.fields), + semi_token: (node.semi_token).map(|it| Token![;](tokens_helper(f, &it.spans))), + } +} +#[cfg(feature = "derive")] +pub fn fold_data_union<F>(f: &mut F, node: DataUnion) -> DataUnion +where + F: Fold + ?Sized, +{ + DataUnion { + union_token: Token![union](tokens_helper(f, &node.union_token.span)), + fields: f.fold_fields_named(node.fields), + } +} +#[cfg(feature = "derive")] +pub fn fold_derive_input<F>(f: &mut F, node: DeriveInput) -> DeriveInput +where + F: Fold + ?Sized, +{ + DeriveInput { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + vis: f.fold_visibility(node.vis), + ident: f.fold_ident(node.ident), + generics: f.fold_generics(node.generics), + data: f.fold_data(node.data), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_expr<F>(f: &mut F, node: Expr) -> Expr +where + F: Fold + ?Sized, +{ + match node { + Expr::Array(_binding_0) => Expr::Array(full!(f.fold_expr_array(_binding_0))), + Expr::Assign(_binding_0) => Expr::Assign(full!(f.fold_expr_assign(_binding_0))), + Expr::AssignOp(_binding_0) => { + Expr::AssignOp(full!(f.fold_expr_assign_op(_binding_0))) + } + Expr::Async(_binding_0) => Expr::Async(full!(f.fold_expr_async(_binding_0))), + Expr::Await(_binding_0) => Expr::Await(full!(f.fold_expr_await(_binding_0))), + Expr::Binary(_binding_0) => Expr::Binary(f.fold_expr_binary(_binding_0)), + Expr::Block(_binding_0) => Expr::Block(full!(f.fold_expr_block(_binding_0))), + Expr::Box(_binding_0) => Expr::Box(full!(f.fold_expr_box(_binding_0))), + Expr::Break(_binding_0) => Expr::Break(full!(f.fold_expr_break(_binding_0))), + Expr::Call(_binding_0) => Expr::Call(f.fold_expr_call(_binding_0)), + Expr::Cast(_binding_0) => Expr::Cast(f.fold_expr_cast(_binding_0)), + Expr::Closure(_binding_0) => { + Expr::Closure(full!(f.fold_expr_closure(_binding_0))) + } + Expr::Continue(_binding_0) => { + Expr::Continue(full!(f.fold_expr_continue(_binding_0))) + } + Expr::Field(_binding_0) => Expr::Field(f.fold_expr_field(_binding_0)), + Expr::ForLoop(_binding_0) => { + Expr::ForLoop(full!(f.fold_expr_for_loop(_binding_0))) + } + Expr::Group(_binding_0) => Expr::Group(full!(f.fold_expr_group(_binding_0))), + Expr::If(_binding_0) => Expr::If(full!(f.fold_expr_if(_binding_0))), + Expr::Index(_binding_0) => Expr::Index(f.fold_expr_index(_binding_0)), + Expr::Let(_binding_0) => Expr::Let(full!(f.fold_expr_let(_binding_0))), + Expr::Lit(_binding_0) => Expr::Lit(f.fold_expr_lit(_binding_0)), + Expr::Loop(_binding_0) => Expr::Loop(full!(f.fold_expr_loop(_binding_0))), + Expr::Macro(_binding_0) => Expr::Macro(full!(f.fold_expr_macro(_binding_0))), + Expr::Match(_binding_0) => Expr::Match(full!(f.fold_expr_match(_binding_0))), + Expr::MethodCall(_binding_0) => { + Expr::MethodCall(full!(f.fold_expr_method_call(_binding_0))) + } + Expr::Paren(_binding_0) => Expr::Paren(f.fold_expr_paren(_binding_0)), + Expr::Path(_binding_0) => Expr::Path(f.fold_expr_path(_binding_0)), + 
Expr::Range(_binding_0) => Expr::Range(full!(f.fold_expr_range(_binding_0))), + Expr::Reference(_binding_0) => { + Expr::Reference(full!(f.fold_expr_reference(_binding_0))) + } + Expr::Repeat(_binding_0) => Expr::Repeat(full!(f.fold_expr_repeat(_binding_0))), + Expr::Return(_binding_0) => Expr::Return(full!(f.fold_expr_return(_binding_0))), + Expr::Struct(_binding_0) => Expr::Struct(full!(f.fold_expr_struct(_binding_0))), + Expr::Try(_binding_0) => Expr::Try(full!(f.fold_expr_try(_binding_0))), + Expr::TryBlock(_binding_0) => { + Expr::TryBlock(full!(f.fold_expr_try_block(_binding_0))) + } + Expr::Tuple(_binding_0) => Expr::Tuple(full!(f.fold_expr_tuple(_binding_0))), + Expr::Type(_binding_0) => Expr::Type(full!(f.fold_expr_type(_binding_0))), + Expr::Unary(_binding_0) => Expr::Unary(f.fold_expr_unary(_binding_0)), + Expr::Unsafe(_binding_0) => Expr::Unsafe(full!(f.fold_expr_unsafe(_binding_0))), + Expr::Verbatim(_binding_0) => Expr::Verbatim(_binding_0), + Expr::While(_binding_0) => Expr::While(full!(f.fold_expr_while(_binding_0))), + Expr::Yield(_binding_0) => Expr::Yield(full!(f.fold_expr_yield(_binding_0))), + #[cfg(syn_no_non_exhaustive)] + _ => unreachable!(), + } +} +#[cfg(feature = "full")] +pub fn fold_expr_array<F>(f: &mut F, node: ExprArray) -> ExprArray +where + F: Fold + ?Sized, +{ + ExprArray { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + bracket_token: Bracket(tokens_helper(f, &node.bracket_token.span)), + elems: FoldHelper::lift(node.elems, |it| f.fold_expr(it)), + } +} +#[cfg(feature = "full")] +pub fn fold_expr_assign<F>(f: &mut F, node: ExprAssign) -> ExprAssign +where + F: Fold + ?Sized, +{ + ExprAssign { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + left: Box::new(f.fold_expr(*node.left)), + eq_token: Token![=](tokens_helper(f, &node.eq_token.spans)), + right: Box::new(f.fold_expr(*node.right)), + } +} +#[cfg(feature = "full")] +pub fn fold_expr_assign_op<F>(f: &mut F, node: ExprAssignOp) -> ExprAssignOp +where + F: Fold + ?Sized, +{ + ExprAssignOp { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + left: Box::new(f.fold_expr(*node.left)), + op: f.fold_bin_op(node.op), + right: Box::new(f.fold_expr(*node.right)), + } +} +#[cfg(feature = "full")] +pub fn fold_expr_async<F>(f: &mut F, node: ExprAsync) -> ExprAsync +where + F: Fold + ?Sized, +{ + ExprAsync { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + async_token: Token![async](tokens_helper(f, &node.async_token.span)), + capture: (node.capture).map(|it| Token![move](tokens_helper(f, &it.span))), + block: f.fold_block(node.block), + } +} +#[cfg(feature = "full")] +pub fn fold_expr_await<F>(f: &mut F, node: ExprAwait) -> ExprAwait +where + F: Fold + ?Sized, +{ + ExprAwait { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + base: Box::new(f.fold_expr(*node.base)), + dot_token: Token![.](tokens_helper(f, &node.dot_token.spans)), + await_token: crate::token::Await(tokens_helper(f, &node.await_token.span)), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_expr_binary<F>(f: &mut F, node: ExprBinary) -> ExprBinary +where + F: Fold + ?Sized, +{ + ExprBinary { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + left: Box::new(f.fold_expr(*node.left)), + op: f.fold_bin_op(node.op), + right: Box::new(f.fold_expr(*node.right)), + } +} +#[cfg(feature = "full")] +pub fn fold_expr_block<F>(f: &mut F, node: ExprBlock) -> ExprBlock +where + F: Fold + ?Sized, +{ + ExprBlock { + attrs: 
FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + label: (node.label).map(|it| f.fold_label(it)), + block: f.fold_block(node.block), + } +} +#[cfg(feature = "full")] +pub fn fold_expr_box<F>(f: &mut F, node: ExprBox) -> ExprBox +where + F: Fold + ?Sized, +{ + ExprBox { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + box_token: Token![box](tokens_helper(f, &node.box_token.span)), + expr: Box::new(f.fold_expr(*node.expr)), + } +} +#[cfg(feature = "full")] +pub fn fold_expr_break<F>(f: &mut F, node: ExprBreak) -> ExprBreak +where + F: Fold + ?Sized, +{ + ExprBreak { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + break_token: Token![break](tokens_helper(f, &node.break_token.span)), + label: (node.label).map(|it| f.fold_lifetime(it)), + expr: (node.expr).map(|it| Box::new(f.fold_expr(*it))), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_expr_call<F>(f: &mut F, node: ExprCall) -> ExprCall +where + F: Fold + ?Sized, +{ + ExprCall { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + func: Box::new(f.fold_expr(*node.func)), + paren_token: Paren(tokens_helper(f, &node.paren_token.span)), + args: FoldHelper::lift(node.args, |it| f.fold_expr(it)), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_expr_cast<F>(f: &mut F, node: ExprCast) -> ExprCast +where + F: Fold + ?Sized, +{ + ExprCast { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + expr: Box::new(f.fold_expr(*node.expr)), + as_token: Token![as](tokens_helper(f, &node.as_token.span)), + ty: Box::new(f.fold_type(*node.ty)), + } +} +#[cfg(feature = "full")] +pub fn fold_expr_closure<F>(f: &mut F, node: ExprClosure) -> ExprClosure +where + F: Fold + ?Sized, +{ + ExprClosure { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + movability: (node.movability) + .map(|it| Token![static](tokens_helper(f, &it.span))), + asyncness: (node.asyncness).map(|it| Token![async](tokens_helper(f, &it.span))), + capture: (node.capture).map(|it| Token![move](tokens_helper(f, &it.span))), + or1_token: Token![|](tokens_helper(f, &node.or1_token.spans)), + inputs: FoldHelper::lift(node.inputs, |it| f.fold_pat(it)), + or2_token: Token![|](tokens_helper(f, &node.or2_token.spans)), + output: f.fold_return_type(node.output), + body: Box::new(f.fold_expr(*node.body)), + } +} +#[cfg(feature = "full")] +pub fn fold_expr_continue<F>(f: &mut F, node: ExprContinue) -> ExprContinue +where + F: Fold + ?Sized, +{ + ExprContinue { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + continue_token: Token![continue](tokens_helper(f, &node.continue_token.span)), + label: (node.label).map(|it| f.fold_lifetime(it)), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_expr_field<F>(f: &mut F, node: ExprField) -> ExprField +where + F: Fold + ?Sized, +{ + ExprField { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + base: Box::new(f.fold_expr(*node.base)), + dot_token: Token![.](tokens_helper(f, &node.dot_token.spans)), + member: f.fold_member(node.member), + } +} +#[cfg(feature = "full")] +pub fn fold_expr_for_loop<F>(f: &mut F, node: ExprForLoop) -> ExprForLoop +where + F: Fold + ?Sized, +{ + ExprForLoop { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + label: (node.label).map(|it| f.fold_label(it)), + for_token: Token![for](tokens_helper(f, &node.for_token.span)), + pat: f.fold_pat(node.pat), + in_token: Token![in](tokens_helper(f, &node.in_token.span)), + expr: 
Box::new(f.fold_expr(*node.expr)), + body: f.fold_block(node.body), + } +} +#[cfg(feature = "full")] +pub fn fold_expr_group<F>(f: &mut F, node: ExprGroup) -> ExprGroup +where + F: Fold + ?Sized, +{ + ExprGroup { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + group_token: Group(tokens_helper(f, &node.group_token.span)), + expr: Box::new(f.fold_expr(*node.expr)), + } +} +#[cfg(feature = "full")] +pub fn fold_expr_if<F>(f: &mut F, node: ExprIf) -> ExprIf +where + F: Fold + ?Sized, +{ + ExprIf { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + if_token: Token![if](tokens_helper(f, &node.if_token.span)), + cond: Box::new(f.fold_expr(*node.cond)), + then_branch: f.fold_block(node.then_branch), + else_branch: (node.else_branch) + .map(|it| ( + Token![else](tokens_helper(f, &(it).0.span)), + Box::new(f.fold_expr(*(it).1)), + )), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_expr_index<F>(f: &mut F, node: ExprIndex) -> ExprIndex +where + F: Fold + ?Sized, +{ + ExprIndex { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + expr: Box::new(f.fold_expr(*node.expr)), + bracket_token: Bracket(tokens_helper(f, &node.bracket_token.span)), + index: Box::new(f.fold_expr(*node.index)), + } +} +#[cfg(feature = "full")] +pub fn fold_expr_let<F>(f: &mut F, node: ExprLet) -> ExprLet +where + F: Fold + ?Sized, +{ + ExprLet { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + let_token: Token![let](tokens_helper(f, &node.let_token.span)), + pat: f.fold_pat(node.pat), + eq_token: Token![=](tokens_helper(f, &node.eq_token.spans)), + expr: Box::new(f.fold_expr(*node.expr)), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_expr_lit<F>(f: &mut F, node: ExprLit) -> ExprLit +where + F: Fold + ?Sized, +{ + ExprLit { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + lit: f.fold_lit(node.lit), + } +} +#[cfg(feature = "full")] +pub fn fold_expr_loop<F>(f: &mut F, node: ExprLoop) -> ExprLoop +where + F: Fold + ?Sized, +{ + ExprLoop { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + label: (node.label).map(|it| f.fold_label(it)), + loop_token: Token![loop](tokens_helper(f, &node.loop_token.span)), + body: f.fold_block(node.body), + } +} +#[cfg(feature = "full")] +pub fn fold_expr_macro<F>(f: &mut F, node: ExprMacro) -> ExprMacro +where + F: Fold + ?Sized, +{ + ExprMacro { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + mac: f.fold_macro(node.mac), + } +} +#[cfg(feature = "full")] +pub fn fold_expr_match<F>(f: &mut F, node: ExprMatch) -> ExprMatch +where + F: Fold + ?Sized, +{ + ExprMatch { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + match_token: Token![match](tokens_helper(f, &node.match_token.span)), + expr: Box::new(f.fold_expr(*node.expr)), + brace_token: Brace(tokens_helper(f, &node.brace_token.span)), + arms: FoldHelper::lift(node.arms, |it| f.fold_arm(it)), + } +} +#[cfg(feature = "full")] +pub fn fold_expr_method_call<F>(f: &mut F, node: ExprMethodCall) -> ExprMethodCall +where + F: Fold + ?Sized, +{ + ExprMethodCall { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + receiver: Box::new(f.fold_expr(*node.receiver)), + dot_token: Token![.](tokens_helper(f, &node.dot_token.spans)), + method: f.fold_ident(node.method), + turbofish: (node.turbofish).map(|it| f.fold_method_turbofish(it)), + paren_token: Paren(tokens_helper(f, &node.paren_token.span)), + args: FoldHelper::lift(node.args, |it| f.fold_expr(it)), 
+ } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_expr_paren<F>(f: &mut F, node: ExprParen) -> ExprParen +where + F: Fold + ?Sized, +{ + ExprParen { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + paren_token: Paren(tokens_helper(f, &node.paren_token.span)), + expr: Box::new(f.fold_expr(*node.expr)), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_expr_path<F>(f: &mut F, node: ExprPath) -> ExprPath +where + F: Fold + ?Sized, +{ + ExprPath { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + qself: (node.qself).map(|it| f.fold_qself(it)), + path: f.fold_path(node.path), + } +} +#[cfg(feature = "full")] +pub fn fold_expr_range<F>(f: &mut F, node: ExprRange) -> ExprRange +where + F: Fold + ?Sized, +{ + ExprRange { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + from: (node.from).map(|it| Box::new(f.fold_expr(*it))), + limits: f.fold_range_limits(node.limits), + to: (node.to).map(|it| Box::new(f.fold_expr(*it))), + } +} +#[cfg(feature = "full")] +pub fn fold_expr_reference<F>(f: &mut F, node: ExprReference) -> ExprReference +where + F: Fold + ?Sized, +{ + ExprReference { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + and_token: Token![&](tokens_helper(f, &node.and_token.spans)), + raw: node.raw, + mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))), + expr: Box::new(f.fold_expr(*node.expr)), + } +} +#[cfg(feature = "full")] +pub fn fold_expr_repeat<F>(f: &mut F, node: ExprRepeat) -> ExprRepeat +where + F: Fold + ?Sized, +{ + ExprRepeat { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + bracket_token: Bracket(tokens_helper(f, &node.bracket_token.span)), + expr: Box::new(f.fold_expr(*node.expr)), + semi_token: Token![;](tokens_helper(f, &node.semi_token.spans)), + len: Box::new(f.fold_expr(*node.len)), + } +} +#[cfg(feature = "full")] +pub fn fold_expr_return<F>(f: &mut F, node: ExprReturn) -> ExprReturn +where + F: Fold + ?Sized, +{ + ExprReturn { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + return_token: Token![return](tokens_helper(f, &node.return_token.span)), + expr: (node.expr).map(|it| Box::new(f.fold_expr(*it))), + } +} +#[cfg(feature = "full")] +pub fn fold_expr_struct<F>(f: &mut F, node: ExprStruct) -> ExprStruct +where + F: Fold + ?Sized, +{ + ExprStruct { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + path: f.fold_path(node.path), + brace_token: Brace(tokens_helper(f, &node.brace_token.span)), + fields: FoldHelper::lift(node.fields, |it| f.fold_field_value(it)), + dot2_token: (node.dot2_token).map(|it| Token![..](tokens_helper(f, &it.spans))), + rest: (node.rest).map(|it| Box::new(f.fold_expr(*it))), + } +} +#[cfg(feature = "full")] +pub fn fold_expr_try<F>(f: &mut F, node: ExprTry) -> ExprTry +where + F: Fold + ?Sized, +{ + ExprTry { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + expr: Box::new(f.fold_expr(*node.expr)), + question_token: Token![?](tokens_helper(f, &node.question_token.spans)), + } +} +#[cfg(feature = "full")] +pub fn fold_expr_try_block<F>(f: &mut F, node: ExprTryBlock) -> ExprTryBlock +where + F: Fold + ?Sized, +{ + ExprTryBlock { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + try_token: Token![try](tokens_helper(f, &node.try_token.span)), + block: f.fold_block(node.block), + } +} +#[cfg(feature = "full")] +pub fn fold_expr_tuple<F>(f: &mut F, node: ExprTuple) -> ExprTuple +where + F: Fold + ?Sized, +{ + ExprTuple 
{ + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + paren_token: Paren(tokens_helper(f, &node.paren_token.span)), + elems: FoldHelper::lift(node.elems, |it| f.fold_expr(it)), + } +} +#[cfg(feature = "full")] +pub fn fold_expr_type<F>(f: &mut F, node: ExprType) -> ExprType +where + F: Fold + ?Sized, +{ + ExprType { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + expr: Box::new(f.fold_expr(*node.expr)), + colon_token: Token![:](tokens_helper(f, &node.colon_token.spans)), + ty: Box::new(f.fold_type(*node.ty)), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_expr_unary<F>(f: &mut F, node: ExprUnary) -> ExprUnary +where + F: Fold + ?Sized, +{ + ExprUnary { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + op: f.fold_un_op(node.op), + expr: Box::new(f.fold_expr(*node.expr)), + } +} +#[cfg(feature = "full")] +pub fn fold_expr_unsafe<F>(f: &mut F, node: ExprUnsafe) -> ExprUnsafe +where + F: Fold + ?Sized, +{ + ExprUnsafe { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + unsafe_token: Token![unsafe](tokens_helper(f, &node.unsafe_token.span)), + block: f.fold_block(node.block), + } +} +#[cfg(feature = "full")] +pub fn fold_expr_while<F>(f: &mut F, node: ExprWhile) -> ExprWhile +where + F: Fold + ?Sized, +{ + ExprWhile { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + label: (node.label).map(|it| f.fold_label(it)), + while_token: Token![while](tokens_helper(f, &node.while_token.span)), + cond: Box::new(f.fold_expr(*node.cond)), + body: f.fold_block(node.body), + } +} +#[cfg(feature = "full")] +pub fn fold_expr_yield<F>(f: &mut F, node: ExprYield) -> ExprYield +where + F: Fold + ?Sized, +{ + ExprYield { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + yield_token: Token![yield](tokens_helper(f, &node.yield_token.span)), + expr: (node.expr).map(|it| Box::new(f.fold_expr(*it))), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_field<F>(f: &mut F, node: Field) -> Field +where + F: Fold + ?Sized, +{ + Field { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + vis: f.fold_visibility(node.vis), + ident: (node.ident).map(|it| f.fold_ident(it)), + colon_token: (node.colon_token).map(|it| Token![:](tokens_helper(f, &it.spans))), + ty: f.fold_type(node.ty), + } +} +#[cfg(feature = "full")] +pub fn fold_field_pat<F>(f: &mut F, node: FieldPat) -> FieldPat +where + F: Fold + ?Sized, +{ + FieldPat { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + member: f.fold_member(node.member), + colon_token: (node.colon_token).map(|it| Token![:](tokens_helper(f, &it.spans))), + pat: Box::new(f.fold_pat(*node.pat)), + } +} +#[cfg(feature = "full")] +pub fn fold_field_value<F>(f: &mut F, node: FieldValue) -> FieldValue +where + F: Fold + ?Sized, +{ + FieldValue { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + member: f.fold_member(node.member), + colon_token: (node.colon_token).map(|it| Token![:](tokens_helper(f, &it.spans))), + expr: f.fold_expr(node.expr), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_fields<F>(f: &mut F, node: Fields) -> Fields +where + F: Fold + ?Sized, +{ + match node { + Fields::Named(_binding_0) => Fields::Named(f.fold_fields_named(_binding_0)), + Fields::Unnamed(_binding_0) => Fields::Unnamed(f.fold_fields_unnamed(_binding_0)), + Fields::Unit => Fields::Unit, + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_fields_named<F>(f: &mut F, node: 
FieldsNamed) -> FieldsNamed +where + F: Fold + ?Sized, +{ + FieldsNamed { + brace_token: Brace(tokens_helper(f, &node.brace_token.span)), + named: FoldHelper::lift(node.named, |it| f.fold_field(it)), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_fields_unnamed<F>(f: &mut F, node: FieldsUnnamed) -> FieldsUnnamed +where + F: Fold + ?Sized, +{ + FieldsUnnamed { + paren_token: Paren(tokens_helper(f, &node.paren_token.span)), + unnamed: FoldHelper::lift(node.unnamed, |it| f.fold_field(it)), + } +} +#[cfg(feature = "full")] +pub fn fold_file<F>(f: &mut F, node: File) -> File +where + F: Fold + ?Sized, +{ + File { + shebang: node.shebang, + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + items: FoldHelper::lift(node.items, |it| f.fold_item(it)), + } +} +#[cfg(feature = "full")] +pub fn fold_fn_arg<F>(f: &mut F, node: FnArg) -> FnArg +where + F: Fold + ?Sized, +{ + match node { + FnArg::Receiver(_binding_0) => FnArg::Receiver(f.fold_receiver(_binding_0)), + FnArg::Typed(_binding_0) => FnArg::Typed(f.fold_pat_type(_binding_0)), + } +} +#[cfg(feature = "full")] +pub fn fold_foreign_item<F>(f: &mut F, node: ForeignItem) -> ForeignItem +where + F: Fold + ?Sized, +{ + match node { + ForeignItem::Fn(_binding_0) => { + ForeignItem::Fn(f.fold_foreign_item_fn(_binding_0)) + } + ForeignItem::Static(_binding_0) => { + ForeignItem::Static(f.fold_foreign_item_static(_binding_0)) + } + ForeignItem::Type(_binding_0) => { + ForeignItem::Type(f.fold_foreign_item_type(_binding_0)) + } + ForeignItem::Macro(_binding_0) => { + ForeignItem::Macro(f.fold_foreign_item_macro(_binding_0)) + } + ForeignItem::Verbatim(_binding_0) => ForeignItem::Verbatim(_binding_0), + #[cfg(syn_no_non_exhaustive)] + _ => unreachable!(), + } +} +#[cfg(feature = "full")] +pub fn fold_foreign_item_fn<F>(f: &mut F, node: ForeignItemFn) -> ForeignItemFn +where + F: Fold + ?Sized, +{ + ForeignItemFn { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + vis: f.fold_visibility(node.vis), + sig: f.fold_signature(node.sig), + semi_token: Token![;](tokens_helper(f, &node.semi_token.spans)), + } +} +#[cfg(feature = "full")] +pub fn fold_foreign_item_macro<F>(f: &mut F, node: ForeignItemMacro) -> ForeignItemMacro +where + F: Fold + ?Sized, +{ + ForeignItemMacro { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + mac: f.fold_macro(node.mac), + semi_token: (node.semi_token).map(|it| Token![;](tokens_helper(f, &it.spans))), + } +} +#[cfg(feature = "full")] +pub fn fold_foreign_item_static<F>( + f: &mut F, + node: ForeignItemStatic, +) -> ForeignItemStatic +where + F: Fold + ?Sized, +{ + ForeignItemStatic { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + vis: f.fold_visibility(node.vis), + static_token: Token![static](tokens_helper(f, &node.static_token.span)), + mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))), + ident: f.fold_ident(node.ident), + colon_token: Token![:](tokens_helper(f, &node.colon_token.spans)), + ty: Box::new(f.fold_type(*node.ty)), + semi_token: Token![;](tokens_helper(f, &node.semi_token.spans)), + } +} +#[cfg(feature = "full")] +pub fn fold_foreign_item_type<F>(f: &mut F, node: ForeignItemType) -> ForeignItemType +where + F: Fold + ?Sized, +{ + ForeignItemType { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + vis: f.fold_visibility(node.vis), + type_token: Token![type](tokens_helper(f, &node.type_token.span)), + ident: f.fold_ident(node.ident), + semi_token: Token![;](tokens_helper(f, 
&node.semi_token.spans)), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_generic_argument<F>(f: &mut F, node: GenericArgument) -> GenericArgument +where + F: Fold + ?Sized, +{ + match node { + GenericArgument::Lifetime(_binding_0) => { + GenericArgument::Lifetime(f.fold_lifetime(_binding_0)) + } + GenericArgument::Type(_binding_0) => { + GenericArgument::Type(f.fold_type(_binding_0)) + } + GenericArgument::Const(_binding_0) => { + GenericArgument::Const(f.fold_expr(_binding_0)) + } + GenericArgument::Binding(_binding_0) => { + GenericArgument::Binding(f.fold_binding(_binding_0)) + } + GenericArgument::Constraint(_binding_0) => { + GenericArgument::Constraint(f.fold_constraint(_binding_0)) + } + } +} +#[cfg(feature = "full")] +pub fn fold_generic_method_argument<F>( + f: &mut F, + node: GenericMethodArgument, +) -> GenericMethodArgument +where + F: Fold + ?Sized, +{ + match node { + GenericMethodArgument::Type(_binding_0) => { + GenericMethodArgument::Type(f.fold_type(_binding_0)) + } + GenericMethodArgument::Const(_binding_0) => { + GenericMethodArgument::Const(f.fold_expr(_binding_0)) + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_generic_param<F>(f: &mut F, node: GenericParam) -> GenericParam +where + F: Fold + ?Sized, +{ + match node { + GenericParam::Type(_binding_0) => { + GenericParam::Type(f.fold_type_param(_binding_0)) + } + GenericParam::Lifetime(_binding_0) => { + GenericParam::Lifetime(f.fold_lifetime_def(_binding_0)) + } + GenericParam::Const(_binding_0) => { + GenericParam::Const(f.fold_const_param(_binding_0)) + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_generics<F>(f: &mut F, node: Generics) -> Generics +where + F: Fold + ?Sized, +{ + Generics { + lt_token: (node.lt_token).map(|it| Token![<](tokens_helper(f, &it.spans))), + params: FoldHelper::lift(node.params, |it| f.fold_generic_param(it)), + gt_token: (node.gt_token).map(|it| Token![>](tokens_helper(f, &it.spans))), + where_clause: (node.where_clause).map(|it| f.fold_where_clause(it)), + } +} +pub fn fold_ident<F>(f: &mut F, node: Ident) -> Ident +where + F: Fold + ?Sized, +{ + let mut node = node; + let span = f.fold_span(node.span()); + node.set_span(span); + node +} +#[cfg(feature = "full")] +pub fn fold_impl_item<F>(f: &mut F, node: ImplItem) -> ImplItem +where + F: Fold + ?Sized, +{ + match node { + ImplItem::Const(_binding_0) => { + ImplItem::Const(f.fold_impl_item_const(_binding_0)) + } + ImplItem::Method(_binding_0) => { + ImplItem::Method(f.fold_impl_item_method(_binding_0)) + } + ImplItem::Type(_binding_0) => ImplItem::Type(f.fold_impl_item_type(_binding_0)), + ImplItem::Macro(_binding_0) => { + ImplItem::Macro(f.fold_impl_item_macro(_binding_0)) + } + ImplItem::Verbatim(_binding_0) => ImplItem::Verbatim(_binding_0), + #[cfg(syn_no_non_exhaustive)] + _ => unreachable!(), + } +} +#[cfg(feature = "full")] +pub fn fold_impl_item_const<F>(f: &mut F, node: ImplItemConst) -> ImplItemConst +where + F: Fold + ?Sized, +{ + ImplItemConst { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + vis: f.fold_visibility(node.vis), + defaultness: (node.defaultness) + .map(|it| Token![default](tokens_helper(f, &it.span))), + const_token: Token![const](tokens_helper(f, &node.const_token.span)), + ident: f.fold_ident(node.ident), + colon_token: Token![:](tokens_helper(f, &node.colon_token.spans)), + ty: f.fold_type(node.ty), + eq_token: Token![=](tokens_helper(f, &node.eq_token.spans)), + expr: f.fold_expr(node.expr), + semi_token: 
Token![;](tokens_helper(f, &node.semi_token.spans)), + } +} +#[cfg(feature = "full")] +pub fn fold_impl_item_macro<F>(f: &mut F, node: ImplItemMacro) -> ImplItemMacro +where + F: Fold + ?Sized, +{ + ImplItemMacro { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + mac: f.fold_macro(node.mac), + semi_token: (node.semi_token).map(|it| Token![;](tokens_helper(f, &it.spans))), + } +} +#[cfg(feature = "full")] +pub fn fold_impl_item_method<F>(f: &mut F, node: ImplItemMethod) -> ImplItemMethod +where + F: Fold + ?Sized, +{ + ImplItemMethod { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + vis: f.fold_visibility(node.vis), + defaultness: (node.defaultness) + .map(|it| Token![default](tokens_helper(f, &it.span))), + sig: f.fold_signature(node.sig), + block: f.fold_block(node.block), + } +} +#[cfg(feature = "full")] +pub fn fold_impl_item_type<F>(f: &mut F, node: ImplItemType) -> ImplItemType +where + F: Fold + ?Sized, +{ + ImplItemType { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + vis: f.fold_visibility(node.vis), + defaultness: (node.defaultness) + .map(|it| Token![default](tokens_helper(f, &it.span))), + type_token: Token![type](tokens_helper(f, &node.type_token.span)), + ident: f.fold_ident(node.ident), + generics: f.fold_generics(node.generics), + eq_token: Token![=](tokens_helper(f, &node.eq_token.spans)), + ty: f.fold_type(node.ty), + semi_token: Token![;](tokens_helper(f, &node.semi_token.spans)), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_index<F>(f: &mut F, node: Index) -> Index +where + F: Fold + ?Sized, +{ + Index { + index: node.index, + span: f.fold_span(node.span), + } +} +#[cfg(feature = "full")] +pub fn fold_item<F>(f: &mut F, node: Item) -> Item +where + F: Fold + ?Sized, +{ + match node { + Item::Const(_binding_0) => Item::Const(f.fold_item_const(_binding_0)), + Item::Enum(_binding_0) => Item::Enum(f.fold_item_enum(_binding_0)), + Item::ExternCrate(_binding_0) => { + Item::ExternCrate(f.fold_item_extern_crate(_binding_0)) + } + Item::Fn(_binding_0) => Item::Fn(f.fold_item_fn(_binding_0)), + Item::ForeignMod(_binding_0) => { + Item::ForeignMod(f.fold_item_foreign_mod(_binding_0)) + } + Item::Impl(_binding_0) => Item::Impl(f.fold_item_impl(_binding_0)), + Item::Macro(_binding_0) => Item::Macro(f.fold_item_macro(_binding_0)), + Item::Macro2(_binding_0) => Item::Macro2(f.fold_item_macro2(_binding_0)), + Item::Mod(_binding_0) => Item::Mod(f.fold_item_mod(_binding_0)), + Item::Static(_binding_0) => Item::Static(f.fold_item_static(_binding_0)), + Item::Struct(_binding_0) => Item::Struct(f.fold_item_struct(_binding_0)), + Item::Trait(_binding_0) => Item::Trait(f.fold_item_trait(_binding_0)), + Item::TraitAlias(_binding_0) => { + Item::TraitAlias(f.fold_item_trait_alias(_binding_0)) + } + Item::Type(_binding_0) => Item::Type(f.fold_item_type(_binding_0)), + Item::Union(_binding_0) => Item::Union(f.fold_item_union(_binding_0)), + Item::Use(_binding_0) => Item::Use(f.fold_item_use(_binding_0)), + Item::Verbatim(_binding_0) => Item::Verbatim(_binding_0), + #[cfg(syn_no_non_exhaustive)] + _ => unreachable!(), + } +} +#[cfg(feature = "full")] +pub fn fold_item_const<F>(f: &mut F, node: ItemConst) -> ItemConst +where + F: Fold + ?Sized, +{ + ItemConst { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + vis: f.fold_visibility(node.vis), + const_token: Token![const](tokens_helper(f, &node.const_token.span)), + ident: f.fold_ident(node.ident), + colon_token: Token![:](tokens_helper(f, 
&node.colon_token.spans)), + ty: Box::new(f.fold_type(*node.ty)), + eq_token: Token![=](tokens_helper(f, &node.eq_token.spans)), + expr: Box::new(f.fold_expr(*node.expr)), + semi_token: Token![;](tokens_helper(f, &node.semi_token.spans)), + } +} +#[cfg(feature = "full")] +pub fn fold_item_enum<F>(f: &mut F, node: ItemEnum) -> ItemEnum +where + F: Fold + ?Sized, +{ + ItemEnum { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + vis: f.fold_visibility(node.vis), + enum_token: Token![enum](tokens_helper(f, &node.enum_token.span)), + ident: f.fold_ident(node.ident), + generics: f.fold_generics(node.generics), + brace_token: Brace(tokens_helper(f, &node.brace_token.span)), + variants: FoldHelper::lift(node.variants, |it| f.fold_variant(it)), + } +} +#[cfg(feature = "full")] +pub fn fold_item_extern_crate<F>(f: &mut F, node: ItemExternCrate) -> ItemExternCrate +where + F: Fold + ?Sized, +{ + ItemExternCrate { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + vis: f.fold_visibility(node.vis), + extern_token: Token![extern](tokens_helper(f, &node.extern_token.span)), + crate_token: Token![crate](tokens_helper(f, &node.crate_token.span)), + ident: f.fold_ident(node.ident), + rename: (node.rename) + .map(|it| ( + Token![as](tokens_helper(f, &(it).0.span)), + f.fold_ident((it).1), + )), + semi_token: Token![;](tokens_helper(f, &node.semi_token.spans)), + } +} +#[cfg(feature = "full")] +pub fn fold_item_fn<F>(f: &mut F, node: ItemFn) -> ItemFn +where + F: Fold + ?Sized, +{ + ItemFn { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + vis: f.fold_visibility(node.vis), + sig: f.fold_signature(node.sig), + block: Box::new(f.fold_block(*node.block)), + } +} +#[cfg(feature = "full")] +pub fn fold_item_foreign_mod<F>(f: &mut F, node: ItemForeignMod) -> ItemForeignMod +where + F: Fold + ?Sized, +{ + ItemForeignMod { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + abi: f.fold_abi(node.abi), + brace_token: Brace(tokens_helper(f, &node.brace_token.span)), + items: FoldHelper::lift(node.items, |it| f.fold_foreign_item(it)), + } +} +#[cfg(feature = "full")] +pub fn fold_item_impl<F>(f: &mut F, node: ItemImpl) -> ItemImpl +where + F: Fold + ?Sized, +{ + ItemImpl { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + defaultness: (node.defaultness) + .map(|it| Token![default](tokens_helper(f, &it.span))), + unsafety: (node.unsafety).map(|it| Token![unsafe](tokens_helper(f, &it.span))), + impl_token: Token![impl](tokens_helper(f, &node.impl_token.span)), + generics: f.fold_generics(node.generics), + trait_: (node.trait_) + .map(|it| ( + ((it).0).map(|it| Token![!](tokens_helper(f, &it.spans))), + f.fold_path((it).1), + Token![for](tokens_helper(f, &(it).2.span)), + )), + self_ty: Box::new(f.fold_type(*node.self_ty)), + brace_token: Brace(tokens_helper(f, &node.brace_token.span)), + items: FoldHelper::lift(node.items, |it| f.fold_impl_item(it)), + } +} +#[cfg(feature = "full")] +pub fn fold_item_macro<F>(f: &mut F, node: ItemMacro) -> ItemMacro +where + F: Fold + ?Sized, +{ + ItemMacro { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + ident: (node.ident).map(|it| f.fold_ident(it)), + mac: f.fold_macro(node.mac), + semi_token: (node.semi_token).map(|it| Token![;](tokens_helper(f, &it.spans))), + } +} +#[cfg(feature = "full")] +pub fn fold_item_macro2<F>(f: &mut F, node: ItemMacro2) -> ItemMacro2 +where + F: Fold + ?Sized, +{ + ItemMacro2 { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + vis: 
f.fold_visibility(node.vis), + macro_token: Token![macro](tokens_helper(f, &node.macro_token.span)), + ident: f.fold_ident(node.ident), + rules: node.rules, + } +} +#[cfg(feature = "full")] +pub fn fold_item_mod<F>(f: &mut F, node: ItemMod) -> ItemMod +where + F: Fold + ?Sized, +{ + ItemMod { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + vis: f.fold_visibility(node.vis), + mod_token: Token![mod](tokens_helper(f, &node.mod_token.span)), + ident: f.fold_ident(node.ident), + content: (node.content) + .map(|it| ( + Brace(tokens_helper(f, &(it).0.span)), + FoldHelper::lift((it).1, |it| f.fold_item(it)), + )), + semi: (node.semi).map(|it| Token![;](tokens_helper(f, &it.spans))), + } +} +#[cfg(feature = "full")] +pub fn fold_item_static<F>(f: &mut F, node: ItemStatic) -> ItemStatic +where + F: Fold + ?Sized, +{ + ItemStatic { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + vis: f.fold_visibility(node.vis), + static_token: Token![static](tokens_helper(f, &node.static_token.span)), + mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))), + ident: f.fold_ident(node.ident), + colon_token: Token![:](tokens_helper(f, &node.colon_token.spans)), + ty: Box::new(f.fold_type(*node.ty)), + eq_token: Token![=](tokens_helper(f, &node.eq_token.spans)), + expr: Box::new(f.fold_expr(*node.expr)), + semi_token: Token![;](tokens_helper(f, &node.semi_token.spans)), + } +} +#[cfg(feature = "full")] +pub fn fold_item_struct<F>(f: &mut F, node: ItemStruct) -> ItemStruct +where + F: Fold + ?Sized, +{ + ItemStruct { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + vis: f.fold_visibility(node.vis), + struct_token: Token![struct](tokens_helper(f, &node.struct_token.span)), + ident: f.fold_ident(node.ident), + generics: f.fold_generics(node.generics), + fields: f.fold_fields(node.fields), + semi_token: (node.semi_token).map(|it| Token![;](tokens_helper(f, &it.spans))), + } +} +#[cfg(feature = "full")] +pub fn fold_item_trait<F>(f: &mut F, node: ItemTrait) -> ItemTrait +where + F: Fold + ?Sized, +{ + ItemTrait { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + vis: f.fold_visibility(node.vis), + unsafety: (node.unsafety).map(|it| Token![unsafe](tokens_helper(f, &it.span))), + auto_token: (node.auto_token).map(|it| Token![auto](tokens_helper(f, &it.span))), + trait_token: Token![trait](tokens_helper(f, &node.trait_token.span)), + ident: f.fold_ident(node.ident), + generics: f.fold_generics(node.generics), + colon_token: (node.colon_token).map(|it| Token![:](tokens_helper(f, &it.spans))), + supertraits: FoldHelper::lift( + node.supertraits, + |it| f.fold_type_param_bound(it), + ), + brace_token: Brace(tokens_helper(f, &node.brace_token.span)), + items: FoldHelper::lift(node.items, |it| f.fold_trait_item(it)), + } +} +#[cfg(feature = "full")] +pub fn fold_item_trait_alias<F>(f: &mut F, node: ItemTraitAlias) -> ItemTraitAlias +where + F: Fold + ?Sized, +{ + ItemTraitAlias { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + vis: f.fold_visibility(node.vis), + trait_token: Token![trait](tokens_helper(f, &node.trait_token.span)), + ident: f.fold_ident(node.ident), + generics: f.fold_generics(node.generics), + eq_token: Token![=](tokens_helper(f, &node.eq_token.spans)), + bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)), + semi_token: Token![;](tokens_helper(f, &node.semi_token.spans)), + } +} +#[cfg(feature = "full")] +pub fn fold_item_type<F>(f: &mut F, node: ItemType) -> ItemType 
+where + F: Fold + ?Sized, +{ + ItemType { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + vis: f.fold_visibility(node.vis), + type_token: Token![type](tokens_helper(f, &node.type_token.span)), + ident: f.fold_ident(node.ident), + generics: f.fold_generics(node.generics), + eq_token: Token![=](tokens_helper(f, &node.eq_token.spans)), + ty: Box::new(f.fold_type(*node.ty)), + semi_token: Token![;](tokens_helper(f, &node.semi_token.spans)), + } +} +#[cfg(feature = "full")] +pub fn fold_item_union<F>(f: &mut F, node: ItemUnion) -> ItemUnion +where + F: Fold + ?Sized, +{ + ItemUnion { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + vis: f.fold_visibility(node.vis), + union_token: Token![union](tokens_helper(f, &node.union_token.span)), + ident: f.fold_ident(node.ident), + generics: f.fold_generics(node.generics), + fields: f.fold_fields_named(node.fields), + } +} +#[cfg(feature = "full")] +pub fn fold_item_use<F>(f: &mut F, node: ItemUse) -> ItemUse +where + F: Fold + ?Sized, +{ + ItemUse { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + vis: f.fold_visibility(node.vis), + use_token: Token![use](tokens_helper(f, &node.use_token.span)), + leading_colon: (node.leading_colon) + .map(|it| Token![::](tokens_helper(f, &it.spans))), + tree: f.fold_use_tree(node.tree), + semi_token: Token![;](tokens_helper(f, &node.semi_token.spans)), + } +} +#[cfg(feature = "full")] +pub fn fold_label<F>(f: &mut F, node: Label) -> Label +where + F: Fold + ?Sized, +{ + Label { + name: f.fold_lifetime(node.name), + colon_token: Token![:](tokens_helper(f, &node.colon_token.spans)), + } +} +pub fn fold_lifetime<F>(f: &mut F, node: Lifetime) -> Lifetime +where + F: Fold + ?Sized, +{ + Lifetime { + apostrophe: f.fold_span(node.apostrophe), + ident: f.fold_ident(node.ident), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_lifetime_def<F>(f: &mut F, node: LifetimeDef) -> LifetimeDef +where + F: Fold + ?Sized, +{ + LifetimeDef { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + lifetime: f.fold_lifetime(node.lifetime), + colon_token: (node.colon_token).map(|it| Token![:](tokens_helper(f, &it.spans))), + bounds: FoldHelper::lift(node.bounds, |it| f.fold_lifetime(it)), + } +} +pub fn fold_lit<F>(f: &mut F, node: Lit) -> Lit +where + F: Fold + ?Sized, +{ + match node { + Lit::Str(_binding_0) => Lit::Str(f.fold_lit_str(_binding_0)), + Lit::ByteStr(_binding_0) => Lit::ByteStr(f.fold_lit_byte_str(_binding_0)), + Lit::Byte(_binding_0) => Lit::Byte(f.fold_lit_byte(_binding_0)), + Lit::Char(_binding_0) => Lit::Char(f.fold_lit_char(_binding_0)), + Lit::Int(_binding_0) => Lit::Int(f.fold_lit_int(_binding_0)), + Lit::Float(_binding_0) => Lit::Float(f.fold_lit_float(_binding_0)), + Lit::Bool(_binding_0) => Lit::Bool(f.fold_lit_bool(_binding_0)), + Lit::Verbatim(_binding_0) => Lit::Verbatim(_binding_0), + } +} +pub fn fold_lit_bool<F>(f: &mut F, node: LitBool) -> LitBool +where + F: Fold + ?Sized, +{ + LitBool { + value: node.value, + span: f.fold_span(node.span), + } +} +pub fn fold_lit_byte<F>(f: &mut F, node: LitByte) -> LitByte +where + F: Fold + ?Sized, +{ + let span = f.fold_span(node.span()); + let mut node = node; + node.set_span(span); + node +} +pub fn fold_lit_byte_str<F>(f: &mut F, node: LitByteStr) -> LitByteStr +where + F: Fold + ?Sized, +{ + let span = f.fold_span(node.span()); + let mut node = node; + node.set_span(span); + node +} +pub fn fold_lit_char<F>(f: &mut F, node: LitChar) -> LitChar +where + F: Fold + ?Sized, +{ + 
let span = f.fold_span(node.span()); + let mut node = node; + node.set_span(span); + node +} +pub fn fold_lit_float<F>(f: &mut F, node: LitFloat) -> LitFloat +where + F: Fold + ?Sized, +{ + let span = f.fold_span(node.span()); + let mut node = node; + node.set_span(span); + node +} +pub fn fold_lit_int<F>(f: &mut F, node: LitInt) -> LitInt +where + F: Fold + ?Sized, +{ + let span = f.fold_span(node.span()); + let mut node = node; + node.set_span(span); + node +} +pub fn fold_lit_str<F>(f: &mut F, node: LitStr) -> LitStr +where + F: Fold + ?Sized, +{ + let span = f.fold_span(node.span()); + let mut node = node; + node.set_span(span); + node +} +#[cfg(feature = "full")] +pub fn fold_local<F>(f: &mut F, node: Local) -> Local +where + F: Fold + ?Sized, +{ + Local { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + let_token: Token![let](tokens_helper(f, &node.let_token.span)), + pat: f.fold_pat(node.pat), + init: (node.init) + .map(|it| ( + Token![=](tokens_helper(f, &(it).0.spans)), + Box::new(f.fold_expr(*(it).1)), + )), + semi_token: Token![;](tokens_helper(f, &node.semi_token.spans)), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_macro<F>(f: &mut F, node: Macro) -> Macro +where + F: Fold + ?Sized, +{ + Macro { + path: f.fold_path(node.path), + bang_token: Token![!](tokens_helper(f, &node.bang_token.spans)), + delimiter: f.fold_macro_delimiter(node.delimiter), + tokens: node.tokens, + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_macro_delimiter<F>(f: &mut F, node: MacroDelimiter) -> MacroDelimiter +where + F: Fold + ?Sized, +{ + match node { + MacroDelimiter::Paren(_binding_0) => { + MacroDelimiter::Paren(Paren(tokens_helper(f, &_binding_0.span))) + } + MacroDelimiter::Brace(_binding_0) => { + MacroDelimiter::Brace(Brace(tokens_helper(f, &_binding_0.span))) + } + MacroDelimiter::Bracket(_binding_0) => { + MacroDelimiter::Bracket(Bracket(tokens_helper(f, &_binding_0.span))) + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_member<F>(f: &mut F, node: Member) -> Member +where + F: Fold + ?Sized, +{ + match node { + Member::Named(_binding_0) => Member::Named(f.fold_ident(_binding_0)), + Member::Unnamed(_binding_0) => Member::Unnamed(f.fold_index(_binding_0)), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_meta<F>(f: &mut F, node: Meta) -> Meta +where + F: Fold + ?Sized, +{ + match node { + Meta::Path(_binding_0) => Meta::Path(f.fold_path(_binding_0)), + Meta::List(_binding_0) => Meta::List(f.fold_meta_list(_binding_0)), + Meta::NameValue(_binding_0) => { + Meta::NameValue(f.fold_meta_name_value(_binding_0)) + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_meta_list<F>(f: &mut F, node: MetaList) -> MetaList +where + F: Fold + ?Sized, +{ + MetaList { + path: f.fold_path(node.path), + paren_token: Paren(tokens_helper(f, &node.paren_token.span)), + nested: FoldHelper::lift(node.nested, |it| f.fold_nested_meta(it)), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_meta_name_value<F>(f: &mut F, node: MetaNameValue) -> MetaNameValue +where + F: Fold + ?Sized, +{ + MetaNameValue { + path: f.fold_path(node.path), + eq_token: Token![=](tokens_helper(f, &node.eq_token.spans)), + lit: f.fold_lit(node.lit), + } +} +#[cfg(feature = "full")] +pub fn fold_method_turbofish<F>(f: &mut F, node: MethodTurbofish) -> MethodTurbofish +where + F: Fold + ?Sized, +{ + MethodTurbofish { + colon2_token: Token![::](tokens_helper(f, &node.colon2_token.spans)), + 
lt_token: Token![<](tokens_helper(f, &node.lt_token.spans)), + args: FoldHelper::lift(node.args, |it| f.fold_generic_method_argument(it)), + gt_token: Token![>](tokens_helper(f, &node.gt_token.spans)), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_nested_meta<F>(f: &mut F, node: NestedMeta) -> NestedMeta +where + F: Fold + ?Sized, +{ + match node { + NestedMeta::Meta(_binding_0) => NestedMeta::Meta(f.fold_meta(_binding_0)), + NestedMeta::Lit(_binding_0) => NestedMeta::Lit(f.fold_lit(_binding_0)), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_parenthesized_generic_arguments<F>( + f: &mut F, + node: ParenthesizedGenericArguments, +) -> ParenthesizedGenericArguments +where + F: Fold + ?Sized, +{ + ParenthesizedGenericArguments { + paren_token: Paren(tokens_helper(f, &node.paren_token.span)), + inputs: FoldHelper::lift(node.inputs, |it| f.fold_type(it)), + output: f.fold_return_type(node.output), + } +} +#[cfg(feature = "full")] +pub fn fold_pat<F>(f: &mut F, node: Pat) -> Pat +where + F: Fold + ?Sized, +{ + match node { + Pat::Box(_binding_0) => Pat::Box(f.fold_pat_box(_binding_0)), + Pat::Ident(_binding_0) => Pat::Ident(f.fold_pat_ident(_binding_0)), + Pat::Lit(_binding_0) => Pat::Lit(f.fold_pat_lit(_binding_0)), + Pat::Macro(_binding_0) => Pat::Macro(f.fold_pat_macro(_binding_0)), + Pat::Or(_binding_0) => Pat::Or(f.fold_pat_or(_binding_0)), + Pat::Path(_binding_0) => Pat::Path(f.fold_pat_path(_binding_0)), + Pat::Range(_binding_0) => Pat::Range(f.fold_pat_range(_binding_0)), + Pat::Reference(_binding_0) => Pat::Reference(f.fold_pat_reference(_binding_0)), + Pat::Rest(_binding_0) => Pat::Rest(f.fold_pat_rest(_binding_0)), + Pat::Slice(_binding_0) => Pat::Slice(f.fold_pat_slice(_binding_0)), + Pat::Struct(_binding_0) => Pat::Struct(f.fold_pat_struct(_binding_0)), + Pat::Tuple(_binding_0) => Pat::Tuple(f.fold_pat_tuple(_binding_0)), + Pat::TupleStruct(_binding_0) => { + Pat::TupleStruct(f.fold_pat_tuple_struct(_binding_0)) + } + Pat::Type(_binding_0) => Pat::Type(f.fold_pat_type(_binding_0)), + Pat::Verbatim(_binding_0) => Pat::Verbatim(_binding_0), + Pat::Wild(_binding_0) => Pat::Wild(f.fold_pat_wild(_binding_0)), + #[cfg(syn_no_non_exhaustive)] + _ => unreachable!(), + } +} +#[cfg(feature = "full")] +pub fn fold_pat_box<F>(f: &mut F, node: PatBox) -> PatBox +where + F: Fold + ?Sized, +{ + PatBox { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + box_token: Token![box](tokens_helper(f, &node.box_token.span)), + pat: Box::new(f.fold_pat(*node.pat)), + } +} +#[cfg(feature = "full")] +pub fn fold_pat_ident<F>(f: &mut F, node: PatIdent) -> PatIdent +where + F: Fold + ?Sized, +{ + PatIdent { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + by_ref: (node.by_ref).map(|it| Token![ref](tokens_helper(f, &it.span))), + mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))), + ident: f.fold_ident(node.ident), + subpat: (node.subpat) + .map(|it| ( + Token![@](tokens_helper(f, &(it).0.spans)), + Box::new(f.fold_pat(*(it).1)), + )), + } +} +#[cfg(feature = "full")] +pub fn fold_pat_lit<F>(f: &mut F, node: PatLit) -> PatLit +where + F: Fold + ?Sized, +{ + PatLit { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + expr: Box::new(f.fold_expr(*node.expr)), + } +} +#[cfg(feature = "full")] +pub fn fold_pat_macro<F>(f: &mut F, node: PatMacro) -> PatMacro +where + F: Fold + ?Sized, +{ + PatMacro { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + mac: 
f.fold_macro(node.mac), + } +} +#[cfg(feature = "full")] +pub fn fold_pat_or<F>(f: &mut F, node: PatOr) -> PatOr +where + F: Fold + ?Sized, +{ + PatOr { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + leading_vert: (node.leading_vert) + .map(|it| Token![|](tokens_helper(f, &it.spans))), + cases: FoldHelper::lift(node.cases, |it| f.fold_pat(it)), + } +} +#[cfg(feature = "full")] +pub fn fold_pat_path<F>(f: &mut F, node: PatPath) -> PatPath +where + F: Fold + ?Sized, +{ + PatPath { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + qself: (node.qself).map(|it| f.fold_qself(it)), + path: f.fold_path(node.path), + } +} +#[cfg(feature = "full")] +pub fn fold_pat_range<F>(f: &mut F, node: PatRange) -> PatRange +where + F: Fold + ?Sized, +{ + PatRange { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + lo: Box::new(f.fold_expr(*node.lo)), + limits: f.fold_range_limits(node.limits), + hi: Box::new(f.fold_expr(*node.hi)), + } +} +#[cfg(feature = "full")] +pub fn fold_pat_reference<F>(f: &mut F, node: PatReference) -> PatReference +where + F: Fold + ?Sized, +{ + PatReference { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + and_token: Token![&](tokens_helper(f, &node.and_token.spans)), + mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))), + pat: Box::new(f.fold_pat(*node.pat)), + } +} +#[cfg(feature = "full")] +pub fn fold_pat_rest<F>(f: &mut F, node: PatRest) -> PatRest +where + F: Fold + ?Sized, +{ + PatRest { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + dot2_token: Token![..](tokens_helper(f, &node.dot2_token.spans)), + } +} +#[cfg(feature = "full")] +pub fn fold_pat_slice<F>(f: &mut F, node: PatSlice) -> PatSlice +where + F: Fold + ?Sized, +{ + PatSlice { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + bracket_token: Bracket(tokens_helper(f, &node.bracket_token.span)), + elems: FoldHelper::lift(node.elems, |it| f.fold_pat(it)), + } +} +#[cfg(feature = "full")] +pub fn fold_pat_struct<F>(f: &mut F, node: PatStruct) -> PatStruct +where + F: Fold + ?Sized, +{ + PatStruct { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + path: f.fold_path(node.path), + brace_token: Brace(tokens_helper(f, &node.brace_token.span)), + fields: FoldHelper::lift(node.fields, |it| f.fold_field_pat(it)), + dot2_token: (node.dot2_token).map(|it| Token![..](tokens_helper(f, &it.spans))), + } +} +#[cfg(feature = "full")] +pub fn fold_pat_tuple<F>(f: &mut F, node: PatTuple) -> PatTuple +where + F: Fold + ?Sized, +{ + PatTuple { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + paren_token: Paren(tokens_helper(f, &node.paren_token.span)), + elems: FoldHelper::lift(node.elems, |it| f.fold_pat(it)), + } +} +#[cfg(feature = "full")] +pub fn fold_pat_tuple_struct<F>(f: &mut F, node: PatTupleStruct) -> PatTupleStruct +where + F: Fold + ?Sized, +{ + PatTupleStruct { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + path: f.fold_path(node.path), + pat: f.fold_pat_tuple(node.pat), + } +} +#[cfg(feature = "full")] +pub fn fold_pat_type<F>(f: &mut F, node: PatType) -> PatType +where + F: Fold + ?Sized, +{ + PatType { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + pat: Box::new(f.fold_pat(*node.pat)), + colon_token: Token![:](tokens_helper(f, &node.colon_token.spans)), + ty: Box::new(f.fold_type(*node.ty)), + } +} +#[cfg(feature = "full")] +pub fn fold_pat_wild<F>(f: &mut F, node: PatWild) -> PatWild +where + F: Fold + 
?Sized, +{ + PatWild { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + underscore_token: Token![_](tokens_helper(f, &node.underscore_token.spans)), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_path<F>(f: &mut F, node: Path) -> Path +where + F: Fold + ?Sized, +{ + Path { + leading_colon: (node.leading_colon) + .map(|it| Token![::](tokens_helper(f, &it.spans))), + segments: FoldHelper::lift(node.segments, |it| f.fold_path_segment(it)), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_path_arguments<F>(f: &mut F, node: PathArguments) -> PathArguments +where + F: Fold + ?Sized, +{ + match node { + PathArguments::None => PathArguments::None, + PathArguments::AngleBracketed(_binding_0) => { + PathArguments::AngleBracketed( + f.fold_angle_bracketed_generic_arguments(_binding_0), + ) + } + PathArguments::Parenthesized(_binding_0) => { + PathArguments::Parenthesized( + f.fold_parenthesized_generic_arguments(_binding_0), + ) + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_path_segment<F>(f: &mut F, node: PathSegment) -> PathSegment +where + F: Fold + ?Sized, +{ + PathSegment { + ident: f.fold_ident(node.ident), + arguments: f.fold_path_arguments(node.arguments), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_predicate_eq<F>(f: &mut F, node: PredicateEq) -> PredicateEq +where + F: Fold + ?Sized, +{ + PredicateEq { + lhs_ty: f.fold_type(node.lhs_ty), + eq_token: Token![=](tokens_helper(f, &node.eq_token.spans)), + rhs_ty: f.fold_type(node.rhs_ty), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_predicate_lifetime<F>( + f: &mut F, + node: PredicateLifetime, +) -> PredicateLifetime +where + F: Fold + ?Sized, +{ + PredicateLifetime { + lifetime: f.fold_lifetime(node.lifetime), + colon_token: Token![:](tokens_helper(f, &node.colon_token.spans)), + bounds: FoldHelper::lift(node.bounds, |it| f.fold_lifetime(it)), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_predicate_type<F>(f: &mut F, node: PredicateType) -> PredicateType +where + F: Fold + ?Sized, +{ + PredicateType { + lifetimes: (node.lifetimes).map(|it| f.fold_bound_lifetimes(it)), + bounded_ty: f.fold_type(node.bounded_ty), + colon_token: Token![:](tokens_helper(f, &node.colon_token.spans)), + bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_qself<F>(f: &mut F, node: QSelf) -> QSelf +where + F: Fold + ?Sized, +{ + QSelf { + lt_token: Token![<](tokens_helper(f, &node.lt_token.spans)), + ty: Box::new(f.fold_type(*node.ty)), + position: node.position, + as_token: (node.as_token).map(|it| Token![as](tokens_helper(f, &it.span))), + gt_token: Token![>](tokens_helper(f, &node.gt_token.spans)), + } +} +#[cfg(feature = "full")] +pub fn fold_range_limits<F>(f: &mut F, node: RangeLimits) -> RangeLimits +where + F: Fold + ?Sized, +{ + match node { + RangeLimits::HalfOpen(_binding_0) => { + RangeLimits::HalfOpen(Token![..](tokens_helper(f, &_binding_0.spans))) + } + RangeLimits::Closed(_binding_0) => { + RangeLimits::Closed(Token![..=](tokens_helper(f, &_binding_0.spans))) + } + } +} +#[cfg(feature = "full")] +pub fn fold_receiver<F>(f: &mut F, node: Receiver) -> Receiver +where + F: Fold + ?Sized, +{ + Receiver { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + reference: (node.reference) + .map(|it| ( + Token![&](tokens_helper(f, &(it).0.spans)), + ((it).1).map(|it| 
f.fold_lifetime(it)), + )), + mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))), + self_token: Token![self](tokens_helper(f, &node.self_token.span)), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_return_type<F>(f: &mut F, node: ReturnType) -> ReturnType +where + F: Fold + ?Sized, +{ + match node { + ReturnType::Default => ReturnType::Default, + ReturnType::Type(_binding_0, _binding_1) => { + ReturnType::Type( + Token![->](tokens_helper(f, &_binding_0.spans)), + Box::new(f.fold_type(*_binding_1)), + ) + } + } +} +#[cfg(feature = "full")] +pub fn fold_signature<F>(f: &mut F, node: Signature) -> Signature +where + F: Fold + ?Sized, +{ + Signature { + constness: (node.constness).map(|it| Token![const](tokens_helper(f, &it.span))), + asyncness: (node.asyncness).map(|it| Token![async](tokens_helper(f, &it.span))), + unsafety: (node.unsafety).map(|it| Token![unsafe](tokens_helper(f, &it.span))), + abi: (node.abi).map(|it| f.fold_abi(it)), + fn_token: Token![fn](tokens_helper(f, &node.fn_token.span)), + ident: f.fold_ident(node.ident), + generics: f.fold_generics(node.generics), + paren_token: Paren(tokens_helper(f, &node.paren_token.span)), + inputs: FoldHelper::lift(node.inputs, |it| f.fold_fn_arg(it)), + variadic: (node.variadic).map(|it| f.fold_variadic(it)), + output: f.fold_return_type(node.output), + } +} +pub fn fold_span<F>(f: &mut F, node: Span) -> Span +where + F: Fold + ?Sized, +{ + node +} +#[cfg(feature = "full")] +pub fn fold_stmt<F>(f: &mut F, node: Stmt) -> Stmt +where + F: Fold + ?Sized, +{ + match node { + Stmt::Local(_binding_0) => Stmt::Local(f.fold_local(_binding_0)), + Stmt::Item(_binding_0) => Stmt::Item(f.fold_item(_binding_0)), + Stmt::Expr(_binding_0) => Stmt::Expr(f.fold_expr(_binding_0)), + Stmt::Semi(_binding_0, _binding_1) => { + Stmt::Semi( + f.fold_expr(_binding_0), + Token![;](tokens_helper(f, &_binding_1.spans)), + ) + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_trait_bound<F>(f: &mut F, node: TraitBound) -> TraitBound +where + F: Fold + ?Sized, +{ + TraitBound { + paren_token: (node.paren_token).map(|it| Paren(tokens_helper(f, &it.span))), + modifier: f.fold_trait_bound_modifier(node.modifier), + lifetimes: (node.lifetimes).map(|it| f.fold_bound_lifetimes(it)), + path: f.fold_path(node.path), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_trait_bound_modifier<F>( + f: &mut F, + node: TraitBoundModifier, +) -> TraitBoundModifier +where + F: Fold + ?Sized, +{ + match node { + TraitBoundModifier::None => TraitBoundModifier::None, + TraitBoundModifier::Maybe(_binding_0) => { + TraitBoundModifier::Maybe(Token![?](tokens_helper(f, &_binding_0.spans))) + } + } +} +#[cfg(feature = "full")] +pub fn fold_trait_item<F>(f: &mut F, node: TraitItem) -> TraitItem +where + F: Fold + ?Sized, +{ + match node { + TraitItem::Const(_binding_0) => { + TraitItem::Const(f.fold_trait_item_const(_binding_0)) + } + TraitItem::Method(_binding_0) => { + TraitItem::Method(f.fold_trait_item_method(_binding_0)) + } + TraitItem::Type(_binding_0) => { + TraitItem::Type(f.fold_trait_item_type(_binding_0)) + } + TraitItem::Macro(_binding_0) => { + TraitItem::Macro(f.fold_trait_item_macro(_binding_0)) + } + TraitItem::Verbatim(_binding_0) => TraitItem::Verbatim(_binding_0), + #[cfg(syn_no_non_exhaustive)] + _ => unreachable!(), + } +} +#[cfg(feature = "full")] +pub fn fold_trait_item_const<F>(f: &mut F, node: TraitItemConst) -> TraitItemConst +where + F: Fold + ?Sized, +{ + TraitItemConst { + 
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + const_token: Token![const](tokens_helper(f, &node.const_token.span)), + ident: f.fold_ident(node.ident), + colon_token: Token![:](tokens_helper(f, &node.colon_token.spans)), + ty: f.fold_type(node.ty), + default: (node.default) + .map(|it| (Token![=](tokens_helper(f, &(it).0.spans)), f.fold_expr((it).1))), + semi_token: Token![;](tokens_helper(f, &node.semi_token.spans)), + } +} +#[cfg(feature = "full")] +pub fn fold_trait_item_macro<F>(f: &mut F, node: TraitItemMacro) -> TraitItemMacro +where + F: Fold + ?Sized, +{ + TraitItemMacro { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + mac: f.fold_macro(node.mac), + semi_token: (node.semi_token).map(|it| Token![;](tokens_helper(f, &it.spans))), + } +} +#[cfg(feature = "full")] +pub fn fold_trait_item_method<F>(f: &mut F, node: TraitItemMethod) -> TraitItemMethod +where + F: Fold + ?Sized, +{ + TraitItemMethod { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + sig: f.fold_signature(node.sig), + default: (node.default).map(|it| f.fold_block(it)), + semi_token: (node.semi_token).map(|it| Token![;](tokens_helper(f, &it.spans))), + } +} +#[cfg(feature = "full")] +pub fn fold_trait_item_type<F>(f: &mut F, node: TraitItemType) -> TraitItemType +where + F: Fold + ?Sized, +{ + TraitItemType { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + type_token: Token![type](tokens_helper(f, &node.type_token.span)), + ident: f.fold_ident(node.ident), + generics: f.fold_generics(node.generics), + colon_token: (node.colon_token).map(|it| Token![:](tokens_helper(f, &it.spans))), + bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)), + default: (node.default) + .map(|it| (Token![=](tokens_helper(f, &(it).0.spans)), f.fold_type((it).1))), + semi_token: Token![;](tokens_helper(f, &node.semi_token.spans)), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_type<F>(f: &mut F, node: Type) -> Type +where + F: Fold + ?Sized, +{ + match node { + Type::Array(_binding_0) => Type::Array(f.fold_type_array(_binding_0)), + Type::BareFn(_binding_0) => Type::BareFn(f.fold_type_bare_fn(_binding_0)), + Type::Group(_binding_0) => Type::Group(f.fold_type_group(_binding_0)), + Type::ImplTrait(_binding_0) => { + Type::ImplTrait(f.fold_type_impl_trait(_binding_0)) + } + Type::Infer(_binding_0) => Type::Infer(f.fold_type_infer(_binding_0)), + Type::Macro(_binding_0) => Type::Macro(f.fold_type_macro(_binding_0)), + Type::Never(_binding_0) => Type::Never(f.fold_type_never(_binding_0)), + Type::Paren(_binding_0) => Type::Paren(f.fold_type_paren(_binding_0)), + Type::Path(_binding_0) => Type::Path(f.fold_type_path(_binding_0)), + Type::Ptr(_binding_0) => Type::Ptr(f.fold_type_ptr(_binding_0)), + Type::Reference(_binding_0) => Type::Reference(f.fold_type_reference(_binding_0)), + Type::Slice(_binding_0) => Type::Slice(f.fold_type_slice(_binding_0)), + Type::TraitObject(_binding_0) => { + Type::TraitObject(f.fold_type_trait_object(_binding_0)) + } + Type::Tuple(_binding_0) => Type::Tuple(f.fold_type_tuple(_binding_0)), + Type::Verbatim(_binding_0) => Type::Verbatim(_binding_0), + #[cfg(syn_no_non_exhaustive)] + _ => unreachable!(), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_type_array<F>(f: &mut F, node: TypeArray) -> TypeArray +where + F: Fold + ?Sized, +{ + TypeArray { + bracket_token: Bracket(tokens_helper(f, &node.bracket_token.span)), + elem: Box::new(f.fold_type(*node.elem)), + semi_token: 
Token![;](tokens_helper(f, &node.semi_token.spans)), + len: f.fold_expr(node.len), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_type_bare_fn<F>(f: &mut F, node: TypeBareFn) -> TypeBareFn +where + F: Fold + ?Sized, +{ + TypeBareFn { + lifetimes: (node.lifetimes).map(|it| f.fold_bound_lifetimes(it)), + unsafety: (node.unsafety).map(|it| Token![unsafe](tokens_helper(f, &it.span))), + abi: (node.abi).map(|it| f.fold_abi(it)), + fn_token: Token![fn](tokens_helper(f, &node.fn_token.span)), + paren_token: Paren(tokens_helper(f, &node.paren_token.span)), + inputs: FoldHelper::lift(node.inputs, |it| f.fold_bare_fn_arg(it)), + variadic: (node.variadic).map(|it| f.fold_variadic(it)), + output: f.fold_return_type(node.output), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_type_group<F>(f: &mut F, node: TypeGroup) -> TypeGroup +where + F: Fold + ?Sized, +{ + TypeGroup { + group_token: Group(tokens_helper(f, &node.group_token.span)), + elem: Box::new(f.fold_type(*node.elem)), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_type_impl_trait<F>(f: &mut F, node: TypeImplTrait) -> TypeImplTrait +where + F: Fold + ?Sized, +{ + TypeImplTrait { + impl_token: Token![impl](tokens_helper(f, &node.impl_token.span)), + bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_type_infer<F>(f: &mut F, node: TypeInfer) -> TypeInfer +where + F: Fold + ?Sized, +{ + TypeInfer { + underscore_token: Token![_](tokens_helper(f, &node.underscore_token.spans)), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_type_macro<F>(f: &mut F, node: TypeMacro) -> TypeMacro +where + F: Fold + ?Sized, +{ + TypeMacro { + mac: f.fold_macro(node.mac), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_type_never<F>(f: &mut F, node: TypeNever) -> TypeNever +where + F: Fold + ?Sized, +{ + TypeNever { + bang_token: Token![!](tokens_helper(f, &node.bang_token.spans)), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_type_param<F>(f: &mut F, node: TypeParam) -> TypeParam +where + F: Fold + ?Sized, +{ + TypeParam { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + ident: f.fold_ident(node.ident), + colon_token: (node.colon_token).map(|it| Token![:](tokens_helper(f, &it.spans))), + bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)), + eq_token: (node.eq_token).map(|it| Token![=](tokens_helper(f, &it.spans))), + default: (node.default).map(|it| f.fold_type(it)), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_type_param_bound<F>(f: &mut F, node: TypeParamBound) -> TypeParamBound +where + F: Fold + ?Sized, +{ + match node { + TypeParamBound::Trait(_binding_0) => { + TypeParamBound::Trait(f.fold_trait_bound(_binding_0)) + } + TypeParamBound::Lifetime(_binding_0) => { + TypeParamBound::Lifetime(f.fold_lifetime(_binding_0)) + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_type_paren<F>(f: &mut F, node: TypeParen) -> TypeParen +where + F: Fold + ?Sized, +{ + TypeParen { + paren_token: Paren(tokens_helper(f, &node.paren_token.span)), + elem: Box::new(f.fold_type(*node.elem)), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_type_path<F>(f: &mut F, node: TypePath) -> TypePath +where + F: Fold + ?Sized, +{ + TypePath { + qself: (node.qself).map(|it| f.fold_qself(it)), + path: f.fold_path(node.path), + } +} +#[cfg(any(feature = 
"derive", feature = "full"))] +pub fn fold_type_ptr<F>(f: &mut F, node: TypePtr) -> TypePtr +where + F: Fold + ?Sized, +{ + TypePtr { + star_token: Token![*](tokens_helper(f, &node.star_token.spans)), + const_token: (node.const_token) + .map(|it| Token![const](tokens_helper(f, &it.span))), + mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))), + elem: Box::new(f.fold_type(*node.elem)), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_type_reference<F>(f: &mut F, node: TypeReference) -> TypeReference +where + F: Fold + ?Sized, +{ + TypeReference { + and_token: Token![&](tokens_helper(f, &node.and_token.spans)), + lifetime: (node.lifetime).map(|it| f.fold_lifetime(it)), + mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))), + elem: Box::new(f.fold_type(*node.elem)), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_type_slice<F>(f: &mut F, node: TypeSlice) -> TypeSlice +where + F: Fold + ?Sized, +{ + TypeSlice { + bracket_token: Bracket(tokens_helper(f, &node.bracket_token.span)), + elem: Box::new(f.fold_type(*node.elem)), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_type_trait_object<F>(f: &mut F, node: TypeTraitObject) -> TypeTraitObject +where + F: Fold + ?Sized, +{ + TypeTraitObject { + dyn_token: (node.dyn_token).map(|it| Token![dyn](tokens_helper(f, &it.span))), + bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_type_tuple<F>(f: &mut F, node: TypeTuple) -> TypeTuple +where + F: Fold + ?Sized, +{ + TypeTuple { + paren_token: Paren(tokens_helper(f, &node.paren_token.span)), + elems: FoldHelper::lift(node.elems, |it| f.fold_type(it)), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_un_op<F>(f: &mut F, node: UnOp) -> UnOp +where + F: Fold + ?Sized, +{ + match node { + UnOp::Deref(_binding_0) => { + UnOp::Deref(Token![*](tokens_helper(f, &_binding_0.spans))) + } + UnOp::Not(_binding_0) => { + UnOp::Not(Token![!](tokens_helper(f, &_binding_0.spans))) + } + UnOp::Neg(_binding_0) => { + UnOp::Neg(Token![-](tokens_helper(f, &_binding_0.spans))) + } + } +} +#[cfg(feature = "full")] +pub fn fold_use_glob<F>(f: &mut F, node: UseGlob) -> UseGlob +where + F: Fold + ?Sized, +{ + UseGlob { + star_token: Token![*](tokens_helper(f, &node.star_token.spans)), + } +} +#[cfg(feature = "full")] +pub fn fold_use_group<F>(f: &mut F, node: UseGroup) -> UseGroup +where + F: Fold + ?Sized, +{ + UseGroup { + brace_token: Brace(tokens_helper(f, &node.brace_token.span)), + items: FoldHelper::lift(node.items, |it| f.fold_use_tree(it)), + } +} +#[cfg(feature = "full")] +pub fn fold_use_name<F>(f: &mut F, node: UseName) -> UseName +where + F: Fold + ?Sized, +{ + UseName { + ident: f.fold_ident(node.ident), + } +} +#[cfg(feature = "full")] +pub fn fold_use_path<F>(f: &mut F, node: UsePath) -> UsePath +where + F: Fold + ?Sized, +{ + UsePath { + ident: f.fold_ident(node.ident), + colon2_token: Token![::](tokens_helper(f, &node.colon2_token.spans)), + tree: Box::new(f.fold_use_tree(*node.tree)), + } +} +#[cfg(feature = "full")] +pub fn fold_use_rename<F>(f: &mut F, node: UseRename) -> UseRename +where + F: Fold + ?Sized, +{ + UseRename { + ident: f.fold_ident(node.ident), + as_token: Token![as](tokens_helper(f, &node.as_token.span)), + rename: f.fold_ident(node.rename), + } +} +#[cfg(feature = "full")] +pub fn fold_use_tree<F>(f: &mut F, node: UseTree) -> UseTree +where + F: Fold + 
?Sized, +{ + match node { + UseTree::Path(_binding_0) => UseTree::Path(f.fold_use_path(_binding_0)), + UseTree::Name(_binding_0) => UseTree::Name(f.fold_use_name(_binding_0)), + UseTree::Rename(_binding_0) => UseTree::Rename(f.fold_use_rename(_binding_0)), + UseTree::Glob(_binding_0) => UseTree::Glob(f.fold_use_glob(_binding_0)), + UseTree::Group(_binding_0) => UseTree::Group(f.fold_use_group(_binding_0)), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_variadic<F>(f: &mut F, node: Variadic) -> Variadic +where + F: Fold + ?Sized, +{ + Variadic { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + dots: Token![...](tokens_helper(f, &node.dots.spans)), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_variant<F>(f: &mut F, node: Variant) -> Variant +where + F: Fold + ?Sized, +{ + Variant { + attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)), + ident: f.fold_ident(node.ident), + fields: f.fold_fields(node.fields), + discriminant: (node.discriminant) + .map(|it| (Token![=](tokens_helper(f, &(it).0.spans)), f.fold_expr((it).1))), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_vis_crate<F>(f: &mut F, node: VisCrate) -> VisCrate +where + F: Fold + ?Sized, +{ + VisCrate { + crate_token: Token![crate](tokens_helper(f, &node.crate_token.span)), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_vis_public<F>(f: &mut F, node: VisPublic) -> VisPublic +where + F: Fold + ?Sized, +{ + VisPublic { + pub_token: Token![pub](tokens_helper(f, &node.pub_token.span)), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_vis_restricted<F>(f: &mut F, node: VisRestricted) -> VisRestricted +where + F: Fold + ?Sized, +{ + VisRestricted { + pub_token: Token![pub](tokens_helper(f, &node.pub_token.span)), + paren_token: Paren(tokens_helper(f, &node.paren_token.span)), + in_token: (node.in_token).map(|it| Token![in](tokens_helper(f, &it.span))), + path: Box::new(f.fold_path(*node.path)), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_visibility<F>(f: &mut F, node: Visibility) -> Visibility +where + F: Fold + ?Sized, +{ + match node { + Visibility::Public(_binding_0) => { + Visibility::Public(f.fold_vis_public(_binding_0)) + } + Visibility::Crate(_binding_0) => Visibility::Crate(f.fold_vis_crate(_binding_0)), + Visibility::Restricted(_binding_0) => { + Visibility::Restricted(f.fold_vis_restricted(_binding_0)) + } + Visibility::Inherited => Visibility::Inherited, + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_where_clause<F>(f: &mut F, node: WhereClause) -> WhereClause +where + F: Fold + ?Sized, +{ + WhereClause { + where_token: Token![where](tokens_helper(f, &node.where_token.span)), + predicates: FoldHelper::lift(node.predicates, |it| f.fold_where_predicate(it)), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn fold_where_predicate<F>(f: &mut F, node: WherePredicate) -> WherePredicate +where + F: Fold + ?Sized, +{ + match node { + WherePredicate::Type(_binding_0) => { + WherePredicate::Type(f.fold_predicate_type(_binding_0)) + } + WherePredicate::Lifetime(_binding_0) => { + WherePredicate::Lifetime(f.fold_predicate_lifetime(_binding_0)) + } + WherePredicate::Eq(_binding_0) => { + WherePredicate::Eq(f.fold_predicate_eq(_binding_0)) + } + } +} diff --git a/third_party/rust/syn/src/gen/hash.rs b/third_party/rust/syn/src/gen/hash.rs new file mode 100644 index 0000000000..d0400e19d6 --- /dev/null +++ 
b/third_party/rust/syn/src/gen/hash.rs @@ -0,0 +1,2869 @@ +// This file is @generated by syn-internal-codegen. +// It is not intended for manual editing. + +#[cfg(any(feature = "derive", feature = "full"))] +use crate::tt::TokenStreamHelper; +use crate::*; +use std::hash::{Hash, Hasher}; +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for Abi { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.name.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for AngleBracketedGenericArguments { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.colon2_token.hash(state); + self.args.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for Arm { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.pat.hash(state); + self.guard.hash(state); + self.body.hash(state); + self.comma.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for AttrStyle { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + match self { + AttrStyle::Outer => { + state.write_u8(0u8); + } + AttrStyle::Inner(_) => { + state.write_u8(1u8); + } + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for Attribute { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.style.hash(state); + self.path.hash(state); + TokenStreamHelper(&self.tokens).hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for BareFnArg { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.name.hash(state); + self.ty.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for BinOp { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + match self { + BinOp::Add(_) => { + state.write_u8(0u8); + } + BinOp::Sub(_) => { + state.write_u8(1u8); + } + BinOp::Mul(_) => { + state.write_u8(2u8); + } + BinOp::Div(_) => { + state.write_u8(3u8); + } + BinOp::Rem(_) => { + state.write_u8(4u8); + } + BinOp::And(_) => { + state.write_u8(5u8); + } + BinOp::Or(_) => { + state.write_u8(6u8); + } + BinOp::BitXor(_) => { + state.write_u8(7u8); + } + BinOp::BitAnd(_) => { + state.write_u8(8u8); + } + BinOp::BitOr(_) => { + state.write_u8(9u8); + } + BinOp::Shl(_) => { + state.write_u8(10u8); + } + BinOp::Shr(_) => { + state.write_u8(11u8); + } + BinOp::Eq(_) => { + state.write_u8(12u8); + } + BinOp::Lt(_) => { + state.write_u8(13u8); + } + BinOp::Le(_) => { + state.write_u8(14u8); + } + BinOp::Ne(_) => { + state.write_u8(15u8); + } + BinOp::Ge(_) => { + state.write_u8(16u8); + } + BinOp::Gt(_) => { + state.write_u8(17u8); + } + BinOp::AddEq(_) => { + state.write_u8(18u8); + } + BinOp::SubEq(_) => { + state.write_u8(19u8); + } + BinOp::MulEq(_) => { + state.write_u8(20u8); + } + BinOp::DivEq(_) => { + state.write_u8(21u8); + } + BinOp::RemEq(_) => { + state.write_u8(22u8); + } + BinOp::BitXorEq(_) => { + state.write_u8(23u8); + } + BinOp::BitAndEq(_) => { + state.write_u8(24u8); + } + BinOp::BitOrEq(_) => { + state.write_u8(25u8); + } + BinOp::ShlEq(_) => { + state.write_u8(26u8); + } + 
BinOp::ShrEq(_) => { + state.write_u8(27u8); + } + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for Binding { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.ident.hash(state); + self.ty.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for Block { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.stmts.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for BoundLifetimes { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.lifetimes.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ConstParam { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.ident.hash(state); + self.ty.hash(state); + self.eq_token.hash(state); + self.default.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for Constraint { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.ident.hash(state); + self.bounds.hash(state); + } +} +#[cfg(feature = "derive")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for Data { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + match self { + Data::Struct(v0) => { + state.write_u8(0u8); + v0.hash(state); + } + Data::Enum(v0) => { + state.write_u8(1u8); + v0.hash(state); + } + Data::Union(v0) => { + state.write_u8(2u8); + v0.hash(state); + } + } + } +} +#[cfg(feature = "derive")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for DataEnum { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.variants.hash(state); + } +} +#[cfg(feature = "derive")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for DataStruct { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.fields.hash(state); + self.semi_token.hash(state); + } +} +#[cfg(feature = "derive")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for DataUnion { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.fields.hash(state); + } +} +#[cfg(feature = "derive")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for DeriveInput { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.vis.hash(state); + self.ident.hash(state); + self.generics.hash(state); + self.data.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for Expr { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + match self { + #[cfg(feature = "full")] + Expr::Array(v0) => { + state.write_u8(0u8); + v0.hash(state); + } + #[cfg(feature = "full")] + Expr::Assign(v0) => { + state.write_u8(1u8); + v0.hash(state); + } + #[cfg(feature = "full")] + Expr::AssignOp(v0) => { + state.write_u8(2u8); + v0.hash(state); + } + #[cfg(feature = "full")] + Expr::Async(v0) => { + state.write_u8(3u8); + v0.hash(state); + } + #[cfg(feature = "full")] + Expr::Await(v0) => { + state.write_u8(4u8); + v0.hash(state); + } + Expr::Binary(v0) => { + state.write_u8(5u8); + v0.hash(state); + } + #[cfg(feature = "full")] + Expr::Block(v0) => { + state.write_u8(6u8); + v0.hash(state); + } 
+ #[cfg(feature = "full")] + Expr::Box(v0) => { + state.write_u8(7u8); + v0.hash(state); + } + #[cfg(feature = "full")] + Expr::Break(v0) => { + state.write_u8(8u8); + v0.hash(state); + } + Expr::Call(v0) => { + state.write_u8(9u8); + v0.hash(state); + } + Expr::Cast(v0) => { + state.write_u8(10u8); + v0.hash(state); + } + #[cfg(feature = "full")] + Expr::Closure(v0) => { + state.write_u8(11u8); + v0.hash(state); + } + #[cfg(feature = "full")] + Expr::Continue(v0) => { + state.write_u8(12u8); + v0.hash(state); + } + Expr::Field(v0) => { + state.write_u8(13u8); + v0.hash(state); + } + #[cfg(feature = "full")] + Expr::ForLoop(v0) => { + state.write_u8(14u8); + v0.hash(state); + } + #[cfg(feature = "full")] + Expr::Group(v0) => { + state.write_u8(15u8); + v0.hash(state); + } + #[cfg(feature = "full")] + Expr::If(v0) => { + state.write_u8(16u8); + v0.hash(state); + } + Expr::Index(v0) => { + state.write_u8(17u8); + v0.hash(state); + } + #[cfg(feature = "full")] + Expr::Let(v0) => { + state.write_u8(18u8); + v0.hash(state); + } + Expr::Lit(v0) => { + state.write_u8(19u8); + v0.hash(state); + } + #[cfg(feature = "full")] + Expr::Loop(v0) => { + state.write_u8(20u8); + v0.hash(state); + } + #[cfg(feature = "full")] + Expr::Macro(v0) => { + state.write_u8(21u8); + v0.hash(state); + } + #[cfg(feature = "full")] + Expr::Match(v0) => { + state.write_u8(22u8); + v0.hash(state); + } + #[cfg(feature = "full")] + Expr::MethodCall(v0) => { + state.write_u8(23u8); + v0.hash(state); + } + Expr::Paren(v0) => { + state.write_u8(24u8); + v0.hash(state); + } + Expr::Path(v0) => { + state.write_u8(25u8); + v0.hash(state); + } + #[cfg(feature = "full")] + Expr::Range(v0) => { + state.write_u8(26u8); + v0.hash(state); + } + #[cfg(feature = "full")] + Expr::Reference(v0) => { + state.write_u8(27u8); + v0.hash(state); + } + #[cfg(feature = "full")] + Expr::Repeat(v0) => { + state.write_u8(28u8); + v0.hash(state); + } + #[cfg(feature = "full")] + Expr::Return(v0) => { + state.write_u8(29u8); + v0.hash(state); + } + #[cfg(feature = "full")] + Expr::Struct(v0) => { + state.write_u8(30u8); + v0.hash(state); + } + #[cfg(feature = "full")] + Expr::Try(v0) => { + state.write_u8(31u8); + v0.hash(state); + } + #[cfg(feature = "full")] + Expr::TryBlock(v0) => { + state.write_u8(32u8); + v0.hash(state); + } + #[cfg(feature = "full")] + Expr::Tuple(v0) => { + state.write_u8(33u8); + v0.hash(state); + } + #[cfg(feature = "full")] + Expr::Type(v0) => { + state.write_u8(34u8); + v0.hash(state); + } + Expr::Unary(v0) => { + state.write_u8(35u8); + v0.hash(state); + } + #[cfg(feature = "full")] + Expr::Unsafe(v0) => { + state.write_u8(36u8); + v0.hash(state); + } + Expr::Verbatim(v0) => { + state.write_u8(37u8); + TokenStreamHelper(v0).hash(state); + } + #[cfg(feature = "full")] + Expr::While(v0) => { + state.write_u8(38u8); + v0.hash(state); + } + #[cfg(feature = "full")] + Expr::Yield(v0) => { + state.write_u8(39u8); + v0.hash(state); + } + #[cfg(any(syn_no_non_exhaustive, not(feature = "full")))] + _ => unreachable!(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ExprArray { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.elems.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ExprAssign { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.left.hash(state); + self.right.hash(state); + } +} +#[cfg(feature = 
"full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ExprAssignOp { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.left.hash(state); + self.op.hash(state); + self.right.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ExprAsync { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.capture.hash(state); + self.block.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ExprAwait { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.base.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ExprBinary { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.left.hash(state); + self.op.hash(state); + self.right.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ExprBlock { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.label.hash(state); + self.block.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ExprBox { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.expr.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ExprBreak { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.label.hash(state); + self.expr.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ExprCall { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.func.hash(state); + self.args.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ExprCast { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.expr.hash(state); + self.ty.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ExprClosure { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.movability.hash(state); + self.asyncness.hash(state); + self.capture.hash(state); + self.inputs.hash(state); + self.output.hash(state); + self.body.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ExprContinue { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.label.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ExprField { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.base.hash(state); + self.member.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ExprForLoop { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.label.hash(state); + self.pat.hash(state); + self.expr.hash(state); + self.body.hash(state); + } +} 
+#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ExprGroup { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.expr.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ExprIf { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.cond.hash(state); + self.then_branch.hash(state); + self.else_branch.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ExprIndex { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.expr.hash(state); + self.index.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ExprLet { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.pat.hash(state); + self.expr.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ExprLit { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.lit.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ExprLoop { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.label.hash(state); + self.body.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ExprMacro { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.mac.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ExprMatch { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.expr.hash(state); + self.arms.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ExprMethodCall { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.receiver.hash(state); + self.method.hash(state); + self.turbofish.hash(state); + self.args.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ExprParen { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.expr.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ExprPath { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.qself.hash(state); + self.path.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ExprRange { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.from.hash(state); + self.limits.hash(state); + self.to.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ExprReference { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.mutability.hash(state); + self.expr.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ExprRepeat { + fn 
hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.expr.hash(state); + self.len.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ExprReturn { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.expr.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ExprStruct { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.path.hash(state); + self.fields.hash(state); + self.dot2_token.hash(state); + self.rest.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ExprTry { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.expr.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ExprTryBlock { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.block.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ExprTuple { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.elems.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ExprType { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.expr.hash(state); + self.ty.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ExprUnary { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.op.hash(state); + self.expr.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ExprUnsafe { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.block.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ExprWhile { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.label.hash(state); + self.cond.hash(state); + self.body.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ExprYield { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.expr.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for Field { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.vis.hash(state); + self.ident.hash(state); + self.colon_token.hash(state); + self.ty.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for FieldPat { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.member.hash(state); + self.colon_token.hash(state); + self.pat.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for FieldValue { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.member.hash(state); + self.colon_token.hash(state); + self.expr.hash(state); + } +} 
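These impls are what the crate's "extra-traits" feature surfaces (alongside the generated Eq/PartialEq impls), so syntax-tree nodes can serve directly as keys in hashed collections; spans take no part in these impls, so two nodes parsed from identical text hash and compare equal. A minimal sketch from a consumer crate's point of view, assuming syn 1.x with the "full" and "extra-traits" features enabled:

    use std::collections::HashSet;
    use syn::Expr;

    fn main() -> syn::Result<()> {
        // Structurally identical expressions; their spans differ, but spans
        // do not participate in Hash or Eq.
        let a: Expr = syn::parse_str("1 + 2")?;
        let b: Expr = syn::parse_str("1 + 2")?;

        let mut seen: HashSet<Expr> = HashSet::new();
        assert!(seen.insert(a));  // first occurrence is new
        assert!(!seen.insert(b)); // duplicate structure is deduplicated
        Ok(())
    }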
+#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for Fields { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + match self { + Fields::Named(v0) => { + state.write_u8(0u8); + v0.hash(state); + } + Fields::Unnamed(v0) => { + state.write_u8(1u8); + v0.hash(state); + } + Fields::Unit => { + state.write_u8(2u8); + } + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for FieldsNamed { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.named.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for FieldsUnnamed { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.unnamed.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for File { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.shebang.hash(state); + self.attrs.hash(state); + self.items.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for FnArg { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + match self { + FnArg::Receiver(v0) => { + state.write_u8(0u8); + v0.hash(state); + } + FnArg::Typed(v0) => { + state.write_u8(1u8); + v0.hash(state); + } + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ForeignItem { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + match self { + ForeignItem::Fn(v0) => { + state.write_u8(0u8); + v0.hash(state); + } + ForeignItem::Static(v0) => { + state.write_u8(1u8); + v0.hash(state); + } + ForeignItem::Type(v0) => { + state.write_u8(2u8); + v0.hash(state); + } + ForeignItem::Macro(v0) => { + state.write_u8(3u8); + v0.hash(state); + } + ForeignItem::Verbatim(v0) => { + state.write_u8(4u8); + TokenStreamHelper(v0).hash(state); + } + #[cfg(syn_no_non_exhaustive)] + _ => unreachable!(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ForeignItemFn { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.vis.hash(state); + self.sig.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ForeignItemMacro { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.mac.hash(state); + self.semi_token.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ForeignItemStatic { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.vis.hash(state); + self.mutability.hash(state); + self.ident.hash(state); + self.ty.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ForeignItemType { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.vis.hash(state); + self.ident.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for GenericArgument { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + match self { + GenericArgument::Lifetime(v0) => { + state.write_u8(0u8); + v0.hash(state); + } + GenericArgument::Type(v0) => { + 
state.write_u8(1u8); + v0.hash(state); + } + GenericArgument::Const(v0) => { + state.write_u8(2u8); + v0.hash(state); + } + GenericArgument::Binding(v0) => { + state.write_u8(3u8); + v0.hash(state); + } + GenericArgument::Constraint(v0) => { + state.write_u8(4u8); + v0.hash(state); + } + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for GenericMethodArgument { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + match self { + GenericMethodArgument::Type(v0) => { + state.write_u8(0u8); + v0.hash(state); + } + GenericMethodArgument::Const(v0) => { + state.write_u8(1u8); + v0.hash(state); + } + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for GenericParam { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + match self { + GenericParam::Type(v0) => { + state.write_u8(0u8); + v0.hash(state); + } + GenericParam::Lifetime(v0) => { + state.write_u8(1u8); + v0.hash(state); + } + GenericParam::Const(v0) => { + state.write_u8(2u8); + v0.hash(state); + } + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for Generics { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.lt_token.hash(state); + self.params.hash(state); + self.gt_token.hash(state); + self.where_clause.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ImplItem { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + match self { + ImplItem::Const(v0) => { + state.write_u8(0u8); + v0.hash(state); + } + ImplItem::Method(v0) => { + state.write_u8(1u8); + v0.hash(state); + } + ImplItem::Type(v0) => { + state.write_u8(2u8); + v0.hash(state); + } + ImplItem::Macro(v0) => { + state.write_u8(3u8); + v0.hash(state); + } + ImplItem::Verbatim(v0) => { + state.write_u8(4u8); + TokenStreamHelper(v0).hash(state); + } + #[cfg(syn_no_non_exhaustive)] + _ => unreachable!(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ImplItemConst { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.vis.hash(state); + self.defaultness.hash(state); + self.ident.hash(state); + self.ty.hash(state); + self.expr.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ImplItemMacro { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.mac.hash(state); + self.semi_token.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ImplItemMethod { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.vis.hash(state); + self.defaultness.hash(state); + self.sig.hash(state); + self.block.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ImplItemType { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.vis.hash(state); + self.defaultness.hash(state); + self.ident.hash(state); + self.generics.hash(state); + self.ty.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for Item { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + match self { + Item::Const(v0) => { + 
state.write_u8(0u8); + v0.hash(state); + } + Item::Enum(v0) => { + state.write_u8(1u8); + v0.hash(state); + } + Item::ExternCrate(v0) => { + state.write_u8(2u8); + v0.hash(state); + } + Item::Fn(v0) => { + state.write_u8(3u8); + v0.hash(state); + } + Item::ForeignMod(v0) => { + state.write_u8(4u8); + v0.hash(state); + } + Item::Impl(v0) => { + state.write_u8(5u8); + v0.hash(state); + } + Item::Macro(v0) => { + state.write_u8(6u8); + v0.hash(state); + } + Item::Macro2(v0) => { + state.write_u8(7u8); + v0.hash(state); + } + Item::Mod(v0) => { + state.write_u8(8u8); + v0.hash(state); + } + Item::Static(v0) => { + state.write_u8(9u8); + v0.hash(state); + } + Item::Struct(v0) => { + state.write_u8(10u8); + v0.hash(state); + } + Item::Trait(v0) => { + state.write_u8(11u8); + v0.hash(state); + } + Item::TraitAlias(v0) => { + state.write_u8(12u8); + v0.hash(state); + } + Item::Type(v0) => { + state.write_u8(13u8); + v0.hash(state); + } + Item::Union(v0) => { + state.write_u8(14u8); + v0.hash(state); + } + Item::Use(v0) => { + state.write_u8(15u8); + v0.hash(state); + } + Item::Verbatim(v0) => { + state.write_u8(16u8); + TokenStreamHelper(v0).hash(state); + } + #[cfg(syn_no_non_exhaustive)] + _ => unreachable!(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ItemConst { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.vis.hash(state); + self.ident.hash(state); + self.ty.hash(state); + self.expr.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ItemEnum { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.vis.hash(state); + self.ident.hash(state); + self.generics.hash(state); + self.variants.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ItemExternCrate { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.vis.hash(state); + self.ident.hash(state); + self.rename.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ItemFn { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.vis.hash(state); + self.sig.hash(state); + self.block.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ItemForeignMod { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.abi.hash(state); + self.items.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ItemImpl { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.defaultness.hash(state); + self.unsafety.hash(state); + self.generics.hash(state); + self.trait_.hash(state); + self.self_ty.hash(state); + self.items.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ItemMacro { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.ident.hash(state); + self.mac.hash(state); + self.semi_token.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ItemMacro2 { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.vis.hash(state); + 
self.ident.hash(state); + TokenStreamHelper(&self.rules).hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ItemMod { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.vis.hash(state); + self.ident.hash(state); + self.content.hash(state); + self.semi.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ItemStatic { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.vis.hash(state); + self.mutability.hash(state); + self.ident.hash(state); + self.ty.hash(state); + self.expr.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ItemStruct { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.vis.hash(state); + self.ident.hash(state); + self.generics.hash(state); + self.fields.hash(state); + self.semi_token.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ItemTrait { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.vis.hash(state); + self.unsafety.hash(state); + self.auto_token.hash(state); + self.ident.hash(state); + self.generics.hash(state); + self.colon_token.hash(state); + self.supertraits.hash(state); + self.items.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ItemTraitAlias { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.vis.hash(state); + self.ident.hash(state); + self.generics.hash(state); + self.bounds.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ItemType { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.vis.hash(state); + self.ident.hash(state); + self.generics.hash(state); + self.ty.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ItemUnion { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.vis.hash(state); + self.ident.hash(state); + self.generics.hash(state); + self.fields.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ItemUse { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.vis.hash(state); + self.leading_colon.hash(state); + self.tree.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for Label { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.name.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for LifetimeDef { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.lifetime.hash(state); + self.colon_token.hash(state); + self.bounds.hash(state); + } +} +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for Lit { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + match self { + Lit::Str(v0) => { + state.write_u8(0u8); + v0.hash(state); + } + Lit::ByteStr(v0) => { + state.write_u8(1u8); + v0.hash(state); + } + Lit::Byte(v0) => { + 
state.write_u8(2u8); + v0.hash(state); + } + Lit::Char(v0) => { + state.write_u8(3u8); + v0.hash(state); + } + Lit::Int(v0) => { + state.write_u8(4u8); + v0.hash(state); + } + Lit::Float(v0) => { + state.write_u8(5u8); + v0.hash(state); + } + Lit::Bool(v0) => { + state.write_u8(6u8); + v0.hash(state); + } + Lit::Verbatim(v0) => { + state.write_u8(7u8); + v0.to_string().hash(state); + } + } + } +} +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for LitBool { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.value.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for Local { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.pat.hash(state); + self.init.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for Macro { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.path.hash(state); + self.delimiter.hash(state); + TokenStreamHelper(&self.tokens).hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for MacroDelimiter { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + match self { + MacroDelimiter::Paren(_) => { + state.write_u8(0u8); + } + MacroDelimiter::Brace(_) => { + state.write_u8(1u8); + } + MacroDelimiter::Bracket(_) => { + state.write_u8(2u8); + } + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for Meta { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + match self { + Meta::Path(v0) => { + state.write_u8(0u8); + v0.hash(state); + } + Meta::List(v0) => { + state.write_u8(1u8); + v0.hash(state); + } + Meta::NameValue(v0) => { + state.write_u8(2u8); + v0.hash(state); + } + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for MetaList { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.path.hash(state); + self.nested.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for MetaNameValue { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.path.hash(state); + self.lit.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for MethodTurbofish { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.args.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for NestedMeta { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + match self { + NestedMeta::Meta(v0) => { + state.write_u8(0u8); + v0.hash(state); + } + NestedMeta::Lit(v0) => { + state.write_u8(1u8); + v0.hash(state); + } + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ParenthesizedGenericArguments { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.inputs.hash(state); + self.output.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for Pat { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + match self { + Pat::Box(v0) => { + state.write_u8(0u8); + 
v0.hash(state); + } + Pat::Ident(v0) => { + state.write_u8(1u8); + v0.hash(state); + } + Pat::Lit(v0) => { + state.write_u8(2u8); + v0.hash(state); + } + Pat::Macro(v0) => { + state.write_u8(3u8); + v0.hash(state); + } + Pat::Or(v0) => { + state.write_u8(4u8); + v0.hash(state); + } + Pat::Path(v0) => { + state.write_u8(5u8); + v0.hash(state); + } + Pat::Range(v0) => { + state.write_u8(6u8); + v0.hash(state); + } + Pat::Reference(v0) => { + state.write_u8(7u8); + v0.hash(state); + } + Pat::Rest(v0) => { + state.write_u8(8u8); + v0.hash(state); + } + Pat::Slice(v0) => { + state.write_u8(9u8); + v0.hash(state); + } + Pat::Struct(v0) => { + state.write_u8(10u8); + v0.hash(state); + } + Pat::Tuple(v0) => { + state.write_u8(11u8); + v0.hash(state); + } + Pat::TupleStruct(v0) => { + state.write_u8(12u8); + v0.hash(state); + } + Pat::Type(v0) => { + state.write_u8(13u8); + v0.hash(state); + } + Pat::Verbatim(v0) => { + state.write_u8(14u8); + TokenStreamHelper(v0).hash(state); + } + Pat::Wild(v0) => { + state.write_u8(15u8); + v0.hash(state); + } + #[cfg(syn_no_non_exhaustive)] + _ => unreachable!(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for PatBox { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.pat.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for PatIdent { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.by_ref.hash(state); + self.mutability.hash(state); + self.ident.hash(state); + self.subpat.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for PatLit { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.expr.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for PatMacro { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.mac.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for PatOr { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.leading_vert.hash(state); + self.cases.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for PatPath { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.qself.hash(state); + self.path.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for PatRange { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.lo.hash(state); + self.limits.hash(state); + self.hi.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for PatReference { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.mutability.hash(state); + self.pat.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for PatRest { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for PatSlice { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + 
self.attrs.hash(state); + self.elems.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for PatStruct { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.path.hash(state); + self.fields.hash(state); + self.dot2_token.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for PatTuple { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.elems.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for PatTupleStruct { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.path.hash(state); + self.pat.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for PatType { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.pat.hash(state); + self.ty.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for PatWild { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for Path { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.leading_colon.hash(state); + self.segments.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for PathArguments { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + match self { + PathArguments::None => { + state.write_u8(0u8); + } + PathArguments::AngleBracketed(v0) => { + state.write_u8(1u8); + v0.hash(state); + } + PathArguments::Parenthesized(v0) => { + state.write_u8(2u8); + v0.hash(state); + } + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for PathSegment { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.ident.hash(state); + self.arguments.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for PredicateEq { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.lhs_ty.hash(state); + self.rhs_ty.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for PredicateLifetime { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.lifetime.hash(state); + self.bounds.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for PredicateType { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.lifetimes.hash(state); + self.bounded_ty.hash(state); + self.bounds.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for QSelf { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.ty.hash(state); + self.position.hash(state); + self.as_token.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for RangeLimits { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + match self { + 
RangeLimits::HalfOpen(_) => { + state.write_u8(0u8); + } + RangeLimits::Closed(_) => { + state.write_u8(1u8); + } + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for Receiver { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.reference.hash(state); + self.mutability.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for ReturnType { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + match self { + ReturnType::Default => { + state.write_u8(0u8); + } + ReturnType::Type(_, v1) => { + state.write_u8(1u8); + v1.hash(state); + } + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for Signature { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.constness.hash(state); + self.asyncness.hash(state); + self.unsafety.hash(state); + self.abi.hash(state); + self.ident.hash(state); + self.generics.hash(state); + self.inputs.hash(state); + self.variadic.hash(state); + self.output.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for Stmt { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + match self { + Stmt::Local(v0) => { + state.write_u8(0u8); + v0.hash(state); + } + Stmt::Item(v0) => { + state.write_u8(1u8); + v0.hash(state); + } + Stmt::Expr(v0) => { + state.write_u8(2u8); + v0.hash(state); + } + Stmt::Semi(v0, _) => { + state.write_u8(3u8); + v0.hash(state); + } + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for TraitBound { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.paren_token.hash(state); + self.modifier.hash(state); + self.lifetimes.hash(state); + self.path.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for TraitBoundModifier { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + match self { + TraitBoundModifier::None => { + state.write_u8(0u8); + } + TraitBoundModifier::Maybe(_) => { + state.write_u8(1u8); + } + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for TraitItem { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + match self { + TraitItem::Const(v0) => { + state.write_u8(0u8); + v0.hash(state); + } + TraitItem::Method(v0) => { + state.write_u8(1u8); + v0.hash(state); + } + TraitItem::Type(v0) => { + state.write_u8(2u8); + v0.hash(state); + } + TraitItem::Macro(v0) => { + state.write_u8(3u8); + v0.hash(state); + } + TraitItem::Verbatim(v0) => { + state.write_u8(4u8); + TokenStreamHelper(v0).hash(state); + } + #[cfg(syn_no_non_exhaustive)] + _ => unreachable!(), + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for TraitItemConst { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.ident.hash(state); + self.ty.hash(state); + self.default.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for TraitItemMacro { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.mac.hash(state); + self.semi_token.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, 
doc(cfg(feature = "extra-traits")))] +impl Hash for TraitItemMethod { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.sig.hash(state); + self.default.hash(state); + self.semi_token.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for TraitItemType { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.ident.hash(state); + self.generics.hash(state); + self.colon_token.hash(state); + self.bounds.hash(state); + self.default.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for Type { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + match self { + Type::Array(v0) => { + state.write_u8(0u8); + v0.hash(state); + } + Type::BareFn(v0) => { + state.write_u8(1u8); + v0.hash(state); + } + Type::Group(v0) => { + state.write_u8(2u8); + v0.hash(state); + } + Type::ImplTrait(v0) => { + state.write_u8(3u8); + v0.hash(state); + } + Type::Infer(v0) => { + state.write_u8(4u8); + v0.hash(state); + } + Type::Macro(v0) => { + state.write_u8(5u8); + v0.hash(state); + } + Type::Never(v0) => { + state.write_u8(6u8); + v0.hash(state); + } + Type::Paren(v0) => { + state.write_u8(7u8); + v0.hash(state); + } + Type::Path(v0) => { + state.write_u8(8u8); + v0.hash(state); + } + Type::Ptr(v0) => { + state.write_u8(9u8); + v0.hash(state); + } + Type::Reference(v0) => { + state.write_u8(10u8); + v0.hash(state); + } + Type::Slice(v0) => { + state.write_u8(11u8); + v0.hash(state); + } + Type::TraitObject(v0) => { + state.write_u8(12u8); + v0.hash(state); + } + Type::Tuple(v0) => { + state.write_u8(13u8); + v0.hash(state); + } + Type::Verbatim(v0) => { + state.write_u8(14u8); + TokenStreamHelper(v0).hash(state); + } + #[cfg(syn_no_non_exhaustive)] + _ => unreachable!(), + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for TypeArray { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.elem.hash(state); + self.len.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for TypeBareFn { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.lifetimes.hash(state); + self.unsafety.hash(state); + self.abi.hash(state); + self.inputs.hash(state); + self.variadic.hash(state); + self.output.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for TypeGroup { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.elem.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for TypeImplTrait { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.bounds.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for TypeInfer { + fn hash<H>(&self, _state: &mut H) + where + H: Hasher, + {} +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for TypeMacro { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.mac.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl 
Hash for TypeNever { + fn hash<H>(&self, _state: &mut H) + where + H: Hasher, + {} +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for TypeParam { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.ident.hash(state); + self.colon_token.hash(state); + self.bounds.hash(state); + self.eq_token.hash(state); + self.default.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for TypeParamBound { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + match self { + TypeParamBound::Trait(v0) => { + state.write_u8(0u8); + v0.hash(state); + } + TypeParamBound::Lifetime(v0) => { + state.write_u8(1u8); + v0.hash(state); + } + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for TypeParen { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.elem.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for TypePath { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.qself.hash(state); + self.path.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for TypePtr { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.const_token.hash(state); + self.mutability.hash(state); + self.elem.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for TypeReference { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.lifetime.hash(state); + self.mutability.hash(state); + self.elem.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for TypeSlice { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.elem.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for TypeTraitObject { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.dyn_token.hash(state); + self.bounds.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for TypeTuple { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.elems.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for UnOp { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + match self { + UnOp::Deref(_) => { + state.write_u8(0u8); + } + UnOp::Not(_) => { + state.write_u8(1u8); + } + UnOp::Neg(_) => { + state.write_u8(2u8); + } + } + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for UseGlob { + fn hash<H>(&self, _state: &mut H) + where + H: Hasher, + {} +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for UseGroup { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.items.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for UseName { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + 
self.ident.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for UsePath { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.ident.hash(state); + self.tree.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for UseRename { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.ident.hash(state); + self.rename.hash(state); + } +} +#[cfg(feature = "full")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for UseTree { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + match self { + UseTree::Path(v0) => { + state.write_u8(0u8); + v0.hash(state); + } + UseTree::Name(v0) => { + state.write_u8(1u8); + v0.hash(state); + } + UseTree::Rename(v0) => { + state.write_u8(2u8); + v0.hash(state); + } + UseTree::Glob(v0) => { + state.write_u8(3u8); + v0.hash(state); + } + UseTree::Group(v0) => { + state.write_u8(4u8); + v0.hash(state); + } + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for Variadic { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for Variant { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.attrs.hash(state); + self.ident.hash(state); + self.fields.hash(state); + self.discriminant.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for VisCrate { + fn hash<H>(&self, _state: &mut H) + where + H: Hasher, + {} +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for VisPublic { + fn hash<H>(&self, _state: &mut H) + where + H: Hasher, + {} +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for VisRestricted { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.in_token.hash(state); + self.path.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for Visibility { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + match self { + Visibility::Public(v0) => { + state.write_u8(0u8); + v0.hash(state); + } + Visibility::Crate(v0) => { + state.write_u8(1u8); + v0.hash(state); + } + Visibility::Restricted(v0) => { + state.write_u8(2u8); + v0.hash(state); + } + Visibility::Inherited => { + state.write_u8(3u8); + } + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for WhereClause { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.predicates.hash(state); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for WherePredicate { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + match self { + WherePredicate::Type(v0) => { + state.write_u8(0u8); + v0.hash(state); + } + WherePredicate::Lifetime(v0) => { + state.write_u8(1u8); + v0.hash(state); + } + WherePredicate::Eq(v0) => { + state.write_u8(2u8); + v0.hash(state); + } + } + } +} diff --git a/third_party/rust/syn/src/gen/visit.rs b/third_party/rust/syn/src/gen/visit.rs new file mode 100644 
index 0000000000..19ddd2e726 --- /dev/null +++ b/third_party/rust/syn/src/gen/visit.rs @@ -0,0 +1,3786 @@ +// This file is @generated by syn-internal-codegen. +// It is not intended for manual editing. + +#![allow(unused_variables)] +#[cfg(any(feature = "full", feature = "derive"))] +use crate::gen::helper::visit::*; +#[cfg(any(feature = "full", feature = "derive"))] +use crate::punctuated::Punctuated; +use crate::*; +use proc_macro2::Span; +#[cfg(feature = "full")] +macro_rules! full { + ($e:expr) => { + $e + }; +} +#[cfg(all(feature = "derive", not(feature = "full")))] +macro_rules! full { + ($e:expr) => { + unreachable!() + }; +} +macro_rules! skip { + ($($tt:tt)*) => {}; +} +/// Syntax tree traversal to walk a shared borrow of a syntax tree. +/// +/// See the [module documentation] for details. +/// +/// [module documentation]: self +/// +/// *This trait is available only if Syn is built with the `"visit"` feature.* +pub trait Visit<'ast> { + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_abi(&mut self, i: &'ast Abi) { + visit_abi(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_angle_bracketed_generic_arguments( + &mut self, + i: &'ast AngleBracketedGenericArguments, + ) { + visit_angle_bracketed_generic_arguments(self, i); + } + #[cfg(feature = "full")] + fn visit_arm(&mut self, i: &'ast Arm) { + visit_arm(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_attr_style(&mut self, i: &'ast AttrStyle) { + visit_attr_style(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_attribute(&mut self, i: &'ast Attribute) { + visit_attribute(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_bare_fn_arg(&mut self, i: &'ast BareFnArg) { + visit_bare_fn_arg(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_bin_op(&mut self, i: &'ast BinOp) { + visit_bin_op(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_binding(&mut self, i: &'ast Binding) { + visit_binding(self, i); + } + #[cfg(feature = "full")] + fn visit_block(&mut self, i: &'ast Block) { + visit_block(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_bound_lifetimes(&mut self, i: &'ast BoundLifetimes) { + visit_bound_lifetimes(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_const_param(&mut self, i: &'ast ConstParam) { + visit_const_param(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_constraint(&mut self, i: &'ast Constraint) { + visit_constraint(self, i); + } + #[cfg(feature = "derive")] + fn visit_data(&mut self, i: &'ast Data) { + visit_data(self, i); + } + #[cfg(feature = "derive")] + fn visit_data_enum(&mut self, i: &'ast DataEnum) { + visit_data_enum(self, i); + } + #[cfg(feature = "derive")] + fn visit_data_struct(&mut self, i: &'ast DataStruct) { + visit_data_struct(self, i); + } + #[cfg(feature = "derive")] + fn visit_data_union(&mut self, i: &'ast DataUnion) { + visit_data_union(self, i); + } + #[cfg(feature = "derive")] + fn visit_derive_input(&mut self, i: &'ast DeriveInput) { + visit_derive_input(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_expr(&mut self, i: &'ast Expr) { + visit_expr(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_array(&mut self, i: &'ast ExprArray) { + visit_expr_array(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_assign(&mut self, i: &'ast ExprAssign) { + visit_expr_assign(self, i); + } + 
#[cfg(feature = "full")] + fn visit_expr_assign_op(&mut self, i: &'ast ExprAssignOp) { + visit_expr_assign_op(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_async(&mut self, i: &'ast ExprAsync) { + visit_expr_async(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_await(&mut self, i: &'ast ExprAwait) { + visit_expr_await(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_expr_binary(&mut self, i: &'ast ExprBinary) { + visit_expr_binary(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_block(&mut self, i: &'ast ExprBlock) { + visit_expr_block(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_box(&mut self, i: &'ast ExprBox) { + visit_expr_box(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_break(&mut self, i: &'ast ExprBreak) { + visit_expr_break(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_expr_call(&mut self, i: &'ast ExprCall) { + visit_expr_call(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_expr_cast(&mut self, i: &'ast ExprCast) { + visit_expr_cast(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_closure(&mut self, i: &'ast ExprClosure) { + visit_expr_closure(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_continue(&mut self, i: &'ast ExprContinue) { + visit_expr_continue(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_expr_field(&mut self, i: &'ast ExprField) { + visit_expr_field(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_for_loop(&mut self, i: &'ast ExprForLoop) { + visit_expr_for_loop(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_group(&mut self, i: &'ast ExprGroup) { + visit_expr_group(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_if(&mut self, i: &'ast ExprIf) { + visit_expr_if(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_expr_index(&mut self, i: &'ast ExprIndex) { + visit_expr_index(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_let(&mut self, i: &'ast ExprLet) { + visit_expr_let(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_expr_lit(&mut self, i: &'ast ExprLit) { + visit_expr_lit(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_loop(&mut self, i: &'ast ExprLoop) { + visit_expr_loop(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_macro(&mut self, i: &'ast ExprMacro) { + visit_expr_macro(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_match(&mut self, i: &'ast ExprMatch) { + visit_expr_match(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_method_call(&mut self, i: &'ast ExprMethodCall) { + visit_expr_method_call(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_expr_paren(&mut self, i: &'ast ExprParen) { + visit_expr_paren(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_expr_path(&mut self, i: &'ast ExprPath) { + visit_expr_path(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_range(&mut self, i: &'ast ExprRange) { + visit_expr_range(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_reference(&mut self, i: &'ast ExprReference) { + visit_expr_reference(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_repeat(&mut self, i: &'ast ExprRepeat) { + visit_expr_repeat(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_return(&mut self, i: &'ast ExprReturn) { + visit_expr_return(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_struct(&mut self, i: &'ast ExprStruct) { + 
visit_expr_struct(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_try(&mut self, i: &'ast ExprTry) { + visit_expr_try(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_try_block(&mut self, i: &'ast ExprTryBlock) { + visit_expr_try_block(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_tuple(&mut self, i: &'ast ExprTuple) { + visit_expr_tuple(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_type(&mut self, i: &'ast ExprType) { + visit_expr_type(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_expr_unary(&mut self, i: &'ast ExprUnary) { + visit_expr_unary(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_unsafe(&mut self, i: &'ast ExprUnsafe) { + visit_expr_unsafe(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_while(&mut self, i: &'ast ExprWhile) { + visit_expr_while(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_yield(&mut self, i: &'ast ExprYield) { + visit_expr_yield(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_field(&mut self, i: &'ast Field) { + visit_field(self, i); + } + #[cfg(feature = "full")] + fn visit_field_pat(&mut self, i: &'ast FieldPat) { + visit_field_pat(self, i); + } + #[cfg(feature = "full")] + fn visit_field_value(&mut self, i: &'ast FieldValue) { + visit_field_value(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_fields(&mut self, i: &'ast Fields) { + visit_fields(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_fields_named(&mut self, i: &'ast FieldsNamed) { + visit_fields_named(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_fields_unnamed(&mut self, i: &'ast FieldsUnnamed) { + visit_fields_unnamed(self, i); + } + #[cfg(feature = "full")] + fn visit_file(&mut self, i: &'ast File) { + visit_file(self, i); + } + #[cfg(feature = "full")] + fn visit_fn_arg(&mut self, i: &'ast FnArg) { + visit_fn_arg(self, i); + } + #[cfg(feature = "full")] + fn visit_foreign_item(&mut self, i: &'ast ForeignItem) { + visit_foreign_item(self, i); + } + #[cfg(feature = "full")] + fn visit_foreign_item_fn(&mut self, i: &'ast ForeignItemFn) { + visit_foreign_item_fn(self, i); + } + #[cfg(feature = "full")] + fn visit_foreign_item_macro(&mut self, i: &'ast ForeignItemMacro) { + visit_foreign_item_macro(self, i); + } + #[cfg(feature = "full")] + fn visit_foreign_item_static(&mut self, i: &'ast ForeignItemStatic) { + visit_foreign_item_static(self, i); + } + #[cfg(feature = "full")] + fn visit_foreign_item_type(&mut self, i: &'ast ForeignItemType) { + visit_foreign_item_type(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_generic_argument(&mut self, i: &'ast GenericArgument) { + visit_generic_argument(self, i); + } + #[cfg(feature = "full")] + fn visit_generic_method_argument(&mut self, i: &'ast GenericMethodArgument) { + visit_generic_method_argument(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_generic_param(&mut self, i: &'ast GenericParam) { + visit_generic_param(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_generics(&mut self, i: &'ast Generics) { + visit_generics(self, i); + } + fn visit_ident(&mut self, i: &'ast Ident) { + visit_ident(self, i); + } + #[cfg(feature = "full")] + fn visit_impl_item(&mut self, i: &'ast ImplItem) { + visit_impl_item(self, i); + } + #[cfg(feature = "full")] + fn visit_impl_item_const(&mut self, i: &'ast ImplItemConst) { + visit_impl_item_const(self, i); + } + 
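Each default method above simply forwards to a free function of the same name (`visit_expr`, `visit_item_fn`, and so on) that walks the node's children, so an implementor overrides only the callbacks it cares about and re-invokes the matching free function to keep descending. A minimal sketch of that usage from a consumer crate, assuming syn 1.x with the "full" and "visit" features (the `MethodCallCounter` type is made up for illustration):

    use syn::visit::{self, Visit};
    use syn::{ExprMethodCall, File};

    // Counts every method-call expression in a parsed source file.
    struct MethodCallCounter {
        count: usize,
    }

    impl<'ast> Visit<'ast> for MethodCallCounter {
        fn visit_expr_method_call(&mut self, node: &'ast ExprMethodCall) {
            self.count += 1;
            // Delegate to the free function so nested expressions are still visited.
            visit::visit_expr_method_call(self, node);
        }
    }

    fn main() -> syn::Result<()> {
        let file: File = syn::parse_str("fn f() { \"a\".to_string().len(); }")?;
        let mut counter = MethodCallCounter { count: 0 };
        counter.visit_file(&file);
        assert_eq!(counter.count, 2); // `.to_string()` and `.len()`
        Ok(())
    }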
#[cfg(feature = "full")] + fn visit_impl_item_macro(&mut self, i: &'ast ImplItemMacro) { + visit_impl_item_macro(self, i); + } + #[cfg(feature = "full")] + fn visit_impl_item_method(&mut self, i: &'ast ImplItemMethod) { + visit_impl_item_method(self, i); + } + #[cfg(feature = "full")] + fn visit_impl_item_type(&mut self, i: &'ast ImplItemType) { + visit_impl_item_type(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_index(&mut self, i: &'ast Index) { + visit_index(self, i); + } + #[cfg(feature = "full")] + fn visit_item(&mut self, i: &'ast Item) { + visit_item(self, i); + } + #[cfg(feature = "full")] + fn visit_item_const(&mut self, i: &'ast ItemConst) { + visit_item_const(self, i); + } + #[cfg(feature = "full")] + fn visit_item_enum(&mut self, i: &'ast ItemEnum) { + visit_item_enum(self, i); + } + #[cfg(feature = "full")] + fn visit_item_extern_crate(&mut self, i: &'ast ItemExternCrate) { + visit_item_extern_crate(self, i); + } + #[cfg(feature = "full")] + fn visit_item_fn(&mut self, i: &'ast ItemFn) { + visit_item_fn(self, i); + } + #[cfg(feature = "full")] + fn visit_item_foreign_mod(&mut self, i: &'ast ItemForeignMod) { + visit_item_foreign_mod(self, i); + } + #[cfg(feature = "full")] + fn visit_item_impl(&mut self, i: &'ast ItemImpl) { + visit_item_impl(self, i); + } + #[cfg(feature = "full")] + fn visit_item_macro(&mut self, i: &'ast ItemMacro) { + visit_item_macro(self, i); + } + #[cfg(feature = "full")] + fn visit_item_macro2(&mut self, i: &'ast ItemMacro2) { + visit_item_macro2(self, i); + } + #[cfg(feature = "full")] + fn visit_item_mod(&mut self, i: &'ast ItemMod) { + visit_item_mod(self, i); + } + #[cfg(feature = "full")] + fn visit_item_static(&mut self, i: &'ast ItemStatic) { + visit_item_static(self, i); + } + #[cfg(feature = "full")] + fn visit_item_struct(&mut self, i: &'ast ItemStruct) { + visit_item_struct(self, i); + } + #[cfg(feature = "full")] + fn visit_item_trait(&mut self, i: &'ast ItemTrait) { + visit_item_trait(self, i); + } + #[cfg(feature = "full")] + fn visit_item_trait_alias(&mut self, i: &'ast ItemTraitAlias) { + visit_item_trait_alias(self, i); + } + #[cfg(feature = "full")] + fn visit_item_type(&mut self, i: &'ast ItemType) { + visit_item_type(self, i); + } + #[cfg(feature = "full")] + fn visit_item_union(&mut self, i: &'ast ItemUnion) { + visit_item_union(self, i); + } + #[cfg(feature = "full")] + fn visit_item_use(&mut self, i: &'ast ItemUse) { + visit_item_use(self, i); + } + #[cfg(feature = "full")] + fn visit_label(&mut self, i: &'ast Label) { + visit_label(self, i); + } + fn visit_lifetime(&mut self, i: &'ast Lifetime) { + visit_lifetime(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_lifetime_def(&mut self, i: &'ast LifetimeDef) { + visit_lifetime_def(self, i); + } + fn visit_lit(&mut self, i: &'ast Lit) { + visit_lit(self, i); + } + fn visit_lit_bool(&mut self, i: &'ast LitBool) { + visit_lit_bool(self, i); + } + fn visit_lit_byte(&mut self, i: &'ast LitByte) { + visit_lit_byte(self, i); + } + fn visit_lit_byte_str(&mut self, i: &'ast LitByteStr) { + visit_lit_byte_str(self, i); + } + fn visit_lit_char(&mut self, i: &'ast LitChar) { + visit_lit_char(self, i); + } + fn visit_lit_float(&mut self, i: &'ast LitFloat) { + visit_lit_float(self, i); + } + fn visit_lit_int(&mut self, i: &'ast LitInt) { + visit_lit_int(self, i); + } + fn visit_lit_str(&mut self, i: &'ast LitStr) { + visit_lit_str(self, i); + } + #[cfg(feature = "full")] + fn visit_local(&mut self, i: &'ast Local) { + visit_local(self, 
i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_macro(&mut self, i: &'ast Macro) { + visit_macro(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_macro_delimiter(&mut self, i: &'ast MacroDelimiter) { + visit_macro_delimiter(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_member(&mut self, i: &'ast Member) { + visit_member(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_meta(&mut self, i: &'ast Meta) { + visit_meta(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_meta_list(&mut self, i: &'ast MetaList) { + visit_meta_list(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_meta_name_value(&mut self, i: &'ast MetaNameValue) { + visit_meta_name_value(self, i); + } + #[cfg(feature = "full")] + fn visit_method_turbofish(&mut self, i: &'ast MethodTurbofish) { + visit_method_turbofish(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_nested_meta(&mut self, i: &'ast NestedMeta) { + visit_nested_meta(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_parenthesized_generic_arguments( + &mut self, + i: &'ast ParenthesizedGenericArguments, + ) { + visit_parenthesized_generic_arguments(self, i); + } + #[cfg(feature = "full")] + fn visit_pat(&mut self, i: &'ast Pat) { + visit_pat(self, i); + } + #[cfg(feature = "full")] + fn visit_pat_box(&mut self, i: &'ast PatBox) { + visit_pat_box(self, i); + } + #[cfg(feature = "full")] + fn visit_pat_ident(&mut self, i: &'ast PatIdent) { + visit_pat_ident(self, i); + } + #[cfg(feature = "full")] + fn visit_pat_lit(&mut self, i: &'ast PatLit) { + visit_pat_lit(self, i); + } + #[cfg(feature = "full")] + fn visit_pat_macro(&mut self, i: &'ast PatMacro) { + visit_pat_macro(self, i); + } + #[cfg(feature = "full")] + fn visit_pat_or(&mut self, i: &'ast PatOr) { + visit_pat_or(self, i); + } + #[cfg(feature = "full")] + fn visit_pat_path(&mut self, i: &'ast PatPath) { + visit_pat_path(self, i); + } + #[cfg(feature = "full")] + fn visit_pat_range(&mut self, i: &'ast PatRange) { + visit_pat_range(self, i); + } + #[cfg(feature = "full")] + fn visit_pat_reference(&mut self, i: &'ast PatReference) { + visit_pat_reference(self, i); + } + #[cfg(feature = "full")] + fn visit_pat_rest(&mut self, i: &'ast PatRest) { + visit_pat_rest(self, i); + } + #[cfg(feature = "full")] + fn visit_pat_slice(&mut self, i: &'ast PatSlice) { + visit_pat_slice(self, i); + } + #[cfg(feature = "full")] + fn visit_pat_struct(&mut self, i: &'ast PatStruct) { + visit_pat_struct(self, i); + } + #[cfg(feature = "full")] + fn visit_pat_tuple(&mut self, i: &'ast PatTuple) { + visit_pat_tuple(self, i); + } + #[cfg(feature = "full")] + fn visit_pat_tuple_struct(&mut self, i: &'ast PatTupleStruct) { + visit_pat_tuple_struct(self, i); + } + #[cfg(feature = "full")] + fn visit_pat_type(&mut self, i: &'ast PatType) { + visit_pat_type(self, i); + } + #[cfg(feature = "full")] + fn visit_pat_wild(&mut self, i: &'ast PatWild) { + visit_pat_wild(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_path(&mut self, i: &'ast Path) { + visit_path(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_path_arguments(&mut self, i: &'ast PathArguments) { + visit_path_arguments(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_path_segment(&mut self, i: &'ast PathSegment) { + visit_path_segment(self, i); + } + #[cfg(any(feature = "derive", 
feature = "full"))] + fn visit_predicate_eq(&mut self, i: &'ast PredicateEq) { + visit_predicate_eq(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_predicate_lifetime(&mut self, i: &'ast PredicateLifetime) { + visit_predicate_lifetime(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_predicate_type(&mut self, i: &'ast PredicateType) { + visit_predicate_type(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_qself(&mut self, i: &'ast QSelf) { + visit_qself(self, i); + } + #[cfg(feature = "full")] + fn visit_range_limits(&mut self, i: &'ast RangeLimits) { + visit_range_limits(self, i); + } + #[cfg(feature = "full")] + fn visit_receiver(&mut self, i: &'ast Receiver) { + visit_receiver(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_return_type(&mut self, i: &'ast ReturnType) { + visit_return_type(self, i); + } + #[cfg(feature = "full")] + fn visit_signature(&mut self, i: &'ast Signature) { + visit_signature(self, i); + } + fn visit_span(&mut self, i: &Span) { + visit_span(self, i); + } + #[cfg(feature = "full")] + fn visit_stmt(&mut self, i: &'ast Stmt) { + visit_stmt(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_trait_bound(&mut self, i: &'ast TraitBound) { + visit_trait_bound(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_trait_bound_modifier(&mut self, i: &'ast TraitBoundModifier) { + visit_trait_bound_modifier(self, i); + } + #[cfg(feature = "full")] + fn visit_trait_item(&mut self, i: &'ast TraitItem) { + visit_trait_item(self, i); + } + #[cfg(feature = "full")] + fn visit_trait_item_const(&mut self, i: &'ast TraitItemConst) { + visit_trait_item_const(self, i); + } + #[cfg(feature = "full")] + fn visit_trait_item_macro(&mut self, i: &'ast TraitItemMacro) { + visit_trait_item_macro(self, i); + } + #[cfg(feature = "full")] + fn visit_trait_item_method(&mut self, i: &'ast TraitItemMethod) { + visit_trait_item_method(self, i); + } + #[cfg(feature = "full")] + fn visit_trait_item_type(&mut self, i: &'ast TraitItemType) { + visit_trait_item_type(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_type(&mut self, i: &'ast Type) { + visit_type(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_type_array(&mut self, i: &'ast TypeArray) { + visit_type_array(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_type_bare_fn(&mut self, i: &'ast TypeBareFn) { + visit_type_bare_fn(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_type_group(&mut self, i: &'ast TypeGroup) { + visit_type_group(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_type_impl_trait(&mut self, i: &'ast TypeImplTrait) { + visit_type_impl_trait(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_type_infer(&mut self, i: &'ast TypeInfer) { + visit_type_infer(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_type_macro(&mut self, i: &'ast TypeMacro) { + visit_type_macro(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_type_never(&mut self, i: &'ast TypeNever) { + visit_type_never(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_type_param(&mut self, i: &'ast TypeParam) { + visit_type_param(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_type_param_bound(&mut self, i: &'ast TypeParamBound) { + 
visit_type_param_bound(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_type_paren(&mut self, i: &'ast TypeParen) { + visit_type_paren(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_type_path(&mut self, i: &'ast TypePath) { + visit_type_path(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_type_ptr(&mut self, i: &'ast TypePtr) { + visit_type_ptr(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_type_reference(&mut self, i: &'ast TypeReference) { + visit_type_reference(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_type_slice(&mut self, i: &'ast TypeSlice) { + visit_type_slice(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_type_trait_object(&mut self, i: &'ast TypeTraitObject) { + visit_type_trait_object(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_type_tuple(&mut self, i: &'ast TypeTuple) { + visit_type_tuple(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_un_op(&mut self, i: &'ast UnOp) { + visit_un_op(self, i); + } + #[cfg(feature = "full")] + fn visit_use_glob(&mut self, i: &'ast UseGlob) { + visit_use_glob(self, i); + } + #[cfg(feature = "full")] + fn visit_use_group(&mut self, i: &'ast UseGroup) { + visit_use_group(self, i); + } + #[cfg(feature = "full")] + fn visit_use_name(&mut self, i: &'ast UseName) { + visit_use_name(self, i); + } + #[cfg(feature = "full")] + fn visit_use_path(&mut self, i: &'ast UsePath) { + visit_use_path(self, i); + } + #[cfg(feature = "full")] + fn visit_use_rename(&mut self, i: &'ast UseRename) { + visit_use_rename(self, i); + } + #[cfg(feature = "full")] + fn visit_use_tree(&mut self, i: &'ast UseTree) { + visit_use_tree(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_variadic(&mut self, i: &'ast Variadic) { + visit_variadic(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_variant(&mut self, i: &'ast Variant) { + visit_variant(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_vis_crate(&mut self, i: &'ast VisCrate) { + visit_vis_crate(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_vis_public(&mut self, i: &'ast VisPublic) { + visit_vis_public(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_vis_restricted(&mut self, i: &'ast VisRestricted) { + visit_vis_restricted(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_visibility(&mut self, i: &'ast Visibility) { + visit_visibility(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_where_clause(&mut self, i: &'ast WhereClause) { + visit_where_clause(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_where_predicate(&mut self, i: &'ast WherePredicate) { + visit_where_predicate(self, i); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_abi<'ast, V>(v: &mut V, node: &'ast Abi) +where + V: Visit<'ast> + ?Sized, +{ + tokens_helper(v, &node.extern_token.span); + if let Some(it) = &node.name { + v.visit_lit_str(it); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_angle_bracketed_generic_arguments<'ast, V>( + v: &mut V, + node: &'ast AngleBracketedGenericArguments, +) +where + V: Visit<'ast> + ?Sized, +{ + if let Some(it) = &node.colon2_token { + tokens_helper(v, &it.spans); + } + tokens_helper(v, &node.lt_token.spans); + for el 
in Punctuated::pairs(&node.args) { + let (it, p) = el.into_tuple(); + v.visit_generic_argument(it); + if let Some(p) = p { + tokens_helper(v, &p.spans); + } + } + tokens_helper(v, &node.gt_token.spans); +} +#[cfg(feature = "full")] +pub fn visit_arm<'ast, V>(v: &mut V, node: &'ast Arm) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_pat(&node.pat); + if let Some(it) = &node.guard { + tokens_helper(v, &(it).0.span); + v.visit_expr(&*(it).1); + } + tokens_helper(v, &node.fat_arrow_token.spans); + v.visit_expr(&*node.body); + if let Some(it) = &node.comma { + tokens_helper(v, &it.spans); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_attr_style<'ast, V>(v: &mut V, node: &'ast AttrStyle) +where + V: Visit<'ast> + ?Sized, +{ + match node { + AttrStyle::Outer => {} + AttrStyle::Inner(_binding_0) => { + tokens_helper(v, &_binding_0.spans); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_attribute<'ast, V>(v: &mut V, node: &'ast Attribute) +where + V: Visit<'ast> + ?Sized, +{ + tokens_helper(v, &node.pound_token.spans); + v.visit_attr_style(&node.style); + tokens_helper(v, &node.bracket_token.span); + v.visit_path(&node.path); + skip!(node.tokens); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_bare_fn_arg<'ast, V>(v: &mut V, node: &'ast BareFnArg) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + if let Some(it) = &node.name { + v.visit_ident(&(it).0); + tokens_helper(v, &(it).1.spans); + } + v.visit_type(&node.ty); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_bin_op<'ast, V>(v: &mut V, node: &'ast BinOp) +where + V: Visit<'ast> + ?Sized, +{ + match node { + BinOp::Add(_binding_0) => { + tokens_helper(v, &_binding_0.spans); + } + BinOp::Sub(_binding_0) => { + tokens_helper(v, &_binding_0.spans); + } + BinOp::Mul(_binding_0) => { + tokens_helper(v, &_binding_0.spans); + } + BinOp::Div(_binding_0) => { + tokens_helper(v, &_binding_0.spans); + } + BinOp::Rem(_binding_0) => { + tokens_helper(v, &_binding_0.spans); + } + BinOp::And(_binding_0) => { + tokens_helper(v, &_binding_0.spans); + } + BinOp::Or(_binding_0) => { + tokens_helper(v, &_binding_0.spans); + } + BinOp::BitXor(_binding_0) => { + tokens_helper(v, &_binding_0.spans); + } + BinOp::BitAnd(_binding_0) => { + tokens_helper(v, &_binding_0.spans); + } + BinOp::BitOr(_binding_0) => { + tokens_helper(v, &_binding_0.spans); + } + BinOp::Shl(_binding_0) => { + tokens_helper(v, &_binding_0.spans); + } + BinOp::Shr(_binding_0) => { + tokens_helper(v, &_binding_0.spans); + } + BinOp::Eq(_binding_0) => { + tokens_helper(v, &_binding_0.spans); + } + BinOp::Lt(_binding_0) => { + tokens_helper(v, &_binding_0.spans); + } + BinOp::Le(_binding_0) => { + tokens_helper(v, &_binding_0.spans); + } + BinOp::Ne(_binding_0) => { + tokens_helper(v, &_binding_0.spans); + } + BinOp::Ge(_binding_0) => { + tokens_helper(v, &_binding_0.spans); + } + BinOp::Gt(_binding_0) => { + tokens_helper(v, &_binding_0.spans); + } + BinOp::AddEq(_binding_0) => { + tokens_helper(v, &_binding_0.spans); + } + BinOp::SubEq(_binding_0) => { + tokens_helper(v, &_binding_0.spans); + } + BinOp::MulEq(_binding_0) => { + tokens_helper(v, &_binding_0.spans); + } + BinOp::DivEq(_binding_0) => { + tokens_helper(v, &_binding_0.spans); + } + BinOp::RemEq(_binding_0) => { + tokens_helper(v, &_binding_0.spans); + } + BinOp::BitXorEq(_binding_0) => { + tokens_helper(v, &_binding_0.spans); + } + 
BinOp::BitAndEq(_binding_0) => { + tokens_helper(v, &_binding_0.spans); + } + BinOp::BitOrEq(_binding_0) => { + tokens_helper(v, &_binding_0.spans); + } + BinOp::ShlEq(_binding_0) => { + tokens_helper(v, &_binding_0.spans); + } + BinOp::ShrEq(_binding_0) => { + tokens_helper(v, &_binding_0.spans); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_binding<'ast, V>(v: &mut V, node: &'ast Binding) +where + V: Visit<'ast> + ?Sized, +{ + v.visit_ident(&node.ident); + tokens_helper(v, &node.eq_token.spans); + v.visit_type(&node.ty); +} +#[cfg(feature = "full")] +pub fn visit_block<'ast, V>(v: &mut V, node: &'ast Block) +where + V: Visit<'ast> + ?Sized, +{ + tokens_helper(v, &node.brace_token.span); + for it in &node.stmts { + v.visit_stmt(it); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_bound_lifetimes<'ast, V>(v: &mut V, node: &'ast BoundLifetimes) +where + V: Visit<'ast> + ?Sized, +{ + tokens_helper(v, &node.for_token.span); + tokens_helper(v, &node.lt_token.spans); + for el in Punctuated::pairs(&node.lifetimes) { + let (it, p) = el.into_tuple(); + v.visit_lifetime_def(it); + if let Some(p) = p { + tokens_helper(v, &p.spans); + } + } + tokens_helper(v, &node.gt_token.spans); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_const_param<'ast, V>(v: &mut V, node: &'ast ConstParam) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + tokens_helper(v, &node.const_token.span); + v.visit_ident(&node.ident); + tokens_helper(v, &node.colon_token.spans); + v.visit_type(&node.ty); + if let Some(it) = &node.eq_token { + tokens_helper(v, &it.spans); + } + if let Some(it) = &node.default { + v.visit_expr(it); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_constraint<'ast, V>(v: &mut V, node: &'ast Constraint) +where + V: Visit<'ast> + ?Sized, +{ + v.visit_ident(&node.ident); + tokens_helper(v, &node.colon_token.spans); + for el in Punctuated::pairs(&node.bounds) { + let (it, p) = el.into_tuple(); + v.visit_type_param_bound(it); + if let Some(p) = p { + tokens_helper(v, &p.spans); + } + } +} +#[cfg(feature = "derive")] +pub fn visit_data<'ast, V>(v: &mut V, node: &'ast Data) +where + V: Visit<'ast> + ?Sized, +{ + match node { + Data::Struct(_binding_0) => { + v.visit_data_struct(_binding_0); + } + Data::Enum(_binding_0) => { + v.visit_data_enum(_binding_0); + } + Data::Union(_binding_0) => { + v.visit_data_union(_binding_0); + } + } +} +#[cfg(feature = "derive")] +pub fn visit_data_enum<'ast, V>(v: &mut V, node: &'ast DataEnum) +where + V: Visit<'ast> + ?Sized, +{ + tokens_helper(v, &node.enum_token.span); + tokens_helper(v, &node.brace_token.span); + for el in Punctuated::pairs(&node.variants) { + let (it, p) = el.into_tuple(); + v.visit_variant(it); + if let Some(p) = p { + tokens_helper(v, &p.spans); + } + } +} +#[cfg(feature = "derive")] +pub fn visit_data_struct<'ast, V>(v: &mut V, node: &'ast DataStruct) +where + V: Visit<'ast> + ?Sized, +{ + tokens_helper(v, &node.struct_token.span); + v.visit_fields(&node.fields); + if let Some(it) = &node.semi_token { + tokens_helper(v, &it.spans); + } +} +#[cfg(feature = "derive")] +pub fn visit_data_union<'ast, V>(v: &mut V, node: &'ast DataUnion) +where + V: Visit<'ast> + ?Sized, +{ + tokens_helper(v, &node.union_token.span); + v.visit_fields_named(&node.fields); +} +#[cfg(feature = "derive")] +pub fn visit_derive_input<'ast, V>(v: &mut V, node: &'ast DeriveInput) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { 
+ v.visit_attribute(it); + } + v.visit_visibility(&node.vis); + v.visit_ident(&node.ident); + v.visit_generics(&node.generics); + v.visit_data(&node.data); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_expr<'ast, V>(v: &mut V, node: &'ast Expr) +where + V: Visit<'ast> + ?Sized, +{ + match node { + Expr::Array(_binding_0) => { + full!(v.visit_expr_array(_binding_0)); + } + Expr::Assign(_binding_0) => { + full!(v.visit_expr_assign(_binding_0)); + } + Expr::AssignOp(_binding_0) => { + full!(v.visit_expr_assign_op(_binding_0)); + } + Expr::Async(_binding_0) => { + full!(v.visit_expr_async(_binding_0)); + } + Expr::Await(_binding_0) => { + full!(v.visit_expr_await(_binding_0)); + } + Expr::Binary(_binding_0) => { + v.visit_expr_binary(_binding_0); + } + Expr::Block(_binding_0) => { + full!(v.visit_expr_block(_binding_0)); + } + Expr::Box(_binding_0) => { + full!(v.visit_expr_box(_binding_0)); + } + Expr::Break(_binding_0) => { + full!(v.visit_expr_break(_binding_0)); + } + Expr::Call(_binding_0) => { + v.visit_expr_call(_binding_0); + } + Expr::Cast(_binding_0) => { + v.visit_expr_cast(_binding_0); + } + Expr::Closure(_binding_0) => { + full!(v.visit_expr_closure(_binding_0)); + } + Expr::Continue(_binding_0) => { + full!(v.visit_expr_continue(_binding_0)); + } + Expr::Field(_binding_0) => { + v.visit_expr_field(_binding_0); + } + Expr::ForLoop(_binding_0) => { + full!(v.visit_expr_for_loop(_binding_0)); + } + Expr::Group(_binding_0) => { + full!(v.visit_expr_group(_binding_0)); + } + Expr::If(_binding_0) => { + full!(v.visit_expr_if(_binding_0)); + } + Expr::Index(_binding_0) => { + v.visit_expr_index(_binding_0); + } + Expr::Let(_binding_0) => { + full!(v.visit_expr_let(_binding_0)); + } + Expr::Lit(_binding_0) => { + v.visit_expr_lit(_binding_0); + } + Expr::Loop(_binding_0) => { + full!(v.visit_expr_loop(_binding_0)); + } + Expr::Macro(_binding_0) => { + full!(v.visit_expr_macro(_binding_0)); + } + Expr::Match(_binding_0) => { + full!(v.visit_expr_match(_binding_0)); + } + Expr::MethodCall(_binding_0) => { + full!(v.visit_expr_method_call(_binding_0)); + } + Expr::Paren(_binding_0) => { + v.visit_expr_paren(_binding_0); + } + Expr::Path(_binding_0) => { + v.visit_expr_path(_binding_0); + } + Expr::Range(_binding_0) => { + full!(v.visit_expr_range(_binding_0)); + } + Expr::Reference(_binding_0) => { + full!(v.visit_expr_reference(_binding_0)); + } + Expr::Repeat(_binding_0) => { + full!(v.visit_expr_repeat(_binding_0)); + } + Expr::Return(_binding_0) => { + full!(v.visit_expr_return(_binding_0)); + } + Expr::Struct(_binding_0) => { + full!(v.visit_expr_struct(_binding_0)); + } + Expr::Try(_binding_0) => { + full!(v.visit_expr_try(_binding_0)); + } + Expr::TryBlock(_binding_0) => { + full!(v.visit_expr_try_block(_binding_0)); + } + Expr::Tuple(_binding_0) => { + full!(v.visit_expr_tuple(_binding_0)); + } + Expr::Type(_binding_0) => { + full!(v.visit_expr_type(_binding_0)); + } + Expr::Unary(_binding_0) => { + v.visit_expr_unary(_binding_0); + } + Expr::Unsafe(_binding_0) => { + full!(v.visit_expr_unsafe(_binding_0)); + } + Expr::Verbatim(_binding_0) => { + skip!(_binding_0); + } + Expr::While(_binding_0) => { + full!(v.visit_expr_while(_binding_0)); + } + Expr::Yield(_binding_0) => { + full!(v.visit_expr_yield(_binding_0)); + } + #[cfg(syn_no_non_exhaustive)] + _ => unreachable!(), + } +} +#[cfg(feature = "full")] +pub fn visit_expr_array<'ast, V>(v: &mut V, node: &'ast ExprArray) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } 
+ tokens_helper(v, &node.bracket_token.span); + for el in Punctuated::pairs(&node.elems) { + let (it, p) = el.into_tuple(); + v.visit_expr(it); + if let Some(p) = p { + tokens_helper(v, &p.spans); + } + } +} +#[cfg(feature = "full")] +pub fn visit_expr_assign<'ast, V>(v: &mut V, node: &'ast ExprAssign) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_expr(&*node.left); + tokens_helper(v, &node.eq_token.spans); + v.visit_expr(&*node.right); +} +#[cfg(feature = "full")] +pub fn visit_expr_assign_op<'ast, V>(v: &mut V, node: &'ast ExprAssignOp) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_expr(&*node.left); + v.visit_bin_op(&node.op); + v.visit_expr(&*node.right); +} +#[cfg(feature = "full")] +pub fn visit_expr_async<'ast, V>(v: &mut V, node: &'ast ExprAsync) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + tokens_helper(v, &node.async_token.span); + if let Some(it) = &node.capture { + tokens_helper(v, &it.span); + } + v.visit_block(&node.block); +} +#[cfg(feature = "full")] +pub fn visit_expr_await<'ast, V>(v: &mut V, node: &'ast ExprAwait) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_expr(&*node.base); + tokens_helper(v, &node.dot_token.spans); + tokens_helper(v, &node.await_token.span); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_expr_binary<'ast, V>(v: &mut V, node: &'ast ExprBinary) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_expr(&*node.left); + v.visit_bin_op(&node.op); + v.visit_expr(&*node.right); +} +#[cfg(feature = "full")] +pub fn visit_expr_block<'ast, V>(v: &mut V, node: &'ast ExprBlock) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + if let Some(it) = &node.label { + v.visit_label(it); + } + v.visit_block(&node.block); +} +#[cfg(feature = "full")] +pub fn visit_expr_box<'ast, V>(v: &mut V, node: &'ast ExprBox) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + tokens_helper(v, &node.box_token.span); + v.visit_expr(&*node.expr); +} +#[cfg(feature = "full")] +pub fn visit_expr_break<'ast, V>(v: &mut V, node: &'ast ExprBreak) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + tokens_helper(v, &node.break_token.span); + if let Some(it) = &node.label { + v.visit_lifetime(it); + } + if let Some(it) = &node.expr { + v.visit_expr(&**it); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_expr_call<'ast, V>(v: &mut V, node: &'ast ExprCall) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_expr(&*node.func); + tokens_helper(v, &node.paren_token.span); + for el in Punctuated::pairs(&node.args) { + let (it, p) = el.into_tuple(); + v.visit_expr(it); + if let Some(p) = p { + tokens_helper(v, &p.spans); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_expr_cast<'ast, V>(v: &mut V, node: &'ast ExprCast) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_expr(&*node.expr); + tokens_helper(v, &node.as_token.span); + v.visit_type(&*node.ty); +} +#[cfg(feature = "full")] +pub fn visit_expr_closure<'ast, V>(v: &mut V, node: &'ast ExprClosure) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } 
+ if let Some(it) = &node.movability { + tokens_helper(v, &it.span); + } + if let Some(it) = &node.asyncness { + tokens_helper(v, &it.span); + } + if let Some(it) = &node.capture { + tokens_helper(v, &it.span); + } + tokens_helper(v, &node.or1_token.spans); + for el in Punctuated::pairs(&node.inputs) { + let (it, p) = el.into_tuple(); + v.visit_pat(it); + if let Some(p) = p { + tokens_helper(v, &p.spans); + } + } + tokens_helper(v, &node.or2_token.spans); + v.visit_return_type(&node.output); + v.visit_expr(&*node.body); +} +#[cfg(feature = "full")] +pub fn visit_expr_continue<'ast, V>(v: &mut V, node: &'ast ExprContinue) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + tokens_helper(v, &node.continue_token.span); + if let Some(it) = &node.label { + v.visit_lifetime(it); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_expr_field<'ast, V>(v: &mut V, node: &'ast ExprField) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_expr(&*node.base); + tokens_helper(v, &node.dot_token.spans); + v.visit_member(&node.member); +} +#[cfg(feature = "full")] +pub fn visit_expr_for_loop<'ast, V>(v: &mut V, node: &'ast ExprForLoop) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + if let Some(it) = &node.label { + v.visit_label(it); + } + tokens_helper(v, &node.for_token.span); + v.visit_pat(&node.pat); + tokens_helper(v, &node.in_token.span); + v.visit_expr(&*node.expr); + v.visit_block(&node.body); +} +#[cfg(feature = "full")] +pub fn visit_expr_group<'ast, V>(v: &mut V, node: &'ast ExprGroup) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + tokens_helper(v, &node.group_token.span); + v.visit_expr(&*node.expr); +} +#[cfg(feature = "full")] +pub fn visit_expr_if<'ast, V>(v: &mut V, node: &'ast ExprIf) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + tokens_helper(v, &node.if_token.span); + v.visit_expr(&*node.cond); + v.visit_block(&node.then_branch); + if let Some(it) = &node.else_branch { + tokens_helper(v, &(it).0.span); + v.visit_expr(&*(it).1); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_expr_index<'ast, V>(v: &mut V, node: &'ast ExprIndex) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_expr(&*node.expr); + tokens_helper(v, &node.bracket_token.span); + v.visit_expr(&*node.index); +} +#[cfg(feature = "full")] +pub fn visit_expr_let<'ast, V>(v: &mut V, node: &'ast ExprLet) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + tokens_helper(v, &node.let_token.span); + v.visit_pat(&node.pat); + tokens_helper(v, &node.eq_token.spans); + v.visit_expr(&*node.expr); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_expr_lit<'ast, V>(v: &mut V, node: &'ast ExprLit) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_lit(&node.lit); +} +#[cfg(feature = "full")] +pub fn visit_expr_loop<'ast, V>(v: &mut V, node: &'ast ExprLoop) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + if let Some(it) = &node.label { + v.visit_label(it); + } + tokens_helper(v, &node.loop_token.span); + v.visit_block(&node.body); +} +#[cfg(feature = "full")] +pub fn visit_expr_macro<'ast, V>(v: &mut V, node: &'ast ExprMacro) +where + V: Visit<'ast> + ?Sized, +{ 
+ for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_macro(&node.mac); +} +#[cfg(feature = "full")] +pub fn visit_expr_match<'ast, V>(v: &mut V, node: &'ast ExprMatch) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + tokens_helper(v, &node.match_token.span); + v.visit_expr(&*node.expr); + tokens_helper(v, &node.brace_token.span); + for it in &node.arms { + v.visit_arm(it); + } +} +#[cfg(feature = "full")] +pub fn visit_expr_method_call<'ast, V>(v: &mut V, node: &'ast ExprMethodCall) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_expr(&*node.receiver); + tokens_helper(v, &node.dot_token.spans); + v.visit_ident(&node.method); + if let Some(it) = &node.turbofish { + v.visit_method_turbofish(it); + } + tokens_helper(v, &node.paren_token.span); + for el in Punctuated::pairs(&node.args) { + let (it, p) = el.into_tuple(); + v.visit_expr(it); + if let Some(p) = p { + tokens_helper(v, &p.spans); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_expr_paren<'ast, V>(v: &mut V, node: &'ast ExprParen) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + tokens_helper(v, &node.paren_token.span); + v.visit_expr(&*node.expr); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_expr_path<'ast, V>(v: &mut V, node: &'ast ExprPath) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + if let Some(it) = &node.qself { + v.visit_qself(it); + } + v.visit_path(&node.path); +} +#[cfg(feature = "full")] +pub fn visit_expr_range<'ast, V>(v: &mut V, node: &'ast ExprRange) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + if let Some(it) = &node.from { + v.visit_expr(&**it); + } + v.visit_range_limits(&node.limits); + if let Some(it) = &node.to { + v.visit_expr(&**it); + } +} +#[cfg(feature = "full")] +pub fn visit_expr_reference<'ast, V>(v: &mut V, node: &'ast ExprReference) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + tokens_helper(v, &node.and_token.spans); + if let Some(it) = &node.mutability { + tokens_helper(v, &it.span); + } + v.visit_expr(&*node.expr); +} +#[cfg(feature = "full")] +pub fn visit_expr_repeat<'ast, V>(v: &mut V, node: &'ast ExprRepeat) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + tokens_helper(v, &node.bracket_token.span); + v.visit_expr(&*node.expr); + tokens_helper(v, &node.semi_token.spans); + v.visit_expr(&*node.len); +} +#[cfg(feature = "full")] +pub fn visit_expr_return<'ast, V>(v: &mut V, node: &'ast ExprReturn) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + tokens_helper(v, &node.return_token.span); + if let Some(it) = &node.expr { + v.visit_expr(&**it); + } +} +#[cfg(feature = "full")] +pub fn visit_expr_struct<'ast, V>(v: &mut V, node: &'ast ExprStruct) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_path(&node.path); + tokens_helper(v, &node.brace_token.span); + for el in Punctuated::pairs(&node.fields) { + let (it, p) = el.into_tuple(); + v.visit_field_value(it); + if let Some(p) = p { + tokens_helper(v, &p.spans); + } + } + if let Some(it) = &node.dot2_token { + tokens_helper(v, &it.spans); + } + if let Some(it) = &node.rest { + v.visit_expr(&**it); + } +} +#[cfg(feature = "full")] +pub fn visit_expr_try<'ast, V>(v: 
&mut V, node: &'ast ExprTry) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_expr(&*node.expr); + tokens_helper(v, &node.question_token.spans); +} +#[cfg(feature = "full")] +pub fn visit_expr_try_block<'ast, V>(v: &mut V, node: &'ast ExprTryBlock) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + tokens_helper(v, &node.try_token.span); + v.visit_block(&node.block); +} +#[cfg(feature = "full")] +pub fn visit_expr_tuple<'ast, V>(v: &mut V, node: &'ast ExprTuple) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + tokens_helper(v, &node.paren_token.span); + for el in Punctuated::pairs(&node.elems) { + let (it, p) = el.into_tuple(); + v.visit_expr(it); + if let Some(p) = p { + tokens_helper(v, &p.spans); + } + } +} +#[cfg(feature = "full")] +pub fn visit_expr_type<'ast, V>(v: &mut V, node: &'ast ExprType) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_expr(&*node.expr); + tokens_helper(v, &node.colon_token.spans); + v.visit_type(&*node.ty); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_expr_unary<'ast, V>(v: &mut V, node: &'ast ExprUnary) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_un_op(&node.op); + v.visit_expr(&*node.expr); +} +#[cfg(feature = "full")] +pub fn visit_expr_unsafe<'ast, V>(v: &mut V, node: &'ast ExprUnsafe) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + tokens_helper(v, &node.unsafe_token.span); + v.visit_block(&node.block); +} +#[cfg(feature = "full")] +pub fn visit_expr_while<'ast, V>(v: &mut V, node: &'ast ExprWhile) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + if let Some(it) = &node.label { + v.visit_label(it); + } + tokens_helper(v, &node.while_token.span); + v.visit_expr(&*node.cond); + v.visit_block(&node.body); +} +#[cfg(feature = "full")] +pub fn visit_expr_yield<'ast, V>(v: &mut V, node: &'ast ExprYield) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + tokens_helper(v, &node.yield_token.span); + if let Some(it) = &node.expr { + v.visit_expr(&**it); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_field<'ast, V>(v: &mut V, node: &'ast Field) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_visibility(&node.vis); + if let Some(it) = &node.ident { + v.visit_ident(it); + } + if let Some(it) = &node.colon_token { + tokens_helper(v, &it.spans); + } + v.visit_type(&node.ty); +} +#[cfg(feature = "full")] +pub fn visit_field_pat<'ast, V>(v: &mut V, node: &'ast FieldPat) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_member(&node.member); + if let Some(it) = &node.colon_token { + tokens_helper(v, &it.spans); + } + v.visit_pat(&*node.pat); +} +#[cfg(feature = "full")] +pub fn visit_field_value<'ast, V>(v: &mut V, node: &'ast FieldValue) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_member(&node.member); + if let Some(it) = &node.colon_token { + tokens_helper(v, &it.spans); + } + v.visit_expr(&node.expr); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_fields<'ast, V>(v: &mut V, node: &'ast Fields) +where + V: Visit<'ast> + ?Sized, +{ + match node { + 
Fields::Named(_binding_0) => { + v.visit_fields_named(_binding_0); + } + Fields::Unnamed(_binding_0) => { + v.visit_fields_unnamed(_binding_0); + } + Fields::Unit => {} + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_fields_named<'ast, V>(v: &mut V, node: &'ast FieldsNamed) +where + V: Visit<'ast> + ?Sized, +{ + tokens_helper(v, &node.brace_token.span); + for el in Punctuated::pairs(&node.named) { + let (it, p) = el.into_tuple(); + v.visit_field(it); + if let Some(p) = p { + tokens_helper(v, &p.spans); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_fields_unnamed<'ast, V>(v: &mut V, node: &'ast FieldsUnnamed) +where + V: Visit<'ast> + ?Sized, +{ + tokens_helper(v, &node.paren_token.span); + for el in Punctuated::pairs(&node.unnamed) { + let (it, p) = el.into_tuple(); + v.visit_field(it); + if let Some(p) = p { + tokens_helper(v, &p.spans); + } + } +} +#[cfg(feature = "full")] +pub fn visit_file<'ast, V>(v: &mut V, node: &'ast File) +where + V: Visit<'ast> + ?Sized, +{ + skip!(node.shebang); + for it in &node.attrs { + v.visit_attribute(it); + } + for it in &node.items { + v.visit_item(it); + } +} +#[cfg(feature = "full")] +pub fn visit_fn_arg<'ast, V>(v: &mut V, node: &'ast FnArg) +where + V: Visit<'ast> + ?Sized, +{ + match node { + FnArg::Receiver(_binding_0) => { + v.visit_receiver(_binding_0); + } + FnArg::Typed(_binding_0) => { + v.visit_pat_type(_binding_0); + } + } +} +#[cfg(feature = "full")] +pub fn visit_foreign_item<'ast, V>(v: &mut V, node: &'ast ForeignItem) +where + V: Visit<'ast> + ?Sized, +{ + match node { + ForeignItem::Fn(_binding_0) => { + v.visit_foreign_item_fn(_binding_0); + } + ForeignItem::Static(_binding_0) => { + v.visit_foreign_item_static(_binding_0); + } + ForeignItem::Type(_binding_0) => { + v.visit_foreign_item_type(_binding_0); + } + ForeignItem::Macro(_binding_0) => { + v.visit_foreign_item_macro(_binding_0); + } + ForeignItem::Verbatim(_binding_0) => { + skip!(_binding_0); + } + #[cfg(syn_no_non_exhaustive)] + _ => unreachable!(), + } +} +#[cfg(feature = "full")] +pub fn visit_foreign_item_fn<'ast, V>(v: &mut V, node: &'ast ForeignItemFn) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_visibility(&node.vis); + v.visit_signature(&node.sig); + tokens_helper(v, &node.semi_token.spans); +} +#[cfg(feature = "full")] +pub fn visit_foreign_item_macro<'ast, V>(v: &mut V, node: &'ast ForeignItemMacro) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_macro(&node.mac); + if let Some(it) = &node.semi_token { + tokens_helper(v, &it.spans); + } +} +#[cfg(feature = "full")] +pub fn visit_foreign_item_static<'ast, V>(v: &mut V, node: &'ast ForeignItemStatic) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_visibility(&node.vis); + tokens_helper(v, &node.static_token.span); + if let Some(it) = &node.mutability { + tokens_helper(v, &it.span); + } + v.visit_ident(&node.ident); + tokens_helper(v, &node.colon_token.spans); + v.visit_type(&*node.ty); + tokens_helper(v, &node.semi_token.spans); +} +#[cfg(feature = "full")] +pub fn visit_foreign_item_type<'ast, V>(v: &mut V, node: &'ast ForeignItemType) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_visibility(&node.vis); + tokens_helper(v, &node.type_token.span); + v.visit_ident(&node.ident); + tokens_helper(v, &node.semi_token.spans); +} +#[cfg(any(feature = 
"derive", feature = "full"))] +pub fn visit_generic_argument<'ast, V>(v: &mut V, node: &'ast GenericArgument) +where + V: Visit<'ast> + ?Sized, +{ + match node { + GenericArgument::Lifetime(_binding_0) => { + v.visit_lifetime(_binding_0); + } + GenericArgument::Type(_binding_0) => { + v.visit_type(_binding_0); + } + GenericArgument::Const(_binding_0) => { + v.visit_expr(_binding_0); + } + GenericArgument::Binding(_binding_0) => { + v.visit_binding(_binding_0); + } + GenericArgument::Constraint(_binding_0) => { + v.visit_constraint(_binding_0); + } + } +} +#[cfg(feature = "full")] +pub fn visit_generic_method_argument<'ast, V>( + v: &mut V, + node: &'ast GenericMethodArgument, +) +where + V: Visit<'ast> + ?Sized, +{ + match node { + GenericMethodArgument::Type(_binding_0) => { + v.visit_type(_binding_0); + } + GenericMethodArgument::Const(_binding_0) => { + v.visit_expr(_binding_0); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_generic_param<'ast, V>(v: &mut V, node: &'ast GenericParam) +where + V: Visit<'ast> + ?Sized, +{ + match node { + GenericParam::Type(_binding_0) => { + v.visit_type_param(_binding_0); + } + GenericParam::Lifetime(_binding_0) => { + v.visit_lifetime_def(_binding_0); + } + GenericParam::Const(_binding_0) => { + v.visit_const_param(_binding_0); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_generics<'ast, V>(v: &mut V, node: &'ast Generics) +where + V: Visit<'ast> + ?Sized, +{ + if let Some(it) = &node.lt_token { + tokens_helper(v, &it.spans); + } + for el in Punctuated::pairs(&node.params) { + let (it, p) = el.into_tuple(); + v.visit_generic_param(it); + if let Some(p) = p { + tokens_helper(v, &p.spans); + } + } + if let Some(it) = &node.gt_token { + tokens_helper(v, &it.spans); + } + if let Some(it) = &node.where_clause { + v.visit_where_clause(it); + } +} +pub fn visit_ident<'ast, V>(v: &mut V, node: &'ast Ident) +where + V: Visit<'ast> + ?Sized, +{ + v.visit_span(&node.span()); +} +#[cfg(feature = "full")] +pub fn visit_impl_item<'ast, V>(v: &mut V, node: &'ast ImplItem) +where + V: Visit<'ast> + ?Sized, +{ + match node { + ImplItem::Const(_binding_0) => { + v.visit_impl_item_const(_binding_0); + } + ImplItem::Method(_binding_0) => { + v.visit_impl_item_method(_binding_0); + } + ImplItem::Type(_binding_0) => { + v.visit_impl_item_type(_binding_0); + } + ImplItem::Macro(_binding_0) => { + v.visit_impl_item_macro(_binding_0); + } + ImplItem::Verbatim(_binding_0) => { + skip!(_binding_0); + } + #[cfg(syn_no_non_exhaustive)] + _ => unreachable!(), + } +} +#[cfg(feature = "full")] +pub fn visit_impl_item_const<'ast, V>(v: &mut V, node: &'ast ImplItemConst) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_visibility(&node.vis); + if let Some(it) = &node.defaultness { + tokens_helper(v, &it.span); + } + tokens_helper(v, &node.const_token.span); + v.visit_ident(&node.ident); + tokens_helper(v, &node.colon_token.spans); + v.visit_type(&node.ty); + tokens_helper(v, &node.eq_token.spans); + v.visit_expr(&node.expr); + tokens_helper(v, &node.semi_token.spans); +} +#[cfg(feature = "full")] +pub fn visit_impl_item_macro<'ast, V>(v: &mut V, node: &'ast ImplItemMacro) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_macro(&node.mac); + if let Some(it) = &node.semi_token { + tokens_helper(v, &it.spans); + } +} +#[cfg(feature = "full")] +pub fn visit_impl_item_method<'ast, V>(v: &mut V, node: &'ast ImplItemMethod) +where 
+ V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_visibility(&node.vis); + if let Some(it) = &node.defaultness { + tokens_helper(v, &it.span); + } + v.visit_signature(&node.sig); + v.visit_block(&node.block); +} +#[cfg(feature = "full")] +pub fn visit_impl_item_type<'ast, V>(v: &mut V, node: &'ast ImplItemType) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_visibility(&node.vis); + if let Some(it) = &node.defaultness { + tokens_helper(v, &it.span); + } + tokens_helper(v, &node.type_token.span); + v.visit_ident(&node.ident); + v.visit_generics(&node.generics); + tokens_helper(v, &node.eq_token.spans); + v.visit_type(&node.ty); + tokens_helper(v, &node.semi_token.spans); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_index<'ast, V>(v: &mut V, node: &'ast Index) +where + V: Visit<'ast> + ?Sized, +{ + skip!(node.index); + v.visit_span(&node.span); +} +#[cfg(feature = "full")] +pub fn visit_item<'ast, V>(v: &mut V, node: &'ast Item) +where + V: Visit<'ast> + ?Sized, +{ + match node { + Item::Const(_binding_0) => { + v.visit_item_const(_binding_0); + } + Item::Enum(_binding_0) => { + v.visit_item_enum(_binding_0); + } + Item::ExternCrate(_binding_0) => { + v.visit_item_extern_crate(_binding_0); + } + Item::Fn(_binding_0) => { + v.visit_item_fn(_binding_0); + } + Item::ForeignMod(_binding_0) => { + v.visit_item_foreign_mod(_binding_0); + } + Item::Impl(_binding_0) => { + v.visit_item_impl(_binding_0); + } + Item::Macro(_binding_0) => { + v.visit_item_macro(_binding_0); + } + Item::Macro2(_binding_0) => { + v.visit_item_macro2(_binding_0); + } + Item::Mod(_binding_0) => { + v.visit_item_mod(_binding_0); + } + Item::Static(_binding_0) => { + v.visit_item_static(_binding_0); + } + Item::Struct(_binding_0) => { + v.visit_item_struct(_binding_0); + } + Item::Trait(_binding_0) => { + v.visit_item_trait(_binding_0); + } + Item::TraitAlias(_binding_0) => { + v.visit_item_trait_alias(_binding_0); + } + Item::Type(_binding_0) => { + v.visit_item_type(_binding_0); + } + Item::Union(_binding_0) => { + v.visit_item_union(_binding_0); + } + Item::Use(_binding_0) => { + v.visit_item_use(_binding_0); + } + Item::Verbatim(_binding_0) => { + skip!(_binding_0); + } + #[cfg(syn_no_non_exhaustive)] + _ => unreachable!(), + } +} +#[cfg(feature = "full")] +pub fn visit_item_const<'ast, V>(v: &mut V, node: &'ast ItemConst) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_visibility(&node.vis); + tokens_helper(v, &node.const_token.span); + v.visit_ident(&node.ident); + tokens_helper(v, &node.colon_token.spans); + v.visit_type(&*node.ty); + tokens_helper(v, &node.eq_token.spans); + v.visit_expr(&*node.expr); + tokens_helper(v, &node.semi_token.spans); +} +#[cfg(feature = "full")] +pub fn visit_item_enum<'ast, V>(v: &mut V, node: &'ast ItemEnum) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_visibility(&node.vis); + tokens_helper(v, &node.enum_token.span); + v.visit_ident(&node.ident); + v.visit_generics(&node.generics); + tokens_helper(v, &node.brace_token.span); + for el in Punctuated::pairs(&node.variants) { + let (it, p) = el.into_tuple(); + v.visit_variant(it); + if let Some(p) = p { + tokens_helper(v, &p.spans); + } + } +} +#[cfg(feature = "full")] +pub fn visit_item_extern_crate<'ast, V>(v: &mut V, node: &'ast ItemExternCrate) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + 
v.visit_attribute(it); + } + v.visit_visibility(&node.vis); + tokens_helper(v, &node.extern_token.span); + tokens_helper(v, &node.crate_token.span); + v.visit_ident(&node.ident); + if let Some(it) = &node.rename { + tokens_helper(v, &(it).0.span); + v.visit_ident(&(it).1); + } + tokens_helper(v, &node.semi_token.spans); +} +#[cfg(feature = "full")] +pub fn visit_item_fn<'ast, V>(v: &mut V, node: &'ast ItemFn) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_visibility(&node.vis); + v.visit_signature(&node.sig); + v.visit_block(&*node.block); +} +#[cfg(feature = "full")] +pub fn visit_item_foreign_mod<'ast, V>(v: &mut V, node: &'ast ItemForeignMod) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_abi(&node.abi); + tokens_helper(v, &node.brace_token.span); + for it in &node.items { + v.visit_foreign_item(it); + } +} +#[cfg(feature = "full")] +pub fn visit_item_impl<'ast, V>(v: &mut V, node: &'ast ItemImpl) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + if let Some(it) = &node.defaultness { + tokens_helper(v, &it.span); + } + if let Some(it) = &node.unsafety { + tokens_helper(v, &it.span); + } + tokens_helper(v, &node.impl_token.span); + v.visit_generics(&node.generics); + if let Some(it) = &node.trait_ { + if let Some(it) = &(it).0 { + tokens_helper(v, &it.spans); + } + v.visit_path(&(it).1); + tokens_helper(v, &(it).2.span); + } + v.visit_type(&*node.self_ty); + tokens_helper(v, &node.brace_token.span); + for it in &node.items { + v.visit_impl_item(it); + } +} +#[cfg(feature = "full")] +pub fn visit_item_macro<'ast, V>(v: &mut V, node: &'ast ItemMacro) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + if let Some(it) = &node.ident { + v.visit_ident(it); + } + v.visit_macro(&node.mac); + if let Some(it) = &node.semi_token { + tokens_helper(v, &it.spans); + } +} +#[cfg(feature = "full")] +pub fn visit_item_macro2<'ast, V>(v: &mut V, node: &'ast ItemMacro2) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_visibility(&node.vis); + tokens_helper(v, &node.macro_token.span); + v.visit_ident(&node.ident); + skip!(node.rules); +} +#[cfg(feature = "full")] +pub fn visit_item_mod<'ast, V>(v: &mut V, node: &'ast ItemMod) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_visibility(&node.vis); + tokens_helper(v, &node.mod_token.span); + v.visit_ident(&node.ident); + if let Some(it) = &node.content { + tokens_helper(v, &(it).0.span); + for it in &(it).1 { + v.visit_item(it); + } + } + if let Some(it) = &node.semi { + tokens_helper(v, &it.spans); + } +} +#[cfg(feature = "full")] +pub fn visit_item_static<'ast, V>(v: &mut V, node: &'ast ItemStatic) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_visibility(&node.vis); + tokens_helper(v, &node.static_token.span); + if let Some(it) = &node.mutability { + tokens_helper(v, &it.span); + } + v.visit_ident(&node.ident); + tokens_helper(v, &node.colon_token.spans); + v.visit_type(&*node.ty); + tokens_helper(v, &node.eq_token.spans); + v.visit_expr(&*node.expr); + tokens_helper(v, &node.semi_token.spans); +} +#[cfg(feature = "full")] +pub fn visit_item_struct<'ast, V>(v: &mut V, node: &'ast ItemStruct) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + 
v.visit_visibility(&node.vis); + tokens_helper(v, &node.struct_token.span); + v.visit_ident(&node.ident); + v.visit_generics(&node.generics); + v.visit_fields(&node.fields); + if let Some(it) = &node.semi_token { + tokens_helper(v, &it.spans); + } +} +#[cfg(feature = "full")] +pub fn visit_item_trait<'ast, V>(v: &mut V, node: &'ast ItemTrait) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_visibility(&node.vis); + if let Some(it) = &node.unsafety { + tokens_helper(v, &it.span); + } + if let Some(it) = &node.auto_token { + tokens_helper(v, &it.span); + } + tokens_helper(v, &node.trait_token.span); + v.visit_ident(&node.ident); + v.visit_generics(&node.generics); + if let Some(it) = &node.colon_token { + tokens_helper(v, &it.spans); + } + for el in Punctuated::pairs(&node.supertraits) { + let (it, p) = el.into_tuple(); + v.visit_type_param_bound(it); + if let Some(p) = p { + tokens_helper(v, &p.spans); + } + } + tokens_helper(v, &node.brace_token.span); + for it in &node.items { + v.visit_trait_item(it); + } +} +#[cfg(feature = "full")] +pub fn visit_item_trait_alias<'ast, V>(v: &mut V, node: &'ast ItemTraitAlias) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_visibility(&node.vis); + tokens_helper(v, &node.trait_token.span); + v.visit_ident(&node.ident); + v.visit_generics(&node.generics); + tokens_helper(v, &node.eq_token.spans); + for el in Punctuated::pairs(&node.bounds) { + let (it, p) = el.into_tuple(); + v.visit_type_param_bound(it); + if let Some(p) = p { + tokens_helper(v, &p.spans); + } + } + tokens_helper(v, &node.semi_token.spans); +} +#[cfg(feature = "full")] +pub fn visit_item_type<'ast, V>(v: &mut V, node: &'ast ItemType) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_visibility(&node.vis); + tokens_helper(v, &node.type_token.span); + v.visit_ident(&node.ident); + v.visit_generics(&node.generics); + tokens_helper(v, &node.eq_token.spans); + v.visit_type(&*node.ty); + tokens_helper(v, &node.semi_token.spans); +} +#[cfg(feature = "full")] +pub fn visit_item_union<'ast, V>(v: &mut V, node: &'ast ItemUnion) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_visibility(&node.vis); + tokens_helper(v, &node.union_token.span); + v.visit_ident(&node.ident); + v.visit_generics(&node.generics); + v.visit_fields_named(&node.fields); +} +#[cfg(feature = "full")] +pub fn visit_item_use<'ast, V>(v: &mut V, node: &'ast ItemUse) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_visibility(&node.vis); + tokens_helper(v, &node.use_token.span); + if let Some(it) = &node.leading_colon { + tokens_helper(v, &it.spans); + } + v.visit_use_tree(&node.tree); + tokens_helper(v, &node.semi_token.spans); +} +#[cfg(feature = "full")] +pub fn visit_label<'ast, V>(v: &mut V, node: &'ast Label) +where + V: Visit<'ast> + ?Sized, +{ + v.visit_lifetime(&node.name); + tokens_helper(v, &node.colon_token.spans); +} +pub fn visit_lifetime<'ast, V>(v: &mut V, node: &'ast Lifetime) +where + V: Visit<'ast> + ?Sized, +{ + v.visit_span(&node.apostrophe); + v.visit_ident(&node.ident); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_lifetime_def<'ast, V>(v: &mut V, node: &'ast LifetimeDef) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_lifetime(&node.lifetime); + if let Some(it) = 
&node.colon_token { + tokens_helper(v, &it.spans); + } + for el in Punctuated::pairs(&node.bounds) { + let (it, p) = el.into_tuple(); + v.visit_lifetime(it); + if let Some(p) = p { + tokens_helper(v, &p.spans); + } + } +} +pub fn visit_lit<'ast, V>(v: &mut V, node: &'ast Lit) +where + V: Visit<'ast> + ?Sized, +{ + match node { + Lit::Str(_binding_0) => { + v.visit_lit_str(_binding_0); + } + Lit::ByteStr(_binding_0) => { + v.visit_lit_byte_str(_binding_0); + } + Lit::Byte(_binding_0) => { + v.visit_lit_byte(_binding_0); + } + Lit::Char(_binding_0) => { + v.visit_lit_char(_binding_0); + } + Lit::Int(_binding_0) => { + v.visit_lit_int(_binding_0); + } + Lit::Float(_binding_0) => { + v.visit_lit_float(_binding_0); + } + Lit::Bool(_binding_0) => { + v.visit_lit_bool(_binding_0); + } + Lit::Verbatim(_binding_0) => { + skip!(_binding_0); + } + } +} +pub fn visit_lit_bool<'ast, V>(v: &mut V, node: &'ast LitBool) +where + V: Visit<'ast> + ?Sized, +{ + skip!(node.value); + v.visit_span(&node.span); +} +pub fn visit_lit_byte<'ast, V>(v: &mut V, node: &'ast LitByte) +where + V: Visit<'ast> + ?Sized, +{} +pub fn visit_lit_byte_str<'ast, V>(v: &mut V, node: &'ast LitByteStr) +where + V: Visit<'ast> + ?Sized, +{} +pub fn visit_lit_char<'ast, V>(v: &mut V, node: &'ast LitChar) +where + V: Visit<'ast> + ?Sized, +{} +pub fn visit_lit_float<'ast, V>(v: &mut V, node: &'ast LitFloat) +where + V: Visit<'ast> + ?Sized, +{} +pub fn visit_lit_int<'ast, V>(v: &mut V, node: &'ast LitInt) +where + V: Visit<'ast> + ?Sized, +{} +pub fn visit_lit_str<'ast, V>(v: &mut V, node: &'ast LitStr) +where + V: Visit<'ast> + ?Sized, +{} +#[cfg(feature = "full")] +pub fn visit_local<'ast, V>(v: &mut V, node: &'ast Local) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + tokens_helper(v, &node.let_token.span); + v.visit_pat(&node.pat); + if let Some(it) = &node.init { + tokens_helper(v, &(it).0.spans); + v.visit_expr(&*(it).1); + } + tokens_helper(v, &node.semi_token.spans); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_macro<'ast, V>(v: &mut V, node: &'ast Macro) +where + V: Visit<'ast> + ?Sized, +{ + v.visit_path(&node.path); + tokens_helper(v, &node.bang_token.spans); + v.visit_macro_delimiter(&node.delimiter); + skip!(node.tokens); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_macro_delimiter<'ast, V>(v: &mut V, node: &'ast MacroDelimiter) +where + V: Visit<'ast> + ?Sized, +{ + match node { + MacroDelimiter::Paren(_binding_0) => { + tokens_helper(v, &_binding_0.span); + } + MacroDelimiter::Brace(_binding_0) => { + tokens_helper(v, &_binding_0.span); + } + MacroDelimiter::Bracket(_binding_0) => { + tokens_helper(v, &_binding_0.span); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_member<'ast, V>(v: &mut V, node: &'ast Member) +where + V: Visit<'ast> + ?Sized, +{ + match node { + Member::Named(_binding_0) => { + v.visit_ident(_binding_0); + } + Member::Unnamed(_binding_0) => { + v.visit_index(_binding_0); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_meta<'ast, V>(v: &mut V, node: &'ast Meta) +where + V: Visit<'ast> + ?Sized, +{ + match node { + Meta::Path(_binding_0) => { + v.visit_path(_binding_0); + } + Meta::List(_binding_0) => { + v.visit_meta_list(_binding_0); + } + Meta::NameValue(_binding_0) => { + v.visit_meta_name_value(_binding_0); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_meta_list<'ast, V>(v: &mut V, node: &'ast MetaList) +where + V: 
Visit<'ast> + ?Sized, +{ + v.visit_path(&node.path); + tokens_helper(v, &node.paren_token.span); + for el in Punctuated::pairs(&node.nested) { + let (it, p) = el.into_tuple(); + v.visit_nested_meta(it); + if let Some(p) = p { + tokens_helper(v, &p.spans); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_meta_name_value<'ast, V>(v: &mut V, node: &'ast MetaNameValue) +where + V: Visit<'ast> + ?Sized, +{ + v.visit_path(&node.path); + tokens_helper(v, &node.eq_token.spans); + v.visit_lit(&node.lit); +} +#[cfg(feature = "full")] +pub fn visit_method_turbofish<'ast, V>(v: &mut V, node: &'ast MethodTurbofish) +where + V: Visit<'ast> + ?Sized, +{ + tokens_helper(v, &node.colon2_token.spans); + tokens_helper(v, &node.lt_token.spans); + for el in Punctuated::pairs(&node.args) { + let (it, p) = el.into_tuple(); + v.visit_generic_method_argument(it); + if let Some(p) = p { + tokens_helper(v, &p.spans); + } + } + tokens_helper(v, &node.gt_token.spans); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_nested_meta<'ast, V>(v: &mut V, node: &'ast NestedMeta) +where + V: Visit<'ast> + ?Sized, +{ + match node { + NestedMeta::Meta(_binding_0) => { + v.visit_meta(_binding_0); + } + NestedMeta::Lit(_binding_0) => { + v.visit_lit(_binding_0); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_parenthesized_generic_arguments<'ast, V>( + v: &mut V, + node: &'ast ParenthesizedGenericArguments, +) +where + V: Visit<'ast> + ?Sized, +{ + tokens_helper(v, &node.paren_token.span); + for el in Punctuated::pairs(&node.inputs) { + let (it, p) = el.into_tuple(); + v.visit_type(it); + if let Some(p) = p { + tokens_helper(v, &p.spans); + } + } + v.visit_return_type(&node.output); +} +#[cfg(feature = "full")] +pub fn visit_pat<'ast, V>(v: &mut V, node: &'ast Pat) +where + V: Visit<'ast> + ?Sized, +{ + match node { + Pat::Box(_binding_0) => { + v.visit_pat_box(_binding_0); + } + Pat::Ident(_binding_0) => { + v.visit_pat_ident(_binding_0); + } + Pat::Lit(_binding_0) => { + v.visit_pat_lit(_binding_0); + } + Pat::Macro(_binding_0) => { + v.visit_pat_macro(_binding_0); + } + Pat::Or(_binding_0) => { + v.visit_pat_or(_binding_0); + } + Pat::Path(_binding_0) => { + v.visit_pat_path(_binding_0); + } + Pat::Range(_binding_0) => { + v.visit_pat_range(_binding_0); + } + Pat::Reference(_binding_0) => { + v.visit_pat_reference(_binding_0); + } + Pat::Rest(_binding_0) => { + v.visit_pat_rest(_binding_0); + } + Pat::Slice(_binding_0) => { + v.visit_pat_slice(_binding_0); + } + Pat::Struct(_binding_0) => { + v.visit_pat_struct(_binding_0); + } + Pat::Tuple(_binding_0) => { + v.visit_pat_tuple(_binding_0); + } + Pat::TupleStruct(_binding_0) => { + v.visit_pat_tuple_struct(_binding_0); + } + Pat::Type(_binding_0) => { + v.visit_pat_type(_binding_0); + } + Pat::Verbatim(_binding_0) => { + skip!(_binding_0); + } + Pat::Wild(_binding_0) => { + v.visit_pat_wild(_binding_0); + } + #[cfg(syn_no_non_exhaustive)] + _ => unreachable!(), + } +} +#[cfg(feature = "full")] +pub fn visit_pat_box<'ast, V>(v: &mut V, node: &'ast PatBox) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + tokens_helper(v, &node.box_token.span); + v.visit_pat(&*node.pat); +} +#[cfg(feature = "full")] +pub fn visit_pat_ident<'ast, V>(v: &mut V, node: &'ast PatIdent) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + if let Some(it) = &node.by_ref { + tokens_helper(v, &it.span); + } + if let Some(it) = &node.mutability { + 
tokens_helper(v, &it.span); + } + v.visit_ident(&node.ident); + if let Some(it) = &node.subpat { + tokens_helper(v, &(it).0.spans); + v.visit_pat(&*(it).1); + } +} +#[cfg(feature = "full")] +pub fn visit_pat_lit<'ast, V>(v: &mut V, node: &'ast PatLit) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_expr(&*node.expr); +} +#[cfg(feature = "full")] +pub fn visit_pat_macro<'ast, V>(v: &mut V, node: &'ast PatMacro) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_macro(&node.mac); +} +#[cfg(feature = "full")] +pub fn visit_pat_or<'ast, V>(v: &mut V, node: &'ast PatOr) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + if let Some(it) = &node.leading_vert { + tokens_helper(v, &it.spans); + } + for el in Punctuated::pairs(&node.cases) { + let (it, p) = el.into_tuple(); + v.visit_pat(it); + if let Some(p) = p { + tokens_helper(v, &p.spans); + } + } +} +#[cfg(feature = "full")] +pub fn visit_pat_path<'ast, V>(v: &mut V, node: &'ast PatPath) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + if let Some(it) = &node.qself { + v.visit_qself(it); + } + v.visit_path(&node.path); +} +#[cfg(feature = "full")] +pub fn visit_pat_range<'ast, V>(v: &mut V, node: &'ast PatRange) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_expr(&*node.lo); + v.visit_range_limits(&node.limits); + v.visit_expr(&*node.hi); +} +#[cfg(feature = "full")] +pub fn visit_pat_reference<'ast, V>(v: &mut V, node: &'ast PatReference) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + tokens_helper(v, &node.and_token.spans); + if let Some(it) = &node.mutability { + tokens_helper(v, &it.span); + } + v.visit_pat(&*node.pat); +} +#[cfg(feature = "full")] +pub fn visit_pat_rest<'ast, V>(v: &mut V, node: &'ast PatRest) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + tokens_helper(v, &node.dot2_token.spans); +} +#[cfg(feature = "full")] +pub fn visit_pat_slice<'ast, V>(v: &mut V, node: &'ast PatSlice) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + tokens_helper(v, &node.bracket_token.span); + for el in Punctuated::pairs(&node.elems) { + let (it, p) = el.into_tuple(); + v.visit_pat(it); + if let Some(p) = p { + tokens_helper(v, &p.spans); + } + } +} +#[cfg(feature = "full")] +pub fn visit_pat_struct<'ast, V>(v: &mut V, node: &'ast PatStruct) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_path(&node.path); + tokens_helper(v, &node.brace_token.span); + for el in Punctuated::pairs(&node.fields) { + let (it, p) = el.into_tuple(); + v.visit_field_pat(it); + if let Some(p) = p { + tokens_helper(v, &p.spans); + } + } + if let Some(it) = &node.dot2_token { + tokens_helper(v, &it.spans); + } +} +#[cfg(feature = "full")] +pub fn visit_pat_tuple<'ast, V>(v: &mut V, node: &'ast PatTuple) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + tokens_helper(v, &node.paren_token.span); + for el in Punctuated::pairs(&node.elems) { + let (it, p) = el.into_tuple(); + v.visit_pat(it); + if let Some(p) = p { + tokens_helper(v, &p.spans); + } + } +} +#[cfg(feature = "full")] +pub fn visit_pat_tuple_struct<'ast, V>(v: &mut V, node: &'ast PatTupleStruct) +where + V: Visit<'ast> + ?Sized, +{ + for it 
in &node.attrs { + v.visit_attribute(it); + } + v.visit_path(&node.path); + v.visit_pat_tuple(&node.pat); +} +#[cfg(feature = "full")] +pub fn visit_pat_type<'ast, V>(v: &mut V, node: &'ast PatType) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_pat(&*node.pat); + tokens_helper(v, &node.colon_token.spans); + v.visit_type(&*node.ty); +} +#[cfg(feature = "full")] +pub fn visit_pat_wild<'ast, V>(v: &mut V, node: &'ast PatWild) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + tokens_helper(v, &node.underscore_token.spans); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_path<'ast, V>(v: &mut V, node: &'ast Path) +where + V: Visit<'ast> + ?Sized, +{ + if let Some(it) = &node.leading_colon { + tokens_helper(v, &it.spans); + } + for el in Punctuated::pairs(&node.segments) { + let (it, p) = el.into_tuple(); + v.visit_path_segment(it); + if let Some(p) = p { + tokens_helper(v, &p.spans); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_path_arguments<'ast, V>(v: &mut V, node: &'ast PathArguments) +where + V: Visit<'ast> + ?Sized, +{ + match node { + PathArguments::None => {} + PathArguments::AngleBracketed(_binding_0) => { + v.visit_angle_bracketed_generic_arguments(_binding_0); + } + PathArguments::Parenthesized(_binding_0) => { + v.visit_parenthesized_generic_arguments(_binding_0); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_path_segment<'ast, V>(v: &mut V, node: &'ast PathSegment) +where + V: Visit<'ast> + ?Sized, +{ + v.visit_ident(&node.ident); + v.visit_path_arguments(&node.arguments); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_predicate_eq<'ast, V>(v: &mut V, node: &'ast PredicateEq) +where + V: Visit<'ast> + ?Sized, +{ + v.visit_type(&node.lhs_ty); + tokens_helper(v, &node.eq_token.spans); + v.visit_type(&node.rhs_ty); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_predicate_lifetime<'ast, V>(v: &mut V, node: &'ast PredicateLifetime) +where + V: Visit<'ast> + ?Sized, +{ + v.visit_lifetime(&node.lifetime); + tokens_helper(v, &node.colon_token.spans); + for el in Punctuated::pairs(&node.bounds) { + let (it, p) = el.into_tuple(); + v.visit_lifetime(it); + if let Some(p) = p { + tokens_helper(v, &p.spans); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_predicate_type<'ast, V>(v: &mut V, node: &'ast PredicateType) +where + V: Visit<'ast> + ?Sized, +{ + if let Some(it) = &node.lifetimes { + v.visit_bound_lifetimes(it); + } + v.visit_type(&node.bounded_ty); + tokens_helper(v, &node.colon_token.spans); + for el in Punctuated::pairs(&node.bounds) { + let (it, p) = el.into_tuple(); + v.visit_type_param_bound(it); + if let Some(p) = p { + tokens_helper(v, &p.spans); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_qself<'ast, V>(v: &mut V, node: &'ast QSelf) +where + V: Visit<'ast> + ?Sized, +{ + tokens_helper(v, &node.lt_token.spans); + v.visit_type(&*node.ty); + skip!(node.position); + if let Some(it) = &node.as_token { + tokens_helper(v, &it.span); + } + tokens_helper(v, &node.gt_token.spans); +} +#[cfg(feature = "full")] +pub fn visit_range_limits<'ast, V>(v: &mut V, node: &'ast RangeLimits) +where + V: Visit<'ast> + ?Sized, +{ + match node { + RangeLimits::HalfOpen(_binding_0) => { + tokens_helper(v, &_binding_0.spans); + } + RangeLimits::Closed(_binding_0) => { + tokens_helper(v, &_binding_0.spans); + } + } +} 
+#[cfg(feature = "full")] +pub fn visit_receiver<'ast, V>(v: &mut V, node: &'ast Receiver) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + if let Some(it) = &node.reference { + tokens_helper(v, &(it).0.spans); + if let Some(it) = &(it).1 { + v.visit_lifetime(it); + } + } + if let Some(it) = &node.mutability { + tokens_helper(v, &it.span); + } + tokens_helper(v, &node.self_token.span); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_return_type<'ast, V>(v: &mut V, node: &'ast ReturnType) +where + V: Visit<'ast> + ?Sized, +{ + match node { + ReturnType::Default => {} + ReturnType::Type(_binding_0, _binding_1) => { + tokens_helper(v, &_binding_0.spans); + v.visit_type(&**_binding_1); + } + } +} +#[cfg(feature = "full")] +pub fn visit_signature<'ast, V>(v: &mut V, node: &'ast Signature) +where + V: Visit<'ast> + ?Sized, +{ + if let Some(it) = &node.constness { + tokens_helper(v, &it.span); + } + if let Some(it) = &node.asyncness { + tokens_helper(v, &it.span); + } + if let Some(it) = &node.unsafety { + tokens_helper(v, &it.span); + } + if let Some(it) = &node.abi { + v.visit_abi(it); + } + tokens_helper(v, &node.fn_token.span); + v.visit_ident(&node.ident); + v.visit_generics(&node.generics); + tokens_helper(v, &node.paren_token.span); + for el in Punctuated::pairs(&node.inputs) { + let (it, p) = el.into_tuple(); + v.visit_fn_arg(it); + if let Some(p) = p { + tokens_helper(v, &p.spans); + } + } + if let Some(it) = &node.variadic { + v.visit_variadic(it); + } + v.visit_return_type(&node.output); +} +pub fn visit_span<'ast, V>(v: &mut V, node: &Span) +where + V: Visit<'ast> + ?Sized, +{} +#[cfg(feature = "full")] +pub fn visit_stmt<'ast, V>(v: &mut V, node: &'ast Stmt) +where + V: Visit<'ast> + ?Sized, +{ + match node { + Stmt::Local(_binding_0) => { + v.visit_local(_binding_0); + } + Stmt::Item(_binding_0) => { + v.visit_item(_binding_0); + } + Stmt::Expr(_binding_0) => { + v.visit_expr(_binding_0); + } + Stmt::Semi(_binding_0, _binding_1) => { + v.visit_expr(_binding_0); + tokens_helper(v, &_binding_1.spans); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_trait_bound<'ast, V>(v: &mut V, node: &'ast TraitBound) +where + V: Visit<'ast> + ?Sized, +{ + if let Some(it) = &node.paren_token { + tokens_helper(v, &it.span); + } + v.visit_trait_bound_modifier(&node.modifier); + if let Some(it) = &node.lifetimes { + v.visit_bound_lifetimes(it); + } + v.visit_path(&node.path); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_trait_bound_modifier<'ast, V>(v: &mut V, node: &'ast TraitBoundModifier) +where + V: Visit<'ast> + ?Sized, +{ + match node { + TraitBoundModifier::None => {} + TraitBoundModifier::Maybe(_binding_0) => { + tokens_helper(v, &_binding_0.spans); + } + } +} +#[cfg(feature = "full")] +pub fn visit_trait_item<'ast, V>(v: &mut V, node: &'ast TraitItem) +where + V: Visit<'ast> + ?Sized, +{ + match node { + TraitItem::Const(_binding_0) => { + v.visit_trait_item_const(_binding_0); + } + TraitItem::Method(_binding_0) => { + v.visit_trait_item_method(_binding_0); + } + TraitItem::Type(_binding_0) => { + v.visit_trait_item_type(_binding_0); + } + TraitItem::Macro(_binding_0) => { + v.visit_trait_item_macro(_binding_0); + } + TraitItem::Verbatim(_binding_0) => { + skip!(_binding_0); + } + #[cfg(syn_no_non_exhaustive)] + _ => unreachable!(), + } +} +#[cfg(feature = "full")] +pub fn visit_trait_item_const<'ast, V>(v: &mut V, node: &'ast TraitItemConst) +where + V: Visit<'ast> + ?Sized, +{ 
+ for it in &node.attrs { + v.visit_attribute(it); + } + tokens_helper(v, &node.const_token.span); + v.visit_ident(&node.ident); + tokens_helper(v, &node.colon_token.spans); + v.visit_type(&node.ty); + if let Some(it) = &node.default { + tokens_helper(v, &(it).0.spans); + v.visit_expr(&(it).1); + } + tokens_helper(v, &node.semi_token.spans); +} +#[cfg(feature = "full")] +pub fn visit_trait_item_macro<'ast, V>(v: &mut V, node: &'ast TraitItemMacro) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_macro(&node.mac); + if let Some(it) = &node.semi_token { + tokens_helper(v, &it.spans); + } +} +#[cfg(feature = "full")] +pub fn visit_trait_item_method<'ast, V>(v: &mut V, node: &'ast TraitItemMethod) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_signature(&node.sig); + if let Some(it) = &node.default { + v.visit_block(it); + } + if let Some(it) = &node.semi_token { + tokens_helper(v, &it.spans); + } +} +#[cfg(feature = "full")] +pub fn visit_trait_item_type<'ast, V>(v: &mut V, node: &'ast TraitItemType) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + tokens_helper(v, &node.type_token.span); + v.visit_ident(&node.ident); + v.visit_generics(&node.generics); + if let Some(it) = &node.colon_token { + tokens_helper(v, &it.spans); + } + for el in Punctuated::pairs(&node.bounds) { + let (it, p) = el.into_tuple(); + v.visit_type_param_bound(it); + if let Some(p) = p { + tokens_helper(v, &p.spans); + } + } + if let Some(it) = &node.default { + tokens_helper(v, &(it).0.spans); + v.visit_type(&(it).1); + } + tokens_helper(v, &node.semi_token.spans); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_type<'ast, V>(v: &mut V, node: &'ast Type) +where + V: Visit<'ast> + ?Sized, +{ + match node { + Type::Array(_binding_0) => { + v.visit_type_array(_binding_0); + } + Type::BareFn(_binding_0) => { + v.visit_type_bare_fn(_binding_0); + } + Type::Group(_binding_0) => { + v.visit_type_group(_binding_0); + } + Type::ImplTrait(_binding_0) => { + v.visit_type_impl_trait(_binding_0); + } + Type::Infer(_binding_0) => { + v.visit_type_infer(_binding_0); + } + Type::Macro(_binding_0) => { + v.visit_type_macro(_binding_0); + } + Type::Never(_binding_0) => { + v.visit_type_never(_binding_0); + } + Type::Paren(_binding_0) => { + v.visit_type_paren(_binding_0); + } + Type::Path(_binding_0) => { + v.visit_type_path(_binding_0); + } + Type::Ptr(_binding_0) => { + v.visit_type_ptr(_binding_0); + } + Type::Reference(_binding_0) => { + v.visit_type_reference(_binding_0); + } + Type::Slice(_binding_0) => { + v.visit_type_slice(_binding_0); + } + Type::TraitObject(_binding_0) => { + v.visit_type_trait_object(_binding_0); + } + Type::Tuple(_binding_0) => { + v.visit_type_tuple(_binding_0); + } + Type::Verbatim(_binding_0) => { + skip!(_binding_0); + } + #[cfg(syn_no_non_exhaustive)] + _ => unreachable!(), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_type_array<'ast, V>(v: &mut V, node: &'ast TypeArray) +where + V: Visit<'ast> + ?Sized, +{ + tokens_helper(v, &node.bracket_token.span); + v.visit_type(&*node.elem); + tokens_helper(v, &node.semi_token.spans); + v.visit_expr(&node.len); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_type_bare_fn<'ast, V>(v: &mut V, node: &'ast TypeBareFn) +where + V: Visit<'ast> + ?Sized, +{ + if let Some(it) = &node.lifetimes { + v.visit_bound_lifetimes(it); + } + if let Some(it) = 
&node.unsafety { + tokens_helper(v, &it.span); + } + if let Some(it) = &node.abi { + v.visit_abi(it); + } + tokens_helper(v, &node.fn_token.span); + tokens_helper(v, &node.paren_token.span); + for el in Punctuated::pairs(&node.inputs) { + let (it, p) = el.into_tuple(); + v.visit_bare_fn_arg(it); + if let Some(p) = p { + tokens_helper(v, &p.spans); + } + } + if let Some(it) = &node.variadic { + v.visit_variadic(it); + } + v.visit_return_type(&node.output); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_type_group<'ast, V>(v: &mut V, node: &'ast TypeGroup) +where + V: Visit<'ast> + ?Sized, +{ + tokens_helper(v, &node.group_token.span); + v.visit_type(&*node.elem); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_type_impl_trait<'ast, V>(v: &mut V, node: &'ast TypeImplTrait) +where + V: Visit<'ast> + ?Sized, +{ + tokens_helper(v, &node.impl_token.span); + for el in Punctuated::pairs(&node.bounds) { + let (it, p) = el.into_tuple(); + v.visit_type_param_bound(it); + if let Some(p) = p { + tokens_helper(v, &p.spans); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_type_infer<'ast, V>(v: &mut V, node: &'ast TypeInfer) +where + V: Visit<'ast> + ?Sized, +{ + tokens_helper(v, &node.underscore_token.spans); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_type_macro<'ast, V>(v: &mut V, node: &'ast TypeMacro) +where + V: Visit<'ast> + ?Sized, +{ + v.visit_macro(&node.mac); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_type_never<'ast, V>(v: &mut V, node: &'ast TypeNever) +where + V: Visit<'ast> + ?Sized, +{ + tokens_helper(v, &node.bang_token.spans); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_type_param<'ast, V>(v: &mut V, node: &'ast TypeParam) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_ident(&node.ident); + if let Some(it) = &node.colon_token { + tokens_helper(v, &it.spans); + } + for el in Punctuated::pairs(&node.bounds) { + let (it, p) = el.into_tuple(); + v.visit_type_param_bound(it); + if let Some(p) = p { + tokens_helper(v, &p.spans); + } + } + if let Some(it) = &node.eq_token { + tokens_helper(v, &it.spans); + } + if let Some(it) = &node.default { + v.visit_type(it); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_type_param_bound<'ast, V>(v: &mut V, node: &'ast TypeParamBound) +where + V: Visit<'ast> + ?Sized, +{ + match node { + TypeParamBound::Trait(_binding_0) => { + v.visit_trait_bound(_binding_0); + } + TypeParamBound::Lifetime(_binding_0) => { + v.visit_lifetime(_binding_0); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_type_paren<'ast, V>(v: &mut V, node: &'ast TypeParen) +where + V: Visit<'ast> + ?Sized, +{ + tokens_helper(v, &node.paren_token.span); + v.visit_type(&*node.elem); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_type_path<'ast, V>(v: &mut V, node: &'ast TypePath) +where + V: Visit<'ast> + ?Sized, +{ + if let Some(it) = &node.qself { + v.visit_qself(it); + } + v.visit_path(&node.path); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_type_ptr<'ast, V>(v: &mut V, node: &'ast TypePtr) +where + V: Visit<'ast> + ?Sized, +{ + tokens_helper(v, &node.star_token.spans); + if let Some(it) = &node.const_token { + tokens_helper(v, &it.span); + } + if let Some(it) = &node.mutability { + tokens_helper(v, &it.span); + } + v.visit_type(&*node.elem); +} +#[cfg(any(feature = "derive", feature = "full"))] 
+pub fn visit_type_reference<'ast, V>(v: &mut V, node: &'ast TypeReference) +where + V: Visit<'ast> + ?Sized, +{ + tokens_helper(v, &node.and_token.spans); + if let Some(it) = &node.lifetime { + v.visit_lifetime(it); + } + if let Some(it) = &node.mutability { + tokens_helper(v, &it.span); + } + v.visit_type(&*node.elem); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_type_slice<'ast, V>(v: &mut V, node: &'ast TypeSlice) +where + V: Visit<'ast> + ?Sized, +{ + tokens_helper(v, &node.bracket_token.span); + v.visit_type(&*node.elem); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_type_trait_object<'ast, V>(v: &mut V, node: &'ast TypeTraitObject) +where + V: Visit<'ast> + ?Sized, +{ + if let Some(it) = &node.dyn_token { + tokens_helper(v, &it.span); + } + for el in Punctuated::pairs(&node.bounds) { + let (it, p) = el.into_tuple(); + v.visit_type_param_bound(it); + if let Some(p) = p { + tokens_helper(v, &p.spans); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_type_tuple<'ast, V>(v: &mut V, node: &'ast TypeTuple) +where + V: Visit<'ast> + ?Sized, +{ + tokens_helper(v, &node.paren_token.span); + for el in Punctuated::pairs(&node.elems) { + let (it, p) = el.into_tuple(); + v.visit_type(it); + if let Some(p) = p { + tokens_helper(v, &p.spans); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_un_op<'ast, V>(v: &mut V, node: &'ast UnOp) +where + V: Visit<'ast> + ?Sized, +{ + match node { + UnOp::Deref(_binding_0) => { + tokens_helper(v, &_binding_0.spans); + } + UnOp::Not(_binding_0) => { + tokens_helper(v, &_binding_0.spans); + } + UnOp::Neg(_binding_0) => { + tokens_helper(v, &_binding_0.spans); + } + } +} +#[cfg(feature = "full")] +pub fn visit_use_glob<'ast, V>(v: &mut V, node: &'ast UseGlob) +where + V: Visit<'ast> + ?Sized, +{ + tokens_helper(v, &node.star_token.spans); +} +#[cfg(feature = "full")] +pub fn visit_use_group<'ast, V>(v: &mut V, node: &'ast UseGroup) +where + V: Visit<'ast> + ?Sized, +{ + tokens_helper(v, &node.brace_token.span); + for el in Punctuated::pairs(&node.items) { + let (it, p) = el.into_tuple(); + v.visit_use_tree(it); + if let Some(p) = p { + tokens_helper(v, &p.spans); + } + } +} +#[cfg(feature = "full")] +pub fn visit_use_name<'ast, V>(v: &mut V, node: &'ast UseName) +where + V: Visit<'ast> + ?Sized, +{ + v.visit_ident(&node.ident); +} +#[cfg(feature = "full")] +pub fn visit_use_path<'ast, V>(v: &mut V, node: &'ast UsePath) +where + V: Visit<'ast> + ?Sized, +{ + v.visit_ident(&node.ident); + tokens_helper(v, &node.colon2_token.spans); + v.visit_use_tree(&*node.tree); +} +#[cfg(feature = "full")] +pub fn visit_use_rename<'ast, V>(v: &mut V, node: &'ast UseRename) +where + V: Visit<'ast> + ?Sized, +{ + v.visit_ident(&node.ident); + tokens_helper(v, &node.as_token.span); + v.visit_ident(&node.rename); +} +#[cfg(feature = "full")] +pub fn visit_use_tree<'ast, V>(v: &mut V, node: &'ast UseTree) +where + V: Visit<'ast> + ?Sized, +{ + match node { + UseTree::Path(_binding_0) => { + v.visit_use_path(_binding_0); + } + UseTree::Name(_binding_0) => { + v.visit_use_name(_binding_0); + } + UseTree::Rename(_binding_0) => { + v.visit_use_rename(_binding_0); + } + UseTree::Glob(_binding_0) => { + v.visit_use_glob(_binding_0); + } + UseTree::Group(_binding_0) => { + v.visit_use_group(_binding_0); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_variadic<'ast, V>(v: &mut V, node: &'ast Variadic) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + 
v.visit_attribute(it); + } + tokens_helper(v, &node.dots.spans); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_variant<'ast, V>(v: &mut V, node: &'ast Variant) +where + V: Visit<'ast> + ?Sized, +{ + for it in &node.attrs { + v.visit_attribute(it); + } + v.visit_ident(&node.ident); + v.visit_fields(&node.fields); + if let Some(it) = &node.discriminant { + tokens_helper(v, &(it).0.spans); + v.visit_expr(&(it).1); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_vis_crate<'ast, V>(v: &mut V, node: &'ast VisCrate) +where + V: Visit<'ast> + ?Sized, +{ + tokens_helper(v, &node.crate_token.span); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_vis_public<'ast, V>(v: &mut V, node: &'ast VisPublic) +where + V: Visit<'ast> + ?Sized, +{ + tokens_helper(v, &node.pub_token.span); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_vis_restricted<'ast, V>(v: &mut V, node: &'ast VisRestricted) +where + V: Visit<'ast> + ?Sized, +{ + tokens_helper(v, &node.pub_token.span); + tokens_helper(v, &node.paren_token.span); + if let Some(it) = &node.in_token { + tokens_helper(v, &it.span); + } + v.visit_path(&*node.path); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_visibility<'ast, V>(v: &mut V, node: &'ast Visibility) +where + V: Visit<'ast> + ?Sized, +{ + match node { + Visibility::Public(_binding_0) => { + v.visit_vis_public(_binding_0); + } + Visibility::Crate(_binding_0) => { + v.visit_vis_crate(_binding_0); + } + Visibility::Restricted(_binding_0) => { + v.visit_vis_restricted(_binding_0); + } + Visibility::Inherited => {} + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_where_clause<'ast, V>(v: &mut V, node: &'ast WhereClause) +where + V: Visit<'ast> + ?Sized, +{ + tokens_helper(v, &node.where_token.span); + for el in Punctuated::pairs(&node.predicates) { + let (it, p) = el.into_tuple(); + v.visit_where_predicate(it); + if let Some(p) = p { + tokens_helper(v, &p.spans); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_where_predicate<'ast, V>(v: &mut V, node: &'ast WherePredicate) +where + V: Visit<'ast> + ?Sized, +{ + match node { + WherePredicate::Type(_binding_0) => { + v.visit_predicate_type(_binding_0); + } + WherePredicate::Lifetime(_binding_0) => { + v.visit_predicate_lifetime(_binding_0); + } + WherePredicate::Eq(_binding_0) => { + v.visit_predicate_eq(_binding_0); + } + } +} diff --git a/third_party/rust/syn/src/gen/visit_mut.rs b/third_party/rust/syn/src/gen/visit_mut.rs new file mode 100644 index 0000000000..239709d194 --- /dev/null +++ b/third_party/rust/syn/src/gen/visit_mut.rs @@ -0,0 +1,3786 @@ +// This file is @generated by syn-internal-codegen. +// It is not intended for manual editing. + +#![allow(unused_variables)] +#[cfg(any(feature = "full", feature = "derive"))] +use crate::gen::helper::visit_mut::*; +#[cfg(any(feature = "full", feature = "derive"))] +use crate::punctuated::Punctuated; +use crate::*; +use proc_macro2::Span; +#[cfg(feature = "full")] +macro_rules! full { + ($e:expr) => { + $e + }; +} +#[cfg(all(feature = "derive", not(feature = "full")))] +macro_rules! full { + ($e:expr) => { + unreachable!() + }; +} +macro_rules! skip { + ($($tt:tt)*) => {}; +} +/// Syntax tree traversal to mutate an exclusive borrow of a syntax tree in +/// place. +/// +/// See the [module documentation] for details. 
+/// +/// [module documentation]: self +/// +/// *This trait is available only if Syn is built with the `"visit-mut"` feature.* +pub trait VisitMut { + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_abi_mut(&mut self, i: &mut Abi) { + visit_abi_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_angle_bracketed_generic_arguments_mut( + &mut self, + i: &mut AngleBracketedGenericArguments, + ) { + visit_angle_bracketed_generic_arguments_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_arm_mut(&mut self, i: &mut Arm) { + visit_arm_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_attr_style_mut(&mut self, i: &mut AttrStyle) { + visit_attr_style_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_attribute_mut(&mut self, i: &mut Attribute) { + visit_attribute_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_bare_fn_arg_mut(&mut self, i: &mut BareFnArg) { + visit_bare_fn_arg_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_bin_op_mut(&mut self, i: &mut BinOp) { + visit_bin_op_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_binding_mut(&mut self, i: &mut Binding) { + visit_binding_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_block_mut(&mut self, i: &mut Block) { + visit_block_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_bound_lifetimes_mut(&mut self, i: &mut BoundLifetimes) { + visit_bound_lifetimes_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_const_param_mut(&mut self, i: &mut ConstParam) { + visit_const_param_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_constraint_mut(&mut self, i: &mut Constraint) { + visit_constraint_mut(self, i); + } + #[cfg(feature = "derive")] + fn visit_data_mut(&mut self, i: &mut Data) { + visit_data_mut(self, i); + } + #[cfg(feature = "derive")] + fn visit_data_enum_mut(&mut self, i: &mut DataEnum) { + visit_data_enum_mut(self, i); + } + #[cfg(feature = "derive")] + fn visit_data_struct_mut(&mut self, i: &mut DataStruct) { + visit_data_struct_mut(self, i); + } + #[cfg(feature = "derive")] + fn visit_data_union_mut(&mut self, i: &mut DataUnion) { + visit_data_union_mut(self, i); + } + #[cfg(feature = "derive")] + fn visit_derive_input_mut(&mut self, i: &mut DeriveInput) { + visit_derive_input_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_expr_mut(&mut self, i: &mut Expr) { + visit_expr_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_array_mut(&mut self, i: &mut ExprArray) { + visit_expr_array_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_assign_mut(&mut self, i: &mut ExprAssign) { + visit_expr_assign_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_assign_op_mut(&mut self, i: &mut ExprAssignOp) { + visit_expr_assign_op_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_async_mut(&mut self, i: &mut ExprAsync) { + visit_expr_async_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_await_mut(&mut self, i: &mut ExprAwait) { + visit_expr_await_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_expr_binary_mut(&mut self, i: &mut ExprBinary) { + visit_expr_binary_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_block_mut(&mut self, i: &mut ExprBlock) { + visit_expr_block_mut(self, i); + } + #[cfg(feature = "full")] + fn 
visit_expr_box_mut(&mut self, i: &mut ExprBox) { + visit_expr_box_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_break_mut(&mut self, i: &mut ExprBreak) { + visit_expr_break_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_expr_call_mut(&mut self, i: &mut ExprCall) { + visit_expr_call_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_expr_cast_mut(&mut self, i: &mut ExprCast) { + visit_expr_cast_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_closure_mut(&mut self, i: &mut ExprClosure) { + visit_expr_closure_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_continue_mut(&mut self, i: &mut ExprContinue) { + visit_expr_continue_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_expr_field_mut(&mut self, i: &mut ExprField) { + visit_expr_field_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_for_loop_mut(&mut self, i: &mut ExprForLoop) { + visit_expr_for_loop_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_group_mut(&mut self, i: &mut ExprGroup) { + visit_expr_group_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_if_mut(&mut self, i: &mut ExprIf) { + visit_expr_if_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_expr_index_mut(&mut self, i: &mut ExprIndex) { + visit_expr_index_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_let_mut(&mut self, i: &mut ExprLet) { + visit_expr_let_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_expr_lit_mut(&mut self, i: &mut ExprLit) { + visit_expr_lit_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_loop_mut(&mut self, i: &mut ExprLoop) { + visit_expr_loop_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_macro_mut(&mut self, i: &mut ExprMacro) { + visit_expr_macro_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_match_mut(&mut self, i: &mut ExprMatch) { + visit_expr_match_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_method_call_mut(&mut self, i: &mut ExprMethodCall) { + visit_expr_method_call_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_expr_paren_mut(&mut self, i: &mut ExprParen) { + visit_expr_paren_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_expr_path_mut(&mut self, i: &mut ExprPath) { + visit_expr_path_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_range_mut(&mut self, i: &mut ExprRange) { + visit_expr_range_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_reference_mut(&mut self, i: &mut ExprReference) { + visit_expr_reference_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_repeat_mut(&mut self, i: &mut ExprRepeat) { + visit_expr_repeat_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_return_mut(&mut self, i: &mut ExprReturn) { + visit_expr_return_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_struct_mut(&mut self, i: &mut ExprStruct) { + visit_expr_struct_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_try_mut(&mut self, i: &mut ExprTry) { + visit_expr_try_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_try_block_mut(&mut self, i: &mut ExprTryBlock) { + visit_expr_try_block_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_tuple_mut(&mut self, i: &mut ExprTuple) { + visit_expr_tuple_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_type_mut(&mut self, i: &mut ExprType) { + 
visit_expr_type_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_expr_unary_mut(&mut self, i: &mut ExprUnary) { + visit_expr_unary_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_unsafe_mut(&mut self, i: &mut ExprUnsafe) { + visit_expr_unsafe_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_while_mut(&mut self, i: &mut ExprWhile) { + visit_expr_while_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_expr_yield_mut(&mut self, i: &mut ExprYield) { + visit_expr_yield_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_field_mut(&mut self, i: &mut Field) { + visit_field_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_field_pat_mut(&mut self, i: &mut FieldPat) { + visit_field_pat_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_field_value_mut(&mut self, i: &mut FieldValue) { + visit_field_value_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_fields_mut(&mut self, i: &mut Fields) { + visit_fields_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_fields_named_mut(&mut self, i: &mut FieldsNamed) { + visit_fields_named_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_fields_unnamed_mut(&mut self, i: &mut FieldsUnnamed) { + visit_fields_unnamed_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_file_mut(&mut self, i: &mut File) { + visit_file_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_fn_arg_mut(&mut self, i: &mut FnArg) { + visit_fn_arg_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_foreign_item_mut(&mut self, i: &mut ForeignItem) { + visit_foreign_item_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_foreign_item_fn_mut(&mut self, i: &mut ForeignItemFn) { + visit_foreign_item_fn_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_foreign_item_macro_mut(&mut self, i: &mut ForeignItemMacro) { + visit_foreign_item_macro_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_foreign_item_static_mut(&mut self, i: &mut ForeignItemStatic) { + visit_foreign_item_static_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_foreign_item_type_mut(&mut self, i: &mut ForeignItemType) { + visit_foreign_item_type_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_generic_argument_mut(&mut self, i: &mut GenericArgument) { + visit_generic_argument_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_generic_method_argument_mut(&mut self, i: &mut GenericMethodArgument) { + visit_generic_method_argument_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_generic_param_mut(&mut self, i: &mut GenericParam) { + visit_generic_param_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_generics_mut(&mut self, i: &mut Generics) { + visit_generics_mut(self, i); + } + fn visit_ident_mut(&mut self, i: &mut Ident) { + visit_ident_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_impl_item_mut(&mut self, i: &mut ImplItem) { + visit_impl_item_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_impl_item_const_mut(&mut self, i: &mut ImplItemConst) { + visit_impl_item_const_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_impl_item_macro_mut(&mut self, i: &mut ImplItemMacro) { + visit_impl_item_macro_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_impl_item_method_mut(&mut self, i: &mut ImplItemMethod) { + visit_impl_item_method_mut(self, i); + } + #[cfg(feature = "full")] + fn 
visit_impl_item_type_mut(&mut self, i: &mut ImplItemType) { + visit_impl_item_type_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_index_mut(&mut self, i: &mut Index) { + visit_index_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_item_mut(&mut self, i: &mut Item) { + visit_item_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_item_const_mut(&mut self, i: &mut ItemConst) { + visit_item_const_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_item_enum_mut(&mut self, i: &mut ItemEnum) { + visit_item_enum_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_item_extern_crate_mut(&mut self, i: &mut ItemExternCrate) { + visit_item_extern_crate_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_item_fn_mut(&mut self, i: &mut ItemFn) { + visit_item_fn_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_item_foreign_mod_mut(&mut self, i: &mut ItemForeignMod) { + visit_item_foreign_mod_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_item_impl_mut(&mut self, i: &mut ItemImpl) { + visit_item_impl_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_item_macro_mut(&mut self, i: &mut ItemMacro) { + visit_item_macro_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_item_macro2_mut(&mut self, i: &mut ItemMacro2) { + visit_item_macro2_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_item_mod_mut(&mut self, i: &mut ItemMod) { + visit_item_mod_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_item_static_mut(&mut self, i: &mut ItemStatic) { + visit_item_static_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_item_struct_mut(&mut self, i: &mut ItemStruct) { + visit_item_struct_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_item_trait_mut(&mut self, i: &mut ItemTrait) { + visit_item_trait_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_item_trait_alias_mut(&mut self, i: &mut ItemTraitAlias) { + visit_item_trait_alias_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_item_type_mut(&mut self, i: &mut ItemType) { + visit_item_type_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_item_union_mut(&mut self, i: &mut ItemUnion) { + visit_item_union_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_item_use_mut(&mut self, i: &mut ItemUse) { + visit_item_use_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_label_mut(&mut self, i: &mut Label) { + visit_label_mut(self, i); + } + fn visit_lifetime_mut(&mut self, i: &mut Lifetime) { + visit_lifetime_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_lifetime_def_mut(&mut self, i: &mut LifetimeDef) { + visit_lifetime_def_mut(self, i); + } + fn visit_lit_mut(&mut self, i: &mut Lit) { + visit_lit_mut(self, i); + } + fn visit_lit_bool_mut(&mut self, i: &mut LitBool) { + visit_lit_bool_mut(self, i); + } + fn visit_lit_byte_mut(&mut self, i: &mut LitByte) { + visit_lit_byte_mut(self, i); + } + fn visit_lit_byte_str_mut(&mut self, i: &mut LitByteStr) { + visit_lit_byte_str_mut(self, i); + } + fn visit_lit_char_mut(&mut self, i: &mut LitChar) { + visit_lit_char_mut(self, i); + } + fn visit_lit_float_mut(&mut self, i: &mut LitFloat) { + visit_lit_float_mut(self, i); + } + fn visit_lit_int_mut(&mut self, i: &mut LitInt) { + visit_lit_int_mut(self, i); + } + fn visit_lit_str_mut(&mut self, i: &mut LitStr) { + visit_lit_str_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_local_mut(&mut self, i: &mut Local) { + visit_local_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn 
visit_macro_mut(&mut self, i: &mut Macro) { + visit_macro_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_macro_delimiter_mut(&mut self, i: &mut MacroDelimiter) { + visit_macro_delimiter_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_member_mut(&mut self, i: &mut Member) { + visit_member_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_meta_mut(&mut self, i: &mut Meta) { + visit_meta_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_meta_list_mut(&mut self, i: &mut MetaList) { + visit_meta_list_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_meta_name_value_mut(&mut self, i: &mut MetaNameValue) { + visit_meta_name_value_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_method_turbofish_mut(&mut self, i: &mut MethodTurbofish) { + visit_method_turbofish_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_nested_meta_mut(&mut self, i: &mut NestedMeta) { + visit_nested_meta_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_parenthesized_generic_arguments_mut( + &mut self, + i: &mut ParenthesizedGenericArguments, + ) { + visit_parenthesized_generic_arguments_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_pat_mut(&mut self, i: &mut Pat) { + visit_pat_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_pat_box_mut(&mut self, i: &mut PatBox) { + visit_pat_box_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_pat_ident_mut(&mut self, i: &mut PatIdent) { + visit_pat_ident_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_pat_lit_mut(&mut self, i: &mut PatLit) { + visit_pat_lit_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_pat_macro_mut(&mut self, i: &mut PatMacro) { + visit_pat_macro_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_pat_or_mut(&mut self, i: &mut PatOr) { + visit_pat_or_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_pat_path_mut(&mut self, i: &mut PatPath) { + visit_pat_path_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_pat_range_mut(&mut self, i: &mut PatRange) { + visit_pat_range_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_pat_reference_mut(&mut self, i: &mut PatReference) { + visit_pat_reference_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_pat_rest_mut(&mut self, i: &mut PatRest) { + visit_pat_rest_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_pat_slice_mut(&mut self, i: &mut PatSlice) { + visit_pat_slice_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_pat_struct_mut(&mut self, i: &mut PatStruct) { + visit_pat_struct_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_pat_tuple_mut(&mut self, i: &mut PatTuple) { + visit_pat_tuple_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_pat_tuple_struct_mut(&mut self, i: &mut PatTupleStruct) { + visit_pat_tuple_struct_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_pat_type_mut(&mut self, i: &mut PatType) { + visit_pat_type_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_pat_wild_mut(&mut self, i: &mut PatWild) { + visit_pat_wild_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_path_mut(&mut self, i: &mut Path) { + visit_path_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_path_arguments_mut(&mut self, i: &mut PathArguments) { + visit_path_arguments_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn 
visit_path_segment_mut(&mut self, i: &mut PathSegment) { + visit_path_segment_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_predicate_eq_mut(&mut self, i: &mut PredicateEq) { + visit_predicate_eq_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_predicate_lifetime_mut(&mut self, i: &mut PredicateLifetime) { + visit_predicate_lifetime_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_predicate_type_mut(&mut self, i: &mut PredicateType) { + visit_predicate_type_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_qself_mut(&mut self, i: &mut QSelf) { + visit_qself_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_range_limits_mut(&mut self, i: &mut RangeLimits) { + visit_range_limits_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_receiver_mut(&mut self, i: &mut Receiver) { + visit_receiver_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_return_type_mut(&mut self, i: &mut ReturnType) { + visit_return_type_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_signature_mut(&mut self, i: &mut Signature) { + visit_signature_mut(self, i); + } + fn visit_span_mut(&mut self, i: &mut Span) { + visit_span_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_stmt_mut(&mut self, i: &mut Stmt) { + visit_stmt_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_trait_bound_mut(&mut self, i: &mut TraitBound) { + visit_trait_bound_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_trait_bound_modifier_mut(&mut self, i: &mut TraitBoundModifier) { + visit_trait_bound_modifier_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_trait_item_mut(&mut self, i: &mut TraitItem) { + visit_trait_item_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_trait_item_const_mut(&mut self, i: &mut TraitItemConst) { + visit_trait_item_const_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_trait_item_macro_mut(&mut self, i: &mut TraitItemMacro) { + visit_trait_item_macro_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_trait_item_method_mut(&mut self, i: &mut TraitItemMethod) { + visit_trait_item_method_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_trait_item_type_mut(&mut self, i: &mut TraitItemType) { + visit_trait_item_type_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_type_mut(&mut self, i: &mut Type) { + visit_type_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_type_array_mut(&mut self, i: &mut TypeArray) { + visit_type_array_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_type_bare_fn_mut(&mut self, i: &mut TypeBareFn) { + visit_type_bare_fn_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_type_group_mut(&mut self, i: &mut TypeGroup) { + visit_type_group_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_type_impl_trait_mut(&mut self, i: &mut TypeImplTrait) { + visit_type_impl_trait_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_type_infer_mut(&mut self, i: &mut TypeInfer) { + visit_type_infer_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_type_macro_mut(&mut self, i: &mut TypeMacro) { + visit_type_macro_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_type_never_mut(&mut self, i: &mut TypeNever) { 
+ visit_type_never_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_type_param_mut(&mut self, i: &mut TypeParam) { + visit_type_param_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_type_param_bound_mut(&mut self, i: &mut TypeParamBound) { + visit_type_param_bound_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_type_paren_mut(&mut self, i: &mut TypeParen) { + visit_type_paren_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_type_path_mut(&mut self, i: &mut TypePath) { + visit_type_path_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_type_ptr_mut(&mut self, i: &mut TypePtr) { + visit_type_ptr_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_type_reference_mut(&mut self, i: &mut TypeReference) { + visit_type_reference_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_type_slice_mut(&mut self, i: &mut TypeSlice) { + visit_type_slice_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_type_trait_object_mut(&mut self, i: &mut TypeTraitObject) { + visit_type_trait_object_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_type_tuple_mut(&mut self, i: &mut TypeTuple) { + visit_type_tuple_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_un_op_mut(&mut self, i: &mut UnOp) { + visit_un_op_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_use_glob_mut(&mut self, i: &mut UseGlob) { + visit_use_glob_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_use_group_mut(&mut self, i: &mut UseGroup) { + visit_use_group_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_use_name_mut(&mut self, i: &mut UseName) { + visit_use_name_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_use_path_mut(&mut self, i: &mut UsePath) { + visit_use_path_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_use_rename_mut(&mut self, i: &mut UseRename) { + visit_use_rename_mut(self, i); + } + #[cfg(feature = "full")] + fn visit_use_tree_mut(&mut self, i: &mut UseTree) { + visit_use_tree_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_variadic_mut(&mut self, i: &mut Variadic) { + visit_variadic_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_variant_mut(&mut self, i: &mut Variant) { + visit_variant_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_vis_crate_mut(&mut self, i: &mut VisCrate) { + visit_vis_crate_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_vis_public_mut(&mut self, i: &mut VisPublic) { + visit_vis_public_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_vis_restricted_mut(&mut self, i: &mut VisRestricted) { + visit_vis_restricted_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_visibility_mut(&mut self, i: &mut Visibility) { + visit_visibility_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_where_clause_mut(&mut self, i: &mut WhereClause) { + visit_where_clause_mut(self, i); + } + #[cfg(any(feature = "derive", feature = "full"))] + fn visit_where_predicate_mut(&mut self, i: &mut WherePredicate) { + visit_where_predicate_mut(self, i); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_abi_mut<V>(v: &mut V, node: &mut Abi) +where + V: 
VisitMut + ?Sized, +{ + tokens_helper(v, &mut node.extern_token.span); + if let Some(it) = &mut node.name { + v.visit_lit_str_mut(it); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_angle_bracketed_generic_arguments_mut<V>( + v: &mut V, + node: &mut AngleBracketedGenericArguments, +) +where + V: VisitMut + ?Sized, +{ + if let Some(it) = &mut node.colon2_token { + tokens_helper(v, &mut it.spans); + } + tokens_helper(v, &mut node.lt_token.spans); + for el in Punctuated::pairs_mut(&mut node.args) { + let (it, p) = el.into_tuple(); + v.visit_generic_argument_mut(it); + if let Some(p) = p { + tokens_helper(v, &mut p.spans); + } + } + tokens_helper(v, &mut node.gt_token.spans); +} +#[cfg(feature = "full")] +pub fn visit_arm_mut<V>(v: &mut V, node: &mut Arm) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_pat_mut(&mut node.pat); + if let Some(it) = &mut node.guard { + tokens_helper(v, &mut (it).0.span); + v.visit_expr_mut(&mut *(it).1); + } + tokens_helper(v, &mut node.fat_arrow_token.spans); + v.visit_expr_mut(&mut *node.body); + if let Some(it) = &mut node.comma { + tokens_helper(v, &mut it.spans); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_attr_style_mut<V>(v: &mut V, node: &mut AttrStyle) +where + V: VisitMut + ?Sized, +{ + match node { + AttrStyle::Outer => {} + AttrStyle::Inner(_binding_0) => { + tokens_helper(v, &mut _binding_0.spans); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_attribute_mut<V>(v: &mut V, node: &mut Attribute) +where + V: VisitMut + ?Sized, +{ + tokens_helper(v, &mut node.pound_token.spans); + v.visit_attr_style_mut(&mut node.style); + tokens_helper(v, &mut node.bracket_token.span); + v.visit_path_mut(&mut node.path); + skip!(node.tokens); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_bare_fn_arg_mut<V>(v: &mut V, node: &mut BareFnArg) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + if let Some(it) = &mut node.name { + v.visit_ident_mut(&mut (it).0); + tokens_helper(v, &mut (it).1.spans); + } + v.visit_type_mut(&mut node.ty); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_bin_op_mut<V>(v: &mut V, node: &mut BinOp) +where + V: VisitMut + ?Sized, +{ + match node { + BinOp::Add(_binding_0) => { + tokens_helper(v, &mut _binding_0.spans); + } + BinOp::Sub(_binding_0) => { + tokens_helper(v, &mut _binding_0.spans); + } + BinOp::Mul(_binding_0) => { + tokens_helper(v, &mut _binding_0.spans); + } + BinOp::Div(_binding_0) => { + tokens_helper(v, &mut _binding_0.spans); + } + BinOp::Rem(_binding_0) => { + tokens_helper(v, &mut _binding_0.spans); + } + BinOp::And(_binding_0) => { + tokens_helper(v, &mut _binding_0.spans); + } + BinOp::Or(_binding_0) => { + tokens_helper(v, &mut _binding_0.spans); + } + BinOp::BitXor(_binding_0) => { + tokens_helper(v, &mut _binding_0.spans); + } + BinOp::BitAnd(_binding_0) => { + tokens_helper(v, &mut _binding_0.spans); + } + BinOp::BitOr(_binding_0) => { + tokens_helper(v, &mut _binding_0.spans); + } + BinOp::Shl(_binding_0) => { + tokens_helper(v, &mut _binding_0.spans); + } + BinOp::Shr(_binding_0) => { + tokens_helper(v, &mut _binding_0.spans); + } + BinOp::Eq(_binding_0) => { + tokens_helper(v, &mut _binding_0.spans); + } + BinOp::Lt(_binding_0) => { + tokens_helper(v, &mut _binding_0.spans); + } + BinOp::Le(_binding_0) => { + tokens_helper(v, &mut _binding_0.spans); + } + BinOp::Ne(_binding_0) => { + 
tokens_helper(v, &mut _binding_0.spans); + } + BinOp::Ge(_binding_0) => { + tokens_helper(v, &mut _binding_0.spans); + } + BinOp::Gt(_binding_0) => { + tokens_helper(v, &mut _binding_0.spans); + } + BinOp::AddEq(_binding_0) => { + tokens_helper(v, &mut _binding_0.spans); + } + BinOp::SubEq(_binding_0) => { + tokens_helper(v, &mut _binding_0.spans); + } + BinOp::MulEq(_binding_0) => { + tokens_helper(v, &mut _binding_0.spans); + } + BinOp::DivEq(_binding_0) => { + tokens_helper(v, &mut _binding_0.spans); + } + BinOp::RemEq(_binding_0) => { + tokens_helper(v, &mut _binding_0.spans); + } + BinOp::BitXorEq(_binding_0) => { + tokens_helper(v, &mut _binding_0.spans); + } + BinOp::BitAndEq(_binding_0) => { + tokens_helper(v, &mut _binding_0.spans); + } + BinOp::BitOrEq(_binding_0) => { + tokens_helper(v, &mut _binding_0.spans); + } + BinOp::ShlEq(_binding_0) => { + tokens_helper(v, &mut _binding_0.spans); + } + BinOp::ShrEq(_binding_0) => { + tokens_helper(v, &mut _binding_0.spans); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_binding_mut<V>(v: &mut V, node: &mut Binding) +where + V: VisitMut + ?Sized, +{ + v.visit_ident_mut(&mut node.ident); + tokens_helper(v, &mut node.eq_token.spans); + v.visit_type_mut(&mut node.ty); +} +#[cfg(feature = "full")] +pub fn visit_block_mut<V>(v: &mut V, node: &mut Block) +where + V: VisitMut + ?Sized, +{ + tokens_helper(v, &mut node.brace_token.span); + for it in &mut node.stmts { + v.visit_stmt_mut(it); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_bound_lifetimes_mut<V>(v: &mut V, node: &mut BoundLifetimes) +where + V: VisitMut + ?Sized, +{ + tokens_helper(v, &mut node.for_token.span); + tokens_helper(v, &mut node.lt_token.spans); + for el in Punctuated::pairs_mut(&mut node.lifetimes) { + let (it, p) = el.into_tuple(); + v.visit_lifetime_def_mut(it); + if let Some(p) = p { + tokens_helper(v, &mut p.spans); + } + } + tokens_helper(v, &mut node.gt_token.spans); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_const_param_mut<V>(v: &mut V, node: &mut ConstParam) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + tokens_helper(v, &mut node.const_token.span); + v.visit_ident_mut(&mut node.ident); + tokens_helper(v, &mut node.colon_token.spans); + v.visit_type_mut(&mut node.ty); + if let Some(it) = &mut node.eq_token { + tokens_helper(v, &mut it.spans); + } + if let Some(it) = &mut node.default { + v.visit_expr_mut(it); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_constraint_mut<V>(v: &mut V, node: &mut Constraint) +where + V: VisitMut + ?Sized, +{ + v.visit_ident_mut(&mut node.ident); + tokens_helper(v, &mut node.colon_token.spans); + for el in Punctuated::pairs_mut(&mut node.bounds) { + let (it, p) = el.into_tuple(); + v.visit_type_param_bound_mut(it); + if let Some(p) = p { + tokens_helper(v, &mut p.spans); + } + } +} +#[cfg(feature = "derive")] +pub fn visit_data_mut<V>(v: &mut V, node: &mut Data) +where + V: VisitMut + ?Sized, +{ + match node { + Data::Struct(_binding_0) => { + v.visit_data_struct_mut(_binding_0); + } + Data::Enum(_binding_0) => { + v.visit_data_enum_mut(_binding_0); + } + Data::Union(_binding_0) => { + v.visit_data_union_mut(_binding_0); + } + } +} +#[cfg(feature = "derive")] +pub fn visit_data_enum_mut<V>(v: &mut V, node: &mut DataEnum) +where + V: VisitMut + ?Sized, +{ + tokens_helper(v, &mut node.enum_token.span); + tokens_helper(v, &mut node.brace_token.span); + for el in 
Punctuated::pairs_mut(&mut node.variants) { + let (it, p) = el.into_tuple(); + v.visit_variant_mut(it); + if let Some(p) = p { + tokens_helper(v, &mut p.spans); + } + } +} +#[cfg(feature = "derive")] +pub fn visit_data_struct_mut<V>(v: &mut V, node: &mut DataStruct) +where + V: VisitMut + ?Sized, +{ + tokens_helper(v, &mut node.struct_token.span); + v.visit_fields_mut(&mut node.fields); + if let Some(it) = &mut node.semi_token { + tokens_helper(v, &mut it.spans); + } +} +#[cfg(feature = "derive")] +pub fn visit_data_union_mut<V>(v: &mut V, node: &mut DataUnion) +where + V: VisitMut + ?Sized, +{ + tokens_helper(v, &mut node.union_token.span); + v.visit_fields_named_mut(&mut node.fields); +} +#[cfg(feature = "derive")] +pub fn visit_derive_input_mut<V>(v: &mut V, node: &mut DeriveInput) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_visibility_mut(&mut node.vis); + v.visit_ident_mut(&mut node.ident); + v.visit_generics_mut(&mut node.generics); + v.visit_data_mut(&mut node.data); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_expr_mut<V>(v: &mut V, node: &mut Expr) +where + V: VisitMut + ?Sized, +{ + match node { + Expr::Array(_binding_0) => { + full!(v.visit_expr_array_mut(_binding_0)); + } + Expr::Assign(_binding_0) => { + full!(v.visit_expr_assign_mut(_binding_0)); + } + Expr::AssignOp(_binding_0) => { + full!(v.visit_expr_assign_op_mut(_binding_0)); + } + Expr::Async(_binding_0) => { + full!(v.visit_expr_async_mut(_binding_0)); + } + Expr::Await(_binding_0) => { + full!(v.visit_expr_await_mut(_binding_0)); + } + Expr::Binary(_binding_0) => { + v.visit_expr_binary_mut(_binding_0); + } + Expr::Block(_binding_0) => { + full!(v.visit_expr_block_mut(_binding_0)); + } + Expr::Box(_binding_0) => { + full!(v.visit_expr_box_mut(_binding_0)); + } + Expr::Break(_binding_0) => { + full!(v.visit_expr_break_mut(_binding_0)); + } + Expr::Call(_binding_0) => { + v.visit_expr_call_mut(_binding_0); + } + Expr::Cast(_binding_0) => { + v.visit_expr_cast_mut(_binding_0); + } + Expr::Closure(_binding_0) => { + full!(v.visit_expr_closure_mut(_binding_0)); + } + Expr::Continue(_binding_0) => { + full!(v.visit_expr_continue_mut(_binding_0)); + } + Expr::Field(_binding_0) => { + v.visit_expr_field_mut(_binding_0); + } + Expr::ForLoop(_binding_0) => { + full!(v.visit_expr_for_loop_mut(_binding_0)); + } + Expr::Group(_binding_0) => { + full!(v.visit_expr_group_mut(_binding_0)); + } + Expr::If(_binding_0) => { + full!(v.visit_expr_if_mut(_binding_0)); + } + Expr::Index(_binding_0) => { + v.visit_expr_index_mut(_binding_0); + } + Expr::Let(_binding_0) => { + full!(v.visit_expr_let_mut(_binding_0)); + } + Expr::Lit(_binding_0) => { + v.visit_expr_lit_mut(_binding_0); + } + Expr::Loop(_binding_0) => { + full!(v.visit_expr_loop_mut(_binding_0)); + } + Expr::Macro(_binding_0) => { + full!(v.visit_expr_macro_mut(_binding_0)); + } + Expr::Match(_binding_0) => { + full!(v.visit_expr_match_mut(_binding_0)); + } + Expr::MethodCall(_binding_0) => { + full!(v.visit_expr_method_call_mut(_binding_0)); + } + Expr::Paren(_binding_0) => { + v.visit_expr_paren_mut(_binding_0); + } + Expr::Path(_binding_0) => { + v.visit_expr_path_mut(_binding_0); + } + Expr::Range(_binding_0) => { + full!(v.visit_expr_range_mut(_binding_0)); + } + Expr::Reference(_binding_0) => { + full!(v.visit_expr_reference_mut(_binding_0)); + } + Expr::Repeat(_binding_0) => { + full!(v.visit_expr_repeat_mut(_binding_0)); + } + Expr::Return(_binding_0) => { + 
full!(v.visit_expr_return_mut(_binding_0)); + } + Expr::Struct(_binding_0) => { + full!(v.visit_expr_struct_mut(_binding_0)); + } + Expr::Try(_binding_0) => { + full!(v.visit_expr_try_mut(_binding_0)); + } + Expr::TryBlock(_binding_0) => { + full!(v.visit_expr_try_block_mut(_binding_0)); + } + Expr::Tuple(_binding_0) => { + full!(v.visit_expr_tuple_mut(_binding_0)); + } + Expr::Type(_binding_0) => { + full!(v.visit_expr_type_mut(_binding_0)); + } + Expr::Unary(_binding_0) => { + v.visit_expr_unary_mut(_binding_0); + } + Expr::Unsafe(_binding_0) => { + full!(v.visit_expr_unsafe_mut(_binding_0)); + } + Expr::Verbatim(_binding_0) => { + skip!(_binding_0); + } + Expr::While(_binding_0) => { + full!(v.visit_expr_while_mut(_binding_0)); + } + Expr::Yield(_binding_0) => { + full!(v.visit_expr_yield_mut(_binding_0)); + } + #[cfg(syn_no_non_exhaustive)] + _ => unreachable!(), + } +} +#[cfg(feature = "full")] +pub fn visit_expr_array_mut<V>(v: &mut V, node: &mut ExprArray) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + tokens_helper(v, &mut node.bracket_token.span); + for el in Punctuated::pairs_mut(&mut node.elems) { + let (it, p) = el.into_tuple(); + v.visit_expr_mut(it); + if let Some(p) = p { + tokens_helper(v, &mut p.spans); + } + } +} +#[cfg(feature = "full")] +pub fn visit_expr_assign_mut<V>(v: &mut V, node: &mut ExprAssign) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_expr_mut(&mut *node.left); + tokens_helper(v, &mut node.eq_token.spans); + v.visit_expr_mut(&mut *node.right); +} +#[cfg(feature = "full")] +pub fn visit_expr_assign_op_mut<V>(v: &mut V, node: &mut ExprAssignOp) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_expr_mut(&mut *node.left); + v.visit_bin_op_mut(&mut node.op); + v.visit_expr_mut(&mut *node.right); +} +#[cfg(feature = "full")] +pub fn visit_expr_async_mut<V>(v: &mut V, node: &mut ExprAsync) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + tokens_helper(v, &mut node.async_token.span); + if let Some(it) = &mut node.capture { + tokens_helper(v, &mut it.span); + } + v.visit_block_mut(&mut node.block); +} +#[cfg(feature = "full")] +pub fn visit_expr_await_mut<V>(v: &mut V, node: &mut ExprAwait) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_expr_mut(&mut *node.base); + tokens_helper(v, &mut node.dot_token.spans); + tokens_helper(v, &mut node.await_token.span); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_expr_binary_mut<V>(v: &mut V, node: &mut ExprBinary) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_expr_mut(&mut *node.left); + v.visit_bin_op_mut(&mut node.op); + v.visit_expr_mut(&mut *node.right); +} +#[cfg(feature = "full")] +pub fn visit_expr_block_mut<V>(v: &mut V, node: &mut ExprBlock) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + if let Some(it) = &mut node.label { + v.visit_label_mut(it); + } + v.visit_block_mut(&mut node.block); +} +#[cfg(feature = "full")] +pub fn visit_expr_box_mut<V>(v: &mut V, node: &mut ExprBox) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + tokens_helper(v, &mut node.box_token.span); + v.visit_expr_mut(&mut *node.expr); +} +#[cfg(feature = "full")] +pub fn 
visit_expr_break_mut<V>(v: &mut V, node: &mut ExprBreak) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + tokens_helper(v, &mut node.break_token.span); + if let Some(it) = &mut node.label { + v.visit_lifetime_mut(it); + } + if let Some(it) = &mut node.expr { + v.visit_expr_mut(&mut **it); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_expr_call_mut<V>(v: &mut V, node: &mut ExprCall) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_expr_mut(&mut *node.func); + tokens_helper(v, &mut node.paren_token.span); + for el in Punctuated::pairs_mut(&mut node.args) { + let (it, p) = el.into_tuple(); + v.visit_expr_mut(it); + if let Some(p) = p { + tokens_helper(v, &mut p.spans); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_expr_cast_mut<V>(v: &mut V, node: &mut ExprCast) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_expr_mut(&mut *node.expr); + tokens_helper(v, &mut node.as_token.span); + v.visit_type_mut(&mut *node.ty); +} +#[cfg(feature = "full")] +pub fn visit_expr_closure_mut<V>(v: &mut V, node: &mut ExprClosure) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + if let Some(it) = &mut node.movability { + tokens_helper(v, &mut it.span); + } + if let Some(it) = &mut node.asyncness { + tokens_helper(v, &mut it.span); + } + if let Some(it) = &mut node.capture { + tokens_helper(v, &mut it.span); + } + tokens_helper(v, &mut node.or1_token.spans); + for el in Punctuated::pairs_mut(&mut node.inputs) { + let (it, p) = el.into_tuple(); + v.visit_pat_mut(it); + if let Some(p) = p { + tokens_helper(v, &mut p.spans); + } + } + tokens_helper(v, &mut node.or2_token.spans); + v.visit_return_type_mut(&mut node.output); + v.visit_expr_mut(&mut *node.body); +} +#[cfg(feature = "full")] +pub fn visit_expr_continue_mut<V>(v: &mut V, node: &mut ExprContinue) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + tokens_helper(v, &mut node.continue_token.span); + if let Some(it) = &mut node.label { + v.visit_lifetime_mut(it); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_expr_field_mut<V>(v: &mut V, node: &mut ExprField) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_expr_mut(&mut *node.base); + tokens_helper(v, &mut node.dot_token.spans); + v.visit_member_mut(&mut node.member); +} +#[cfg(feature = "full")] +pub fn visit_expr_for_loop_mut<V>(v: &mut V, node: &mut ExprForLoop) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + if let Some(it) = &mut node.label { + v.visit_label_mut(it); + } + tokens_helper(v, &mut node.for_token.span); + v.visit_pat_mut(&mut node.pat); + tokens_helper(v, &mut node.in_token.span); + v.visit_expr_mut(&mut *node.expr); + v.visit_block_mut(&mut node.body); +} +#[cfg(feature = "full")] +pub fn visit_expr_group_mut<V>(v: &mut V, node: &mut ExprGroup) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + tokens_helper(v, &mut node.group_token.span); + v.visit_expr_mut(&mut *node.expr); +} +#[cfg(feature = "full")] +pub fn visit_expr_if_mut<V>(v: &mut V, node: &mut ExprIf) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + tokens_helper(v, &mut 
node.if_token.span); + v.visit_expr_mut(&mut *node.cond); + v.visit_block_mut(&mut node.then_branch); + if let Some(it) = &mut node.else_branch { + tokens_helper(v, &mut (it).0.span); + v.visit_expr_mut(&mut *(it).1); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_expr_index_mut<V>(v: &mut V, node: &mut ExprIndex) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_expr_mut(&mut *node.expr); + tokens_helper(v, &mut node.bracket_token.span); + v.visit_expr_mut(&mut *node.index); +} +#[cfg(feature = "full")] +pub fn visit_expr_let_mut<V>(v: &mut V, node: &mut ExprLet) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + tokens_helper(v, &mut node.let_token.span); + v.visit_pat_mut(&mut node.pat); + tokens_helper(v, &mut node.eq_token.spans); + v.visit_expr_mut(&mut *node.expr); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_expr_lit_mut<V>(v: &mut V, node: &mut ExprLit) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_lit_mut(&mut node.lit); +} +#[cfg(feature = "full")] +pub fn visit_expr_loop_mut<V>(v: &mut V, node: &mut ExprLoop) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + if let Some(it) = &mut node.label { + v.visit_label_mut(it); + } + tokens_helper(v, &mut node.loop_token.span); + v.visit_block_mut(&mut node.body); +} +#[cfg(feature = "full")] +pub fn visit_expr_macro_mut<V>(v: &mut V, node: &mut ExprMacro) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_macro_mut(&mut node.mac); +} +#[cfg(feature = "full")] +pub fn visit_expr_match_mut<V>(v: &mut V, node: &mut ExprMatch) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + tokens_helper(v, &mut node.match_token.span); + v.visit_expr_mut(&mut *node.expr); + tokens_helper(v, &mut node.brace_token.span); + for it in &mut node.arms { + v.visit_arm_mut(it); + } +} +#[cfg(feature = "full")] +pub fn visit_expr_method_call_mut<V>(v: &mut V, node: &mut ExprMethodCall) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_expr_mut(&mut *node.receiver); + tokens_helper(v, &mut node.dot_token.spans); + v.visit_ident_mut(&mut node.method); + if let Some(it) = &mut node.turbofish { + v.visit_method_turbofish_mut(it); + } + tokens_helper(v, &mut node.paren_token.span); + for el in Punctuated::pairs_mut(&mut node.args) { + let (it, p) = el.into_tuple(); + v.visit_expr_mut(it); + if let Some(p) = p { + tokens_helper(v, &mut p.spans); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_expr_paren_mut<V>(v: &mut V, node: &mut ExprParen) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + tokens_helper(v, &mut node.paren_token.span); + v.visit_expr_mut(&mut *node.expr); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_expr_path_mut<V>(v: &mut V, node: &mut ExprPath) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + if let Some(it) = &mut node.qself { + v.visit_qself_mut(it); + } + v.visit_path_mut(&mut node.path); +} +#[cfg(feature = "full")] +pub fn visit_expr_range_mut<V>(v: &mut V, node: &mut ExprRange) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + if 
let Some(it) = &mut node.from { + v.visit_expr_mut(&mut **it); + } + v.visit_range_limits_mut(&mut node.limits); + if let Some(it) = &mut node.to { + v.visit_expr_mut(&mut **it); + } +} +#[cfg(feature = "full")] +pub fn visit_expr_reference_mut<V>(v: &mut V, node: &mut ExprReference) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + tokens_helper(v, &mut node.and_token.spans); + if let Some(it) = &mut node.mutability { + tokens_helper(v, &mut it.span); + } + v.visit_expr_mut(&mut *node.expr); +} +#[cfg(feature = "full")] +pub fn visit_expr_repeat_mut<V>(v: &mut V, node: &mut ExprRepeat) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + tokens_helper(v, &mut node.bracket_token.span); + v.visit_expr_mut(&mut *node.expr); + tokens_helper(v, &mut node.semi_token.spans); + v.visit_expr_mut(&mut *node.len); +} +#[cfg(feature = "full")] +pub fn visit_expr_return_mut<V>(v: &mut V, node: &mut ExprReturn) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + tokens_helper(v, &mut node.return_token.span); + if let Some(it) = &mut node.expr { + v.visit_expr_mut(&mut **it); + } +} +#[cfg(feature = "full")] +pub fn visit_expr_struct_mut<V>(v: &mut V, node: &mut ExprStruct) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_path_mut(&mut node.path); + tokens_helper(v, &mut node.brace_token.span); + for el in Punctuated::pairs_mut(&mut node.fields) { + let (it, p) = el.into_tuple(); + v.visit_field_value_mut(it); + if let Some(p) = p { + tokens_helper(v, &mut p.spans); + } + } + if let Some(it) = &mut node.dot2_token { + tokens_helper(v, &mut it.spans); + } + if let Some(it) = &mut node.rest { + v.visit_expr_mut(&mut **it); + } +} +#[cfg(feature = "full")] +pub fn visit_expr_try_mut<V>(v: &mut V, node: &mut ExprTry) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_expr_mut(&mut *node.expr); + tokens_helper(v, &mut node.question_token.spans); +} +#[cfg(feature = "full")] +pub fn visit_expr_try_block_mut<V>(v: &mut V, node: &mut ExprTryBlock) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + tokens_helper(v, &mut node.try_token.span); + v.visit_block_mut(&mut node.block); +} +#[cfg(feature = "full")] +pub fn visit_expr_tuple_mut<V>(v: &mut V, node: &mut ExprTuple) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + tokens_helper(v, &mut node.paren_token.span); + for el in Punctuated::pairs_mut(&mut node.elems) { + let (it, p) = el.into_tuple(); + v.visit_expr_mut(it); + if let Some(p) = p { + tokens_helper(v, &mut p.spans); + } + } +} +#[cfg(feature = "full")] +pub fn visit_expr_type_mut<V>(v: &mut V, node: &mut ExprType) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_expr_mut(&mut *node.expr); + tokens_helper(v, &mut node.colon_token.spans); + v.visit_type_mut(&mut *node.ty); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_expr_unary_mut<V>(v: &mut V, node: &mut ExprUnary) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_un_op_mut(&mut node.op); + v.visit_expr_mut(&mut *node.expr); +} +#[cfg(feature = "full")] +pub fn visit_expr_unsafe_mut<V>(v: &mut V, node: &mut ExprUnsafe) +where + V: VisitMut + ?Sized, +{ + for it in 
&mut node.attrs { + v.visit_attribute_mut(it); + } + tokens_helper(v, &mut node.unsafe_token.span); + v.visit_block_mut(&mut node.block); +} +#[cfg(feature = "full")] +pub fn visit_expr_while_mut<V>(v: &mut V, node: &mut ExprWhile) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + if let Some(it) = &mut node.label { + v.visit_label_mut(it); + } + tokens_helper(v, &mut node.while_token.span); + v.visit_expr_mut(&mut *node.cond); + v.visit_block_mut(&mut node.body); +} +#[cfg(feature = "full")] +pub fn visit_expr_yield_mut<V>(v: &mut V, node: &mut ExprYield) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + tokens_helper(v, &mut node.yield_token.span); + if let Some(it) = &mut node.expr { + v.visit_expr_mut(&mut **it); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_field_mut<V>(v: &mut V, node: &mut Field) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_visibility_mut(&mut node.vis); + if let Some(it) = &mut node.ident { + v.visit_ident_mut(it); + } + if let Some(it) = &mut node.colon_token { + tokens_helper(v, &mut it.spans); + } + v.visit_type_mut(&mut node.ty); +} +#[cfg(feature = "full")] +pub fn visit_field_pat_mut<V>(v: &mut V, node: &mut FieldPat) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_member_mut(&mut node.member); + if let Some(it) = &mut node.colon_token { + tokens_helper(v, &mut it.spans); + } + v.visit_pat_mut(&mut *node.pat); +} +#[cfg(feature = "full")] +pub fn visit_field_value_mut<V>(v: &mut V, node: &mut FieldValue) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_member_mut(&mut node.member); + if let Some(it) = &mut node.colon_token { + tokens_helper(v, &mut it.spans); + } + v.visit_expr_mut(&mut node.expr); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_fields_mut<V>(v: &mut V, node: &mut Fields) +where + V: VisitMut + ?Sized, +{ + match node { + Fields::Named(_binding_0) => { + v.visit_fields_named_mut(_binding_0); + } + Fields::Unnamed(_binding_0) => { + v.visit_fields_unnamed_mut(_binding_0); + } + Fields::Unit => {} + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_fields_named_mut<V>(v: &mut V, node: &mut FieldsNamed) +where + V: VisitMut + ?Sized, +{ + tokens_helper(v, &mut node.brace_token.span); + for el in Punctuated::pairs_mut(&mut node.named) { + let (it, p) = el.into_tuple(); + v.visit_field_mut(it); + if let Some(p) = p { + tokens_helper(v, &mut p.spans); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_fields_unnamed_mut<V>(v: &mut V, node: &mut FieldsUnnamed) +where + V: VisitMut + ?Sized, +{ + tokens_helper(v, &mut node.paren_token.span); + for el in Punctuated::pairs_mut(&mut node.unnamed) { + let (it, p) = el.into_tuple(); + v.visit_field_mut(it); + if let Some(p) = p { + tokens_helper(v, &mut p.spans); + } + } +} +#[cfg(feature = "full")] +pub fn visit_file_mut<V>(v: &mut V, node: &mut File) +where + V: VisitMut + ?Sized, +{ + skip!(node.shebang); + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + for it in &mut node.items { + v.visit_item_mut(it); + } +} +#[cfg(feature = "full")] +pub fn visit_fn_arg_mut<V>(v: &mut V, node: &mut FnArg) +where + V: VisitMut + ?Sized, +{ + match node { + FnArg::Receiver(_binding_0) => { + v.visit_receiver_mut(_binding_0); + } + 
FnArg::Typed(_binding_0) => { + v.visit_pat_type_mut(_binding_0); + } + } +} +#[cfg(feature = "full")] +pub fn visit_foreign_item_mut<V>(v: &mut V, node: &mut ForeignItem) +where + V: VisitMut + ?Sized, +{ + match node { + ForeignItem::Fn(_binding_0) => { + v.visit_foreign_item_fn_mut(_binding_0); + } + ForeignItem::Static(_binding_0) => { + v.visit_foreign_item_static_mut(_binding_0); + } + ForeignItem::Type(_binding_0) => { + v.visit_foreign_item_type_mut(_binding_0); + } + ForeignItem::Macro(_binding_0) => { + v.visit_foreign_item_macro_mut(_binding_0); + } + ForeignItem::Verbatim(_binding_0) => { + skip!(_binding_0); + } + #[cfg(syn_no_non_exhaustive)] + _ => unreachable!(), + } +} +#[cfg(feature = "full")] +pub fn visit_foreign_item_fn_mut<V>(v: &mut V, node: &mut ForeignItemFn) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_visibility_mut(&mut node.vis); + v.visit_signature_mut(&mut node.sig); + tokens_helper(v, &mut node.semi_token.spans); +} +#[cfg(feature = "full")] +pub fn visit_foreign_item_macro_mut<V>(v: &mut V, node: &mut ForeignItemMacro) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_macro_mut(&mut node.mac); + if let Some(it) = &mut node.semi_token { + tokens_helper(v, &mut it.spans); + } +} +#[cfg(feature = "full")] +pub fn visit_foreign_item_static_mut<V>(v: &mut V, node: &mut ForeignItemStatic) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_visibility_mut(&mut node.vis); + tokens_helper(v, &mut node.static_token.span); + if let Some(it) = &mut node.mutability { + tokens_helper(v, &mut it.span); + } + v.visit_ident_mut(&mut node.ident); + tokens_helper(v, &mut node.colon_token.spans); + v.visit_type_mut(&mut *node.ty); + tokens_helper(v, &mut node.semi_token.spans); +} +#[cfg(feature = "full")] +pub fn visit_foreign_item_type_mut<V>(v: &mut V, node: &mut ForeignItemType) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_visibility_mut(&mut node.vis); + tokens_helper(v, &mut node.type_token.span); + v.visit_ident_mut(&mut node.ident); + tokens_helper(v, &mut node.semi_token.spans); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_generic_argument_mut<V>(v: &mut V, node: &mut GenericArgument) +where + V: VisitMut + ?Sized, +{ + match node { + GenericArgument::Lifetime(_binding_0) => { + v.visit_lifetime_mut(_binding_0); + } + GenericArgument::Type(_binding_0) => { + v.visit_type_mut(_binding_0); + } + GenericArgument::Const(_binding_0) => { + v.visit_expr_mut(_binding_0); + } + GenericArgument::Binding(_binding_0) => { + v.visit_binding_mut(_binding_0); + } + GenericArgument::Constraint(_binding_0) => { + v.visit_constraint_mut(_binding_0); + } + } +} +#[cfg(feature = "full")] +pub fn visit_generic_method_argument_mut<V>(v: &mut V, node: &mut GenericMethodArgument) +where + V: VisitMut + ?Sized, +{ + match node { + GenericMethodArgument::Type(_binding_0) => { + v.visit_type_mut(_binding_0); + } + GenericMethodArgument::Const(_binding_0) => { + v.visit_expr_mut(_binding_0); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_generic_param_mut<V>(v: &mut V, node: &mut GenericParam) +where + V: VisitMut + ?Sized, +{ + match node { + GenericParam::Type(_binding_0) => { + v.visit_type_param_mut(_binding_0); + } + GenericParam::Lifetime(_binding_0) => { + v.visit_lifetime_def_mut(_binding_0); + } + 
GenericParam::Const(_binding_0) => { + v.visit_const_param_mut(_binding_0); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_generics_mut<V>(v: &mut V, node: &mut Generics) +where + V: VisitMut + ?Sized, +{ + if let Some(it) = &mut node.lt_token { + tokens_helper(v, &mut it.spans); + } + for el in Punctuated::pairs_mut(&mut node.params) { + let (it, p) = el.into_tuple(); + v.visit_generic_param_mut(it); + if let Some(p) = p { + tokens_helper(v, &mut p.spans); + } + } + if let Some(it) = &mut node.gt_token { + tokens_helper(v, &mut it.spans); + } + if let Some(it) = &mut node.where_clause { + v.visit_where_clause_mut(it); + } +} +pub fn visit_ident_mut<V>(v: &mut V, node: &mut Ident) +where + V: VisitMut + ?Sized, +{ + let mut span = node.span(); + v.visit_span_mut(&mut span); + node.set_span(span); +} +#[cfg(feature = "full")] +pub fn visit_impl_item_mut<V>(v: &mut V, node: &mut ImplItem) +where + V: VisitMut + ?Sized, +{ + match node { + ImplItem::Const(_binding_0) => { + v.visit_impl_item_const_mut(_binding_0); + } + ImplItem::Method(_binding_0) => { + v.visit_impl_item_method_mut(_binding_0); + } + ImplItem::Type(_binding_0) => { + v.visit_impl_item_type_mut(_binding_0); + } + ImplItem::Macro(_binding_0) => { + v.visit_impl_item_macro_mut(_binding_0); + } + ImplItem::Verbatim(_binding_0) => { + skip!(_binding_0); + } + #[cfg(syn_no_non_exhaustive)] + _ => unreachable!(), + } +} +#[cfg(feature = "full")] +pub fn visit_impl_item_const_mut<V>(v: &mut V, node: &mut ImplItemConst) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_visibility_mut(&mut node.vis); + if let Some(it) = &mut node.defaultness { + tokens_helper(v, &mut it.span); + } + tokens_helper(v, &mut node.const_token.span); + v.visit_ident_mut(&mut node.ident); + tokens_helper(v, &mut node.colon_token.spans); + v.visit_type_mut(&mut node.ty); + tokens_helper(v, &mut node.eq_token.spans); + v.visit_expr_mut(&mut node.expr); + tokens_helper(v, &mut node.semi_token.spans); +} +#[cfg(feature = "full")] +pub fn visit_impl_item_macro_mut<V>(v: &mut V, node: &mut ImplItemMacro) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_macro_mut(&mut node.mac); + if let Some(it) = &mut node.semi_token { + tokens_helper(v, &mut it.spans); + } +} +#[cfg(feature = "full")] +pub fn visit_impl_item_method_mut<V>(v: &mut V, node: &mut ImplItemMethod) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_visibility_mut(&mut node.vis); + if let Some(it) = &mut node.defaultness { + tokens_helper(v, &mut it.span); + } + v.visit_signature_mut(&mut node.sig); + v.visit_block_mut(&mut node.block); +} +#[cfg(feature = "full")] +pub fn visit_impl_item_type_mut<V>(v: &mut V, node: &mut ImplItemType) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_visibility_mut(&mut node.vis); + if let Some(it) = &mut node.defaultness { + tokens_helper(v, &mut it.span); + } + tokens_helper(v, &mut node.type_token.span); + v.visit_ident_mut(&mut node.ident); + v.visit_generics_mut(&mut node.generics); + tokens_helper(v, &mut node.eq_token.spans); + v.visit_type_mut(&mut node.ty); + tokens_helper(v, &mut node.semi_token.spans); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_index_mut<V>(v: &mut V, node: &mut Index) +where + V: VisitMut + ?Sized, +{ + skip!(node.index); + v.visit_span_mut(&mut node.span); +} 
+#[cfg(feature = "full")] +pub fn visit_item_mut<V>(v: &mut V, node: &mut Item) +where + V: VisitMut + ?Sized, +{ + match node { + Item::Const(_binding_0) => { + v.visit_item_const_mut(_binding_0); + } + Item::Enum(_binding_0) => { + v.visit_item_enum_mut(_binding_0); + } + Item::ExternCrate(_binding_0) => { + v.visit_item_extern_crate_mut(_binding_0); + } + Item::Fn(_binding_0) => { + v.visit_item_fn_mut(_binding_0); + } + Item::ForeignMod(_binding_0) => { + v.visit_item_foreign_mod_mut(_binding_0); + } + Item::Impl(_binding_0) => { + v.visit_item_impl_mut(_binding_0); + } + Item::Macro(_binding_0) => { + v.visit_item_macro_mut(_binding_0); + } + Item::Macro2(_binding_0) => { + v.visit_item_macro2_mut(_binding_0); + } + Item::Mod(_binding_0) => { + v.visit_item_mod_mut(_binding_0); + } + Item::Static(_binding_0) => { + v.visit_item_static_mut(_binding_0); + } + Item::Struct(_binding_0) => { + v.visit_item_struct_mut(_binding_0); + } + Item::Trait(_binding_0) => { + v.visit_item_trait_mut(_binding_0); + } + Item::TraitAlias(_binding_0) => { + v.visit_item_trait_alias_mut(_binding_0); + } + Item::Type(_binding_0) => { + v.visit_item_type_mut(_binding_0); + } + Item::Union(_binding_0) => { + v.visit_item_union_mut(_binding_0); + } + Item::Use(_binding_0) => { + v.visit_item_use_mut(_binding_0); + } + Item::Verbatim(_binding_0) => { + skip!(_binding_0); + } + #[cfg(syn_no_non_exhaustive)] + _ => unreachable!(), + } +} +#[cfg(feature = "full")] +pub fn visit_item_const_mut<V>(v: &mut V, node: &mut ItemConst) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_visibility_mut(&mut node.vis); + tokens_helper(v, &mut node.const_token.span); + v.visit_ident_mut(&mut node.ident); + tokens_helper(v, &mut node.colon_token.spans); + v.visit_type_mut(&mut *node.ty); + tokens_helper(v, &mut node.eq_token.spans); + v.visit_expr_mut(&mut *node.expr); + tokens_helper(v, &mut node.semi_token.spans); +} +#[cfg(feature = "full")] +pub fn visit_item_enum_mut<V>(v: &mut V, node: &mut ItemEnum) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_visibility_mut(&mut node.vis); + tokens_helper(v, &mut node.enum_token.span); + v.visit_ident_mut(&mut node.ident); + v.visit_generics_mut(&mut node.generics); + tokens_helper(v, &mut node.brace_token.span); + for el in Punctuated::pairs_mut(&mut node.variants) { + let (it, p) = el.into_tuple(); + v.visit_variant_mut(it); + if let Some(p) = p { + tokens_helper(v, &mut p.spans); + } + } +} +#[cfg(feature = "full")] +pub fn visit_item_extern_crate_mut<V>(v: &mut V, node: &mut ItemExternCrate) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_visibility_mut(&mut node.vis); + tokens_helper(v, &mut node.extern_token.span); + tokens_helper(v, &mut node.crate_token.span); + v.visit_ident_mut(&mut node.ident); + if let Some(it) = &mut node.rename { + tokens_helper(v, &mut (it).0.span); + v.visit_ident_mut(&mut (it).1); + } + tokens_helper(v, &mut node.semi_token.spans); +} +#[cfg(feature = "full")] +pub fn visit_item_fn_mut<V>(v: &mut V, node: &mut ItemFn) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_visibility_mut(&mut node.vis); + v.visit_signature_mut(&mut node.sig); + v.visit_block_mut(&mut *node.block); +} +#[cfg(feature = "full")] +pub fn visit_item_foreign_mod_mut<V>(v: &mut V, node: &mut ItemForeignMod) +where + V: VisitMut + ?Sized, +{ + for it 
in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_abi_mut(&mut node.abi); + tokens_helper(v, &mut node.brace_token.span); + for it in &mut node.items { + v.visit_foreign_item_mut(it); + } +} +#[cfg(feature = "full")] +pub fn visit_item_impl_mut<V>(v: &mut V, node: &mut ItemImpl) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + if let Some(it) = &mut node.defaultness { + tokens_helper(v, &mut it.span); + } + if let Some(it) = &mut node.unsafety { + tokens_helper(v, &mut it.span); + } + tokens_helper(v, &mut node.impl_token.span); + v.visit_generics_mut(&mut node.generics); + if let Some(it) = &mut node.trait_ { + if let Some(it) = &mut (it).0 { + tokens_helper(v, &mut it.spans); + } + v.visit_path_mut(&mut (it).1); + tokens_helper(v, &mut (it).2.span); + } + v.visit_type_mut(&mut *node.self_ty); + tokens_helper(v, &mut node.brace_token.span); + for it in &mut node.items { + v.visit_impl_item_mut(it); + } +} +#[cfg(feature = "full")] +pub fn visit_item_macro_mut<V>(v: &mut V, node: &mut ItemMacro) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + if let Some(it) = &mut node.ident { + v.visit_ident_mut(it); + } + v.visit_macro_mut(&mut node.mac); + if let Some(it) = &mut node.semi_token { + tokens_helper(v, &mut it.spans); + } +} +#[cfg(feature = "full")] +pub fn visit_item_macro2_mut<V>(v: &mut V, node: &mut ItemMacro2) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_visibility_mut(&mut node.vis); + tokens_helper(v, &mut node.macro_token.span); + v.visit_ident_mut(&mut node.ident); + skip!(node.rules); +} +#[cfg(feature = "full")] +pub fn visit_item_mod_mut<V>(v: &mut V, node: &mut ItemMod) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_visibility_mut(&mut node.vis); + tokens_helper(v, &mut node.mod_token.span); + v.visit_ident_mut(&mut node.ident); + if let Some(it) = &mut node.content { + tokens_helper(v, &mut (it).0.span); + for it in &mut (it).1 { + v.visit_item_mut(it); + } + } + if let Some(it) = &mut node.semi { + tokens_helper(v, &mut it.spans); + } +} +#[cfg(feature = "full")] +pub fn visit_item_static_mut<V>(v: &mut V, node: &mut ItemStatic) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_visibility_mut(&mut node.vis); + tokens_helper(v, &mut node.static_token.span); + if let Some(it) = &mut node.mutability { + tokens_helper(v, &mut it.span); + } + v.visit_ident_mut(&mut node.ident); + tokens_helper(v, &mut node.colon_token.spans); + v.visit_type_mut(&mut *node.ty); + tokens_helper(v, &mut node.eq_token.spans); + v.visit_expr_mut(&mut *node.expr); + tokens_helper(v, &mut node.semi_token.spans); +} +#[cfg(feature = "full")] +pub fn visit_item_struct_mut<V>(v: &mut V, node: &mut ItemStruct) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_visibility_mut(&mut node.vis); + tokens_helper(v, &mut node.struct_token.span); + v.visit_ident_mut(&mut node.ident); + v.visit_generics_mut(&mut node.generics); + v.visit_fields_mut(&mut node.fields); + if let Some(it) = &mut node.semi_token { + tokens_helper(v, &mut it.spans); + } +} +#[cfg(feature = "full")] +pub fn visit_item_trait_mut<V>(v: &mut V, node: &mut ItemTrait) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_visibility_mut(&mut 
node.vis); + if let Some(it) = &mut node.unsafety { + tokens_helper(v, &mut it.span); + } + if let Some(it) = &mut node.auto_token { + tokens_helper(v, &mut it.span); + } + tokens_helper(v, &mut node.trait_token.span); + v.visit_ident_mut(&mut node.ident); + v.visit_generics_mut(&mut node.generics); + if let Some(it) = &mut node.colon_token { + tokens_helper(v, &mut it.spans); + } + for el in Punctuated::pairs_mut(&mut node.supertraits) { + let (it, p) = el.into_tuple(); + v.visit_type_param_bound_mut(it); + if let Some(p) = p { + tokens_helper(v, &mut p.spans); + } + } + tokens_helper(v, &mut node.brace_token.span); + for it in &mut node.items { + v.visit_trait_item_mut(it); + } +} +#[cfg(feature = "full")] +pub fn visit_item_trait_alias_mut<V>(v: &mut V, node: &mut ItemTraitAlias) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_visibility_mut(&mut node.vis); + tokens_helper(v, &mut node.trait_token.span); + v.visit_ident_mut(&mut node.ident); + v.visit_generics_mut(&mut node.generics); + tokens_helper(v, &mut node.eq_token.spans); + for el in Punctuated::pairs_mut(&mut node.bounds) { + let (it, p) = el.into_tuple(); + v.visit_type_param_bound_mut(it); + if let Some(p) = p { + tokens_helper(v, &mut p.spans); + } + } + tokens_helper(v, &mut node.semi_token.spans); +} +#[cfg(feature = "full")] +pub fn visit_item_type_mut<V>(v: &mut V, node: &mut ItemType) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_visibility_mut(&mut node.vis); + tokens_helper(v, &mut node.type_token.span); + v.visit_ident_mut(&mut node.ident); + v.visit_generics_mut(&mut node.generics); + tokens_helper(v, &mut node.eq_token.spans); + v.visit_type_mut(&mut *node.ty); + tokens_helper(v, &mut node.semi_token.spans); +} +#[cfg(feature = "full")] +pub fn visit_item_union_mut<V>(v: &mut V, node: &mut ItemUnion) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_visibility_mut(&mut node.vis); + tokens_helper(v, &mut node.union_token.span); + v.visit_ident_mut(&mut node.ident); + v.visit_generics_mut(&mut node.generics); + v.visit_fields_named_mut(&mut node.fields); +} +#[cfg(feature = "full")] +pub fn visit_item_use_mut<V>(v: &mut V, node: &mut ItemUse) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_visibility_mut(&mut node.vis); + tokens_helper(v, &mut node.use_token.span); + if let Some(it) = &mut node.leading_colon { + tokens_helper(v, &mut it.spans); + } + v.visit_use_tree_mut(&mut node.tree); + tokens_helper(v, &mut node.semi_token.spans); +} +#[cfg(feature = "full")] +pub fn visit_label_mut<V>(v: &mut V, node: &mut Label) +where + V: VisitMut + ?Sized, +{ + v.visit_lifetime_mut(&mut node.name); + tokens_helper(v, &mut node.colon_token.spans); +} +pub fn visit_lifetime_mut<V>(v: &mut V, node: &mut Lifetime) +where + V: VisitMut + ?Sized, +{ + v.visit_span_mut(&mut node.apostrophe); + v.visit_ident_mut(&mut node.ident); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_lifetime_def_mut<V>(v: &mut V, node: &mut LifetimeDef) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_lifetime_mut(&mut node.lifetime); + if let Some(it) = &mut node.colon_token { + tokens_helper(v, &mut it.spans); + } + for el in Punctuated::pairs_mut(&mut node.bounds) { + let (it, p) = el.into_tuple(); + v.visit_lifetime_mut(it); + if let Some(p) = 
p { + tokens_helper(v, &mut p.spans); + } + } +} +pub fn visit_lit_mut<V>(v: &mut V, node: &mut Lit) +where + V: VisitMut + ?Sized, +{ + match node { + Lit::Str(_binding_0) => { + v.visit_lit_str_mut(_binding_0); + } + Lit::ByteStr(_binding_0) => { + v.visit_lit_byte_str_mut(_binding_0); + } + Lit::Byte(_binding_0) => { + v.visit_lit_byte_mut(_binding_0); + } + Lit::Char(_binding_0) => { + v.visit_lit_char_mut(_binding_0); + } + Lit::Int(_binding_0) => { + v.visit_lit_int_mut(_binding_0); + } + Lit::Float(_binding_0) => { + v.visit_lit_float_mut(_binding_0); + } + Lit::Bool(_binding_0) => { + v.visit_lit_bool_mut(_binding_0); + } + Lit::Verbatim(_binding_0) => { + skip!(_binding_0); + } + } +} +pub fn visit_lit_bool_mut<V>(v: &mut V, node: &mut LitBool) +where + V: VisitMut + ?Sized, +{ + skip!(node.value); + v.visit_span_mut(&mut node.span); +} +pub fn visit_lit_byte_mut<V>(v: &mut V, node: &mut LitByte) +where + V: VisitMut + ?Sized, +{} +pub fn visit_lit_byte_str_mut<V>(v: &mut V, node: &mut LitByteStr) +where + V: VisitMut + ?Sized, +{} +pub fn visit_lit_char_mut<V>(v: &mut V, node: &mut LitChar) +where + V: VisitMut + ?Sized, +{} +pub fn visit_lit_float_mut<V>(v: &mut V, node: &mut LitFloat) +where + V: VisitMut + ?Sized, +{} +pub fn visit_lit_int_mut<V>(v: &mut V, node: &mut LitInt) +where + V: VisitMut + ?Sized, +{} +pub fn visit_lit_str_mut<V>(v: &mut V, node: &mut LitStr) +where + V: VisitMut + ?Sized, +{} +#[cfg(feature = "full")] +pub fn visit_local_mut<V>(v: &mut V, node: &mut Local) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + tokens_helper(v, &mut node.let_token.span); + v.visit_pat_mut(&mut node.pat); + if let Some(it) = &mut node.init { + tokens_helper(v, &mut (it).0.spans); + v.visit_expr_mut(&mut *(it).1); + } + tokens_helper(v, &mut node.semi_token.spans); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_macro_mut<V>(v: &mut V, node: &mut Macro) +where + V: VisitMut + ?Sized, +{ + v.visit_path_mut(&mut node.path); + tokens_helper(v, &mut node.bang_token.spans); + v.visit_macro_delimiter_mut(&mut node.delimiter); + skip!(node.tokens); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_macro_delimiter_mut<V>(v: &mut V, node: &mut MacroDelimiter) +where + V: VisitMut + ?Sized, +{ + match node { + MacroDelimiter::Paren(_binding_0) => { + tokens_helper(v, &mut _binding_0.span); + } + MacroDelimiter::Brace(_binding_0) => { + tokens_helper(v, &mut _binding_0.span); + } + MacroDelimiter::Bracket(_binding_0) => { + tokens_helper(v, &mut _binding_0.span); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_member_mut<V>(v: &mut V, node: &mut Member) +where + V: VisitMut + ?Sized, +{ + match node { + Member::Named(_binding_0) => { + v.visit_ident_mut(_binding_0); + } + Member::Unnamed(_binding_0) => { + v.visit_index_mut(_binding_0); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_meta_mut<V>(v: &mut V, node: &mut Meta) +where + V: VisitMut + ?Sized, +{ + match node { + Meta::Path(_binding_0) => { + v.visit_path_mut(_binding_0); + } + Meta::List(_binding_0) => { + v.visit_meta_list_mut(_binding_0); + } + Meta::NameValue(_binding_0) => { + v.visit_meta_name_value_mut(_binding_0); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_meta_list_mut<V>(v: &mut V, node: &mut MetaList) +where + V: VisitMut + ?Sized, +{ + v.visit_path_mut(&mut node.path); + tokens_helper(v, &mut node.paren_token.span); + for el in 
Punctuated::pairs_mut(&mut node.nested) { + let (it, p) = el.into_tuple(); + v.visit_nested_meta_mut(it); + if let Some(p) = p { + tokens_helper(v, &mut p.spans); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_meta_name_value_mut<V>(v: &mut V, node: &mut MetaNameValue) +where + V: VisitMut + ?Sized, +{ + v.visit_path_mut(&mut node.path); + tokens_helper(v, &mut node.eq_token.spans); + v.visit_lit_mut(&mut node.lit); +} +#[cfg(feature = "full")] +pub fn visit_method_turbofish_mut<V>(v: &mut V, node: &mut MethodTurbofish) +where + V: VisitMut + ?Sized, +{ + tokens_helper(v, &mut node.colon2_token.spans); + tokens_helper(v, &mut node.lt_token.spans); + for el in Punctuated::pairs_mut(&mut node.args) { + let (it, p) = el.into_tuple(); + v.visit_generic_method_argument_mut(it); + if let Some(p) = p { + tokens_helper(v, &mut p.spans); + } + } + tokens_helper(v, &mut node.gt_token.spans); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_nested_meta_mut<V>(v: &mut V, node: &mut NestedMeta) +where + V: VisitMut + ?Sized, +{ + match node { + NestedMeta::Meta(_binding_0) => { + v.visit_meta_mut(_binding_0); + } + NestedMeta::Lit(_binding_0) => { + v.visit_lit_mut(_binding_0); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_parenthesized_generic_arguments_mut<V>( + v: &mut V, + node: &mut ParenthesizedGenericArguments, +) +where + V: VisitMut + ?Sized, +{ + tokens_helper(v, &mut node.paren_token.span); + for el in Punctuated::pairs_mut(&mut node.inputs) { + let (it, p) = el.into_tuple(); + v.visit_type_mut(it); + if let Some(p) = p { + tokens_helper(v, &mut p.spans); + } + } + v.visit_return_type_mut(&mut node.output); +} +#[cfg(feature = "full")] +pub fn visit_pat_mut<V>(v: &mut V, node: &mut Pat) +where + V: VisitMut + ?Sized, +{ + match node { + Pat::Box(_binding_0) => { + v.visit_pat_box_mut(_binding_0); + } + Pat::Ident(_binding_0) => { + v.visit_pat_ident_mut(_binding_0); + } + Pat::Lit(_binding_0) => { + v.visit_pat_lit_mut(_binding_0); + } + Pat::Macro(_binding_0) => { + v.visit_pat_macro_mut(_binding_0); + } + Pat::Or(_binding_0) => { + v.visit_pat_or_mut(_binding_0); + } + Pat::Path(_binding_0) => { + v.visit_pat_path_mut(_binding_0); + } + Pat::Range(_binding_0) => { + v.visit_pat_range_mut(_binding_0); + } + Pat::Reference(_binding_0) => { + v.visit_pat_reference_mut(_binding_0); + } + Pat::Rest(_binding_0) => { + v.visit_pat_rest_mut(_binding_0); + } + Pat::Slice(_binding_0) => { + v.visit_pat_slice_mut(_binding_0); + } + Pat::Struct(_binding_0) => { + v.visit_pat_struct_mut(_binding_0); + } + Pat::Tuple(_binding_0) => { + v.visit_pat_tuple_mut(_binding_0); + } + Pat::TupleStruct(_binding_0) => { + v.visit_pat_tuple_struct_mut(_binding_0); + } + Pat::Type(_binding_0) => { + v.visit_pat_type_mut(_binding_0); + } + Pat::Verbatim(_binding_0) => { + skip!(_binding_0); + } + Pat::Wild(_binding_0) => { + v.visit_pat_wild_mut(_binding_0); + } + #[cfg(syn_no_non_exhaustive)] + _ => unreachable!(), + } +} +#[cfg(feature = "full")] +pub fn visit_pat_box_mut<V>(v: &mut V, node: &mut PatBox) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + tokens_helper(v, &mut node.box_token.span); + v.visit_pat_mut(&mut *node.pat); +} +#[cfg(feature = "full")] +pub fn visit_pat_ident_mut<V>(v: &mut V, node: &mut PatIdent) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + if let Some(it) = &mut node.by_ref { + tokens_helper(v, &mut it.span); + 
} + if let Some(it) = &mut node.mutability { + tokens_helper(v, &mut it.span); + } + v.visit_ident_mut(&mut node.ident); + if let Some(it) = &mut node.subpat { + tokens_helper(v, &mut (it).0.spans); + v.visit_pat_mut(&mut *(it).1); + } +} +#[cfg(feature = "full")] +pub fn visit_pat_lit_mut<V>(v: &mut V, node: &mut PatLit) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_expr_mut(&mut *node.expr); +} +#[cfg(feature = "full")] +pub fn visit_pat_macro_mut<V>(v: &mut V, node: &mut PatMacro) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_macro_mut(&mut node.mac); +} +#[cfg(feature = "full")] +pub fn visit_pat_or_mut<V>(v: &mut V, node: &mut PatOr) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + if let Some(it) = &mut node.leading_vert { + tokens_helper(v, &mut it.spans); + } + for el in Punctuated::pairs_mut(&mut node.cases) { + let (it, p) = el.into_tuple(); + v.visit_pat_mut(it); + if let Some(p) = p { + tokens_helper(v, &mut p.spans); + } + } +} +#[cfg(feature = "full")] +pub fn visit_pat_path_mut<V>(v: &mut V, node: &mut PatPath) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + if let Some(it) = &mut node.qself { + v.visit_qself_mut(it); + } + v.visit_path_mut(&mut node.path); +} +#[cfg(feature = "full")] +pub fn visit_pat_range_mut<V>(v: &mut V, node: &mut PatRange) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_expr_mut(&mut *node.lo); + v.visit_range_limits_mut(&mut node.limits); + v.visit_expr_mut(&mut *node.hi); +} +#[cfg(feature = "full")] +pub fn visit_pat_reference_mut<V>(v: &mut V, node: &mut PatReference) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + tokens_helper(v, &mut node.and_token.spans); + if let Some(it) = &mut node.mutability { + tokens_helper(v, &mut it.span); + } + v.visit_pat_mut(&mut *node.pat); +} +#[cfg(feature = "full")] +pub fn visit_pat_rest_mut<V>(v: &mut V, node: &mut PatRest) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + tokens_helper(v, &mut node.dot2_token.spans); +} +#[cfg(feature = "full")] +pub fn visit_pat_slice_mut<V>(v: &mut V, node: &mut PatSlice) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + tokens_helper(v, &mut node.bracket_token.span); + for el in Punctuated::pairs_mut(&mut node.elems) { + let (it, p) = el.into_tuple(); + v.visit_pat_mut(it); + if let Some(p) = p { + tokens_helper(v, &mut p.spans); + } + } +} +#[cfg(feature = "full")] +pub fn visit_pat_struct_mut<V>(v: &mut V, node: &mut PatStruct) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_path_mut(&mut node.path); + tokens_helper(v, &mut node.brace_token.span); + for el in Punctuated::pairs_mut(&mut node.fields) { + let (it, p) = el.into_tuple(); + v.visit_field_pat_mut(it); + if let Some(p) = p { + tokens_helper(v, &mut p.spans); + } + } + if let Some(it) = &mut node.dot2_token { + tokens_helper(v, &mut it.spans); + } +} +#[cfg(feature = "full")] +pub fn visit_pat_tuple_mut<V>(v: &mut V, node: &mut PatTuple) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + tokens_helper(v, &mut node.paren_token.span); + for el in Punctuated::pairs_mut(&mut node.elems) { 
+ let (it, p) = el.into_tuple(); + v.visit_pat_mut(it); + if let Some(p) = p { + tokens_helper(v, &mut p.spans); + } + } +} +#[cfg(feature = "full")] +pub fn visit_pat_tuple_struct_mut<V>(v: &mut V, node: &mut PatTupleStruct) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_path_mut(&mut node.path); + v.visit_pat_tuple_mut(&mut node.pat); +} +#[cfg(feature = "full")] +pub fn visit_pat_type_mut<V>(v: &mut V, node: &mut PatType) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_pat_mut(&mut *node.pat); + tokens_helper(v, &mut node.colon_token.spans); + v.visit_type_mut(&mut *node.ty); +} +#[cfg(feature = "full")] +pub fn visit_pat_wild_mut<V>(v: &mut V, node: &mut PatWild) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + tokens_helper(v, &mut node.underscore_token.spans); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_path_mut<V>(v: &mut V, node: &mut Path) +where + V: VisitMut + ?Sized, +{ + if let Some(it) = &mut node.leading_colon { + tokens_helper(v, &mut it.spans); + } + for el in Punctuated::pairs_mut(&mut node.segments) { + let (it, p) = el.into_tuple(); + v.visit_path_segment_mut(it); + if let Some(p) = p { + tokens_helper(v, &mut p.spans); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_path_arguments_mut<V>(v: &mut V, node: &mut PathArguments) +where + V: VisitMut + ?Sized, +{ + match node { + PathArguments::None => {} + PathArguments::AngleBracketed(_binding_0) => { + v.visit_angle_bracketed_generic_arguments_mut(_binding_0); + } + PathArguments::Parenthesized(_binding_0) => { + v.visit_parenthesized_generic_arguments_mut(_binding_0); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_path_segment_mut<V>(v: &mut V, node: &mut PathSegment) +where + V: VisitMut + ?Sized, +{ + v.visit_ident_mut(&mut node.ident); + v.visit_path_arguments_mut(&mut node.arguments); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_predicate_eq_mut<V>(v: &mut V, node: &mut PredicateEq) +where + V: VisitMut + ?Sized, +{ + v.visit_type_mut(&mut node.lhs_ty); + tokens_helper(v, &mut node.eq_token.spans); + v.visit_type_mut(&mut node.rhs_ty); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_predicate_lifetime_mut<V>(v: &mut V, node: &mut PredicateLifetime) +where + V: VisitMut + ?Sized, +{ + v.visit_lifetime_mut(&mut node.lifetime); + tokens_helper(v, &mut node.colon_token.spans); + for el in Punctuated::pairs_mut(&mut node.bounds) { + let (it, p) = el.into_tuple(); + v.visit_lifetime_mut(it); + if let Some(p) = p { + tokens_helper(v, &mut p.spans); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_predicate_type_mut<V>(v: &mut V, node: &mut PredicateType) +where + V: VisitMut + ?Sized, +{ + if let Some(it) = &mut node.lifetimes { + v.visit_bound_lifetimes_mut(it); + } + v.visit_type_mut(&mut node.bounded_ty); + tokens_helper(v, &mut node.colon_token.spans); + for el in Punctuated::pairs_mut(&mut node.bounds) { + let (it, p) = el.into_tuple(); + v.visit_type_param_bound_mut(it); + if let Some(p) = p { + tokens_helper(v, &mut p.spans); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_qself_mut<V>(v: &mut V, node: &mut QSelf) +where + V: VisitMut + ?Sized, +{ + tokens_helper(v, &mut node.lt_token.spans); + v.visit_type_mut(&mut *node.ty); + skip!(node.position); + if let Some(it) = &mut 
node.as_token { + tokens_helper(v, &mut it.span); + } + tokens_helper(v, &mut node.gt_token.spans); +} +#[cfg(feature = "full")] +pub fn visit_range_limits_mut<V>(v: &mut V, node: &mut RangeLimits) +where + V: VisitMut + ?Sized, +{ + match node { + RangeLimits::HalfOpen(_binding_0) => { + tokens_helper(v, &mut _binding_0.spans); + } + RangeLimits::Closed(_binding_0) => { + tokens_helper(v, &mut _binding_0.spans); + } + } +} +#[cfg(feature = "full")] +pub fn visit_receiver_mut<V>(v: &mut V, node: &mut Receiver) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + if let Some(it) = &mut node.reference { + tokens_helper(v, &mut (it).0.spans); + if let Some(it) = &mut (it).1 { + v.visit_lifetime_mut(it); + } + } + if let Some(it) = &mut node.mutability { + tokens_helper(v, &mut it.span); + } + tokens_helper(v, &mut node.self_token.span); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_return_type_mut<V>(v: &mut V, node: &mut ReturnType) +where + V: VisitMut + ?Sized, +{ + match node { + ReturnType::Default => {} + ReturnType::Type(_binding_0, _binding_1) => { + tokens_helper(v, &mut _binding_0.spans); + v.visit_type_mut(&mut **_binding_1); + } + } +} +#[cfg(feature = "full")] +pub fn visit_signature_mut<V>(v: &mut V, node: &mut Signature) +where + V: VisitMut + ?Sized, +{ + if let Some(it) = &mut node.constness { + tokens_helper(v, &mut it.span); + } + if let Some(it) = &mut node.asyncness { + tokens_helper(v, &mut it.span); + } + if let Some(it) = &mut node.unsafety { + tokens_helper(v, &mut it.span); + } + if let Some(it) = &mut node.abi { + v.visit_abi_mut(it); + } + tokens_helper(v, &mut node.fn_token.span); + v.visit_ident_mut(&mut node.ident); + v.visit_generics_mut(&mut node.generics); + tokens_helper(v, &mut node.paren_token.span); + for el in Punctuated::pairs_mut(&mut node.inputs) { + let (it, p) = el.into_tuple(); + v.visit_fn_arg_mut(it); + if let Some(p) = p { + tokens_helper(v, &mut p.spans); + } + } + if let Some(it) = &mut node.variadic { + v.visit_variadic_mut(it); + } + v.visit_return_type_mut(&mut node.output); +} +pub fn visit_span_mut<V>(v: &mut V, node: &mut Span) +where + V: VisitMut + ?Sized, +{} +#[cfg(feature = "full")] +pub fn visit_stmt_mut<V>(v: &mut V, node: &mut Stmt) +where + V: VisitMut + ?Sized, +{ + match node { + Stmt::Local(_binding_0) => { + v.visit_local_mut(_binding_0); + } + Stmt::Item(_binding_0) => { + v.visit_item_mut(_binding_0); + } + Stmt::Expr(_binding_0) => { + v.visit_expr_mut(_binding_0); + } + Stmt::Semi(_binding_0, _binding_1) => { + v.visit_expr_mut(_binding_0); + tokens_helper(v, &mut _binding_1.spans); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_trait_bound_mut<V>(v: &mut V, node: &mut TraitBound) +where + V: VisitMut + ?Sized, +{ + if let Some(it) = &mut node.paren_token { + tokens_helper(v, &mut it.span); + } + v.visit_trait_bound_modifier_mut(&mut node.modifier); + if let Some(it) = &mut node.lifetimes { + v.visit_bound_lifetimes_mut(it); + } + v.visit_path_mut(&mut node.path); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_trait_bound_modifier_mut<V>(v: &mut V, node: &mut TraitBoundModifier) +where + V: VisitMut + ?Sized, +{ + match node { + TraitBoundModifier::None => {} + TraitBoundModifier::Maybe(_binding_0) => { + tokens_helper(v, &mut _binding_0.spans); + } + } +} +#[cfg(feature = "full")] +pub fn visit_trait_item_mut<V>(v: &mut V, node: &mut TraitItem) +where + V: VisitMut + ?Sized, +{ + match node { + 
TraitItem::Const(_binding_0) => { + v.visit_trait_item_const_mut(_binding_0); + } + TraitItem::Method(_binding_0) => { + v.visit_trait_item_method_mut(_binding_0); + } + TraitItem::Type(_binding_0) => { + v.visit_trait_item_type_mut(_binding_0); + } + TraitItem::Macro(_binding_0) => { + v.visit_trait_item_macro_mut(_binding_0); + } + TraitItem::Verbatim(_binding_0) => { + skip!(_binding_0); + } + #[cfg(syn_no_non_exhaustive)] + _ => unreachable!(), + } +} +#[cfg(feature = "full")] +pub fn visit_trait_item_const_mut<V>(v: &mut V, node: &mut TraitItemConst) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + tokens_helper(v, &mut node.const_token.span); + v.visit_ident_mut(&mut node.ident); + tokens_helper(v, &mut node.colon_token.spans); + v.visit_type_mut(&mut node.ty); + if let Some(it) = &mut node.default { + tokens_helper(v, &mut (it).0.spans); + v.visit_expr_mut(&mut (it).1); + } + tokens_helper(v, &mut node.semi_token.spans); +} +#[cfg(feature = "full")] +pub fn visit_trait_item_macro_mut<V>(v: &mut V, node: &mut TraitItemMacro) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_macro_mut(&mut node.mac); + if let Some(it) = &mut node.semi_token { + tokens_helper(v, &mut it.spans); + } +} +#[cfg(feature = "full")] +pub fn visit_trait_item_method_mut<V>(v: &mut V, node: &mut TraitItemMethod) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_signature_mut(&mut node.sig); + if let Some(it) = &mut node.default { + v.visit_block_mut(it); + } + if let Some(it) = &mut node.semi_token { + tokens_helper(v, &mut it.spans); + } +} +#[cfg(feature = "full")] +pub fn visit_trait_item_type_mut<V>(v: &mut V, node: &mut TraitItemType) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + tokens_helper(v, &mut node.type_token.span); + v.visit_ident_mut(&mut node.ident); + v.visit_generics_mut(&mut node.generics); + if let Some(it) = &mut node.colon_token { + tokens_helper(v, &mut it.spans); + } + for el in Punctuated::pairs_mut(&mut node.bounds) { + let (it, p) = el.into_tuple(); + v.visit_type_param_bound_mut(it); + if let Some(p) = p { + tokens_helper(v, &mut p.spans); + } + } + if let Some(it) = &mut node.default { + tokens_helper(v, &mut (it).0.spans); + v.visit_type_mut(&mut (it).1); + } + tokens_helper(v, &mut node.semi_token.spans); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_type_mut<V>(v: &mut V, node: &mut Type) +where + V: VisitMut + ?Sized, +{ + match node { + Type::Array(_binding_0) => { + v.visit_type_array_mut(_binding_0); + } + Type::BareFn(_binding_0) => { + v.visit_type_bare_fn_mut(_binding_0); + } + Type::Group(_binding_0) => { + v.visit_type_group_mut(_binding_0); + } + Type::ImplTrait(_binding_0) => { + v.visit_type_impl_trait_mut(_binding_0); + } + Type::Infer(_binding_0) => { + v.visit_type_infer_mut(_binding_0); + } + Type::Macro(_binding_0) => { + v.visit_type_macro_mut(_binding_0); + } + Type::Never(_binding_0) => { + v.visit_type_never_mut(_binding_0); + } + Type::Paren(_binding_0) => { + v.visit_type_paren_mut(_binding_0); + } + Type::Path(_binding_0) => { + v.visit_type_path_mut(_binding_0); + } + Type::Ptr(_binding_0) => { + v.visit_type_ptr_mut(_binding_0); + } + Type::Reference(_binding_0) => { + v.visit_type_reference_mut(_binding_0); + } + Type::Slice(_binding_0) => { + v.visit_type_slice_mut(_binding_0); + } + Type::TraitObject(_binding_0) => { 
+ v.visit_type_trait_object_mut(_binding_0); + } + Type::Tuple(_binding_0) => { + v.visit_type_tuple_mut(_binding_0); + } + Type::Verbatim(_binding_0) => { + skip!(_binding_0); + } + #[cfg(syn_no_non_exhaustive)] + _ => unreachable!(), + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_type_array_mut<V>(v: &mut V, node: &mut TypeArray) +where + V: VisitMut + ?Sized, +{ + tokens_helper(v, &mut node.bracket_token.span); + v.visit_type_mut(&mut *node.elem); + tokens_helper(v, &mut node.semi_token.spans); + v.visit_expr_mut(&mut node.len); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_type_bare_fn_mut<V>(v: &mut V, node: &mut TypeBareFn) +where + V: VisitMut + ?Sized, +{ + if let Some(it) = &mut node.lifetimes { + v.visit_bound_lifetimes_mut(it); + } + if let Some(it) = &mut node.unsafety { + tokens_helper(v, &mut it.span); + } + if let Some(it) = &mut node.abi { + v.visit_abi_mut(it); + } + tokens_helper(v, &mut node.fn_token.span); + tokens_helper(v, &mut node.paren_token.span); + for el in Punctuated::pairs_mut(&mut node.inputs) { + let (it, p) = el.into_tuple(); + v.visit_bare_fn_arg_mut(it); + if let Some(p) = p { + tokens_helper(v, &mut p.spans); + } + } + if let Some(it) = &mut node.variadic { + v.visit_variadic_mut(it); + } + v.visit_return_type_mut(&mut node.output); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_type_group_mut<V>(v: &mut V, node: &mut TypeGroup) +where + V: VisitMut + ?Sized, +{ + tokens_helper(v, &mut node.group_token.span); + v.visit_type_mut(&mut *node.elem); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_type_impl_trait_mut<V>(v: &mut V, node: &mut TypeImplTrait) +where + V: VisitMut + ?Sized, +{ + tokens_helper(v, &mut node.impl_token.span); + for el in Punctuated::pairs_mut(&mut node.bounds) { + let (it, p) = el.into_tuple(); + v.visit_type_param_bound_mut(it); + if let Some(p) = p { + tokens_helper(v, &mut p.spans); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_type_infer_mut<V>(v: &mut V, node: &mut TypeInfer) +where + V: VisitMut + ?Sized, +{ + tokens_helper(v, &mut node.underscore_token.spans); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_type_macro_mut<V>(v: &mut V, node: &mut TypeMacro) +where + V: VisitMut + ?Sized, +{ + v.visit_macro_mut(&mut node.mac); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_type_never_mut<V>(v: &mut V, node: &mut TypeNever) +where + V: VisitMut + ?Sized, +{ + tokens_helper(v, &mut node.bang_token.spans); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_type_param_mut<V>(v: &mut V, node: &mut TypeParam) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_ident_mut(&mut node.ident); + if let Some(it) = &mut node.colon_token { + tokens_helper(v, &mut it.spans); + } + for el in Punctuated::pairs_mut(&mut node.bounds) { + let (it, p) = el.into_tuple(); + v.visit_type_param_bound_mut(it); + if let Some(p) = p { + tokens_helper(v, &mut p.spans); + } + } + if let Some(it) = &mut node.eq_token { + tokens_helper(v, &mut it.spans); + } + if let Some(it) = &mut node.default { + v.visit_type_mut(it); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_type_param_bound_mut<V>(v: &mut V, node: &mut TypeParamBound) +where + V: VisitMut + ?Sized, +{ + match node { + TypeParamBound::Trait(_binding_0) => { + v.visit_trait_bound_mut(_binding_0); + } + TypeParamBound::Lifetime(_binding_0) => { + 
v.visit_lifetime_mut(_binding_0); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_type_paren_mut<V>(v: &mut V, node: &mut TypeParen) +where + V: VisitMut + ?Sized, +{ + tokens_helper(v, &mut node.paren_token.span); + v.visit_type_mut(&mut *node.elem); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_type_path_mut<V>(v: &mut V, node: &mut TypePath) +where + V: VisitMut + ?Sized, +{ + if let Some(it) = &mut node.qself { + v.visit_qself_mut(it); + } + v.visit_path_mut(&mut node.path); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_type_ptr_mut<V>(v: &mut V, node: &mut TypePtr) +where + V: VisitMut + ?Sized, +{ + tokens_helper(v, &mut node.star_token.spans); + if let Some(it) = &mut node.const_token { + tokens_helper(v, &mut it.span); + } + if let Some(it) = &mut node.mutability { + tokens_helper(v, &mut it.span); + } + v.visit_type_mut(&mut *node.elem); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_type_reference_mut<V>(v: &mut V, node: &mut TypeReference) +where + V: VisitMut + ?Sized, +{ + tokens_helper(v, &mut node.and_token.spans); + if let Some(it) = &mut node.lifetime { + v.visit_lifetime_mut(it); + } + if let Some(it) = &mut node.mutability { + tokens_helper(v, &mut it.span); + } + v.visit_type_mut(&mut *node.elem); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_type_slice_mut<V>(v: &mut V, node: &mut TypeSlice) +where + V: VisitMut + ?Sized, +{ + tokens_helper(v, &mut node.bracket_token.span); + v.visit_type_mut(&mut *node.elem); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_type_trait_object_mut<V>(v: &mut V, node: &mut TypeTraitObject) +where + V: VisitMut + ?Sized, +{ + if let Some(it) = &mut node.dyn_token { + tokens_helper(v, &mut it.span); + } + for el in Punctuated::pairs_mut(&mut node.bounds) { + let (it, p) = el.into_tuple(); + v.visit_type_param_bound_mut(it); + if let Some(p) = p { + tokens_helper(v, &mut p.spans); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_type_tuple_mut<V>(v: &mut V, node: &mut TypeTuple) +where + V: VisitMut + ?Sized, +{ + tokens_helper(v, &mut node.paren_token.span); + for el in Punctuated::pairs_mut(&mut node.elems) { + let (it, p) = el.into_tuple(); + v.visit_type_mut(it); + if let Some(p) = p { + tokens_helper(v, &mut p.spans); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_un_op_mut<V>(v: &mut V, node: &mut UnOp) +where + V: VisitMut + ?Sized, +{ + match node { + UnOp::Deref(_binding_0) => { + tokens_helper(v, &mut _binding_0.spans); + } + UnOp::Not(_binding_0) => { + tokens_helper(v, &mut _binding_0.spans); + } + UnOp::Neg(_binding_0) => { + tokens_helper(v, &mut _binding_0.spans); + } + } +} +#[cfg(feature = "full")] +pub fn visit_use_glob_mut<V>(v: &mut V, node: &mut UseGlob) +where + V: VisitMut + ?Sized, +{ + tokens_helper(v, &mut node.star_token.spans); +} +#[cfg(feature = "full")] +pub fn visit_use_group_mut<V>(v: &mut V, node: &mut UseGroup) +where + V: VisitMut + ?Sized, +{ + tokens_helper(v, &mut node.brace_token.span); + for el in Punctuated::pairs_mut(&mut node.items) { + let (it, p) = el.into_tuple(); + v.visit_use_tree_mut(it); + if let Some(p) = p { + tokens_helper(v, &mut p.spans); + } + } +} +#[cfg(feature = "full")] +pub fn visit_use_name_mut<V>(v: &mut V, node: &mut UseName) +where + V: VisitMut + ?Sized, +{ + v.visit_ident_mut(&mut node.ident); +} +#[cfg(feature = "full")] +pub fn visit_use_path_mut<V>(v: &mut V, node: &mut UsePath) 
+where + V: VisitMut + ?Sized, +{ + v.visit_ident_mut(&mut node.ident); + tokens_helper(v, &mut node.colon2_token.spans); + v.visit_use_tree_mut(&mut *node.tree); +} +#[cfg(feature = "full")] +pub fn visit_use_rename_mut<V>(v: &mut V, node: &mut UseRename) +where + V: VisitMut + ?Sized, +{ + v.visit_ident_mut(&mut node.ident); + tokens_helper(v, &mut node.as_token.span); + v.visit_ident_mut(&mut node.rename); +} +#[cfg(feature = "full")] +pub fn visit_use_tree_mut<V>(v: &mut V, node: &mut UseTree) +where + V: VisitMut + ?Sized, +{ + match node { + UseTree::Path(_binding_0) => { + v.visit_use_path_mut(_binding_0); + } + UseTree::Name(_binding_0) => { + v.visit_use_name_mut(_binding_0); + } + UseTree::Rename(_binding_0) => { + v.visit_use_rename_mut(_binding_0); + } + UseTree::Glob(_binding_0) => { + v.visit_use_glob_mut(_binding_0); + } + UseTree::Group(_binding_0) => { + v.visit_use_group_mut(_binding_0); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_variadic_mut<V>(v: &mut V, node: &mut Variadic) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + tokens_helper(v, &mut node.dots.spans); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_variant_mut<V>(v: &mut V, node: &mut Variant) +where + V: VisitMut + ?Sized, +{ + for it in &mut node.attrs { + v.visit_attribute_mut(it); + } + v.visit_ident_mut(&mut node.ident); + v.visit_fields_mut(&mut node.fields); + if let Some(it) = &mut node.discriminant { + tokens_helper(v, &mut (it).0.spans); + v.visit_expr_mut(&mut (it).1); + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_vis_crate_mut<V>(v: &mut V, node: &mut VisCrate) +where + V: VisitMut + ?Sized, +{ + tokens_helper(v, &mut node.crate_token.span); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_vis_public_mut<V>(v: &mut V, node: &mut VisPublic) +where + V: VisitMut + ?Sized, +{ + tokens_helper(v, &mut node.pub_token.span); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_vis_restricted_mut<V>(v: &mut V, node: &mut VisRestricted) +where + V: VisitMut + ?Sized, +{ + tokens_helper(v, &mut node.pub_token.span); + tokens_helper(v, &mut node.paren_token.span); + if let Some(it) = &mut node.in_token { + tokens_helper(v, &mut it.span); + } + v.visit_path_mut(&mut *node.path); +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_visibility_mut<V>(v: &mut V, node: &mut Visibility) +where + V: VisitMut + ?Sized, +{ + match node { + Visibility::Public(_binding_0) => { + v.visit_vis_public_mut(_binding_0); + } + Visibility::Crate(_binding_0) => { + v.visit_vis_crate_mut(_binding_0); + } + Visibility::Restricted(_binding_0) => { + v.visit_vis_restricted_mut(_binding_0); + } + Visibility::Inherited => {} + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_where_clause_mut<V>(v: &mut V, node: &mut WhereClause) +where + V: VisitMut + ?Sized, +{ + tokens_helper(v, &mut node.where_token.span); + for el in Punctuated::pairs_mut(&mut node.predicates) { + let (it, p) = el.into_tuple(); + v.visit_where_predicate_mut(it); + if let Some(p) = p { + tokens_helper(v, &mut p.spans); + } + } +} +#[cfg(any(feature = "derive", feature = "full"))] +pub fn visit_where_predicate_mut<V>(v: &mut V, node: &mut WherePredicate) +where + V: VisitMut + ?Sized, +{ + match node { + WherePredicate::Type(_binding_0) => { + v.visit_predicate_type_mut(_binding_0); + } + WherePredicate::Lifetime(_binding_0) => { + 
v.visit_predicate_lifetime_mut(_binding_0); + } + WherePredicate::Eq(_binding_0) => { + v.visit_predicate_eq_mut(_binding_0); + } + } +} diff --git a/third_party/rust/syn/src/gen_helper.rs b/third_party/rust/syn/src/gen_helper.rs new file mode 100644 index 0000000000..b2796126a3 --- /dev/null +++ b/third_party/rust/syn/src/gen_helper.rs @@ -0,0 +1,154 @@ +#[cfg(feature = "fold")] +pub mod fold { + use crate::fold::Fold; + use crate::punctuated::{Pair, Punctuated}; + use proc_macro2::Span; + + pub trait FoldHelper { + type Item; + fn lift<F>(self, f: F) -> Self + where + F: FnMut(Self::Item) -> Self::Item; + } + + impl<T> FoldHelper for Vec<T> { + type Item = T; + fn lift<F>(self, f: F) -> Self + where + F: FnMut(Self::Item) -> Self::Item, + { + self.into_iter().map(f).collect() + } + } + + impl<T, U> FoldHelper for Punctuated<T, U> { + type Item = T; + fn lift<F>(self, mut f: F) -> Self + where + F: FnMut(Self::Item) -> Self::Item, + { + self.into_pairs() + .map(Pair::into_tuple) + .map(|(t, u)| Pair::new(f(t), u)) + .collect() + } + } + + pub fn tokens_helper<F: Fold + ?Sized, S: Spans>(folder: &mut F, spans: &S) -> S { + spans.fold(folder) + } + + pub trait Spans { + fn fold<F: Fold + ?Sized>(&self, folder: &mut F) -> Self; + } + + impl Spans for Span { + fn fold<F: Fold + ?Sized>(&self, folder: &mut F) -> Self { + folder.fold_span(*self) + } + } + + impl Spans for [Span; 1] { + fn fold<F: Fold + ?Sized>(&self, folder: &mut F) -> Self { + [folder.fold_span(self[0])] + } + } + + impl Spans for [Span; 2] { + fn fold<F: Fold + ?Sized>(&self, folder: &mut F) -> Self { + [folder.fold_span(self[0]), folder.fold_span(self[1])] + } + } + + impl Spans for [Span; 3] { + fn fold<F: Fold + ?Sized>(&self, folder: &mut F) -> Self { + [ + folder.fold_span(self[0]), + folder.fold_span(self[1]), + folder.fold_span(self[2]), + ] + } + } +} + +#[cfg(feature = "visit")] +pub mod visit { + use crate::visit::Visit; + use proc_macro2::Span; + + pub fn tokens_helper<'ast, V: Visit<'ast> + ?Sized, S: Spans>(visitor: &mut V, spans: &S) { + spans.visit(visitor); + } + + pub trait Spans { + fn visit<'ast, V: Visit<'ast> + ?Sized>(&self, visitor: &mut V); + } + + impl Spans for Span { + fn visit<'ast, V: Visit<'ast> + ?Sized>(&self, visitor: &mut V) { + visitor.visit_span(self); + } + } + + impl Spans for [Span; 1] { + fn visit<'ast, V: Visit<'ast> + ?Sized>(&self, visitor: &mut V) { + visitor.visit_span(&self[0]); + } + } + + impl Spans for [Span; 2] { + fn visit<'ast, V: Visit<'ast> + ?Sized>(&self, visitor: &mut V) { + visitor.visit_span(&self[0]); + visitor.visit_span(&self[1]); + } + } + + impl Spans for [Span; 3] { + fn visit<'ast, V: Visit<'ast> + ?Sized>(&self, visitor: &mut V) { + visitor.visit_span(&self[0]); + visitor.visit_span(&self[1]); + visitor.visit_span(&self[2]); + } + } +} + +#[cfg(feature = "visit-mut")] +pub mod visit_mut { + use crate::visit_mut::VisitMut; + use proc_macro2::Span; + + pub fn tokens_helper<V: VisitMut + ?Sized, S: Spans>(visitor: &mut V, spans: &mut S) { + spans.visit_mut(visitor); + } + + pub trait Spans { + fn visit_mut<V: VisitMut + ?Sized>(&mut self, visitor: &mut V); + } + + impl Spans for Span { + fn visit_mut<V: VisitMut + ?Sized>(&mut self, visitor: &mut V) { + visitor.visit_span_mut(self); + } + } + + impl Spans for [Span; 1] { + fn visit_mut<V: VisitMut + ?Sized>(&mut self, visitor: &mut V) { + visitor.visit_span_mut(&mut self[0]); + } + } + + impl Spans for [Span; 2] { + fn visit_mut<V: VisitMut + ?Sized>(&mut self, visitor: &mut V) { + visitor.visit_span_mut(&mut 
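The visit_*_mut functions above are the free-function defaults behind syn's VisitMut trait: a custom visitor overrides only the methods it cares about and delegates back to these generated functions so the rest of the tree is still walked, while tokens_helper visits the spans stored inside each token. A minimal sketch of that pattern, assuming syn 1.x built with the default "parsing"/"printing" features plus "full" and "visit-mut" (ReplaceU32 and the sample function are invented for illustration, not part of the vendored sources):

use syn::visit_mut::{self, VisitMut};
use syn::{parse_quote, ItemFn, TypePath};

// Rewrites every bare `u32` type path to `u64` and leaves everything else alone.
struct ReplaceU32;

impl VisitMut for ReplaceU32 {
    fn visit_type_path_mut(&mut self, node: &mut TypePath) {
        if node.qself.is_none() && node.path.is_ident("u32") {
            node.path = parse_quote!(u64);
        }
        // Delegate to the generated function so nested nodes such as generic
        // arguments are still visited.
        visit_mut::visit_type_path_mut(self, node);
    }
}

fn main() {
    let mut item: ItemFn = parse_quote! {
        fn double(x: u32) -> u32 { x * 2 }
    };
    ReplaceU32.visit_item_fn_mut(&mut item);
    // Both `u32` occurrences in the signature are now `u64`.
}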
self[0]); + visitor.visit_span_mut(&mut self[1]); + } + } + + impl Spans for [Span; 3] { + fn visit_mut<V: VisitMut + ?Sized>(&mut self, visitor: &mut V) { + visitor.visit_span_mut(&mut self[0]); + visitor.visit_span_mut(&mut self[1]); + visitor.visit_span_mut(&mut self[2]); + } + } +} diff --git a/third_party/rust/syn/src/generics.rs b/third_party/rust/syn/src/generics.rs new file mode 100644 index 0000000000..6d4fe847ed --- /dev/null +++ b/third_party/rust/syn/src/generics.rs @@ -0,0 +1,1362 @@ +use super::*; +use crate::punctuated::{Iter, IterMut, Punctuated}; +#[cfg(all(feature = "printing", feature = "extra-traits"))] +use std::fmt::{self, Debug}; +#[cfg(all(feature = "printing", feature = "extra-traits"))] +use std::hash::{Hash, Hasher}; + +ast_struct! { + /// Lifetimes and type parameters attached to a declaration of a function, + /// enum, trait, etc. + /// + /// *This type is available only if Syn is built with the `"derive"` or `"full"` + /// feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct Generics { + pub lt_token: Option<Token![<]>, + pub params: Punctuated<GenericParam, Token![,]>, + pub gt_token: Option<Token![>]>, + pub where_clause: Option<WhereClause>, + } +} + +ast_enum_of_structs! { + /// A generic type parameter, lifetime, or const generic: `T: Into<String>`, + /// `'a: 'b`, `const LEN: usize`. + /// + /// *This type is available only if Syn is built with the `"derive"` or `"full"` + /// feature.* + /// + /// # Syntax tree enum + /// + /// This type is a [syntax tree enum]. + /// + /// [syntax tree enum]: Expr#syntax-tree-enums + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub enum GenericParam { + /// A generic type parameter: `T: Into<String>`. + Type(TypeParam), + + /// A lifetime definition: `'a: 'b + 'c + 'd`. + Lifetime(LifetimeDef), + + /// A const generic parameter: `const LENGTH: usize`. + Const(ConstParam), + } +} + +ast_struct! { + /// A generic type parameter: `T: Into<String>`. + /// + /// *This type is available only if Syn is built with the `"derive"` or + /// `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct TypeParam { + pub attrs: Vec<Attribute>, + pub ident: Ident, + pub colon_token: Option<Token![:]>, + pub bounds: Punctuated<TypeParamBound, Token![+]>, + pub eq_token: Option<Token![=]>, + pub default: Option<Type>, + } +} + +ast_struct! { + /// A lifetime definition: `'a: 'b + 'c + 'd`. + /// + /// *This type is available only if Syn is built with the `"derive"` or + /// `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct LifetimeDef { + pub attrs: Vec<Attribute>, + pub lifetime: Lifetime, + pub colon_token: Option<Token![:]>, + pub bounds: Punctuated<Lifetime, Token![+]>, + } +} + +ast_struct! { + /// A const generic parameter: `const LENGTH: usize`. 
+ /// + /// *This type is available only if Syn is built with the `"derive"` or + /// `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct ConstParam { + pub attrs: Vec<Attribute>, + pub const_token: Token![const], + pub ident: Ident, + pub colon_token: Token![:], + pub ty: Type, + pub eq_token: Option<Token![=]>, + pub default: Option<Expr>, + } +} + +impl Default for Generics { + fn default() -> Self { + Generics { + lt_token: None, + params: Punctuated::new(), + gt_token: None, + where_clause: None, + } + } +} + +impl Generics { + /// Returns an + /// <code + /// style="padding-right:0;">Iterator<Item = &</code><a + /// href="struct.TypeParam.html"><code + /// style="padding-left:0;padding-right:0;">TypeParam</code></a><code + /// style="padding-left:0;">></code> + /// over the type parameters in `self.params`. + pub fn type_params(&self) -> TypeParams { + TypeParams(self.params.iter()) + } + + /// Returns an + /// <code + /// style="padding-right:0;">Iterator<Item = &mut </code><a + /// href="struct.TypeParam.html"><code + /// style="padding-left:0;padding-right:0;">TypeParam</code></a><code + /// style="padding-left:0;">></code> + /// over the type parameters in `self.params`. + pub fn type_params_mut(&mut self) -> TypeParamsMut { + TypeParamsMut(self.params.iter_mut()) + } + + /// Returns an + /// <code + /// style="padding-right:0;">Iterator<Item = &</code><a + /// href="struct.LifetimeDef.html"><code + /// style="padding-left:0;padding-right:0;">LifetimeDef</code></a><code + /// style="padding-left:0;">></code> + /// over the lifetime parameters in `self.params`. + pub fn lifetimes(&self) -> Lifetimes { + Lifetimes(self.params.iter()) + } + + /// Returns an + /// <code + /// style="padding-right:0;">Iterator<Item = &mut </code><a + /// href="struct.LifetimeDef.html"><code + /// style="padding-left:0;padding-right:0;">LifetimeDef</code></a><code + /// style="padding-left:0;">></code> + /// over the lifetime parameters in `self.params`. + pub fn lifetimes_mut(&mut self) -> LifetimesMut { + LifetimesMut(self.params.iter_mut()) + } + + /// Returns an + /// <code + /// style="padding-right:0;">Iterator<Item = &</code><a + /// href="struct.ConstParam.html"><code + /// style="padding-left:0;padding-right:0;">ConstParam</code></a><code + /// style="padding-left:0;">></code> + /// over the constant parameters in `self.params`. + pub fn const_params(&self) -> ConstParams { + ConstParams(self.params.iter()) + } + + /// Returns an + /// <code + /// style="padding-right:0;">Iterator<Item = &mut </code><a + /// href="struct.ConstParam.html"><code + /// style="padding-left:0;padding-right:0;">ConstParam</code></a><code + /// style="padding-left:0;">></code> + /// over the constant parameters in `self.params`. + pub fn const_params_mut(&mut self) -> ConstParamsMut { + ConstParamsMut(self.params.iter_mut()) + } + + /// Initializes an empty `where`-clause if there is not one present already. 
+ pub fn make_where_clause(&mut self) -> &mut WhereClause { + self.where_clause.get_or_insert_with(|| WhereClause { + where_token: <Token![where]>::default(), + predicates: Punctuated::new(), + }) + } +} + +pub struct TypeParams<'a>(Iter<'a, GenericParam>); + +impl<'a> Iterator for TypeParams<'a> { + type Item = &'a TypeParam; + + fn next(&mut self) -> Option<Self::Item> { + let next = match self.0.next() { + Some(item) => item, + None => return None, + }; + if let GenericParam::Type(type_param) = next { + Some(type_param) + } else { + self.next() + } + } +} + +pub struct TypeParamsMut<'a>(IterMut<'a, GenericParam>); + +impl<'a> Iterator for TypeParamsMut<'a> { + type Item = &'a mut TypeParam; + + fn next(&mut self) -> Option<Self::Item> { + let next = match self.0.next() { + Some(item) => item, + None => return None, + }; + if let GenericParam::Type(type_param) = next { + Some(type_param) + } else { + self.next() + } + } +} + +pub struct Lifetimes<'a>(Iter<'a, GenericParam>); + +impl<'a> Iterator for Lifetimes<'a> { + type Item = &'a LifetimeDef; + + fn next(&mut self) -> Option<Self::Item> { + let next = match self.0.next() { + Some(item) => item, + None => return None, + }; + if let GenericParam::Lifetime(lifetime) = next { + Some(lifetime) + } else { + self.next() + } + } +} + +pub struct LifetimesMut<'a>(IterMut<'a, GenericParam>); + +impl<'a> Iterator for LifetimesMut<'a> { + type Item = &'a mut LifetimeDef; + + fn next(&mut self) -> Option<Self::Item> { + let next = match self.0.next() { + Some(item) => item, + None => return None, + }; + if let GenericParam::Lifetime(lifetime) = next { + Some(lifetime) + } else { + self.next() + } + } +} + +pub struct ConstParams<'a>(Iter<'a, GenericParam>); + +impl<'a> Iterator for ConstParams<'a> { + type Item = &'a ConstParam; + + fn next(&mut self) -> Option<Self::Item> { + let next = match self.0.next() { + Some(item) => item, + None => return None, + }; + if let GenericParam::Const(const_param) = next { + Some(const_param) + } else { + self.next() + } + } +} + +pub struct ConstParamsMut<'a>(IterMut<'a, GenericParam>); + +impl<'a> Iterator for ConstParamsMut<'a> { + type Item = &'a mut ConstParam; + + fn next(&mut self) -> Option<Self::Item> { + let next = match self.0.next() { + Some(item) => item, + None => return None, + }; + if let GenericParam::Const(const_param) = next { + Some(const_param) + } else { + self.next() + } + } +} + +/// Returned by `Generics::split_for_impl`. +/// +/// *This type is available only if Syn is built with the `"derive"` or `"full"` +/// feature and the `"printing"` feature.* +#[cfg(feature = "printing")] +#[cfg_attr( + doc_cfg, + doc(cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))) +)] +pub struct ImplGenerics<'a>(&'a Generics); + +/// Returned by `Generics::split_for_impl`. +/// +/// *This type is available only if Syn is built with the `"derive"` or `"full"` +/// feature and the `"printing"` feature.* +#[cfg(feature = "printing")] +#[cfg_attr( + doc_cfg, + doc(cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))) +)] +pub struct TypeGenerics<'a>(&'a Generics); + +/// Returned by `TypeGenerics::as_turbofish`. 
+/// +/// *This type is available only if Syn is built with the `"derive"` or `"full"` +/// feature and the `"printing"` feature.* +#[cfg(feature = "printing")] +#[cfg_attr( + doc_cfg, + doc(cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))) +)] +pub struct Turbofish<'a>(&'a Generics); + +#[cfg(feature = "printing")] +impl Generics { + /// Split a type's generics into the pieces required for impl'ing a trait + /// for that type. + /// + /// ``` + /// # use proc_macro2::{Span, Ident}; + /// # use quote::quote; + /// # + /// # let generics: syn::Generics = Default::default(); + /// # let name = Ident::new("MyType", Span::call_site()); + /// # + /// let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); + /// quote! { + /// impl #impl_generics MyTrait for #name #ty_generics #where_clause { + /// // ... + /// } + /// } + /// # ; + /// ``` + /// + /// *This method is available only if Syn is built with the `"derive"` or + /// `"full"` feature and the `"printing"` feature.* + #[cfg_attr( + doc_cfg, + doc(cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))) + )] + pub fn split_for_impl(&self) -> (ImplGenerics, TypeGenerics, Option<&WhereClause>) { + ( + ImplGenerics(self), + TypeGenerics(self), + self.where_clause.as_ref(), + ) + } +} + +#[cfg(feature = "printing")] +macro_rules! generics_wrapper_impls { + ($ty:ident) => { + #[cfg(feature = "clone-impls")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] + impl<'a> Clone for $ty<'a> { + fn clone(&self) -> Self { + $ty(self.0) + } + } + + #[cfg(feature = "extra-traits")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + impl<'a> Debug for $ty<'a> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter + .debug_tuple(stringify!($ty)) + .field(self.0) + .finish() + } + } + + #[cfg(feature = "extra-traits")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + impl<'a> Eq for $ty<'a> {} + + #[cfg(feature = "extra-traits")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + impl<'a> PartialEq for $ty<'a> { + fn eq(&self, other: &Self) -> bool { + self.0 == other.0 + } + } + + #[cfg(feature = "extra-traits")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + impl<'a> Hash for $ty<'a> { + fn hash<H: Hasher>(&self, state: &mut H) { + self.0.hash(state); + } + } + }; +} + +#[cfg(feature = "printing")] +generics_wrapper_impls!(ImplGenerics); +#[cfg(feature = "printing")] +generics_wrapper_impls!(TypeGenerics); +#[cfg(feature = "printing")] +generics_wrapper_impls!(Turbofish); + +#[cfg(feature = "printing")] +impl<'a> TypeGenerics<'a> { + /// Turn a type's generics like `<X, Y>` into a turbofish like `::<X, Y>`. + /// + /// *This method is available only if Syn is built with the `"derive"` or + /// `"full"` feature and the `"printing"` feature.* + pub fn as_turbofish(&self) -> Turbofish { + Turbofish(self.0) + } +} + +ast_struct! { + /// A set of bound lifetimes: `for<'a, 'b, 'c>`. 
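TypeGenerics::as_turbofish above exists because generic arguments in expression position need the `::<...>` form rather than `<...>`. A small sketch of how the split_for_impl pieces and the turbofish are typically combined with the quote crate, assuming syn 1.x with its default "printing" feature (MyType and the generated call are sample values, not part of the vendored code):

use proc_macro2::Span;
use quote::quote;
use syn::{parse_quote, Generics, Ident};

fn main() {
    let generics: Generics = parse_quote!(<T: Clone, const N: usize>);
    let name = Ident::new("MyType", Span::call_site());

    let (_impl_generics, ty_generics, _where_clause) = generics.split_for_impl();
    let turbofish = ty_generics.as_turbofish();

    // Prints roughly `MyType :: < T , N > :: default ()`; without the
    // turbofish, `MyType<T, N>::default()` would not parse as an expression.
    let call = quote! { #name #turbofish::default() };
    println!("{}", call);
}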
+ /// + /// *This type is available only if Syn is built with the `"derive"` or `"full"` + /// feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct BoundLifetimes { + pub for_token: Token![for], + pub lt_token: Token![<], + pub lifetimes: Punctuated<LifetimeDef, Token![,]>, + pub gt_token: Token![>], + } +} + +impl Default for BoundLifetimes { + fn default() -> Self { + BoundLifetimes { + for_token: Default::default(), + lt_token: Default::default(), + lifetimes: Punctuated::new(), + gt_token: Default::default(), + } + } +} + +impl LifetimeDef { + pub fn new(lifetime: Lifetime) -> Self { + LifetimeDef { + attrs: Vec::new(), + lifetime, + colon_token: None, + bounds: Punctuated::new(), + } + } +} + +impl From<Ident> for TypeParam { + fn from(ident: Ident) -> Self { + TypeParam { + attrs: vec![], + ident, + colon_token: None, + bounds: Punctuated::new(), + eq_token: None, + default: None, + } + } +} + +ast_enum_of_structs! { + /// A trait or lifetime used as a bound on a type parameter. + /// + /// *This type is available only if Syn is built with the `"derive"` or `"full"` + /// feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub enum TypeParamBound { + Trait(TraitBound), + Lifetime(Lifetime), + } +} + +ast_struct! { + /// A trait used as a bound on a type parameter. + /// + /// *This type is available only if Syn is built with the `"derive"` or `"full"` + /// feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct TraitBound { + pub paren_token: Option<token::Paren>, + pub modifier: TraitBoundModifier, + /// The `for<'a>` in `for<'a> Foo<&'a T>` + pub lifetimes: Option<BoundLifetimes>, + /// The `Foo<&'a T>` in `for<'a> Foo<&'a T>` + pub path: Path, + } +} + +ast_enum! { + /// A modifier on a trait bound, currently only used for the `?` in + /// `?Sized`. + /// + /// *This type is available only if Syn is built with the `"derive"` or `"full"` + /// feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub enum TraitBoundModifier { + None, + Maybe(Token![?]), + } +} + +ast_struct! { + /// A `where` clause in a definition: `where T: Deserialize<'de>, D: + /// 'static`. + /// + /// *This type is available only if Syn is built with the `"derive"` or `"full"` + /// feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct WhereClause { + pub where_token: Token![where], + pub predicates: Punctuated<WherePredicate, Token![,]>, + } +} + +ast_enum_of_structs! { + /// A single predicate in a `where` clause: `T: Deserialize<'de>`. + /// + /// *This type is available only if Syn is built with the `"derive"` or `"full"` + /// feature.* + /// + /// # Syntax tree enum + /// + /// This type is a [syntax tree enum]. + /// + /// [syntax tree enum]: Expr#syntax-tree-enums + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub enum WherePredicate { + /// A type predicate in a `where` clause: `for<'c> Foo<'c>: Trait<'c>`. + Type(PredicateType), + + /// A lifetime predicate in a `where` clause: `'a: 'b + 'c`. + Lifetime(PredicateLifetime), + + /// An equality predicate in a `where` clause (unsupported). + Eq(PredicateEq), + } +} + +ast_struct! { + /// A type predicate in a `where` clause: `for<'c> Foo<'c>: Trait<'c>`. 
+ /// + /// *This type is available only if Syn is built with the `"derive"` or + /// `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct PredicateType { + /// Any lifetimes from a `for` binding + pub lifetimes: Option<BoundLifetimes>, + /// The type being bounded + pub bounded_ty: Type, + pub colon_token: Token![:], + /// Trait and lifetime bounds (`Clone+Send+'static`) + pub bounds: Punctuated<TypeParamBound, Token![+]>, + } +} + +ast_struct! { + /// A lifetime predicate in a `where` clause: `'a: 'b + 'c`. + /// + /// *This type is available only if Syn is built with the `"derive"` or + /// `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct PredicateLifetime { + pub lifetime: Lifetime, + pub colon_token: Token![:], + pub bounds: Punctuated<Lifetime, Token![+]>, + } +} + +ast_struct! { + /// An equality predicate in a `where` clause (unsupported). + /// + /// *This type is available only if Syn is built with the `"derive"` or + /// `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct PredicateEq { + pub lhs_ty: Type, + pub eq_token: Token![=], + pub rhs_ty: Type, + } +} + +#[cfg(feature = "parsing")] +pub mod parsing { + use super::*; + use crate::ext::IdentExt; + use crate::parse::{Parse, ParseStream, Result}; + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for Generics { + fn parse(input: ParseStream) -> Result<Self> { + if !input.peek(Token![<]) { + return Ok(Generics::default()); + } + + let lt_token: Token![<] = input.parse()?; + + let mut params = Punctuated::new(); + loop { + if input.peek(Token![>]) { + break; + } + + let attrs = input.call(Attribute::parse_outer)?; + let lookahead = input.lookahead1(); + if lookahead.peek(Lifetime) { + params.push_value(GenericParam::Lifetime(LifetimeDef { + attrs, + ..input.parse()? + })); + } else if lookahead.peek(Ident) { + params.push_value(GenericParam::Type(TypeParam { + attrs, + ..input.parse()? + })); + } else if lookahead.peek(Token![const]) { + params.push_value(GenericParam::Const(ConstParam { + attrs, + ..input.parse()? + })); + } else if input.peek(Token![_]) { + params.push_value(GenericParam::Type(TypeParam { + attrs, + ident: input.call(Ident::parse_any)?, + colon_token: None, + bounds: Punctuated::new(), + eq_token: None, + default: None, + })); + } else { + return Err(lookahead.error()); + } + + if input.peek(Token![>]) { + break; + } + let punct = input.parse()?; + params.push_punct(punct); + } + + let gt_token: Token![>] = input.parse()?; + + Ok(Generics { + lt_token: Some(lt_token), + params, + gt_token: Some(gt_token), + where_clause: None, + }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for GenericParam { + fn parse(input: ParseStream) -> Result<Self> { + let attrs = input.call(Attribute::parse_outer)?; + + let lookahead = input.lookahead1(); + if lookahead.peek(Ident) { + Ok(GenericParam::Type(TypeParam { + attrs, + ..input.parse()? + })) + } else if lookahead.peek(Lifetime) { + Ok(GenericParam::Lifetime(LifetimeDef { + attrs, + ..input.parse()? + })) + } else if lookahead.peek(Token![const]) { + Ok(GenericParam::Const(ConstParam { + attrs, + ..input.parse()? 
+ })) + } else { + Err(lookahead.error()) + } + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for LifetimeDef { + fn parse(input: ParseStream) -> Result<Self> { + let has_colon; + Ok(LifetimeDef { + attrs: input.call(Attribute::parse_outer)?, + lifetime: input.parse()?, + colon_token: { + if input.peek(Token![:]) { + has_colon = true; + Some(input.parse()?) + } else { + has_colon = false; + None + } + }, + bounds: { + let mut bounds = Punctuated::new(); + if has_colon { + loop { + if input.peek(Token![,]) || input.peek(Token![>]) { + break; + } + let value = input.parse()?; + bounds.push_value(value); + if !input.peek(Token![+]) { + break; + } + let punct = input.parse()?; + bounds.push_punct(punct); + } + } + bounds + }, + }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for BoundLifetimes { + fn parse(input: ParseStream) -> Result<Self> { + Ok(BoundLifetimes { + for_token: input.parse()?, + lt_token: input.parse()?, + lifetimes: { + let mut lifetimes = Punctuated::new(); + while !input.peek(Token![>]) { + lifetimes.push_value(input.parse()?); + if input.peek(Token![>]) { + break; + } + lifetimes.push_punct(input.parse()?); + } + lifetimes + }, + gt_token: input.parse()?, + }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for Option<BoundLifetimes> { + fn parse(input: ParseStream) -> Result<Self> { + if input.peek(Token![for]) { + input.parse().map(Some) + } else { + Ok(None) + } + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for TypeParam { + fn parse(input: ParseStream) -> Result<Self> { + let attrs = input.call(Attribute::parse_outer)?; + let ident: Ident = input.parse()?; + let colon_token: Option<Token![:]> = input.parse()?; + + let begin_bound = input.fork(); + let mut is_maybe_const = false; + let mut bounds = Punctuated::new(); + if colon_token.is_some() { + loop { + if input.peek(Token![,]) || input.peek(Token![>]) || input.peek(Token![=]) { + break; + } + if input.peek(Token![~]) && input.peek2(Token![const]) { + input.parse::<Token![~]>()?; + input.parse::<Token![const]>()?; + is_maybe_const = true; + } + let value: TypeParamBound = input.parse()?; + bounds.push_value(value); + if !input.peek(Token![+]) { + break; + } + let punct: Token![+] = input.parse()?; + bounds.push_punct(punct); + } + } + + let mut eq_token: Option<Token![=]> = input.parse()?; + let mut default = if eq_token.is_some() { + Some(input.parse::<Type>()?) 
+ } else { + None + }; + + if is_maybe_const { + bounds.clear(); + eq_token = None; + default = Some(Type::Verbatim(verbatim::between(begin_bound, input))); + } + + Ok(TypeParam { + attrs, + ident, + colon_token, + bounds, + eq_token, + default, + }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for TypeParamBound { + fn parse(input: ParseStream) -> Result<Self> { + if input.peek(Lifetime) { + return input.parse().map(TypeParamBound::Lifetime); + } + + if input.peek(token::Paren) { + let content; + let paren_token = parenthesized!(content in input); + let mut bound: TraitBound = content.parse()?; + bound.paren_token = Some(paren_token); + return Ok(TypeParamBound::Trait(bound)); + } + + input.parse().map(TypeParamBound::Trait) + } + } + + impl TypeParamBound { + pub(crate) fn parse_multiple( + input: ParseStream, + allow_plus: bool, + ) -> Result<Punctuated<Self, Token![+]>> { + let mut bounds = Punctuated::new(); + loop { + bounds.push_value(input.parse()?); + if !(allow_plus && input.peek(Token![+])) { + break; + } + bounds.push_punct(input.parse()?); + if !(input.peek(Ident::peek_any) + || input.peek(Token![::]) + || input.peek(Token![?]) + || input.peek(Lifetime) + || input.peek(token::Paren)) + { + break; + } + } + Ok(bounds) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for TraitBound { + fn parse(input: ParseStream) -> Result<Self> { + #[cfg(feature = "full")] + let tilde_const = if input.peek(Token![~]) && input.peek2(Token![const]) { + let tilde_token = input.parse::<Token![~]>()?; + let const_token = input.parse::<Token![const]>()?; + Some((tilde_token, const_token)) + } else { + None + }; + + let modifier: TraitBoundModifier = input.parse()?; + let lifetimes: Option<BoundLifetimes> = input.parse()?; + + let mut path: Path = input.parse()?; + if path.segments.last().unwrap().arguments.is_empty() + && (input.peek(token::Paren) || input.peek(Token![::]) && input.peek3(token::Paren)) + { + input.parse::<Option<Token![::]>>()?; + let args: ParenthesizedGenericArguments = input.parse()?; + let parenthesized = PathArguments::Parenthesized(args); + path.segments.last_mut().unwrap().arguments = parenthesized; + } + + #[cfg(feature = "full")] + { + if let Some((tilde_token, const_token)) = tilde_const { + path.segments.insert( + 0, + PathSegment { + ident: Ident::new("const", const_token.span), + arguments: PathArguments::None, + }, + ); + let (_const, punct) = path.segments.pairs_mut().next().unwrap().into_tuple(); + *punct.unwrap() = Token![::](tilde_token.span); + } + } + + Ok(TraitBound { + paren_token: None, + modifier, + lifetimes, + path, + }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for TraitBoundModifier { + fn parse(input: ParseStream) -> Result<Self> { + if input.peek(Token![?]) { + input.parse().map(TraitBoundModifier::Maybe) + } else { + Ok(TraitBoundModifier::None) + } + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ConstParam { + fn parse(input: ParseStream) -> Result<Self> { + let mut default = None; + Ok(ConstParam { + attrs: input.call(Attribute::parse_outer)?, + const_token: input.parse()?, + ident: input.parse()?, + colon_token: input.parse()?, + ty: input.parse()?, + eq_token: { + if input.peek(Token![=]) { + let eq_token = input.parse()?; + default = Some(path::parsing::const_argument(input)?); + Some(eq_token) + } else { + None + } + }, + default, + }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for 
WhereClause { + fn parse(input: ParseStream) -> Result<Self> { + Ok(WhereClause { + where_token: input.parse()?, + predicates: { + let mut predicates = Punctuated::new(); + loop { + if input.is_empty() + || input.peek(token::Brace) + || input.peek(Token![,]) + || input.peek(Token![;]) + || input.peek(Token![:]) && !input.peek(Token![::]) + || input.peek(Token![=]) + { + break; + } + let value = input.parse()?; + predicates.push_value(value); + if !input.peek(Token![,]) { + break; + } + let punct = input.parse()?; + predicates.push_punct(punct); + } + predicates + }, + }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for Option<WhereClause> { + fn parse(input: ParseStream) -> Result<Self> { + if input.peek(Token![where]) { + input.parse().map(Some) + } else { + Ok(None) + } + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for WherePredicate { + fn parse(input: ParseStream) -> Result<Self> { + if input.peek(Lifetime) && input.peek2(Token![:]) { + Ok(WherePredicate::Lifetime(PredicateLifetime { + lifetime: input.parse()?, + colon_token: input.parse()?, + bounds: { + let mut bounds = Punctuated::new(); + loop { + if input.is_empty() + || input.peek(token::Brace) + || input.peek(Token![,]) + || input.peek(Token![;]) + || input.peek(Token![:]) + || input.peek(Token![=]) + { + break; + } + let value = input.parse()?; + bounds.push_value(value); + if !input.peek(Token![+]) { + break; + } + let punct = input.parse()?; + bounds.push_punct(punct); + } + bounds + }, + })) + } else { + Ok(WherePredicate::Type(PredicateType { + lifetimes: input.parse()?, + bounded_ty: input.parse()?, + colon_token: input.parse()?, + bounds: { + let mut bounds = Punctuated::new(); + loop { + if input.is_empty() + || input.peek(token::Brace) + || input.peek(Token![,]) + || input.peek(Token![;]) + || input.peek(Token![:]) && !input.peek(Token![::]) + || input.peek(Token![=]) + { + break; + } + let value = input.parse()?; + bounds.push_value(value); + if !input.peek(Token![+]) { + break; + } + let punct = input.parse()?; + bounds.push_punct(punct); + } + bounds + }, + })) + } + } + } +} + +#[cfg(feature = "printing")] +mod printing { + use super::*; + use crate::attr::FilterAttrs; + use crate::print::TokensOrDefault; + #[cfg(feature = "full")] + use crate::punctuated::Pair; + use proc_macro2::TokenStream; + #[cfg(feature = "full")] + use proc_macro2::TokenTree; + use quote::{ToTokens, TokenStreamExt}; + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for Generics { + fn to_tokens(&self, tokens: &mut TokenStream) { + if self.params.is_empty() { + return; + } + + TokensOrDefault(&self.lt_token).to_tokens(tokens); + + // Print lifetimes before types and consts, regardless of their + // order in self.params. + // + // TODO: ordering rules for const parameters vs type parameters have + // not been settled yet. 
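The Parse impls above make Generics and WherePredicate directly parseable, which combines naturally with make_where_clause defined earlier in this file. A hedged sketch, assuming syn 1.x with the "parsing" and "printing" features enabled (the parameter list and the extra `T: Default` predicate are just sample input):

use quote::quote;
use syn::{parse_quote, Generics, WherePredicate};

fn main() {
    let mut generics: Generics = parse_quote!(<'a, T: Clone + 'a, const N: usize>);
    assert_eq!(generics.lifetimes().count(), 1);
    assert_eq!(generics.type_params().count(), 1);
    assert_eq!(generics.const_params().count(), 1);

    // make_where_clause inserts an empty `where` clause on demand, so callers
    // can always push additional predicates.
    let extra: WherePredicate = parse_quote!(T: Default);
    generics.make_where_clause().predicates.push(extra);

    let where_clause = generics.where_clause.as_ref().unwrap();
    println!("{}", quote!(#where_clause)); // where T : Default
}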
https://github.com/rust-lang/rust/issues/44580 + let mut trailing_or_empty = true; + for param in self.params.pairs() { + if let GenericParam::Lifetime(_) = **param.value() { + param.to_tokens(tokens); + trailing_or_empty = param.punct().is_some(); + } + } + for param in self.params.pairs() { + match **param.value() { + GenericParam::Type(_) | GenericParam::Const(_) => { + if !trailing_or_empty { + <Token![,]>::default().to_tokens(tokens); + trailing_or_empty = true; + } + param.to_tokens(tokens); + } + GenericParam::Lifetime(_) => {} + } + } + + TokensOrDefault(&self.gt_token).to_tokens(tokens); + } + } + + impl<'a> ToTokens for ImplGenerics<'a> { + fn to_tokens(&self, tokens: &mut TokenStream) { + if self.0.params.is_empty() { + return; + } + + TokensOrDefault(&self.0.lt_token).to_tokens(tokens); + + // Print lifetimes before types and consts, regardless of their + // order in self.params. + // + // TODO: ordering rules for const parameters vs type parameters have + // not been settled yet. https://github.com/rust-lang/rust/issues/44580 + let mut trailing_or_empty = true; + for param in self.0.params.pairs() { + if let GenericParam::Lifetime(_) = **param.value() { + param.to_tokens(tokens); + trailing_or_empty = param.punct().is_some(); + } + } + for param in self.0.params.pairs() { + if let GenericParam::Lifetime(_) = **param.value() { + continue; + } + if !trailing_or_empty { + <Token![,]>::default().to_tokens(tokens); + trailing_or_empty = true; + } + match *param.value() { + GenericParam::Lifetime(_) => unreachable!(), + GenericParam::Type(param) => { + // Leave off the type parameter defaults + tokens.append_all(param.attrs.outer()); + param.ident.to_tokens(tokens); + if !param.bounds.is_empty() { + TokensOrDefault(¶m.colon_token).to_tokens(tokens); + param.bounds.to_tokens(tokens); + } + } + GenericParam::Const(param) => { + // Leave off the const parameter defaults + tokens.append_all(param.attrs.outer()); + param.const_token.to_tokens(tokens); + param.ident.to_tokens(tokens); + param.colon_token.to_tokens(tokens); + param.ty.to_tokens(tokens); + } + } + param.punct().to_tokens(tokens); + } + + TokensOrDefault(&self.0.gt_token).to_tokens(tokens); + } + } + + impl<'a> ToTokens for TypeGenerics<'a> { + fn to_tokens(&self, tokens: &mut TokenStream) { + if self.0.params.is_empty() { + return; + } + + TokensOrDefault(&self.0.lt_token).to_tokens(tokens); + + // Print lifetimes before types and consts, regardless of their + // order in self.params. + // + // TODO: ordering rules for const parameters vs type parameters have + // not been settled yet. 
https://github.com/rust-lang/rust/issues/44580 + let mut trailing_or_empty = true; + for param in self.0.params.pairs() { + if let GenericParam::Lifetime(def) = *param.value() { + // Leave off the lifetime bounds and attributes + def.lifetime.to_tokens(tokens); + param.punct().to_tokens(tokens); + trailing_or_empty = param.punct().is_some(); + } + } + for param in self.0.params.pairs() { + if let GenericParam::Lifetime(_) = **param.value() { + continue; + } + if !trailing_or_empty { + <Token![,]>::default().to_tokens(tokens); + trailing_or_empty = true; + } + match *param.value() { + GenericParam::Lifetime(_) => unreachable!(), + GenericParam::Type(param) => { + // Leave off the type parameter defaults + param.ident.to_tokens(tokens); + } + GenericParam::Const(param) => { + // Leave off the const parameter defaults + param.ident.to_tokens(tokens); + } + } + param.punct().to_tokens(tokens); + } + + TokensOrDefault(&self.0.gt_token).to_tokens(tokens); + } + } + + impl<'a> ToTokens for Turbofish<'a> { + fn to_tokens(&self, tokens: &mut TokenStream) { + if !self.0.params.is_empty() { + <Token![::]>::default().to_tokens(tokens); + TypeGenerics(self.0).to_tokens(tokens); + } + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for BoundLifetimes { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.for_token.to_tokens(tokens); + self.lt_token.to_tokens(tokens); + self.lifetimes.to_tokens(tokens); + self.gt_token.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for LifetimeDef { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + self.lifetime.to_tokens(tokens); + if !self.bounds.is_empty() { + TokensOrDefault(&self.colon_token).to_tokens(tokens); + self.bounds.to_tokens(tokens); + } + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for TypeParam { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + self.ident.to_tokens(tokens); + if !self.bounds.is_empty() { + TokensOrDefault(&self.colon_token).to_tokens(tokens); + self.bounds.to_tokens(tokens); + } + if let Some(default) = &self.default { + #[cfg(feature = "full")] + { + if self.eq_token.is_none() { + if let Type::Verbatim(default) = default { + let mut iter = default.clone().into_iter().peekable(); + while let Some(token) = iter.next() { + if let TokenTree::Punct(q) = token { + if q.as_char() == '~' { + if let Some(TokenTree::Ident(c)) = iter.peek() { + if c == "const" { + if self.bounds.is_empty() { + TokensOrDefault(&self.colon_token) + .to_tokens(tokens); + } + return default.to_tokens(tokens); + } + } + } + } + } + } + } + } + TokensOrDefault(&self.eq_token).to_tokens(tokens); + default.to_tokens(tokens); + } + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for TraitBound { + fn to_tokens(&self, tokens: &mut TokenStream) { + let to_tokens = |tokens: &mut TokenStream| { + #[cfg(feature = "full")] + let skip = match self.path.segments.pairs().next() { + Some(Pair::Punctuated(t, p)) if t.ident == "const" => { + Token![~](p.spans[0]).to_tokens(tokens); + t.to_tokens(tokens); + 1 + } + _ => 0, + }; + self.modifier.to_tokens(tokens); + self.lifetimes.to_tokens(tokens); + #[cfg(feature = "full")] + { + self.path.leading_colon.to_tokens(tokens); + tokens.append_all(self.path.segments.pairs().skip(skip)); + } + #[cfg(not(feature = "full"))] + { + self.path.to_tokens(tokens); + } + }; + match &self.paren_token { + 
Some(paren) => paren.surround(tokens, to_tokens), + None => to_tokens(tokens), + } + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for TraitBoundModifier { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + TraitBoundModifier::None => {} + TraitBoundModifier::Maybe(t) => t.to_tokens(tokens), + } + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ConstParam { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + self.const_token.to_tokens(tokens); + self.ident.to_tokens(tokens); + self.colon_token.to_tokens(tokens); + self.ty.to_tokens(tokens); + if let Some(default) = &self.default { + TokensOrDefault(&self.eq_token).to_tokens(tokens); + default.to_tokens(tokens); + } + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for WhereClause { + fn to_tokens(&self, tokens: &mut TokenStream) { + if !self.predicates.is_empty() { + self.where_token.to_tokens(tokens); + self.predicates.to_tokens(tokens); + } + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for PredicateType { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.lifetimes.to_tokens(tokens); + self.bounded_ty.to_tokens(tokens); + self.colon_token.to_tokens(tokens); + self.bounds.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for PredicateLifetime { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.lifetime.to_tokens(tokens); + self.colon_token.to_tokens(tokens); + self.bounds.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for PredicateEq { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.lhs_ty.to_tokens(tokens); + self.eq_token.to_tokens(tokens); + self.rhs_ty.to_tokens(tokens); + } + } +} diff --git a/third_party/rust/syn/src/group.rs b/third_party/rust/syn/src/group.rs new file mode 100644 index 0000000000..7fd273c1de --- /dev/null +++ b/third_party/rust/syn/src/group.rs @@ -0,0 +1,282 @@ +use crate::error::Result; +use crate::parse::ParseBuffer; +use crate::token; +use proc_macro2::{Delimiter, Span}; + +// Not public API. +#[doc(hidden)] +pub struct Parens<'a> { + pub token: token::Paren, + pub content: ParseBuffer<'a>, +} + +// Not public API. +#[doc(hidden)] +pub struct Braces<'a> { + pub token: token::Brace, + pub content: ParseBuffer<'a>, +} + +// Not public API. +#[doc(hidden)] +pub struct Brackets<'a> { + pub token: token::Bracket, + pub content: ParseBuffer<'a>, +} + +// Not public API. +#[cfg(any(feature = "full", feature = "derive"))] +#[doc(hidden)] +pub struct Group<'a> { + pub token: token::Group, + pub content: ParseBuffer<'a>, +} + +// Not public API. +#[doc(hidden)] +pub fn parse_parens<'a>(input: &ParseBuffer<'a>) -> Result<Parens<'a>> { + parse_delimited(input, Delimiter::Parenthesis).map(|(span, content)| Parens { + token: token::Paren(span), + content, + }) +} + +// Not public API. +#[doc(hidden)] +pub fn parse_braces<'a>(input: &ParseBuffer<'a>) -> Result<Braces<'a>> { + parse_delimited(input, Delimiter::Brace).map(|(span, content)| Braces { + token: token::Brace(span), + content, + }) +} + +// Not public API. 
+#[doc(hidden)] +pub fn parse_brackets<'a>(input: &ParseBuffer<'a>) -> Result<Brackets<'a>> { + parse_delimited(input, Delimiter::Bracket).map(|(span, content)| Brackets { + token: token::Bracket(span), + content, + }) +} + +#[cfg(any(feature = "full", feature = "derive"))] +pub(crate) fn parse_group<'a>(input: &ParseBuffer<'a>) -> Result<Group<'a>> { + parse_delimited(input, Delimiter::None).map(|(span, content)| Group { + token: token::Group(span), + content, + }) +} + +fn parse_delimited<'a>( + input: &ParseBuffer<'a>, + delimiter: Delimiter, +) -> Result<(Span, ParseBuffer<'a>)> { + input.step(|cursor| { + if let Some((content, span, rest)) = cursor.group(delimiter) { + let scope = crate::buffer::close_span_of_group(*cursor); + let nested = crate::parse::advance_step_cursor(cursor, content); + let unexpected = crate::parse::get_unexpected(input); + let content = crate::parse::new_parse_buffer(scope, nested, unexpected); + Ok(((span, content), rest)) + } else { + let message = match delimiter { + Delimiter::Parenthesis => "expected parentheses", + Delimiter::Brace => "expected curly braces", + Delimiter::Bracket => "expected square brackets", + Delimiter::None => "expected invisible group", + }; + Err(cursor.error(message)) + } + }) +} + +/// Parse a set of parentheses and expose their content to subsequent parsers. +/// +/// # Example +/// +/// ``` +/// # use quote::quote; +/// # +/// use syn::{parenthesized, token, Ident, Result, Token, Type}; +/// use syn::parse::{Parse, ParseStream}; +/// use syn::punctuated::Punctuated; +/// +/// // Parse a simplified tuple struct syntax like: +/// // +/// // struct S(A, B); +/// struct TupleStruct { +/// struct_token: Token![struct], +/// ident: Ident, +/// paren_token: token::Paren, +/// fields: Punctuated<Type, Token![,]>, +/// semi_token: Token![;], +/// } +/// +/// impl Parse for TupleStruct { +/// fn parse(input: ParseStream) -> Result<Self> { +/// let content; +/// Ok(TupleStruct { +/// struct_token: input.parse()?, +/// ident: input.parse()?, +/// paren_token: parenthesized!(content in input), +/// fields: content.parse_terminated(Type::parse)?, +/// semi_token: input.parse()?, +/// }) +/// } +/// } +/// # +/// # fn main() { +/// # let input = quote! { +/// # struct S(A, B); +/// # }; +/// # syn::parse2::<TupleStruct>(input).unwrap(); +/// # } +/// ``` +#[macro_export] +#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] +macro_rules! parenthesized { + ($content:ident in $cursor:expr) => { + match $crate::__private::parse_parens(&$cursor) { + $crate::__private::Ok(parens) => { + $content = parens.content; + parens.token + } + $crate::__private::Err(error) => { + return $crate::__private::Err(error); + } + } + }; +} + +/// Parse a set of curly braces and expose their content to subsequent parsers. 
+/// +/// # Example +/// +/// ``` +/// # use quote::quote; +/// # +/// use syn::{braced, token, Ident, Result, Token, Type}; +/// use syn::parse::{Parse, ParseStream}; +/// use syn::punctuated::Punctuated; +/// +/// // Parse a simplified struct syntax like: +/// // +/// // struct S { +/// // a: A, +/// // b: B, +/// // } +/// struct Struct { +/// struct_token: Token![struct], +/// ident: Ident, +/// brace_token: token::Brace, +/// fields: Punctuated<Field, Token![,]>, +/// } +/// +/// struct Field { +/// name: Ident, +/// colon_token: Token![:], +/// ty: Type, +/// } +/// +/// impl Parse for Struct { +/// fn parse(input: ParseStream) -> Result<Self> { +/// let content; +/// Ok(Struct { +/// struct_token: input.parse()?, +/// ident: input.parse()?, +/// brace_token: braced!(content in input), +/// fields: content.parse_terminated(Field::parse)?, +/// }) +/// } +/// } +/// +/// impl Parse for Field { +/// fn parse(input: ParseStream) -> Result<Self> { +/// Ok(Field { +/// name: input.parse()?, +/// colon_token: input.parse()?, +/// ty: input.parse()?, +/// }) +/// } +/// } +/// # +/// # fn main() { +/// # let input = quote! { +/// # struct S { +/// # a: A, +/// # b: B, +/// # } +/// # }; +/// # syn::parse2::<Struct>(input).unwrap(); +/// # } +/// ``` +#[macro_export] +#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] +macro_rules! braced { + ($content:ident in $cursor:expr) => { + match $crate::__private::parse_braces(&$cursor) { + $crate::__private::Ok(braces) => { + $content = braces.content; + braces.token + } + $crate::__private::Err(error) => { + return $crate::__private::Err(error); + } + } + }; +} + +/// Parse a set of square brackets and expose their content to subsequent +/// parsers. +/// +/// # Example +/// +/// ``` +/// # use quote::quote; +/// # +/// use proc_macro2::TokenStream; +/// use syn::{bracketed, token, Result, Token}; +/// use syn::parse::{Parse, ParseStream}; +/// +/// // Parse an outer attribute like: +/// // +/// // #[repr(C, packed)] +/// struct OuterAttribute { +/// pound_token: Token![#], +/// bracket_token: token::Bracket, +/// content: TokenStream, +/// } +/// +/// impl Parse for OuterAttribute { +/// fn parse(input: ParseStream) -> Result<Self> { +/// let content; +/// Ok(OuterAttribute { +/// pound_token: input.parse()?, +/// bracket_token: bracketed!(content in input), +/// content: content.parse()?, +/// }) +/// } +/// } +/// # +/// # fn main() { +/// # let input = quote! { +/// # #[repr(C, packed)] +/// # }; +/// # syn::parse2::<OuterAttribute>(input).unwrap(); +/// # } +/// ``` +#[macro_export] +#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] +macro_rules! 
bracketed { + ($content:ident in $cursor:expr) => { + match $crate::__private::parse_brackets(&$cursor) { + $crate::__private::Ok(brackets) => { + $content = brackets.content; + brackets.token + } + $crate::__private::Err(error) => { + return $crate::__private::Err(error); + } + } + }; +} diff --git a/third_party/rust/syn/src/ident.rs b/third_party/rust/syn/src/ident.rs new file mode 100644 index 0000000000..8e3d8bda97 --- /dev/null +++ b/third_party/rust/syn/src/ident.rs @@ -0,0 +1,101 @@ +#[cfg(feature = "parsing")] +use crate::buffer::Cursor; +#[cfg(feature = "parsing")] +use crate::lookahead; +#[cfg(feature = "parsing")] +use crate::parse::{Parse, ParseStream, Result}; +#[cfg(feature = "parsing")] +use crate::token::Token; + +pub use proc_macro2::Ident; + +#[cfg(feature = "parsing")] +#[doc(hidden)] +#[allow(non_snake_case)] +pub fn Ident(marker: lookahead::TokenMarker) -> Ident { + match marker {} +} + +#[cfg(feature = "parsing")] +fn accept_as_ident(ident: &Ident) -> bool { + match ident.to_string().as_str() { + "_" | + // Based on https://doc.rust-lang.org/grammar.html#keywords + // and https://github.com/rust-lang/rfcs/blob/master/text/2421-unreservations-2018.md + // and https://github.com/rust-lang/rfcs/blob/master/text/2420-unreserve-proc.md + "abstract" | "as" | "become" | "box" | "break" | "const" | "continue" | + "crate" | "do" | "else" | "enum" | "extern" | "false" | "final" | "fn" | + "for" | "if" | "impl" | "in" | "let" | "loop" | "macro" | "match" | + "mod" | "move" | "mut" | "override" | "priv" | "pub" | "ref" | + "return" | "Self" | "self" | "static" | "struct" | "super" | "trait" | + "true" | "type" | "typeof" | "unsafe" | "unsized" | "use" | "virtual" | + "where" | "while" | "yield" => false, + _ => true, + } +} + +#[cfg(feature = "parsing")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] +impl Parse for Ident { + fn parse(input: ParseStream) -> Result<Self> { + input.step(|cursor| { + if let Some((ident, rest)) = cursor.ident() { + if accept_as_ident(&ident) { + return Ok((ident, rest)); + } + } + Err(cursor.error("expected identifier")) + }) + } +} + +#[cfg(feature = "parsing")] +impl Token for Ident { + fn peek(cursor: Cursor) -> bool { + if let Some((ident, _rest)) = cursor.ident() { + accept_as_ident(&ident) + } else { + false + } + } + + fn display() -> &'static str { + "identifier" + } +} + +macro_rules! ident_from_token { + ($token:ident) => { + impl From<Token![$token]> for Ident { + fn from(token: Token![$token]) -> Ident { + Ident::new(stringify!($token), token.span) + } + } + }; +} + +ident_from_token!(self); +ident_from_token!(Self); +ident_from_token!(super); +ident_from_token!(crate); +ident_from_token!(extern); + +impl From<Token![_]> for Ident { + fn from(token: Token![_]) -> Ident { + Ident::new("_", token.span) + } +} + +pub fn xid_ok(symbol: &str) -> bool { + let mut chars = symbol.chars(); + let first = chars.next().unwrap(); + if !(first == '_' || unicode_ident::is_xid_start(first)) { + return false; + } + for ch in chars { + if !unicode_ident::is_xid_continue(ch) { + return false; + } + } + true +} diff --git a/third_party/rust/syn/src/item.rs b/third_party/rust/syn/src/item.rs new file mode 100644 index 0000000000..a1ef7ab43e --- /dev/null +++ b/third_party/rust/syn/src/item.rs @@ -0,0 +1,3313 @@ +use super::*; +use crate::derive::{Data, DataEnum, DataStruct, DataUnion, DeriveInput}; +use crate::punctuated::Punctuated; +use proc_macro2::TokenStream; + +#[cfg(feature = "parsing")] +use std::mem; + +ast_enum_of_structs! 
{ + /// Things that can appear directly inside of a module or scope. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + /// + /// # Syntax tree enum + /// + /// This type is a [syntax tree enum]. + /// + /// [syntax tree enum]: Expr#syntax-tree-enums + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(not(syn_no_non_exhaustive), non_exhaustive)] + pub enum Item { + /// A constant item: `const MAX: u16 = 65535`. + Const(ItemConst), + + /// An enum definition: `enum Foo<A, B> { A(A), B(B) }`. + Enum(ItemEnum), + + /// An `extern crate` item: `extern crate serde`. + ExternCrate(ItemExternCrate), + + /// A free-standing function: `fn process(n: usize) -> Result<()> { ... + /// }`. + Fn(ItemFn), + + /// A block of foreign items: `extern "C" { ... }`. + ForeignMod(ItemForeignMod), + + /// An impl block providing trait or associated items: `impl<A> Trait + /// for Data<A> { ... }`. + Impl(ItemImpl), + + /// A macro invocation, which includes `macro_rules!` definitions. + Macro(ItemMacro), + + /// A 2.0-style declarative macro introduced by the `macro` keyword. + Macro2(ItemMacro2), + + /// A module or module declaration: `mod m` or `mod m { ... }`. + Mod(ItemMod), + + /// A static item: `static BIKE: Shed = Shed(42)`. + Static(ItemStatic), + + /// A struct definition: `struct Foo<A> { x: A }`. + Struct(ItemStruct), + + /// A trait definition: `pub trait Iterator { ... }`. + Trait(ItemTrait), + + /// A trait alias: `pub trait SharableIterator = Iterator + Sync`. + TraitAlias(ItemTraitAlias), + + /// A type alias: `type Result<T> = std::result::Result<T, MyError>`. + Type(ItemType), + + /// A union definition: `union Foo<A, B> { x: A, y: B }`. + Union(ItemUnion), + + /// A use declaration: `use std::collections::HashMap`. + Use(ItemUse), + + /// Tokens forming an item not interpreted by Syn. + Verbatim(TokenStream), + + // Not public API. + // + // For testing exhaustiveness in downstream code, use the following idiom: + // + // match item { + // Item::Const(item) => {...} + // Item::Enum(item) => {...} + // ... + // Item::Verbatim(item) => {...} + // + // #[cfg_attr(test, deny(non_exhaustive_omitted_patterns))] + // _ => { /* some sane fallback */ } + // } + // + // This way we fail your tests but don't break your library when adding + // a variant. You will be notified by a test failure when a variant is + // added, so that you can add code to handle it, but your library will + // continue to compile and work for downstream users in the interim. + #[cfg(syn_no_non_exhaustive)] + #[doc(hidden)] + __NonExhaustive, + } +} + +ast_struct! { + /// A constant item: `const MAX: u16 = 65535`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ItemConst { + pub attrs: Vec<Attribute>, + pub vis: Visibility, + pub const_token: Token![const], + pub ident: Ident, + pub colon_token: Token![:], + pub ty: Box<Type>, + pub eq_token: Token![=], + pub expr: Box<Expr>, + pub semi_token: Token![;], + } +} + +ast_struct! { + /// An enum definition: `enum Foo<A, B> { A(A), B(B) }`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ItemEnum { + pub attrs: Vec<Attribute>, + pub vis: Visibility, + pub enum_token: Token![enum], + pub ident: Ident, + pub generics: Generics, + pub brace_token: token::Brace, + pub variants: Punctuated<Variant, Token![,]>, + } +} + +ast_struct! 
{ + /// An `extern crate` item: `extern crate serde`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ItemExternCrate { + pub attrs: Vec<Attribute>, + pub vis: Visibility, + pub extern_token: Token![extern], + pub crate_token: Token![crate], + pub ident: Ident, + pub rename: Option<(Token![as], Ident)>, + pub semi_token: Token![;], + } +} + +ast_struct! { + /// A free-standing function: `fn process(n: usize) -> Result<()> { ... + /// }`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ItemFn { + pub attrs: Vec<Attribute>, + pub vis: Visibility, + pub sig: Signature, + pub block: Box<Block>, + } +} + +ast_struct! { + /// A block of foreign items: `extern "C" { ... }`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ItemForeignMod { + pub attrs: Vec<Attribute>, + pub abi: Abi, + pub brace_token: token::Brace, + pub items: Vec<ForeignItem>, + } +} + +ast_struct! { + /// An impl block providing trait or associated items: `impl<A> Trait + /// for Data<A> { ... }`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ItemImpl { + pub attrs: Vec<Attribute>, + pub defaultness: Option<Token![default]>, + pub unsafety: Option<Token![unsafe]>, + pub impl_token: Token![impl], + pub generics: Generics, + /// Trait this impl implements. + pub trait_: Option<(Option<Token![!]>, Path, Token![for])>, + /// The Self type of the impl. + pub self_ty: Box<Type>, + pub brace_token: token::Brace, + pub items: Vec<ImplItem>, + } +} + +ast_struct! { + /// A macro invocation, which includes `macro_rules!` definitions. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ItemMacro { + pub attrs: Vec<Attribute>, + /// The `example` in `macro_rules! example { ... }`. + pub ident: Option<Ident>, + pub mac: Macro, + pub semi_token: Option<Token![;]>, + } +} + +ast_struct! { + /// A 2.0-style declarative macro introduced by the `macro` keyword. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ItemMacro2 { + pub attrs: Vec<Attribute>, + pub vis: Visibility, + pub macro_token: Token![macro], + pub ident: Ident, + pub rules: TokenStream, + } +} + +ast_struct! { + /// A module or module declaration: `mod m` or `mod m { ... }`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ItemMod { + pub attrs: Vec<Attribute>, + pub vis: Visibility, + pub mod_token: Token![mod], + pub ident: Ident, + pub content: Option<(token::Brace, Vec<Item>)>, + pub semi: Option<Token![;]>, + } +} + +ast_struct! { + /// A static item: `static BIKE: Shed = Shed(42)`. 
+ /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ItemStatic { + pub attrs: Vec<Attribute>, + pub vis: Visibility, + pub static_token: Token![static], + pub mutability: Option<Token![mut]>, + pub ident: Ident, + pub colon_token: Token![:], + pub ty: Box<Type>, + pub eq_token: Token![=], + pub expr: Box<Expr>, + pub semi_token: Token![;], + } +} + +ast_struct! { + /// A struct definition: `struct Foo<A> { x: A }`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ItemStruct { + pub attrs: Vec<Attribute>, + pub vis: Visibility, + pub struct_token: Token![struct], + pub ident: Ident, + pub generics: Generics, + pub fields: Fields, + pub semi_token: Option<Token![;]>, + } +} + +ast_struct! { + /// A trait definition: `pub trait Iterator { ... }`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ItemTrait { + pub attrs: Vec<Attribute>, + pub vis: Visibility, + pub unsafety: Option<Token![unsafe]>, + pub auto_token: Option<Token![auto]>, + pub trait_token: Token![trait], + pub ident: Ident, + pub generics: Generics, + pub colon_token: Option<Token![:]>, + pub supertraits: Punctuated<TypeParamBound, Token![+]>, + pub brace_token: token::Brace, + pub items: Vec<TraitItem>, + } +} + +ast_struct! { + /// A trait alias: `pub trait SharableIterator = Iterator + Sync`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ItemTraitAlias { + pub attrs: Vec<Attribute>, + pub vis: Visibility, + pub trait_token: Token![trait], + pub ident: Ident, + pub generics: Generics, + pub eq_token: Token![=], + pub bounds: Punctuated<TypeParamBound, Token![+]>, + pub semi_token: Token![;], + } +} + +ast_struct! { + /// A type alias: `type Result<T> = std::result::Result<T, MyError>`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ItemType { + pub attrs: Vec<Attribute>, + pub vis: Visibility, + pub type_token: Token![type], + pub ident: Ident, + pub generics: Generics, + pub eq_token: Token![=], + pub ty: Box<Type>, + pub semi_token: Token![;], + } +} + +ast_struct! { + /// A union definition: `union Foo<A, B> { x: A, y: B }`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ItemUnion { + pub attrs: Vec<Attribute>, + pub vis: Visibility, + pub union_token: Token![union], + pub ident: Ident, + pub generics: Generics, + pub fields: FieldsNamed, + } +} + +ast_struct! { + /// A use declaration: `use std::collections::HashMap`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ItemUse { + pub attrs: Vec<Attribute>, + pub vis: Visibility, + pub use_token: Token![use], + pub leading_colon: Option<Token![::]>, + pub tree: UseTree, + pub semi_token: Token![;], + } +} + +impl Item { + #[cfg(feature = "parsing")] + pub(crate) fn replace_attrs(&mut self, new: Vec<Attribute>) -> Vec<Attribute> { + match self { + Item::ExternCrate(ItemExternCrate { attrs, .. }) + | Item::Use(ItemUse { attrs, .. }) + | Item::Static(ItemStatic { attrs, .. 
}) + | Item::Const(ItemConst { attrs, .. }) + | Item::Fn(ItemFn { attrs, .. }) + | Item::Mod(ItemMod { attrs, .. }) + | Item::ForeignMod(ItemForeignMod { attrs, .. }) + | Item::Type(ItemType { attrs, .. }) + | Item::Struct(ItemStruct { attrs, .. }) + | Item::Enum(ItemEnum { attrs, .. }) + | Item::Union(ItemUnion { attrs, .. }) + | Item::Trait(ItemTrait { attrs, .. }) + | Item::TraitAlias(ItemTraitAlias { attrs, .. }) + | Item::Impl(ItemImpl { attrs, .. }) + | Item::Macro(ItemMacro { attrs, .. }) + | Item::Macro2(ItemMacro2 { attrs, .. }) => mem::replace(attrs, new), + Item::Verbatim(_) => Vec::new(), + + #[cfg(syn_no_non_exhaustive)] + _ => unreachable!(), + } + } +} + +impl From<DeriveInput> for Item { + fn from(input: DeriveInput) -> Item { + match input.data { + Data::Struct(data) => Item::Struct(ItemStruct { + attrs: input.attrs, + vis: input.vis, + struct_token: data.struct_token, + ident: input.ident, + generics: input.generics, + fields: data.fields, + semi_token: data.semi_token, + }), + Data::Enum(data) => Item::Enum(ItemEnum { + attrs: input.attrs, + vis: input.vis, + enum_token: data.enum_token, + ident: input.ident, + generics: input.generics, + brace_token: data.brace_token, + variants: data.variants, + }), + Data::Union(data) => Item::Union(ItemUnion { + attrs: input.attrs, + vis: input.vis, + union_token: data.union_token, + ident: input.ident, + generics: input.generics, + fields: data.fields, + }), + } + } +} + +impl From<ItemStruct> for DeriveInput { + fn from(input: ItemStruct) -> DeriveInput { + DeriveInput { + attrs: input.attrs, + vis: input.vis, + ident: input.ident, + generics: input.generics, + data: Data::Struct(DataStruct { + struct_token: input.struct_token, + fields: input.fields, + semi_token: input.semi_token, + }), + } + } +} + +impl From<ItemEnum> for DeriveInput { + fn from(input: ItemEnum) -> DeriveInput { + DeriveInput { + attrs: input.attrs, + vis: input.vis, + ident: input.ident, + generics: input.generics, + data: Data::Enum(DataEnum { + enum_token: input.enum_token, + brace_token: input.brace_token, + variants: input.variants, + }), + } + } +} + +impl From<ItemUnion> for DeriveInput { + fn from(input: ItemUnion) -> DeriveInput { + DeriveInput { + attrs: input.attrs, + vis: input.vis, + ident: input.ident, + generics: input.generics, + data: Data::Union(DataUnion { + union_token: input.union_token, + fields: input.fields, + }), + } + } +} + +ast_enum_of_structs! { + /// A suffix of an import tree in a `use` item: `Type as Renamed` or `*`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + /// + /// # Syntax tree enum + /// + /// This type is a [syntax tree enum]. + /// + /// [syntax tree enum]: Expr#syntax-tree-enums + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub enum UseTree { + /// A path prefix of imports in a `use` item: `std::...`. + Path(UsePath), + + /// An identifier imported by a `use` item: `HashMap`. + Name(UseName), + + /// An renamed identifier imported by a `use` item: `HashMap as Map`. + Rename(UseRename), + + /// A glob import in a `use` item: `*`. + Glob(UseGlob), + + /// A braced group of imports in a `use` item: `{A, B, C}`. + Group(UseGroup), + } +} + +ast_struct! { + /// A path prefix of imports in a `use` item: `std::...`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct UsePath { + pub ident: Ident, + pub colon2_token: Token![::], + pub tree: Box<UseTree>, + } +} + +ast_struct! 
{ + /// An identifier imported by a `use` item: `HashMap`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct UseName { + pub ident: Ident, + } +} + +ast_struct! { + /// An renamed identifier imported by a `use` item: `HashMap as Map`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct UseRename { + pub ident: Ident, + pub as_token: Token![as], + pub rename: Ident, + } +} + +ast_struct! { + /// A glob import in a `use` item: `*`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct UseGlob { + pub star_token: Token![*], + } +} + +ast_struct! { + /// A braced group of imports in a `use` item: `{A, B, C}`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct UseGroup { + pub brace_token: token::Brace, + pub items: Punctuated<UseTree, Token![,]>, + } +} + +ast_enum_of_structs! { + /// An item within an `extern` block. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + /// + /// # Syntax tree enum + /// + /// This type is a [syntax tree enum]. + /// + /// [syntax tree enum]: Expr#syntax-tree-enums + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(not(syn_no_non_exhaustive), non_exhaustive)] + pub enum ForeignItem { + /// A foreign function in an `extern` block. + Fn(ForeignItemFn), + + /// A foreign static item in an `extern` block: `static ext: u8`. + Static(ForeignItemStatic), + + /// A foreign type in an `extern` block: `type void`. + Type(ForeignItemType), + + /// A macro invocation within an extern block. + Macro(ForeignItemMacro), + + /// Tokens in an `extern` block not interpreted by Syn. + Verbatim(TokenStream), + + // Not public API. + // + // For testing exhaustiveness in downstream code, use the following idiom: + // + // match item { + // ForeignItem::Fn(item) => {...} + // ForeignItem::Static(item) => {...} + // ... + // ForeignItem::Verbatim(item) => {...} + // + // #[cfg_attr(test, deny(non_exhaustive_omitted_patterns))] + // _ => { /* some sane fallback */ } + // } + // + // This way we fail your tests but don't break your library when adding + // a variant. You will be notified by a test failure when a variant is + // added, so that you can add code to handle it, but your library will + // continue to compile and work for downstream users in the interim. + #[cfg(syn_no_non_exhaustive)] + #[doc(hidden)] + __NonExhaustive, + } +} + +ast_struct! { + /// A foreign function in an `extern` block. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ForeignItemFn { + pub attrs: Vec<Attribute>, + pub vis: Visibility, + pub sig: Signature, + pub semi_token: Token![;], + } +} + +ast_struct! { + /// A foreign static item in an `extern` block: `static ext: u8`. 
+ /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ForeignItemStatic { + pub attrs: Vec<Attribute>, + pub vis: Visibility, + pub static_token: Token![static], + pub mutability: Option<Token![mut]>, + pub ident: Ident, + pub colon_token: Token![:], + pub ty: Box<Type>, + pub semi_token: Token![;], + } +} + +ast_struct! { + /// A foreign type in an `extern` block: `type void`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ForeignItemType { + pub attrs: Vec<Attribute>, + pub vis: Visibility, + pub type_token: Token![type], + pub ident: Ident, + pub semi_token: Token![;], + } +} + +ast_struct! { + /// A macro invocation within an extern block. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ForeignItemMacro { + pub attrs: Vec<Attribute>, + pub mac: Macro, + pub semi_token: Option<Token![;]>, + } +} + +ast_enum_of_structs! { + /// An item declaration within the definition of a trait. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + /// + /// # Syntax tree enum + /// + /// This type is a [syntax tree enum]. + /// + /// [syntax tree enum]: Expr#syntax-tree-enums + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(not(syn_no_non_exhaustive), non_exhaustive)] + pub enum TraitItem { + /// An associated constant within the definition of a trait. + Const(TraitItemConst), + + /// A trait method within the definition of a trait. + Method(TraitItemMethod), + + /// An associated type within the definition of a trait. + Type(TraitItemType), + + /// A macro invocation within the definition of a trait. + Macro(TraitItemMacro), + + /// Tokens within the definition of a trait not interpreted by Syn. + Verbatim(TokenStream), + + // Not public API. + // + // For testing exhaustiveness in downstream code, use the following idiom: + // + // match item { + // TraitItem::Const(item) => {...} + // TraitItem::Method(item) => {...} + // ... + // TraitItem::Verbatim(item) => {...} + // + // #[cfg_attr(test, deny(non_exhaustive_omitted_patterns))] + // _ => { /* some sane fallback */ } + // } + // + // This way we fail your tests but don't break your library when adding + // a variant. You will be notified by a test failure when a variant is + // added, so that you can add code to handle it, but your library will + // continue to compile and work for downstream users in the interim. + #[cfg(syn_no_non_exhaustive)] + #[doc(hidden)] + __NonExhaustive, + } +} + +ast_struct! { + /// An associated constant within the definition of a trait. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct TraitItemConst { + pub attrs: Vec<Attribute>, + pub const_token: Token![const], + pub ident: Ident, + pub colon_token: Token![:], + pub ty: Type, + pub default: Option<(Token![=], Expr)>, + pub semi_token: Token![;], + } +} + +ast_struct! { + /// A trait method within the definition of a trait. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct TraitItemMethod { + pub attrs: Vec<Attribute>, + pub sig: Signature, + pub default: Option<Block>, + pub semi_token: Option<Token![;]>, + } +} + +ast_struct! 
{ + /// An associated type within the definition of a trait. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct TraitItemType { + pub attrs: Vec<Attribute>, + pub type_token: Token![type], + pub ident: Ident, + pub generics: Generics, + pub colon_token: Option<Token![:]>, + pub bounds: Punctuated<TypeParamBound, Token![+]>, + pub default: Option<(Token![=], Type)>, + pub semi_token: Token![;], + } +} + +ast_struct! { + /// A macro invocation within the definition of a trait. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct TraitItemMacro { + pub attrs: Vec<Attribute>, + pub mac: Macro, + pub semi_token: Option<Token![;]>, + } +} + +ast_enum_of_structs! { + /// An item within an impl block. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + /// + /// # Syntax tree enum + /// + /// This type is a [syntax tree enum]. + /// + /// [syntax tree enum]: Expr#syntax-tree-enums + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(not(syn_no_non_exhaustive), non_exhaustive)] + pub enum ImplItem { + /// An associated constant within an impl block. + Const(ImplItemConst), + + /// A method within an impl block. + Method(ImplItemMethod), + + /// An associated type within an impl block. + Type(ImplItemType), + + /// A macro invocation within an impl block. + Macro(ImplItemMacro), + + /// Tokens within an impl block not interpreted by Syn. + Verbatim(TokenStream), + + // Not public API. + // + // For testing exhaustiveness in downstream code, use the following idiom: + // + // match item { + // ImplItem::Const(item) => {...} + // ImplItem::Method(item) => {...} + // ... + // ImplItem::Verbatim(item) => {...} + // + // #[cfg_attr(test, deny(non_exhaustive_omitted_patterns))] + // _ => { /* some sane fallback */ } + // } + // + // This way we fail your tests but don't break your library when adding + // a variant. You will be notified by a test failure when a variant is + // added, so that you can add code to handle it, but your library will + // continue to compile and work for downstream users in the interim. + #[cfg(syn_no_non_exhaustive)] + #[doc(hidden)] + __NonExhaustive, + } +} + +ast_struct! { + /// An associated constant within an impl block. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ImplItemConst { + pub attrs: Vec<Attribute>, + pub vis: Visibility, + pub defaultness: Option<Token![default]>, + pub const_token: Token![const], + pub ident: Ident, + pub colon_token: Token![:], + pub ty: Type, + pub eq_token: Token![=], + pub expr: Expr, + pub semi_token: Token![;], + } +} + +ast_struct! { + /// A method within an impl block. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ImplItemMethod { + pub attrs: Vec<Attribute>, + pub vis: Visibility, + pub defaultness: Option<Token![default]>, + pub sig: Signature, + pub block: Block, + } +} + +ast_struct! { + /// An associated type within an impl block. 
+ /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ImplItemType { + pub attrs: Vec<Attribute>, + pub vis: Visibility, + pub defaultness: Option<Token![default]>, + pub type_token: Token![type], + pub ident: Ident, + pub generics: Generics, + pub eq_token: Token![=], + pub ty: Type, + pub semi_token: Token![;], + } +} + +ast_struct! { + /// A macro invocation within an impl block. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ImplItemMacro { + pub attrs: Vec<Attribute>, + pub mac: Macro, + pub semi_token: Option<Token![;]>, + } +} + +ast_struct! { + /// A function signature in a trait or implementation: `unsafe fn + /// initialize(&self)`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct Signature { + pub constness: Option<Token![const]>, + pub asyncness: Option<Token![async]>, + pub unsafety: Option<Token![unsafe]>, + pub abi: Option<Abi>, + pub fn_token: Token![fn], + pub ident: Ident, + pub generics: Generics, + pub paren_token: token::Paren, + pub inputs: Punctuated<FnArg, Token![,]>, + pub variadic: Option<Variadic>, + pub output: ReturnType, + } +} + +impl Signature { + /// A method's `self` receiver, such as `&self` or `self: Box<Self>`. + pub fn receiver(&self) -> Option<&FnArg> { + let arg = self.inputs.first()?; + match arg { + FnArg::Receiver(_) => Some(arg), + FnArg::Typed(PatType { pat, .. }) => { + if let Pat::Ident(PatIdent { ident, .. }) = &**pat { + if ident == "self" { + return Some(arg); + } + } + None + } + } + } +} + +ast_enum_of_structs! { + /// An argument in a function signature: the `n: usize` in `fn f(n: usize)`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub enum FnArg { + /// The `self` argument of an associated method, whether taken by value + /// or by reference. + /// + /// Note that `self` receivers with a specified type, such as `self: + /// Box<Self>`, are parsed as a `FnArg::Typed`. + Receiver(Receiver), + + /// A function argument accepted by pattern and type. + Typed(PatType), + } +} + +ast_struct! { + /// The `self` argument of an associated method, whether taken by value + /// or by reference. + /// + /// Note that `self` receivers with a specified type, such as `self: + /// Box<Self>`, are parsed as a `FnArg::Typed`. 
+ /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct Receiver { + pub attrs: Vec<Attribute>, + pub reference: Option<(Token![&], Option<Lifetime>)>, + pub mutability: Option<Token![mut]>, + pub self_token: Token![self], + } +} + +impl Receiver { + pub fn lifetime(&self) -> Option<&Lifetime> { + self.reference.as_ref()?.1.as_ref() + } +} + +#[cfg(feature = "parsing")] +pub mod parsing { + use super::*; + use crate::ext::IdentExt; + use crate::parse::discouraged::Speculative; + use crate::parse::{Parse, ParseBuffer, ParseStream, Result}; + use crate::token::Brace; + use proc_macro2::{Delimiter, Group, Punct, Spacing, TokenTree}; + use std::iter::{self, FromIterator}; + + crate::custom_keyword!(macro_rules); + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for Item { + fn parse(input: ParseStream) -> Result<Self> { + let begin = input.fork(); + let mut attrs = input.call(Attribute::parse_outer)?; + let ahead = input.fork(); + let vis: Visibility = ahead.parse()?; + + let lookahead = ahead.lookahead1(); + let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead) { + let vis: Visibility = input.parse()?; + let sig: Signature = input.parse()?; + if input.peek(Token![;]) { + input.parse::<Token![;]>()?; + Ok(Item::Verbatim(verbatim::between(begin, input))) + } else { + parse_rest_of_fn(input, Vec::new(), vis, sig).map(Item::Fn) + } + } else if lookahead.peek(Token![extern]) { + ahead.parse::<Token![extern]>()?; + let lookahead = ahead.lookahead1(); + if lookahead.peek(Token![crate]) { + input.parse().map(Item::ExternCrate) + } else if lookahead.peek(token::Brace) { + input.parse().map(Item::ForeignMod) + } else if lookahead.peek(LitStr) { + ahead.parse::<LitStr>()?; + let lookahead = ahead.lookahead1(); + if lookahead.peek(token::Brace) { + input.parse().map(Item::ForeignMod) + } else { + Err(lookahead.error()) + } + } else { + Err(lookahead.error()) + } + } else if lookahead.peek(Token![use]) { + input.parse().map(Item::Use) + } else if lookahead.peek(Token![static]) { + let vis = input.parse()?; + let static_token = input.parse()?; + let mutability = input.parse()?; + let ident = input.parse()?; + if input.peek(Token![=]) { + input.parse::<Token![=]>()?; + input.parse::<Expr>()?; + input.parse::<Token![;]>()?; + Ok(Item::Verbatim(verbatim::between(begin, input))) + } else { + let colon_token = input.parse()?; + let ty = input.parse()?; + if input.peek(Token![;]) { + input.parse::<Token![;]>()?; + Ok(Item::Verbatim(verbatim::between(begin, input))) + } else { + Ok(Item::Static(ItemStatic { + attrs: Vec::new(), + vis, + static_token, + mutability, + ident, + colon_token, + ty, + eq_token: input.parse()?, + expr: input.parse()?, + semi_token: input.parse()?, + })) + } + } + } else if lookahead.peek(Token![const]) { + ahead.parse::<Token![const]>()?; + let lookahead = ahead.lookahead1(); + if lookahead.peek(Ident) || lookahead.peek(Token![_]) { + let vis = input.parse()?; + let const_token = input.parse()?; + let ident = { + let lookahead = input.lookahead1(); + if lookahead.peek(Ident) || lookahead.peek(Token![_]) { + input.call(Ident::parse_any)? 
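+ // The peek on `Token![_]` above allows unnamed constants such as
+ // `const _: () = ...;` to parse as `ItemConst` rather than erroring.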
+ } else { + return Err(lookahead.error()); + } + }; + let colon_token = input.parse()?; + let ty = input.parse()?; + if input.peek(Token![;]) { + input.parse::<Token![;]>()?; + Ok(Item::Verbatim(verbatim::between(begin, input))) + } else { + Ok(Item::Const(ItemConst { + attrs: Vec::new(), + vis, + const_token, + ident, + colon_token, + ty, + eq_token: input.parse()?, + expr: input.parse()?, + semi_token: input.parse()?, + })) + } + } else { + Err(lookahead.error()) + } + } else if lookahead.peek(Token![unsafe]) { + ahead.parse::<Token![unsafe]>()?; + let lookahead = ahead.lookahead1(); + if lookahead.peek(Token![trait]) + || lookahead.peek(Token![auto]) && ahead.peek2(Token![trait]) + { + input.parse().map(Item::Trait) + } else if lookahead.peek(Token![impl]) { + let allow_verbatim_impl = true; + if let Some(item) = parse_impl(input, allow_verbatim_impl)? { + Ok(Item::Impl(item)) + } else { + Ok(Item::Verbatim(verbatim::between(begin, input))) + } + } else if lookahead.peek(Token![extern]) { + input.parse::<Visibility>()?; + input.parse::<Token![unsafe]>()?; + input.parse::<ItemForeignMod>()?; + Ok(Item::Verbatim(verbatim::between(begin, input))) + } else if lookahead.peek(Token![mod]) { + input.parse::<Visibility>()?; + input.parse::<Token![unsafe]>()?; + input.parse::<ItemMod>()?; + Ok(Item::Verbatim(verbatim::between(begin, input))) + } else { + Err(lookahead.error()) + } + } else if lookahead.peek(Token![mod]) { + input.parse().map(Item::Mod) + } else if lookahead.peek(Token![type]) { + parse_item_type(begin, input) + } else if lookahead.peek(Token![struct]) { + input.parse().map(Item::Struct) + } else if lookahead.peek(Token![enum]) { + input.parse().map(Item::Enum) + } else if lookahead.peek(Token![union]) && ahead.peek2(Ident) { + input.parse().map(Item::Union) + } else if lookahead.peek(Token![trait]) { + input.call(parse_trait_or_trait_alias) + } else if lookahead.peek(Token![auto]) && ahead.peek2(Token![trait]) { + input.parse().map(Item::Trait) + } else if lookahead.peek(Token![impl]) + || lookahead.peek(Token![default]) && !ahead.peek2(Token![!]) + { + let allow_verbatim_impl = true; + if let Some(item) = parse_impl(input, allow_verbatim_impl)? 
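+ // `parse_impl` yields `None` for impl forms that syn has no dedicated
+ // node for; those fall through to the `Item::Verbatim` branch below.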
{ + Ok(Item::Impl(item)) + } else { + Ok(Item::Verbatim(verbatim::between(begin, input))) + } + } else if lookahead.peek(Token![macro]) { + input.parse().map(Item::Macro2) + } else if vis.is_inherited() + && (lookahead.peek(Ident) + || lookahead.peek(Token![self]) + || lookahead.peek(Token![super]) + || lookahead.peek(Token![crate]) + || lookahead.peek(Token![::])) + { + input.parse().map(Item::Macro) + } else if ahead.peek(macro_rules) { + input.advance_to(&ahead); + input.parse::<ItemMacro>()?; + Ok(Item::Verbatim(verbatim::between(begin, input))) + } else { + Err(lookahead.error()) + }?; + + attrs.extend(item.replace_attrs(Vec::new())); + item.replace_attrs(attrs); + Ok(item) + } + } + + struct FlexibleItemType { + vis: Visibility, + defaultness: Option<Token![default]>, + type_token: Token![type], + ident: Ident, + generics: Generics, + colon_token: Option<Token![:]>, + bounds: Punctuated<TypeParamBound, Token![+]>, + ty: Option<(Token![=], Type)>, + semi_token: Token![;], + } + + enum WhereClauseLocation { + // type Ty<T> where T: 'static = T; + BeforeEq, + // type Ty<T> = T where T: 'static; + #[allow(dead_code)] + AfterEq, + // TODO: goes away once the migration period on rust-lang/rust#89122 is over + Both, + } + + impl FlexibleItemType { + fn parse(input: ParseStream, where_clause_location: WhereClauseLocation) -> Result<Self> { + let vis: Visibility = input.parse()?; + let defaultness: Option<Token![default]> = input.parse()?; + let type_token: Token![type] = input.parse()?; + let ident: Ident = input.parse()?; + let mut generics: Generics = input.parse()?; + let colon_token: Option<Token![:]> = input.parse()?; + + let mut bounds = Punctuated::new(); + if colon_token.is_some() { + loop { + if input.peek(Token![where]) || input.peek(Token![=]) || input.peek(Token![;]) { + break; + } + bounds.push_value(input.parse::<TypeParamBound>()?); + if input.peek(Token![where]) || input.peek(Token![=]) || input.peek(Token![;]) { + break; + } + bounds.push_punct(input.parse::<Token![+]>()?); + } + } + + match where_clause_location { + WhereClauseLocation::BeforeEq | WhereClauseLocation::Both => { + generics.where_clause = input.parse()?; + } + _ => {} + } + + let ty = if let Some(eq_token) = input.parse()? { + Some((eq_token, input.parse::<Type>()?)) + } else { + None + }; + + match where_clause_location { + WhereClauseLocation::AfterEq | WhereClauseLocation::Both + if generics.where_clause.is_none() => + { + generics.where_clause = input.parse()?; + } + _ => {} + } + + let semi_token: Token![;] = input.parse()?; + + Ok(FlexibleItemType { + vis, + defaultness, + type_token, + ident, + generics, + colon_token, + bounds, + ty, + semi_token, + }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ItemMacro { + fn parse(input: ParseStream) -> Result<Self> { + let attrs = input.call(Attribute::parse_outer)?; + let path = input.call(Path::parse_mod_style)?; + let bang_token: Token![!] = input.parse()?; + let ident: Option<Ident> = input.parse()?; + let (delimiter, tokens) = input.call(mac::parse_delimiter)?; + let semi_token: Option<Token![;]> = if !delimiter.is_brace() { + Some(input.parse()?) 
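+ // Only `mac!(...)` and `mac![...]` invocations take a trailing `;` as an
+ // item; a brace-delimited `mac! { ... }` is complete without one.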
+ } else { + None + }; + Ok(ItemMacro { + attrs, + ident, + mac: Macro { + path, + bang_token, + delimiter, + tokens, + }, + semi_token, + }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ItemMacro2 { + fn parse(input: ParseStream) -> Result<Self> { + let attrs = input.call(Attribute::parse_outer)?; + let vis: Visibility = input.parse()?; + let macro_token: Token![macro] = input.parse()?; + let ident: Ident = input.parse()?; + let mut rules = TokenStream::new(); + + let mut lookahead = input.lookahead1(); + if lookahead.peek(token::Paren) { + let paren_content; + let paren_token = parenthesized!(paren_content in input); + let args: TokenStream = paren_content.parse()?; + let mut args = Group::new(Delimiter::Parenthesis, args); + args.set_span(paren_token.span); + rules.extend(iter::once(TokenTree::Group(args))); + lookahead = input.lookahead1(); + } + + if lookahead.peek(token::Brace) { + let brace_content; + let brace_token = braced!(brace_content in input); + let body: TokenStream = brace_content.parse()?; + let mut body = Group::new(Delimiter::Brace, body); + body.set_span(brace_token.span); + rules.extend(iter::once(TokenTree::Group(body))); + } else { + return Err(lookahead.error()); + } + + Ok(ItemMacro2 { + attrs, + vis, + macro_token, + ident, + rules, + }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ItemExternCrate { + fn parse(input: ParseStream) -> Result<Self> { + Ok(ItemExternCrate { + attrs: input.call(Attribute::parse_outer)?, + vis: input.parse()?, + extern_token: input.parse()?, + crate_token: input.parse()?, + ident: { + if input.peek(Token![self]) { + input.call(Ident::parse_any)? + } else { + input.parse()? + } + }, + rename: { + if input.peek(Token![as]) { + let as_token: Token![as] = input.parse()?; + let rename: Ident = if input.peek(Token![_]) { + Ident::from(input.parse::<Token![_]>()?) + } else { + input.parse()? + }; + Some((as_token, rename)) + } else { + None + } + }, + semi_token: input.parse()?, + }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ItemUse { + fn parse(input: ParseStream) -> Result<Self> { + Ok(ItemUse { + attrs: input.call(Attribute::parse_outer)?, + vis: input.parse()?, + use_token: input.parse()?, + leading_colon: input.parse()?, + tree: input.parse()?, + semi_token: input.parse()?, + }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for UseTree { + fn parse(input: ParseStream) -> Result<UseTree> { + let lookahead = input.lookahead1(); + if lookahead.peek(Ident) + || lookahead.peek(Token![self]) + || lookahead.peek(Token![super]) + || lookahead.peek(Token![crate]) + { + let ident = input.call(Ident::parse_any)?; + if input.peek(Token![::]) { + Ok(UseTree::Path(UsePath { + ident, + colon2_token: input.parse()?, + tree: Box::new(input.parse()?), + })) + } else if input.peek(Token![as]) { + Ok(UseTree::Rename(UseRename { + ident, + as_token: input.parse()?, + rename: { + if input.peek(Ident) { + input.parse()? + } else if input.peek(Token![_]) { + Ident::from(input.parse::<Token![_]>()?) 
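+ // `use path::Item as _;` is accepted: the underscore is turned into an
+ // `Ident` through the `From<Token![_]> for Ident` impl defined in ident.rs.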
+ } else { + return Err(input.error("expected identifier or underscore")); + } + }, + })) + } else { + Ok(UseTree::Name(UseName { ident })) + } + } else if lookahead.peek(Token![*]) { + Ok(UseTree::Glob(UseGlob { + star_token: input.parse()?, + })) + } else if lookahead.peek(token::Brace) { + let content; + Ok(UseTree::Group(UseGroup { + brace_token: braced!(content in input), + items: content.parse_terminated(UseTree::parse)?, + })) + } else { + Err(lookahead.error()) + } + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ItemStatic { + fn parse(input: ParseStream) -> Result<Self> { + Ok(ItemStatic { + attrs: input.call(Attribute::parse_outer)?, + vis: input.parse()?, + static_token: input.parse()?, + mutability: input.parse()?, + ident: input.parse()?, + colon_token: input.parse()?, + ty: input.parse()?, + eq_token: input.parse()?, + expr: input.parse()?, + semi_token: input.parse()?, + }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ItemConst { + fn parse(input: ParseStream) -> Result<Self> { + Ok(ItemConst { + attrs: input.call(Attribute::parse_outer)?, + vis: input.parse()?, + const_token: input.parse()?, + ident: { + let lookahead = input.lookahead1(); + if lookahead.peek(Ident) || lookahead.peek(Token![_]) { + input.call(Ident::parse_any)? + } else { + return Err(lookahead.error()); + } + }, + colon_token: input.parse()?, + ty: input.parse()?, + eq_token: input.parse()?, + expr: input.parse()?, + semi_token: input.parse()?, + }) + } + } + + fn pop_variadic(args: &mut Punctuated<FnArg, Token![,]>) -> Option<Variadic> { + let trailing_punct = args.trailing_punct(); + + let last = match args.last_mut()? { + FnArg::Typed(last) => last, + _ => return None, + }; + + let ty = match last.ty.as_ref() { + Type::Verbatim(ty) => ty, + _ => return None, + }; + + let mut variadic = Variadic { + attrs: Vec::new(), + dots: parse2(ty.clone()).ok()?, + }; + + if let Pat::Verbatim(pat) = last.pat.as_ref() { + if pat.to_string() == "..." 
&& !trailing_punct { + variadic.attrs = mem::replace(&mut last.attrs, Vec::new()); + args.pop(); + } + } + + Some(variadic) + } + + fn variadic_to_tokens(dots: &Token![...]) -> TokenStream { + TokenStream::from_iter(vec![ + TokenTree::Punct({ + let mut dot = Punct::new('.', Spacing::Joint); + dot.set_span(dots.spans[0]); + dot + }), + TokenTree::Punct({ + let mut dot = Punct::new('.', Spacing::Joint); + dot.set_span(dots.spans[1]); + dot + }), + TokenTree::Punct({ + let mut dot = Punct::new('.', Spacing::Alone); + dot.set_span(dots.spans[2]); + dot + }), + ]) + } + + fn peek_signature(input: ParseStream) -> bool { + let fork = input.fork(); + fork.parse::<Option<Token![const]>>().is_ok() + && fork.parse::<Option<Token![async]>>().is_ok() + && fork.parse::<Option<Token![unsafe]>>().is_ok() + && fork.parse::<Option<Abi>>().is_ok() + && fork.peek(Token![fn]) + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for Signature { + fn parse(input: ParseStream) -> Result<Self> { + let constness: Option<Token![const]> = input.parse()?; + let asyncness: Option<Token![async]> = input.parse()?; + let unsafety: Option<Token![unsafe]> = input.parse()?; + let abi: Option<Abi> = input.parse()?; + let fn_token: Token![fn] = input.parse()?; + let ident: Ident = input.parse()?; + let mut generics: Generics = input.parse()?; + + let content; + let paren_token = parenthesized!(content in input); + let mut inputs = parse_fn_args(&content)?; + let variadic = pop_variadic(&mut inputs); + + let output: ReturnType = input.parse()?; + generics.where_clause = input.parse()?; + + Ok(Signature { + constness, + asyncness, + unsafety, + abi, + fn_token, + ident, + generics, + paren_token, + inputs, + variadic, + output, + }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ItemFn { + fn parse(input: ParseStream) -> Result<Self> { + let outer_attrs = input.call(Attribute::parse_outer)?; + let vis: Visibility = input.parse()?; + let sig: Signature = input.parse()?; + parse_rest_of_fn(input, outer_attrs, vis, sig) + } + } + + fn parse_rest_of_fn( + input: ParseStream, + mut attrs: Vec<Attribute>, + vis: Visibility, + sig: Signature, + ) -> Result<ItemFn> { + let content; + let brace_token = braced!(content in input); + attr::parsing::parse_inner(&content, &mut attrs)?; + let stmts = content.call(Block::parse_within)?; + + Ok(ItemFn { + attrs, + vis, + sig, + block: Box::new(Block { brace_token, stmts }), + }) + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for FnArg { + fn parse(input: ParseStream) -> Result<Self> { + let attrs = input.call(Attribute::parse_outer)?; + + let ahead = input.fork(); + if let Ok(mut receiver) = ahead.parse::<Receiver>() { + if !ahead.peek(Token![:]) { + input.advance_to(&ahead); + receiver.attrs = attrs; + return Ok(FnArg::Receiver(receiver)); + } + } + + let mut typed = input.call(fn_arg_typed)?; + typed.attrs = attrs; + Ok(FnArg::Typed(typed)) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for Receiver { + fn parse(input: ParseStream) -> Result<Self> { + Ok(Receiver { + attrs: Vec::new(), + reference: { + if input.peek(Token![&]) { + Some((input.parse()?, input.parse()?)) + } else { + None + } + }, + mutability: input.parse()?, + self_token: input.parse()?, + }) + } + } + + fn parse_fn_args(input: ParseStream) -> Result<Punctuated<FnArg, Token![,]>> { + let mut args = Punctuated::new(); + let mut has_receiver = false; + + while !input.is_empty() { + let attrs = 
input.call(Attribute::parse_outer)?; + + let arg = if let Some(dots) = input.parse::<Option<Token![...]>>()? { + FnArg::Typed(PatType { + attrs, + pat: Box::new(Pat::Verbatim(variadic_to_tokens(&dots))), + colon_token: Token![:](dots.spans[0]), + ty: Box::new(Type::Verbatim(variadic_to_tokens(&dots))), + }) + } else { + let mut arg: FnArg = input.parse()?; + match &mut arg { + FnArg::Receiver(receiver) if has_receiver => { + return Err(Error::new( + receiver.self_token.span, + "unexpected second method receiver", + )); + } + FnArg::Receiver(receiver) if !args.is_empty() => { + return Err(Error::new( + receiver.self_token.span, + "unexpected method receiver", + )); + } + FnArg::Receiver(receiver) => { + has_receiver = true; + receiver.attrs = attrs; + } + FnArg::Typed(arg) => arg.attrs = attrs, + } + arg + }; + args.push_value(arg); + + if input.is_empty() { + break; + } + + let comma: Token![,] = input.parse()?; + args.push_punct(comma); + } + + Ok(args) + } + + fn fn_arg_typed(input: ParseStream) -> Result<PatType> { + // Hack to parse pre-2018 syntax in + // test/ui/rfc-2565-param-attrs/param-attrs-pretty.rs + // because the rest of the test case is valuable. + if input.peek(Ident) && input.peek2(Token![<]) { + let span = input.fork().parse::<Ident>()?.span(); + return Ok(PatType { + attrs: Vec::new(), + pat: Box::new(Pat::Wild(PatWild { + attrs: Vec::new(), + underscore_token: Token![_](span), + })), + colon_token: Token![:](span), + ty: input.parse()?, + }); + } + + Ok(PatType { + attrs: Vec::new(), + pat: Box::new(pat::parsing::multi_pat(input)?), + colon_token: input.parse()?, + ty: Box::new(match input.parse::<Option<Token![...]>>()? { + Some(dot3) => Type::Verbatim(variadic_to_tokens(&dot3)), + None => input.parse()?, + }), + }) + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ItemMod { + fn parse(input: ParseStream) -> Result<Self> { + let mut attrs = input.call(Attribute::parse_outer)?; + let vis: Visibility = input.parse()?; + let mod_token: Token![mod] = input.parse()?; + let ident: Ident = input.parse()?; + + let lookahead = input.lookahead1(); + if lookahead.peek(Token![;]) { + Ok(ItemMod { + attrs, + vis, + mod_token, + ident, + content: None, + semi: Some(input.parse()?), + }) + } else if lookahead.peek(token::Brace) { + let content; + let brace_token = braced!(content in input); + attr::parsing::parse_inner(&content, &mut attrs)?; + + let mut items = Vec::new(); + while !content.is_empty() { + items.push(content.parse()?); + } + + Ok(ItemMod { + attrs, + vis, + mod_token, + ident, + content: Some((brace_token, items)), + semi: None, + }) + } else { + Err(lookahead.error()) + } + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ItemForeignMod { + fn parse(input: ParseStream) -> Result<Self> { + let mut attrs = input.call(Attribute::parse_outer)?; + let abi: Abi = input.parse()?; + + let content; + let brace_token = braced!(content in input); + attr::parsing::parse_inner(&content, &mut attrs)?; + let mut items = Vec::new(); + while !content.is_empty() { + items.push(content.parse()?); + } + + Ok(ItemForeignMod { + attrs, + abi, + brace_token, + items, + }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ForeignItem { + fn parse(input: ParseStream) -> Result<Self> { + let begin = input.fork(); + let mut attrs = input.call(Attribute::parse_outer)?; + let ahead = input.fork(); + let vis: Visibility = ahead.parse()?; + + let lookahead = ahead.lookahead1(); + let mut item = if 
lookahead.peek(Token![fn]) || peek_signature(&ahead) { + let vis: Visibility = input.parse()?; + let sig: Signature = input.parse()?; + if input.peek(token::Brace) { + let content; + braced!(content in input); + content.call(Attribute::parse_inner)?; + content.call(Block::parse_within)?; + + Ok(ForeignItem::Verbatim(verbatim::between(begin, input))) + } else { + Ok(ForeignItem::Fn(ForeignItemFn { + attrs: Vec::new(), + vis, + sig, + semi_token: input.parse()?, + })) + } + } else if lookahead.peek(Token![static]) { + let vis = input.parse()?; + let static_token = input.parse()?; + let mutability = input.parse()?; + let ident = input.parse()?; + let colon_token = input.parse()?; + let ty = input.parse()?; + if input.peek(Token![=]) { + input.parse::<Token![=]>()?; + input.parse::<Expr>()?; + input.parse::<Token![;]>()?; + Ok(ForeignItem::Verbatim(verbatim::between(begin, input))) + } else { + Ok(ForeignItem::Static(ForeignItemStatic { + attrs: Vec::new(), + vis, + static_token, + mutability, + ident, + colon_token, + ty, + semi_token: input.parse()?, + })) + } + } else if lookahead.peek(Token![type]) { + parse_foreign_item_type(begin, input) + } else if vis.is_inherited() + && (lookahead.peek(Ident) + || lookahead.peek(Token![self]) + || lookahead.peek(Token![super]) + || lookahead.peek(Token![crate]) + || lookahead.peek(Token![::])) + { + input.parse().map(ForeignItem::Macro) + } else { + Err(lookahead.error()) + }?; + + let item_attrs = match &mut item { + ForeignItem::Fn(item) => &mut item.attrs, + ForeignItem::Static(item) => &mut item.attrs, + ForeignItem::Type(item) => &mut item.attrs, + ForeignItem::Macro(item) => &mut item.attrs, + ForeignItem::Verbatim(_) => return Ok(item), + + #[cfg(syn_no_non_exhaustive)] + _ => unreachable!(), + }; + attrs.append(item_attrs); + *item_attrs = attrs; + + Ok(item) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ForeignItemFn { + fn parse(input: ParseStream) -> Result<Self> { + let attrs = input.call(Attribute::parse_outer)?; + let vis: Visibility = input.parse()?; + let sig: Signature = input.parse()?; + let semi_token: Token![;] = input.parse()?; + Ok(ForeignItemFn { + attrs, + vis, + sig, + semi_token, + }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ForeignItemStatic { + fn parse(input: ParseStream) -> Result<Self> { + Ok(ForeignItemStatic { + attrs: input.call(Attribute::parse_outer)?, + vis: input.parse()?, + static_token: input.parse()?, + mutability: input.parse()?, + ident: input.parse()?, + colon_token: input.parse()?, + ty: input.parse()?, + semi_token: input.parse()?, + }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ForeignItemType { + fn parse(input: ParseStream) -> Result<Self> { + Ok(ForeignItemType { + attrs: input.call(Attribute::parse_outer)?, + vis: input.parse()?, + type_token: input.parse()?, + ident: input.parse()?, + semi_token: input.parse()?, + }) + } + } + + fn parse_foreign_item_type(begin: ParseBuffer, input: ParseStream) -> Result<ForeignItem> { + let FlexibleItemType { + vis, + defaultness, + type_token, + ident, + generics, + colon_token, + bounds: _, + ty, + semi_token, + } = FlexibleItemType::parse(input, WhereClauseLocation::BeforeEq)?; + + if defaultness.is_some() + || generics.lt_token.is_some() + || generics.where_clause.is_some() + || colon_token.is_some() + || ty.is_some() + { + Ok(ForeignItem::Verbatim(verbatim::between(begin, input))) + } else { + Ok(ForeignItem::Type(ForeignItemType { + attrs: 
Vec::new(), + vis, + type_token, + ident, + semi_token, + })) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ForeignItemMacro { + fn parse(input: ParseStream) -> Result<Self> { + let attrs = input.call(Attribute::parse_outer)?; + let mac: Macro = input.parse()?; + let semi_token: Option<Token![;]> = if mac.delimiter.is_brace() { + None + } else { + Some(input.parse()?) + }; + Ok(ForeignItemMacro { + attrs, + mac, + semi_token, + }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ItemType { + fn parse(input: ParseStream) -> Result<Self> { + Ok(ItemType { + attrs: input.call(Attribute::parse_outer)?, + vis: input.parse()?, + type_token: input.parse()?, + ident: input.parse()?, + generics: { + let mut generics: Generics = input.parse()?; + generics.where_clause = input.parse()?; + generics + }, + eq_token: input.parse()?, + ty: input.parse()?, + semi_token: input.parse()?, + }) + } + } + + fn parse_item_type(begin: ParseBuffer, input: ParseStream) -> Result<Item> { + let FlexibleItemType { + vis, + defaultness, + type_token, + ident, + generics, + colon_token, + bounds: _, + ty, + semi_token, + } = FlexibleItemType::parse(input, WhereClauseLocation::BeforeEq)?; + + if defaultness.is_some() || colon_token.is_some() || ty.is_none() { + Ok(Item::Verbatim(verbatim::between(begin, input))) + } else { + let (eq_token, ty) = ty.unwrap(); + Ok(Item::Type(ItemType { + attrs: Vec::new(), + vis, + type_token, + ident, + generics, + eq_token, + ty: Box::new(ty), + semi_token, + })) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ItemStruct { + fn parse(input: ParseStream) -> Result<Self> { + let attrs = input.call(Attribute::parse_outer)?; + let vis = input.parse::<Visibility>()?; + let struct_token = input.parse::<Token![struct]>()?; + let ident = input.parse::<Ident>()?; + let generics = input.parse::<Generics>()?; + let (where_clause, fields, semi_token) = derive::parsing::data_struct(input)?; + Ok(ItemStruct { + attrs, + vis, + struct_token, + ident, + generics: Generics { + where_clause, + ..generics + }, + fields, + semi_token, + }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ItemEnum { + fn parse(input: ParseStream) -> Result<Self> { + let attrs = input.call(Attribute::parse_outer)?; + let vis = input.parse::<Visibility>()?; + let enum_token = input.parse::<Token![enum]>()?; + let ident = input.parse::<Ident>()?; + let generics = input.parse::<Generics>()?; + let (where_clause, brace_token, variants) = derive::parsing::data_enum(input)?; + Ok(ItemEnum { + attrs, + vis, + enum_token, + ident, + generics: Generics { + where_clause, + ..generics + }, + brace_token, + variants, + }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ItemUnion { + fn parse(input: ParseStream) -> Result<Self> { + let attrs = input.call(Attribute::parse_outer)?; + let vis = input.parse::<Visibility>()?; + let union_token = input.parse::<Token![union]>()?; + let ident = input.parse::<Ident>()?; + let generics = input.parse::<Generics>()?; + let (where_clause, fields) = derive::parsing::data_union(input)?; + Ok(ItemUnion { + attrs, + vis, + union_token, + ident, + generics: Generics { + where_clause, + ..generics + }, + fields, + }) + } + } + + fn parse_trait_or_trait_alias(input: ParseStream) -> Result<Item> { + let (attrs, vis, trait_token, ident, generics) = parse_start_of_trait_alias(input)?; + let lookahead = input.lookahead1(); + if 
lookahead.peek(token::Brace) + || lookahead.peek(Token![:]) + || lookahead.peek(Token![where]) + { + let unsafety = None; + let auto_token = None; + parse_rest_of_trait( + input, + attrs, + vis, + unsafety, + auto_token, + trait_token, + ident, + generics, + ) + .map(Item::Trait) + } else if lookahead.peek(Token![=]) { + parse_rest_of_trait_alias(input, attrs, vis, trait_token, ident, generics) + .map(Item::TraitAlias) + } else { + Err(lookahead.error()) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ItemTrait { + fn parse(input: ParseStream) -> Result<Self> { + let outer_attrs = input.call(Attribute::parse_outer)?; + let vis: Visibility = input.parse()?; + let unsafety: Option<Token![unsafe]> = input.parse()?; + let auto_token: Option<Token![auto]> = input.parse()?; + let trait_token: Token![trait] = input.parse()?; + let ident: Ident = input.parse()?; + let generics: Generics = input.parse()?; + parse_rest_of_trait( + input, + outer_attrs, + vis, + unsafety, + auto_token, + trait_token, + ident, + generics, + ) + } + } + + fn parse_rest_of_trait( + input: ParseStream, + mut attrs: Vec<Attribute>, + vis: Visibility, + unsafety: Option<Token![unsafe]>, + auto_token: Option<Token![auto]>, + trait_token: Token![trait], + ident: Ident, + mut generics: Generics, + ) -> Result<ItemTrait> { + let colon_token: Option<Token![:]> = input.parse()?; + + let mut supertraits = Punctuated::new(); + if colon_token.is_some() { + loop { + if input.peek(Token![where]) || input.peek(token::Brace) { + break; + } + supertraits.push_value(input.parse()?); + if input.peek(Token![where]) || input.peek(token::Brace) { + break; + } + supertraits.push_punct(input.parse()?); + } + } + + generics.where_clause = input.parse()?; + + let content; + let brace_token = braced!(content in input); + attr::parsing::parse_inner(&content, &mut attrs)?; + let mut items = Vec::new(); + while !content.is_empty() { + items.push(content.parse()?); + } + + Ok(ItemTrait { + attrs, + vis, + unsafety, + auto_token, + trait_token, + ident, + generics, + colon_token, + supertraits, + brace_token, + items, + }) + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ItemTraitAlias { + fn parse(input: ParseStream) -> Result<Self> { + let (attrs, vis, trait_token, ident, generics) = parse_start_of_trait_alias(input)?; + parse_rest_of_trait_alias(input, attrs, vis, trait_token, ident, generics) + } + } + + fn parse_start_of_trait_alias( + input: ParseStream, + ) -> Result<(Vec<Attribute>, Visibility, Token![trait], Ident, Generics)> { + let attrs = input.call(Attribute::parse_outer)?; + let vis: Visibility = input.parse()?; + let trait_token: Token![trait] = input.parse()?; + let ident: Ident = input.parse()?; + let generics: Generics = input.parse()?; + Ok((attrs, vis, trait_token, ident, generics)) + } + + fn parse_rest_of_trait_alias( + input: ParseStream, + attrs: Vec<Attribute>, + vis: Visibility, + trait_token: Token![trait], + ident: Ident, + mut generics: Generics, + ) -> Result<ItemTraitAlias> { + let eq_token: Token![=] = input.parse()?; + + let mut bounds = Punctuated::new(); + loop { + if input.peek(Token![where]) || input.peek(Token![;]) { + break; + } + bounds.push_value(input.parse()?); + if input.peek(Token![where]) || input.peek(Token![;]) { + break; + } + bounds.push_punct(input.parse()?); + } + + generics.where_clause = input.parse()?; + let semi_token: Token![;] = input.parse()?; + + Ok(ItemTraitAlias { + attrs, + vis, + trait_token, + ident, + generics, + eq_token, + 
bounds, + semi_token, + }) + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for TraitItem { + fn parse(input: ParseStream) -> Result<Self> { + let begin = input.fork(); + let mut attrs = input.call(Attribute::parse_outer)?; + let vis: Visibility = input.parse()?; + let defaultness: Option<Token![default]> = input.parse()?; + let ahead = input.fork(); + + let lookahead = ahead.lookahead1(); + let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead) { + input.parse().map(TraitItem::Method) + } else if lookahead.peek(Token![const]) { + ahead.parse::<Token![const]>()?; + let lookahead = ahead.lookahead1(); + if lookahead.peek(Ident) || lookahead.peek(Token![_]) { + input.parse().map(TraitItem::Const) + } else if lookahead.peek(Token![async]) + || lookahead.peek(Token![unsafe]) + || lookahead.peek(Token![extern]) + || lookahead.peek(Token![fn]) + { + input.parse().map(TraitItem::Method) + } else { + Err(lookahead.error()) + } + } else if lookahead.peek(Token![type]) { + parse_trait_item_type(begin.fork(), input) + } else if lookahead.peek(Ident) + || lookahead.peek(Token![self]) + || lookahead.peek(Token![super]) + || lookahead.peek(Token![crate]) + || lookahead.peek(Token![::]) + { + input.parse().map(TraitItem::Macro) + } else { + Err(lookahead.error()) + }?; + + match (vis, defaultness) { + (Visibility::Inherited, None) => {} + _ => return Ok(TraitItem::Verbatim(verbatim::between(begin, input))), + } + + let item_attrs = match &mut item { + TraitItem::Const(item) => &mut item.attrs, + TraitItem::Method(item) => &mut item.attrs, + TraitItem::Type(item) => &mut item.attrs, + TraitItem::Macro(item) => &mut item.attrs, + TraitItem::Verbatim(_) => unreachable!(), + + #[cfg(syn_no_non_exhaustive)] + _ => unreachable!(), + }; + attrs.append(item_attrs); + *item_attrs = attrs; + Ok(item) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for TraitItemConst { + fn parse(input: ParseStream) -> Result<Self> { + Ok(TraitItemConst { + attrs: input.call(Attribute::parse_outer)?, + const_token: input.parse()?, + ident: { + let lookahead = input.lookahead1(); + if lookahead.peek(Ident) || lookahead.peek(Token![_]) { + input.call(Ident::parse_any)? 
+ } else { + return Err(lookahead.error()); + } + }, + colon_token: input.parse()?, + ty: input.parse()?, + default: { + if input.peek(Token![=]) { + let eq_token: Token![=] = input.parse()?; + let default: Expr = input.parse()?; + Some((eq_token, default)) + } else { + None + } + }, + semi_token: input.parse()?, + }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for TraitItemMethod { + fn parse(input: ParseStream) -> Result<Self> { + let mut attrs = input.call(Attribute::parse_outer)?; + let sig: Signature = input.parse()?; + + let lookahead = input.lookahead1(); + let (brace_token, stmts, semi_token) = if lookahead.peek(token::Brace) { + let content; + let brace_token = braced!(content in input); + attr::parsing::parse_inner(&content, &mut attrs)?; + let stmts = content.call(Block::parse_within)?; + (Some(brace_token), stmts, None) + } else if lookahead.peek(Token![;]) { + let semi_token: Token![;] = input.parse()?; + (None, Vec::new(), Some(semi_token)) + } else { + return Err(lookahead.error()); + }; + + Ok(TraitItemMethod { + attrs, + sig, + default: brace_token.map(|brace_token| Block { brace_token, stmts }), + semi_token, + }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for TraitItemType { + fn parse(input: ParseStream) -> Result<Self> { + let attrs = input.call(Attribute::parse_outer)?; + let type_token: Token![type] = input.parse()?; + let ident: Ident = input.parse()?; + let mut generics: Generics = input.parse()?; + let colon_token: Option<Token![:]> = input.parse()?; + + let mut bounds = Punctuated::new(); + if colon_token.is_some() { + while !input.peek(Token![where]) && !input.peek(Token![=]) && !input.peek(Token![;]) + { + if !bounds.is_empty() { + bounds.push_punct(input.parse()?); + } + bounds.push_value(input.parse()?); + } + } + + let default = if input.peek(Token![=]) { + let eq_token: Token![=] = input.parse()?; + let default: Type = input.parse()?; + Some((eq_token, default)) + } else { + None + }; + + generics.where_clause = input.parse()?; + let semi_token: Token![;] = input.parse()?; + + Ok(TraitItemType { + attrs, + type_token, + ident, + generics, + colon_token, + bounds, + default, + semi_token, + }) + } + } + + fn parse_trait_item_type(begin: ParseBuffer, input: ParseStream) -> Result<TraitItem> { + let FlexibleItemType { + vis, + defaultness, + type_token, + ident, + generics, + colon_token, + bounds, + ty, + semi_token, + } = FlexibleItemType::parse(input, WhereClauseLocation::Both)?; + + if defaultness.is_some() || vis.is_some() { + Ok(TraitItem::Verbatim(verbatim::between(begin, input))) + } else { + Ok(TraitItem::Type(TraitItemType { + attrs: Vec::new(), + type_token, + ident, + generics, + colon_token, + bounds, + default: ty, + semi_token, + })) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for TraitItemMacro { + fn parse(input: ParseStream) -> Result<Self> { + let attrs = input.call(Attribute::parse_outer)?; + let mac: Macro = input.parse()?; + let semi_token: Option<Token![;]> = if mac.delimiter.is_brace() { + None + } else { + Some(input.parse()?) 
+ }; + Ok(TraitItemMacro { + attrs, + mac, + semi_token, + }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ItemImpl { + fn parse(input: ParseStream) -> Result<Self> { + let allow_verbatim_impl = false; + parse_impl(input, allow_verbatim_impl).map(Option::unwrap) + } + } + + fn parse_impl(input: ParseStream, allow_verbatim_impl: bool) -> Result<Option<ItemImpl>> { + let mut attrs = input.call(Attribute::parse_outer)?; + let has_visibility = allow_verbatim_impl && input.parse::<Visibility>()?.is_some(); + let defaultness: Option<Token![default]> = input.parse()?; + let unsafety: Option<Token![unsafe]> = input.parse()?; + let impl_token: Token![impl] = input.parse()?; + + let has_generics = input.peek(Token![<]) + && (input.peek2(Token![>]) + || input.peek2(Token![#]) + || (input.peek2(Ident) || input.peek2(Lifetime)) + && (input.peek3(Token![:]) + || input.peek3(Token![,]) + || input.peek3(Token![>]) + || input.peek3(Token![=])) + || input.peek2(Token![const])); + let mut generics: Generics = if has_generics { + input.parse()? + } else { + Generics::default() + }; + + let is_const_impl = allow_verbatim_impl + && (input.peek(Token![const]) || input.peek(Token![?]) && input.peek2(Token![const])); + if is_const_impl { + input.parse::<Option<Token![?]>>()?; + input.parse::<Token![const]>()?; + } + + let begin = input.fork(); + let polarity = if input.peek(Token![!]) && !input.peek2(token::Brace) { + Some(input.parse::<Token![!]>()?) + } else { + None + }; + + #[cfg(not(feature = "printing"))] + let first_ty_span = input.span(); + let mut first_ty: Type = input.parse()?; + let self_ty: Type; + let trait_; + + let is_impl_for = input.peek(Token![for]); + if is_impl_for { + let for_token: Token![for] = input.parse()?; + let mut first_ty_ref = &first_ty; + while let Type::Group(ty) = first_ty_ref { + first_ty_ref = &ty.elem; + } + if let Type::Path(TypePath { qself: None, .. 
}) = first_ty_ref { + while let Type::Group(ty) = first_ty { + first_ty = *ty.elem; + } + if let Type::Path(TypePath { qself: None, path }) = first_ty { + trait_ = Some((polarity, path, for_token)); + } else { + unreachable!(); + } + } else if !allow_verbatim_impl { + #[cfg(feature = "printing")] + return Err(Error::new_spanned(first_ty_ref, "expected trait path")); + #[cfg(not(feature = "printing"))] + return Err(Error::new(first_ty_span, "expected trait path")); + } else { + trait_ = None; + } + self_ty = input.parse()?; + } else { + trait_ = None; + self_ty = if polarity.is_none() { + first_ty + } else { + Type::Verbatim(verbatim::between(begin, input)) + }; + } + + generics.where_clause = input.parse()?; + + let content; + let brace_token = braced!(content in input); + attr::parsing::parse_inner(&content, &mut attrs)?; + + let mut items = Vec::new(); + while !content.is_empty() { + items.push(content.parse()?); + } + + if has_visibility || is_const_impl || is_impl_for && trait_.is_none() { + Ok(None) + } else { + Ok(Some(ItemImpl { + attrs, + defaultness, + unsafety, + impl_token, + generics, + trait_, + self_ty: Box::new(self_ty), + brace_token, + items, + })) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ImplItem { + fn parse(input: ParseStream) -> Result<Self> { + let begin = input.fork(); + let mut attrs = input.call(Attribute::parse_outer)?; + let ahead = input.fork(); + let vis: Visibility = ahead.parse()?; + + let mut lookahead = ahead.lookahead1(); + let defaultness = if lookahead.peek(Token![default]) && !ahead.peek2(Token![!]) { + let defaultness: Token![default] = ahead.parse()?; + lookahead = ahead.lookahead1(); + Some(defaultness) + } else { + None + }; + + let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead) { + input.parse().map(ImplItem::Method) + } else if lookahead.peek(Token![const]) { + let const_token: Token![const] = ahead.parse()?; + let lookahead = ahead.lookahead1(); + if lookahead.peek(Ident) || lookahead.peek(Token![_]) { + input.advance_to(&ahead); + let ident: Ident = input.call(Ident::parse_any)?; + let colon_token: Token![:] = input.parse()?; + let ty: Type = input.parse()?; + if let Some(eq_token) = input.parse()? 
{ + return Ok(ImplItem::Const(ImplItemConst { + attrs, + vis, + defaultness, + const_token, + ident, + colon_token, + ty, + eq_token, + expr: input.parse()?, + semi_token: input.parse()?, + })); + } else { + input.parse::<Token![;]>()?; + return Ok(ImplItem::Verbatim(verbatim::between(begin, input))); + } + } else { + Err(lookahead.error()) + } + } else if lookahead.peek(Token![type]) { + parse_impl_item_type(begin, input) + } else if vis.is_inherited() + && defaultness.is_none() + && (lookahead.peek(Ident) + || lookahead.peek(Token![self]) + || lookahead.peek(Token![super]) + || lookahead.peek(Token![crate]) + || lookahead.peek(Token![::])) + { + input.parse().map(ImplItem::Macro) + } else { + Err(lookahead.error()) + }?; + + { + let item_attrs = match &mut item { + ImplItem::Const(item) => &mut item.attrs, + ImplItem::Method(item) => &mut item.attrs, + ImplItem::Type(item) => &mut item.attrs, + ImplItem::Macro(item) => &mut item.attrs, + ImplItem::Verbatim(_) => return Ok(item), + + #[cfg(syn_no_non_exhaustive)] + _ => unreachable!(), + }; + attrs.append(item_attrs); + *item_attrs = attrs; + } + + Ok(item) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ImplItemConst { + fn parse(input: ParseStream) -> Result<Self> { + Ok(ImplItemConst { + attrs: input.call(Attribute::parse_outer)?, + vis: input.parse()?, + defaultness: input.parse()?, + const_token: input.parse()?, + ident: { + let lookahead = input.lookahead1(); + if lookahead.peek(Ident) || lookahead.peek(Token![_]) { + input.call(Ident::parse_any)? + } else { + return Err(lookahead.error()); + } + }, + colon_token: input.parse()?, + ty: input.parse()?, + eq_token: input.parse()?, + expr: input.parse()?, + semi_token: input.parse()?, + }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ImplItemMethod { + fn parse(input: ParseStream) -> Result<Self> { + let mut attrs = input.call(Attribute::parse_outer)?; + let vis: Visibility = input.parse()?; + let defaultness: Option<Token![default]> = input.parse()?; + let sig: Signature = input.parse()?; + + let block = if let Some(semi) = input.parse::<Option<Token![;]>>()? { + // Accept methods without a body in an impl block because + // rustc's *parser* does not reject them (the compilation error + // is emitted later than parsing) and it can be useful for macro + // DSLs. 
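+                // The missing body is represented as a block containing a single
+                // verbatim `;` statement whose spans reuse the semicolon's span, so
+                // printing the item round-trips back to `sig;` (see the special case
+                // in `ToTokens for ImplItemMethod`).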
+ let mut punct = Punct::new(';', Spacing::Alone); + punct.set_span(semi.span); + let tokens = TokenStream::from_iter(vec![TokenTree::Punct(punct)]); + Block { + brace_token: Brace { span: semi.span }, + stmts: vec![Stmt::Item(Item::Verbatim(tokens))], + } + } else { + let content; + let brace_token = braced!(content in input); + attrs.extend(content.call(Attribute::parse_inner)?); + Block { + brace_token, + stmts: content.call(Block::parse_within)?, + } + }; + + Ok(ImplItemMethod { + attrs, + vis, + defaultness, + sig, + block, + }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ImplItemType { + fn parse(input: ParseStream) -> Result<Self> { + let attrs = input.call(Attribute::parse_outer)?; + let vis: Visibility = input.parse()?; + let defaultness: Option<Token![default]> = input.parse()?; + let type_token: Token![type] = input.parse()?; + let ident: Ident = input.parse()?; + let mut generics: Generics = input.parse()?; + let eq_token: Token![=] = input.parse()?; + let ty: Type = input.parse()?; + generics.where_clause = input.parse()?; + let semi_token: Token![;] = input.parse()?; + Ok(ImplItemType { + attrs, + vis, + defaultness, + type_token, + ident, + generics, + eq_token, + ty, + semi_token, + }) + } + } + + fn parse_impl_item_type(begin: ParseBuffer, input: ParseStream) -> Result<ImplItem> { + let FlexibleItemType { + vis, + defaultness, + type_token, + ident, + generics, + colon_token, + bounds: _, + ty, + semi_token, + } = FlexibleItemType::parse(input, WhereClauseLocation::Both)?; + + if colon_token.is_some() || ty.is_none() { + Ok(ImplItem::Verbatim(verbatim::between(begin, input))) + } else { + let (eq_token, ty) = ty.unwrap(); + Ok(ImplItem::Type(ImplItemType { + attrs: Vec::new(), + vis, + defaultness, + type_token, + ident, + generics, + eq_token, + ty, + semi_token, + })) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ImplItemMacro { + fn parse(input: ParseStream) -> Result<Self> { + let attrs = input.call(Attribute::parse_outer)?; + let mac: Macro = input.parse()?; + let semi_token: Option<Token![;]> = if mac.delimiter.is_brace() { + None + } else { + Some(input.parse()?) 
+ }; + Ok(ImplItemMacro { + attrs, + mac, + semi_token, + }) + } + } + + impl Visibility { + fn is_inherited(&self) -> bool { + match *self { + Visibility::Inherited => true, + _ => false, + } + } + } + + impl MacroDelimiter { + fn is_brace(&self) -> bool { + match *self { + MacroDelimiter::Brace(_) => true, + MacroDelimiter::Paren(_) | MacroDelimiter::Bracket(_) => false, + } + } + } +} + +#[cfg(feature = "printing")] +mod printing { + use super::*; + use crate::attr::FilterAttrs; + use crate::print::TokensOrDefault; + use proc_macro2::TokenStream; + use quote::{ToTokens, TokenStreamExt}; + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ItemExternCrate { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + self.vis.to_tokens(tokens); + self.extern_token.to_tokens(tokens); + self.crate_token.to_tokens(tokens); + self.ident.to_tokens(tokens); + if let Some((as_token, rename)) = &self.rename { + as_token.to_tokens(tokens); + rename.to_tokens(tokens); + } + self.semi_token.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ItemUse { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + self.vis.to_tokens(tokens); + self.use_token.to_tokens(tokens); + self.leading_colon.to_tokens(tokens); + self.tree.to_tokens(tokens); + self.semi_token.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ItemStatic { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + self.vis.to_tokens(tokens); + self.static_token.to_tokens(tokens); + self.mutability.to_tokens(tokens); + self.ident.to_tokens(tokens); + self.colon_token.to_tokens(tokens); + self.ty.to_tokens(tokens); + self.eq_token.to_tokens(tokens); + self.expr.to_tokens(tokens); + self.semi_token.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ItemConst { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + self.vis.to_tokens(tokens); + self.const_token.to_tokens(tokens); + self.ident.to_tokens(tokens); + self.colon_token.to_tokens(tokens); + self.ty.to_tokens(tokens); + self.eq_token.to_tokens(tokens); + self.expr.to_tokens(tokens); + self.semi_token.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ItemFn { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + self.vis.to_tokens(tokens); + self.sig.to_tokens(tokens); + self.block.brace_token.surround(tokens, |tokens| { + tokens.append_all(self.attrs.inner()); + tokens.append_all(&self.block.stmts); + }); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ItemMod { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + self.vis.to_tokens(tokens); + self.mod_token.to_tokens(tokens); + self.ident.to_tokens(tokens); + if let Some((brace, items)) = &self.content { + brace.surround(tokens, |tokens| { + tokens.append_all(self.attrs.inner()); + tokens.append_all(items); + }); + } else { + TokensOrDefault(&self.semi).to_tokens(tokens); + } + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ItemForeignMod { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + self.abi.to_tokens(tokens); + self.brace_token.surround(tokens, |tokens| { + 
tokens.append_all(self.attrs.inner()); + tokens.append_all(&self.items); + }); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ItemType { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + self.vis.to_tokens(tokens); + self.type_token.to_tokens(tokens); + self.ident.to_tokens(tokens); + self.generics.to_tokens(tokens); + self.generics.where_clause.to_tokens(tokens); + self.eq_token.to_tokens(tokens); + self.ty.to_tokens(tokens); + self.semi_token.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ItemEnum { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + self.vis.to_tokens(tokens); + self.enum_token.to_tokens(tokens); + self.ident.to_tokens(tokens); + self.generics.to_tokens(tokens); + self.generics.where_clause.to_tokens(tokens); + self.brace_token.surround(tokens, |tokens| { + self.variants.to_tokens(tokens); + }); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ItemStruct { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + self.vis.to_tokens(tokens); + self.struct_token.to_tokens(tokens); + self.ident.to_tokens(tokens); + self.generics.to_tokens(tokens); + match &self.fields { + Fields::Named(fields) => { + self.generics.where_clause.to_tokens(tokens); + fields.to_tokens(tokens); + } + Fields::Unnamed(fields) => { + fields.to_tokens(tokens); + self.generics.where_clause.to_tokens(tokens); + TokensOrDefault(&self.semi_token).to_tokens(tokens); + } + Fields::Unit => { + self.generics.where_clause.to_tokens(tokens); + TokensOrDefault(&self.semi_token).to_tokens(tokens); + } + } + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ItemUnion { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + self.vis.to_tokens(tokens); + self.union_token.to_tokens(tokens); + self.ident.to_tokens(tokens); + self.generics.to_tokens(tokens); + self.generics.where_clause.to_tokens(tokens); + self.fields.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ItemTrait { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + self.vis.to_tokens(tokens); + self.unsafety.to_tokens(tokens); + self.auto_token.to_tokens(tokens); + self.trait_token.to_tokens(tokens); + self.ident.to_tokens(tokens); + self.generics.to_tokens(tokens); + if !self.supertraits.is_empty() { + TokensOrDefault(&self.colon_token).to_tokens(tokens); + self.supertraits.to_tokens(tokens); + } + self.generics.where_clause.to_tokens(tokens); + self.brace_token.surround(tokens, |tokens| { + tokens.append_all(self.attrs.inner()); + tokens.append_all(&self.items); + }); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ItemTraitAlias { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + self.vis.to_tokens(tokens); + self.trait_token.to_tokens(tokens); + self.ident.to_tokens(tokens); + self.generics.to_tokens(tokens); + self.eq_token.to_tokens(tokens); + self.bounds.to_tokens(tokens); + self.generics.where_clause.to_tokens(tokens); + self.semi_token.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ItemImpl { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + 
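+            // For trait impls, `self.trait_` holds the optional `!` polarity, the
+            // trait path, and the `for` token; they are printed between the
+            // generics and the self type below.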
self.defaultness.to_tokens(tokens); + self.unsafety.to_tokens(tokens); + self.impl_token.to_tokens(tokens); + self.generics.to_tokens(tokens); + if let Some((polarity, path, for_token)) = &self.trait_ { + polarity.to_tokens(tokens); + path.to_tokens(tokens); + for_token.to_tokens(tokens); + } + self.self_ty.to_tokens(tokens); + self.generics.where_clause.to_tokens(tokens); + self.brace_token.surround(tokens, |tokens| { + tokens.append_all(self.attrs.inner()); + tokens.append_all(&self.items); + }); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ItemMacro { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + self.mac.path.to_tokens(tokens); + self.mac.bang_token.to_tokens(tokens); + self.ident.to_tokens(tokens); + match &self.mac.delimiter { + MacroDelimiter::Paren(paren) => { + paren.surround(tokens, |tokens| self.mac.tokens.to_tokens(tokens)); + } + MacroDelimiter::Brace(brace) => { + brace.surround(tokens, |tokens| self.mac.tokens.to_tokens(tokens)); + } + MacroDelimiter::Bracket(bracket) => { + bracket.surround(tokens, |tokens| self.mac.tokens.to_tokens(tokens)); + } + } + self.semi_token.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ItemMacro2 { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + self.vis.to_tokens(tokens); + self.macro_token.to_tokens(tokens); + self.ident.to_tokens(tokens); + self.rules.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for UsePath { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.ident.to_tokens(tokens); + self.colon2_token.to_tokens(tokens); + self.tree.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for UseName { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.ident.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for UseRename { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.ident.to_tokens(tokens); + self.as_token.to_tokens(tokens); + self.rename.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for UseGlob { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.star_token.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for UseGroup { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.brace_token.surround(tokens, |tokens| { + self.items.to_tokens(tokens); + }); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for TraitItemConst { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + self.const_token.to_tokens(tokens); + self.ident.to_tokens(tokens); + self.colon_token.to_tokens(tokens); + self.ty.to_tokens(tokens); + if let Some((eq_token, default)) = &self.default { + eq_token.to_tokens(tokens); + default.to_tokens(tokens); + } + self.semi_token.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for TraitItemMethod { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + self.sig.to_tokens(tokens); + match &self.default { + Some(block) => { + block.brace_token.surround(tokens, |tokens| { + tokens.append_all(self.attrs.inner()); + tokens.append_all(&block.stmts); + }); + } + None => { + 
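+                    // No default body: emit the recorded trailing semicolon, or a
+                    // default `;` token if none was stored.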
TokensOrDefault(&self.semi_token).to_tokens(tokens); + } + } + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for TraitItemType { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + self.type_token.to_tokens(tokens); + self.ident.to_tokens(tokens); + self.generics.to_tokens(tokens); + if !self.bounds.is_empty() { + TokensOrDefault(&self.colon_token).to_tokens(tokens); + self.bounds.to_tokens(tokens); + } + if let Some((eq_token, default)) = &self.default { + eq_token.to_tokens(tokens); + default.to_tokens(tokens); + } + self.generics.where_clause.to_tokens(tokens); + self.semi_token.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for TraitItemMacro { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + self.mac.to_tokens(tokens); + self.semi_token.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ImplItemConst { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + self.vis.to_tokens(tokens); + self.defaultness.to_tokens(tokens); + self.const_token.to_tokens(tokens); + self.ident.to_tokens(tokens); + self.colon_token.to_tokens(tokens); + self.ty.to_tokens(tokens); + self.eq_token.to_tokens(tokens); + self.expr.to_tokens(tokens); + self.semi_token.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ImplItemMethod { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + self.vis.to_tokens(tokens); + self.defaultness.to_tokens(tokens); + self.sig.to_tokens(tokens); + if self.block.stmts.len() == 1 { + if let Stmt::Item(Item::Verbatim(verbatim)) = &self.block.stmts[0] { + if verbatim.to_string() == ";" { + verbatim.to_tokens(tokens); + return; + } + } + } + self.block.brace_token.surround(tokens, |tokens| { + tokens.append_all(self.attrs.inner()); + tokens.append_all(&self.block.stmts); + }); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ImplItemType { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + self.vis.to_tokens(tokens); + self.defaultness.to_tokens(tokens); + self.type_token.to_tokens(tokens); + self.ident.to_tokens(tokens); + self.generics.to_tokens(tokens); + self.eq_token.to_tokens(tokens); + self.ty.to_tokens(tokens); + self.generics.where_clause.to_tokens(tokens); + self.semi_token.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ImplItemMacro { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + self.mac.to_tokens(tokens); + self.semi_token.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ForeignItemFn { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + self.vis.to_tokens(tokens); + self.sig.to_tokens(tokens); + self.semi_token.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ForeignItemStatic { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + self.vis.to_tokens(tokens); + self.static_token.to_tokens(tokens); + self.mutability.to_tokens(tokens); + self.ident.to_tokens(tokens); + self.colon_token.to_tokens(tokens); + self.ty.to_tokens(tokens); + 
self.semi_token.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ForeignItemType { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + self.vis.to_tokens(tokens); + self.type_token.to_tokens(tokens); + self.ident.to_tokens(tokens); + self.semi_token.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ForeignItemMacro { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + self.mac.to_tokens(tokens); + self.semi_token.to_tokens(tokens); + } + } + + fn maybe_variadic_to_tokens(arg: &FnArg, tokens: &mut TokenStream) -> bool { + let arg = match arg { + FnArg::Typed(arg) => arg, + FnArg::Receiver(receiver) => { + receiver.to_tokens(tokens); + return false; + } + }; + + match arg.ty.as_ref() { + Type::Verbatim(ty) if ty.to_string() == "..." => { + match arg.pat.as_ref() { + Pat::Verbatim(pat) if pat.to_string() == "..." => { + tokens.append_all(arg.attrs.outer()); + pat.to_tokens(tokens); + } + _ => arg.to_tokens(tokens), + } + true + } + _ => { + arg.to_tokens(tokens); + false + } + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for Signature { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.constness.to_tokens(tokens); + self.asyncness.to_tokens(tokens); + self.unsafety.to_tokens(tokens); + self.abi.to_tokens(tokens); + self.fn_token.to_tokens(tokens); + self.ident.to_tokens(tokens); + self.generics.to_tokens(tokens); + self.paren_token.surround(tokens, |tokens| { + let mut last_is_variadic = false; + for pair in self.inputs.pairs() { + last_is_variadic = maybe_variadic_to_tokens(pair.value(), tokens); + pair.punct().to_tokens(tokens); + } + if self.variadic.is_some() && !last_is_variadic { + if !self.inputs.empty_or_trailing() { + <Token![,]>::default().to_tokens(tokens); + } + self.variadic.to_tokens(tokens); + } + }); + self.output.to_tokens(tokens); + self.generics.where_clause.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for Receiver { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + if let Some((ampersand, lifetime)) = &self.reference { + ampersand.to_tokens(tokens); + lifetime.to_tokens(tokens); + } + self.mutability.to_tokens(tokens); + self.self_token.to_tokens(tokens); + } + } +} diff --git a/third_party/rust/syn/src/lib.rs b/third_party/rust/syn/src/lib.rs new file mode 100644 index 0000000000..e47ba28c6c --- /dev/null +++ b/third_party/rust/syn/src/lib.rs @@ -0,0 +1,984 @@ +//! [![github]](https://github.com/dtolnay/syn) [![crates-io]](https://crates.io/crates/syn) [![docs-rs]](crate) +//! +//! [github]: https://img.shields.io/badge/github-8da0cb?style=for-the-badge&labelColor=555555&logo=github +//! [crates-io]: https://img.shields.io/badge/crates.io-fc8d62?style=for-the-badge&labelColor=555555&logo=rust +//! [docs-rs]: https://img.shields.io/badge/docs.rs-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs +//! +//! <br> +//! +//! Syn is a parsing library for parsing a stream of Rust tokens into a syntax +//! tree of Rust source code. +//! +//! Currently this library is geared toward use in Rust procedural macros, but +//! contains some APIs that may be useful more generally. +//! +//! - **Data structures** — Syn provides a complete syntax tree that can +//! represent any valid Rust source code. The syntax tree is rooted at +//! 
[`syn::File`] which represents a full source file, but there are other +//! entry points that may be useful to procedural macros including +//! [`syn::Item`], [`syn::Expr`] and [`syn::Type`]. +//! +//! - **Derives** — Of particular interest to derive macros is +//! [`syn::DeriveInput`] which is any of the three legal input items to a +//! derive macro. An example below shows using this type in a library that can +//! derive implementations of a user-defined trait. +//! +//! - **Parsing** — Parsing in Syn is built around [parser functions] with the +//! signature `fn(ParseStream) -> Result<T>`. Every syntax tree node defined +//! by Syn is individually parsable and may be used as a building block for +//! custom syntaxes, or you may dream up your own brand new syntax without +//! involving any of our syntax tree types. +//! +//! - **Location information** — Every token parsed by Syn is associated with a +//! `Span` that tracks line and column information back to the source of that +//! token. These spans allow a procedural macro to display detailed error +//! messages pointing to all the right places in the user's code. There is an +//! example of this below. +//! +//! - **Feature flags** — Functionality is aggressively feature gated so your +//! procedural macros enable only what they need, and do not pay in compile +//! time for all the rest. +//! +//! [`syn::File`]: File +//! [`syn::Item`]: Item +//! [`syn::Expr`]: Expr +//! [`syn::Type`]: Type +//! [`syn::DeriveInput`]: DeriveInput +//! [parser functions]: mod@parse +//! +//! <br> +//! +//! # Example of a derive macro +//! +//! The canonical derive macro using Syn looks like this. We write an ordinary +//! Rust function tagged with a `proc_macro_derive` attribute and the name of +//! the trait we are deriving. Any time that derive appears in the user's code, +//! the Rust compiler passes their data structure as tokens into our macro. We +//! get to execute arbitrary Rust code to figure out what to do with those +//! tokens, then hand some tokens back to the compiler to compile into the +//! user's crate. +//! +//! [`TokenStream`]: proc_macro::TokenStream +//! +//! ```toml +//! [dependencies] +//! syn = "1.0" +//! quote = "1.0" +//! +//! [lib] +//! proc-macro = true +//! ``` +//! +//! ``` +//! # extern crate proc_macro; +//! # +//! use proc_macro::TokenStream; +//! use quote::quote; +//! use syn::{parse_macro_input, DeriveInput}; +//! +//! # const IGNORE_TOKENS: &str = stringify! { +//! #[proc_macro_derive(MyMacro)] +//! # }; +//! pub fn my_macro(input: TokenStream) -> TokenStream { +//! // Parse the input tokens into a syntax tree +//! let input = parse_macro_input!(input as DeriveInput); +//! +//! // Build the output, possibly using quasi-quotation +//! let expanded = quote! { +//! // ... +//! }; +//! +//! // Hand the output tokens back to the compiler +//! TokenStream::from(expanded) +//! } +//! ``` +//! +//! The [`heapsize`] example directory shows a complete working implementation +//! of a derive macro. It works on any Rust compiler 1.31+. The example derives +//! a `HeapSize` trait which computes an estimate of the amount of heap memory +//! owned by a value. +//! +//! [`heapsize`]: https://github.com/dtolnay/syn/tree/master/examples/heapsize +//! +//! ``` +//! pub trait HeapSize { +//! /// Total number of bytes of heap memory owned by `self`. +//! fn heap_size_of_children(&self) -> usize; +//! } +//! ``` +//! +//! The derive macro allows users to write `#[derive(HeapSize)]` on data +//! structures in their program. +//! +//! 
``` +//! # const IGNORE_TOKENS: &str = stringify! { +//! #[derive(HeapSize)] +//! # }; +//! struct Demo<'a, T: ?Sized> { +//! a: Box<T>, +//! b: u8, +//! c: &'a str, +//! d: String, +//! } +//! ``` +//! +//! <p><br></p> +//! +//! # Spans and error reporting +//! +//! The token-based procedural macro API provides great control over where the +//! compiler's error messages are displayed in user code. Consider the error the +//! user sees if one of their field types does not implement `HeapSize`. +//! +//! ``` +//! # const IGNORE_TOKENS: &str = stringify! { +//! #[derive(HeapSize)] +//! # }; +//! struct Broken { +//! ok: String, +//! bad: std::thread::Thread, +//! } +//! ``` +//! +//! By tracking span information all the way through the expansion of a +//! procedural macro as shown in the `heapsize` example, token-based macros in +//! Syn are able to trigger errors that directly pinpoint the source of the +//! problem. +//! +//! ```text +//! error[E0277]: the trait bound `std::thread::Thread: HeapSize` is not satisfied +//! --> src/main.rs:7:5 +//! | +//! 7 | bad: std::thread::Thread, +//! | ^^^^^^^^^^^^^^^^^^^^^^^^ the trait `HeapSize` is not implemented for `Thread` +//! ``` +//! +//! <br> +//! +//! # Parsing a custom syntax +//! +//! The [`lazy-static`] example directory shows the implementation of a +//! `functionlike!(...)` procedural macro in which the input tokens are parsed +//! using Syn's parsing API. +//! +//! [`lazy-static`]: https://github.com/dtolnay/syn/tree/master/examples/lazy-static +//! +//! The example reimplements the popular `lazy_static` crate from crates.io as a +//! procedural macro. +//! +//! ``` +//! # macro_rules! lazy_static { +//! # ($($tt:tt)*) => {} +//! # } +//! # +//! lazy_static! { +//! static ref USERNAME: Regex = Regex::new("^[a-z0-9_-]{3,16}$").unwrap(); +//! } +//! ``` +//! +//! The implementation shows how to trigger custom warnings and error messages +//! on the macro input. +//! +//! ```text +//! warning: come on, pick a more creative name +//! --> src/main.rs:10:16 +//! | +//! 10 | static ref FOO: String = "lazy_static".to_owned(); +//! | ^^^ +//! ``` +//! +//! <br> +//! +//! # Testing +//! +//! When testing macros, we often care not just that the macro can be used +//! successfully but also that when the macro is provided with invalid input it +//! produces maximally helpful error messages. Consider using the [`trybuild`] +//! crate to write tests for errors that are emitted by your macro or errors +//! detected by the Rust compiler in the expanded code following misuse of the +//! macro. Such tests help avoid regressions from later refactors that +//! mistakenly make an error no longer trigger or be less helpful than it used +//! to be. +//! +//! [`trybuild`]: https://github.com/dtolnay/trybuild +//! +//! <br> +//! +//! # Debugging +//! +//! When developing a procedural macro it can be helpful to look at what the +//! generated code looks like. Use `cargo rustc -- -Zunstable-options +//! --pretty=expanded` or the [`cargo expand`] subcommand. +//! +//! [`cargo expand`]: https://github.com/dtolnay/cargo-expand +//! +//! To show the expanded code for some crate that uses your procedural macro, +//! run `cargo expand` from that crate. To show the expanded code for one of +//! your own test cases, run `cargo expand --test the_test_case` where the last +//! argument is the name of the test file without the `.rs` extension. +//! +//! This write-up by Brandon W Maister discusses debugging in more detail: +//! 
[Debugging Rust's new Custom Derive system][debugging]. +//! +//! [debugging]: https://quodlibetor.github.io/posts/debugging-rusts-new-custom-derive-system/ +//! +//! <br> +//! +//! # Optional features +//! +//! Syn puts a lot of functionality behind optional features in order to +//! optimize compile time for the most common use cases. The following features +//! are available. +//! +//! - **`derive`** *(enabled by default)* — Data structures for representing the +//! possible input to a derive macro, including structs and enums and types. +//! - **`full`** — Data structures for representing the syntax tree of all valid +//! Rust source code, including items and expressions. +//! - **`parsing`** *(enabled by default)* — Ability to parse input tokens into +//! a syntax tree node of a chosen type. +//! - **`printing`** *(enabled by default)* — Ability to print a syntax tree +//! node as tokens of Rust source code. +//! - **`visit`** — Trait for traversing a syntax tree. +//! - **`visit-mut`** — Trait for traversing and mutating in place a syntax +//! tree. +//! - **`fold`** — Trait for transforming an owned syntax tree. +//! - **`clone-impls`** *(enabled by default)* — Clone impls for all syntax tree +//! types. +//! - **`extra-traits`** — Debug, Eq, PartialEq, Hash impls for all syntax tree +//! types. +//! - **`proc-macro`** *(enabled by default)* — Runtime dependency on the +//! dynamic library libproc_macro from rustc toolchain. + +// Syn types in rustdoc of other crates get linked to here. +#![doc(html_root_url = "https://docs.rs/syn/1.0.107")] +#![cfg_attr(doc_cfg, feature(doc_cfg))] +#![allow(non_camel_case_types)] +#![allow( + clippy::bool_to_int_with_if, + clippy::cast_lossless, + clippy::cast_possible_truncation, + clippy::cast_possible_wrap, + clippy::cast_ptr_alignment, + clippy::default_trait_access, + clippy::doc_markdown, + clippy::expl_impl_clone_on_copy, + clippy::explicit_auto_deref, + clippy::if_not_else, + clippy::inherent_to_string, + clippy::items_after_statements, + clippy::large_enum_variant, + clippy::manual_assert, + clippy::match_on_vec_items, + clippy::match_same_arms, + clippy::match_wildcard_for_single_variants, // clippy bug: https://github.com/rust-lang/rust-clippy/issues/6984 + clippy::missing_errors_doc, + clippy::missing_panics_doc, + clippy::module_name_repetitions, + clippy::must_use_candidate, + clippy::needless_doctest_main, + clippy::needless_pass_by_value, + clippy::never_loop, + clippy::redundant_else, + clippy::return_self_not_must_use, + clippy::similar_names, + clippy::single_match_else, + clippy::too_many_arguments, + clippy::too_many_lines, + clippy::trivially_copy_pass_by_ref, + clippy::unnecessary_unwrap, + clippy::used_underscore_binding, + clippy::wildcard_imports +)] + +#[cfg(all( + not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))), + feature = "proc-macro" +))] +extern crate proc_macro; +extern crate proc_macro2; + +#[cfg(feature = "printing")] +extern crate quote; + +#[macro_use] +mod macros; + +#[cfg(feature = "parsing")] +#[macro_use] +mod group; + +#[macro_use] +pub mod token; + +mod ident; +pub use crate::ident::Ident; + +#[cfg(any(feature = "full", feature = "derive"))] +mod attr; +#[cfg(any(feature = "full", feature = "derive"))] +pub use crate::attr::{ + AttrStyle, Attribute, AttributeArgs, Meta, MetaList, MetaNameValue, NestedMeta, +}; + +mod bigint; + +#[cfg(any(feature = "full", feature = "derive"))] +mod data; +#[cfg(any(feature = "full", feature = "derive"))] +pub use crate::data::{ + Field, 
Fields, FieldsNamed, FieldsUnnamed, Variant, VisCrate, VisPublic, VisRestricted, + Visibility, +}; + +#[cfg(any(feature = "full", feature = "derive"))] +mod expr; +#[cfg(feature = "full")] +pub use crate::expr::{ + Arm, FieldValue, GenericMethodArgument, Label, MethodTurbofish, RangeLimits, +}; +#[cfg(any(feature = "full", feature = "derive"))] +pub use crate::expr::{ + Expr, ExprArray, ExprAssign, ExprAssignOp, ExprAsync, ExprAwait, ExprBinary, ExprBlock, + ExprBox, ExprBreak, ExprCall, ExprCast, ExprClosure, ExprContinue, ExprField, ExprForLoop, + ExprGroup, ExprIf, ExprIndex, ExprLet, ExprLit, ExprLoop, ExprMacro, ExprMatch, ExprMethodCall, + ExprParen, ExprPath, ExprRange, ExprReference, ExprRepeat, ExprReturn, ExprStruct, ExprTry, + ExprTryBlock, ExprTuple, ExprType, ExprUnary, ExprUnsafe, ExprWhile, ExprYield, Index, Member, +}; + +#[cfg(any(feature = "full", feature = "derive"))] +mod generics; +#[cfg(any(feature = "full", feature = "derive"))] +pub use crate::generics::{ + BoundLifetimes, ConstParam, GenericParam, Generics, LifetimeDef, PredicateEq, + PredicateLifetime, PredicateType, TraitBound, TraitBoundModifier, TypeParam, TypeParamBound, + WhereClause, WherePredicate, +}; +#[cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))] +pub use crate::generics::{ImplGenerics, Turbofish, TypeGenerics}; + +#[cfg(feature = "full")] +mod item; +#[cfg(feature = "full")] +pub use crate::item::{ + FnArg, ForeignItem, ForeignItemFn, ForeignItemMacro, ForeignItemStatic, ForeignItemType, + ImplItem, ImplItemConst, ImplItemMacro, ImplItemMethod, ImplItemType, Item, ItemConst, + ItemEnum, ItemExternCrate, ItemFn, ItemForeignMod, ItemImpl, ItemMacro, ItemMacro2, ItemMod, + ItemStatic, ItemStruct, ItemTrait, ItemTraitAlias, ItemType, ItemUnion, ItemUse, Receiver, + Signature, TraitItem, TraitItemConst, TraitItemMacro, TraitItemMethod, TraitItemType, UseGlob, + UseGroup, UseName, UsePath, UseRename, UseTree, +}; + +#[cfg(feature = "full")] +mod file; +#[cfg(feature = "full")] +pub use crate::file::File; + +mod lifetime; +pub use crate::lifetime::Lifetime; + +mod lit; +pub use crate::lit::{ + Lit, LitBool, LitByte, LitByteStr, LitChar, LitFloat, LitInt, LitStr, StrStyle, +}; + +#[cfg(any(feature = "full", feature = "derive"))] +mod mac; +#[cfg(any(feature = "full", feature = "derive"))] +pub use crate::mac::{Macro, MacroDelimiter}; + +#[cfg(any(feature = "full", feature = "derive"))] +mod derive; +#[cfg(feature = "derive")] +pub use crate::derive::{Data, DataEnum, DataStruct, DataUnion, DeriveInput}; + +#[cfg(any(feature = "full", feature = "derive"))] +mod op; +#[cfg(any(feature = "full", feature = "derive"))] +pub use crate::op::{BinOp, UnOp}; + +#[cfg(feature = "full")] +mod stmt; +#[cfg(feature = "full")] +pub use crate::stmt::{Block, Local, Stmt}; + +#[cfg(any(feature = "full", feature = "derive"))] +mod ty; +#[cfg(any(feature = "full", feature = "derive"))] +pub use crate::ty::{ + Abi, BareFnArg, ReturnType, Type, TypeArray, TypeBareFn, TypeGroup, TypeImplTrait, TypeInfer, + TypeMacro, TypeNever, TypeParen, TypePath, TypePtr, TypeReference, TypeSlice, TypeTraitObject, + TypeTuple, Variadic, +}; + +#[cfg(feature = "full")] +mod pat; +#[cfg(feature = "full")] +pub use crate::pat::{ + FieldPat, Pat, PatBox, PatIdent, PatLit, PatMacro, PatOr, PatPath, PatRange, PatReference, + PatRest, PatSlice, PatStruct, PatTuple, PatTupleStruct, PatType, PatWild, +}; + +#[cfg(any(feature = "full", feature = "derive"))] +mod path; +#[cfg(any(feature = "full", feature = "derive"))] +pub use 
crate::path::{ + AngleBracketedGenericArguments, Binding, Constraint, GenericArgument, + ParenthesizedGenericArguments, Path, PathArguments, PathSegment, QSelf, +}; + +#[cfg(feature = "parsing")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] +pub mod buffer; +mod drops; +#[cfg(feature = "parsing")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] +pub mod ext; +pub mod punctuated; +#[cfg(all(any(feature = "full", feature = "derive"), feature = "extra-traits"))] +mod tt; + +// Not public API except the `parse_quote!` macro. +#[cfg(feature = "parsing")] +#[doc(hidden)] +pub mod parse_quote; + +// Not public API except the `parse_macro_input!` macro. +#[cfg(all( + not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))), + feature = "parsing", + feature = "proc-macro" +))] +#[doc(hidden)] +pub mod parse_macro_input; + +#[cfg(all(feature = "parsing", feature = "printing"))] +#[cfg_attr(doc_cfg, doc(cfg(all(feature = "parsing", feature = "printing"))))] +pub mod spanned; + +#[cfg(all(feature = "parsing", feature = "full"))] +mod whitespace; + +mod gen { + /// Syntax tree traversal to walk a shared borrow of a syntax tree. + /// + /// Each method of the [`Visit`] trait is a hook that can be overridden to + /// customize the behavior when visiting the corresponding type of node. By + /// default, every method recursively visits the substructure of the input + /// by invoking the right visitor method of each of its fields. + /// + /// [`Visit`]: visit::Visit + /// + /// ``` + /// # use syn::{Attribute, BinOp, Expr, ExprBinary}; + /// # + /// pub trait Visit<'ast> { + /// /* ... */ + /// + /// fn visit_expr_binary(&mut self, node: &'ast ExprBinary) { + /// visit_expr_binary(self, node); + /// } + /// + /// /* ... */ + /// # fn visit_attribute(&mut self, node: &'ast Attribute); + /// # fn visit_expr(&mut self, node: &'ast Expr); + /// # fn visit_bin_op(&mut self, node: &'ast BinOp); + /// } + /// + /// pub fn visit_expr_binary<'ast, V>(v: &mut V, node: &'ast ExprBinary) + /// where + /// V: Visit<'ast> + ?Sized, + /// { + /// for attr in &node.attrs { + /// v.visit_attribute(attr); + /// } + /// v.visit_expr(&*node.left); + /// v.visit_bin_op(&node.op); + /// v.visit_expr(&*node.right); + /// } + /// + /// /* ... */ + /// ``` + /// + /// *This module is available only if Syn is built with the `"visit"` feature.* + /// + /// <br> + /// + /// # Example + /// + /// This visitor will print the name of every freestanding function in the + /// syntax tree, including nested functions. + /// + /// ``` + /// // [dependencies] + /// // quote = "1.0" + /// // syn = { version = "1.0", features = ["full", "visit"] } + /// + /// use quote::quote; + /// use syn::visit::{self, Visit}; + /// use syn::{File, ItemFn}; + /// + /// struct FnVisitor; + /// + /// impl<'ast> Visit<'ast> for FnVisitor { + /// fn visit_item_fn(&mut self, node: &'ast ItemFn) { + /// println!("Function with name={}", node.sig.ident); + /// + /// // Delegate to the default impl to visit any nested functions. + /// visit::visit_item_fn(self, node); + /// } + /// } + /// + /// fn main() { + /// let code = quote! { + /// pub fn f() { + /// fn g() {} + /// } + /// }; + /// + /// let syntax_tree: File = syn::parse2(code).unwrap(); + /// FnVisitor.visit_file(&syntax_tree); + /// } + /// ``` + /// + /// The `'ast` lifetime on the input references means that the syntax tree + /// outlives the complete recursive visit call, so the visitor is allowed to + /// hold on to references into the syntax tree. 
+ /// + /// ``` + /// use quote::quote; + /// use syn::visit::{self, Visit}; + /// use syn::{File, ItemFn}; + /// + /// struct FnVisitor<'ast> { + /// functions: Vec<&'ast ItemFn>, + /// } + /// + /// impl<'ast> Visit<'ast> for FnVisitor<'ast> { + /// fn visit_item_fn(&mut self, node: &'ast ItemFn) { + /// self.functions.push(node); + /// visit::visit_item_fn(self, node); + /// } + /// } + /// + /// fn main() { + /// let code = quote! { + /// pub fn f() { + /// fn g() {} + /// } + /// }; + /// + /// let syntax_tree: File = syn::parse2(code).unwrap(); + /// let mut visitor = FnVisitor { functions: Vec::new() }; + /// visitor.visit_file(&syntax_tree); + /// for f in visitor.functions { + /// println!("Function with name={}", f.sig.ident); + /// } + /// } + /// ``` + #[cfg(feature = "visit")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "visit")))] + #[rustfmt::skip] + pub mod visit; + + /// Syntax tree traversal to mutate an exclusive borrow of a syntax tree in + /// place. + /// + /// Each method of the [`VisitMut`] trait is a hook that can be overridden + /// to customize the behavior when mutating the corresponding type of node. + /// By default, every method recursively visits the substructure of the + /// input by invoking the right visitor method of each of its fields. + /// + /// [`VisitMut`]: visit_mut::VisitMut + /// + /// ``` + /// # use syn::{Attribute, BinOp, Expr, ExprBinary}; + /// # + /// pub trait VisitMut { + /// /* ... */ + /// + /// fn visit_expr_binary_mut(&mut self, node: &mut ExprBinary) { + /// visit_expr_binary_mut(self, node); + /// } + /// + /// /* ... */ + /// # fn visit_attribute_mut(&mut self, node: &mut Attribute); + /// # fn visit_expr_mut(&mut self, node: &mut Expr); + /// # fn visit_bin_op_mut(&mut self, node: &mut BinOp); + /// } + /// + /// pub fn visit_expr_binary_mut<V>(v: &mut V, node: &mut ExprBinary) + /// where + /// V: VisitMut + ?Sized, + /// { + /// for attr in &mut node.attrs { + /// v.visit_attribute_mut(attr); + /// } + /// v.visit_expr_mut(&mut *node.left); + /// v.visit_bin_op_mut(&mut node.op); + /// v.visit_expr_mut(&mut *node.right); + /// } + /// + /// /* ... */ + /// ``` + /// + /// *This module is available only if Syn is built with the `"visit-mut"` + /// feature.* + /// + /// <br> + /// + /// # Example + /// + /// This mut visitor replace occurrences of u256 suffixed integer literals + /// like `999u256` with a macro invocation `bigint::u256!(999)`. + /// + /// ``` + /// // [dependencies] + /// // quote = "1.0" + /// // syn = { version = "1.0", features = ["full", "visit-mut"] } + /// + /// use quote::quote; + /// use syn::visit_mut::{self, VisitMut}; + /// use syn::{parse_quote, Expr, File, Lit, LitInt}; + /// + /// struct BigintReplace; + /// + /// impl VisitMut for BigintReplace { + /// fn visit_expr_mut(&mut self, node: &mut Expr) { + /// if let Expr::Lit(expr) = &node { + /// if let Lit::Int(int) = &expr.lit { + /// if int.suffix() == "u256" { + /// let digits = int.base10_digits(); + /// let unsuffixed: LitInt = syn::parse_str(digits).unwrap(); + /// *node = parse_quote!(bigint::u256!(#unsuffixed)); + /// return; + /// } + /// } + /// } + /// + /// // Delegate to the default impl to visit nested expressions. + /// visit_mut::visit_expr_mut(self, node); + /// } + /// } + /// + /// fn main() { + /// let code = quote! 
{ + /// fn main() { + /// let _ = 999u256; + /// } + /// }; + /// + /// let mut syntax_tree: File = syn::parse2(code).unwrap(); + /// BigintReplace.visit_file_mut(&mut syntax_tree); + /// println!("{}", quote!(#syntax_tree)); + /// } + /// ``` + #[cfg(feature = "visit-mut")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "visit-mut")))] + #[rustfmt::skip] + pub mod visit_mut; + + /// Syntax tree traversal to transform the nodes of an owned syntax tree. + /// + /// Each method of the [`Fold`] trait is a hook that can be overridden to + /// customize the behavior when transforming the corresponding type of node. + /// By default, every method recursively visits the substructure of the + /// input by invoking the right visitor method of each of its fields. + /// + /// [`Fold`]: fold::Fold + /// + /// ``` + /// # use syn::{Attribute, BinOp, Expr, ExprBinary}; + /// # + /// pub trait Fold { + /// /* ... */ + /// + /// fn fold_expr_binary(&mut self, node: ExprBinary) -> ExprBinary { + /// fold_expr_binary(self, node) + /// } + /// + /// /* ... */ + /// # fn fold_attribute(&mut self, node: Attribute) -> Attribute; + /// # fn fold_expr(&mut self, node: Expr) -> Expr; + /// # fn fold_bin_op(&mut self, node: BinOp) -> BinOp; + /// } + /// + /// pub fn fold_expr_binary<V>(v: &mut V, node: ExprBinary) -> ExprBinary + /// where + /// V: Fold + ?Sized, + /// { + /// ExprBinary { + /// attrs: node + /// .attrs + /// .into_iter() + /// .map(|attr| v.fold_attribute(attr)) + /// .collect(), + /// left: Box::new(v.fold_expr(*node.left)), + /// op: v.fold_bin_op(node.op), + /// right: Box::new(v.fold_expr(*node.right)), + /// } + /// } + /// + /// /* ... */ + /// ``` + /// + /// *This module is available only if Syn is built with the `"fold"` feature.* + /// + /// <br> + /// + /// # Example + /// + /// This fold inserts parentheses to fully parenthesizes any expression. + /// + /// ``` + /// // [dependencies] + /// // quote = "1.0" + /// // syn = { version = "1.0", features = ["fold", "full"] } + /// + /// use quote::quote; + /// use syn::fold::{fold_expr, Fold}; + /// use syn::{token, Expr, ExprParen}; + /// + /// struct ParenthesizeEveryExpr; + /// + /// impl Fold for ParenthesizeEveryExpr { + /// fn fold_expr(&mut self, expr: Expr) -> Expr { + /// Expr::Paren(ExprParen { + /// attrs: Vec::new(), + /// expr: Box::new(fold_expr(self, expr)), + /// paren_token: token::Paren::default(), + /// }) + /// } + /// } + /// + /// fn main() { + /// let code = quote! { a() + b(1) * c.d }; + /// let expr: Expr = syn::parse2(code).unwrap(); + /// let parenthesized = ParenthesizeEveryExpr.fold_expr(expr); + /// println!("{}", quote!(#parenthesized)); + /// + /// // Output: (((a)()) + (((b)((1))) * ((c).d))) + /// } + /// ``` + #[cfg(feature = "fold")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "fold")))] + #[rustfmt::skip] + pub mod fold; + + #[cfg(feature = "clone-impls")] + #[rustfmt::skip] + mod clone; + + #[cfg(feature = "extra-traits")] + #[rustfmt::skip] + mod eq; + + #[cfg(feature = "extra-traits")] + #[rustfmt::skip] + mod hash; + + #[cfg(feature = "extra-traits")] + #[rustfmt::skip] + mod debug; + + #[cfg(any(feature = "full", feature = "derive"))] + #[path = "../gen_helper.rs"] + mod helper; +} +pub use crate::gen::*; + +// Not public API. 
+#[doc(hidden)] +#[path = "export.rs"] +pub mod __private; + +mod custom_keyword; +mod custom_punctuation; +mod sealed; +mod span; +mod thread; + +#[cfg(feature = "parsing")] +mod lookahead; + +#[cfg(feature = "parsing")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] +pub mod parse; + +#[cfg(feature = "full")] +mod reserved; + +#[cfg(all(any(feature = "full", feature = "derive"), feature = "parsing"))] +mod verbatim; + +#[cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))] +mod print; + +//////////////////////////////////////////////////////////////////////////////// + +mod error; +pub use crate::error::{Error, Result}; + +/// Parse tokens of source code into the chosen syntax tree node. +/// +/// This is preferred over parsing a string because tokens are able to preserve +/// information about where in the user's code they were originally written (the +/// "span" of the token), possibly allowing the compiler to produce better error +/// messages. +/// +/// This function parses a `proc_macro::TokenStream` which is the type used for +/// interop with the compiler in a procedural macro. To parse a +/// `proc_macro2::TokenStream`, use [`syn::parse2`] instead. +/// +/// [`syn::parse2`]: parse2 +/// +/// *This function is available only if Syn is built with both the `"parsing"` and +/// `"proc-macro"` features.* +/// +/// # Examples +/// +/// ``` +/// # extern crate proc_macro; +/// # +/// use proc_macro::TokenStream; +/// use quote::quote; +/// use syn::DeriveInput; +/// +/// # const IGNORE_TOKENS: &str = stringify! { +/// #[proc_macro_derive(MyMacro)] +/// # }; +/// pub fn my_macro(input: TokenStream) -> TokenStream { +/// // Parse the tokens into a syntax tree +/// let ast: DeriveInput = syn::parse(input).unwrap(); +/// +/// // Build the output, possibly using quasi-quotation +/// let expanded = quote! { +/// /* ... */ +/// }; +/// +/// // Convert into a token stream and return it +/// expanded.into() +/// } +/// ``` +#[cfg(all( + not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))), + feature = "parsing", + feature = "proc-macro" +))] +#[cfg_attr(doc_cfg, doc(cfg(all(feature = "parsing", feature = "proc-macro"))))] +pub fn parse<T: parse::Parse>(tokens: proc_macro::TokenStream) -> Result<T> { + parse::Parser::parse(T::parse, tokens) +} + +/// Parse a proc-macro2 token stream into the chosen syntax tree node. +/// +/// This function will check that the input is fully parsed. If there are +/// any unparsed tokens at the end of the stream, an error is returned. +/// +/// This function parses a `proc_macro2::TokenStream` which is commonly useful +/// when the input comes from a node of the Syn syntax tree, for example the +/// body tokens of a [`Macro`] node. When in a procedural macro parsing the +/// `proc_macro::TokenStream` provided by the compiler, use [`syn::parse`] +/// instead. +/// +/// [`syn::parse`]: parse() +/// +/// *This function is available only if Syn is built with the `"parsing"` feature.* +#[cfg(feature = "parsing")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] +pub fn parse2<T: parse::Parse>(tokens: proc_macro2::TokenStream) -> Result<T> { + parse::Parser::parse2(T::parse, tokens) +} + +/// Parse a string of Rust code into the chosen syntax tree node. +/// +/// *This function is available only if Syn is built with the `"parsing"` feature.* +/// +/// # Hygiene +/// +/// Every span in the resulting syntax tree will be set to resolve at the macro +/// call site. 
+/// +/// # Examples +/// +/// ``` +/// use syn::{Expr, Result}; +/// +/// fn run() -> Result<()> { +/// let code = "assert_eq!(u8::max_value(), 255)"; +/// let expr = syn::parse_str::<Expr>(code)?; +/// println!("{:#?}", expr); +/// Ok(()) +/// } +/// # +/// # run().unwrap(); +/// ``` +#[cfg(feature = "parsing")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] +pub fn parse_str<T: parse::Parse>(s: &str) -> Result<T> { + parse::Parser::parse_str(T::parse, s) +} + +// FIXME the name parse_file makes it sound like you might pass in a path to a +// file, rather than the content. +/// Parse the content of a file of Rust code. +/// +/// This is different from `syn::parse_str::<File>(content)` in two ways: +/// +/// - It discards a leading byte order mark `\u{FEFF}` if the file has one. +/// - It preserves the shebang line of the file, such as `#!/usr/bin/env rustx`. +/// +/// If present, either of these would be an error using `from_str`. +/// +/// *This function is available only if Syn is built with the `"parsing"` and +/// `"full"` features.* +/// +/// # Examples +/// +/// ```no_run +/// use std::error::Error; +/// use std::fs::File; +/// use std::io::Read; +/// +/// fn run() -> Result<(), Box<Error>> { +/// let mut file = File::open("path/to/code.rs")?; +/// let mut content = String::new(); +/// file.read_to_string(&mut content)?; +/// +/// let ast = syn::parse_file(&content)?; +/// if let Some(shebang) = ast.shebang { +/// println!("{}", shebang); +/// } +/// println!("{} items", ast.items.len()); +/// +/// Ok(()) +/// } +/// # +/// # run().unwrap(); +/// ``` +#[cfg(all(feature = "parsing", feature = "full"))] +#[cfg_attr(doc_cfg, doc(cfg(all(feature = "parsing", feature = "full"))))] +pub fn parse_file(mut content: &str) -> Result<File> { + // Strip the BOM if it is present + const BOM: &str = "\u{feff}"; + if content.starts_with(BOM) { + content = &content[BOM.len()..]; + } + + let mut shebang = None; + if content.starts_with("#!") { + let rest = whitespace::skip(&content[2..]); + if !rest.starts_with('[') { + if let Some(idx) = content.find('\n') { + shebang = Some(content[..idx].to_string()); + content = &content[idx..]; + } else { + shebang = Some(content.to_string()); + content = ""; + } + } + } + + let mut file: File = parse_str(content)?; + file.shebang = shebang; + Ok(file) +} diff --git a/third_party/rust/syn/src/lifetime.rs b/third_party/rust/syn/src/lifetime.rs new file mode 100644 index 0000000000..5dc1753a84 --- /dev/null +++ b/third_party/rust/syn/src/lifetime.rs @@ -0,0 +1,154 @@ +use proc_macro2::{Ident, Span}; +use std::cmp::Ordering; +use std::fmt::{self, Display}; +use std::hash::{Hash, Hasher}; + +#[cfg(feature = "parsing")] +use crate::lookahead; + +/// A Rust lifetime: `'a`. +/// +/// Lifetime names must conform to the following rules: +/// +/// - Must start with an apostrophe. +/// - Must not consist of just an apostrophe: `'`. +/// - Character after the apostrophe must be `_` or a Unicode code point with +/// the XID_Start property. +/// - All following characters must be Unicode code points with the XID_Continue +/// property. +pub struct Lifetime { + pub apostrophe: Span, + pub ident: Ident, +} + +impl Lifetime { + /// # Panics + /// + /// Panics if the lifetime does not conform to the bulleted rules above. 
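A short sketch of the panic rules listed for `Lifetime::new`, assuming syn 1.0 (the lifetime names are illustrative):

```
use proc_macro2::Span;
use syn::Lifetime;

fn main() {
    // Valid: an apostrophe followed by an XID_Start character.
    let a = Lifetime::new("'a", Span::call_site());
    assert_eq!(a.ident, "a");

    // Each of the following would panic per the rules above:
    // Lifetime::new("a", Span::call_site());   // missing apostrophe
    // Lifetime::new("'", Span::call_site());   // nothing after the apostrophe
    // Lifetime::new("'1a", Span::call_site()); // `1` is not XID_Start
}
```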
+ /// + /// # Invocation + /// + /// ``` + /// # use proc_macro2::Span; + /// # use syn::Lifetime; + /// # + /// # fn f() -> Lifetime { + /// Lifetime::new("'a", Span::call_site()) + /// # } + /// ``` + pub fn new(symbol: &str, span: Span) -> Self { + if !symbol.starts_with('\'') { + panic!( + "lifetime name must start with apostrophe as in \"'a\", got {:?}", + symbol + ); + } + + if symbol == "'" { + panic!("lifetime name must not be empty"); + } + + if !crate::ident::xid_ok(&symbol[1..]) { + panic!("{:?} is not a valid lifetime name", symbol); + } + + Lifetime { + apostrophe: span, + ident: Ident::new(&symbol[1..], span), + } + } + + pub fn span(&self) -> Span { + self.apostrophe + .join(self.ident.span()) + .unwrap_or(self.apostrophe) + } + + pub fn set_span(&mut self, span: Span) { + self.apostrophe = span; + self.ident.set_span(span); + } +} + +impl Display for Lifetime { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + "'".fmt(formatter)?; + self.ident.fmt(formatter) + } +} + +impl Clone for Lifetime { + fn clone(&self) -> Self { + Lifetime { + apostrophe: self.apostrophe, + ident: self.ident.clone(), + } + } +} + +impl PartialEq for Lifetime { + fn eq(&self, other: &Lifetime) -> bool { + self.ident.eq(&other.ident) + } +} + +impl Eq for Lifetime {} + +impl PartialOrd for Lifetime { + fn partial_cmp(&self, other: &Lifetime) -> Option<Ordering> { + Some(self.cmp(other)) + } +} + +impl Ord for Lifetime { + fn cmp(&self, other: &Lifetime) -> Ordering { + self.ident.cmp(&other.ident) + } +} + +impl Hash for Lifetime { + fn hash<H: Hasher>(&self, h: &mut H) { + self.ident.hash(h); + } +} + +#[cfg(feature = "parsing")] +#[doc(hidden)] +#[allow(non_snake_case)] +pub fn Lifetime(marker: lookahead::TokenMarker) -> Lifetime { + match marker {} +} + +#[cfg(feature = "parsing")] +pub mod parsing { + use super::*; + use crate::parse::{Parse, ParseStream, Result}; + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for Lifetime { + fn parse(input: ParseStream) -> Result<Self> { + input.step(|cursor| { + cursor + .lifetime() + .ok_or_else(|| cursor.error("expected lifetime")) + }) + } + } +} + +#[cfg(feature = "printing")] +mod printing { + use super::*; + use proc_macro2::{Punct, Spacing, TokenStream}; + use quote::{ToTokens, TokenStreamExt}; + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for Lifetime { + fn to_tokens(&self, tokens: &mut TokenStream) { + let mut apostrophe = Punct::new('\'', Spacing::Joint); + apostrophe.set_span(self.apostrophe); + tokens.append(apostrophe); + self.ident.to_tokens(tokens); + } + } +} diff --git a/third_party/rust/syn/src/lit.rs b/third_party/rust/syn/src/lit.rs new file mode 100644 index 0000000000..130b40ed1f --- /dev/null +++ b/third_party/rust/syn/src/lit.rs @@ -0,0 +1,1600 @@ +#[cfg(feature = "parsing")] +use crate::lookahead; +#[cfg(feature = "parsing")] +use crate::parse::{Parse, Parser}; +use crate::{Error, Result}; +use proc_macro2::{Ident, Literal, Span}; +#[cfg(feature = "parsing")] +use proc_macro2::{TokenStream, TokenTree}; +use std::fmt::{self, Display}; +#[cfg(feature = "extra-traits")] +use std::hash::{Hash, Hasher}; +use std::str::{self, FromStr}; + +ast_enum_of_structs! { + /// A Rust literal such as a string or integer or boolean. + /// + /// # Syntax tree enum + /// + /// This type is a [syntax tree enum]. + /// + /// [syntax tree enum]: crate::Expr#syntax-tree-enums + pub enum Lit { + /// A UTF-8 string literal: `"foo"`. + Str(LitStr), + + /// A byte string literal: `b"foo"`. 
+ ByteStr(LitByteStr), + + /// A byte literal: `b'f'`. + Byte(LitByte), + + /// A character literal: `'a'`. + Char(LitChar), + + /// An integer literal: `1` or `1u16`. + Int(LitInt), + + /// A floating point literal: `1f64` or `1.0e10f64`. + /// + /// Must be finite. May not be infinite or NaN. + Float(LitFloat), + + /// A boolean literal: `true` or `false`. + Bool(LitBool), + + /// A raw token literal not interpreted by Syn. + Verbatim(Literal), + } +} + +ast_struct! { + /// A UTF-8 string literal: `"foo"`. + pub struct LitStr { + repr: Box<LitRepr>, + } +} + +ast_struct! { + /// A byte string literal: `b"foo"`. + pub struct LitByteStr { + repr: Box<LitRepr>, + } +} + +ast_struct! { + /// A byte literal: `b'f'`. + pub struct LitByte { + repr: Box<LitRepr>, + } +} + +ast_struct! { + /// A character literal: `'a'`. + pub struct LitChar { + repr: Box<LitRepr>, + } +} + +struct LitRepr { + token: Literal, + suffix: Box<str>, +} + +ast_struct! { + /// An integer literal: `1` or `1u16`. + pub struct LitInt { + repr: Box<LitIntRepr>, + } +} + +struct LitIntRepr { + token: Literal, + digits: Box<str>, + suffix: Box<str>, +} + +ast_struct! { + /// A floating point literal: `1f64` or `1.0e10f64`. + /// + /// Must be finite. May not be infinite or NaN. + pub struct LitFloat { + repr: Box<LitFloatRepr>, + } +} + +struct LitFloatRepr { + token: Literal, + digits: Box<str>, + suffix: Box<str>, +} + +ast_struct! { + /// A boolean literal: `true` or `false`. + pub struct LitBool { + pub value: bool, + pub span: Span, + } +} + +impl LitStr { + pub fn new(value: &str, span: Span) -> Self { + let mut token = Literal::string(value); + token.set_span(span); + LitStr { + repr: Box::new(LitRepr { + token, + suffix: Box::<str>::default(), + }), + } + } + + pub fn value(&self) -> String { + let repr = self.repr.token.to_string(); + let (value, _suffix) = value::parse_lit_str(&repr); + String::from(value) + } + + /// Parse a syntax tree node from the content of this string literal. + /// + /// All spans in the syntax tree will point to the span of this `LitStr`. + /// + /// # Example + /// + /// ``` + /// use proc_macro2::Span; + /// use syn::{Attribute, Error, Ident, Lit, Meta, MetaNameValue, Path, Result}; + /// + /// // Parses the path from an attribute that looks like: + /// // + /// // #[path = "a::b::c"] + /// // + /// // or returns `None` if the input is some other attribute. + /// fn get_path(attr: &Attribute) -> Result<Option<Path>> { + /// if !attr.path.is_ident("path") { + /// return Ok(None); + /// } + /// + /// match attr.parse_meta()? { + /// Meta::NameValue(MetaNameValue { lit: Lit::Str(lit_str), .. }) => { + /// lit_str.parse().map(Some) + /// } + /// _ => { + /// let message = "expected #[path = \"...\"]"; + /// Err(Error::new_spanned(attr, message)) + /// } + /// } + /// } + /// ``` + #[cfg(feature = "parsing")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + pub fn parse<T: Parse>(&self) -> Result<T> { + self.parse_with(T::parse) + } + + /// Invoke parser on the content of this string literal. + /// + /// All spans in the syntax tree will point to the span of this `LitStr`. + /// + /// # Example + /// + /// ``` + /// # use proc_macro2::Span; + /// # use syn::{LitStr, Result}; + /// # + /// # fn main() -> Result<()> { + /// # let lit_str = LitStr::new("a::b::c", Span::call_site()); + /// # + /// # const IGNORE: &str = stringify! { + /// let lit_str: LitStr = /* ... 
*/; + /// # }; + /// + /// // Parse a string literal like "a::b::c" into a Path, not allowing + /// // generic arguments on any of the path segments. + /// let basic_path = lit_str.parse_with(syn::Path::parse_mod_style)?; + /// # + /// # Ok(()) + /// # } + /// ``` + #[cfg(feature = "parsing")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + pub fn parse_with<F: Parser>(&self, parser: F) -> Result<F::Output> { + use proc_macro2::Group; + + // Token stream with every span replaced by the given one. + fn respan_token_stream(stream: TokenStream, span: Span) -> TokenStream { + stream + .into_iter() + .map(|token| respan_token_tree(token, span)) + .collect() + } + + // Token tree with every span replaced by the given one. + fn respan_token_tree(mut token: TokenTree, span: Span) -> TokenTree { + match &mut token { + TokenTree::Group(g) => { + let stream = respan_token_stream(g.stream(), span); + *g = Group::new(g.delimiter(), stream); + g.set_span(span); + } + other => other.set_span(span), + } + token + } + + // Parse string literal into a token stream with every span equal to the + // original literal's span. + let mut tokens = TokenStream::from_str(&self.value())?; + tokens = respan_token_stream(tokens, self.span()); + + parser.parse2(tokens) + } + + pub fn span(&self) -> Span { + self.repr.token.span() + } + + pub fn set_span(&mut self, span: Span) { + self.repr.token.set_span(span); + } + + pub fn suffix(&self) -> &str { + &self.repr.suffix + } + + pub fn token(&self) -> Literal { + self.repr.token.clone() + } +} + +impl LitByteStr { + pub fn new(value: &[u8], span: Span) -> Self { + let mut token = Literal::byte_string(value); + token.set_span(span); + LitByteStr { + repr: Box::new(LitRepr { + token, + suffix: Box::<str>::default(), + }), + } + } + + pub fn value(&self) -> Vec<u8> { + let repr = self.repr.token.to_string(); + let (value, _suffix) = value::parse_lit_byte_str(&repr); + value + } + + pub fn span(&self) -> Span { + self.repr.token.span() + } + + pub fn set_span(&mut self, span: Span) { + self.repr.token.set_span(span); + } + + pub fn suffix(&self) -> &str { + &self.repr.suffix + } + + pub fn token(&self) -> Literal { + self.repr.token.clone() + } +} + +impl LitByte { + pub fn new(value: u8, span: Span) -> Self { + let mut token = Literal::u8_suffixed(value); + token.set_span(span); + LitByte { + repr: Box::new(LitRepr { + token, + suffix: Box::<str>::default(), + }), + } + } + + pub fn value(&self) -> u8 { + let repr = self.repr.token.to_string(); + let (value, _suffix) = value::parse_lit_byte(&repr); + value + } + + pub fn span(&self) -> Span { + self.repr.token.span() + } + + pub fn set_span(&mut self, span: Span) { + self.repr.token.set_span(span); + } + + pub fn suffix(&self) -> &str { + &self.repr.suffix + } + + pub fn token(&self) -> Literal { + self.repr.token.clone() + } +} + +impl LitChar { + pub fn new(value: char, span: Span) -> Self { + let mut token = Literal::character(value); + token.set_span(span); + LitChar { + repr: Box::new(LitRepr { + token, + suffix: Box::<str>::default(), + }), + } + } + + pub fn value(&self) -> char { + let repr = self.repr.token.to_string(); + let (value, _suffix) = value::parse_lit_char(&repr); + value + } + + pub fn span(&self) -> Span { + self.repr.token.span() + } + + pub fn set_span(&mut self, span: Span) { + self.repr.token.set_span(span); + } + + pub fn suffix(&self) -> &str { + &self.repr.suffix + } + + pub fn token(&self) -> Literal { + self.repr.token.clone() + } +} + +impl LitInt { + pub fn new(repr: &str, span: Span) 
-> Self { + let (digits, suffix) = match value::parse_lit_int(repr) { + Some(parse) => parse, + None => panic!("Not an integer literal: `{}`", repr), + }; + + let mut token = match value::to_literal(repr, &digits, &suffix) { + Some(token) => token, + None => panic!("Unsupported integer literal: `{}`", repr), + }; + + token.set_span(span); + LitInt { + repr: Box::new(LitIntRepr { + token, + digits, + suffix, + }), + } + } + + pub fn base10_digits(&self) -> &str { + &self.repr.digits + } + + /// Parses the literal into a selected number type. + /// + /// This is equivalent to `lit.base10_digits().parse()` except that the + /// resulting errors will be correctly spanned to point to the literal token + /// in the macro input. + /// + /// ``` + /// use syn::LitInt; + /// use syn::parse::{Parse, ParseStream, Result}; + /// + /// struct Port { + /// value: u16, + /// } + /// + /// impl Parse for Port { + /// fn parse(input: ParseStream) -> Result<Self> { + /// let lit: LitInt = input.parse()?; + /// let value = lit.base10_parse::<u16>()?; + /// Ok(Port { value }) + /// } + /// } + /// ``` + pub fn base10_parse<N>(&self) -> Result<N> + where + N: FromStr, + N::Err: Display, + { + self.base10_digits() + .parse() + .map_err(|err| Error::new(self.span(), err)) + } + + pub fn suffix(&self) -> &str { + &self.repr.suffix + } + + pub fn span(&self) -> Span { + self.repr.token.span() + } + + pub fn set_span(&mut self, span: Span) { + self.repr.token.set_span(span); + } + + pub fn token(&self) -> Literal { + self.repr.token.clone() + } +} + +impl From<Literal> for LitInt { + fn from(token: Literal) -> Self { + let repr = token.to_string(); + if let Some((digits, suffix)) = value::parse_lit_int(&repr) { + LitInt { + repr: Box::new(LitIntRepr { + token, + digits, + suffix, + }), + } + } else { + panic!("Not an integer literal: `{}`", repr); + } + } +} + +impl Display for LitInt { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + self.repr.token.fmt(formatter) + } +} + +impl LitFloat { + pub fn new(repr: &str, span: Span) -> Self { + let (digits, suffix) = match value::parse_lit_float(repr) { + Some(parse) => parse, + None => panic!("Not a float literal: `{}`", repr), + }; + + let mut token = match value::to_literal(repr, &digits, &suffix) { + Some(token) => token, + None => panic!("Unsupported float literal: `{}`", repr), + }; + + token.set_span(span); + LitFloat { + repr: Box::new(LitFloatRepr { + token, + digits, + suffix, + }), + } + } + + pub fn base10_digits(&self) -> &str { + &self.repr.digits + } + + pub fn base10_parse<N>(&self) -> Result<N> + where + N: FromStr, + N::Err: Display, + { + self.base10_digits() + .parse() + .map_err(|err| Error::new(self.span(), err)) + } + + pub fn suffix(&self) -> &str { + &self.repr.suffix + } + + pub fn span(&self) -> Span { + self.repr.token.span() + } + + pub fn set_span(&mut self, span: Span) { + self.repr.token.set_span(span); + } + + pub fn token(&self) -> Literal { + self.repr.token.clone() + } +} + +impl From<Literal> for LitFloat { + fn from(token: Literal) -> Self { + let repr = token.to_string(); + if let Some((digits, suffix)) = value::parse_lit_float(&repr) { + LitFloat { + repr: Box::new(LitFloatRepr { + token, + digits, + suffix, + }), + } + } else { + panic!("Not a float literal: `{}`", repr); + } + } +} + +impl Display for LitFloat { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + self.repr.token.fmt(formatter) + } +} + +impl LitBool { + pub fn new(value: bool, span: Span) -> Self { + LitBool { value, span } + } + 
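`LitFloat::base10_parse` mirrors the `LitInt::base10_parse` example shown above but has no inline example of its own. A minimal sketch, assuming syn 1.0 default features (the literal `1_000.5f32` is illustrative):

```
use proc_macro2::Span;
use syn::LitFloat;

fn main() {
    // Digits are normalized to base 10 with underscores removed,
    // and the suffix is kept separately.
    let lit = LitFloat::new("1_000.5f32", Span::call_site());
    assert_eq!(lit.base10_digits(), "1000.5");
    assert_eq!(lit.suffix(), "f32");

    let value: f32 = lit.base10_parse().unwrap();
    assert_eq!(value, 1000.5);
}
```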
+ pub fn value(&self) -> bool { + self.value + } + + pub fn span(&self) -> Span { + self.span + } + + pub fn set_span(&mut self, span: Span) { + self.span = span; + } + + pub fn token(&self) -> Ident { + let s = if self.value { "true" } else { "false" }; + Ident::new(s, self.span) + } +} + +#[cfg(feature = "extra-traits")] +mod debug_impls { + use super::*; + use std::fmt::{self, Debug}; + + #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + impl Debug for LitStr { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter + .debug_struct("LitStr") + .field("token", &format_args!("{}", self.repr.token)) + .finish() + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + impl Debug for LitByteStr { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter + .debug_struct("LitByteStr") + .field("token", &format_args!("{}", self.repr.token)) + .finish() + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + impl Debug for LitByte { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter + .debug_struct("LitByte") + .field("token", &format_args!("{}", self.repr.token)) + .finish() + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + impl Debug for LitChar { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter + .debug_struct("LitChar") + .field("token", &format_args!("{}", self.repr.token)) + .finish() + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + impl Debug for LitInt { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter + .debug_struct("LitInt") + .field("token", &format_args!("{}", self.repr.token)) + .finish() + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + impl Debug for LitFloat { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter + .debug_struct("LitFloat") + .field("token", &format_args!("{}", self.repr.token)) + .finish() + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + impl Debug for LitBool { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter + .debug_struct("LitBool") + .field("value", &self.value) + .finish() + } + } +} + +#[cfg(feature = "clone-impls")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for LitRepr { + fn clone(&self) -> Self { + LitRepr { + token: self.token.clone(), + suffix: self.suffix.clone(), + } + } +} + +#[cfg(feature = "clone-impls")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for LitIntRepr { + fn clone(&self) -> Self { + LitIntRepr { + token: self.token.clone(), + digits: self.digits.clone(), + suffix: self.suffix.clone(), + } + } +} + +#[cfg(feature = "clone-impls")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for LitFloatRepr { + fn clone(&self) -> Self { + LitFloatRepr { + token: self.token.clone(), + digits: self.digits.clone(), + suffix: self.suffix.clone(), + } + } +} + +macro_rules! 
lit_extra_traits { + ($ty:ident) => { + #[cfg(feature = "clone-impls")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] + impl Clone for $ty { + fn clone(&self) -> Self { + $ty { + repr: self.repr.clone(), + } + } + } + + #[cfg(feature = "extra-traits")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + impl PartialEq for $ty { + fn eq(&self, other: &Self) -> bool { + self.repr.token.to_string() == other.repr.token.to_string() + } + } + + #[cfg(feature = "extra-traits")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + impl Hash for $ty { + fn hash<H>(&self, state: &mut H) + where + H: Hasher, + { + self.repr.token.to_string().hash(state); + } + } + + #[cfg(feature = "parsing")] + #[doc(hidden)] + #[allow(non_snake_case)] + pub fn $ty(marker: lookahead::TokenMarker) -> $ty { + match marker {} + } + }; +} + +lit_extra_traits!(LitStr); +lit_extra_traits!(LitByteStr); +lit_extra_traits!(LitByte); +lit_extra_traits!(LitChar); +lit_extra_traits!(LitInt); +lit_extra_traits!(LitFloat); + +#[cfg(feature = "parsing")] +#[doc(hidden)] +#[allow(non_snake_case)] +pub fn LitBool(marker: lookahead::TokenMarker) -> LitBool { + match marker {} +} + +ast_enum! { + /// The style of a string literal, either plain quoted or a raw string like + /// `r##"data"##`. + pub enum StrStyle #no_visit { + /// An ordinary string like `"data"`. + Cooked, + /// A raw string like `r##"data"##`. + /// + /// The unsigned integer is the number of `#` symbols used. + Raw(usize), + } +} + +#[cfg(feature = "parsing")] +#[doc(hidden)] +#[allow(non_snake_case)] +pub fn Lit(marker: lookahead::TokenMarker) -> Lit { + match marker {} +} + +#[cfg(feature = "parsing")] +pub mod parsing { + use super::*; + use crate::buffer::Cursor; + use crate::parse::{Parse, ParseStream, Result}; + use proc_macro2::Punct; + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for Lit { + fn parse(input: ParseStream) -> Result<Self> { + input.step(|cursor| { + if let Some((lit, rest)) = cursor.literal() { + return Ok((Lit::new(lit), rest)); + } + + if let Some((ident, rest)) = cursor.ident() { + let value = ident == "true"; + if value || ident == "false" { + let lit_bool = LitBool { + value, + span: ident.span(), + }; + return Ok((Lit::Bool(lit_bool), rest)); + } + } + + if let Some((punct, rest)) = cursor.punct() { + if punct.as_char() == '-' { + if let Some((lit, rest)) = parse_negative_lit(punct, rest) { + return Ok((lit, rest)); + } + } + } + + Err(cursor.error("expected literal")) + }) + } + } + + fn parse_negative_lit(neg: Punct, cursor: Cursor) -> Option<(Lit, Cursor)> { + let (lit, rest) = cursor.literal()?; + + let mut span = neg.span(); + span = span.join(lit.span()).unwrap_or(span); + + let mut repr = lit.to_string(); + repr.insert(0, '-'); + + if let Some((digits, suffix)) = value::parse_lit_int(&repr) { + if let Some(mut token) = value::to_literal(&repr, &digits, &suffix) { + token.set_span(span); + return Some(( + Lit::Int(LitInt { + repr: Box::new(LitIntRepr { + token, + digits, + suffix, + }), + }), + rest, + )); + } + } + + let (digits, suffix) = value::parse_lit_float(&repr)?; + let mut token = value::to_literal(&repr, &digits, &suffix)?; + token.set_span(span); + Some(( + Lit::Float(LitFloat { + repr: Box::new(LitFloatRepr { + token, + digits, + suffix, + }), + }), + rest, + )) + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for LitStr { + fn parse(input: ParseStream) -> Result<Self> { + let head = input.fork(); + match input.parse() { + Ok(Lit::Str(lit)) => 
Ok(lit), + _ => Err(head.error("expected string literal")), + } + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for LitByteStr { + fn parse(input: ParseStream) -> Result<Self> { + let head = input.fork(); + match input.parse() { + Ok(Lit::ByteStr(lit)) => Ok(lit), + _ => Err(head.error("expected byte string literal")), + } + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for LitByte { + fn parse(input: ParseStream) -> Result<Self> { + let head = input.fork(); + match input.parse() { + Ok(Lit::Byte(lit)) => Ok(lit), + _ => Err(head.error("expected byte literal")), + } + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for LitChar { + fn parse(input: ParseStream) -> Result<Self> { + let head = input.fork(); + match input.parse() { + Ok(Lit::Char(lit)) => Ok(lit), + _ => Err(head.error("expected character literal")), + } + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for LitInt { + fn parse(input: ParseStream) -> Result<Self> { + let head = input.fork(); + match input.parse() { + Ok(Lit::Int(lit)) => Ok(lit), + _ => Err(head.error("expected integer literal")), + } + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for LitFloat { + fn parse(input: ParseStream) -> Result<Self> { + let head = input.fork(); + match input.parse() { + Ok(Lit::Float(lit)) => Ok(lit), + _ => Err(head.error("expected floating point literal")), + } + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for LitBool { + fn parse(input: ParseStream) -> Result<Self> { + let head = input.fork(); + match input.parse() { + Ok(Lit::Bool(lit)) => Ok(lit), + _ => Err(head.error("expected boolean literal")), + } + } + } +} + +#[cfg(feature = "printing")] +mod printing { + use super::*; + use proc_macro2::TokenStream; + use quote::{ToTokens, TokenStreamExt}; + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for LitStr { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.repr.token.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for LitByteStr { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.repr.token.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for LitByte { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.repr.token.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for LitChar { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.repr.token.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for LitInt { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.repr.token.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for LitFloat { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.repr.token.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for LitBool { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append(self.token()); + } + } +} + +mod value { + use super::*; + use crate::bigint::BigInt; + use std::char; + use std::ops::{Index, RangeFrom}; + + impl Lit { + /// Interpret a Syn literal from a proc-macro2 literal. 
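A sketch of the classification that `Lit::new` (immediately below) performs on a raw `proc_macro2::Literal`, assuming syn 1.0 default features (the literal values are illustrative):

```
use proc_macro2::Literal;
use syn::Lit;

fn main() {
    // Classify a raw proc-macro2 literal into the matching syn variant.
    match Lit::new(Literal::string("hello")) {
        Lit::Str(s) => assert_eq!(s.value(), "hello"),
        _ => unreachable!(),
    }

    match Lit::new(Literal::i32_suffixed(42)) {
        Lit::Int(i) => {
            assert_eq!(i.base10_digits(), "42");
            assert_eq!(i.suffix(), "i32");
        }
        _ => unreachable!(),
    }
}
```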
+ pub fn new(token: Literal) -> Self { + let repr = token.to_string(); + + match byte(&repr, 0) { + b'"' | b'r' => { + let (_, suffix) = parse_lit_str(&repr); + return Lit::Str(LitStr { + repr: Box::new(LitRepr { token, suffix }), + }); + } + b'b' => match byte(&repr, 1) { + b'"' | b'r' => { + let (_, suffix) = parse_lit_byte_str(&repr); + return Lit::ByteStr(LitByteStr { + repr: Box::new(LitRepr { token, suffix }), + }); + } + b'\'' => { + let (_, suffix) = parse_lit_byte(&repr); + return Lit::Byte(LitByte { + repr: Box::new(LitRepr { token, suffix }), + }); + } + _ => {} + }, + b'\'' => { + let (_, suffix) = parse_lit_char(&repr); + return Lit::Char(LitChar { + repr: Box::new(LitRepr { token, suffix }), + }); + } + b'0'..=b'9' | b'-' => { + if let Some((digits, suffix)) = parse_lit_int(&repr) { + return Lit::Int(LitInt { + repr: Box::new(LitIntRepr { + token, + digits, + suffix, + }), + }); + } + if let Some((digits, suffix)) = parse_lit_float(&repr) { + return Lit::Float(LitFloat { + repr: Box::new(LitFloatRepr { + token, + digits, + suffix, + }), + }); + } + } + b't' | b'f' => { + if repr == "true" || repr == "false" { + return Lit::Bool(LitBool { + value: repr == "true", + span: token.span(), + }); + } + } + _ => {} + } + + panic!("Unrecognized literal: `{}`", repr); + } + + pub fn suffix(&self) -> &str { + match self { + Lit::Str(lit) => lit.suffix(), + Lit::ByteStr(lit) => lit.suffix(), + Lit::Byte(lit) => lit.suffix(), + Lit::Char(lit) => lit.suffix(), + Lit::Int(lit) => lit.suffix(), + Lit::Float(lit) => lit.suffix(), + Lit::Bool(_) | Lit::Verbatim(_) => "", + } + } + + pub fn span(&self) -> Span { + match self { + Lit::Str(lit) => lit.span(), + Lit::ByteStr(lit) => lit.span(), + Lit::Byte(lit) => lit.span(), + Lit::Char(lit) => lit.span(), + Lit::Int(lit) => lit.span(), + Lit::Float(lit) => lit.span(), + Lit::Bool(lit) => lit.span, + Lit::Verbatim(lit) => lit.span(), + } + } + + pub fn set_span(&mut self, span: Span) { + match self { + Lit::Str(lit) => lit.set_span(span), + Lit::ByteStr(lit) => lit.set_span(span), + Lit::Byte(lit) => lit.set_span(span), + Lit::Char(lit) => lit.set_span(span), + Lit::Int(lit) => lit.set_span(span), + Lit::Float(lit) => lit.set_span(span), + Lit::Bool(lit) => lit.span = span, + Lit::Verbatim(lit) => lit.set_span(span), + } + } + } + + /// Get the byte at offset idx, or a default of `b'\0'` if we're looking + /// past the end of the input buffer. + pub fn byte<S: AsRef<[u8]> + ?Sized>(s: &S, idx: usize) -> u8 { + let s = s.as_ref(); + if idx < s.len() { + s[idx] + } else { + 0 + } + } + + fn next_chr(s: &str) -> char { + s.chars().next().unwrap_or('\0') + } + + // Returns (content, suffix). 
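The digit and suffix normalization performed by `Lit::new` and the `parse_lit_*` helpers below is observable through the public `LitInt` API. A minimal sketch, assuming syn 1.0 with the `"parsing"` feature (the literals are illustrative):

```
use syn::{Lit, LitInt};

fn main() -> syn::Result<()> {
    // A leading `-` is folded into the literal while parsing, so the
    // base-10 digits carry the sign.
    let negative: LitInt = syn::parse_str("-7i8")?;
    assert_eq!(negative.base10_digits(), "-7");
    assert_eq!(negative.suffix(), "i8");
    assert_eq!(negative.base10_parse::<i8>()?, -7);

    // Hexadecimal digits are normalized to base 10.
    if let Lit::Int(hex) = syn::parse_str::<Lit>("0xFFu16")? {
        assert_eq!(hex.base10_digits(), "255");
        assert_eq!(hex.suffix(), "u16");
    }

    Ok(())
}
```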
+ pub fn parse_lit_str(s: &str) -> (Box<str>, Box<str>) { + match byte(s, 0) { + b'"' => parse_lit_str_cooked(s), + b'r' => parse_lit_str_raw(s), + _ => unreachable!(), + } + } + + // Clippy false positive + // https://github.com/rust-lang-nursery/rust-clippy/issues/2329 + #[allow(clippy::needless_continue)] + fn parse_lit_str_cooked(mut s: &str) -> (Box<str>, Box<str>) { + assert_eq!(byte(s, 0), b'"'); + s = &s[1..]; + + let mut content = String::new(); + 'outer: loop { + let ch = match byte(s, 0) { + b'"' => break, + b'\\' => { + let b = byte(s, 1); + s = &s[2..]; + match b { + b'x' => { + let (byte, rest) = backslash_x(s); + s = rest; + assert!(byte <= 0x80, "Invalid \\x byte in string literal"); + char::from_u32(u32::from(byte)).unwrap() + } + b'u' => { + let (chr, rest) = backslash_u(s); + s = rest; + chr + } + b'n' => '\n', + b'r' => '\r', + b't' => '\t', + b'\\' => '\\', + b'0' => '\0', + b'\'' => '\'', + b'"' => '"', + b'\r' | b'\n' => loop { + let ch = next_chr(s); + if ch.is_whitespace() { + s = &s[ch.len_utf8()..]; + } else { + continue 'outer; + } + }, + b => panic!("unexpected byte {:?} after \\ character in byte literal", b), + } + } + b'\r' => { + assert_eq!(byte(s, 1), b'\n', "Bare CR not allowed in string"); + s = &s[2..]; + '\n' + } + _ => { + let ch = next_chr(s); + s = &s[ch.len_utf8()..]; + ch + } + }; + content.push(ch); + } + + assert!(s.starts_with('"')); + let content = content.into_boxed_str(); + let suffix = s[1..].to_owned().into_boxed_str(); + (content, suffix) + } + + fn parse_lit_str_raw(mut s: &str) -> (Box<str>, Box<str>) { + assert_eq!(byte(s, 0), b'r'); + s = &s[1..]; + + let mut pounds = 0; + while byte(s, pounds) == b'#' { + pounds += 1; + } + assert_eq!(byte(s, pounds), b'"'); + let close = s.rfind('"').unwrap(); + for end in s[close + 1..close + 1 + pounds].bytes() { + assert_eq!(end, b'#'); + } + + let content = s[pounds + 1..close].to_owned().into_boxed_str(); + let suffix = s[close + 1 + pounds..].to_owned().into_boxed_str(); + (content, suffix) + } + + // Returns (content, suffix). + pub fn parse_lit_byte_str(s: &str) -> (Vec<u8>, Box<str>) { + assert_eq!(byte(s, 0), b'b'); + match byte(s, 1) { + b'"' => parse_lit_byte_str_cooked(s), + b'r' => parse_lit_byte_str_raw(s), + _ => unreachable!(), + } + } + + // Clippy false positive + // https://github.com/rust-lang-nursery/rust-clippy/issues/2329 + #[allow(clippy::needless_continue)] + fn parse_lit_byte_str_cooked(mut s: &str) -> (Vec<u8>, Box<str>) { + assert_eq!(byte(s, 0), b'b'); + assert_eq!(byte(s, 1), b'"'); + s = &s[2..]; + + // We're going to want to have slices which don't respect codepoint boundaries. 
+ let mut v = s.as_bytes(); + + let mut out = Vec::new(); + 'outer: loop { + let byte = match byte(v, 0) { + b'"' => break, + b'\\' => { + let b = byte(v, 1); + v = &v[2..]; + match b { + b'x' => { + let (b, rest) = backslash_x(v); + v = rest; + b + } + b'n' => b'\n', + b'r' => b'\r', + b't' => b'\t', + b'\\' => b'\\', + b'0' => b'\0', + b'\'' => b'\'', + b'"' => b'"', + b'\r' | b'\n' => loop { + let byte = byte(v, 0); + let ch = char::from_u32(u32::from(byte)).unwrap(); + if ch.is_whitespace() { + v = &v[1..]; + } else { + continue 'outer; + } + }, + b => panic!("unexpected byte {:?} after \\ character in byte literal", b), + } + } + b'\r' => { + assert_eq!(byte(v, 1), b'\n', "Bare CR not allowed in string"); + v = &v[2..]; + b'\n' + } + b => { + v = &v[1..]; + b + } + }; + out.push(byte); + } + + assert_eq!(byte(v, 0), b'"'); + let suffix = s[s.len() - v.len() + 1..].to_owned().into_boxed_str(); + (out, suffix) + } + + fn parse_lit_byte_str_raw(s: &str) -> (Vec<u8>, Box<str>) { + assert_eq!(byte(s, 0), b'b'); + let (value, suffix) = parse_lit_str_raw(&s[1..]); + (String::from(value).into_bytes(), suffix) + } + + // Returns (value, suffix). + pub fn parse_lit_byte(s: &str) -> (u8, Box<str>) { + assert_eq!(byte(s, 0), b'b'); + assert_eq!(byte(s, 1), b'\''); + + // We're going to want to have slices which don't respect codepoint boundaries. + let mut v = s[2..].as_bytes(); + + let b = match byte(v, 0) { + b'\\' => { + let b = byte(v, 1); + v = &v[2..]; + match b { + b'x' => { + let (b, rest) = backslash_x(v); + v = rest; + b + } + b'n' => b'\n', + b'r' => b'\r', + b't' => b'\t', + b'\\' => b'\\', + b'0' => b'\0', + b'\'' => b'\'', + b'"' => b'"', + b => panic!("unexpected byte {:?} after \\ character in byte literal", b), + } + } + b => { + v = &v[1..]; + b + } + }; + + assert_eq!(byte(v, 0), b'\''); + let suffix = s[s.len() - v.len() + 1..].to_owned().into_boxed_str(); + (b, suffix) + } + + // Returns (value, suffix). 
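These private helpers back the public `value()` accessors. A sketch of the observable unescaping behavior, assuming syn 1.0 with the `"parsing"` feature (the literal contents are illustrative):

```
use syn::{LitByteStr, LitStr};

fn main() -> syn::Result<()> {
    // Cooked strings have their escape sequences resolved...
    let cooked: LitStr = syn::parse_str(r#""line\t\"quoted\"""#)?;
    assert_eq!(cooked.value(), "line\t\"quoted\"");

    // ...while raw strings are taken verbatim between the delimiters.
    let raw: LitStr = syn::parse_str(r###"r#"no \n escapes"#"###)?;
    assert_eq!(raw.value(), r"no \n escapes");

    // Byte strings decode to Vec<u8>.
    let bytes: LitByteStr = syn::parse_str(r#"b"\x41BC""#)?;
    assert_eq!(bytes.value(), b"ABC".to_vec());

    Ok(())
}
```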
+ pub fn parse_lit_char(mut s: &str) -> (char, Box<str>) { + assert_eq!(byte(s, 0), b'\''); + s = &s[1..]; + + let ch = match byte(s, 0) { + b'\\' => { + let b = byte(s, 1); + s = &s[2..]; + match b { + b'x' => { + let (byte, rest) = backslash_x(s); + s = rest; + assert!(byte <= 0x80, "Invalid \\x byte in string literal"); + char::from_u32(u32::from(byte)).unwrap() + } + b'u' => { + let (chr, rest) = backslash_u(s); + s = rest; + chr + } + b'n' => '\n', + b'r' => '\r', + b't' => '\t', + b'\\' => '\\', + b'0' => '\0', + b'\'' => '\'', + b'"' => '"', + b => panic!("unexpected byte {:?} after \\ character in byte literal", b), + } + } + _ => { + let ch = next_chr(s); + s = &s[ch.len_utf8()..]; + ch + } + }; + assert_eq!(byte(s, 0), b'\''); + let suffix = s[1..].to_owned().into_boxed_str(); + (ch, suffix) + } + + fn backslash_x<S>(s: &S) -> (u8, &S) + where + S: Index<RangeFrom<usize>, Output = S> + AsRef<[u8]> + ?Sized, + { + let mut ch = 0; + let b0 = byte(s, 0); + let b1 = byte(s, 1); + ch += 0x10 + * match b0 { + b'0'..=b'9' => b0 - b'0', + b'a'..=b'f' => 10 + (b0 - b'a'), + b'A'..=b'F' => 10 + (b0 - b'A'), + _ => panic!("unexpected non-hex character after \\x"), + }; + ch += match b1 { + b'0'..=b'9' => b1 - b'0', + b'a'..=b'f' => 10 + (b1 - b'a'), + b'A'..=b'F' => 10 + (b1 - b'A'), + _ => panic!("unexpected non-hex character after \\x"), + }; + (ch, &s[2..]) + } + + fn backslash_u(mut s: &str) -> (char, &str) { + if byte(s, 0) != b'{' { + panic!("{}", "expected { after \\u"); + } + s = &s[1..]; + + let mut ch = 0; + let mut digits = 0; + loop { + let b = byte(s, 0); + let digit = match b { + b'0'..=b'9' => b - b'0', + b'a'..=b'f' => 10 + b - b'a', + b'A'..=b'F' => 10 + b - b'A', + b'_' if digits > 0 => { + s = &s[1..]; + continue; + } + b'}' if digits == 0 => panic!("invalid empty unicode escape"), + b'}' => break, + _ => panic!("unexpected non-hex character after \\u"), + }; + if digits == 6 { + panic!("overlong unicode escape (must have at most 6 hex digits)"); + } + ch *= 0x10; + ch += u32::from(digit); + digits += 1; + s = &s[1..]; + } + assert!(byte(s, 0) == b'}'); + s = &s[1..]; + + if let Some(ch) = char::from_u32(ch) { + (ch, s) + } else { + panic!("character code {:x} is not a valid unicode character", ch); + } + } + + // Returns base 10 digits and suffix. + pub fn parse_lit_int(mut s: &str) -> Option<(Box<str>, Box<str>)> { + let negative = byte(s, 0) == b'-'; + if negative { + s = &s[1..]; + } + + let base = match (byte(s, 0), byte(s, 1)) { + (b'0', b'x') => { + s = &s[2..]; + 16 + } + (b'0', b'o') => { + s = &s[2..]; + 8 + } + (b'0', b'b') => { + s = &s[2..]; + 2 + } + (b'0'..=b'9', _) => 10, + _ => return None, + }; + + let mut value = BigInt::new(); + 'outer: loop { + let b = byte(s, 0); + let digit = match b { + b'0'..=b'9' => b - b'0', + b'a'..=b'f' if base > 10 => b - b'a' + 10, + b'A'..=b'F' if base > 10 => b - b'A' + 10, + b'_' => { + s = &s[1..]; + continue; + } + // If looking at a floating point literal, we don't want to + // consider it an integer. + b'.' 
if base == 10 => return None, + b'e' | b'E' if base == 10 => { + let mut has_exp = false; + for (i, b) in s[1..].bytes().enumerate() { + match b { + b'_' => {} + b'-' | b'+' => return None, + b'0'..=b'9' => has_exp = true, + _ => { + let suffix = &s[1 + i..]; + if has_exp && crate::ident::xid_ok(suffix) { + return None; + } else { + break 'outer; + } + } + } + } + if has_exp { + return None; + } else { + break; + } + } + _ => break, + }; + + if digit >= base { + return None; + } + + value *= base; + value += digit; + s = &s[1..]; + } + + let suffix = s; + if suffix.is_empty() || crate::ident::xid_ok(suffix) { + let mut repr = value.to_string(); + if negative { + repr.insert(0, '-'); + } + Some((repr.into_boxed_str(), suffix.to_owned().into_boxed_str())) + } else { + None + } + } + + // Returns base 10 digits and suffix. + pub fn parse_lit_float(input: &str) -> Option<(Box<str>, Box<str>)> { + // Rust's floating point literals are very similar to the ones parsed by + // the standard library, except that rust's literals can contain + // ignorable underscores. Let's remove those underscores. + + let mut bytes = input.to_owned().into_bytes(); + + let start = (*bytes.first()? == b'-') as usize; + match bytes.get(start)? { + b'0'..=b'9' => {} + _ => return None, + } + + let mut read = start; + let mut write = start; + let mut has_dot = false; + let mut has_e = false; + let mut has_sign = false; + let mut has_exponent = false; + while read < bytes.len() { + match bytes[read] { + b'_' => { + // Don't increase write + read += 1; + continue; + } + b'0'..=b'9' => { + if has_e { + has_exponent = true; + } + bytes[write] = bytes[read]; + } + b'.' => { + if has_e || has_dot { + return None; + } + has_dot = true; + bytes[write] = b'.'; + } + b'e' | b'E' => { + match bytes[read + 1..] + .iter() + .find(|b| **b != b'_') + .unwrap_or(&b'\0') + { + b'-' | b'+' | b'0'..=b'9' => {} + _ => break, + } + if has_e { + if has_exponent { + break; + } else { + return None; + } + } + has_e = true; + bytes[write] = b'e'; + } + b'-' | b'+' => { + if has_sign || has_exponent || !has_e { + return None; + } + has_sign = true; + if bytes[read] == b'-' { + bytes[write] = bytes[read]; + } else { + // Omit '+' + read += 1; + continue; + } + } + _ => break, + } + read += 1; + write += 1; + } + + if has_e && !has_exponent { + return None; + } + + let mut digits = String::from_utf8(bytes).unwrap(); + let suffix = digits.split_off(read); + digits.truncate(write); + if suffix.is_empty() || crate::ident::xid_ok(&suffix) { + Some((digits.into_boxed_str(), suffix.into_boxed_str())) + } else { + None + } + } + + #[allow(clippy::unnecessary_wraps)] + pub fn to_literal(repr: &str, digits: &str, suffix: &str) -> Option<Literal> { + #[cfg(syn_no_negative_literal_parse)] + { + // Rustc older than https://github.com/rust-lang/rust/pull/87262. 
+ if repr.starts_with('-') { + let f64_parse_finite = || digits.parse().ok().filter(|x: &f64| x.is_finite()); + let f32_parse_finite = || digits.parse().ok().filter(|x: &f32| x.is_finite()); + return if suffix == "f64" { + f64_parse_finite().map(Literal::f64_suffixed) + } else if suffix == "f32" { + f32_parse_finite().map(Literal::f32_suffixed) + } else if suffix == "i64" { + digits.parse().ok().map(Literal::i64_suffixed) + } else if suffix == "i32" { + digits.parse().ok().map(Literal::i32_suffixed) + } else if suffix == "i16" { + digits.parse().ok().map(Literal::i16_suffixed) + } else if suffix == "i8" { + digits.parse().ok().map(Literal::i8_suffixed) + } else if !suffix.is_empty() { + None + } else if digits.contains('.') { + f64_parse_finite().map(Literal::f64_unsuffixed) + } else { + digits.parse().ok().map(Literal::i64_unsuffixed) + }; + } + } + let _ = digits; + let _ = suffix; + Some(repr.parse::<Literal>().unwrap()) + } +} diff --git a/third_party/rust/syn/src/lookahead.rs b/third_party/rust/syn/src/lookahead.rs new file mode 100644 index 0000000000..0cf5cf5a83 --- /dev/null +++ b/third_party/rust/syn/src/lookahead.rs @@ -0,0 +1,169 @@ +use crate::buffer::Cursor; +use crate::error::{self, Error}; +use crate::sealed::lookahead::Sealed; +use crate::span::IntoSpans; +use crate::token::Token; +use proc_macro2::{Delimiter, Span}; +use std::cell::RefCell; + +/// Support for checking the next token in a stream to decide how to parse. +/// +/// An important advantage over [`ParseStream::peek`] is that here we +/// automatically construct an appropriate error message based on the token +/// alternatives that get peeked. If you are producing your own error message, +/// go ahead and use `ParseStream::peek` instead. +/// +/// Use [`ParseStream::lookahead1`] to construct this object. +/// +/// [`ParseStream::peek`]: crate::parse::ParseBuffer::peek +/// [`ParseStream::lookahead1`]: crate::parse::ParseBuffer::lookahead1 +/// +/// Consuming tokens from the source stream after constructing a lookahead +/// object does not also advance the lookahead object. +/// +/// # Example +/// +/// ``` +/// use syn::{ConstParam, Ident, Lifetime, LifetimeDef, Result, Token, TypeParam}; +/// use syn::parse::{Parse, ParseStream}; +/// +/// // A generic parameter, a single one of the comma-separated elements inside +/// // angle brackets in: +/// // +/// // fn f<T: Clone, 'a, 'b: 'a, const N: usize>() { ... } +/// // +/// // On invalid input, lookahead gives us a reasonable error message. 
+/// // +/// // error: expected one of: identifier, lifetime, `const` +/// // | +/// // 5 | fn f<!Sized>() {} +/// // | ^ +/// enum GenericParam { +/// Type(TypeParam), +/// Lifetime(LifetimeDef), +/// Const(ConstParam), +/// } +/// +/// impl Parse for GenericParam { +/// fn parse(input: ParseStream) -> Result<Self> { +/// let lookahead = input.lookahead1(); +/// if lookahead.peek(Ident) { +/// input.parse().map(GenericParam::Type) +/// } else if lookahead.peek(Lifetime) { +/// input.parse().map(GenericParam::Lifetime) +/// } else if lookahead.peek(Token![const]) { +/// input.parse().map(GenericParam::Const) +/// } else { +/// Err(lookahead.error()) +/// } +/// } +/// } +/// ``` +pub struct Lookahead1<'a> { + scope: Span, + cursor: Cursor<'a>, + comparisons: RefCell<Vec<&'static str>>, +} + +pub fn new(scope: Span, cursor: Cursor) -> Lookahead1 { + Lookahead1 { + scope, + cursor, + comparisons: RefCell::new(Vec::new()), + } +} + +fn peek_impl( + lookahead: &Lookahead1, + peek: fn(Cursor) -> bool, + display: fn() -> &'static str, +) -> bool { + if peek(lookahead.cursor) { + return true; + } + lookahead.comparisons.borrow_mut().push(display()); + false +} + +impl<'a> Lookahead1<'a> { + /// Looks at the next token in the parse stream to determine whether it + /// matches the requested type of token. + /// + /// # Syntax + /// + /// Note that this method does not use turbofish syntax. Pass the peek type + /// inside of parentheses. + /// + /// - `input.peek(Token![struct])` + /// - `input.peek(Token![==])` + /// - `input.peek(Ident)` *(does not accept keywords)* + /// - `input.peek(Ident::peek_any)` + /// - `input.peek(Lifetime)` + /// - `input.peek(token::Brace)` + pub fn peek<T: Peek>(&self, token: T) -> bool { + let _ = token; + peek_impl(self, T::Token::peek, T::Token::display) + } + + /// Triggers an error at the current position of the parse stream. + /// + /// The error message will identify all of the expected token types that + /// have been peeked against this lookahead instance. + pub fn error(self) -> Error { + let comparisons = self.comparisons.borrow(); + match comparisons.len() { + 0 => { + if self.cursor.eof() { + Error::new(self.scope, "unexpected end of input") + } else { + Error::new(self.cursor.span(), "unexpected token") + } + } + 1 => { + let message = format!("expected {}", comparisons[0]); + error::new_at(self.scope, self.cursor, message) + } + 2 => { + let message = format!("expected {} or {}", comparisons[0], comparisons[1]); + error::new_at(self.scope, self.cursor, message) + } + _ => { + let join = comparisons.join(", "); + let message = format!("expected one of: {}", join); + error::new_at(self.scope, self.cursor, message) + } + } + } +} + +/// Types that can be parsed by looking at just one token. +/// +/// Use [`ParseStream::peek`] to peek one of these types in a parse stream +/// without consuming it from the stream. +/// +/// This trait is sealed and cannot be implemented for types outside of Syn. +/// +/// [`ParseStream::peek`]: crate::parse::ParseBuffer::peek +pub trait Peek: Sealed { + // Not public API. 
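The `GenericParam` example above peeks keyword-like tokens; the same `lookahead1` pattern works for literal tokens, with `error()` listing every alternative that was peeked. A minimal sketch, assuming syn 1.0 with the `"parsing"` feature (`PortArg` is an illustrative type):

```
use syn::parse::{Parse, ParseStream};
use syn::{LitInt, LitStr, Result};

// An argument that may be given either as a quoted name or as a number,
// e.g. `"http"` or `8080`.
enum PortArg {
    Named(LitStr),
    Number(LitInt),
}

impl Parse for PortArg {
    fn parse(input: ParseStream) -> Result<Self> {
        let lookahead = input.lookahead1();
        if lookahead.peek(LitStr) {
            input.parse().map(PortArg::Named)
        } else if lookahead.peek(LitInt) {
            input.parse().map(PortArg::Number)
        } else {
            // Reports: expected string literal or integer literal
            Err(lookahead.error())
        }
    }
}

fn main() {
    let arg: PortArg = syn::parse_str("8080").unwrap();
    assert!(matches!(arg, PortArg::Number(_)));
}
```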
+ #[doc(hidden)] + type Token: Token; +} + +impl<F: Copy + FnOnce(TokenMarker) -> T, T: Token> Peek for F { + type Token = T; +} + +pub enum TokenMarker {} + +impl<S> IntoSpans<S> for TokenMarker { + fn into_spans(self) -> S { + match self {} + } +} + +pub fn is_delimiter(cursor: Cursor, delimiter: Delimiter) -> bool { + cursor.group(delimiter).is_some() +} + +impl<F: Copy + FnOnce(TokenMarker) -> T, T: Token> Sealed for F {} diff --git a/third_party/rust/syn/src/mac.rs b/third_party/rust/syn/src/mac.rs new file mode 100644 index 0000000000..3d84f8e48b --- /dev/null +++ b/third_party/rust/syn/src/mac.rs @@ -0,0 +1,219 @@ +use super::*; +use crate::token::{Brace, Bracket, Paren}; +use proc_macro2::TokenStream; +#[cfg(feature = "parsing")] +use proc_macro2::{Delimiter, Group, Span, TokenTree}; + +#[cfg(feature = "parsing")] +use crate::parse::{Parse, ParseStream, Parser, Result}; + +ast_struct! { + /// A macro invocation: `println!("{}", mac)`. + /// + /// *This type is available only if Syn is built with the `"derive"` or `"full"` + /// feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct Macro { + pub path: Path, + pub bang_token: Token![!], + pub delimiter: MacroDelimiter, + pub tokens: TokenStream, + } +} + +ast_enum! { + /// A grouping token that surrounds a macro body: `m!(...)` or `m!{...}` or `m![...]`. + /// + /// *This type is available only if Syn is built with the `"derive"` or `"full"` + /// feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub enum MacroDelimiter { + Paren(Paren), + Brace(Brace), + Bracket(Bracket), + } +} + +#[cfg(feature = "parsing")] +fn delimiter_span_close(macro_delimiter: &MacroDelimiter) -> Span { + let delimiter = match macro_delimiter { + MacroDelimiter::Paren(_) => Delimiter::Parenthesis, + MacroDelimiter::Brace(_) => Delimiter::Brace, + MacroDelimiter::Bracket(_) => Delimiter::Bracket, + }; + let mut group = Group::new(delimiter, TokenStream::new()); + group.set_span(match macro_delimiter { + MacroDelimiter::Paren(token) => token.span, + MacroDelimiter::Brace(token) => token.span, + MacroDelimiter::Bracket(token) => token.span, + }); + group.span_close() +} + +impl Macro { + /// Parse the tokens within the macro invocation's delimiters into a syntax + /// tree. + /// + /// This is equivalent to `syn::parse2::<T>(mac.tokens)` except that it + /// produces a more useful span when `tokens` is empty. + /// + /// # Example + /// + /// ``` + /// use syn::{parse_quote, Expr, ExprLit, Ident, Lit, LitStr, Macro, Token}; + /// use syn::ext::IdentExt; + /// use syn::parse::{Error, Parse, ParseStream, Result}; + /// use syn::punctuated::Punctuated; + /// + /// // The arguments expected by libcore's format_args macro, and as a + /// // result most other formatting and printing macros like println. 
+ /// // + /// // println!("{} is {number:.prec$}", "x", prec=5, number=0.01) + /// struct FormatArgs { + /// format_string: Expr, + /// positional_args: Vec<Expr>, + /// named_args: Vec<(Ident, Expr)>, + /// } + /// + /// impl Parse for FormatArgs { + /// fn parse(input: ParseStream) -> Result<Self> { + /// let format_string: Expr; + /// let mut positional_args = Vec::new(); + /// let mut named_args = Vec::new(); + /// + /// format_string = input.parse()?; + /// while !input.is_empty() { + /// input.parse::<Token![,]>()?; + /// if input.is_empty() { + /// break; + /// } + /// if input.peek(Ident::peek_any) && input.peek2(Token![=]) { + /// while !input.is_empty() { + /// let name: Ident = input.call(Ident::parse_any)?; + /// input.parse::<Token![=]>()?; + /// let value: Expr = input.parse()?; + /// named_args.push((name, value)); + /// if input.is_empty() { + /// break; + /// } + /// input.parse::<Token![,]>()?; + /// } + /// break; + /// } + /// positional_args.push(input.parse()?); + /// } + /// + /// Ok(FormatArgs { + /// format_string, + /// positional_args, + /// named_args, + /// }) + /// } + /// } + /// + /// // Extract the first argument, the format string literal, from an + /// // invocation of a formatting or printing macro. + /// fn get_format_string(m: &Macro) -> Result<LitStr> { + /// let args: FormatArgs = m.parse_body()?; + /// match args.format_string { + /// Expr::Lit(ExprLit { lit: Lit::Str(lit), .. }) => Ok(lit), + /// other => { + /// // First argument was not a string literal expression. + /// // Maybe something like: println!(concat!(...), ...) + /// Err(Error::new_spanned(other, "format string must be a string literal")) + /// } + /// } + /// } + /// + /// fn main() { + /// let invocation = parse_quote! { + /// println!("{:?}", Instant::now()) + /// }; + /// let lit = get_format_string(&invocation).unwrap(); + /// assert_eq!(lit.value(), "{:?}"); + /// } + /// ``` + #[cfg(feature = "parsing")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + pub fn parse_body<T: Parse>(&self) -> Result<T> { + self.parse_body_with(T::parse) + } + + /// Parse the tokens within the macro invocation's delimiters using the + /// given parser. 
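`parse_body_with` is documented above without an inline example. A minimal sketch combining it with `Punctuated::parse_terminated`, assuming syn 1.0 with default features plus `"full"` (the `vec![...]` invocation is illustrative):

```
use syn::punctuated::Punctuated;
use syn::{parse_quote, Expr, Macro, Token};

fn main() -> syn::Result<()> {
    let invocation: Macro = parse_quote! {
        vec![1, 2, 3]
    };

    // Parse the comma-separated body without defining a dedicated type.
    let args = invocation
        .parse_body_with(Punctuated::<Expr, Token![,]>::parse_terminated)?;
    assert_eq!(args.len(), 3);

    Ok(())
}
```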
+ #[cfg(feature = "parsing")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + pub fn parse_body_with<F: Parser>(&self, parser: F) -> Result<F::Output> { + let scope = delimiter_span_close(&self.delimiter); + crate::parse::parse_scoped(parser, scope, self.tokens.clone()) + } +} + +#[cfg(feature = "parsing")] +pub fn parse_delimiter(input: ParseStream) -> Result<(MacroDelimiter, TokenStream)> { + input.step(|cursor| { + if let Some((TokenTree::Group(g), rest)) = cursor.token_tree() { + let span = g.span(); + let delimiter = match g.delimiter() { + Delimiter::Parenthesis => MacroDelimiter::Paren(Paren(span)), + Delimiter::Brace => MacroDelimiter::Brace(Brace(span)), + Delimiter::Bracket => MacroDelimiter::Bracket(Bracket(span)), + Delimiter::None => { + return Err(cursor.error("expected delimiter")); + } + }; + Ok(((delimiter, g.stream()), rest)) + } else { + Err(cursor.error("expected delimiter")) + } + }) +} + +#[cfg(feature = "parsing")] +pub mod parsing { + use super::*; + use crate::parse::{Parse, ParseStream, Result}; + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for Macro { + fn parse(input: ParseStream) -> Result<Self> { + let tokens; + Ok(Macro { + path: input.call(Path::parse_mod_style)?, + bang_token: input.parse()?, + delimiter: { + let (delimiter, content) = parse_delimiter(input)?; + tokens = content; + delimiter + }, + tokens, + }) + } + } +} + +#[cfg(feature = "printing")] +mod printing { + use super::*; + use proc_macro2::TokenStream; + use quote::ToTokens; + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for Macro { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.path.to_tokens(tokens); + self.bang_token.to_tokens(tokens); + match &self.delimiter { + MacroDelimiter::Paren(paren) => { + paren.surround(tokens, |tokens| self.tokens.to_tokens(tokens)); + } + MacroDelimiter::Brace(brace) => { + brace.surround(tokens, |tokens| self.tokens.to_tokens(tokens)); + } + MacroDelimiter::Bracket(bracket) => { + bracket.surround(tokens, |tokens| self.tokens.to_tokens(tokens)); + } + } + } + } +} diff --git a/third_party/rust/syn/src/macros.rs b/third_party/rust/syn/src/macros.rs new file mode 100644 index 0000000000..da10a1a550 --- /dev/null +++ b/third_party/rust/syn/src/macros.rs @@ -0,0 +1,177 @@ +#[cfg_attr( + not(any(feature = "full", feature = "derive")), + allow(unknown_lints, unused_macro_rules) +)] +macro_rules! ast_struct { + ( + [$($attrs_pub:tt)*] + struct $name:ident #full $($rest:tt)* + ) => { + #[cfg(feature = "full")] + $($attrs_pub)* struct $name $($rest)* + + #[cfg(not(feature = "full"))] + $($attrs_pub)* struct $name { + _noconstruct: ::std::marker::PhantomData<::proc_macro2::Span>, + } + + #[cfg(all(not(feature = "full"), feature = "printing"))] + impl ::quote::ToTokens for $name { + fn to_tokens(&self, _: &mut ::proc_macro2::TokenStream) { + unreachable!() + } + } + }; + + ( + [$($attrs_pub:tt)*] + struct $name:ident $($rest:tt)* + ) => { + $($attrs_pub)* struct $name $($rest)* + }; + + ($($t:tt)*) => { + strip_attrs_pub!(ast_struct!($($t)*)); + }; +} + +macro_rules! ast_enum { + // Drop the `#no_visit` attribute, if present. + ( + [$($attrs_pub:tt)*] + enum $name:ident #no_visit $($rest:tt)* + ) => ( + ast_enum!([$($attrs_pub)*] enum $name $($rest)*); + ); + + ( + [$($attrs_pub:tt)*] + enum $name:ident $($rest:tt)* + ) => ( + $($attrs_pub)* enum $name $($rest)* + ); + + ($($t:tt)*) => { + strip_attrs_pub!(ast_enum!($($t)*)); + }; +} + +macro_rules! 
ast_enum_of_structs { + ( + $(#[$enum_attr:meta])* + $pub:ident $enum:ident $name:ident $body:tt + $($remaining:tt)* + ) => { + ast_enum!($(#[$enum_attr])* $pub $enum $name $body); + ast_enum_of_structs_impl!($pub $enum $name $body $($remaining)*); + }; +} + +macro_rules! ast_enum_of_structs_impl { + ( + $pub:ident $enum:ident $name:ident { + $( + $(#[cfg $cfg_attr:tt])* + $(#[doc $($doc_attr:tt)*])* + $variant:ident $( ($($member:ident)::+) )*, + )* + } + + $($remaining:tt)* + ) => { + check_keyword_matches!(pub $pub); + check_keyword_matches!(enum $enum); + + $($( + ast_enum_from_struct!($name::$variant, $($member)::+); + )*)* + + #[cfg(feature = "printing")] + generate_to_tokens! { + $($remaining)* + () + tokens + $name { + $( + $(#[cfg $cfg_attr])* + $(#[doc $($doc_attr)*])* + $variant $($($member)::+)*, + )* + } + } + }; +} + +macro_rules! ast_enum_from_struct { + // No From<TokenStream> for verbatim variants. + ($name:ident::Verbatim, $member:ident) => {}; + + ($name:ident::$variant:ident, $member:ident) => { + impl From<$member> for $name { + fn from(e: $member) -> $name { + $name::$variant(e) + } + } + }; +} + +#[cfg(feature = "printing")] +#[cfg_attr( + not(any(feature = "full", feature = "derive")), + allow(unknown_lints, unused_macro_rules) +)] +macro_rules! generate_to_tokens { + (do_not_generate_to_tokens $($foo:tt)*) => (); + + ( + ($($arms:tt)*) $tokens:ident $name:ident { + $(#[cfg $cfg_attr:tt])* + $(#[doc $($doc_attr:tt)*])* + $variant:ident, + $($next:tt)* + } + ) => { + generate_to_tokens!( + ($($arms)* $(#[cfg $cfg_attr])* $name::$variant => {}) + $tokens $name { $($next)* } + ); + }; + + ( + ($($arms:tt)*) $tokens:ident $name:ident { + $(#[cfg $cfg_attr:tt])* + $(#[doc $($doc_attr:tt)*])* + $variant:ident $member:ident, + $($next:tt)* + } + ) => { + generate_to_tokens!( + ($($arms)* $(#[cfg $cfg_attr])* $name::$variant(_e) => _e.to_tokens($tokens),) + $tokens $name { $($next)* } + ); + }; + + (($($arms:tt)*) $tokens:ident $name:ident {}) => { + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ::quote::ToTokens for $name { + fn to_tokens(&self, $tokens: &mut ::proc_macro2::TokenStream) { + match self { + $($arms)* + } + } + } + }; +} + +macro_rules! strip_attrs_pub { + ($mac:ident!($(#[$m:meta])* $pub:ident $($t:tt)*)) => { + check_keyword_matches!(pub $pub); + + $mac!([$(#[$m])* $pub] $($t)*); + }; +} + +macro_rules! check_keyword_matches { + (enum enum) => {}; + (pub pub) => {}; +} diff --git a/third_party/rust/syn/src/op.rs b/third_party/rust/syn/src/op.rs new file mode 100644 index 0000000000..b8ef9a7fbd --- /dev/null +++ b/third_party/rust/syn/src/op.rs @@ -0,0 +1,234 @@ +ast_enum! { + /// A binary operator: `+`, `+=`, `&`. 
+ /// + /// *This type is available only if Syn is built with the `"derive"` or `"full"` + /// feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub enum BinOp { + /// The `+` operator (addition) + Add(Token![+]), + /// The `-` operator (subtraction) + Sub(Token![-]), + /// The `*` operator (multiplication) + Mul(Token![*]), + /// The `/` operator (division) + Div(Token![/]), + /// The `%` operator (modulus) + Rem(Token![%]), + /// The `&&` operator (logical and) + And(Token![&&]), + /// The `||` operator (logical or) + Or(Token![||]), + /// The `^` operator (bitwise xor) + BitXor(Token![^]), + /// The `&` operator (bitwise and) + BitAnd(Token![&]), + /// The `|` operator (bitwise or) + BitOr(Token![|]), + /// The `<<` operator (shift left) + Shl(Token![<<]), + /// The `>>` operator (shift right) + Shr(Token![>>]), + /// The `==` operator (equality) + Eq(Token![==]), + /// The `<` operator (less than) + Lt(Token![<]), + /// The `<=` operator (less than or equal to) + Le(Token![<=]), + /// The `!=` operator (not equal to) + Ne(Token![!=]), + /// The `>=` operator (greater than or equal to) + Ge(Token![>=]), + /// The `>` operator (greater than) + Gt(Token![>]), + /// The `+=` operator + AddEq(Token![+=]), + /// The `-=` operator + SubEq(Token![-=]), + /// The `*=` operator + MulEq(Token![*=]), + /// The `/=` operator + DivEq(Token![/=]), + /// The `%=` operator + RemEq(Token![%=]), + /// The `^=` operator + BitXorEq(Token![^=]), + /// The `&=` operator + BitAndEq(Token![&=]), + /// The `|=` operator + BitOrEq(Token![|=]), + /// The `<<=` operator + ShlEq(Token![<<=]), + /// The `>>=` operator + ShrEq(Token![>>=]), + } +} + +ast_enum! { + /// A unary operator: `*`, `!`, `-`. + /// + /// *This type is available only if Syn is built with the `"derive"` or `"full"` + /// feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub enum UnOp { + /// The `*` operator for dereferencing + Deref(Token![*]), + /// The `!` operator for logical inversion + Not(Token![!]), + /// The `-` operator for negation + Neg(Token![-]), + } +} + +#[cfg(feature = "parsing")] +pub mod parsing { + use super::*; + use crate::parse::{Parse, ParseStream, Result}; + + fn parse_binop(input: ParseStream) -> Result<BinOp> { + if input.peek(Token![&&]) { + input.parse().map(BinOp::And) + } else if input.peek(Token![||]) { + input.parse().map(BinOp::Or) + } else if input.peek(Token![<<]) { + input.parse().map(BinOp::Shl) + } else if input.peek(Token![>>]) { + input.parse().map(BinOp::Shr) + } else if input.peek(Token![==]) { + input.parse().map(BinOp::Eq) + } else if input.peek(Token![<=]) { + input.parse().map(BinOp::Le) + } else if input.peek(Token![!=]) { + input.parse().map(BinOp::Ne) + } else if input.peek(Token![>=]) { + input.parse().map(BinOp::Ge) + } else if input.peek(Token![+]) { + input.parse().map(BinOp::Add) + } else if input.peek(Token![-]) { + input.parse().map(BinOp::Sub) + } else if input.peek(Token![*]) { + input.parse().map(BinOp::Mul) + } else if input.peek(Token![/]) { + input.parse().map(BinOp::Div) + } else if input.peek(Token![%]) { + input.parse().map(BinOp::Rem) + } else if input.peek(Token![^]) { + input.parse().map(BinOp::BitXor) + } else if input.peek(Token![&]) { + input.parse().map(BinOp::BitAnd) + } else if input.peek(Token![|]) { + input.parse().map(BinOp::BitOr) + } else if input.peek(Token![<]) { + input.parse().map(BinOp::Lt) + } else if input.peek(Token![>]) { + input.parse().map(BinOp::Gt) + } else { + 
Err(input.error("expected binary operator")) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for BinOp { + #[cfg(not(feature = "full"))] + fn parse(input: ParseStream) -> Result<Self> { + parse_binop(input) + } + + #[cfg(feature = "full")] + fn parse(input: ParseStream) -> Result<Self> { + if input.peek(Token![+=]) { + input.parse().map(BinOp::AddEq) + } else if input.peek(Token![-=]) { + input.parse().map(BinOp::SubEq) + } else if input.peek(Token![*=]) { + input.parse().map(BinOp::MulEq) + } else if input.peek(Token![/=]) { + input.parse().map(BinOp::DivEq) + } else if input.peek(Token![%=]) { + input.parse().map(BinOp::RemEq) + } else if input.peek(Token![^=]) { + input.parse().map(BinOp::BitXorEq) + } else if input.peek(Token![&=]) { + input.parse().map(BinOp::BitAndEq) + } else if input.peek(Token![|=]) { + input.parse().map(BinOp::BitOrEq) + } else if input.peek(Token![<<=]) { + input.parse().map(BinOp::ShlEq) + } else if input.peek(Token![>>=]) { + input.parse().map(BinOp::ShrEq) + } else { + parse_binop(input) + } + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for UnOp { + fn parse(input: ParseStream) -> Result<Self> { + let lookahead = input.lookahead1(); + if lookahead.peek(Token![*]) { + input.parse().map(UnOp::Deref) + } else if lookahead.peek(Token![!]) { + input.parse().map(UnOp::Not) + } else if lookahead.peek(Token![-]) { + input.parse().map(UnOp::Neg) + } else { + Err(lookahead.error()) + } + } + } +} + +#[cfg(feature = "printing")] +mod printing { + use super::*; + use proc_macro2::TokenStream; + use quote::ToTokens; + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for BinOp { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + BinOp::Add(t) => t.to_tokens(tokens), + BinOp::Sub(t) => t.to_tokens(tokens), + BinOp::Mul(t) => t.to_tokens(tokens), + BinOp::Div(t) => t.to_tokens(tokens), + BinOp::Rem(t) => t.to_tokens(tokens), + BinOp::And(t) => t.to_tokens(tokens), + BinOp::Or(t) => t.to_tokens(tokens), + BinOp::BitXor(t) => t.to_tokens(tokens), + BinOp::BitAnd(t) => t.to_tokens(tokens), + BinOp::BitOr(t) => t.to_tokens(tokens), + BinOp::Shl(t) => t.to_tokens(tokens), + BinOp::Shr(t) => t.to_tokens(tokens), + BinOp::Eq(t) => t.to_tokens(tokens), + BinOp::Lt(t) => t.to_tokens(tokens), + BinOp::Le(t) => t.to_tokens(tokens), + BinOp::Ne(t) => t.to_tokens(tokens), + BinOp::Ge(t) => t.to_tokens(tokens), + BinOp::Gt(t) => t.to_tokens(tokens), + BinOp::AddEq(t) => t.to_tokens(tokens), + BinOp::SubEq(t) => t.to_tokens(tokens), + BinOp::MulEq(t) => t.to_tokens(tokens), + BinOp::DivEq(t) => t.to_tokens(tokens), + BinOp::RemEq(t) => t.to_tokens(tokens), + BinOp::BitXorEq(t) => t.to_tokens(tokens), + BinOp::BitAndEq(t) => t.to_tokens(tokens), + BinOp::BitOrEq(t) => t.to_tokens(tokens), + BinOp::ShlEq(t) => t.to_tokens(tokens), + BinOp::ShrEq(t) => t.to_tokens(tokens), + } + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for UnOp { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + UnOp::Deref(t) => t.to_tokens(tokens), + UnOp::Not(t) => t.to_tokens(tokens), + UnOp::Neg(t) => t.to_tokens(tokens), + } + } + } +} diff --git a/third_party/rust/syn/src/parse.rs b/third_party/rust/syn/src/parse.rs new file mode 100644 index 0000000000..bac4ca05ab --- /dev/null +++ b/third_party/rust/syn/src/parse.rs @@ -0,0 +1,1314 @@ +//! Parsing interface for parsing a token stream into a syntax tree node. +//! +//! 
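// Illustrative sketch, not part of the upstream syn sources: exercising the
// `Parse` and `ToTokens` impls for `BinOp` shown above. Assumes the "full",
// "parsing" and "printing" features are enabled; the function name is made up.
use quote::ToTokens;
use syn::BinOp;

fn roundtrip_binop() -> syn::Result<()> {
    // `parse_str` drives the `Parse` impl above; with "full" enabled the
    // compound assignment operators are recognized as well.
    let op: BinOp = syn::parse_str("<<=")?;
    assert!(matches!(op, BinOp::ShlEq(_)));

    // `ToTokens` reprints the operator, so it can be parsed back losslessly.
    let reparsed: BinOp = syn::parse2(op.to_token_stream())?;
    assert!(matches!(reparsed, BinOp::ShlEq(_)));
    Ok(())
}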
Parsing in Syn is built on parser functions that take in a [`ParseStream`] +//! and produce a [`Result<T>`] where `T` is some syntax tree node. Underlying +//! these parser functions is a lower level mechanism built around the +//! [`Cursor`] type. `Cursor` is a cheaply copyable cursor over a range of +//! tokens in a token stream. +//! +//! [`Result<T>`]: Result +//! [`Cursor`]: crate::buffer::Cursor +//! +//! # Example +//! +//! Here is a snippet of parsing code to get a feel for the style of the +//! library. We define data structures for a subset of Rust syntax including +//! enums (not shown) and structs, then provide implementations of the [`Parse`] +//! trait to parse these syntax tree data structures from a token stream. +//! +//! Once `Parse` impls have been defined, they can be called conveniently from a +//! procedural macro through [`parse_macro_input!`] as shown at the bottom of +//! the snippet. If the caller provides syntactically invalid input to the +//! procedural macro, they will receive a helpful compiler error message +//! pointing out the exact token that triggered the failure to parse. +//! +//! [`parse_macro_input!`]: crate::parse_macro_input! +//! +//! ``` +//! # extern crate proc_macro; +//! # +//! use proc_macro::TokenStream; +//! use syn::{braced, parse_macro_input, token, Field, Ident, Result, Token}; +//! use syn::parse::{Parse, ParseStream}; +//! use syn::punctuated::Punctuated; +//! +//! enum Item { +//! Struct(ItemStruct), +//! Enum(ItemEnum), +//! } +//! +//! struct ItemStruct { +//! struct_token: Token![struct], +//! ident: Ident, +//! brace_token: token::Brace, +//! fields: Punctuated<Field, Token![,]>, +//! } +//! # +//! # enum ItemEnum {} +//! +//! impl Parse for Item { +//! fn parse(input: ParseStream) -> Result<Self> { +//! let lookahead = input.lookahead1(); +//! if lookahead.peek(Token![struct]) { +//! input.parse().map(Item::Struct) +//! } else if lookahead.peek(Token![enum]) { +//! input.parse().map(Item::Enum) +//! } else { +//! Err(lookahead.error()) +//! } +//! } +//! } +//! +//! impl Parse for ItemStruct { +//! fn parse(input: ParseStream) -> Result<Self> { +//! let content; +//! Ok(ItemStruct { +//! struct_token: input.parse()?, +//! ident: input.parse()?, +//! brace_token: braced!(content in input), +//! fields: content.parse_terminated(Field::parse_named)?, +//! }) +//! } +//! } +//! # +//! # impl Parse for ItemEnum { +//! # fn parse(input: ParseStream) -> Result<Self> { +//! # unimplemented!() +//! # } +//! # } +//! +//! # const IGNORE: &str = stringify! { +//! #[proc_macro] +//! # }; +//! pub fn my_macro(tokens: TokenStream) -> TokenStream { +//! let input = parse_macro_input!(tokens as Item); +//! +//! /* ... */ +//! # "".parse().unwrap() +//! } +//! ``` +//! +//! # The `syn::parse*` functions +//! +//! The [`syn::parse`], [`syn::parse2`], and [`syn::parse_str`] functions serve +//! as an entry point for parsing syntax tree nodes that can be parsed in an +//! obvious default way. These functions can return any syntax tree node that +//! implements the [`Parse`] trait, which includes most types in Syn. +//! +//! [`syn::parse`]: crate::parse() +//! [`syn::parse2`]: crate::parse2() +//! [`syn::parse_str`]: crate::parse_str() +//! +//! ``` +//! use syn::Type; +//! +//! # fn run_parser() -> syn::Result<()> { +//! let t: Type = syn::parse_str("std::collections::HashMap<String, Value>")?; +//! # Ok(()) +//! # } +//! # +//! # run_parser().unwrap(); +//! ``` +//! +//! The [`parse_quote!`] macro also uses this approach. +//! +//! 
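// Illustrative sketch, not part of the upstream syn sources: the same default
// `Parse` impls back `syn::parse_str`, `syn::parse2`, and `parse_quote!`.
// The function name is made up; assumes the "parsing" and "printing" features.
use quote::quote;
use syn::{parse_quote, Ident, Type};

fn demo_entry_points() -> syn::Result<()> {
    // From a string of source code.
    let ty: Type = syn::parse_str("Vec<u8>")?;
    // From a proc-macro2 token stream.
    let name: Ident = syn::parse2(quote!(my_field))?;
    // parse_quote! quotes and parses in one step, using the same Parse impl.
    let same_ty: Type = parse_quote!(Vec<u8>);
    let _ = (ty, name, same_ty);
    Ok(())
}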
[`parse_quote!`]: crate::parse_quote! +//! +//! # The `Parser` trait +//! +//! Some types can be parsed in several ways depending on context. For example +//! an [`Attribute`] can be either "outer" like `#[...]` or "inner" like +//! `#![...]` and parsing the wrong one would be a bug. Similarly [`Punctuated`] +//! may or may not allow trailing punctuation, and parsing it the wrong way +//! would either reject valid input or accept invalid input. +//! +//! [`Attribute`]: crate::Attribute +//! [`Punctuated`]: crate::punctuated +//! +//! The `Parse` trait is not implemented in these cases because there is no good +//! behavior to consider the default. +//! +//! ```compile_fail +//! # extern crate proc_macro; +//! # +//! # use syn::punctuated::Punctuated; +//! # use syn::{PathSegment, Result, Token}; +//! # +//! # fn f(tokens: proc_macro::TokenStream) -> Result<()> { +//! # +//! // Can't parse `Punctuated` without knowing whether trailing punctuation +//! // should be allowed in this context. +//! let path: Punctuated<PathSegment, Token![::]> = syn::parse(tokens)?; +//! # +//! # Ok(()) +//! # } +//! ``` +//! +//! In these cases the types provide a choice of parser functions rather than a +//! single `Parse` implementation, and those parser functions can be invoked +//! through the [`Parser`] trait. +//! +//! +//! ``` +//! # extern crate proc_macro; +//! # +//! use proc_macro::TokenStream; +//! use syn::parse::Parser; +//! use syn::punctuated::Punctuated; +//! use syn::{Attribute, Expr, PathSegment, Result, Token}; +//! +//! fn call_some_parser_methods(input: TokenStream) -> Result<()> { +//! // Parse a nonempty sequence of path segments separated by `::` punctuation +//! // with no trailing punctuation. +//! let tokens = input.clone(); +//! let parser = Punctuated::<PathSegment, Token![::]>::parse_separated_nonempty; +//! let _path = parser.parse(tokens)?; +//! +//! // Parse a possibly empty sequence of expressions terminated by commas with +//! // an optional trailing punctuation. +//! let tokens = input.clone(); +//! let parser = Punctuated::<Expr, Token![,]>::parse_terminated; +//! let _args = parser.parse(tokens)?; +//! +//! // Parse zero or more outer attributes but not inner attributes. +//! let tokens = input.clone(); +//! let parser = Attribute::parse_outer; +//! let _attrs = parser.parse(tokens)?; +//! +//! Ok(()) +//! } +//! ``` +//! +//! --- +//! +//! *This module is available only if Syn is built with the `"parsing"` feature.* + +#[path = "discouraged.rs"] +pub mod discouraged; + +use crate::buffer::{Cursor, TokenBuffer}; +use crate::error; +use crate::lookahead; +#[cfg(all( + not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))), + feature = "proc-macro" +))] +use crate::proc_macro; +use crate::punctuated::Punctuated; +use crate::token::Token; +use proc_macro2::{self, Delimiter, Group, Literal, Punct, Span, TokenStream, TokenTree}; +use std::cell::Cell; +use std::fmt::{self, Debug, Display}; +#[cfg(feature = "extra-traits")] +use std::hash::{Hash, Hasher}; +use std::marker::PhantomData; +use std::mem; +use std::ops::Deref; +use std::rc::Rc; +use std::str::FromStr; + +pub use crate::error::{Error, Result}; +pub use crate::lookahead::{Lookahead1, Peek}; + +/// Parsing interface implemented by all types that can be parsed in a default +/// way from a token stream. +/// +/// Refer to the [module documentation] for details about implementing and using +/// the `Parse` trait. 
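// Illustrative sketch, not part of the upstream syn sources: a plain
// `fn(ParseStream) -> Result<T>` implements `Parser`, so context-specific
// parser functions get `parse_str`/`parse2` for free. Names are made up.
use syn::parse::{ParseStream, Parser};
use syn::punctuated::Punctuated;
use syn::{Ident, Result, Token};

fn two_idents(input: ParseStream) -> Result<(Ident, Ident)> {
    Ok((input.parse()?, input.parse()?))
}

fn demo_parser_trait() -> Result<()> {
    // A parser function provided by syn for a context-dependent type.
    let path = Punctuated::<Ident, Token![::]>::parse_separated_nonempty
        .parse_str("std::mem::swap")?;
    assert_eq!(path.len(), 3);

    // A hand-written parser function works the same way.
    let (a, b) = two_idents.parse_str("foo bar")?;
    assert_eq!(a, "foo");
    assert_eq!(b, "bar");
    Ok(())
}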
+/// +/// [module documentation]: self +pub trait Parse: Sized { + fn parse(input: ParseStream) -> Result<Self>; +} + +/// Input to a Syn parser function. +/// +/// See the methods of this type under the documentation of [`ParseBuffer`]. For +/// an overview of parsing in Syn, refer to the [module documentation]. +/// +/// [module documentation]: self +pub type ParseStream<'a> = &'a ParseBuffer<'a>; + +/// Cursor position within a buffered token stream. +/// +/// This type is more commonly used through the type alias [`ParseStream`] which +/// is an alias for `&ParseBuffer`. +/// +/// `ParseStream` is the input type for all parser functions in Syn. They have +/// the signature `fn(ParseStream) -> Result<T>`. +/// +/// ## Calling a parser function +/// +/// There is no public way to construct a `ParseBuffer`. Instead, if you are +/// looking to invoke a parser function that requires `ParseStream` as input, +/// you will need to go through one of the public parsing entry points. +/// +/// - The [`parse_macro_input!`] macro if parsing input of a procedural macro; +/// - One of [the `syn::parse*` functions][syn-parse]; or +/// - A method of the [`Parser`] trait. +/// +/// [`parse_macro_input!`]: crate::parse_macro_input! +/// [syn-parse]: self#the-synparse-functions +pub struct ParseBuffer<'a> { + scope: Span, + // Instead of Cell<Cursor<'a>> so that ParseBuffer<'a> is covariant in 'a. + // The rest of the code in this module needs to be careful that only a + // cursor derived from this `cell` is ever assigned to this `cell`. + // + // Cell<Cursor<'a>> cannot be covariant in 'a because then we could take a + // ParseBuffer<'a>, upcast to ParseBuffer<'short> for some lifetime shorter + // than 'a, and then assign a Cursor<'short> into the Cell. + // + // By extension, it would not be safe to expose an API that accepts a + // Cursor<'a> and trusts that it lives as long as the cursor currently in + // the cell. + cell: Cell<Cursor<'static>>, + marker: PhantomData<Cursor<'a>>, + unexpected: Cell<Option<Rc<Cell<Unexpected>>>>, +} + +impl<'a> Drop for ParseBuffer<'a> { + fn drop(&mut self) { + if let Some(unexpected_span) = span_of_unexpected_ignoring_nones(self.cursor()) { + let (inner, old_span) = inner_unexpected(self); + if old_span.is_none() { + inner.set(Unexpected::Some(unexpected_span)); + } + } + } +} + +impl<'a> Display for ParseBuffer<'a> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + Display::fmt(&self.cursor().token_stream(), f) + } +} + +impl<'a> Debug for ParseBuffer<'a> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + Debug::fmt(&self.cursor().token_stream(), f) + } +} + +/// Cursor state associated with speculative parsing. +/// +/// This type is the input of the closure provided to [`ParseStream::step`]. +/// +/// [`ParseStream::step`]: ParseBuffer::step +/// +/// # Example +/// +/// ``` +/// use proc_macro2::TokenTree; +/// use syn::Result; +/// use syn::parse::ParseStream; +/// +/// // This function advances the stream past the next occurrence of `@`. If +/// // no `@` is present in the stream, the stream position is unchanged and +/// // an error is returned. 
+/// fn skip_past_next_at(input: ParseStream) -> Result<()> { +/// input.step(|cursor| { +/// let mut rest = *cursor; +/// while let Some((tt, next)) = rest.token_tree() { +/// match &tt { +/// TokenTree::Punct(punct) if punct.as_char() == '@' => { +/// return Ok(((), next)); +/// } +/// _ => rest = next, +/// } +/// } +/// Err(cursor.error("no `@` was found after this point")) +/// }) +/// } +/// # +/// # fn remainder_after_skipping_past_next_at( +/// # input: ParseStream, +/// # ) -> Result<proc_macro2::TokenStream> { +/// # skip_past_next_at(input)?; +/// # input.parse() +/// # } +/// # +/// # use syn::parse::Parser; +/// # let remainder = remainder_after_skipping_past_next_at +/// # .parse_str("a @ b c") +/// # .unwrap(); +/// # assert_eq!(remainder.to_string(), "b c"); +/// ``` +pub struct StepCursor<'c, 'a> { + scope: Span, + // This field is covariant in 'c. + cursor: Cursor<'c>, + // This field is contravariant in 'c. Together these make StepCursor + // invariant in 'c. Also covariant in 'a. The user cannot cast 'c to a + // different lifetime but can upcast into a StepCursor with a shorter + // lifetime 'a. + // + // As long as we only ever construct a StepCursor for which 'c outlives 'a, + // this means if ever a StepCursor<'c, 'a> exists we are guaranteed that 'c + // outlives 'a. + marker: PhantomData<fn(Cursor<'c>) -> Cursor<'a>>, +} + +impl<'c, 'a> Deref for StepCursor<'c, 'a> { + type Target = Cursor<'c>; + + fn deref(&self) -> &Self::Target { + &self.cursor + } +} + +impl<'c, 'a> Copy for StepCursor<'c, 'a> {} + +impl<'c, 'a> Clone for StepCursor<'c, 'a> { + fn clone(&self) -> Self { + *self + } +} + +impl<'c, 'a> StepCursor<'c, 'a> { + /// Triggers an error at the current position of the parse stream. + /// + /// The `ParseStream::step` invocation will return this same error without + /// advancing the stream state. + pub fn error<T: Display>(self, message: T) -> Error { + error::new_at(self.scope, self.cursor, message) + } +} + +pub(crate) fn advance_step_cursor<'c, 'a>(proof: StepCursor<'c, 'a>, to: Cursor<'c>) -> Cursor<'a> { + // Refer to the comments within the StepCursor definition. We use the + // fact that a StepCursor<'c, 'a> exists as proof that 'c outlives 'a. + // Cursor is covariant in its lifetime parameter so we can cast a + // Cursor<'c> to one with the shorter lifetime Cursor<'a>. + let _ = proof; + unsafe { mem::transmute::<Cursor<'c>, Cursor<'a>>(to) } +} + +pub(crate) fn new_parse_buffer( + scope: Span, + cursor: Cursor, + unexpected: Rc<Cell<Unexpected>>, +) -> ParseBuffer { + ParseBuffer { + scope, + // See comment on `cell` in the struct definition. + cell: Cell::new(unsafe { mem::transmute::<Cursor, Cursor<'static>>(cursor) }), + marker: PhantomData, + unexpected: Cell::new(Some(unexpected)), + } +} + +pub(crate) enum Unexpected { + None, + Some(Span), + Chain(Rc<Cell<Unexpected>>), +} + +impl Default for Unexpected { + fn default() -> Self { + Unexpected::None + } +} + +impl Clone for Unexpected { + fn clone(&self) -> Self { + match self { + Unexpected::None => Unexpected::None, + Unexpected::Some(span) => Unexpected::Some(*span), + Unexpected::Chain(next) => Unexpected::Chain(next.clone()), + } + } +} + +// We call this on Cell<Unexpected> and Cell<Option<T>> where temporarily +// swapping in a None is cheap. 
+fn cell_clone<T: Default + Clone>(cell: &Cell<T>) -> T { + let prev = cell.take(); + let ret = prev.clone(); + cell.set(prev); + ret +} + +fn inner_unexpected(buffer: &ParseBuffer) -> (Rc<Cell<Unexpected>>, Option<Span>) { + let mut unexpected = get_unexpected(buffer); + loop { + match cell_clone(&unexpected) { + Unexpected::None => return (unexpected, None), + Unexpected::Some(span) => return (unexpected, Some(span)), + Unexpected::Chain(next) => unexpected = next, + } + } +} + +pub(crate) fn get_unexpected(buffer: &ParseBuffer) -> Rc<Cell<Unexpected>> { + cell_clone(&buffer.unexpected).unwrap() +} + +fn span_of_unexpected_ignoring_nones(mut cursor: Cursor) -> Option<Span> { + if cursor.eof() { + return None; + } + while let Some((inner, _span, rest)) = cursor.group(Delimiter::None) { + if let Some(unexpected) = span_of_unexpected_ignoring_nones(inner) { + return Some(unexpected); + } + cursor = rest; + } + if cursor.eof() { + None + } else { + Some(cursor.span()) + } +} + +impl<'a> ParseBuffer<'a> { + /// Parses a syntax tree node of type `T`, advancing the position of our + /// parse stream past it. + pub fn parse<T: Parse>(&self) -> Result<T> { + T::parse(self) + } + + /// Calls the given parser function to parse a syntax tree node of type `T` + /// from this stream. + /// + /// # Example + /// + /// The parser below invokes [`Attribute::parse_outer`] to parse a vector of + /// zero or more outer attributes. + /// + /// [`Attribute::parse_outer`]: crate::Attribute::parse_outer + /// + /// ``` + /// use syn::{Attribute, Ident, Result, Token}; + /// use syn::parse::{Parse, ParseStream}; + /// + /// // Parses a unit struct with attributes. + /// // + /// // #[path = "s.tmpl"] + /// // struct S; + /// struct UnitStruct { + /// attrs: Vec<Attribute>, + /// struct_token: Token![struct], + /// name: Ident, + /// semi_token: Token![;], + /// } + /// + /// impl Parse for UnitStruct { + /// fn parse(input: ParseStream) -> Result<Self> { + /// Ok(UnitStruct { + /// attrs: input.call(Attribute::parse_outer)?, + /// struct_token: input.parse()?, + /// name: input.parse()?, + /// semi_token: input.parse()?, + /// }) + /// } + /// } + /// ``` + pub fn call<T>(&self, function: fn(ParseStream) -> Result<T>) -> Result<T> { + function(self) + } + + /// Looks at the next token in the parse stream to determine whether it + /// matches the requested type of token. + /// + /// Does not advance the position of the parse stream. + /// + /// # Syntax + /// + /// Note that this method does not use turbofish syntax. Pass the peek type + /// inside of parentheses. + /// + /// - `input.peek(Token![struct])` + /// - `input.peek(Token![==])` + /// - `input.peek(Ident)` *(does not accept keywords)* + /// - `input.peek(Ident::peek_any)` + /// - `input.peek(Lifetime)` + /// - `input.peek(token::Brace)` + /// + /// # Example + /// + /// In this example we finish parsing the list of supertraits when the next + /// token in the input is either `where` or an opening curly brace. + /// + /// ``` + /// use syn::{braced, token, Generics, Ident, Result, Token, TypeParamBound}; + /// use syn::parse::{Parse, ParseStream}; + /// use syn::punctuated::Punctuated; + /// + /// // Parses a trait definition containing no associated items. 
+ /// // + /// // trait Marker<'de, T>: A + B<'de> where Box<T>: Clone {} + /// struct MarkerTrait { + /// trait_token: Token![trait], + /// ident: Ident, + /// generics: Generics, + /// colon_token: Option<Token![:]>, + /// supertraits: Punctuated<TypeParamBound, Token![+]>, + /// brace_token: token::Brace, + /// } + /// + /// impl Parse for MarkerTrait { + /// fn parse(input: ParseStream) -> Result<Self> { + /// let trait_token: Token![trait] = input.parse()?; + /// let ident: Ident = input.parse()?; + /// let mut generics: Generics = input.parse()?; + /// let colon_token: Option<Token![:]> = input.parse()?; + /// + /// let mut supertraits = Punctuated::new(); + /// if colon_token.is_some() { + /// loop { + /// supertraits.push_value(input.parse()?); + /// if input.peek(Token![where]) || input.peek(token::Brace) { + /// break; + /// } + /// supertraits.push_punct(input.parse()?); + /// } + /// } + /// + /// generics.where_clause = input.parse()?; + /// let content; + /// let empty_brace_token = braced!(content in input); + /// + /// Ok(MarkerTrait { + /// trait_token, + /// ident, + /// generics, + /// colon_token, + /// supertraits, + /// brace_token: empty_brace_token, + /// }) + /// } + /// } + /// ``` + pub fn peek<T: Peek>(&self, token: T) -> bool { + let _ = token; + T::Token::peek(self.cursor()) + } + + /// Looks at the second-next token in the parse stream. + /// + /// This is commonly useful as a way to implement contextual keywords. + /// + /// # Example + /// + /// This example needs to use `peek2` because the symbol `union` is not a + /// keyword in Rust. We can't use just `peek` and decide to parse a union if + /// the very next token is `union`, because someone is free to write a `mod + /// union` and a macro invocation that looks like `union::some_macro! { ... + /// }`. In other words `union` is a contextual keyword. + /// + /// ``` + /// use syn::{Ident, ItemUnion, Macro, Result, Token}; + /// use syn::parse::{Parse, ParseStream}; + /// + /// // Parses either a union or a macro invocation. + /// enum UnionOrMacro { + /// // union MaybeUninit<T> { uninit: (), value: T } + /// Union(ItemUnion), + /// // lazy_static! { ... } + /// Macro(Macro), + /// } + /// + /// impl Parse for UnionOrMacro { + /// fn parse(input: ParseStream) -> Result<Self> { + /// if input.peek(Token![union]) && input.peek2(Ident) { + /// input.parse().map(UnionOrMacro::Union) + /// } else { + /// input.parse().map(UnionOrMacro::Macro) + /// } + /// } + /// } + /// ``` + pub fn peek2<T: Peek>(&self, token: T) -> bool { + fn peek2(buffer: &ParseBuffer, peek: fn(Cursor) -> bool) -> bool { + if let Some(group) = buffer.cursor().group(Delimiter::None) { + if group.0.skip().map_or(false, peek) { + return true; + } + } + buffer.cursor().skip().map_or(false, peek) + } + + let _ = token; + peek2(self, T::Token::peek) + } + + /// Looks at the third-next token in the parse stream. + pub fn peek3<T: Peek>(&self, token: T) -> bool { + fn peek3(buffer: &ParseBuffer, peek: fn(Cursor) -> bool) -> bool { + if let Some(group) = buffer.cursor().group(Delimiter::None) { + if group.0.skip().and_then(Cursor::skip).map_or(false, peek) { + return true; + } + } + buffer + .cursor() + .skip() + .and_then(Cursor::skip) + .map_or(false, peek) + } + + let _ = token; + peek3(self, T::Token::peek) + } + + /// Parses zero or more occurrences of `T` separated by punctuation of type + /// `P`, with optional trailing punctuation. + /// + /// Parsing continues until the end of this parse stream. 
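// Illustrative sketch, not part of the upstream syn sources: using `peek` and
// `peek2` to accept either `name` or `name = "value"` without consuming
// anything while deciding. The `Arg` type is made up.
use syn::parse::{Parse, ParseStream};
use syn::{Ident, LitStr, Result, Token};

struct Arg {
    name: Ident,
    value: Option<(Token![=], LitStr)>,
}

impl Parse for Arg {
    fn parse(input: ParseStream) -> Result<Self> {
        // `peek2` looks past the identifier; neither call advances the stream.
        if input.peek(Ident) && input.peek2(Token![=]) {
            Ok(Arg {
                name: input.parse()?,
                value: Some((input.parse()?, input.parse()?)),
            })
        } else {
            Ok(Arg {
                name: input.parse()?,
                value: None,
            })
        }
    }
}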
The entire content + /// of this parse stream must consist of `T` and `P`. + /// + /// # Example + /// + /// ``` + /// # use quote::quote; + /// # + /// use syn::{parenthesized, token, Ident, Result, Token, Type}; + /// use syn::parse::{Parse, ParseStream}; + /// use syn::punctuated::Punctuated; + /// + /// // Parse a simplified tuple struct syntax like: + /// // + /// // struct S(A, B); + /// struct TupleStruct { + /// struct_token: Token![struct], + /// ident: Ident, + /// paren_token: token::Paren, + /// fields: Punctuated<Type, Token![,]>, + /// semi_token: Token![;], + /// } + /// + /// impl Parse for TupleStruct { + /// fn parse(input: ParseStream) -> Result<Self> { + /// let content; + /// Ok(TupleStruct { + /// struct_token: input.parse()?, + /// ident: input.parse()?, + /// paren_token: parenthesized!(content in input), + /// fields: content.parse_terminated(Type::parse)?, + /// semi_token: input.parse()?, + /// }) + /// } + /// } + /// # + /// # let input = quote! { + /// # struct S(A, B); + /// # }; + /// # syn::parse2::<TupleStruct>(input).unwrap(); + /// ``` + pub fn parse_terminated<T, P: Parse>( + &self, + parser: fn(ParseStream) -> Result<T>, + ) -> Result<Punctuated<T, P>> { + Punctuated::parse_terminated_with(self, parser) + } + + /// Returns whether there are tokens remaining in this stream. + /// + /// This method returns true at the end of the content of a set of + /// delimiters, as well as at the very end of the complete macro input. + /// + /// # Example + /// + /// ``` + /// use syn::{braced, token, Ident, Item, Result, Token}; + /// use syn::parse::{Parse, ParseStream}; + /// + /// // Parses a Rust `mod m { ... }` containing zero or more items. + /// struct Mod { + /// mod_token: Token![mod], + /// name: Ident, + /// brace_token: token::Brace, + /// items: Vec<Item>, + /// } + /// + /// impl Parse for Mod { + /// fn parse(input: ParseStream) -> Result<Self> { + /// let content; + /// Ok(Mod { + /// mod_token: input.parse()?, + /// name: input.parse()?, + /// brace_token: braced!(content in input), + /// items: { + /// let mut items = Vec::new(); + /// while !content.is_empty() { + /// items.push(content.parse()?); + /// } + /// items + /// }, + /// }) + /// } + /// } + /// ``` + pub fn is_empty(&self) -> bool { + self.cursor().eof() + } + + /// Constructs a helper for peeking at the next token in this stream and + /// building an error message if it is not one of a set of expected tokens. + /// + /// # Example + /// + /// ``` + /// use syn::{ConstParam, Ident, Lifetime, LifetimeDef, Result, Token, TypeParam}; + /// use syn::parse::{Parse, ParseStream}; + /// + /// // A generic parameter, a single one of the comma-separated elements inside + /// // angle brackets in: + /// // + /// // fn f<T: Clone, 'a, 'b: 'a, const N: usize>() { ... } + /// // + /// // On invalid input, lookahead gives us a reasonable error message. 
+ /// // + /// // error: expected one of: identifier, lifetime, `const` + /// // | + /// // 5 | fn f<!Sized>() {} + /// // | ^ + /// enum GenericParam { + /// Type(TypeParam), + /// Lifetime(LifetimeDef), + /// Const(ConstParam), + /// } + /// + /// impl Parse for GenericParam { + /// fn parse(input: ParseStream) -> Result<Self> { + /// let lookahead = input.lookahead1(); + /// if lookahead.peek(Ident) { + /// input.parse().map(GenericParam::Type) + /// } else if lookahead.peek(Lifetime) { + /// input.parse().map(GenericParam::Lifetime) + /// } else if lookahead.peek(Token![const]) { + /// input.parse().map(GenericParam::Const) + /// } else { + /// Err(lookahead.error()) + /// } + /// } + /// } + /// ``` + pub fn lookahead1(&self) -> Lookahead1<'a> { + lookahead::new(self.scope, self.cursor()) + } + + /// Forks a parse stream so that parsing tokens out of either the original + /// or the fork does not advance the position of the other. + /// + /// # Performance + /// + /// Forking a parse stream is a cheap fixed amount of work and does not + /// involve copying token buffers. Where you might hit performance problems + /// is if your macro ends up parsing a large amount of content more than + /// once. + /// + /// ``` + /// # use syn::{Expr, Result}; + /// # use syn::parse::ParseStream; + /// # + /// # fn bad(input: ParseStream) -> Result<Expr> { + /// // Do not do this. + /// if input.fork().parse::<Expr>().is_ok() { + /// return input.parse::<Expr>(); + /// } + /// # unimplemented!() + /// # } + /// ``` + /// + /// As a rule, avoid parsing an unbounded amount of tokens out of a forked + /// parse stream. Only use a fork when the amount of work performed against + /// the fork is small and bounded. + /// + /// When complex speculative parsing against the forked stream is + /// unavoidable, use [`parse::discouraged::Speculative`] to advance the + /// original stream once the fork's parse is determined to have been + /// successful. + /// + /// For a lower level way to perform speculative parsing at the token level, + /// consider using [`ParseStream::step`] instead. + /// + /// [`parse::discouraged::Speculative`]: discouraged::Speculative + /// [`ParseStream::step`]: ParseBuffer::step + /// + /// # Example + /// + /// The parse implementation shown here parses possibly restricted `pub` + /// visibilities. + /// + /// - `pub` + /// - `pub(crate)` + /// - `pub(self)` + /// - `pub(super)` + /// - `pub(in some::path)` + /// + /// To handle the case of visibilities inside of tuple structs, the parser + /// needs to distinguish parentheses that specify visibility restrictions + /// from parentheses that form part of a tuple type. + /// + /// ``` + /// # struct A; + /// # struct B; + /// # struct C; + /// # + /// struct S(pub(crate) A, pub (B, C)); + /// ``` + /// + /// In this example input the first tuple struct element of `S` has + /// `pub(crate)` visibility while the second tuple struct element has `pub` + /// visibility; the parentheses around `(B, C)` are part of the type rather + /// than part of a visibility restriction. + /// + /// The parser uses a forked parse stream to check the first token inside of + /// parentheses after the `pub` keyword. This is a small bounded amount of + /// work performed against the forked parse stream. 
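// Illustrative sketch, not part of the upstream syn sources: committing a
// successful speculative parse with `discouraged::Speculative::advance_to`,
// as mentioned above. Assumes the "full" feature; the enum is made up, and
// the forked work should stay as bounded as the surrounding guidance asks.
use syn::parse::discouraged::Speculative;
use syn::parse::{Parse, ParseStream};
use syn::{Expr, Result, Type};

enum TypeOrExpr {
    Type(Type),
    Expr(Expr),
}

impl Parse for TypeOrExpr {
    fn parse(input: ParseStream) -> Result<Self> {
        let ahead = input.fork();
        if let Ok(ty) = ahead.parse::<Type>() {
            if ahead.is_empty() {
                // Keep the fork's progress instead of re-parsing the input.
                input.advance_to(&ahead);
                return Ok(TypeOrExpr::Type(ty));
            }
        }
        input.parse().map(TypeOrExpr::Expr)
    }
}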
+ /// + /// ``` + /// use syn::{parenthesized, token, Ident, Path, Result, Token}; + /// use syn::ext::IdentExt; + /// use syn::parse::{Parse, ParseStream}; + /// + /// struct PubVisibility { + /// pub_token: Token![pub], + /// restricted: Option<Restricted>, + /// } + /// + /// struct Restricted { + /// paren_token: token::Paren, + /// in_token: Option<Token![in]>, + /// path: Path, + /// } + /// + /// impl Parse for PubVisibility { + /// fn parse(input: ParseStream) -> Result<Self> { + /// let pub_token: Token![pub] = input.parse()?; + /// + /// if input.peek(token::Paren) { + /// let ahead = input.fork(); + /// let mut content; + /// parenthesized!(content in ahead); + /// + /// if content.peek(Token![crate]) + /// || content.peek(Token![self]) + /// || content.peek(Token![super]) + /// { + /// return Ok(PubVisibility { + /// pub_token, + /// restricted: Some(Restricted { + /// paren_token: parenthesized!(content in input), + /// in_token: None, + /// path: Path::from(content.call(Ident::parse_any)?), + /// }), + /// }); + /// } else if content.peek(Token![in]) { + /// return Ok(PubVisibility { + /// pub_token, + /// restricted: Some(Restricted { + /// paren_token: parenthesized!(content in input), + /// in_token: Some(content.parse()?), + /// path: content.call(Path::parse_mod_style)?, + /// }), + /// }); + /// } + /// } + /// + /// Ok(PubVisibility { + /// pub_token, + /// restricted: None, + /// }) + /// } + /// } + /// ``` + pub fn fork(&self) -> Self { + ParseBuffer { + scope: self.scope, + cell: self.cell.clone(), + marker: PhantomData, + // Not the parent's unexpected. Nothing cares whether the clone + // parses all the way unless we `advance_to`. + unexpected: Cell::new(Some(Rc::new(Cell::new(Unexpected::None)))), + } + } + + /// Triggers an error at the current position of the parse stream. + /// + /// # Example + /// + /// ``` + /// use syn::{Expr, Result, Token}; + /// use syn::parse::{Parse, ParseStream}; + /// + /// // Some kind of loop: `while` or `for` or `loop`. + /// struct Loop { + /// expr: Expr, + /// } + /// + /// impl Parse for Loop { + /// fn parse(input: ParseStream) -> Result<Self> { + /// if input.peek(Token![while]) + /// || input.peek(Token![for]) + /// || input.peek(Token![loop]) + /// { + /// Ok(Loop { + /// expr: input.parse()?, + /// }) + /// } else { + /// Err(input.error("expected some kind of loop")) + /// } + /// } + /// } + /// ``` + pub fn error<T: Display>(&self, message: T) -> Error { + error::new_at(self.scope, self.cursor(), message) + } + + /// Speculatively parses tokens from this parse stream, advancing the + /// position of this stream only if parsing succeeds. + /// + /// This is a powerful low-level API used for defining the `Parse` impls of + /// the basic built-in token types. It is not something that will be used + /// widely outside of the Syn codebase. + /// + /// # Example + /// + /// ``` + /// use proc_macro2::TokenTree; + /// use syn::Result; + /// use syn::parse::ParseStream; + /// + /// // This function advances the stream past the next occurrence of `@`. If + /// // no `@` is present in the stream, the stream position is unchanged and + /// // an error is returned. 
+ /// fn skip_past_next_at(input: ParseStream) -> Result<()> { + /// input.step(|cursor| { + /// let mut rest = *cursor; + /// while let Some((tt, next)) = rest.token_tree() { + /// match &tt { + /// TokenTree::Punct(punct) if punct.as_char() == '@' => { + /// return Ok(((), next)); + /// } + /// _ => rest = next, + /// } + /// } + /// Err(cursor.error("no `@` was found after this point")) + /// }) + /// } + /// # + /// # fn remainder_after_skipping_past_next_at( + /// # input: ParseStream, + /// # ) -> Result<proc_macro2::TokenStream> { + /// # skip_past_next_at(input)?; + /// # input.parse() + /// # } + /// # + /// # use syn::parse::Parser; + /// # let remainder = remainder_after_skipping_past_next_at + /// # .parse_str("a @ b c") + /// # .unwrap(); + /// # assert_eq!(remainder.to_string(), "b c"); + /// ``` + pub fn step<F, R>(&self, function: F) -> Result<R> + where + F: for<'c> FnOnce(StepCursor<'c, 'a>) -> Result<(R, Cursor<'c>)>, + { + // Since the user's function is required to work for any 'c, we know + // that the Cursor<'c> they return is either derived from the input + // StepCursor<'c, 'a> or from a Cursor<'static>. + // + // It would not be legal to write this function without the invariant + // lifetime 'c in StepCursor<'c, 'a>. If this function were written only + // in terms of 'a, the user could take our ParseBuffer<'a>, upcast it to + // a ParseBuffer<'short> which some shorter lifetime than 'a, invoke + // `step` on their ParseBuffer<'short> with a closure that returns + // Cursor<'short>, and we would wrongly write that Cursor<'short> into + // the Cell intended to hold Cursor<'a>. + // + // In some cases it may be necessary for R to contain a Cursor<'a>. + // Within Syn we solve this using `advance_step_cursor` which uses the + // existence of a StepCursor<'c, 'a> as proof that it is safe to cast + // from Cursor<'c> to Cursor<'a>. If needed outside of Syn, it would be + // safe to expose that API as a method on StepCursor. + let (node, rest) = function(StepCursor { + scope: self.scope, + cursor: self.cell.get(), + marker: PhantomData, + })?; + self.cell.set(rest); + Ok(node) + } + + /// Returns the `Span` of the next token in the parse stream, or + /// `Span::call_site()` if this parse stream has completely exhausted its + /// input `TokenStream`. + pub fn span(&self) -> Span { + let cursor = self.cursor(); + if cursor.eof() { + self.scope + } else { + crate::buffer::open_span_of_group(cursor) + } + } + + /// Provides low-level access to the token representation underlying this + /// parse stream. + /// + /// Cursors are immutable so no operations you perform against the cursor + /// will affect the state of this parse stream. 
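// Illustrative sketch, not part of the upstream syn sources: `cursor()` for a
// non-consuming look at the raw token, and `span()` to point an error at the
// current position. The function name is made up.
use syn::parse::ParseStream;
use syn::{LitInt, Result};

fn parse_small_int(input: ParseStream) -> Result<u8> {
    // Inspect the next token without advancing the stream.
    if input.cursor().literal().is_none() {
        return Err(syn::Error::new(input.span(), "expected an integer literal"));
    }
    let lit: LitInt = input.parse()?;
    lit.base10_parse::<u8>()
}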
+ pub fn cursor(&self) -> Cursor<'a> { + self.cell.get() + } + + fn check_unexpected(&self) -> Result<()> { + match inner_unexpected(self).1 { + Some(span) => Err(Error::new(span, "unexpected token")), + None => Ok(()), + } + } +} + +#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] +impl<T: Parse> Parse for Box<T> { + fn parse(input: ParseStream) -> Result<Self> { + input.parse().map(Box::new) + } +} + +#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] +impl<T: Parse + Token> Parse for Option<T> { + fn parse(input: ParseStream) -> Result<Self> { + if T::peek(input.cursor()) { + Ok(Some(input.parse()?)) + } else { + Ok(None) + } + } +} + +#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] +impl Parse for TokenStream { + fn parse(input: ParseStream) -> Result<Self> { + input.step(|cursor| Ok((cursor.token_stream(), Cursor::empty()))) + } +} + +#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] +impl Parse for TokenTree { + fn parse(input: ParseStream) -> Result<Self> { + input.step(|cursor| match cursor.token_tree() { + Some((tt, rest)) => Ok((tt, rest)), + None => Err(cursor.error("expected token tree")), + }) + } +} + +#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] +impl Parse for Group { + fn parse(input: ParseStream) -> Result<Self> { + input.step(|cursor| { + for delim in &[Delimiter::Parenthesis, Delimiter::Brace, Delimiter::Bracket] { + if let Some((inside, span, rest)) = cursor.group(*delim) { + let mut group = Group::new(*delim, inside.token_stream()); + group.set_span(span); + return Ok((group, rest)); + } + } + Err(cursor.error("expected group token")) + }) + } +} + +#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] +impl Parse for Punct { + fn parse(input: ParseStream) -> Result<Self> { + input.step(|cursor| match cursor.punct() { + Some((punct, rest)) => Ok((punct, rest)), + None => Err(cursor.error("expected punctuation token")), + }) + } +} + +#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] +impl Parse for Literal { + fn parse(input: ParseStream) -> Result<Self> { + input.step(|cursor| match cursor.literal() { + Some((literal, rest)) => Ok((literal, rest)), + None => Err(cursor.error("expected literal token")), + }) + } +} + +/// Parser that can parse Rust tokens into a particular syntax tree node. +/// +/// Refer to the [module documentation] for details about parsing in Syn. +/// +/// [module documentation]: self +/// +/// *This trait is available only if Syn is built with the `"parsing"` feature.* +pub trait Parser: Sized { + type Output; + + /// Parse a proc-macro2 token stream into the chosen syntax tree node. + /// + /// This function will check that the input is fully parsed. If there are + /// any unparsed tokens at the end of the stream, an error is returned. + fn parse2(self, tokens: TokenStream) -> Result<Self::Output>; + + /// Parse tokens of source code into the chosen syntax tree node. + /// + /// This function will check that the input is fully parsed. If there are + /// any unparsed tokens at the end of the stream, an error is returned. + /// + /// *This method is available only if Syn is built with both the `"parsing"` and + /// `"proc-macro"` features.* + #[cfg(all( + not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))), + feature = "proc-macro" + ))] + fn parse(self, tokens: proc_macro::TokenStream) -> Result<Self::Output> { + self.parse2(proc_macro2::TokenStream::from(tokens)) + } + + /// Parse a string of Rust code into the chosen syntax tree node. 
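// Illustrative sketch, not part of the upstream syn sources: the blanket impls
// above make optional tokens and "rest of the input" captures one-liners.
// The `Directive` type is made up.
use proc_macro2::TokenStream;
use syn::parse::{Parse, ParseStream};
use syn::{Ident, Result, Token};

struct Directive {
    mutability: Option<Token![mut]>, // via `Parse for Option<T: Token>`
    name: Ident,
    rest: TokenStream, // via `Parse for TokenStream`, consumes everything left
}

impl Parse for Directive {
    fn parse(input: ParseStream) -> Result<Self> {
        Ok(Directive {
            mutability: input.parse()?,
            name: input.parse()?,
            rest: input.parse()?,
        })
    }
}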
+ /// + /// This function will check that the input is fully parsed. If there are + /// any unparsed tokens at the end of the string, an error is returned. + /// + /// # Hygiene + /// + /// Every span in the resulting syntax tree will be set to resolve at the + /// macro call site. + fn parse_str(self, s: &str) -> Result<Self::Output> { + self.parse2(proc_macro2::TokenStream::from_str(s)?) + } + + // Not public API. + #[doc(hidden)] + #[cfg(any(feature = "full", feature = "derive"))] + fn __parse_scoped(self, scope: Span, tokens: TokenStream) -> Result<Self::Output> { + let _ = scope; + self.parse2(tokens) + } + + // Not public API. + #[doc(hidden)] + #[cfg(any(feature = "full", feature = "derive"))] + fn __parse_stream(self, input: ParseStream) -> Result<Self::Output> { + input.parse().and_then(|tokens| self.parse2(tokens)) + } +} + +fn tokens_to_parse_buffer(tokens: &TokenBuffer) -> ParseBuffer { + let scope = Span::call_site(); + let cursor = tokens.begin(); + let unexpected = Rc::new(Cell::new(Unexpected::None)); + new_parse_buffer(scope, cursor, unexpected) +} + +impl<F, T> Parser for F +where + F: FnOnce(ParseStream) -> Result<T>, +{ + type Output = T; + + fn parse2(self, tokens: TokenStream) -> Result<T> { + let buf = TokenBuffer::new2(tokens); + let state = tokens_to_parse_buffer(&buf); + let node = self(&state)?; + state.check_unexpected()?; + if let Some(unexpected_span) = span_of_unexpected_ignoring_nones(state.cursor()) { + Err(Error::new(unexpected_span, "unexpected token")) + } else { + Ok(node) + } + } + + #[cfg(any(feature = "full", feature = "derive"))] + fn __parse_scoped(self, scope: Span, tokens: TokenStream) -> Result<Self::Output> { + let buf = TokenBuffer::new2(tokens); + let cursor = buf.begin(); + let unexpected = Rc::new(Cell::new(Unexpected::None)); + let state = new_parse_buffer(scope, cursor, unexpected); + let node = self(&state)?; + state.check_unexpected()?; + if let Some(unexpected_span) = span_of_unexpected_ignoring_nones(state.cursor()) { + Err(Error::new(unexpected_span, "unexpected token")) + } else { + Ok(node) + } + } + + #[cfg(any(feature = "full", feature = "derive"))] + fn __parse_stream(self, input: ParseStream) -> Result<Self::Output> { + self(input) + } +} + +#[cfg(any(feature = "full", feature = "derive"))] +pub(crate) fn parse_scoped<F: Parser>(f: F, scope: Span, tokens: TokenStream) -> Result<F::Output> { + f.__parse_scoped(scope, tokens) +} + +#[cfg(any(feature = "full", feature = "derive"))] +pub(crate) fn parse_stream<F: Parser>(f: F, input: ParseStream) -> Result<F::Output> { + f.__parse_stream(input) +} + +/// An empty syntax tree node that consumes no tokens when parsed. +/// +/// This is useful for attribute macros that want to ensure they are not +/// provided any attribute args. +/// +/// ``` +/// # extern crate proc_macro; +/// # +/// use proc_macro::TokenStream; +/// use syn::parse_macro_input; +/// use syn::parse::Nothing; +/// +/// # const IGNORE: &str = stringify! { +/// #[proc_macro_attribute] +/// # }; +/// pub fn my_attr(args: TokenStream, input: TokenStream) -> TokenStream { +/// parse_macro_input!(args as Nothing); +/// +/// /* ... 
*/ +/// # "".parse().unwrap() +/// } +/// ``` +/// +/// ```text +/// error: unexpected token +/// --> src/main.rs:3:19 +/// | +/// 3 | #[my_attr(asdf)] +/// | ^^^^ +/// ``` +pub struct Nothing; + +impl Parse for Nothing { + fn parse(_input: ParseStream) -> Result<Self> { + Ok(Nothing) + } +} + +#[cfg(feature = "extra-traits")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for Nothing { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.write_str("Nothing") + } +} + +#[cfg(feature = "extra-traits")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Eq for Nothing {} + +#[cfg(feature = "extra-traits")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl PartialEq for Nothing { + fn eq(&self, _other: &Self) -> bool { + true + } +} + +#[cfg(feature = "extra-traits")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Hash for Nothing { + fn hash<H: Hasher>(&self, _state: &mut H) {} +} diff --git a/third_party/rust/syn/src/parse_macro_input.rs b/third_party/rust/syn/src/parse_macro_input.rs new file mode 100644 index 0000000000..6163cd70af --- /dev/null +++ b/third_party/rust/syn/src/parse_macro_input.rs @@ -0,0 +1,179 @@ +/// Parse the input TokenStream of a macro, triggering a compile error if the +/// tokens fail to parse. +/// +/// Refer to the [`parse` module] documentation for more details about parsing +/// in Syn. +/// +/// [`parse` module]: mod@crate::parse +/// +/// <br> +/// +/// # Intended usage +/// +/// This macro must be called from a function that returns +/// `proc_macro::TokenStream`. Usually this will be your proc macro entry point, +/// the function that has the #\[proc_macro\] / #\[proc_macro_derive\] / +/// #\[proc_macro_attribute\] attribute. +/// +/// ``` +/// # extern crate proc_macro; +/// # +/// use proc_macro::TokenStream; +/// use syn::{parse_macro_input, Result}; +/// use syn::parse::{Parse, ParseStream}; +/// +/// struct MyMacroInput { +/// /* ... */ +/// } +/// +/// impl Parse for MyMacroInput { +/// fn parse(input: ParseStream) -> Result<Self> { +/// /* ... */ +/// # Ok(MyMacroInput {}) +/// } +/// } +/// +/// # const IGNORE: &str = stringify! { +/// #[proc_macro] +/// # }; +/// pub fn my_macro(tokens: TokenStream) -> TokenStream { +/// let input = parse_macro_input!(tokens as MyMacroInput); +/// +/// /* ... */ +/// # "".parse().unwrap() +/// } +/// ``` +/// +/// <br> +/// +/// # Usage with Parser +/// +/// This macro can also be used with the [`Parser` trait] for types that have +/// multiple ways that they can be parsed. +/// +/// [`Parser` trait]: crate::parse::Parser +/// +/// ``` +/// # extern crate proc_macro; +/// # +/// # use proc_macro::TokenStream; +/// # use syn::{parse_macro_input, Result}; +/// # use syn::parse::ParseStream; +/// # +/// # struct MyMacroInput {} +/// # +/// impl MyMacroInput { +/// fn parse_alternate(input: ParseStream) -> Result<Self> { +/// /* ... */ +/// # Ok(MyMacroInput {}) +/// } +/// } +/// +/// # const IGNORE: &str = stringify! { +/// #[proc_macro] +/// # }; +/// pub fn my_macro(tokens: TokenStream) -> TokenStream { +/// let input = parse_macro_input!(tokens with MyMacroInput::parse_alternate); +/// +/// /* ... */ +/// # "".parse().unwrap() +/// } +/// ``` +/// +/// <br> +/// +/// # Expansion +/// +/// `parse_macro_input!($variable as $Type)` expands to something like: +/// +/// ```no_run +/// # extern crate proc_macro; +/// # +/// # macro_rules! 
doc_test { +/// # ($variable:ident as $Type:ty) => { +/// match syn::parse::<$Type>($variable) { +/// Ok(syntax_tree) => syntax_tree, +/// Err(err) => return proc_macro::TokenStream::from(err.to_compile_error()), +/// } +/// # }; +/// # } +/// # +/// # fn test(input: proc_macro::TokenStream) -> proc_macro::TokenStream { +/// # let _ = doc_test!(input as syn::Ident); +/// # proc_macro::TokenStream::new() +/// # } +/// ``` +#[macro_export] +#[cfg_attr(doc_cfg, doc(cfg(all(feature = "parsing", feature = "proc-macro"))))] +macro_rules! parse_macro_input { + ($tokenstream:ident as $ty:ty) => { + match $crate::parse_macro_input::parse::<$ty>($tokenstream) { + $crate::__private::Ok(data) => data, + $crate::__private::Err(err) => { + return $crate::__private::TokenStream::from(err.to_compile_error()); + } + } + }; + ($tokenstream:ident with $parser:path) => { + match $crate::parse::Parser::parse($parser, $tokenstream) { + $crate::__private::Ok(data) => data, + $crate::__private::Err(err) => { + return $crate::__private::TokenStream::from(err.to_compile_error()); + } + } + }; + ($tokenstream:ident) => { + $crate::parse_macro_input!($tokenstream as _) + }; +} + +//////////////////////////////////////////////////////////////////////////////// +// Can parse any type that implements Parse. + +use crate::parse::{Parse, ParseStream, Parser, Result}; +use proc_macro::TokenStream; + +// Not public API. +#[doc(hidden)] +pub fn parse<T: ParseMacroInput>(token_stream: TokenStream) -> Result<T> { + T::parse.parse(token_stream) +} + +// Not public API. +#[doc(hidden)] +pub trait ParseMacroInput: Sized { + fn parse(input: ParseStream) -> Result<Self>; +} + +impl<T: Parse> ParseMacroInput for T { + fn parse(input: ParseStream) -> Result<Self> { + <T as Parse>::parse(input) + } +} + +//////////////////////////////////////////////////////////////////////////////// +// Any other types that we want `parse_macro_input!` to be able to parse. + +#[cfg(any(feature = "full", feature = "derive"))] +use crate::AttributeArgs; + +#[cfg(any(feature = "full", feature = "derive"))] +impl ParseMacroInput for AttributeArgs { + fn parse(input: ParseStream) -> Result<Self> { + let mut metas = Vec::new(); + + loop { + if input.is_empty() { + break; + } + let value = input.parse()?; + metas.push(value); + if input.is_empty() { + break; + } + input.parse::<Token![,]>()?; + } + + Ok(metas) + } +} diff --git a/third_party/rust/syn/src/parse_quote.rs b/third_party/rust/syn/src/parse_quote.rs new file mode 100644 index 0000000000..8deae4e526 --- /dev/null +++ b/third_party/rust/syn/src/parse_quote.rs @@ -0,0 +1,167 @@ +/// Quasi-quotation macro that accepts input like the [`quote!`] macro but uses +/// type inference to figure out a return type for those tokens. +/// +/// [`quote!`]: https://docs.rs/quote/1.0/quote/index.html +/// +/// The return type can be any syntax tree node that implements the [`Parse`] +/// trait. +/// +/// [`Parse`]: crate::parse::Parse +/// +/// ``` +/// use quote::quote; +/// use syn::{parse_quote, Stmt}; +/// +/// fn main() { +/// let name = quote!(v); +/// let ty = quote!(u8); +/// +/// let stmt: Stmt = parse_quote! 
{ +/// let #name: #ty = Default::default(); +/// }; +/// +/// println!("{:#?}", stmt); +/// } +/// ``` +/// +/// *This macro is available only if Syn is built with the `"parsing"` feature, +/// although interpolation of syntax tree nodes into the quoted tokens is only +/// supported if Syn is built with the `"printing"` feature as well.* +/// +/// # Example +/// +/// The following helper function adds a bound `T: HeapSize` to every type +/// parameter `T` in the input generics. +/// +/// ``` +/// use syn::{parse_quote, Generics, GenericParam}; +/// +/// // Add a bound `T: HeapSize` to every type parameter T. +/// fn add_trait_bounds(mut generics: Generics) -> Generics { +/// for param in &mut generics.params { +/// if let GenericParam::Type(type_param) = param { +/// type_param.bounds.push(parse_quote!(HeapSize)); +/// } +/// } +/// generics +/// } +/// ``` +/// +/// # Special cases +/// +/// This macro can parse the following additional types as a special case even +/// though they do not implement the `Parse` trait. +/// +/// - [`Attribute`] — parses one attribute, allowing either outer like `#[...]` +/// or inner like `#![...]` +/// - [`Punctuated<T, P>`] — parses zero or more `T` separated by punctuation +/// `P` with optional trailing punctuation +/// - [`Vec<Stmt>`] — parses the same as `Block::parse_within` +/// +/// [`Vec<Stmt>`]: Block::parse_within +/// +/// # Panics +/// +/// Panics if the tokens fail to parse as the expected syntax tree type. The +/// caller is responsible for ensuring that the input tokens are syntactically +/// valid. +#[cfg_attr(doc_cfg, doc(cfg(all(feature = "parsing", feature = "printing"))))] +#[macro_export] +macro_rules! parse_quote { + ($($tt:tt)*) => { + $crate::parse_quote::parse($crate::__private::quote::quote!($($tt)*)) + }; +} + +/// This macro is [`parse_quote!`] + [`quote_spanned!`][quote::quote_spanned]. +/// +/// Please refer to each of their documentation. +/// +/// # Example +/// +/// ``` +/// use quote::{quote, quote_spanned}; +/// use syn::spanned::Spanned; +/// use syn::{parse_quote_spanned, ReturnType, Signature}; +/// +/// // Changes `fn()` to `fn() -> Pin<Box<dyn Future<Output = ()>>>`, +/// // and `fn() -> T` to `fn() -> Pin<Box<dyn Future<Output = T>>>`, +/// // without introducing any call_site() spans. +/// fn make_ret_pinned_future(sig: &mut Signature) { +/// let ret = match &sig.output { +/// ReturnType::Default => quote_spanned!(sig.paren_token.span=> ()), +/// ReturnType::Type(_, ret) => quote!(#ret), +/// }; +/// sig.output = parse_quote_spanned! {ret.span()=> +/// -> ::std::pin::Pin<::std::boxed::Box<dyn ::std::future::Future<Output = #ret>>> +/// }; +/// } +/// ``` +#[cfg_attr(doc_cfg, doc(cfg(all(feature = "parsing", feature = "printing"))))] +#[macro_export] +macro_rules! parse_quote_spanned { + ($span:expr=> $($tt:tt)*) => { + $crate::parse_quote::parse($crate::__private::quote::quote_spanned!($span=> $($tt)*)) + }; +} + +//////////////////////////////////////////////////////////////////////////////// +// Can parse any type that implements Parse. + +use crate::parse::{Parse, ParseStream, Parser, Result}; +use proc_macro2::TokenStream; + +// Not public API. +#[doc(hidden)] +pub fn parse<T: ParseQuote>(token_stream: TokenStream) -> T { + let parser = T::parse; + match parser.parse2(token_stream) { + Ok(t) => t, + Err(err) => panic!("{}", err), + } +} + +// Not public API. 
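// Illustrative sketch, not part of the upstream syn sources: the special cases
// listed above in action. Assumes the "full" feature for `Vec<Stmt>` and the
// "derive" or "full" feature for `Attribute`; the function name is made up.
use syn::punctuated::Punctuated;
use syn::{parse_quote, Attribute, Stmt, Token, TypeParamBound};

fn demo_special_cases() {
    // A single outer attribute, even though `Attribute` has no `Parse` impl.
    let attr: Attribute = parse_quote!(#[derive(Clone)]);

    // A punctuated sequence with `+` separators and no trailing punctuation.
    let bounds: Punctuated<TypeParamBound, Token![+]> = parse_quote!(Send + Sync + 'static);

    // Statements, parsed the same way as the body of a block.
    let stmts: Vec<Stmt> = parse_quote! {
        let x = 1;
        x + 1
    };

    let _ = (attr, bounds, stmts);
}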
+#[doc(hidden)] +pub trait ParseQuote: Sized { + fn parse(input: ParseStream) -> Result<Self>; +} + +impl<T: Parse> ParseQuote for T { + fn parse(input: ParseStream) -> Result<Self> { + <T as Parse>::parse(input) + } +} + +//////////////////////////////////////////////////////////////////////////////// +// Any other types that we want `parse_quote!` to be able to parse. + +use crate::punctuated::Punctuated; +#[cfg(any(feature = "full", feature = "derive"))] +use crate::{attr, Attribute}; +#[cfg(feature = "full")] +use crate::{Block, Stmt}; + +#[cfg(any(feature = "full", feature = "derive"))] +impl ParseQuote for Attribute { + fn parse(input: ParseStream) -> Result<Self> { + if input.peek(Token![#]) && input.peek2(Token![!]) { + attr::parsing::single_parse_inner(input) + } else { + attr::parsing::single_parse_outer(input) + } + } +} + +impl<T: Parse, P: Parse> ParseQuote for Punctuated<T, P> { + fn parse(input: ParseStream) -> Result<Self> { + Self::parse_terminated(input) + } +} + +#[cfg(feature = "full")] +impl ParseQuote for Vec<Stmt> { + fn parse(input: ParseStream) -> Result<Self> { + Block::parse_within(input) + } +} diff --git a/third_party/rust/syn/src/pat.rs b/third_party/rust/syn/src/pat.rs new file mode 100644 index 0000000000..b279186aa0 --- /dev/null +++ b/third_party/rust/syn/src/pat.rs @@ -0,0 +1,927 @@ +use super::*; +use crate::punctuated::Punctuated; +use proc_macro2::TokenStream; + +ast_enum_of_structs! { + /// A pattern in a local binding, function signature, match expression, or + /// various other places. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + /// + /// # Syntax tree enum + /// + /// This type is a [syntax tree enum]. + /// + /// [syntax tree enum]: Expr#syntax-tree-enums + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + #[cfg_attr(not(syn_no_non_exhaustive), non_exhaustive)] + pub enum Pat { + /// A box pattern: `box v`. + Box(PatBox), + + /// A pattern that binds a new variable: `ref mut binding @ SUBPATTERN`. + Ident(PatIdent), + + /// A literal pattern: `0`. + /// + /// This holds an `Expr` rather than a `Lit` because negative numbers + /// are represented as an `Expr::Unary`. + Lit(PatLit), + + /// A macro in pattern position. + Macro(PatMacro), + + /// A pattern that matches any one of a set of cases. + Or(PatOr), + + /// A path pattern like `Color::Red`, optionally qualified with a + /// self-type. + /// + /// Unqualified path patterns can legally refer to variants, structs, + /// constants or associated constants. Qualified path patterns like + /// `<A>::B::C` and `<A as Trait>::B::C` can only legally refer to + /// associated constants. + Path(PatPath), + + /// A range pattern: `1..=2`. + Range(PatRange), + + /// A reference pattern: `&mut var`. + Reference(PatReference), + + /// The dots in a tuple or slice pattern: `[0, 1, ..]` + Rest(PatRest), + + /// A dynamically sized slice pattern: `[a, b, ref i @ .., y, z]`. + Slice(PatSlice), + + /// A struct or struct variant pattern: `Variant { x, y, .. }`. + Struct(PatStruct), + + /// A tuple pattern: `(a, b)`. + Tuple(PatTuple), + + /// A tuple struct or tuple variant pattern: `Variant(x, y, .., z)`. + TupleStruct(PatTupleStruct), + + /// A type ascription pattern: `foo: f64`. + Type(PatType), + + /// Tokens in pattern position not interpreted by Syn. + Verbatim(TokenStream), + + /// A pattern that matches any value: `_`. + Wild(PatWild), + + // Not public API. 
+ // + // For testing exhaustiveness in downstream code, use the following idiom: + // + // match pat { + // Pat::Box(pat) => {...} + // Pat::Ident(pat) => {...} + // ... + // Pat::Wild(pat) => {...} + // + // #[cfg_attr(test, deny(non_exhaustive_omitted_patterns))] + // _ => { /* some sane fallback */ } + // } + // + // This way we fail your tests but don't break your library when adding + // a variant. You will be notified by a test failure when a variant is + // added, so that you can add code to handle it, but your library will + // continue to compile and work for downstream users in the interim. + #[cfg(syn_no_non_exhaustive)] + #[doc(hidden)] + __NonExhaustive, + } +} + +ast_struct! { + /// A box pattern: `box v`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct PatBox { + pub attrs: Vec<Attribute>, + pub box_token: Token![box], + pub pat: Box<Pat>, + } +} + +ast_struct! { + /// A pattern that binds a new variable: `ref mut binding @ SUBPATTERN`. + /// + /// It may also be a unit struct or struct variant (e.g. `None`), or a + /// constant; these cannot be distinguished syntactically. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct PatIdent { + pub attrs: Vec<Attribute>, + pub by_ref: Option<Token![ref]>, + pub mutability: Option<Token![mut]>, + pub ident: Ident, + pub subpat: Option<(Token![@], Box<Pat>)>, + } +} + +ast_struct! { + /// A literal pattern: `0`. + /// + /// This holds an `Expr` rather than a `Lit` because negative numbers + /// are represented as an `Expr::Unary`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct PatLit { + pub attrs: Vec<Attribute>, + pub expr: Box<Expr>, + } +} + +ast_struct! { + /// A macro in pattern position. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct PatMacro { + pub attrs: Vec<Attribute>, + pub mac: Macro, + } +} + +ast_struct! { + /// A pattern that matches any one of a set of cases. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct PatOr { + pub attrs: Vec<Attribute>, + pub leading_vert: Option<Token![|]>, + pub cases: Punctuated<Pat, Token![|]>, + } +} + +ast_struct! { + /// A path pattern like `Color::Red`, optionally qualified with a + /// self-type. + /// + /// Unqualified path patterns can legally refer to variants, structs, + /// constants or associated constants. Qualified path patterns like + /// `<A>::B::C` and `<A as Trait>::B::C` can only legally refer to + /// associated constants. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct PatPath { + pub attrs: Vec<Attribute>, + pub qself: Option<QSelf>, + pub path: Path, + } +} + +ast_struct! { + /// A range pattern: `1..=2`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct PatRange { + pub attrs: Vec<Attribute>, + pub lo: Box<Expr>, + pub limits: RangeLimits, + pub hi: Box<Expr>, + } +} + +ast_struct! { + /// A reference pattern: `&mut var`. 
+ /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct PatReference { + pub attrs: Vec<Attribute>, + pub and_token: Token![&], + pub mutability: Option<Token![mut]>, + pub pat: Box<Pat>, + } +} + +ast_struct! { + /// The dots in a tuple or slice pattern: `[0, 1, ..]` + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct PatRest { + pub attrs: Vec<Attribute>, + pub dot2_token: Token![..], + } +} + +ast_struct! { + /// A dynamically sized slice pattern: `[a, b, ref i @ .., y, z]`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct PatSlice { + pub attrs: Vec<Attribute>, + pub bracket_token: token::Bracket, + pub elems: Punctuated<Pat, Token![,]>, + } +} + +ast_struct! { + /// A struct or struct variant pattern: `Variant { x, y, .. }`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct PatStruct { + pub attrs: Vec<Attribute>, + pub path: Path, + pub brace_token: token::Brace, + pub fields: Punctuated<FieldPat, Token![,]>, + pub dot2_token: Option<Token![..]>, + } +} + +ast_struct! { + /// A tuple pattern: `(a, b)`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct PatTuple { + pub attrs: Vec<Attribute>, + pub paren_token: token::Paren, + pub elems: Punctuated<Pat, Token![,]>, + } +} + +ast_struct! { + /// A tuple struct or tuple variant pattern: `Variant(x, y, .., z)`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct PatTupleStruct { + pub attrs: Vec<Attribute>, + pub path: Path, + pub pat: PatTuple, + } +} + +ast_struct! { + /// A type ascription pattern: `foo: f64`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct PatType { + pub attrs: Vec<Attribute>, + pub pat: Box<Pat>, + pub colon_token: Token![:], + pub ty: Box<Type>, + } +} + +ast_struct! { + /// A pattern that matches any value: `_`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct PatWild { + pub attrs: Vec<Attribute>, + pub underscore_token: Token![_], + } +} + +ast_struct! { + /// A single field in a struct pattern. + /// + /// Patterns like the fields of Foo `{ x, ref y, ref mut z }` are treated + /// the same as `x: x, y: ref y, z: ref mut z` but there is no colon token. 
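+    ///
+    /// Illustrative example (assuming the `"full"` and `"parsing"` features):
+    /// `syn::parse_str::<Pat>("Foo { x, ref y }")` yields a `Pat::Struct`
+    /// whose `fields` are two `FieldPat` values, both with
+    /// `colon_token: None` and the binding stored in `pat`.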
+ /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct FieldPat { + pub attrs: Vec<Attribute>, + pub member: Member, + pub colon_token: Option<Token![:]>, + pub pat: Box<Pat>, + } +} + +#[cfg(feature = "parsing")] +pub mod parsing { + use super::*; + use crate::ext::IdentExt; + use crate::parse::{Parse, ParseBuffer, ParseStream, Result}; + use crate::path; + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for Pat { + fn parse(input: ParseStream) -> Result<Self> { + let begin = input.fork(); + let lookahead = input.lookahead1(); + if { + let ahead = input.fork(); + ahead.parse::<Option<Ident>>()?.is_some() + && (ahead.peek(Token![::]) + || ahead.peek(Token![!]) + || ahead.peek(token::Brace) + || ahead.peek(token::Paren) + || ahead.peek(Token![..]) + && ahead.parse::<RangeLimits>().is_ok() + && !(ahead.is_empty() || ahead.peek(Token![,]))) + } || { + let ahead = input.fork(); + ahead.parse::<Option<Token![self]>>()?.is_some() && ahead.peek(Token![::]) + } || lookahead.peek(Token![::]) + || lookahead.peek(Token![<]) + || input.peek(Token![Self]) + || input.peek(Token![super]) + || input.peek(Token![crate]) + { + pat_path_or_macro_or_struct_or_range(input) + } else if lookahead.peek(Token![_]) { + input.call(pat_wild).map(Pat::Wild) + } else if input.peek(Token![box]) { + input.call(pat_box).map(Pat::Box) + } else if input.peek(Token![-]) || lookahead.peek(Lit) || lookahead.peek(Token![const]) + { + pat_lit_or_range(input) + } else if lookahead.peek(Token![ref]) + || lookahead.peek(Token![mut]) + || input.peek(Token![self]) + || input.peek(Ident) + { + input.call(pat_ident).map(Pat::Ident) + } else if lookahead.peek(Token![&]) { + input.call(pat_reference).map(Pat::Reference) + } else if lookahead.peek(token::Paren) { + input.call(pat_tuple).map(Pat::Tuple) + } else if lookahead.peek(token::Bracket) { + input.call(pat_slice).map(Pat::Slice) + } else if lookahead.peek(Token![..]) && !input.peek(Token![...]) { + pat_range_half_open(input, begin) + } else if lookahead.peek(Token![const]) { + input.call(pat_const).map(Pat::Verbatim) + } else { + Err(lookahead.error()) + } + } + } + + fn pat_path_or_macro_or_struct_or_range(input: ParseStream) -> Result<Pat> { + let begin = input.fork(); + let (qself, path) = path::parsing::qpath(input, true)?; + + if qself.is_none() && input.peek(Token![!]) && !input.peek(Token![!=]) { + let mut contains_arguments = false; + for segment in &path.segments { + match segment.arguments { + PathArguments::None => {} + PathArguments::AngleBracketed(_) | PathArguments::Parenthesized(_) => { + contains_arguments = true; + } + } + } + + if !contains_arguments { + let bang_token: Token![!] 
= input.parse()?; + let (delimiter, tokens) = mac::parse_delimiter(input)?; + return Ok(Pat::Macro(PatMacro { + attrs: Vec::new(), + mac: Macro { + path, + bang_token, + delimiter, + tokens, + }, + })); + } + } + + if input.peek(token::Brace) { + let pat = pat_struct(begin.fork(), input, path)?; + if qself.is_some() { + Ok(Pat::Verbatim(verbatim::between(begin, input))) + } else { + Ok(pat) + } + } else if input.peek(token::Paren) { + let pat = pat_tuple_struct(input, path)?; + if qself.is_some() { + Ok(Pat::Verbatim(verbatim::between(begin, input))) + } else { + Ok(Pat::TupleStruct(pat)) + } + } else if input.peek(Token![..]) { + pat_range(input, begin, qself, path) + } else { + Ok(Pat::Path(PatPath { + attrs: Vec::new(), + qself, + path, + })) + } + } + + fn pat_wild(input: ParseStream) -> Result<PatWild> { + Ok(PatWild { + attrs: Vec::new(), + underscore_token: input.parse()?, + }) + } + + fn pat_box(input: ParseStream) -> Result<PatBox> { + Ok(PatBox { + attrs: Vec::new(), + box_token: input.parse()?, + pat: input.parse()?, + }) + } + + fn pat_ident(input: ParseStream) -> Result<PatIdent> { + Ok(PatIdent { + attrs: Vec::new(), + by_ref: input.parse()?, + mutability: input.parse()?, + ident: input.call(Ident::parse_any)?, + subpat: { + if input.peek(Token![@]) { + let at_token: Token![@] = input.parse()?; + let subpat: Pat = input.parse()?; + Some((at_token, Box::new(subpat))) + } else { + None + } + }, + }) + } + + fn pat_tuple_struct(input: ParseStream, path: Path) -> Result<PatTupleStruct> { + Ok(PatTupleStruct { + attrs: Vec::new(), + path, + pat: input.call(pat_tuple)?, + }) + } + + fn pat_struct(begin: ParseBuffer, input: ParseStream, path: Path) -> Result<Pat> { + let content; + let brace_token = braced!(content in input); + + let mut fields = Punctuated::new(); + let mut dot2_token = None; + while !content.is_empty() { + let attrs = content.call(Attribute::parse_outer)?; + if content.peek(Token![..]) { + dot2_token = Some(content.parse()?); + if !attrs.is_empty() { + return Ok(Pat::Verbatim(verbatim::between(begin, input))); + } + break; + } + let mut value = content.call(field_pat)?; + value.attrs = attrs; + fields.push_value(value); + if content.is_empty() { + break; + } + let punct: Token![,] = content.parse()?; + fields.push_punct(punct); + } + + Ok(Pat::Struct(PatStruct { + attrs: Vec::new(), + path, + brace_token, + fields, + dot2_token, + })) + } + + impl Member { + fn is_unnamed(&self) -> bool { + match *self { + Member::Named(_) => false, + Member::Unnamed(_) => true, + } + } + } + + fn field_pat(input: ParseStream) -> Result<FieldPat> { + let boxed: Option<Token![box]> = input.parse()?; + let by_ref: Option<Token![ref]> = input.parse()?; + let mutability: Option<Token![mut]> = input.parse()?; + let member: Member = input.parse()?; + + if boxed.is_none() && by_ref.is_none() && mutability.is_none() && input.peek(Token![:]) + || member.is_unnamed() + { + return Ok(FieldPat { + attrs: Vec::new(), + member, + colon_token: input.parse()?, + pat: Box::new(multi_pat_with_leading_vert(input)?), + }); + } + + let ident = match member { + Member::Named(ident) => ident, + Member::Unnamed(_) => unreachable!(), + }; + + let mut pat = Pat::Ident(PatIdent { + attrs: Vec::new(), + by_ref, + mutability, + ident: ident.clone(), + subpat: None, + }); + + if let Some(boxed) = boxed { + pat = Pat::Box(PatBox { + attrs: Vec::new(), + box_token: boxed, + pat: Box::new(pat), + }); + } + + Ok(FieldPat { + attrs: Vec::new(), + member: Member::Named(ident), + colon_token: None, + pat: 
Box::new(pat), + }) + } + + fn pat_range( + input: ParseStream, + begin: ParseBuffer, + qself: Option<QSelf>, + path: Path, + ) -> Result<Pat> { + let limits: RangeLimits = input.parse()?; + let hi = input.call(pat_lit_expr)?; + if let Some(hi) = hi { + Ok(Pat::Range(PatRange { + attrs: Vec::new(), + lo: Box::new(Expr::Path(ExprPath { + attrs: Vec::new(), + qself, + path, + })), + limits, + hi, + })) + } else { + Ok(Pat::Verbatim(verbatim::between(begin, input))) + } + } + + fn pat_range_half_open(input: ParseStream, begin: ParseBuffer) -> Result<Pat> { + let limits: RangeLimits = input.parse()?; + let hi = input.call(pat_lit_expr)?; + if hi.is_some() { + Ok(Pat::Verbatim(verbatim::between(begin, input))) + } else { + match limits { + RangeLimits::HalfOpen(dot2_token) => Ok(Pat::Rest(PatRest { + attrs: Vec::new(), + dot2_token, + })), + RangeLimits::Closed(_) => Err(input.error("expected range upper bound")), + } + } + } + + fn pat_tuple(input: ParseStream) -> Result<PatTuple> { + let content; + let paren_token = parenthesized!(content in input); + + let mut elems = Punctuated::new(); + while !content.is_empty() { + let value = multi_pat_with_leading_vert(&content)?; + elems.push_value(value); + if content.is_empty() { + break; + } + let punct = content.parse()?; + elems.push_punct(punct); + } + + Ok(PatTuple { + attrs: Vec::new(), + paren_token, + elems, + }) + } + + fn pat_reference(input: ParseStream) -> Result<PatReference> { + Ok(PatReference { + attrs: Vec::new(), + and_token: input.parse()?, + mutability: input.parse()?, + pat: input.parse()?, + }) + } + + fn pat_lit_or_range(input: ParseStream) -> Result<Pat> { + let begin = input.fork(); + let lo = input.call(pat_lit_expr)?.unwrap(); + if input.peek(Token![..]) { + let limits: RangeLimits = input.parse()?; + let hi = input.call(pat_lit_expr)?; + if let Some(hi) = hi { + Ok(Pat::Range(PatRange { + attrs: Vec::new(), + lo, + limits, + hi, + })) + } else { + Ok(Pat::Verbatim(verbatim::between(begin, input))) + } + } else if let Expr::Verbatim(verbatim) = *lo { + Ok(Pat::Verbatim(verbatim)) + } else { + Ok(Pat::Lit(PatLit { + attrs: Vec::new(), + expr: lo, + })) + } + } + + fn pat_lit_expr(input: ParseStream) -> Result<Option<Box<Expr>>> { + if input.is_empty() + || input.peek(Token![|]) + || input.peek(Token![=]) + || input.peek(Token![:]) && !input.peek(Token![::]) + || input.peek(Token![,]) + || input.peek(Token![;]) + { + return Ok(None); + } + + let neg: Option<Token![-]> = input.parse()?; + + let lookahead = input.lookahead1(); + let expr = if lookahead.peek(Lit) { + Expr::Lit(input.parse()?) + } else if lookahead.peek(Ident) + || lookahead.peek(Token![::]) + || lookahead.peek(Token![<]) + || lookahead.peek(Token![self]) + || lookahead.peek(Token![Self]) + || lookahead.peek(Token![super]) + || lookahead.peek(Token![crate]) + { + Expr::Path(input.parse()?) + } else if lookahead.peek(Token![const]) { + Expr::Verbatim(input.call(expr::parsing::expr_const)?) 
+ } else { + return Err(lookahead.error()); + }; + + Ok(Some(Box::new(if let Some(neg) = neg { + Expr::Unary(ExprUnary { + attrs: Vec::new(), + op: UnOp::Neg(neg), + expr: Box::new(expr), + }) + } else { + expr + }))) + } + + fn pat_slice(input: ParseStream) -> Result<PatSlice> { + let content; + let bracket_token = bracketed!(content in input); + + let mut elems = Punctuated::new(); + while !content.is_empty() { + let value = multi_pat_with_leading_vert(&content)?; + elems.push_value(value); + if content.is_empty() { + break; + } + let punct = content.parse()?; + elems.push_punct(punct); + } + + Ok(PatSlice { + attrs: Vec::new(), + bracket_token, + elems, + }) + } + + fn pat_const(input: ParseStream) -> Result<TokenStream> { + let begin = input.fork(); + input.parse::<Token![const]>()?; + + let content; + braced!(content in input); + content.call(Attribute::parse_inner)?; + content.call(Block::parse_within)?; + + Ok(verbatim::between(begin, input)) + } + + pub fn multi_pat(input: ParseStream) -> Result<Pat> { + multi_pat_impl(input, None) + } + + pub fn multi_pat_with_leading_vert(input: ParseStream) -> Result<Pat> { + let leading_vert: Option<Token![|]> = input.parse()?; + multi_pat_impl(input, leading_vert) + } + + fn multi_pat_impl(input: ParseStream, leading_vert: Option<Token![|]>) -> Result<Pat> { + let mut pat: Pat = input.parse()?; + if leading_vert.is_some() + || input.peek(Token![|]) && !input.peek(Token![||]) && !input.peek(Token![|=]) + { + let mut cases = Punctuated::new(); + cases.push_value(pat); + while input.peek(Token![|]) && !input.peek(Token![||]) && !input.peek(Token![|=]) { + let punct = input.parse()?; + cases.push_punct(punct); + let pat: Pat = input.parse()?; + cases.push_value(pat); + } + pat = Pat::Or(PatOr { + attrs: Vec::new(), + leading_vert, + cases, + }); + } + Ok(pat) + } +} + +#[cfg(feature = "printing")] +mod printing { + use super::*; + use crate::attr::FilterAttrs; + use proc_macro2::TokenStream; + use quote::{ToTokens, TokenStreamExt}; + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for PatWild { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + self.underscore_token.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for PatIdent { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + self.by_ref.to_tokens(tokens); + self.mutability.to_tokens(tokens); + self.ident.to_tokens(tokens); + if let Some((at_token, subpat)) = &self.subpat { + at_token.to_tokens(tokens); + subpat.to_tokens(tokens); + } + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for PatStruct { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + self.path.to_tokens(tokens); + self.brace_token.surround(tokens, |tokens| { + self.fields.to_tokens(tokens); + // NOTE: We need a comma before the dot2 token if it is present. 
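+                // Illustrative: `PatStruct` for `Foo { x, .. }` stores `x`
+                // without trailing punctuation, so printing `fields` and then
+                // `..` directly would emit `Foo { x .. }`, which rustc does
+                // not accept; hence the synthesized comma below.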
+ if !self.fields.empty_or_trailing() && self.dot2_token.is_some() { + <Token![,]>::default().to_tokens(tokens); + } + self.dot2_token.to_tokens(tokens); + }); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for PatTupleStruct { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + self.path.to_tokens(tokens); + self.pat.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for PatType { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + self.pat.to_tokens(tokens); + self.colon_token.to_tokens(tokens); + self.ty.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for PatPath { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + path::printing::print_path(tokens, &self.qself, &self.path); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for PatTuple { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + self.paren_token.surround(tokens, |tokens| { + self.elems.to_tokens(tokens); + }); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for PatBox { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + self.box_token.to_tokens(tokens); + self.pat.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for PatReference { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + self.and_token.to_tokens(tokens); + self.mutability.to_tokens(tokens); + self.pat.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for PatRest { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + self.dot2_token.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for PatLit { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + self.expr.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for PatRange { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + self.lo.to_tokens(tokens); + self.limits.to_tokens(tokens); + self.hi.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for PatSlice { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + self.bracket_token.surround(tokens, |tokens| { + self.elems.to_tokens(tokens); + }); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for PatMacro { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + self.mac.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for PatOr { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + self.leading_vert.to_tokens(tokens); + self.cases.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for FieldPat { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + if let Some(colon_token) = &self.colon_token { + self.member.to_tokens(tokens); + colon_token.to_tokens(tokens); + } + 
self.pat.to_tokens(tokens); + } + } +} diff --git a/third_party/rust/syn/src/path.rs b/third_party/rust/syn/src/path.rs new file mode 100644 index 0000000000..6cdb43ac54 --- /dev/null +++ b/third_party/rust/syn/src/path.rs @@ -0,0 +1,854 @@ +use super::*; +use crate::punctuated::Punctuated; + +ast_struct! { + /// A path at which a named item is exported (e.g. `std::collections::HashMap`). + /// + /// *This type is available only if Syn is built with the `"derive"` or `"full"` + /// feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct Path { + pub leading_colon: Option<Token![::]>, + pub segments: Punctuated<PathSegment, Token![::]>, + } +} + +impl<T> From<T> for Path +where + T: Into<PathSegment>, +{ + fn from(segment: T) -> Self { + let mut path = Path { + leading_colon: None, + segments: Punctuated::new(), + }; + path.segments.push_value(segment.into()); + path + } +} + +ast_struct! { + /// A segment of a path together with any path arguments on that segment. + /// + /// *This type is available only if Syn is built with the `"derive"` or `"full"` + /// feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct PathSegment { + pub ident: Ident, + pub arguments: PathArguments, + } +} + +impl<T> From<T> for PathSegment +where + T: Into<Ident>, +{ + fn from(ident: T) -> Self { + PathSegment { + ident: ident.into(), + arguments: PathArguments::None, + } + } +} + +ast_enum! { + /// Angle bracketed or parenthesized arguments of a path segment. + /// + /// *This type is available only if Syn is built with the `"derive"` or `"full"` + /// feature.* + /// + /// ## Angle bracketed + /// + /// The `<'a, T>` in `std::slice::iter<'a, T>`. + /// + /// ## Parenthesized + /// + /// The `(A, B) -> C` in `Fn(A, B) -> C`. + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub enum PathArguments { + None, + /// The `<'a, T>` in `std::slice::iter<'a, T>`. + AngleBracketed(AngleBracketedGenericArguments), + /// The `(A, B) -> C` in `Fn(A, B) -> C`. + Parenthesized(ParenthesizedGenericArguments), + } +} + +impl Default for PathArguments { + fn default() -> Self { + PathArguments::None + } +} + +impl PathArguments { + pub fn is_empty(&self) -> bool { + match self { + PathArguments::None => true, + PathArguments::AngleBracketed(bracketed) => bracketed.args.is_empty(), + PathArguments::Parenthesized(_) => false, + } + } + + pub fn is_none(&self) -> bool { + match self { + PathArguments::None => true, + PathArguments::AngleBracketed(_) | PathArguments::Parenthesized(_) => false, + } + } +} + +ast_enum! { + /// An individual generic argument, like `'a`, `T`, or `Item = T`. + /// + /// *This type is available only if Syn is built with the `"derive"` or `"full"` + /// feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub enum GenericArgument { + /// A lifetime argument. + Lifetime(Lifetime), + /// A type argument. + Type(Type), + /// A const expression. Must be inside of a block. + /// + /// NOTE: Identity expressions are represented as Type arguments, as + /// they are indistinguishable syntactically. + Const(Expr), + /// A binding (equality constraint) on an associated type: the `Item = + /// u8` in `Iterator<Item = u8>`. + Binding(Binding), + /// An associated type bound: `Iterator<Item: Display>`. + Constraint(Constraint), + } +} + +ast_struct! { + /// Angle bracketed arguments of a path segment: the `<K, V>` in `HashMap<K, + /// V>`. 
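+    ///
+    /// Illustrative note: the `colon2_token` field below is the optional
+    /// turbofish `::`, present in an expression-position path such as
+    /// `HashMap::<K, V>::new()` and absent in type position.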
+ /// + /// *This type is available only if Syn is built with the `"derive"` or `"full"` + /// feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct AngleBracketedGenericArguments { + pub colon2_token: Option<Token![::]>, + pub lt_token: Token![<], + pub args: Punctuated<GenericArgument, Token![,]>, + pub gt_token: Token![>], + } +} + +ast_struct! { + /// A binding (equality constraint) on an associated type: `Item = u8`. + /// + /// *This type is available only if Syn is built with the `"derive"` or `"full"` + /// feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct Binding { + pub ident: Ident, + pub eq_token: Token![=], + pub ty: Type, + } +} + +ast_struct! { + /// An associated type bound: `Iterator<Item: Display>`. + /// + /// *This type is available only if Syn is built with the `"derive"` or `"full"` + /// feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct Constraint { + pub ident: Ident, + pub colon_token: Token![:], + pub bounds: Punctuated<TypeParamBound, Token![+]>, + } +} + +ast_struct! { + /// Arguments of a function path segment: the `(A, B) -> C` in `Fn(A,B) -> + /// C`. + /// + /// *This type is available only if Syn is built with the `"derive"` or `"full"` + /// feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct ParenthesizedGenericArguments { + pub paren_token: token::Paren, + /// `(A, B)` + pub inputs: Punctuated<Type, Token![,]>, + /// `C` + pub output: ReturnType, + } +} + +ast_struct! { + /// The explicit Self type in a qualified path: the `T` in `<T as + /// Display>::fmt`. + /// + /// The actual path, including the trait and the associated item, is stored + /// separately. The `position` field represents the index of the associated + /// item qualified with this Self type. 
+ /// + /// ```text + /// <Vec<T> as a::b::Trait>::AssociatedItem + /// ^~~~~~ ~~~~~~~~~~~~~~^ + /// ty position = 3 + /// + /// <Vec<T>>::AssociatedItem + /// ^~~~~~ ^ + /// ty position = 0 + /// ``` + /// + /// *This type is available only if Syn is built with the `"derive"` or `"full"` + /// feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct QSelf { + pub lt_token: Token![<], + pub ty: Box<Type>, + pub position: usize, + pub as_token: Option<Token![as]>, + pub gt_token: Token![>], + } +} + +#[cfg(feature = "parsing")] +pub mod parsing { + use super::*; + + use crate::ext::IdentExt; + use crate::parse::{Parse, ParseStream, Result}; + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for Path { + fn parse(input: ParseStream) -> Result<Self> { + Self::parse_helper(input, false) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for GenericArgument { + fn parse(input: ParseStream) -> Result<Self> { + if input.peek(Lifetime) && !input.peek2(Token![+]) { + return Ok(GenericArgument::Lifetime(input.parse()?)); + } + + if input.peek(Ident) && input.peek2(Token![=]) { + let ident: Ident = input.parse()?; + let eq_token: Token![=] = input.parse()?; + + let ty = if input.peek(Lit) { + let begin = input.fork(); + input.parse::<Lit>()?; + Type::Verbatim(verbatim::between(begin, input)) + } else if input.peek(token::Brace) { + let begin = input.fork(); + + #[cfg(feature = "full")] + { + input.parse::<ExprBlock>()?; + } + + #[cfg(not(feature = "full"))] + { + let content; + braced!(content in input); + content.parse::<Expr>()?; + } + + Type::Verbatim(verbatim::between(begin, input)) + } else { + input.parse()? + }; + + return Ok(GenericArgument::Binding(Binding { + ident, + eq_token, + ty, + })); + } + + #[cfg(feature = "full")] + { + if input.peek(Ident) && input.peek2(Token![:]) && !input.peek2(Token![::]) { + return Ok(GenericArgument::Constraint(input.parse()?)); + } + } + + if input.peek(Lit) || input.peek(token::Brace) { + return const_argument(input).map(GenericArgument::Const); + } + + #[cfg(feature = "full")] + let begin = input.fork(); + + let argument: Type = input.parse()?; + + #[cfg(feature = "full")] + { + if match &argument { + Type::Path(argument) + if argument.qself.is_none() + && argument.path.leading_colon.is_none() + && argument.path.segments.len() == 1 => + { + match argument.path.segments[0].arguments { + PathArguments::AngleBracketed(_) => true, + _ => false, + } + } + _ => false, + } && if input.peek(Token![=]) { + input.parse::<Token![=]>()?; + input.parse::<Type>()?; + true + } else if input.peek(Token![:]) { + input.parse::<Token![:]>()?; + input.call(constraint_bounds)?; + true + } else { + false + } { + let verbatim = verbatim::between(begin, input); + return Ok(GenericArgument::Type(Type::Verbatim(verbatim))); + } + } + + Ok(GenericArgument::Type(argument)) + } + } + + pub fn const_argument(input: ParseStream) -> Result<Expr> { + let lookahead = input.lookahead1(); + + if input.peek(Lit) { + let lit = input.parse()?; + return Ok(Expr::Lit(lit)); + } + + #[cfg(feature = "full")] + { + if input.peek(Ident) { + let ident: Ident = input.parse()?; + return Ok(Expr::Path(ExprPath { + attrs: Vec::new(), + qself: None, + path: Path::from(ident), + })); + } + } + + if input.peek(token::Brace) { + #[cfg(feature = "full")] + { + let block: ExprBlock = input.parse()?; + return Ok(Expr::Block(block)); + } + + #[cfg(not(feature = "full"))] + { + let begin = input.fork(); + let content; + 
braced!(content in input); + content.parse::<Expr>()?; + let verbatim = verbatim::between(begin, input); + return Ok(Expr::Verbatim(verbatim)); + } + } + + Err(lookahead.error()) + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for AngleBracketedGenericArguments { + fn parse(input: ParseStream) -> Result<Self> { + Ok(AngleBracketedGenericArguments { + colon2_token: input.parse()?, + lt_token: input.parse()?, + args: { + let mut args = Punctuated::new(); + loop { + if input.peek(Token![>]) { + break; + } + let value = input.parse()?; + args.push_value(value); + if input.peek(Token![>]) { + break; + } + let punct = input.parse()?; + args.push_punct(punct); + } + args + }, + gt_token: input.parse()?, + }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ParenthesizedGenericArguments { + fn parse(input: ParseStream) -> Result<Self> { + let content; + Ok(ParenthesizedGenericArguments { + paren_token: parenthesized!(content in input), + inputs: content.parse_terminated(Type::parse)?, + output: input.call(ReturnType::without_plus)?, + }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for PathSegment { + fn parse(input: ParseStream) -> Result<Self> { + Self::parse_helper(input, false) + } + } + + impl PathSegment { + fn parse_helper(input: ParseStream, expr_style: bool) -> Result<Self> { + if input.peek(Token![super]) || input.peek(Token![self]) || input.peek(Token![crate]) { + let ident = input.call(Ident::parse_any)?; + return Ok(PathSegment::from(ident)); + } + + let ident = if input.peek(Token![Self]) { + input.call(Ident::parse_any)? + } else { + input.parse()? + }; + + if !expr_style && input.peek(Token![<]) && !input.peek(Token![<=]) + || input.peek(Token![::]) && input.peek3(Token![<]) + { + Ok(PathSegment { + ident, + arguments: PathArguments::AngleBracketed(input.parse()?), + }) + } else { + Ok(PathSegment::from(ident)) + } + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for Binding { + fn parse(input: ParseStream) -> Result<Self> { + Ok(Binding { + ident: input.parse()?, + eq_token: input.parse()?, + ty: input.parse()?, + }) + } + } + + #[cfg(feature = "full")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for Constraint { + fn parse(input: ParseStream) -> Result<Self> { + Ok(Constraint { + ident: input.parse()?, + colon_token: input.parse()?, + bounds: constraint_bounds(input)?, + }) + } + } + + #[cfg(feature = "full")] + fn constraint_bounds(input: ParseStream) -> Result<Punctuated<TypeParamBound, Token![+]>> { + let mut bounds = Punctuated::new(); + loop { + if input.peek(Token![,]) || input.peek(Token![>]) { + break; + } + let value = input.parse()?; + bounds.push_value(value); + if !input.peek(Token![+]) { + break; + } + let punct = input.parse()?; + bounds.push_punct(punct); + } + Ok(bounds) + } + + impl Path { + /// Parse a `Path` containing no path arguments on any of its segments. + /// + /// *This function is available only if Syn is built with the `"parsing"` + /// feature.* + /// + /// # Example + /// + /// ``` + /// use syn::{Path, Result, Token}; + /// use syn::parse::{Parse, ParseStream}; + /// + /// // A simplified single `use` statement like: + /// // + /// // use std::collections::HashMap; + /// // + /// // Note that generic parameters are not allowed in a `use` statement + /// // so the following must not be accepted. 
+ /// // + /// // use a::<b>::c; + /// struct SingleUse { + /// use_token: Token![use], + /// path: Path, + /// } + /// + /// impl Parse for SingleUse { + /// fn parse(input: ParseStream) -> Result<Self> { + /// Ok(SingleUse { + /// use_token: input.parse()?, + /// path: input.call(Path::parse_mod_style)?, + /// }) + /// } + /// } + /// ``` + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + pub fn parse_mod_style(input: ParseStream) -> Result<Self> { + Ok(Path { + leading_colon: input.parse()?, + segments: { + let mut segments = Punctuated::new(); + loop { + if !input.peek(Ident) + && !input.peek(Token![super]) + && !input.peek(Token![self]) + && !input.peek(Token![Self]) + && !input.peek(Token![crate]) + { + break; + } + let ident = Ident::parse_any(input)?; + segments.push_value(PathSegment::from(ident)); + if !input.peek(Token![::]) { + break; + } + let punct = input.parse()?; + segments.push_punct(punct); + } + if segments.is_empty() { + return Err(input.error("expected path")); + } else if segments.trailing_punct() { + return Err(input.error("expected path segment")); + } + segments + }, + }) + } + + /// Determines whether this is a path of length 1 equal to the given + /// ident. + /// + /// For them to compare equal, it must be the case that: + /// + /// - the path has no leading colon, + /// - the number of path segments is 1, + /// - the first path segment has no angle bracketed or parenthesized + /// path arguments, and + /// - the ident of the first path segment is equal to the given one. + /// + /// *This function is available only if Syn is built with the `"parsing"` + /// feature.* + /// + /// # Example + /// + /// ``` + /// use syn::{Attribute, Error, Meta, NestedMeta, Result}; + /// # use std::iter::FromIterator; + /// + /// fn get_serde_meta_items(attr: &Attribute) -> Result<Vec<NestedMeta>> { + /// if attr.path.is_ident("serde") { + /// match attr.parse_meta()? { + /// Meta::List(meta) => Ok(Vec::from_iter(meta.nested)), + /// bad => Err(Error::new_spanned(bad, "unrecognized attribute")), + /// } + /// } else { + /// Ok(Vec::new()) + /// } + /// } + /// ``` + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + pub fn is_ident<I: ?Sized>(&self, ident: &I) -> bool + where + Ident: PartialEq<I>, + { + match self.get_ident() { + Some(id) => id == ident, + None => false, + } + } + + /// If this path consists of a single ident, returns the ident. + /// + /// A path is considered an ident if: + /// + /// - the path has no leading colon, + /// - the number of path segments is 1, and + /// - the first path segment has no angle bracketed or parenthesized + /// path arguments. 
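+        ///
+        /// Illustrative example: a `Path` parsed from `"Result"` returns
+        /// `Some` of the ident `Result` here, while one parsed from
+        /// `"std::result::Result"` returns `None`.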
+ /// + /// *This function is available only if Syn is built with the `"parsing"` + /// feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + pub fn get_ident(&self) -> Option<&Ident> { + if self.leading_colon.is_none() + && self.segments.len() == 1 + && self.segments[0].arguments.is_none() + { + Some(&self.segments[0].ident) + } else { + None + } + } + + pub(crate) fn parse_helper(input: ParseStream, expr_style: bool) -> Result<Self> { + let mut path = Path { + leading_colon: input.parse()?, + segments: { + let mut segments = Punctuated::new(); + let value = PathSegment::parse_helper(input, expr_style)?; + segments.push_value(value); + segments + }, + }; + Path::parse_rest(input, &mut path, expr_style)?; + Ok(path) + } + + pub(crate) fn parse_rest( + input: ParseStream, + path: &mut Self, + expr_style: bool, + ) -> Result<()> { + while input.peek(Token![::]) && !input.peek3(token::Paren) { + let punct: Token![::] = input.parse()?; + path.segments.push_punct(punct); + let value = PathSegment::parse_helper(input, expr_style)?; + path.segments.push_value(value); + } + Ok(()) + } + } + + pub fn qpath(input: ParseStream, expr_style: bool) -> Result<(Option<QSelf>, Path)> { + if input.peek(Token![<]) { + let lt_token: Token![<] = input.parse()?; + let this: Type = input.parse()?; + let path = if input.peek(Token![as]) { + let as_token: Token![as] = input.parse()?; + let path: Path = input.parse()?; + Some((as_token, path)) + } else { + None + }; + let gt_token: Token![>] = input.parse()?; + let colon2_token: Token![::] = input.parse()?; + let mut rest = Punctuated::new(); + loop { + let path = PathSegment::parse_helper(input, expr_style)?; + rest.push_value(path); + if !input.peek(Token![::]) { + break; + } + let punct: Token![::] = input.parse()?; + rest.push_punct(punct); + } + let (position, as_token, path) = match path { + Some((as_token, mut path)) => { + let pos = path.segments.len(); + path.segments.push_punct(colon2_token); + path.segments.extend(rest.into_pairs()); + (pos, Some(as_token), path) + } + None => { + let path = Path { + leading_colon: Some(colon2_token), + segments: rest, + }; + (0, None, path) + } + }; + let qself = QSelf { + lt_token, + ty: Box::new(this), + position, + as_token, + gt_token, + }; + Ok((Some(qself), path)) + } else { + let path = Path::parse_helper(input, expr_style)?; + Ok((None, path)) + } + } +} + +#[cfg(feature = "printing")] +pub(crate) mod printing { + use super::*; + use crate::print::TokensOrDefault; + use proc_macro2::TokenStream; + use quote::ToTokens; + use std::cmp; + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for Path { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.leading_colon.to_tokens(tokens); + self.segments.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for PathSegment { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.ident.to_tokens(tokens); + self.arguments.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for PathArguments { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + PathArguments::None => {} + PathArguments::AngleBracketed(arguments) => { + arguments.to_tokens(tokens); + } + PathArguments::Parenthesized(arguments) => { + arguments.to_tokens(tokens); + } + } + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for GenericArgument { + #[allow(clippy::match_same_arms)] + fn to_tokens(&self, tokens: &mut TokenStream) { + match 
self { + GenericArgument::Lifetime(lt) => lt.to_tokens(tokens), + GenericArgument::Type(ty) => ty.to_tokens(tokens), + GenericArgument::Const(e) => match *e { + Expr::Lit(_) => e.to_tokens(tokens), + + // NOTE: We should probably support parsing blocks with only + // expressions in them without the full feature for const + // generics. + #[cfg(feature = "full")] + Expr::Block(_) => e.to_tokens(tokens), + + // ERROR CORRECTION: Add braces to make sure that the + // generated code is valid. + _ => token::Brace::default().surround(tokens, |tokens| { + e.to_tokens(tokens); + }), + }, + GenericArgument::Binding(tb) => tb.to_tokens(tokens), + GenericArgument::Constraint(tc) => tc.to_tokens(tokens), + } + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for AngleBracketedGenericArguments { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.colon2_token.to_tokens(tokens); + self.lt_token.to_tokens(tokens); + + // Print lifetimes before types/consts/bindings, regardless of their + // order in self.args. + let mut trailing_or_empty = true; + for param in self.args.pairs() { + match **param.value() { + GenericArgument::Lifetime(_) => { + param.to_tokens(tokens); + trailing_or_empty = param.punct().is_some(); + } + GenericArgument::Type(_) + | GenericArgument::Const(_) + | GenericArgument::Binding(_) + | GenericArgument::Constraint(_) => {} + } + } + for param in self.args.pairs() { + match **param.value() { + GenericArgument::Type(_) + | GenericArgument::Const(_) + | GenericArgument::Binding(_) + | GenericArgument::Constraint(_) => { + if !trailing_or_empty { + <Token![,]>::default().to_tokens(tokens); + } + param.to_tokens(tokens); + trailing_or_empty = param.punct().is_some(); + } + GenericArgument::Lifetime(_) => {} + } + } + + self.gt_token.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for Binding { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.ident.to_tokens(tokens); + self.eq_token.to_tokens(tokens); + self.ty.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for Constraint { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.ident.to_tokens(tokens); + self.colon_token.to_tokens(tokens); + self.bounds.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ParenthesizedGenericArguments { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.paren_token.surround(tokens, |tokens| { + self.inputs.to_tokens(tokens); + }); + self.output.to_tokens(tokens); + } + } + + pub(crate) fn print_path(tokens: &mut TokenStream, qself: &Option<QSelf>, path: &Path) { + let qself = match qself { + Some(qself) => qself, + None => { + path.to_tokens(tokens); + return; + } + }; + qself.lt_token.to_tokens(tokens); + qself.ty.to_tokens(tokens); + + let pos = cmp::min(qself.position, path.segments.len()); + let mut segments = path.segments.pairs(); + if pos > 0 { + TokensOrDefault(&qself.as_token).to_tokens(tokens); + path.leading_colon.to_tokens(tokens); + for (i, segment) in segments.by_ref().take(pos).enumerate() { + if i + 1 == pos { + segment.value().to_tokens(tokens); + qself.gt_token.to_tokens(tokens); + segment.punct().to_tokens(tokens); + } else { + segment.to_tokens(tokens); + } + } + } else { + qself.gt_token.to_tokens(tokens); + path.leading_colon.to_tokens(tokens); + } + for segment in segments { + segment.to_tokens(tokens); + } + } +} diff --git a/third_party/rust/syn/src/print.rs 
b/third_party/rust/syn/src/print.rs new file mode 100644 index 0000000000..da4e07e3b3 --- /dev/null +++ b/third_party/rust/syn/src/print.rs @@ -0,0 +1,16 @@ +use proc_macro2::TokenStream; +use quote::ToTokens; + +pub struct TokensOrDefault<'a, T: 'a>(pub &'a Option<T>); + +impl<'a, T> ToTokens for TokensOrDefault<'a, T> +where + T: ToTokens + Default, +{ + fn to_tokens(&self, tokens: &mut TokenStream) { + match self.0 { + Some(t) => t.to_tokens(tokens), + None => T::default().to_tokens(tokens), + } + } +} diff --git a/third_party/rust/syn/src/punctuated.rs b/third_party/rust/syn/src/punctuated.rs new file mode 100644 index 0000000000..b7d0185e80 --- /dev/null +++ b/third_party/rust/syn/src/punctuated.rs @@ -0,0 +1,1087 @@ +//! A punctuated sequence of syntax tree nodes separated by punctuation. +//! +//! Lots of things in Rust are punctuated sequences. +//! +//! - The fields of a struct are `Punctuated<Field, Token![,]>`. +//! - The segments of a path are `Punctuated<PathSegment, Token![::]>`. +//! - The bounds on a generic parameter are `Punctuated<TypeParamBound, +//! Token![+]>`. +//! - The arguments to a function call are `Punctuated<Expr, Token![,]>`. +//! +//! This module provides a common representation for these punctuated sequences +//! in the form of the [`Punctuated<T, P>`] type. We store a vector of pairs of +//! syntax tree node + punctuation, where every node in the sequence is followed +//! by punctuation except for possibly the final one. +//! +//! [`Punctuated<T, P>`]: Punctuated +//! +//! ```text +//! a_function_call(arg1, arg2, arg3); +//! ~~~~^ ~~~~^ ~~~~ +//! ``` + +#[cfg(feature = "extra-traits")] +use std::fmt::{self, Debug}; +#[cfg(feature = "extra-traits")] +use std::hash::{Hash, Hasher}; +#[cfg(any(feature = "full", feature = "derive"))] +use std::iter; +use std::iter::FromIterator; +use std::ops::{Index, IndexMut}; +use std::option; +use std::slice; +use std::vec; + +use crate::drops::{NoDrop, TrivialDrop}; +#[cfg(feature = "parsing")] +use crate::parse::{Parse, ParseStream, Result}; +#[cfg(feature = "parsing")] +use crate::token::Token; + +/// A punctuated sequence of syntax tree nodes of type `T` separated by +/// punctuation of type `P`. +/// +/// Refer to the [module documentation] for details about punctuated sequences. +/// +/// [module documentation]: self +pub struct Punctuated<T, P> { + inner: Vec<(T, P)>, + last: Option<Box<T>>, +} + +impl<T, P> Punctuated<T, P> { + /// Creates an empty punctuated sequence. + #[cfg(not(syn_no_const_vec_new))] + pub const fn new() -> Self { + Punctuated { + inner: Vec::new(), + last: None, + } + } + + /// Creates an empty punctuated sequence. + #[cfg(syn_no_const_vec_new)] + pub fn new() -> Self { + Punctuated { + inner: Vec::new(), + last: None, + } + } + + /// Determines whether this punctuated sequence is empty, meaning it + /// contains no syntax tree nodes or punctuation. + pub fn is_empty(&self) -> bool { + self.inner.len() == 0 && self.last.is_none() + } + + /// Returns the number of syntax tree nodes in this punctuated sequence. + /// + /// This is the number of nodes of type `T`, not counting the punctuation of + /// type `P`. + pub fn len(&self) -> usize { + self.inner.len() + if self.last.is_some() { 1 } else { 0 } + } + + /// Borrows the first element in this sequence. + pub fn first(&self) -> Option<&T> { + self.iter().next() + } + + /// Mutably borrows the first element in this sequence. 
+ pub fn first_mut(&mut self) -> Option<&mut T> { + self.iter_mut().next() + } + + /// Borrows the last element in this sequence. + pub fn last(&self) -> Option<&T> { + self.iter().next_back() + } + + /// Mutably borrows the last element in this sequence. + pub fn last_mut(&mut self) -> Option<&mut T> { + self.iter_mut().next_back() + } + + /// Returns an iterator over borrowed syntax tree nodes of type `&T`. + pub fn iter(&self) -> Iter<T> { + Iter { + inner: Box::new(NoDrop::new(PrivateIter { + inner: self.inner.iter(), + last: self.last.as_ref().map(Box::as_ref).into_iter(), + })), + } + } + + /// Returns an iterator over mutably borrowed syntax tree nodes of type + /// `&mut T`. + pub fn iter_mut(&mut self) -> IterMut<T> { + IterMut { + inner: Box::new(NoDrop::new(PrivateIterMut { + inner: self.inner.iter_mut(), + last: self.last.as_mut().map(Box::as_mut).into_iter(), + })), + } + } + + /// Returns an iterator over the contents of this sequence as borrowed + /// punctuated pairs. + pub fn pairs(&self) -> Pairs<T, P> { + Pairs { + inner: self.inner.iter(), + last: self.last.as_ref().map(Box::as_ref).into_iter(), + } + } + + /// Returns an iterator over the contents of this sequence as mutably + /// borrowed punctuated pairs. + pub fn pairs_mut(&mut self) -> PairsMut<T, P> { + PairsMut { + inner: self.inner.iter_mut(), + last: self.last.as_mut().map(Box::as_mut).into_iter(), + } + } + + /// Returns an iterator over the contents of this sequence as owned + /// punctuated pairs. + pub fn into_pairs(self) -> IntoPairs<T, P> { + IntoPairs { + inner: self.inner.into_iter(), + last: self.last.map(|t| *t).into_iter(), + } + } + + /// Appends a syntax tree node onto the end of this punctuated sequence. The + /// sequence must previously have a trailing punctuation. + /// + /// Use [`push`] instead if the punctuated sequence may or may not already + /// have trailing punctuation. + /// + /// [`push`]: Punctuated::push + /// + /// # Panics + /// + /// Panics if the sequence does not already have a trailing punctuation when + /// this method is called. + pub fn push_value(&mut self, value: T) { + assert!( + self.empty_or_trailing(), + "Punctuated::push_value: cannot push value if Punctuated is missing trailing punctuation", + ); + + self.last = Some(Box::new(value)); + } + + /// Appends a trailing punctuation onto the end of this punctuated sequence. + /// The sequence must be non-empty and must not already have trailing + /// punctuation. + /// + /// # Panics + /// + /// Panics if the sequence is empty or already has a trailing punctuation. + pub fn push_punct(&mut self, punctuation: P) { + assert!( + self.last.is_some(), + "Punctuated::push_punct: cannot push punctuation if Punctuated is empty or already has trailing punctuation", + ); + + let last = self.last.take().unwrap(); + self.inner.push((*last, punctuation)); + } + + /// Removes the last punctuated pair from this sequence, or `None` if the + /// sequence is empty. + pub fn pop(&mut self) -> Option<Pair<T, P>> { + if self.last.is_some() { + self.last.take().map(|t| Pair::End(*t)) + } else { + self.inner.pop().map(|(t, p)| Pair::Punctuated(t, p)) + } + } + + /// Determines whether this punctuated sequence ends with a trailing + /// punctuation. + pub fn trailing_punct(&self) -> bool { + self.last.is_none() && !self.is_empty() + } + + /// Returns true if either this `Punctuated` is empty, or it has a trailing + /// punctuation. + /// + /// Equivalent to `punctuated.is_empty() || punctuated.trailing_punct()`. 
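+    ///
+    /// Illustrative: after `push_value(a)` the sequence holds a bare `a` and
+    /// this returns `false`; a following `push_punct(comma)` makes it `a,`
+    /// and this returns `true` again, as it does for an empty sequence.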
+ pub fn empty_or_trailing(&self) -> bool { + self.last.is_none() + } + + /// Appends a syntax tree node onto the end of this punctuated sequence. + /// + /// If there is not a trailing punctuation in this sequence when this method + /// is called, the default value of punctuation type `P` is inserted before + /// the given value of type `T`. + pub fn push(&mut self, value: T) + where + P: Default, + { + if !self.empty_or_trailing() { + self.push_punct(Default::default()); + } + self.push_value(value); + } + + /// Inserts an element at position `index`. + /// + /// # Panics + /// + /// Panics if `index` is greater than the number of elements previously in + /// this punctuated sequence. + pub fn insert(&mut self, index: usize, value: T) + where + P: Default, + { + assert!( + index <= self.len(), + "Punctuated::insert: index out of range", + ); + + if index == self.len() { + self.push(value); + } else { + self.inner.insert(index, (value, Default::default())); + } + } + + /// Clears the sequence of all values and punctuation, making it empty. + pub fn clear(&mut self) { + self.inner.clear(); + self.last = None; + } + + /// Parses zero or more occurrences of `T` separated by punctuation of type + /// `P`, with optional trailing punctuation. + /// + /// Parsing continues until the end of this parse stream. The entire content + /// of this parse stream must consist of `T` and `P`. + /// + /// *This function is available only if Syn is built with the `"parsing"` + /// feature.* + #[cfg(feature = "parsing")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + pub fn parse_terminated(input: ParseStream) -> Result<Self> + where + T: Parse, + P: Parse, + { + Self::parse_terminated_with(input, T::parse) + } + + /// Parses zero or more occurrences of `T` using the given parse function, + /// separated by punctuation of type `P`, with optional trailing + /// punctuation. + /// + /// Like [`parse_terminated`], the entire content of this stream is expected + /// to be parsed. + /// + /// [`parse_terminated`]: Punctuated::parse_terminated + /// + /// *This function is available only if Syn is built with the `"parsing"` + /// feature.* + #[cfg(feature = "parsing")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + pub fn parse_terminated_with( + input: ParseStream, + parser: fn(ParseStream) -> Result<T>, + ) -> Result<Self> + where + P: Parse, + { + let mut punctuated = Punctuated::new(); + + loop { + if input.is_empty() { + break; + } + let value = parser(input)?; + punctuated.push_value(value); + if input.is_empty() { + break; + } + let punct = input.parse()?; + punctuated.push_punct(punct); + } + + Ok(punctuated) + } + + /// Parses one or more occurrences of `T` separated by punctuation of type + /// `P`, not accepting trailing punctuation. + /// + /// Parsing continues as long as punctuation `P` is present at the head of + /// the stream. This method returns upon parsing a `T` and observing that it + /// is not followed by a `P`, even if there are remaining tokens in the + /// stream. 
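+    ///
+    /// Illustrative example: inside a larger parser, calling
+    /// `Punctuated::<Ident, Token![,]>::parse_separated_nonempty` on the
+    /// stream `a, b, c; rest` consumes `a, b, c` and leaves `; rest`
+    /// unparsed for the caller.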
+ /// + /// *This function is available only if Syn is built with the `"parsing"` + /// feature.* + #[cfg(feature = "parsing")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + pub fn parse_separated_nonempty(input: ParseStream) -> Result<Self> + where + T: Parse, + P: Token + Parse, + { + Self::parse_separated_nonempty_with(input, T::parse) + } + + /// Parses one or more occurrences of `T` using the given parse function, + /// separated by punctuation of type `P`, not accepting trailing + /// punctuation. + /// + /// Like [`parse_separated_nonempty`], may complete early without parsing + /// the entire content of this stream. + /// + /// [`parse_separated_nonempty`]: Punctuated::parse_separated_nonempty + /// + /// *This function is available only if Syn is built with the `"parsing"` + /// feature.* + #[cfg(feature = "parsing")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + pub fn parse_separated_nonempty_with( + input: ParseStream, + parser: fn(ParseStream) -> Result<T>, + ) -> Result<Self> + where + P: Token + Parse, + { + let mut punctuated = Punctuated::new(); + + loop { + let value = parser(input)?; + punctuated.push_value(value); + if !P::peek(input.cursor()) { + break; + } + let punct = input.parse()?; + punctuated.push_punct(punct); + } + + Ok(punctuated) + } +} + +#[cfg(feature = "clone-impls")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl<T, P> Clone for Punctuated<T, P> +where + T: Clone, + P: Clone, +{ + fn clone(&self) -> Self { + Punctuated { + inner: self.inner.clone(), + last: self.last.clone(), + } + } +} + +#[cfg(feature = "extra-traits")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl<T, P> Eq for Punctuated<T, P> +where + T: Eq, + P: Eq, +{ +} + +#[cfg(feature = "extra-traits")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl<T, P> PartialEq for Punctuated<T, P> +where + T: PartialEq, + P: PartialEq, +{ + fn eq(&self, other: &Self) -> bool { + let Punctuated { inner, last } = self; + *inner == other.inner && *last == other.last + } +} + +#[cfg(feature = "extra-traits")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl<T, P> Hash for Punctuated<T, P> +where + T: Hash, + P: Hash, +{ + fn hash<H: Hasher>(&self, state: &mut H) { + let Punctuated { inner, last } = self; + inner.hash(state); + last.hash(state); + } +} + +#[cfg(feature = "extra-traits")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl<T: Debug, P: Debug> Debug for Punctuated<T, P> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let mut list = f.debug_list(); + for (t, p) in &self.inner { + list.entry(t); + list.entry(p); + } + if let Some(last) = &self.last { + list.entry(last); + } + list.finish() + } +} + +impl<T, P> FromIterator<T> for Punctuated<T, P> +where + P: Default, +{ + fn from_iter<I: IntoIterator<Item = T>>(i: I) -> Self { + let mut ret = Punctuated::new(); + ret.extend(i); + ret + } +} + +impl<T, P> Extend<T> for Punctuated<T, P> +where + P: Default, +{ + fn extend<I: IntoIterator<Item = T>>(&mut self, i: I) { + for value in i { + self.push(value); + } + } +} + +impl<T, P> FromIterator<Pair<T, P>> for Punctuated<T, P> { + fn from_iter<I: IntoIterator<Item = Pair<T, P>>>(i: I) -> Self { + let mut ret = Punctuated::new(); + ret.extend(i); + ret + } +} + +impl<T, P> Extend<Pair<T, P>> for Punctuated<T, P> { + fn extend<I: IntoIterator<Item = Pair<T, P>>>(&mut self, i: I) { + assert!( + self.empty_or_trailing(), + "Punctuated::extend: Punctuated is not empty or does not have a trailing 
punctuation", + ); + + let mut nomore = false; + for pair in i { + if nomore { + panic!("Punctuated extended with items after a Pair::End"); + } + match pair { + Pair::Punctuated(a, b) => self.inner.push((a, b)), + Pair::End(a) => { + self.last = Some(Box::new(a)); + nomore = true; + } + } + } + } +} + +impl<T, P> IntoIterator for Punctuated<T, P> { + type Item = T; + type IntoIter = IntoIter<T>; + + fn into_iter(self) -> Self::IntoIter { + let mut elements = Vec::with_capacity(self.len()); + elements.extend(self.inner.into_iter().map(|pair| pair.0)); + elements.extend(self.last.map(|t| *t)); + + IntoIter { + inner: elements.into_iter(), + } + } +} + +impl<'a, T, P> IntoIterator for &'a Punctuated<T, P> { + type Item = &'a T; + type IntoIter = Iter<'a, T>; + + fn into_iter(self) -> Self::IntoIter { + Punctuated::iter(self) + } +} + +impl<'a, T, P> IntoIterator for &'a mut Punctuated<T, P> { + type Item = &'a mut T; + type IntoIter = IterMut<'a, T>; + + fn into_iter(self) -> Self::IntoIter { + Punctuated::iter_mut(self) + } +} + +impl<T, P> Default for Punctuated<T, P> { + fn default() -> Self { + Punctuated::new() + } +} + +/// An iterator over borrowed pairs of type `Pair<&T, &P>`. +/// +/// Refer to the [module documentation] for details about punctuated sequences. +/// +/// [module documentation]: self +pub struct Pairs<'a, T: 'a, P: 'a> { + inner: slice::Iter<'a, (T, P)>, + last: option::IntoIter<&'a T>, +} + +impl<'a, T, P> Iterator for Pairs<'a, T, P> { + type Item = Pair<&'a T, &'a P>; + + fn next(&mut self) -> Option<Self::Item> { + self.inner + .next() + .map(|(t, p)| Pair::Punctuated(t, p)) + .or_else(|| self.last.next().map(Pair::End)) + } + + fn size_hint(&self) -> (usize, Option<usize>) { + (self.len(), Some(self.len())) + } +} + +impl<'a, T, P> DoubleEndedIterator for Pairs<'a, T, P> { + fn next_back(&mut self) -> Option<Self::Item> { + self.last + .next() + .map(Pair::End) + .or_else(|| self.inner.next_back().map(|(t, p)| Pair::Punctuated(t, p))) + } +} + +impl<'a, T, P> ExactSizeIterator for Pairs<'a, T, P> { + fn len(&self) -> usize { + self.inner.len() + self.last.len() + } +} + +// No Clone bound on T or P. +impl<'a, T, P> Clone for Pairs<'a, T, P> { + fn clone(&self) -> Self { + Pairs { + inner: self.inner.clone(), + last: self.last.clone(), + } + } +} + +/// An iterator over mutably borrowed pairs of type `Pair<&mut T, &mut P>`. +/// +/// Refer to the [module documentation] for details about punctuated sequences. +/// +/// [module documentation]: self +pub struct PairsMut<'a, T: 'a, P: 'a> { + inner: slice::IterMut<'a, (T, P)>, + last: option::IntoIter<&'a mut T>, +} + +impl<'a, T, P> Iterator for PairsMut<'a, T, P> { + type Item = Pair<&'a mut T, &'a mut P>; + + fn next(&mut self) -> Option<Self::Item> { + self.inner + .next() + .map(|(t, p)| Pair::Punctuated(t, p)) + .or_else(|| self.last.next().map(Pair::End)) + } + + fn size_hint(&self) -> (usize, Option<usize>) { + (self.len(), Some(self.len())) + } +} + +impl<'a, T, P> DoubleEndedIterator for PairsMut<'a, T, P> { + fn next_back(&mut self) -> Option<Self::Item> { + self.last + .next() + .map(Pair::End) + .or_else(|| self.inner.next_back().map(|(t, p)| Pair::Punctuated(t, p))) + } +} + +impl<'a, T, P> ExactSizeIterator for PairsMut<'a, T, P> { + fn len(&self) -> usize { + self.inner.len() + self.last.len() + } +} + +/// An iterator over owned pairs of type `Pair<T, P>`. +/// +/// Refer to the [module documentation] for details about punctuated sequences. 
+/// +/// [module documentation]: self +pub struct IntoPairs<T, P> { + inner: vec::IntoIter<(T, P)>, + last: option::IntoIter<T>, +} + +impl<T, P> Iterator for IntoPairs<T, P> { + type Item = Pair<T, P>; + + fn next(&mut self) -> Option<Self::Item> { + self.inner + .next() + .map(|(t, p)| Pair::Punctuated(t, p)) + .or_else(|| self.last.next().map(Pair::End)) + } + + fn size_hint(&self) -> (usize, Option<usize>) { + (self.len(), Some(self.len())) + } +} + +impl<T, P> DoubleEndedIterator for IntoPairs<T, P> { + fn next_back(&mut self) -> Option<Self::Item> { + self.last + .next() + .map(Pair::End) + .or_else(|| self.inner.next_back().map(|(t, p)| Pair::Punctuated(t, p))) + } +} + +impl<T, P> ExactSizeIterator for IntoPairs<T, P> { + fn len(&self) -> usize { + self.inner.len() + self.last.len() + } +} + +impl<T, P> Clone for IntoPairs<T, P> +where + T: Clone, + P: Clone, +{ + fn clone(&self) -> Self { + IntoPairs { + inner: self.inner.clone(), + last: self.last.clone(), + } + } +} + +/// An iterator over owned values of type `T`. +/// +/// Refer to the [module documentation] for details about punctuated sequences. +/// +/// [module documentation]: self +pub struct IntoIter<T> { + inner: vec::IntoIter<T>, +} + +impl<T> Iterator for IntoIter<T> { + type Item = T; + + fn next(&mut self) -> Option<Self::Item> { + self.inner.next() + } + + fn size_hint(&self) -> (usize, Option<usize>) { + (self.len(), Some(self.len())) + } +} + +impl<T> DoubleEndedIterator for IntoIter<T> { + fn next_back(&mut self) -> Option<Self::Item> { + self.inner.next_back() + } +} + +impl<T> ExactSizeIterator for IntoIter<T> { + fn len(&self) -> usize { + self.inner.len() + } +} + +impl<T> Clone for IntoIter<T> +where + T: Clone, +{ + fn clone(&self) -> Self { + IntoIter { + inner: self.inner.clone(), + } + } +} + +/// An iterator over borrowed values of type `&T`. +/// +/// Refer to the [module documentation] for details about punctuated sequences. +/// +/// [module documentation]: self +pub struct Iter<'a, T: 'a> { + // The `Item = &'a T` needs to be specified to support rustc 1.31 and older. + // On modern compilers we would be able to write just IterTrait<'a, T> where + // Item can be inferred unambiguously from the supertrait. + inner: Box<NoDrop<dyn IterTrait<'a, T, Item = &'a T> + 'a>>, +} + +trait IterTrait<'a, T: 'a>: + DoubleEndedIterator<Item = &'a T> + ExactSizeIterator<Item = &'a T> +{ + fn clone_box(&self) -> Box<NoDrop<dyn IterTrait<'a, T, Item = &'a T> + 'a>>; +} + +struct PrivateIter<'a, T: 'a, P: 'a> { + inner: slice::Iter<'a, (T, P)>, + last: option::IntoIter<&'a T>, +} + +impl<'a, T, P> TrivialDrop for PrivateIter<'a, T, P> +where + slice::Iter<'a, (T, P)>: TrivialDrop, + option::IntoIter<&'a T>: TrivialDrop, +{ +} + +#[cfg(any(feature = "full", feature = "derive"))] +pub(crate) fn empty_punctuated_iter<'a, T>() -> Iter<'a, T> { + Iter { + inner: Box::new(NoDrop::new(iter::empty())), + } +} + +// No Clone bound on T. 
+impl<'a, T> Clone for Iter<'a, T> { + fn clone(&self) -> Self { + Iter { + inner: self.inner.clone_box(), + } + } +} + +impl<'a, T> Iterator for Iter<'a, T> { + type Item = &'a T; + + fn next(&mut self) -> Option<Self::Item> { + self.inner.next() + } + + fn size_hint(&self) -> (usize, Option<usize>) { + (self.len(), Some(self.len())) + } +} + +impl<'a, T> DoubleEndedIterator for Iter<'a, T> { + fn next_back(&mut self) -> Option<Self::Item> { + self.inner.next_back() + } +} + +impl<'a, T> ExactSizeIterator for Iter<'a, T> { + fn len(&self) -> usize { + self.inner.len() + } +} + +impl<'a, T, P> Iterator for PrivateIter<'a, T, P> { + type Item = &'a T; + + fn next(&mut self) -> Option<Self::Item> { + self.inner + .next() + .map(|pair| &pair.0) + .or_else(|| self.last.next()) + } +} + +impl<'a, T, P> DoubleEndedIterator for PrivateIter<'a, T, P> { + fn next_back(&mut self) -> Option<Self::Item> { + self.last + .next() + .or_else(|| self.inner.next_back().map(|pair| &pair.0)) + } +} + +impl<'a, T, P> ExactSizeIterator for PrivateIter<'a, T, P> { + fn len(&self) -> usize { + self.inner.len() + self.last.len() + } +} + +// No Clone bound on T or P. +impl<'a, T, P> Clone for PrivateIter<'a, T, P> { + fn clone(&self) -> Self { + PrivateIter { + inner: self.inner.clone(), + last: self.last.clone(), + } + } +} + +impl<'a, T, I> IterTrait<'a, T> for I +where + T: 'a, + I: DoubleEndedIterator<Item = &'a T> + + ExactSizeIterator<Item = &'a T> + + Clone + + TrivialDrop + + 'a, +{ + fn clone_box(&self) -> Box<NoDrop<dyn IterTrait<'a, T, Item = &'a T> + 'a>> { + Box::new(NoDrop::new(self.clone())) + } +} + +/// An iterator over mutably borrowed values of type `&mut T`. +/// +/// Refer to the [module documentation] for details about punctuated sequences. +/// +/// [module documentation]: self +pub struct IterMut<'a, T: 'a> { + inner: Box<NoDrop<dyn IterMutTrait<'a, T, Item = &'a mut T> + 'a>>, +} + +trait IterMutTrait<'a, T: 'a>: + DoubleEndedIterator<Item = &'a mut T> + ExactSizeIterator<Item = &'a mut T> +{ +} + +struct PrivateIterMut<'a, T: 'a, P: 'a> { + inner: slice::IterMut<'a, (T, P)>, + last: option::IntoIter<&'a mut T>, +} + +impl<'a, T, P> TrivialDrop for PrivateIterMut<'a, T, P> +where + slice::IterMut<'a, (T, P)>: TrivialDrop, + option::IntoIter<&'a mut T>: TrivialDrop, +{ +} + +#[cfg(any(feature = "full", feature = "derive"))] +pub(crate) fn empty_punctuated_iter_mut<'a, T>() -> IterMut<'a, T> { + IterMut { + inner: Box::new(NoDrop::new(iter::empty())), + } +} + +impl<'a, T> Iterator for IterMut<'a, T> { + type Item = &'a mut T; + + fn next(&mut self) -> Option<Self::Item> { + self.inner.next() + } + + fn size_hint(&self) -> (usize, Option<usize>) { + (self.len(), Some(self.len())) + } +} + +impl<'a, T> DoubleEndedIterator for IterMut<'a, T> { + fn next_back(&mut self) -> Option<Self::Item> { + self.inner.next_back() + } +} + +impl<'a, T> ExactSizeIterator for IterMut<'a, T> { + fn len(&self) -> usize { + self.inner.len() + } +} + +impl<'a, T, P> Iterator for PrivateIterMut<'a, T, P> { + type Item = &'a mut T; + + fn next(&mut self) -> Option<Self::Item> { + self.inner + .next() + .map(|pair| &mut pair.0) + .or_else(|| self.last.next()) + } +} + +impl<'a, T, P> DoubleEndedIterator for PrivateIterMut<'a, T, P> { + fn next_back(&mut self) -> Option<Self::Item> { + self.last + .next() + .or_else(|| self.inner.next_back().map(|pair| &mut pair.0)) + } +} + +impl<'a, T, P> ExactSizeIterator for PrivateIterMut<'a, T, P> { + fn len(&self) -> usize { + self.inner.len() + self.last.len() + } +} + 
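// [Editorial sketch, not part of the upstream diff] The two main ways to walk
// a Punctuated, matching the iterator types defined above: `iter` yields only
// the values, while `pairs` also exposes the trailing punctuation of each
// element. The use of `parse_quote!` and the Ident/Token![,] types are
// assumptions made for the sake of a short example.
use syn::punctuated::{Pair, Punctuated};
use syn::{parse_quote, Ident, Token};

fn walk() {
    let idents: Punctuated<Ident, Token![,]> = parse_quote!(a, b, c);

    for ident in idents.iter() {
        println!("value: {}", ident);
    }

    for pair in idents.pairs() {
        match pair {
            Pair::Punctuated(ident, _comma) => println!("{} followed by a comma", ident),
            Pair::End(ident) => println!("{} with no trailing comma", ident),
        }
    }
}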
+impl<'a, T, I> IterMutTrait<'a, T> for I +where + T: 'a, + I: DoubleEndedIterator<Item = &'a mut T> + ExactSizeIterator<Item = &'a mut T> + 'a, +{ +} + +/// A single syntax tree node of type `T` followed by its trailing punctuation +/// of type `P` if any. +/// +/// Refer to the [module documentation] for details about punctuated sequences. +/// +/// [module documentation]: self +pub enum Pair<T, P> { + Punctuated(T, P), + End(T), +} + +impl<T, P> Pair<T, P> { + /// Extracts the syntax tree node from this punctuated pair, discarding the + /// following punctuation. + pub fn into_value(self) -> T { + match self { + Pair::Punctuated(t, _) | Pair::End(t) => t, + } + } + + /// Borrows the syntax tree node from this punctuated pair. + pub fn value(&self) -> &T { + match self { + Pair::Punctuated(t, _) | Pair::End(t) => t, + } + } + + /// Mutably borrows the syntax tree node from this punctuated pair. + pub fn value_mut(&mut self) -> &mut T { + match self { + Pair::Punctuated(t, _) | Pair::End(t) => t, + } + } + + /// Borrows the punctuation from this punctuated pair, unless this pair is + /// the final one and there is no trailing punctuation. + pub fn punct(&self) -> Option<&P> { + match self { + Pair::Punctuated(_, p) => Some(p), + Pair::End(_) => None, + } + } + + /// Mutably borrows the punctuation from this punctuated pair, unless the + /// pair is the final one and there is no trailing punctuation. + /// + /// # Example + /// + /// ``` + /// # use proc_macro2::Span; + /// # use syn::punctuated::Punctuated; + /// # use syn::{parse_quote, Token, TypeParamBound}; + /// # + /// # let mut punctuated = Punctuated::<TypeParamBound, Token![+]>::new(); + /// # let span = Span::call_site(); + /// # + /// punctuated.insert(0, parse_quote!('lifetime)); + /// if let Some(punct) = punctuated.pairs_mut().next().unwrap().punct_mut() { + /// punct.span = span; + /// } + /// ``` + pub fn punct_mut(&mut self) -> Option<&mut P> { + match self { + Pair::Punctuated(_, p) => Some(p), + Pair::End(_) => None, + } + } + + /// Creates a punctuated pair out of a syntax tree node and an optional + /// following punctuation. + pub fn new(t: T, p: Option<P>) -> Self { + match p { + Some(p) => Pair::Punctuated(t, p), + None => Pair::End(t), + } + } + + /// Produces this punctuated pair as a tuple of syntax tree node and + /// optional following punctuation. 
+ pub fn into_tuple(self) -> (T, Option<P>) { + match self { + Pair::Punctuated(t, p) => (t, Some(p)), + Pair::End(t) => (t, None), + } + } +} + +#[cfg(feature = "clone-impls")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl<T, P> Clone for Pair<T, P> +where + T: Clone, + P: Clone, +{ + fn clone(&self) -> Self { + match self { + Pair::Punctuated(t, p) => Pair::Punctuated(t.clone(), p.clone()), + Pair::End(t) => Pair::End(t.clone()), + } + } +} + +impl<T, P> Index<usize> for Punctuated<T, P> { + type Output = T; + + fn index(&self, index: usize) -> &Self::Output { + if index == self.len() - 1 { + match &self.last { + Some(t) => t, + None => &self.inner[index].0, + } + } else { + &self.inner[index].0 + } + } +} + +impl<T, P> IndexMut<usize> for Punctuated<T, P> { + fn index_mut(&mut self, index: usize) -> &mut Self::Output { + if index == self.len() - 1 { + match &mut self.last { + Some(t) => t, + None => &mut self.inner[index].0, + } + } else { + &mut self.inner[index].0 + } + } +} + +#[cfg(feature = "printing")] +mod printing { + use super::*; + use proc_macro2::TokenStream; + use quote::{ToTokens, TokenStreamExt}; + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl<T, P> ToTokens for Punctuated<T, P> + where + T: ToTokens, + P: ToTokens, + { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.pairs()); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl<T, P> ToTokens for Pair<T, P> + where + T: ToTokens, + P: ToTokens, + { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + Pair::Punctuated(a, b) => { + a.to_tokens(tokens); + b.to_tokens(tokens); + } + Pair::End(a) => a.to_tokens(tokens), + } + } + } +} diff --git a/third_party/rust/syn/src/reserved.rs b/third_party/rust/syn/src/reserved.rs new file mode 100644 index 0000000000..abfdf43a91 --- /dev/null +++ b/third_party/rust/syn/src/reserved.rs @@ -0,0 +1,44 @@ +// Type for a syntax tree node that is reserved for future use. +// +// For example ExprReference contains a field `raw` of type Reserved. If `&raw +// place` syntax becomes a thing as per https://github.com/rust-lang/rfcs/pull/2582, +// we can backward compatibly change `raw`'s type to Option<Token![raw]> without +// the possibility of breaking any code. + +use proc_macro2::Span; +use std::marker::PhantomData; + +#[cfg(feature = "extra-traits")] +use std::fmt::{self, Debug}; + +ast_struct! 
{ + pub struct Reserved { + _private: PhantomData<Span>, + } +} + +impl Default for Reserved { + fn default() -> Self { + Reserved { + _private: PhantomData, + } + } +} + +#[cfg(feature = "clone-impls")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] +impl Clone for Reserved { + fn clone(&self) -> Self { + Reserved { + _private: self._private, + } + } +} + +#[cfg(feature = "extra-traits")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] +impl Debug for Reserved { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.debug_struct("Reserved").finish() + } +} diff --git a/third_party/rust/syn/src/sealed.rs b/third_party/rust/syn/src/sealed.rs new file mode 100644 index 0000000000..0b11bc99a1 --- /dev/null +++ b/third_party/rust/syn/src/sealed.rs @@ -0,0 +1,4 @@ +#[cfg(feature = "parsing")] +pub mod lookahead { + pub trait Sealed: Copy {} +} diff --git a/third_party/rust/syn/src/span.rs b/third_party/rust/syn/src/span.rs new file mode 100644 index 0000000000..27a7fe846d --- /dev/null +++ b/third_party/rust/syn/src/span.rs @@ -0,0 +1,67 @@ +use proc_macro2::Span; + +pub trait IntoSpans<S> { + fn into_spans(self) -> S; +} + +impl IntoSpans<[Span; 1]> for Span { + fn into_spans(self) -> [Span; 1] { + [self] + } +} + +impl IntoSpans<[Span; 2]> for Span { + fn into_spans(self) -> [Span; 2] { + [self, self] + } +} + +impl IntoSpans<[Span; 3]> for Span { + fn into_spans(self) -> [Span; 3] { + [self, self, self] + } +} + +impl IntoSpans<[Span; 1]> for [Span; 1] { + fn into_spans(self) -> [Span; 1] { + self + } +} + +impl IntoSpans<[Span; 2]> for [Span; 2] { + fn into_spans(self) -> [Span; 2] { + self + } +} + +impl IntoSpans<[Span; 3]> for [Span; 3] { + fn into_spans(self) -> [Span; 3] { + self + } +} + +#[cfg(feature = "parsing")] +pub trait FromSpans: Sized { + fn from_spans(spans: &[Span]) -> Self; +} + +#[cfg(feature = "parsing")] +impl FromSpans for [Span; 1] { + fn from_spans(spans: &[Span]) -> Self { + [spans[0]] + } +} + +#[cfg(feature = "parsing")] +impl FromSpans for [Span; 2] { + fn from_spans(spans: &[Span]) -> Self { + [spans[0], spans[1]] + } +} + +#[cfg(feature = "parsing")] +impl FromSpans for [Span; 3] { + fn from_spans(spans: &[Span]) -> Self { + [spans[0], spans[1], spans[2]] + } +} diff --git a/third_party/rust/syn/src/spanned.rs b/third_party/rust/syn/src/spanned.rs new file mode 100644 index 0000000000..d51ffb3fa5 --- /dev/null +++ b/third_party/rust/syn/src/spanned.rs @@ -0,0 +1,114 @@ +//! A trait that can provide the `Span` of the complete contents of a syntax +//! tree node. +//! +//! *This module is available only if Syn is built with both the `"parsing"` and +//! `"printing"` features.* +//! +//! <br> +//! +//! # Example +//! +//! Suppose in a procedural macro we have a [`Type`] that we want to assert +//! implements the [`Sync`] trait. Maybe this is the type of one of the fields +//! of a struct for which we are deriving a trait implementation, and we need to +//! be able to pass a reference to one of those fields across threads. +//! +//! [`Type`]: crate::Type +//! [`Sync`]: std::marker::Sync +//! +//! If the field type does *not* implement `Sync` as required, we want the +//! compiler to report an error pointing out exactly which type it was. +//! +//! The following macro code takes a variable `ty` of type `Type` and produces a +//! static assertion that `Sync` is implemented for that type. +//! +//! ``` +//! # extern crate proc_macro; +//! # +//! use proc_macro::TokenStream; +//! use proc_macro2::Span; +//! 
use quote::quote_spanned; +//! use syn::Type; +//! use syn::spanned::Spanned; +//! +//! # const IGNORE_TOKENS: &str = stringify! { +//! #[proc_macro_derive(MyMacro)] +//! # }; +//! pub fn my_macro(input: TokenStream) -> TokenStream { +//! # let ty = get_a_type(); +//! /* ... */ +//! +//! let assert_sync = quote_spanned! {ty.span()=> +//! struct _AssertSync where #ty: Sync; +//! }; +//! +//! /* ... */ +//! # input +//! } +//! # +//! # fn get_a_type() -> Type { +//! # unimplemented!() +//! # } +//! ``` +//! +//! By inserting this `assert_sync` fragment into the output code generated by +//! our macro, the user's code will fail to compile if `ty` does not implement +//! `Sync`. The errors they would see look like the following. +//! +//! ```text +//! error[E0277]: the trait bound `*const i32: std::marker::Sync` is not satisfied +//! --> src/main.rs:10:21 +//! | +//! 10 | bad_field: *const i32, +//! | ^^^^^^^^^^ `*const i32` cannot be shared between threads safely +//! ``` +//! +//! In this technique, using the `Type`'s span for the error message makes the +//! error appear in the correct place underlining the right type. +//! +//! <br> +//! +//! # Limitations +//! +//! The underlying [`proc_macro::Span::join`] method is nightly-only. When +//! called from within a procedural macro in a nightly compiler, `Spanned` will +//! use `join` to produce the intended span. When not using a nightly compiler, +//! only the span of the *first token* of the syntax tree node is returned. +//! +//! In the common case of wanting to use the joined span as the span of a +//! `syn::Error`, consider instead using [`syn::Error::new_spanned`] which is +//! able to span the error correctly under the complete syntax tree node without +//! needing the unstable `join`. +//! +//! [`syn::Error::new_spanned`]: crate::Error::new_spanned + +use proc_macro2::Span; +use quote::spanned::Spanned as ToTokens; + +/// A trait that can provide the `Span` of the complete contents of a syntax +/// tree node. +/// +/// This trait is automatically implemented for all types that implement +/// [`ToTokens`] from the `quote` crate, as well as for `Span` itself. +/// +/// [`ToTokens`]: quote::ToTokens +/// +/// See the [module documentation] for an example. +/// +/// [module documentation]: self +/// +/// *This trait is available only if Syn is built with both the `"parsing"` and +/// `"printing"` features.* +pub trait Spanned { + /// Returns a `Span` covering the complete contents of this syntax tree + /// node, or [`Span::call_site()`] if this node is empty. + /// + /// [`Span::call_site()`]: proc_macro2::Span::call_site + fn span(&self) -> Span; +} + +impl<T: ?Sized + ToTokens> Spanned for T { + fn span(&self) -> Span { + self.__span() + } +} diff --git a/third_party/rust/syn/src/stmt.rs b/third_party/rust/syn/src/stmt.rs new file mode 100644 index 0000000000..58bd013ecf --- /dev/null +++ b/third_party/rust/syn/src/stmt.rs @@ -0,0 +1,349 @@ +use super::*; + +ast_struct! { + /// A braced block containing Rust statements. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct Block { + pub brace_token: token::Brace, + /// Statements in a block + pub stmts: Vec<Stmt>, + } +} + +ast_enum! { + /// A statement, usually ending in a semicolon. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub enum Stmt { + /// A local (let) binding. 
+ Local(Local), + + /// An item definition. + Item(Item), + + /// Expr without trailing semicolon. + Expr(Expr), + + /// Expression with trailing semicolon. + Semi(Expr, Token![;]), + } +} + +ast_struct! { + /// A local `let` binding: `let x: u64 = s.parse()?`. + /// + /// *This type is available only if Syn is built with the `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct Local { + pub attrs: Vec<Attribute>, + pub let_token: Token![let], + pub pat: Pat, + pub init: Option<(Token![=], Box<Expr>)>, + pub semi_token: Token![;], + } +} + +#[cfg(feature = "parsing")] +pub mod parsing { + use super::*; + use crate::parse::discouraged::Speculative; + use crate::parse::{Parse, ParseBuffer, ParseStream, Result}; + use proc_macro2::TokenStream; + + impl Block { + /// Parse the body of a block as zero or more statements, possibly + /// including one trailing expression. + /// + /// *This function is available only if Syn is built with the `"parsing"` + /// feature.* + /// + /// # Example + /// + /// ``` + /// use syn::{braced, token, Attribute, Block, Ident, Result, Stmt, Token}; + /// use syn::parse::{Parse, ParseStream}; + /// + /// // Parse a function with no generics or parameter list. + /// // + /// // fn playground { + /// // let mut x = 1; + /// // x += 1; + /// // println!("{}", x); + /// // } + /// struct MiniFunction { + /// attrs: Vec<Attribute>, + /// fn_token: Token![fn], + /// name: Ident, + /// brace_token: token::Brace, + /// stmts: Vec<Stmt>, + /// } + /// + /// impl Parse for MiniFunction { + /// fn parse(input: ParseStream) -> Result<Self> { + /// let outer_attrs = input.call(Attribute::parse_outer)?; + /// let fn_token: Token![fn] = input.parse()?; + /// let name: Ident = input.parse()?; + /// + /// let content; + /// let brace_token = braced!(content in input); + /// let inner_attrs = content.call(Attribute::parse_inner)?; + /// let stmts = content.call(Block::parse_within)?; + /// + /// Ok(MiniFunction { + /// attrs: { + /// let mut attrs = outer_attrs; + /// attrs.extend(inner_attrs); + /// attrs + /// }, + /// fn_token, + /// name, + /// brace_token, + /// stmts, + /// }) + /// } + /// } + /// ``` + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + pub fn parse_within(input: ParseStream) -> Result<Vec<Stmt>> { + let mut stmts = Vec::new(); + loop { + while let Some(semi) = input.parse::<Option<Token![;]>>()? { + stmts.push(Stmt::Semi(Expr::Verbatim(TokenStream::new()), semi)); + } + if input.is_empty() { + break; + } + let s = parse_stmt(input, true)?; + let requires_semicolon = if let Stmt::Expr(s) = &s { + expr::requires_terminator(s) + } else { + false + }; + stmts.push(s); + if input.is_empty() { + break; + } else if requires_semicolon { + return Err(input.error("unexpected token")); + } + } + Ok(stmts) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for Block { + fn parse(input: ParseStream) -> Result<Self> { + let content; + Ok(Block { + brace_token: braced!(content in input), + stmts: content.call(Block::parse_within)?, + }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for Stmt { + fn parse(input: ParseStream) -> Result<Self> { + parse_stmt(input, false) + } + } + + fn parse_stmt(input: ParseStream, allow_nosemi: bool) -> Result<Stmt> { + let begin = input.fork(); + let mut attrs = input.call(Attribute::parse_outer)?; + + // brace-style macros; paren and bracket macros get parsed as + // expression statements. 
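// [Editorial sketch, not part of the upstream diff] What the Block and Stmt
// parsers defined above produce for a small block, driven through
// `syn::parse_str`. The input string is an arbitrary illustration.
fn classify_statements() -> syn::Result<()> {
    let block: syn::Block = syn::parse_str("{ let x = 1; x += 1; x }")?;
    for stmt in &block.stmts {
        match stmt {
            syn::Stmt::Local(_) => println!("a `let` binding"),
            syn::Stmt::Item(_) => println!("an item"),
            syn::Stmt::Semi(_, _) => println!("an expression statement"),
            syn::Stmt::Expr(_) => println!("a trailing expression"),
        }
    }
    Ok(())
}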
+ let ahead = input.fork(); + if let Ok(path) = ahead.call(Path::parse_mod_style) { + if ahead.peek(Token![!]) + && (ahead.peek2(token::Brace) + && !(ahead.peek3(Token![.]) || ahead.peek3(Token![?])) + || ahead.peek2(Ident)) + { + input.advance_to(&ahead); + return stmt_mac(input, attrs, path); + } + } + + if input.peek(Token![let]) { + stmt_local(input, attrs, begin) + } else if input.peek(Token![pub]) + || input.peek(Token![crate]) && !input.peek2(Token![::]) + || input.peek(Token![extern]) + || input.peek(Token![use]) + || input.peek(Token![static]) + && (input.peek2(Token![mut]) + || input.peek2(Ident) + && !(input.peek2(Token![async]) + && (input.peek3(Token![move]) || input.peek3(Token![|])))) + || input.peek(Token![const]) && !input.peek2(token::Brace) + || input.peek(Token![unsafe]) && !input.peek2(token::Brace) + || input.peek(Token![async]) + && (input.peek2(Token![unsafe]) + || input.peek2(Token![extern]) + || input.peek2(Token![fn])) + || input.peek(Token![fn]) + || input.peek(Token![mod]) + || input.peek(Token![type]) + || input.peek(Token![struct]) + || input.peek(Token![enum]) + || input.peek(Token![union]) && input.peek2(Ident) + || input.peek(Token![auto]) && input.peek2(Token![trait]) + || input.peek(Token![trait]) + || input.peek(Token![default]) + && (input.peek2(Token![unsafe]) || input.peek2(Token![impl])) + || input.peek(Token![impl]) + || input.peek(Token![macro]) + { + let mut item: Item = input.parse()?; + attrs.extend(item.replace_attrs(Vec::new())); + item.replace_attrs(attrs); + Ok(Stmt::Item(item)) + } else { + stmt_expr(input, allow_nosemi, attrs) + } + } + + fn stmt_mac(input: ParseStream, attrs: Vec<Attribute>, path: Path) -> Result<Stmt> { + let bang_token: Token![!] = input.parse()?; + let ident: Option<Ident> = input.parse()?; + let (delimiter, tokens) = mac::parse_delimiter(input)?; + let semi_token: Option<Token![;]> = input.parse()?; + + Ok(Stmt::Item(Item::Macro(ItemMacro { + attrs, + ident, + mac: Macro { + path, + bang_token, + delimiter, + tokens, + }, + semi_token, + }))) + } + + fn stmt_local(input: ParseStream, attrs: Vec<Attribute>, begin: ParseBuffer) -> Result<Stmt> { + let let_token: Token![let] = input.parse()?; + + let mut pat: Pat = pat::parsing::multi_pat_with_leading_vert(input)?; + if input.peek(Token![:]) { + let colon_token: Token![:] = input.parse()?; + let ty: Type = input.parse()?; + pat = Pat::Type(PatType { + attrs: Vec::new(), + pat: Box::new(pat), + colon_token, + ty: Box::new(ty), + }); + } + + let init = if input.peek(Token![=]) { + let eq_token: Token![=] = input.parse()?; + let init: Expr = input.parse()?; + + if input.peek(Token![else]) { + input.parse::<Token![else]>()?; + let content; + braced!(content in input); + content.call(Block::parse_within)?; + let verbatim = Expr::Verbatim(verbatim::between(begin, input)); + let semi_token: Token![;] = input.parse()?; + return Ok(Stmt::Semi(verbatim, semi_token)); + } + + Some((eq_token, Box::new(init))) + } else { + None + }; + + let semi_token: Token![;] = input.parse()?; + + Ok(Stmt::Local(Local { + attrs, + let_token, + pat, + init, + semi_token, + })) + } + + fn stmt_expr( + input: ParseStream, + allow_nosemi: bool, + mut attrs: Vec<Attribute>, + ) -> Result<Stmt> { + let mut e = expr::parsing::expr_early(input)?; + + let mut attr_target = &mut e; + loop { + attr_target = match attr_target { + Expr::Assign(e) => &mut e.left, + Expr::AssignOp(e) => &mut e.left, + Expr::Binary(e) => &mut e.left, + _ => break, + }; + } + attrs.extend(attr_target.replace_attrs(Vec::new())); + 
attr_target.replace_attrs(attrs); + + if input.peek(Token![;]) { + return Ok(Stmt::Semi(e, input.parse()?)); + } + + if allow_nosemi || !expr::requires_terminator(&e) { + Ok(Stmt::Expr(e)) + } else { + Err(input.error("expected semicolon")) + } + } +} + +#[cfg(feature = "printing")] +mod printing { + use super::*; + use proc_macro2::TokenStream; + use quote::{ToTokens, TokenStreamExt}; + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for Block { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.brace_token.surround(tokens, |tokens| { + tokens.append_all(&self.stmts); + }); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for Stmt { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + Stmt::Local(local) => local.to_tokens(tokens), + Stmt::Item(item) => item.to_tokens(tokens), + Stmt::Expr(expr) => expr.to_tokens(tokens), + Stmt::Semi(expr, semi) => { + expr.to_tokens(tokens); + semi.to_tokens(tokens); + } + } + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for Local { + fn to_tokens(&self, tokens: &mut TokenStream) { + expr::printing::outer_attrs_to_tokens(&self.attrs, tokens); + self.let_token.to_tokens(tokens); + self.pat.to_tokens(tokens); + if let Some((eq_token, init)) = &self.init { + eq_token.to_tokens(tokens); + init.to_tokens(tokens); + } + self.semi_token.to_tokens(tokens); + } + } +} diff --git a/third_party/rust/syn/src/thread.rs b/third_party/rust/syn/src/thread.rs new file mode 100644 index 0000000000..9e5d8ad85e --- /dev/null +++ b/third_party/rust/syn/src/thread.rs @@ -0,0 +1,41 @@ +use std::fmt::{self, Debug}; +use std::thread::{self, ThreadId}; + +/// ThreadBound is a Sync-maker and Send-maker that allows accessing a value +/// of type T only from the original thread on which the ThreadBound was +/// constructed. +pub struct ThreadBound<T> { + value: T, + thread_id: ThreadId, +} + +unsafe impl<T> Sync for ThreadBound<T> {} + +// Send bound requires Copy, as otherwise Drop could run in the wrong place. +unsafe impl<T: Copy> Send for ThreadBound<T> {} + +impl<T> ThreadBound<T> { + pub fn new(value: T) -> Self { + ThreadBound { + value, + thread_id: thread::current().id(), + } + } + + pub fn get(&self) -> Option<&T> { + if thread::current().id() == self.thread_id { + Some(&self.value) + } else { + None + } + } +} + +impl<T: Debug> Debug for ThreadBound<T> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + match self.get() { + Some(value) => Debug::fmt(value, formatter), + None => formatter.write_str("unknown"), + } + } +} diff --git a/third_party/rust/syn/src/token.rs b/third_party/rust/syn/src/token.rs new file mode 100644 index 0000000000..2208b07d04 --- /dev/null +++ b/third_party/rust/syn/src/token.rs @@ -0,0 +1,1013 @@ +//! Tokens representing Rust punctuation, keywords, and delimiters. +//! +//! The type names in this module can be difficult to keep straight, so we +//! prefer to use the [`Token!`] macro instead. This is a type-macro that +//! expands to the token type of the given token. +//! +//! [`Token!`]: crate::Token +//! +//! # Example +//! +//! The [`ItemStatic`] syntax tree node is defined like this. +//! +//! [`ItemStatic`]: crate::ItemStatic +//! +//! ``` +//! # use syn::{Attribute, Expr, Ident, Token, Type, Visibility}; +//! # +//! pub struct ItemStatic { +//! pub attrs: Vec<Attribute>, +//! pub vis: Visibility, +//! pub static_token: Token![static], +//! pub mutability: Option<Token![mut]>, +//! pub ident: Ident, +//! 
pub colon_token: Token![:], +//! pub ty: Box<Type>, +//! pub eq_token: Token![=], +//! pub expr: Box<Expr>, +//! pub semi_token: Token![;], +//! } +//! ``` +//! +//! # Parsing +//! +//! Keywords and punctuation can be parsed through the [`ParseStream::parse`] +//! method. Delimiter tokens are parsed using the [`parenthesized!`], +//! [`bracketed!`] and [`braced!`] macros. +//! +//! [`ParseStream::parse`]: crate::parse::ParseBuffer::parse() +//! [`parenthesized!`]: crate::parenthesized! +//! [`bracketed!`]: crate::bracketed! +//! [`braced!`]: crate::braced! +//! +//! ``` +//! use syn::{Attribute, Result}; +//! use syn::parse::{Parse, ParseStream}; +//! # +//! # enum ItemStatic {} +//! +//! // Parse the ItemStatic struct shown above. +//! impl Parse for ItemStatic { +//! fn parse(input: ParseStream) -> Result<Self> { +//! # use syn::ItemStatic; +//! # fn parse(input: ParseStream) -> Result<ItemStatic> { +//! Ok(ItemStatic { +//! attrs: input.call(Attribute::parse_outer)?, +//! vis: input.parse()?, +//! static_token: input.parse()?, +//! mutability: input.parse()?, +//! ident: input.parse()?, +//! colon_token: input.parse()?, +//! ty: input.parse()?, +//! eq_token: input.parse()?, +//! expr: input.parse()?, +//! semi_token: input.parse()?, +//! }) +//! # } +//! # unimplemented!() +//! } +//! } +//! ``` +//! +//! # Other operations +//! +//! Every keyword and punctuation token supports the following operations. +//! +//! - [Peeking] — `input.peek(Token![...])` +//! +//! - [Parsing] — `input.parse::<Token![...]>()?` +//! +//! - [Printing] — `quote!( ... #the_token ... )` +//! +//! - Construction from a [`Span`] — `let the_token = Token![...](sp)` +//! +//! - Field access to its span — `let sp = the_token.span` +//! +//! [Peeking]: crate::parse::ParseBuffer::peek() +//! [Parsing]: crate::parse::ParseBuffer::parse() +//! [Printing]: https://docs.rs/quote/1.0/quote/trait.ToTokens.html +//! [`Span`]: https://docs.rs/proc-macro2/1.0/proc_macro2/struct.Span.html + +use self::private::WithSpan; +#[cfg(feature = "parsing")] +use crate::buffer::Cursor; +#[cfg(feature = "parsing")] +use crate::error::Result; +#[cfg(feature = "parsing")] +use crate::lifetime::Lifetime; +#[cfg(feature = "parsing")] +use crate::lit::{Lit, LitBool, LitByte, LitByteStr, LitChar, LitFloat, LitInt, LitStr}; +#[cfg(feature = "parsing")] +use crate::lookahead; +#[cfg(feature = "parsing")] +use crate::parse::{Parse, ParseStream}; +use crate::span::IntoSpans; +#[cfg(any(feature = "parsing", feature = "printing"))] +use proc_macro2::Ident; +use proc_macro2::Span; +#[cfg(feature = "printing")] +use proc_macro2::TokenStream; +#[cfg(feature = "parsing")] +use proc_macro2::{Delimiter, Literal, Punct, TokenTree}; +#[cfg(feature = "printing")] +use quote::{ToTokens, TokenStreamExt}; +#[cfg(feature = "extra-traits")] +use std::cmp; +#[cfg(feature = "extra-traits")] +use std::fmt::{self, Debug}; +#[cfg(feature = "extra-traits")] +use std::hash::{Hash, Hasher}; +use std::ops::{Deref, DerefMut}; + +/// Marker trait for types that represent single tokens. +/// +/// This trait is sealed and cannot be implemented for types outside of Syn. +#[cfg(feature = "parsing")] +pub trait Token: private::Sealed { + // Not public API. + #[doc(hidden)] + fn peek(cursor: Cursor) -> bool; + + // Not public API. 
+ #[doc(hidden)] + fn display() -> &'static str; +} + +mod private { + use proc_macro2::Span; + + #[cfg(feature = "parsing")] + pub trait Sealed {} + + /// Support writing `token.span` rather than `token.spans[0]` on tokens that + /// hold a single span. + #[repr(C)] + pub struct WithSpan { + pub span: Span, + } +} + +#[cfg(feature = "parsing")] +impl private::Sealed for Ident {} + +#[cfg(feature = "parsing")] +fn peek_impl(cursor: Cursor, peek: fn(ParseStream) -> bool) -> bool { + use crate::parse::Unexpected; + use std::cell::Cell; + use std::rc::Rc; + + let scope = Span::call_site(); + let unexpected = Rc::new(Cell::new(Unexpected::None)); + let buffer = crate::parse::new_parse_buffer(scope, cursor, unexpected); + peek(&buffer) +} + +macro_rules! impl_token { + ($display:tt $name:ty) => { + #[cfg(feature = "parsing")] + impl Token for $name { + fn peek(cursor: Cursor) -> bool { + fn peek(input: ParseStream) -> bool { + <$name as Parse>::parse(input).is_ok() + } + peek_impl(cursor, peek) + } + + fn display() -> &'static str { + $display + } + } + + #[cfg(feature = "parsing")] + impl private::Sealed for $name {} + }; +} + +impl_token!("lifetime" Lifetime); +impl_token!("literal" Lit); +impl_token!("string literal" LitStr); +impl_token!("byte string literal" LitByteStr); +impl_token!("byte literal" LitByte); +impl_token!("character literal" LitChar); +impl_token!("integer literal" LitInt); +impl_token!("floating point literal" LitFloat); +impl_token!("boolean literal" LitBool); +impl_token!("group token" proc_macro2::Group); + +macro_rules! impl_low_level_token { + ($display:tt $ty:ident $get:ident) => { + #[cfg(feature = "parsing")] + impl Token for $ty { + fn peek(cursor: Cursor) -> bool { + cursor.$get().is_some() + } + + fn display() -> &'static str { + $display + } + } + + #[cfg(feature = "parsing")] + impl private::Sealed for $ty {} + }; +} + +impl_low_level_token!("punctuation token" Punct punct); +impl_low_level_token!("literal" Literal literal); +impl_low_level_token!("token" TokenTree token_tree); + +// Not public API. +#[doc(hidden)] +#[cfg(feature = "parsing")] +pub trait CustomToken { + fn peek(cursor: Cursor) -> bool; + fn display() -> &'static str; +} + +#[cfg(feature = "parsing")] +impl<T: CustomToken> private::Sealed for T {} + +#[cfg(feature = "parsing")] +impl<T: CustomToken> Token for T { + fn peek(cursor: Cursor) -> bool { + <Self as CustomToken>::peek(cursor) + } + + fn display() -> &'static str { + <Self as CustomToken>::display() + } +} + +macro_rules! define_keywords { + ($($token:tt pub struct $name:ident #[$doc:meta])*) => { + $( + #[$doc] + /// + /// Don't try to remember the name of this type — use the + /// [`Token!`] macro instead. 
+ /// + /// [`Token!`]: crate::token + pub struct $name { + pub span: Span, + } + + #[doc(hidden)] + #[allow(non_snake_case)] + pub fn $name<S: IntoSpans<[Span; 1]>>(span: S) -> $name { + $name { + span: span.into_spans()[0], + } + } + + impl std::default::Default for $name { + fn default() -> Self { + $name { + span: Span::call_site(), + } + } + } + + #[cfg(feature = "clone-impls")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] + impl Copy for $name {} + + #[cfg(feature = "clone-impls")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] + impl Clone for $name { + fn clone(&self) -> Self { + *self + } + } + + #[cfg(feature = "extra-traits")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + impl Debug for $name { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.write_str(stringify!($name)) + } + } + + #[cfg(feature = "extra-traits")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + impl cmp::Eq for $name {} + + #[cfg(feature = "extra-traits")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + impl PartialEq for $name { + fn eq(&self, _other: &$name) -> bool { + true + } + } + + #[cfg(feature = "extra-traits")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + impl Hash for $name { + fn hash<H: Hasher>(&self, _state: &mut H) {} + } + + #[cfg(feature = "printing")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for $name { + fn to_tokens(&self, tokens: &mut TokenStream) { + printing::keyword($token, self.span, tokens); + } + } + + #[cfg(feature = "parsing")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for $name { + fn parse(input: ParseStream) -> Result<Self> { + Ok($name { + span: parsing::keyword(input, $token)?, + }) + } + } + + #[cfg(feature = "parsing")] + impl Token for $name { + fn peek(cursor: Cursor) -> bool { + parsing::peek_keyword(cursor, $token) + } + + fn display() -> &'static str { + concat!("`", $token, "`") + } + } + + #[cfg(feature = "parsing")] + impl private::Sealed for $name {} + )* + }; +} + +macro_rules! impl_deref_if_len_is_1 { + ($name:ident/1) => { + impl Deref for $name { + type Target = WithSpan; + + fn deref(&self) -> &Self::Target { + unsafe { &*(self as *const Self as *const WithSpan) } + } + } + + impl DerefMut for $name { + fn deref_mut(&mut self) -> &mut Self::Target { + unsafe { &mut *(self as *mut Self as *mut WithSpan) } + } + } + }; + + ($name:ident/$len:tt) => {}; +} + +macro_rules! define_punctuation_structs { + ($($token:tt pub struct $name:ident/$len:tt #[$doc:meta])*) => { + $( + #[repr(C)] + #[$doc] + /// + /// Don't try to remember the name of this type — use the + /// [`Token!`] macro instead. 
+ /// + /// [`Token!`]: crate::token + pub struct $name { + pub spans: [Span; $len], + } + + #[doc(hidden)] + #[allow(non_snake_case)] + pub fn $name<S: IntoSpans<[Span; $len]>>(spans: S) -> $name { + $name { + spans: spans.into_spans(), + } + } + + impl std::default::Default for $name { + fn default() -> Self { + $name { + spans: [Span::call_site(); $len], + } + } + } + + #[cfg(feature = "clone-impls")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] + impl Copy for $name {} + + #[cfg(feature = "clone-impls")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] + impl Clone for $name { + fn clone(&self) -> Self { + *self + } + } + + #[cfg(feature = "extra-traits")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + impl Debug for $name { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.write_str(stringify!($name)) + } + } + + #[cfg(feature = "extra-traits")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + impl cmp::Eq for $name {} + + #[cfg(feature = "extra-traits")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + impl PartialEq for $name { + fn eq(&self, _other: &$name) -> bool { + true + } + } + + #[cfg(feature = "extra-traits")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + impl Hash for $name { + fn hash<H: Hasher>(&self, _state: &mut H) {} + } + + impl_deref_if_len_is_1!($name/$len); + )* + }; +} + +macro_rules! define_punctuation { + ($($token:tt pub struct $name:ident/$len:tt #[$doc:meta])*) => { + $( + define_punctuation_structs! { + $token pub struct $name/$len #[$doc] + } + + #[cfg(feature = "printing")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for $name { + fn to_tokens(&self, tokens: &mut TokenStream) { + printing::punct($token, &self.spans, tokens); + } + } + + #[cfg(feature = "parsing")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for $name { + fn parse(input: ParseStream) -> Result<Self> { + Ok($name { + spans: parsing::punct(input, $token)?, + }) + } + } + + #[cfg(feature = "parsing")] + impl Token for $name { + fn peek(cursor: Cursor) -> bool { + parsing::peek_punct(cursor, $token) + } + + fn display() -> &'static str { + concat!("`", $token, "`") + } + } + + #[cfg(feature = "parsing")] + impl private::Sealed for $name {} + )* + }; +} + +macro_rules! 
define_delimiters { + ($($token:tt pub struct $name:ident #[$doc:meta])*) => { + $( + #[$doc] + pub struct $name { + pub span: Span, + } + + #[doc(hidden)] + #[allow(non_snake_case)] + pub fn $name<S: IntoSpans<[Span; 1]>>(span: S) -> $name { + $name { + span: span.into_spans()[0], + } + } + + impl std::default::Default for $name { + fn default() -> Self { + $name { + span: Span::call_site(), + } + } + } + + #[cfg(feature = "clone-impls")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] + impl Copy for $name {} + + #[cfg(feature = "clone-impls")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))] + impl Clone for $name { + fn clone(&self) -> Self { + *self + } + } + + #[cfg(feature = "extra-traits")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + impl Debug for $name { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.write_str(stringify!($name)) + } + } + + #[cfg(feature = "extra-traits")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + impl cmp::Eq for $name {} + + #[cfg(feature = "extra-traits")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + impl PartialEq for $name { + fn eq(&self, _other: &$name) -> bool { + true + } + } + + #[cfg(feature = "extra-traits")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))] + impl Hash for $name { + fn hash<H: Hasher>(&self, _state: &mut H) {} + } + + impl $name { + #[cfg(feature = "printing")] + pub fn surround<F>(&self, tokens: &mut TokenStream, f: F) + where + F: FnOnce(&mut TokenStream), + { + printing::delim($token, self.span, tokens, f); + } + } + + #[cfg(feature = "parsing")] + impl private::Sealed for $name {} + )* + }; +} + +define_punctuation_structs! { + "_" pub struct Underscore/1 /// `_` +} + +#[cfg(feature = "printing")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] +impl ToTokens for Underscore { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append(Ident::new("_", self.span)); + } +} + +#[cfg(feature = "parsing")] +#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] +impl Parse for Underscore { + fn parse(input: ParseStream) -> Result<Self> { + input.step(|cursor| { + if let Some((ident, rest)) = cursor.ident() { + if ident == "_" { + return Ok((Underscore(ident.span()), rest)); + } + } + if let Some((punct, rest)) = cursor.punct() { + if punct.as_char() == '_' { + return Ok((Underscore(punct.span()), rest)); + } + } + Err(cursor.error("expected `_`")) + }) + } +} + +#[cfg(feature = "parsing")] +impl Token for Underscore { + fn peek(cursor: Cursor) -> bool { + if let Some((ident, _rest)) = cursor.ident() { + return ident == "_"; + } + if let Some((punct, _rest)) = cursor.punct() { + return punct.as_char() == '_'; + } + false + } + + fn display() -> &'static str { + "`_`" + } +} + +#[cfg(feature = "parsing")] +impl private::Sealed for Underscore {} + +#[cfg(feature = "parsing")] +impl Token for Paren { + fn peek(cursor: Cursor) -> bool { + lookahead::is_delimiter(cursor, Delimiter::Parenthesis) + } + + fn display() -> &'static str { + "parentheses" + } +} + +#[cfg(feature = "parsing")] +impl Token for Brace { + fn peek(cursor: Cursor) -> bool { + lookahead::is_delimiter(cursor, Delimiter::Brace) + } + + fn display() -> &'static str { + "curly braces" + } +} + +#[cfg(feature = "parsing")] +impl Token for Bracket { + fn peek(cursor: Cursor) -> bool { + lookahead::is_delimiter(cursor, Delimiter::Bracket) + } + + fn display() -> &'static str { + "square brackets" + } +} + +#[cfg(feature = "parsing")] +impl Token for Group { + fn peek(cursor: 
Cursor) -> bool { + lookahead::is_delimiter(cursor, Delimiter::None) + } + + fn display() -> &'static str { + "invisible group" + } +} + +define_keywords! { + "abstract" pub struct Abstract /// `abstract` + "as" pub struct As /// `as` + "async" pub struct Async /// `async` + "auto" pub struct Auto /// `auto` + "await" pub struct Await /// `await` + "become" pub struct Become /// `become` + "box" pub struct Box /// `box` + "break" pub struct Break /// `break` + "const" pub struct Const /// `const` + "continue" pub struct Continue /// `continue` + "crate" pub struct Crate /// `crate` + "default" pub struct Default /// `default` + "do" pub struct Do /// `do` + "dyn" pub struct Dyn /// `dyn` + "else" pub struct Else /// `else` + "enum" pub struct Enum /// `enum` + "extern" pub struct Extern /// `extern` + "final" pub struct Final /// `final` + "fn" pub struct Fn /// `fn` + "for" pub struct For /// `for` + "if" pub struct If /// `if` + "impl" pub struct Impl /// `impl` + "in" pub struct In /// `in` + "let" pub struct Let /// `let` + "loop" pub struct Loop /// `loop` + "macro" pub struct Macro /// `macro` + "match" pub struct Match /// `match` + "mod" pub struct Mod /// `mod` + "move" pub struct Move /// `move` + "mut" pub struct Mut /// `mut` + "override" pub struct Override /// `override` + "priv" pub struct Priv /// `priv` + "pub" pub struct Pub /// `pub` + "ref" pub struct Ref /// `ref` + "return" pub struct Return /// `return` + "Self" pub struct SelfType /// `Self` + "self" pub struct SelfValue /// `self` + "static" pub struct Static /// `static` + "struct" pub struct Struct /// `struct` + "super" pub struct Super /// `super` + "trait" pub struct Trait /// `trait` + "try" pub struct Try /// `try` + "type" pub struct Type /// `type` + "typeof" pub struct Typeof /// `typeof` + "union" pub struct Union /// `union` + "unsafe" pub struct Unsafe /// `unsafe` + "unsized" pub struct Unsized /// `unsized` + "use" pub struct Use /// `use` + "virtual" pub struct Virtual /// `virtual` + "where" pub struct Where /// `where` + "while" pub struct While /// `while` + "yield" pub struct Yield /// `yield` +} + +define_punctuation! { + "+" pub struct Add/1 /// `+` + "+=" pub struct AddEq/2 /// `+=` + "&" pub struct And/1 /// `&` + "&&" pub struct AndAnd/2 /// `&&` + "&=" pub struct AndEq/2 /// `&=` + "@" pub struct At/1 /// `@` + "!" pub struct Bang/1 /// `!` + "^" pub struct Caret/1 /// `^` + "^=" pub struct CaretEq/2 /// `^=` + ":" pub struct Colon/1 /// `:` + "::" pub struct Colon2/2 /// `::` + "," pub struct Comma/1 /// `,` + "/" pub struct Div/1 /// `/` + "/=" pub struct DivEq/2 /// `/=` + "$" pub struct Dollar/1 /// `$` + "." pub struct Dot/1 /// `.` + ".." pub struct Dot2/2 /// `..` + "..." pub struct Dot3/3 /// `...` + "..=" pub struct DotDotEq/3 /// `..=` + "=" pub struct Eq/1 /// `=` + "==" pub struct EqEq/2 /// `==` + ">=" pub struct Ge/2 /// `>=` + ">" pub struct Gt/1 /// `>` + "<=" pub struct Le/2 /// `<=` + "<" pub struct Lt/1 /// `<` + "*=" pub struct MulEq/2 /// `*=` + "!=" pub struct Ne/2 /// `!=` + "|" pub struct Or/1 /// `|` + "|=" pub struct OrEq/2 /// `|=` + "||" pub struct OrOr/2 /// `||` + "#" pub struct Pound/1 /// `#` + "?" 
pub struct Question/1 /// `?` + "->" pub struct RArrow/2 /// `->` + "<-" pub struct LArrow/2 /// `<-` + "%" pub struct Rem/1 /// `%` + "%=" pub struct RemEq/2 /// `%=` + "=>" pub struct FatArrow/2 /// `=>` + ";" pub struct Semi/1 /// `;` + "<<" pub struct Shl/2 /// `<<` + "<<=" pub struct ShlEq/3 /// `<<=` + ">>" pub struct Shr/2 /// `>>` + ">>=" pub struct ShrEq/3 /// `>>=` + "*" pub struct Star/1 /// `*` + "-" pub struct Sub/1 /// `-` + "-=" pub struct SubEq/2 /// `-=` + "~" pub struct Tilde/1 /// `~` +} + +define_delimiters! { + "{" pub struct Brace /// `{...}` + "[" pub struct Bracket /// `[...]` + "(" pub struct Paren /// `(...)` + " " pub struct Group /// None-delimited group +} + +macro_rules! export_token_macro { + ($($await_rule:tt)*) => { + /// A type-macro that expands to the name of the Rust type representation of a + /// given token. + /// + /// See the [token module] documentation for details and examples. + /// + /// [token module]: crate::token + // Unfortunate duplication due to a rustdoc bug. + // https://github.com/rust-lang/rust/issues/45939 + #[macro_export] + macro_rules! Token { + [abstract] => { $crate::token::Abstract }; + [as] => { $crate::token::As }; + [async] => { $crate::token::Async }; + [auto] => { $crate::token::Auto }; + $($await_rule => { $crate::token::Await };)* + [become] => { $crate::token::Become }; + [box] => { $crate::token::Box }; + [break] => { $crate::token::Break }; + [const] => { $crate::token::Const }; + [continue] => { $crate::token::Continue }; + [crate] => { $crate::token::Crate }; + [default] => { $crate::token::Default }; + [do] => { $crate::token::Do }; + [dyn] => { $crate::token::Dyn }; + [else] => { $crate::token::Else }; + [enum] => { $crate::token::Enum }; + [extern] => { $crate::token::Extern }; + [final] => { $crate::token::Final }; + [fn] => { $crate::token::Fn }; + [for] => { $crate::token::For }; + [if] => { $crate::token::If }; + [impl] => { $crate::token::Impl }; + [in] => { $crate::token::In }; + [let] => { $crate::token::Let }; + [loop] => { $crate::token::Loop }; + [macro] => { $crate::token::Macro }; + [match] => { $crate::token::Match }; + [mod] => { $crate::token::Mod }; + [move] => { $crate::token::Move }; + [mut] => { $crate::token::Mut }; + [override] => { $crate::token::Override }; + [priv] => { $crate::token::Priv }; + [pub] => { $crate::token::Pub }; + [ref] => { $crate::token::Ref }; + [return] => { $crate::token::Return }; + [Self] => { $crate::token::SelfType }; + [self] => { $crate::token::SelfValue }; + [static] => { $crate::token::Static }; + [struct] => { $crate::token::Struct }; + [super] => { $crate::token::Super }; + [trait] => { $crate::token::Trait }; + [try] => { $crate::token::Try }; + [type] => { $crate::token::Type }; + [typeof] => { $crate::token::Typeof }; + [union] => { $crate::token::Union }; + [unsafe] => { $crate::token::Unsafe }; + [unsized] => { $crate::token::Unsized }; + [use] => { $crate::token::Use }; + [virtual] => { $crate::token::Virtual }; + [where] => { $crate::token::Where }; + [while] => { $crate::token::While }; + [yield] => { $crate::token::Yield }; + [+] => { $crate::token::Add }; + [+=] => { $crate::token::AddEq }; + [&] => { $crate::token::And }; + [&&] => { $crate::token::AndAnd }; + [&=] => { $crate::token::AndEq }; + [@] => { $crate::token::At }; + [!] 
=> { $crate::token::Bang }; + [^] => { $crate::token::Caret }; + [^=] => { $crate::token::CaretEq }; + [:] => { $crate::token::Colon }; + [::] => { $crate::token::Colon2 }; + [,] => { $crate::token::Comma }; + [/] => { $crate::token::Div }; + [/=] => { $crate::token::DivEq }; + [$] => { $crate::token::Dollar }; + [.] => { $crate::token::Dot }; + [..] => { $crate::token::Dot2 }; + [...] => { $crate::token::Dot3 }; + [..=] => { $crate::token::DotDotEq }; + [=] => { $crate::token::Eq }; + [==] => { $crate::token::EqEq }; + [>=] => { $crate::token::Ge }; + [>] => { $crate::token::Gt }; + [<=] => { $crate::token::Le }; + [<] => { $crate::token::Lt }; + [*=] => { $crate::token::MulEq }; + [!=] => { $crate::token::Ne }; + [|] => { $crate::token::Or }; + [|=] => { $crate::token::OrEq }; + [||] => { $crate::token::OrOr }; + [#] => { $crate::token::Pound }; + [?] => { $crate::token::Question }; + [->] => { $crate::token::RArrow }; + [<-] => { $crate::token::LArrow }; + [%] => { $crate::token::Rem }; + [%=] => { $crate::token::RemEq }; + [=>] => { $crate::token::FatArrow }; + [;] => { $crate::token::Semi }; + [<<] => { $crate::token::Shl }; + [<<=] => { $crate::token::ShlEq }; + [>>] => { $crate::token::Shr }; + [>>=] => { $crate::token::ShrEq }; + [*] => { $crate::token::Star }; + [-] => { $crate::token::Sub }; + [-=] => { $crate::token::SubEq }; + [~] => { $crate::token::Tilde }; + [_] => { $crate::token::Underscore }; + } + }; +} + +// Old rustc does not permit `await` appearing anywhere in the source file. +// https://github.com/rust-lang/rust/issues/57919 +// We put the Token![await] rule in a place that is not lexed by old rustc. +#[cfg(not(syn_omit_await_from_token_macro))] +include!("await.rs"); // export_token_macro! {[await]} +#[cfg(syn_omit_await_from_token_macro)] +export_token_macro! {} + +// Not public API. 
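// [Editorial sketch, not part of the upstream diff] The common operations on
// the keyword and punctuation types generated above, written against the
// `Token!` macro: peeking, parsing, and direct construction from a span. The
// `MaybeMut` struct is an assumption made for illustration.
use proc_macro2::Span;
use syn::parse::{Parse, ParseStream};
use syn::{Result, Token};

struct MaybeMut {
    mut_token: Option<Token![mut]>,
}

impl Parse for MaybeMut {
    fn parse(input: ParseStream) -> Result<Self> {
        // Peek without consuming, then parse the token only if it is present.
        let mut_token = if input.peek(Token![mut]) {
            Some(input.parse()?)
        } else {
            None
        };
        Ok(MaybeMut { mut_token })
    }
}

fn construct_directly() {
    // Tokens can also be built from a Span; single-span tokens expose `.span`.
    let comma: Token![,] = Token![,](Span::call_site());
    let _span = comma.span;
}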
+#[doc(hidden)] +#[cfg(feature = "parsing")] +pub mod parsing { + use crate::buffer::Cursor; + use crate::error::{Error, Result}; + use crate::parse::ParseStream; + use crate::span::FromSpans; + use proc_macro2::{Spacing, Span}; + + pub fn keyword(input: ParseStream, token: &str) -> Result<Span> { + input.step(|cursor| { + if let Some((ident, rest)) = cursor.ident() { + if ident == token { + return Ok((ident.span(), rest)); + } + } + Err(cursor.error(format!("expected `{}`", token))) + }) + } + + pub fn peek_keyword(cursor: Cursor, token: &str) -> bool { + if let Some((ident, _rest)) = cursor.ident() { + ident == token + } else { + false + } + } + + pub fn punct<S: FromSpans>(input: ParseStream, token: &str) -> Result<S> { + let mut spans = [input.span(); 3]; + punct_helper(input, token, &mut spans)?; + Ok(S::from_spans(&spans)) + } + + fn punct_helper(input: ParseStream, token: &str, spans: &mut [Span; 3]) -> Result<()> { + input.step(|cursor| { + let mut cursor = *cursor; + assert!(token.len() <= spans.len()); + + for (i, ch) in token.chars().enumerate() { + match cursor.punct() { + Some((punct, rest)) => { + spans[i] = punct.span(); + if punct.as_char() != ch { + break; + } else if i == token.len() - 1 { + return Ok(((), rest)); + } else if punct.spacing() != Spacing::Joint { + break; + } + cursor = rest; + } + None => break, + } + } + + Err(Error::new(spans[0], format!("expected `{}`", token))) + }) + } + + pub fn peek_punct(mut cursor: Cursor, token: &str) -> bool { + for (i, ch) in token.chars().enumerate() { + match cursor.punct() { + Some((punct, rest)) => { + if punct.as_char() != ch { + break; + } else if i == token.len() - 1 { + return true; + } else if punct.spacing() != Spacing::Joint { + break; + } + cursor = rest; + } + None => break, + } + } + false + } +} + +// Not public API. 
+#[doc(hidden)] +#[cfg(feature = "printing")] +pub mod printing { + use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream}; + use quote::TokenStreamExt; + + pub fn punct(s: &str, spans: &[Span], tokens: &mut TokenStream) { + assert_eq!(s.len(), spans.len()); + + let mut chars = s.chars(); + let mut spans = spans.iter(); + let ch = chars.next_back().unwrap(); + let span = spans.next_back().unwrap(); + for (ch, span) in chars.zip(spans) { + let mut op = Punct::new(ch, Spacing::Joint); + op.set_span(*span); + tokens.append(op); + } + + let mut op = Punct::new(ch, Spacing::Alone); + op.set_span(*span); + tokens.append(op); + } + + pub fn keyword(s: &str, span: Span, tokens: &mut TokenStream) { + tokens.append(Ident::new(s, span)); + } + + pub fn delim<F>(s: &str, span: Span, tokens: &mut TokenStream, f: F) + where + F: FnOnce(&mut TokenStream), + { + let delim = match s { + "(" => Delimiter::Parenthesis, + "[" => Delimiter::Bracket, + "{" => Delimiter::Brace, + " " => Delimiter::None, + _ => panic!("unknown delimiter: {}", s), + }; + let mut inner = TokenStream::new(); + f(&mut inner); + let mut g = Group::new(delim, inner); + g.set_span(span); + tokens.append(g); + } +} diff --git a/third_party/rust/syn/src/tt.rs b/third_party/rust/syn/src/tt.rs new file mode 100644 index 0000000000..d87c0ed4dc --- /dev/null +++ b/third_party/rust/syn/src/tt.rs @@ -0,0 +1,107 @@ +use proc_macro2::{Delimiter, TokenStream, TokenTree}; +use std::hash::{Hash, Hasher}; + +pub struct TokenTreeHelper<'a>(pub &'a TokenTree); + +impl<'a> PartialEq for TokenTreeHelper<'a> { + fn eq(&self, other: &Self) -> bool { + use proc_macro2::Spacing; + + match (self.0, other.0) { + (TokenTree::Group(g1), TokenTree::Group(g2)) => { + match (g1.delimiter(), g2.delimiter()) { + (Delimiter::Parenthesis, Delimiter::Parenthesis) + | (Delimiter::Brace, Delimiter::Brace) + | (Delimiter::Bracket, Delimiter::Bracket) + | (Delimiter::None, Delimiter::None) => {} + _ => return false, + } + + let s1 = g1.stream().into_iter(); + let mut s2 = g2.stream().into_iter(); + + for item1 in s1 { + let item2 = match s2.next() { + Some(item) => item, + None => return false, + }; + if TokenTreeHelper(&item1) != TokenTreeHelper(&item2) { + return false; + } + } + s2.next().is_none() + } + (TokenTree::Punct(o1), TokenTree::Punct(o2)) => { + o1.as_char() == o2.as_char() + && match (o1.spacing(), o2.spacing()) { + (Spacing::Alone, Spacing::Alone) | (Spacing::Joint, Spacing::Joint) => true, + _ => false, + } + } + (TokenTree::Literal(l1), TokenTree::Literal(l2)) => l1.to_string() == l2.to_string(), + (TokenTree::Ident(s1), TokenTree::Ident(s2)) => s1 == s2, + _ => false, + } + } +} + +impl<'a> Hash for TokenTreeHelper<'a> { + fn hash<H: Hasher>(&self, h: &mut H) { + use proc_macro2::Spacing; + + match self.0 { + TokenTree::Group(g) => { + 0u8.hash(h); + match g.delimiter() { + Delimiter::Parenthesis => 0u8.hash(h), + Delimiter::Brace => 1u8.hash(h), + Delimiter::Bracket => 2u8.hash(h), + Delimiter::None => 3u8.hash(h), + } + + for item in g.stream() { + TokenTreeHelper(&item).hash(h); + } + 0xffu8.hash(h); // terminator w/ a variant we don't normally hash + } + TokenTree::Punct(op) => { + 1u8.hash(h); + op.as_char().hash(h); + match op.spacing() { + Spacing::Alone => 0u8.hash(h), + Spacing::Joint => 1u8.hash(h), + } + } + TokenTree::Literal(lit) => (2u8, lit.to_string()).hash(h), + TokenTree::Ident(word) => (3u8, word).hash(h), + } + } +} + +pub struct TokenStreamHelper<'a>(pub &'a TokenStream); + +impl<'a> PartialEq for 
TokenStreamHelper<'a> { + fn eq(&self, other: &Self) -> bool { + let left = self.0.clone().into_iter().collect::<Vec<_>>(); + let right = other.0.clone().into_iter().collect::<Vec<_>>(); + if left.len() != right.len() { + return false; + } + for (a, b) in left.into_iter().zip(right) { + if TokenTreeHelper(&a) != TokenTreeHelper(&b) { + return false; + } + } + true + } +} + +impl<'a> Hash for TokenStreamHelper<'a> { + fn hash<H: Hasher>(&self, state: &mut H) { + let tts = self.0.clone().into_iter().collect::<Vec<_>>(); + tts.len().hash(state); + for tt in tts { + TokenTreeHelper(&tt).hash(state); + } + } +} diff --git a/third_party/rust/syn/src/ty.rs b/third_party/rust/syn/src/ty.rs new file mode 100644 index 0000000000..8c841e2f79 --- /dev/null +++ b/third_party/rust/syn/src/ty.rs @@ -0,0 +1,1304 @@ +use super::*; +use crate::punctuated::Punctuated; +use proc_macro2::TokenStream; + +ast_enum_of_structs! { + /// The possible types that a Rust value could have. + /// + /// *This type is available only if Syn is built with the `"derive"` or `"full"` + /// feature.* + /// + /// # Syntax tree enum + /// + /// This type is a [syntax tree enum]. + /// + /// [syntax tree enum]: Expr#syntax-tree-enums + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[cfg_attr(not(syn_no_non_exhaustive), non_exhaustive)] + pub enum Type { + /// A fixed size array type: `[T; n]`. + Array(TypeArray), + + /// A bare function type: `fn(usize) -> bool`. + BareFn(TypeBareFn), + + /// A type contained within invisible delimiters. + Group(TypeGroup), + + /// An `impl Bound1 + Bound2 + Bound3` type where `Bound` is a trait or + /// a lifetime. + ImplTrait(TypeImplTrait), + + /// Indication that a type should be inferred by the compiler: `_`. + Infer(TypeInfer), + + /// A macro in the type position. + Macro(TypeMacro), + + /// The never type: `!`. + Never(TypeNever), + + /// A parenthesized type equivalent to the inner type. + Paren(TypeParen), + + /// A path like `std::slice::Iter`, optionally qualified with a + /// self-type as in `<Vec<T> as SomeTrait>::Associated`. + Path(TypePath), + + /// A raw pointer type: `*const T` or `*mut T`. + Ptr(TypePtr), + + /// A reference type: `&'a T` or `&'a mut T`. + Reference(TypeReference), + + /// A dynamically sized slice type: `[T]`. + Slice(TypeSlice), + + /// A trait object type `dyn Bound1 + Bound2 + Bound3` where `Bound` is a + /// trait or a lifetime. + TraitObject(TypeTraitObject), + + /// A tuple type: `(A, B, C, String)`. + Tuple(TypeTuple), + + /// Tokens in type position not interpreted by Syn. + Verbatim(TokenStream), + + // Not public API. + // + // For testing exhaustiveness in downstream code, use the following idiom: + // + // match ty { + // Type::Array(ty) => {...} + // Type::BareFn(ty) => {...} + // ... + // Type::Verbatim(ty) => {...} + // + // #[cfg_attr(test, deny(non_exhaustive_omitted_patterns))] + // _ => { /* some sane fallback */ } + // } + // + // This way we fail your tests but don't break your library when adding + // a variant. You will be notified by a test failure when a variant is + // added, so that you can add code to handle it, but your library will + // continue to compile and work for downstream users in the interim. + #[cfg(syn_no_non_exhaustive)] + #[doc(hidden)] + __NonExhaustive, + } +} + +ast_struct! { + /// A fixed size array type: `[T; n]`. 
+ /// + /// *This type is available only if Syn is built with the `"derive"` or + /// `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct TypeArray { + pub bracket_token: token::Bracket, + pub elem: Box<Type>, + pub semi_token: Token![;], + pub len: Expr, + } +} + +ast_struct! { + /// A bare function type: `fn(usize) -> bool`. + /// + /// *This type is available only if Syn is built with the `"derive"` or + /// `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct TypeBareFn { + pub lifetimes: Option<BoundLifetimes>, + pub unsafety: Option<Token![unsafe]>, + pub abi: Option<Abi>, + pub fn_token: Token![fn], + pub paren_token: token::Paren, + pub inputs: Punctuated<BareFnArg, Token![,]>, + pub variadic: Option<Variadic>, + pub output: ReturnType, + } +} + +ast_struct! { + /// A type contained within invisible delimiters. + /// + /// *This type is available only if Syn is built with the `"derive"` or + /// `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct TypeGroup { + pub group_token: token::Group, + pub elem: Box<Type>, + } +} + +ast_struct! { + /// An `impl Bound1 + Bound2 + Bound3` type where `Bound` is a trait or + /// a lifetime. + /// + /// *This type is available only if Syn is built with the `"derive"` or + /// `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct TypeImplTrait { + pub impl_token: Token![impl], + pub bounds: Punctuated<TypeParamBound, Token![+]>, + } +} + +ast_struct! { + /// Indication that a type should be inferred by the compiler: `_`. + /// + /// *This type is available only if Syn is built with the `"derive"` or + /// `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct TypeInfer { + pub underscore_token: Token![_], + } +} + +ast_struct! { + /// A macro in the type position. + /// + /// *This type is available only if Syn is built with the `"derive"` or + /// `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct TypeMacro { + pub mac: Macro, + } +} + +ast_struct! { + /// The never type: `!`. + /// + /// *This type is available only if Syn is built with the `"derive"` or + /// `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct TypeNever { + pub bang_token: Token![!], + } +} + +ast_struct! { + /// A parenthesized type equivalent to the inner type. + /// + /// *This type is available only if Syn is built with the `"derive"` or + /// `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct TypeParen { + pub paren_token: token::Paren, + pub elem: Box<Type>, + } +} + +ast_struct! { + /// A path like `std::slice::Iter`, optionally qualified with a + /// self-type as in `<Vec<T> as SomeTrait>::Associated`. + /// + /// *This type is available only if Syn is built with the `"derive"` or + /// `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct TypePath { + pub qself: Option<QSelf>, + pub path: Path, + } +} + +ast_struct! { + /// A raw pointer type: `*const T` or `*mut T`. 
+ /// + /// *This type is available only if Syn is built with the `"derive"` or + /// `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct TypePtr { + pub star_token: Token![*], + pub const_token: Option<Token![const]>, + pub mutability: Option<Token![mut]>, + pub elem: Box<Type>, + } +} + +ast_struct! { + /// A reference type: `&'a T` or `&'a mut T`. + /// + /// *This type is available only if Syn is built with the `"derive"` or + /// `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct TypeReference { + pub and_token: Token![&], + pub lifetime: Option<Lifetime>, + pub mutability: Option<Token![mut]>, + pub elem: Box<Type>, + } +} + +ast_struct! { + /// A dynamically sized slice type: `[T]`. + /// + /// *This type is available only if Syn is built with the `"derive"` or + /// `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct TypeSlice { + pub bracket_token: token::Bracket, + pub elem: Box<Type>, + } +} + +ast_struct! { + /// A trait object type `dyn Bound1 + Bound2 + Bound3` where `Bound` is a + /// trait or a lifetime. + /// + /// *This type is available only if Syn is built with the `"derive"` or + /// `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct TypeTraitObject { + pub dyn_token: Option<Token![dyn]>, + pub bounds: Punctuated<TypeParamBound, Token![+]>, + } +} + +ast_struct! { + /// A tuple type: `(A, B, C, String)`. + /// + /// *This type is available only if Syn is built with the `"derive"` or + /// `"full"` feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct TypeTuple { + pub paren_token: token::Paren, + pub elems: Punctuated<Type, Token![,]>, + } +} + +ast_struct! { + /// The binary interface of a function: `extern "C"`. + /// + /// *This type is available only if Syn is built with the `"derive"` or `"full"` + /// feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct Abi { + pub extern_token: Token![extern], + pub name: Option<LitStr>, + } +} + +ast_struct! { + /// An argument in a function type: the `usize` in `fn(usize) -> bool`. + /// + /// *This type is available only if Syn is built with the `"derive"` or `"full"` + /// feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct BareFnArg { + pub attrs: Vec<Attribute>, + pub name: Option<(Ident, Token![:])>, + pub ty: Type, + } +} + +ast_struct! { + /// The variadic argument of a foreign function. + /// + /// ```rust + /// # struct c_char; + /// # struct c_int; + /// # + /// extern "C" { + /// fn printf(format: *const c_char, ...) -> c_int; + /// // ^^^ + /// } + /// ``` + /// + /// *This type is available only if Syn is built with the `"derive"` or `"full"` + /// feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct Variadic { + pub attrs: Vec<Attribute>, + pub dots: Token![...], + } +} + +ast_enum! { + /// Return type of a function signature. + /// + /// *This type is available only if Syn is built with the `"derive"` or `"full"` + /// feature.* + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub enum ReturnType { + /// Return type is not specified. + /// + /// Functions default to `()` and closures default to type inference. + Default, + /// A particular type is returned. 
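+        /// For example, the `-> bool` in `fn(usize) -> bool`.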
+ Type(Token![->], Box<Type>), + } +} + +#[cfg(feature = "parsing")] +pub mod parsing { + use super::*; + use crate::ext::IdentExt; + use crate::parse::{Parse, ParseStream, Result}; + use crate::path; + use proc_macro2::{Punct, Spacing, Span, TokenTree}; + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for Type { + fn parse(input: ParseStream) -> Result<Self> { + let allow_plus = true; + let allow_group_generic = true; + ambig_ty(input, allow_plus, allow_group_generic) + } + } + + impl Type { + /// In some positions, types may not contain the `+` character, to + /// disambiguate them. For example in the expression `1 as T`, T may not + /// contain a `+` character. + /// + /// This parser does not allow a `+`, while the default parser does. + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + pub fn without_plus(input: ParseStream) -> Result<Self> { + let allow_plus = false; + let allow_group_generic = true; + ambig_ty(input, allow_plus, allow_group_generic) + } + } + + pub(crate) fn ambig_ty( + input: ParseStream, + allow_plus: bool, + allow_group_generic: bool, + ) -> Result<Type> { + let begin = input.fork(); + + if input.peek(token::Group) { + let mut group: TypeGroup = input.parse()?; + if input.peek(Token![::]) && input.peek3(Ident::peek_any) { + if let Type::Path(mut ty) = *group.elem { + Path::parse_rest(input, &mut ty.path, false)?; + return Ok(Type::Path(ty)); + } else { + return Ok(Type::Path(TypePath { + qself: Some(QSelf { + lt_token: Token![<](group.group_token.span), + position: 0, + as_token: None, + gt_token: Token![>](group.group_token.span), + ty: group.elem, + }), + path: Path::parse_helper(input, false)?, + })); + } + } else if input.peek(Token![<]) && allow_group_generic + || input.peek(Token![::]) && input.peek3(Token![<]) + { + if let Type::Path(mut ty) = *group.elem { + let arguments = &mut ty.path.segments.last_mut().unwrap().arguments; + if let PathArguments::None = arguments { + *arguments = PathArguments::AngleBracketed(input.parse()?); + Path::parse_rest(input, &mut ty.path, false)?; + return Ok(Type::Path(ty)); + } else { + group.elem = Box::new(Type::Path(ty)); + } + } + } + return Ok(Type::Group(group)); + } + + let mut lifetimes = None::<BoundLifetimes>; + let mut lookahead = input.lookahead1(); + if lookahead.peek(Token![for]) { + lifetimes = input.parse()?; + lookahead = input.lookahead1(); + if !lookahead.peek(Ident) + && !lookahead.peek(Token![fn]) + && !lookahead.peek(Token![unsafe]) + && !lookahead.peek(Token![extern]) + && !lookahead.peek(Token![super]) + && !lookahead.peek(Token![self]) + && !lookahead.peek(Token![Self]) + && !lookahead.peek(Token![crate]) + || input.peek(Token![dyn]) + { + return Err(lookahead.error()); + } + } + + if lookahead.peek(token::Paren) { + let content; + let paren_token = parenthesized!(content in input); + if content.is_empty() { + return Ok(Type::Tuple(TypeTuple { + paren_token, + elems: Punctuated::new(), + })); + } + if content.peek(Lifetime) { + return Ok(Type::Paren(TypeParen { + paren_token, + elem: Box::new(Type::TraitObject(content.parse()?)), + })); + } + if content.peek(Token![?]) { + return Ok(Type::TraitObject(TypeTraitObject { + dyn_token: None, + bounds: { + let mut bounds = Punctuated::new(); + bounds.push_value(TypeParamBound::Trait(TraitBound { + paren_token: Some(paren_token), + ..content.parse()? + })); + while let Some(plus) = input.parse()? 
{ + bounds.push_punct(plus); + bounds.push_value(input.parse()?); + } + bounds + }, + })); + } + let mut first: Type = content.parse()?; + if content.peek(Token![,]) { + return Ok(Type::Tuple(TypeTuple { + paren_token, + elems: { + let mut elems = Punctuated::new(); + elems.push_value(first); + elems.push_punct(content.parse()?); + while !content.is_empty() { + elems.push_value(content.parse()?); + if content.is_empty() { + break; + } + elems.push_punct(content.parse()?); + } + elems + }, + })); + } + if allow_plus && input.peek(Token![+]) { + loop { + let first = match first { + Type::Path(TypePath { qself: None, path }) => { + TypeParamBound::Trait(TraitBound { + paren_token: Some(paren_token), + modifier: TraitBoundModifier::None, + lifetimes: None, + path, + }) + } + Type::TraitObject(TypeTraitObject { + dyn_token: None, + bounds, + }) => { + if bounds.len() > 1 || bounds.trailing_punct() { + first = Type::TraitObject(TypeTraitObject { + dyn_token: None, + bounds, + }); + break; + } + match bounds.into_iter().next().unwrap() { + TypeParamBound::Trait(trait_bound) => { + TypeParamBound::Trait(TraitBound { + paren_token: Some(paren_token), + ..trait_bound + }) + } + other @ TypeParamBound::Lifetime(_) => other, + } + } + _ => break, + }; + return Ok(Type::TraitObject(TypeTraitObject { + dyn_token: None, + bounds: { + let mut bounds = Punctuated::new(); + bounds.push_value(first); + while let Some(plus) = input.parse()? { + bounds.push_punct(plus); + bounds.push_value(input.parse()?); + } + bounds + }, + })); + } + } + Ok(Type::Paren(TypeParen { + paren_token, + elem: Box::new(first), + })) + } else if lookahead.peek(Token![fn]) + || lookahead.peek(Token![unsafe]) + || lookahead.peek(Token![extern]) + { + let allow_mut_self = true; + if let Some(mut bare_fn) = parse_bare_fn(input, allow_mut_self)? { + bare_fn.lifetimes = lifetimes; + Ok(Type::BareFn(bare_fn)) + } else { + Ok(Type::Verbatim(verbatim::between(begin, input))) + } + } else if lookahead.peek(Ident) + || input.peek(Token![super]) + || input.peek(Token![self]) + || input.peek(Token![Self]) + || input.peek(Token![crate]) + || lookahead.peek(Token![::]) + || lookahead.peek(Token![<]) + { + let dyn_token: Option<Token![dyn]> = input.parse()?; + if let Some(dyn_token) = dyn_token { + let dyn_span = dyn_token.span; + let star_token: Option<Token![*]> = input.parse()?; + let bounds = TypeTraitObject::parse_bounds(dyn_span, input, allow_plus)?; + return Ok(if star_token.is_some() { + Type::Verbatim(verbatim::between(begin, input)) + } else { + Type::TraitObject(TypeTraitObject { + dyn_token: Some(dyn_token), + bounds, + }) + }); + } + + let ty: TypePath = input.parse()?; + if ty.qself.is_some() { + return Ok(Type::Path(ty)); + } + + if input.peek(Token![!]) && !input.peek(Token![!=]) { + let mut contains_arguments = false; + for segment in &ty.path.segments { + match segment.arguments { + PathArguments::None => {} + PathArguments::AngleBracketed(_) | PathArguments::Parenthesized(_) => { + contains_arguments = true; + } + } + } + + if !contains_arguments { + let bang_token: Token![!] 
= input.parse()?; + let (delimiter, tokens) = mac::parse_delimiter(input)?; + return Ok(Type::Macro(TypeMacro { + mac: Macro { + path: ty.path, + bang_token, + delimiter, + tokens, + }, + })); + } + } + + if lifetimes.is_some() || allow_plus && input.peek(Token![+]) { + let mut bounds = Punctuated::new(); + bounds.push_value(TypeParamBound::Trait(TraitBound { + paren_token: None, + modifier: TraitBoundModifier::None, + lifetimes, + path: ty.path, + })); + if allow_plus { + while input.peek(Token![+]) { + bounds.push_punct(input.parse()?); + if !(input.peek(Ident::peek_any) + || input.peek(Token![::]) + || input.peek(Token![?]) + || input.peek(Lifetime) + || input.peek(token::Paren)) + { + break; + } + bounds.push_value(input.parse()?); + } + } + return Ok(Type::TraitObject(TypeTraitObject { + dyn_token: None, + bounds, + })); + } + + Ok(Type::Path(ty)) + } else if lookahead.peek(token::Bracket) { + let content; + let bracket_token = bracketed!(content in input); + let elem: Type = content.parse()?; + if content.peek(Token![;]) { + Ok(Type::Array(TypeArray { + bracket_token, + elem: Box::new(elem), + semi_token: content.parse()?, + len: content.parse()?, + })) + } else { + Ok(Type::Slice(TypeSlice { + bracket_token, + elem: Box::new(elem), + })) + } + } else if lookahead.peek(Token![*]) { + input.parse().map(Type::Ptr) + } else if lookahead.peek(Token![&]) { + input.parse().map(Type::Reference) + } else if lookahead.peek(Token![!]) && !input.peek(Token![=]) { + input.parse().map(Type::Never) + } else if lookahead.peek(Token![impl]) { + TypeImplTrait::parse(input, allow_plus).map(Type::ImplTrait) + } else if lookahead.peek(Token![_]) { + input.parse().map(Type::Infer) + } else if lookahead.peek(Lifetime) { + input.parse().map(Type::TraitObject) + } else { + Err(lookahead.error()) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for TypeSlice { + fn parse(input: ParseStream) -> Result<Self> { + let content; + Ok(TypeSlice { + bracket_token: bracketed!(content in input), + elem: content.parse()?, + }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for TypeArray { + fn parse(input: ParseStream) -> Result<Self> { + let content; + Ok(TypeArray { + bracket_token: bracketed!(content in input), + elem: content.parse()?, + semi_token: content.parse()?, + len: content.parse()?, + }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for TypePtr { + fn parse(input: ParseStream) -> Result<Self> { + let star_token: Token![*] = input.parse()?; + + let lookahead = input.lookahead1(); + let (const_token, mutability) = if lookahead.peek(Token![const]) { + (Some(input.parse()?), None) + } else if lookahead.peek(Token![mut]) { + (None, Some(input.parse()?)) + } else { + return Err(lookahead.error()); + }; + + Ok(TypePtr { + star_token, + const_token, + mutability, + elem: Box::new(input.call(Type::without_plus)?), + }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for TypeReference { + fn parse(input: ParseStream) -> Result<Self> { + Ok(TypeReference { + and_token: input.parse()?, + lifetime: input.parse()?, + mutability: input.parse()?, + // & binds tighter than +, so we don't allow + here. 
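+                // e.g. in Rust `&dyn A + Send` is ambiguous and must be
+                // written `&(dyn A + Send)` to include both bounds.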
+ elem: Box::new(input.call(Type::without_plus)?), + }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for TypeBareFn { + fn parse(input: ParseStream) -> Result<Self> { + let allow_mut_self = false; + parse_bare_fn(input, allow_mut_self).map(Option::unwrap) + } + } + + fn parse_bare_fn(input: ParseStream, allow_mut_self: bool) -> Result<Option<TypeBareFn>> { + let args; + let mut variadic = None; + let mut has_mut_self = false; + + let bare_fn = TypeBareFn { + lifetimes: input.parse()?, + unsafety: input.parse()?, + abi: input.parse()?, + fn_token: input.parse()?, + paren_token: parenthesized!(args in input), + inputs: { + let mut inputs = Punctuated::new(); + + while !args.is_empty() { + let attrs = args.call(Attribute::parse_outer)?; + + if inputs.empty_or_trailing() && args.peek(Token![...]) { + variadic = Some(Variadic { + attrs, + dots: args.parse()?, + }); + break; + } + + if let Some(arg) = parse_bare_fn_arg(&args, allow_mut_self)? { + inputs.push_value(BareFnArg { attrs, ..arg }); + } else { + has_mut_self = true; + } + if args.is_empty() { + break; + } + + let comma = args.parse()?; + if !has_mut_self { + inputs.push_punct(comma); + } + } + + inputs + }, + variadic, + output: input.call(ReturnType::without_plus)?, + }; + + if has_mut_self { + Ok(None) + } else { + Ok(Some(bare_fn)) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for TypeNever { + fn parse(input: ParseStream) -> Result<Self> { + Ok(TypeNever { + bang_token: input.parse()?, + }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for TypeInfer { + fn parse(input: ParseStream) -> Result<Self> { + Ok(TypeInfer { + underscore_token: input.parse()?, + }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for TypeTuple { + fn parse(input: ParseStream) -> Result<Self> { + let content; + let paren_token = parenthesized!(content in input); + + if content.is_empty() { + return Ok(TypeTuple { + paren_token, + elems: Punctuated::new(), + }); + } + + let first: Type = content.parse()?; + Ok(TypeTuple { + paren_token, + elems: { + let mut elems = Punctuated::new(); + elems.push_value(first); + elems.push_punct(content.parse()?); + while !content.is_empty() { + elems.push_value(content.parse()?); + if content.is_empty() { + break; + } + elems.push_punct(content.parse()?); + } + elems + }, + }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for TypeMacro { + fn parse(input: ParseStream) -> Result<Self> { + Ok(TypeMacro { + mac: input.parse()?, + }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for TypePath { + fn parse(input: ParseStream) -> Result<Self> { + let expr_style = false; + let (qself, mut path) = path::parsing::qpath(input, expr_style)?; + + while path.segments.last().unwrap().arguments.is_empty() + && (input.peek(token::Paren) || input.peek(Token![::]) && input.peek3(token::Paren)) + { + input.parse::<Option<Token![::]>>()?; + let args: ParenthesizedGenericArguments = input.parse()?; + let allow_associated_type = cfg!(feature = "full") + && match &args.output { + ReturnType::Default => true, + ReturnType::Type(_, ty) => match **ty { + // TODO: probably some of the other kinds allow this too. 
+ Type::Paren(_) => true, + _ => false, + }, + }; + let parenthesized = PathArguments::Parenthesized(args); + path.segments.last_mut().unwrap().arguments = parenthesized; + if allow_associated_type { + Path::parse_rest(input, &mut path, expr_style)?; + } + } + + Ok(TypePath { qself, path }) + } + } + + impl ReturnType { + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + pub fn without_plus(input: ParseStream) -> Result<Self> { + let allow_plus = false; + Self::parse(input, allow_plus) + } + + pub(crate) fn parse(input: ParseStream, allow_plus: bool) -> Result<Self> { + if input.peek(Token![->]) { + let arrow = input.parse()?; + let allow_group_generic = true; + let ty = ambig_ty(input, allow_plus, allow_group_generic)?; + Ok(ReturnType::Type(arrow, Box::new(ty))) + } else { + Ok(ReturnType::Default) + } + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for ReturnType { + fn parse(input: ParseStream) -> Result<Self> { + let allow_plus = true; + Self::parse(input, allow_plus) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for TypeTraitObject { + fn parse(input: ParseStream) -> Result<Self> { + let allow_plus = true; + Self::parse(input, allow_plus) + } + } + + impl TypeTraitObject { + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + pub fn without_plus(input: ParseStream) -> Result<Self> { + let allow_plus = false; + Self::parse(input, allow_plus) + } + + // Only allow multiple trait references if allow_plus is true. + pub(crate) fn parse(input: ParseStream, allow_plus: bool) -> Result<Self> { + let dyn_token: Option<Token![dyn]> = input.parse()?; + let dyn_span = match &dyn_token { + Some(token) => token.span, + None => input.span(), + }; + let bounds = Self::parse_bounds(dyn_span, input, allow_plus)?; + Ok(TypeTraitObject { dyn_token, bounds }) + } + + fn parse_bounds( + dyn_span: Span, + input: ParseStream, + allow_plus: bool, + ) -> Result<Punctuated<TypeParamBound, Token![+]>> { + let bounds = TypeParamBound::parse_multiple(input, allow_plus)?; + let mut last_lifetime_span = None; + let mut at_least_one_trait = false; + for bound in &bounds { + match bound { + TypeParamBound::Trait(_) => { + at_least_one_trait = true; + break; + } + TypeParamBound::Lifetime(lifetime) => { + last_lifetime_span = Some(lifetime.ident.span()); + } + } + } + // Just lifetimes like `'a + 'b` is not a TraitObject. 
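+        // An object type must include at least one trait; the error is
+        // reported against the `dyn` span and the last lifetime's span.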
+ if !at_least_one_trait { + let msg = "at least one trait is required for an object type"; + return Err(error::new2(dyn_span, last_lifetime_span.unwrap(), msg)); + } + Ok(bounds) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for TypeImplTrait { + fn parse(input: ParseStream) -> Result<Self> { + let allow_plus = true; + Self::parse(input, allow_plus) + } + } + + impl TypeImplTrait { + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + pub fn without_plus(input: ParseStream) -> Result<Self> { + let allow_plus = false; + Self::parse(input, allow_plus) + } + + pub(crate) fn parse(input: ParseStream, allow_plus: bool) -> Result<Self> { + let impl_token: Token![impl] = input.parse()?; + let bounds = TypeParamBound::parse_multiple(input, allow_plus)?; + let mut last_lifetime_span = None; + let mut at_least_one_trait = false; + for bound in &bounds { + match bound { + TypeParamBound::Trait(_) => { + at_least_one_trait = true; + break; + } + TypeParamBound::Lifetime(lifetime) => { + last_lifetime_span = Some(lifetime.ident.span()); + } + } + } + if !at_least_one_trait { + let msg = "at least one trait must be specified"; + return Err(error::new2( + impl_token.span, + last_lifetime_span.unwrap(), + msg, + )); + } + Ok(TypeImplTrait { impl_token, bounds }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for TypeGroup { + fn parse(input: ParseStream) -> Result<Self> { + let group = crate::group::parse_group(input)?; + Ok(TypeGroup { + group_token: group.token, + elem: group.content.parse()?, + }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for TypeParen { + fn parse(input: ParseStream) -> Result<Self> { + let allow_plus = false; + Self::parse(input, allow_plus) + } + } + + impl TypeParen { + fn parse(input: ParseStream, allow_plus: bool) -> Result<Self> { + let content; + Ok(TypeParen { + paren_token: parenthesized!(content in input), + elem: Box::new({ + let allow_group_generic = true; + ambig_ty(&content, allow_plus, allow_group_generic)? 
+ }), + }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for BareFnArg { + fn parse(input: ParseStream) -> Result<Self> { + let allow_mut_self = false; + parse_bare_fn_arg(input, allow_mut_self).map(Option::unwrap) + } + } + + fn parse_bare_fn_arg( + input: ParseStream, + mut allow_mut_self: bool, + ) -> Result<Option<BareFnArg>> { + let mut has_mut_self = false; + let arg = BareFnArg { + attrs: input.call(Attribute::parse_outer)?, + name: { + if (input.peek(Ident) || input.peek(Token![_]) || input.peek(Token![self])) + && input.peek2(Token![:]) + && !input.peek2(Token![::]) + { + let name = input.call(Ident::parse_any)?; + let colon: Token![:] = input.parse()?; + Some((name, colon)) + } else if allow_mut_self + && input.peek(Token![mut]) + && input.peek2(Token![self]) + && input.peek3(Token![:]) + && !input.peek3(Token![::]) + { + has_mut_self = true; + allow_mut_self = false; + input.parse::<Token![mut]>()?; + input.parse::<Token![self]>()?; + input.parse::<Token![:]>()?; + None + } else { + None + } + }, + ty: if !has_mut_self && input.peek(Token![...]) { + let dot3 = input.parse::<Token![...]>()?; + let args = vec![ + TokenTree::Punct(Punct::new('.', Spacing::Joint)), + TokenTree::Punct(Punct::new('.', Spacing::Joint)), + TokenTree::Punct(Punct::new('.', Spacing::Alone)), + ]; + let tokens: TokenStream = args + .into_iter() + .zip(&dot3.spans) + .map(|(mut arg, span)| { + arg.set_span(*span); + arg + }) + .collect(); + Type::Verbatim(tokens) + } else if allow_mut_self && input.peek(Token![mut]) && input.peek2(Token![self]) { + has_mut_self = true; + input.parse::<Token![mut]>()?; + Type::Path(TypePath { + qself: None, + path: input.parse::<Token![self]>()?.into(), + }) + } else { + input.parse()? + }, + }; + + if has_mut_self { + Ok(None) + } else { + Ok(Some(arg)) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for Abi { + fn parse(input: ParseStream) -> Result<Self> { + Ok(Abi { + extern_token: input.parse()?, + name: input.parse()?, + }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for Option<Abi> { + fn parse(input: ParseStream) -> Result<Self> { + if input.peek(Token![extern]) { + input.parse().map(Some) + } else { + Ok(None) + } + } + } +} + +#[cfg(feature = "printing")] +mod printing { + use super::*; + use crate::attr::FilterAttrs; + use crate::print::TokensOrDefault; + use proc_macro2::TokenStream; + use quote::{ToTokens, TokenStreamExt}; + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for TypeSlice { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.bracket_token.surround(tokens, |tokens| { + self.elem.to_tokens(tokens); + }); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for TypeArray { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.bracket_token.surround(tokens, |tokens| { + self.elem.to_tokens(tokens); + self.semi_token.to_tokens(tokens); + self.len.to_tokens(tokens); + }); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for TypePtr { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.star_token.to_tokens(tokens); + match &self.mutability { + Some(tok) => tok.to_tokens(tokens), + None => { + TokensOrDefault(&self.const_token).to_tokens(tokens); + } + } + self.elem.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for TypeReference { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.and_token.to_tokens(tokens); + 
self.lifetime.to_tokens(tokens); + self.mutability.to_tokens(tokens); + self.elem.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for TypeBareFn { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.lifetimes.to_tokens(tokens); + self.unsafety.to_tokens(tokens); + self.abi.to_tokens(tokens); + self.fn_token.to_tokens(tokens); + self.paren_token.surround(tokens, |tokens| { + self.inputs.to_tokens(tokens); + if let Some(variadic) = &self.variadic { + if !self.inputs.empty_or_trailing() { + let span = variadic.dots.spans[0]; + Token![,](span).to_tokens(tokens); + } + variadic.to_tokens(tokens); + } + }); + self.output.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for TypeNever { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.bang_token.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for TypeTuple { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.paren_token.surround(tokens, |tokens| { + self.elems.to_tokens(tokens); + }); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for TypePath { + fn to_tokens(&self, tokens: &mut TokenStream) { + path::printing::print_path(tokens, &self.qself, &self.path); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for TypeTraitObject { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.dyn_token.to_tokens(tokens); + self.bounds.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for TypeImplTrait { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.impl_token.to_tokens(tokens); + self.bounds.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for TypeGroup { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.group_token.surround(tokens, |tokens| { + self.elem.to_tokens(tokens); + }); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for TypeParen { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.paren_token.surround(tokens, |tokens| { + self.elem.to_tokens(tokens); + }); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for TypeInfer { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.underscore_token.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for TypeMacro { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.mac.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for ReturnType { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + ReturnType::Default => {} + ReturnType::Type(arrow, ty) => { + arrow.to_tokens(tokens); + ty.to_tokens(tokens); + } + } + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for BareFnArg { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + if let Some((name, colon)) = &self.name { + name.to_tokens(tokens); + colon.to_tokens(tokens); + } + self.ty.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for Variadic { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(self.attrs.outer()); + self.dots.to_tokens(tokens); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for Abi { + fn to_tokens(&self, tokens: &mut 
TokenStream) { + self.extern_token.to_tokens(tokens); + self.name.to_tokens(tokens); + } + } +} diff --git a/third_party/rust/syn/src/verbatim.rs b/third_party/rust/syn/src/verbatim.rs new file mode 100644 index 0000000000..58cf68d17a --- /dev/null +++ b/third_party/rust/syn/src/verbatim.rs @@ -0,0 +1,33 @@ +use crate::parse::{ParseBuffer, ParseStream}; +use proc_macro2::{Delimiter, TokenStream}; +use std::cmp::Ordering; +use std::iter; + +pub fn between<'a>(begin: ParseBuffer<'a>, end: ParseStream<'a>) -> TokenStream { + let end = end.cursor(); + let mut cursor = begin.cursor(); + assert!(crate::buffer::same_buffer(end, cursor)); + + let mut tokens = TokenStream::new(); + while cursor != end { + let (tt, next) = cursor.token_tree().unwrap(); + + if crate::buffer::cmp_assuming_same_buffer(end, next) == Ordering::Less { + // A syntax node can cross the boundary of a None-delimited group + // due to such groups being transparent to the parser in most cases. + // Any time this occurs the group is known to be semantically + // irrelevant. https://github.com/dtolnay/syn/issues/1235 + if let Some((inside, _span, after)) = cursor.group(Delimiter::None) { + assert!(next == after); + cursor = inside; + continue; + } else { + panic!("verbatim end must not be inside a delimited group"); + } + } + + tokens.extend(iter::once(tt)); + cursor = next; + } + tokens +} diff --git a/third_party/rust/syn/src/whitespace.rs b/third_party/rust/syn/src/whitespace.rs new file mode 100644 index 0000000000..7be082e1a2 --- /dev/null +++ b/third_party/rust/syn/src/whitespace.rs @@ -0,0 +1,65 @@ +pub fn skip(mut s: &str) -> &str { + 'skip: while !s.is_empty() { + let byte = s.as_bytes()[0]; + if byte == b'/' { + if s.starts_with("//") + && (!s.starts_with("///") || s.starts_with("////")) + && !s.starts_with("//!") + { + if let Some(i) = s.find('\n') { + s = &s[i + 1..]; + continue; + } else { + return ""; + } + } else if s.starts_with("/**/") { + s = &s[4..]; + continue; + } else if s.starts_with("/*") + && (!s.starts_with("/**") || s.starts_with("/***")) + && !s.starts_with("/*!") + { + let mut depth = 0; + let bytes = s.as_bytes(); + let mut i = 0; + let upper = bytes.len() - 1; + while i < upper { + if bytes[i] == b'/' && bytes[i + 1] == b'*' { + depth += 1; + i += 1; // eat '*' + } else if bytes[i] == b'*' && bytes[i + 1] == b'/' { + depth -= 1; + if depth == 0 { + s = &s[i + 2..]; + continue 'skip; + } + i += 1; // eat '/' + } + i += 1; + } + return s; + } + } + match byte { + b' ' | 0x09..=0x0d => { + s = &s[1..]; + continue; + } + b if b <= 0x7f => {} + _ => { + let ch = s.chars().next().unwrap(); + if is_whitespace(ch) { + s = &s[ch.len_utf8()..]; + continue; + } + } + } + return s; + } + s +} + +fn is_whitespace(ch: char) -> bool { + // Rust treats left-to-right mark and right-to-left mark as whitespace + ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}' +} diff --git a/third_party/rust/syn/tests/common/eq.rs b/third_party/rust/syn/tests/common/eq.rs new file mode 100644 index 0000000000..41d6d4118c --- /dev/null +++ b/third_party/rust/syn/tests/common/eq.rs @@ -0,0 +1,806 @@ +#![allow(unused_macro_rules)] + +extern crate rustc_ast; +extern crate rustc_data_structures; +extern crate rustc_span; +extern crate thin_vec; + +use rustc_ast::ast::AngleBracketedArg; +use rustc_ast::ast::AngleBracketedArgs; +use rustc_ast::ast::AnonConst; +use rustc_ast::ast::Arm; +use rustc_ast::ast::AssocConstraint; +use rustc_ast::ast::AssocConstraintKind; +use rustc_ast::ast::AssocItemKind; +use rustc_ast::ast::Async; +use 
rustc_ast::ast::AttrArgs; +use rustc_ast::ast::AttrArgsEq; +use rustc_ast::ast::AttrId; +use rustc_ast::ast::AttrItem; +use rustc_ast::ast::AttrKind; +use rustc_ast::ast::AttrStyle; +use rustc_ast::ast::Attribute; +use rustc_ast::ast::BareFnTy; +use rustc_ast::ast::BinOpKind; +use rustc_ast::ast::BindingAnnotation; +use rustc_ast::ast::Block; +use rustc_ast::ast::BlockCheckMode; +use rustc_ast::ast::BorrowKind; +use rustc_ast::ast::ByRef; +use rustc_ast::ast::CaptureBy; +use rustc_ast::ast::Closure; +use rustc_ast::ast::ClosureBinder; +use rustc_ast::ast::Const; +use rustc_ast::ast::Crate; +use rustc_ast::ast::Defaultness; +use rustc_ast::ast::DelimArgs; +use rustc_ast::ast::EnumDef; +use rustc_ast::ast::Expr; +use rustc_ast::ast::ExprField; +use rustc_ast::ast::ExprKind; +use rustc_ast::ast::Extern; +use rustc_ast::ast::FieldDef; +use rustc_ast::ast::FloatTy; +use rustc_ast::ast::Fn; +use rustc_ast::ast::FnDecl; +use rustc_ast::ast::FnHeader; +use rustc_ast::ast::FnRetTy; +use rustc_ast::ast::FnSig; +use rustc_ast::ast::ForeignItemKind; +use rustc_ast::ast::ForeignMod; +use rustc_ast::ast::GenericArg; +use rustc_ast::ast::GenericArgs; +use rustc_ast::ast::GenericBound; +use rustc_ast::ast::GenericParam; +use rustc_ast::ast::GenericParamKind; +use rustc_ast::ast::Generics; +use rustc_ast::ast::Impl; +use rustc_ast::ast::ImplPolarity; +use rustc_ast::ast::Inline; +use rustc_ast::ast::InlineAsm; +use rustc_ast::ast::InlineAsmOperand; +use rustc_ast::ast::InlineAsmOptions; +use rustc_ast::ast::InlineAsmRegOrRegClass; +use rustc_ast::ast::InlineAsmSym; +use rustc_ast::ast::InlineAsmTemplatePiece; +use rustc_ast::ast::IntTy; +use rustc_ast::ast::IsAuto; +use rustc_ast::ast::Item; +use rustc_ast::ast::ItemKind; +use rustc_ast::ast::Label; +use rustc_ast::ast::Lifetime; +use rustc_ast::ast::LitFloatType; +use rustc_ast::ast::LitIntType; +use rustc_ast::ast::LitKind; +use rustc_ast::ast::Local; +use rustc_ast::ast::LocalKind; +use rustc_ast::ast::MacCall; +use rustc_ast::ast::MacCallStmt; +use rustc_ast::ast::MacDelimiter; +use rustc_ast::ast::MacStmtStyle; +use rustc_ast::ast::MacroDef; +use rustc_ast::ast::MetaItemLit; +use rustc_ast::ast::MethodCall; +use rustc_ast::ast::ModKind; +use rustc_ast::ast::ModSpans; +use rustc_ast::ast::Movability; +use rustc_ast::ast::MutTy; +use rustc_ast::ast::Mutability; +use rustc_ast::ast::NodeId; +use rustc_ast::ast::NormalAttr; +use rustc_ast::ast::Param; +use rustc_ast::ast::ParenthesizedArgs; +use rustc_ast::ast::Pat; +use rustc_ast::ast::PatField; +use rustc_ast::ast::PatKind; +use rustc_ast::ast::Path; +use rustc_ast::ast::PathSegment; +use rustc_ast::ast::PolyTraitRef; +use rustc_ast::ast::QSelf; +use rustc_ast::ast::RangeEnd; +use rustc_ast::ast::RangeLimits; +use rustc_ast::ast::RangeSyntax; +use rustc_ast::ast::Stmt; +use rustc_ast::ast::StmtKind; +use rustc_ast::ast::StrLit; +use rustc_ast::ast::StrStyle; +use rustc_ast::ast::StructExpr; +use rustc_ast::ast::StructRest; +use rustc_ast::ast::Term; +use rustc_ast::ast::Trait; +use rustc_ast::ast::TraitBoundModifier; +use rustc_ast::ast::TraitObjectSyntax; +use rustc_ast::ast::TraitRef; +use rustc_ast::ast::Ty; +use rustc_ast::ast::TyAlias; +use rustc_ast::ast::TyAliasWhereClause; +use rustc_ast::ast::TyKind; +use rustc_ast::ast::UintTy; +use rustc_ast::ast::UnOp; +use rustc_ast::ast::Unsafe; +use rustc_ast::ast::UnsafeSource; +use rustc_ast::ast::UseTree; +use rustc_ast::ast::UseTreeKind; +use rustc_ast::ast::Variant; +use rustc_ast::ast::VariantData; +use rustc_ast::ast::Visibility; +use 
rustc_ast::ast::VisibilityKind; +use rustc_ast::ast::WhereBoundPredicate; +use rustc_ast::ast::WhereClause; +use rustc_ast::ast::WhereEqPredicate; +use rustc_ast::ast::WherePredicate; +use rustc_ast::ast::WhereRegionPredicate; +use rustc_ast::ptr::P; +use rustc_ast::token::{self, CommentKind, Delimiter, Lit, Nonterminal, Token, TokenKind}; +use rustc_ast::tokenstream::{ + AttrTokenStream, AttrTokenTree, AttributesData, DelimSpan, LazyAttrTokenStream, Spacing, + TokenStream, TokenTree, +}; +use rustc_data_structures::sync::Lrc; +use rustc_span::source_map::Spanned; +use rustc_span::symbol::{sym, Ident}; +use rustc_span::{Span, Symbol, SyntaxContext, DUMMY_SP}; +use thin_vec::ThinVec; + +pub trait SpanlessEq { + fn eq(&self, other: &Self) -> bool; +} + +impl<T: ?Sized + SpanlessEq> SpanlessEq for Box<T> { + fn eq(&self, other: &Self) -> bool { + SpanlessEq::eq(&**self, &**other) + } +} + +impl<T: ?Sized + SpanlessEq> SpanlessEq for P<T> { + fn eq(&self, other: &Self) -> bool { + SpanlessEq::eq(&**self, &**other) + } +} + +impl<T: ?Sized + SpanlessEq> SpanlessEq for Lrc<T> { + fn eq(&self, other: &Self) -> bool { + SpanlessEq::eq(&**self, &**other) + } +} + +impl<T: SpanlessEq> SpanlessEq for Option<T> { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (None, None) => true, + (Some(this), Some(other)) => SpanlessEq::eq(this, other), + _ => false, + } + } +} + +impl<T: SpanlessEq> SpanlessEq for [T] { + fn eq(&self, other: &Self) -> bool { + self.len() == other.len() && self.iter().zip(other).all(|(a, b)| SpanlessEq::eq(a, b)) + } +} + +impl<T: SpanlessEq> SpanlessEq for Vec<T> { + fn eq(&self, other: &Self) -> bool { + <[T] as SpanlessEq>::eq(self, other) + } +} + +impl<T: SpanlessEq> SpanlessEq for ThinVec<T> { + fn eq(&self, other: &Self) -> bool { + self.len() == other.len() + && self + .iter() + .zip(other.iter()) + .all(|(a, b)| SpanlessEq::eq(a, b)) + } +} + +impl<T: SpanlessEq> SpanlessEq for Spanned<T> { + fn eq(&self, other: &Self) -> bool { + SpanlessEq::eq(&self.node, &other.node) + } +} + +impl<A: SpanlessEq, B: SpanlessEq> SpanlessEq for (A, B) { + fn eq(&self, other: &Self) -> bool { + SpanlessEq::eq(&self.0, &other.0) && SpanlessEq::eq(&self.1, &other.1) + } +} + +impl<A: SpanlessEq, B: SpanlessEq, C: SpanlessEq> SpanlessEq for (A, B, C) { + fn eq(&self, other: &Self) -> bool { + SpanlessEq::eq(&self.0, &other.0) + && SpanlessEq::eq(&self.1, &other.1) + && SpanlessEq::eq(&self.2, &other.2) + } +} + +macro_rules! spanless_eq_true { + ($name:ty) => { + impl SpanlessEq for $name { + fn eq(&self, _other: &Self) -> bool { + true + } + } + }; +} + +spanless_eq_true!(Span); +spanless_eq_true!(DelimSpan); +spanless_eq_true!(AttrId); +spanless_eq_true!(NodeId); +spanless_eq_true!(SyntaxContext); +spanless_eq_true!(Spacing); + +macro_rules! spanless_eq_partial_eq { + ($name:ty) => { + impl SpanlessEq for $name { + fn eq(&self, other: &Self) -> bool { + PartialEq::eq(self, other) + } + } + }; +} + +spanless_eq_partial_eq!(bool); +spanless_eq_partial_eq!(u8); +spanless_eq_partial_eq!(u16); +spanless_eq_partial_eq!(u128); +spanless_eq_partial_eq!(usize); +spanless_eq_partial_eq!(char); +spanless_eq_partial_eq!(String); +spanless_eq_partial_eq!(Symbol); +spanless_eq_partial_eq!(CommentKind); +spanless_eq_partial_eq!(Delimiter); +spanless_eq_partial_eq!(InlineAsmOptions); +spanless_eq_partial_eq!(token::LitKind); + +macro_rules! spanless_eq_struct { + { + $($name:ident)::+ $(<$param:ident>)? 
+ $([$field:tt $this:ident $other:ident])* + $(![$ignore:tt])*; + } => { + impl $(<$param: SpanlessEq>)* SpanlessEq for $($name)::+ $(<$param>)* { + fn eq(&self, other: &Self) -> bool { + let $($name)::+ { $($field: $this,)* $($ignore: _,)* } = self; + let $($name)::+ { $($field: $other,)* $($ignore: _,)* } = other; + true $(&& SpanlessEq::eq($this, $other))* + } + } + }; + + { + $($name:ident)::+ $(<$param:ident>)? + $([$field:tt $this:ident $other:ident])* + $(![$ignore:tt])*; + !$next:tt + $($rest:tt)* + } => { + spanless_eq_struct! { + $($name)::+ $(<$param>)* + $([$field $this $other])* + $(![$ignore])* + ![$next]; + $($rest)* + } + }; + + { + $($name:ident)::+ $(<$param:ident>)? + $([$field:tt $this:ident $other:ident])* + $(![$ignore:tt])*; + $next:tt + $($rest:tt)* + } => { + spanless_eq_struct! { + $($name)::+ $(<$param>)* + $([$field $this $other])* + [$next this other] + $(![$ignore])*; + $($rest)* + } + }; +} + +macro_rules! spanless_eq_enum { + { + $($name:ident)::+; + $([$($variant:ident)::+; $([$field:tt $this:ident $other:ident])* $(![$ignore:tt])*])* + } => { + impl SpanlessEq for $($name)::+ { + fn eq(&self, other: &Self) -> bool { + match self { + $( + $($variant)::+ { .. } => {} + )* + } + #[allow(unreachable_patterns)] + match (self, other) { + $( + ( + $($variant)::+ { $($field: $this,)* $($ignore: _,)* }, + $($variant)::+ { $($field: $other,)* $($ignore: _,)* }, + ) => { + true $(&& SpanlessEq::eq($this, $other))* + } + )* + _ => false, + } + } + } + }; + + { + $($name:ident)::+; + $([$($variant:ident)::+; $($fields:tt)*])* + $next:ident [$([$($named:tt)*])* $(![$ignore:tt])*] (!$i:tt $($field:tt)*) + $($rest:tt)* + } => { + spanless_eq_enum! { + $($name)::+; + $([$($variant)::+; $($fields)*])* + $next [$([$($named)*])* $(![$ignore])* ![$i]] ($($field)*) + $($rest)* + } + }; + + { + $($name:ident)::+; + $([$($variant:ident)::+; $($fields:tt)*])* + $next:ident [$([$($named:tt)*])* $(![$ignore:tt])*] ($i:tt $($field:tt)*) + $($rest:tt)* + } => { + spanless_eq_enum! { + $($name)::+; + $([$($variant)::+; $($fields)*])* + $next [$([$($named)*])* [$i this other] $(![$ignore])*] ($($field)*) + $($rest)* + } + }; + + { + $($name:ident)::+; + $([$($variant:ident)::+; $($fields:tt)*])* + $next:ident [$($named:tt)*] () + $($rest:tt)* + } => { + spanless_eq_enum! { + $($name)::+; + $([$($variant)::+; $($fields)*])* + [$($name)::+::$next; $($named)*] + $($rest)* + } + }; + + { + $($name:ident)::+; + $([$($variant:ident)::+; $($fields:tt)*])* + $next:ident ($($field:tt)*) + $($rest:tt)* + } => { + spanless_eq_enum! { + $($name)::+; + $([$($variant)::+; $($fields)*])* + $next [] ($($field)*) + $($rest)* + } + }; + + { + $($name:ident)::+; + $([$($variant:ident)::+; $($fields:tt)*])* + $next:ident + $($rest:tt)* + } => { + spanless_eq_enum! 
{ + $($name)::+; + $([$($variant)::+; $($fields)*])* + [$($name)::+::$next;] + $($rest)* + } + }; +} + +spanless_eq_struct!(AngleBracketedArgs; span args); +spanless_eq_struct!(AnonConst; id value); +spanless_eq_struct!(Arm; attrs pat guard body span id is_placeholder); +spanless_eq_struct!(AssocConstraint; id ident gen_args kind span); +spanless_eq_struct!(AttrItem; path args tokens); +spanless_eq_struct!(AttrTokenStream; 0); +spanless_eq_struct!(Attribute; kind id style span); +spanless_eq_struct!(AttributesData; attrs tokens); +spanless_eq_struct!(BareFnTy; unsafety ext generic_params decl decl_span); +spanless_eq_struct!(BindingAnnotation; 0 1); +spanless_eq_struct!(Block; stmts id rules span tokens could_be_bare_literal); +spanless_eq_struct!(Closure; binder capture_clause asyncness movability fn_decl body !fn_decl_span); +spanless_eq_struct!(Crate; attrs items spans id is_placeholder); +spanless_eq_struct!(DelimArgs; dspan delim tokens); +spanless_eq_struct!(EnumDef; variants); +spanless_eq_struct!(Expr; id kind span attrs !tokens); +spanless_eq_struct!(ExprField; attrs id span ident expr is_shorthand is_placeholder); +spanless_eq_struct!(FieldDef; attrs id span vis ident ty is_placeholder); +spanless_eq_struct!(FnDecl; inputs output); +spanless_eq_struct!(FnHeader; constness asyncness unsafety ext); +spanless_eq_struct!(Fn; defaultness generics sig body); +spanless_eq_struct!(FnSig; header decl span); +spanless_eq_struct!(ForeignMod; unsafety abi items); +spanless_eq_struct!(GenericParam; id ident attrs bounds is_placeholder kind !colon_span); +spanless_eq_struct!(Generics; params where_clause span); +spanless_eq_struct!(Impl; defaultness unsafety generics constness polarity of_trait self_ty items); +spanless_eq_struct!(InlineAsm; template template_strs operands clobber_abis options line_spans); +spanless_eq_struct!(InlineAsmSym; id qself path); +spanless_eq_struct!(Item<K>; attrs id span vis ident kind !tokens); +spanless_eq_struct!(Label; ident); +spanless_eq_struct!(Lifetime; id ident); +spanless_eq_struct!(Lit; kind symbol suffix); +spanless_eq_struct!(Local; pat ty kind id span attrs !tokens); +spanless_eq_struct!(MacCall; path args prior_type_ascription); +spanless_eq_struct!(MacCallStmt; mac style attrs tokens); +spanless_eq_struct!(MacroDef; body macro_rules); +spanless_eq_struct!(MetaItemLit; token_lit kind span); +spanless_eq_struct!(MethodCall; seg receiver args !span); +spanless_eq_struct!(ModSpans; !inner_span !inject_use_span); +spanless_eq_struct!(MutTy; ty mutbl); +spanless_eq_struct!(NormalAttr; item tokens); +spanless_eq_struct!(ParenthesizedArgs; span inputs inputs_span output); +spanless_eq_struct!(Pat; id kind span tokens); +spanless_eq_struct!(PatField; ident pat is_shorthand attrs id span is_placeholder); +spanless_eq_struct!(Path; span segments tokens); +spanless_eq_struct!(PathSegment; ident id args); +spanless_eq_struct!(PolyTraitRef; bound_generic_params trait_ref span); +spanless_eq_struct!(QSelf; ty path_span position); +spanless_eq_struct!(Stmt; id kind span); +spanless_eq_struct!(StrLit; style symbol suffix span symbol_unescaped); +spanless_eq_struct!(StructExpr; qself path fields rest); +spanless_eq_struct!(Token; kind span); +spanless_eq_struct!(Trait; unsafety is_auto generics bounds items); +spanless_eq_struct!(TraitRef; path ref_id); +spanless_eq_struct!(Ty; id kind span tokens); +spanless_eq_struct!(TyAlias; defaultness generics where_clauses !where_predicates_split bounds ty); +spanless_eq_struct!(TyAliasWhereClause; !0 1); 
+spanless_eq_struct!(UseTree; prefix kind span); +spanless_eq_struct!(Variant; attrs id span !vis ident data disr_expr is_placeholder); +spanless_eq_struct!(Visibility; kind span tokens); +spanless_eq_struct!(WhereBoundPredicate; span bound_generic_params bounded_ty bounds); +spanless_eq_struct!(WhereClause; has_where_token predicates span); +spanless_eq_struct!(WhereEqPredicate; span lhs_ty rhs_ty); +spanless_eq_struct!(WhereRegionPredicate; span lifetime bounds); +spanless_eq_enum!(AngleBracketedArg; Arg(0) Constraint(0)); +spanless_eq_enum!(AssocItemKind; Const(0 1 2) Fn(0) Type(0) MacCall(0)); +spanless_eq_enum!(AssocConstraintKind; Equality(term) Bound(bounds)); +spanless_eq_enum!(Async; Yes(span closure_id return_impl_trait_id) No); +spanless_eq_enum!(AttrArgs; Empty Delimited(0) Eq(0 1)); +spanless_eq_enum!(AttrArgsEq; Ast(0) Hir(0)); +spanless_eq_enum!(AttrStyle; Outer Inner); +spanless_eq_enum!(AttrTokenTree; Token(0 1) Delimited(0 1 2) Attributes(0)); +spanless_eq_enum!(BinOpKind; Add Sub Mul Div Rem And Or BitXor BitAnd BitOr Shl Shr Eq Lt Le Ne Ge Gt); +spanless_eq_enum!(BlockCheckMode; Default Unsafe(0)); +spanless_eq_enum!(BorrowKind; Ref Raw); +spanless_eq_enum!(ByRef; Yes No); +spanless_eq_enum!(CaptureBy; Value Ref); +spanless_eq_enum!(ClosureBinder; NotPresent For(span generic_params)); +spanless_eq_enum!(Const; Yes(0) No); +spanless_eq_enum!(Defaultness; Default(0) Final); +spanless_eq_enum!(Extern; None Implicit(0) Explicit(0 1)); +spanless_eq_enum!(FloatTy; F32 F64); +spanless_eq_enum!(FnRetTy; Default(0) Ty(0)); +spanless_eq_enum!(ForeignItemKind; Static(0 1 2) Fn(0) TyAlias(0) MacCall(0)); +spanless_eq_enum!(GenericArg; Lifetime(0) Type(0) Const(0)); +spanless_eq_enum!(GenericArgs; AngleBracketed(0) Parenthesized(0)); +spanless_eq_enum!(GenericBound; Trait(0 1) Outlives(0)); +spanless_eq_enum!(GenericParamKind; Lifetime Type(default) Const(ty kw_span default)); +spanless_eq_enum!(ImplPolarity; Positive Negative(0)); +spanless_eq_enum!(Inline; Yes No); +spanless_eq_enum!(InlineAsmRegOrRegClass; Reg(0) RegClass(0)); +spanless_eq_enum!(InlineAsmTemplatePiece; String(0) Placeholder(operand_idx modifier span)); +spanless_eq_enum!(IntTy; Isize I8 I16 I32 I64 I128); +spanless_eq_enum!(IsAuto; Yes No); +spanless_eq_enum!(LitFloatType; Suffixed(0) Unsuffixed); +spanless_eq_enum!(LitIntType; Signed(0) Unsigned(0) Unsuffixed); +spanless_eq_enum!(LocalKind; Decl Init(0) InitElse(0 1)); +spanless_eq_enum!(MacDelimiter; Parenthesis Bracket Brace); +spanless_eq_enum!(MacStmtStyle; Semicolon Braces NoBraces); +spanless_eq_enum!(ModKind; Loaded(0 1 2) Unloaded); +spanless_eq_enum!(Movability; Static Movable); +spanless_eq_enum!(Mutability; Mut Not); +spanless_eq_enum!(RangeEnd; Included(0) Excluded); +spanless_eq_enum!(RangeLimits; HalfOpen Closed); +spanless_eq_enum!(StmtKind; Local(0) Item(0) Expr(0) Semi(0) Empty MacCall(0)); +spanless_eq_enum!(StrStyle; Cooked Raw(0)); +spanless_eq_enum!(StructRest; Base(0) Rest(0) None); +spanless_eq_enum!(Term; Ty(0) Const(0)); +spanless_eq_enum!(TokenTree; Token(0 1) Delimited(0 1 2)); +spanless_eq_enum!(TraitBoundModifier; None Maybe MaybeConst MaybeConstMaybe); +spanless_eq_enum!(TraitObjectSyntax; Dyn DynStar None); +spanless_eq_enum!(UintTy; Usize U8 U16 U32 U64 U128); +spanless_eq_enum!(UnOp; Deref Not Neg); +spanless_eq_enum!(Unsafe; Yes(0) No); +spanless_eq_enum!(UnsafeSource; CompilerGenerated UserProvided); +spanless_eq_enum!(UseTreeKind; Simple(0) Nested(0) Glob); +spanless_eq_enum!(VariantData; Struct(0 1) Tuple(0 1) Unit(0)); 
+spanless_eq_enum!(VisibilityKind; Public Restricted(path id shorthand) Inherited);
+spanless_eq_enum!(WherePredicate; BoundPredicate(0) RegionPredicate(0) EqPredicate(0));
+spanless_eq_enum!(ExprKind; Box(0) Array(0) ConstBlock(0) Call(0 1)
+    MethodCall(0) Tup(0) Binary(0 1 2) Unary(0 1) Lit(0) Cast(0 1) Type(0 1)
+    Let(0 1 2) If(0 1 2) While(0 1 2) ForLoop(0 1 2 3) Loop(0 1 2) Match(0 1)
+    Closure(0) Block(0 1) Async(0 1 2) Await(0) TryBlock(0) Assign(0 1 2)
+    AssignOp(0 1 2) Field(0 1) Index(0 1) Underscore Range(0 1 2) Path(0 1)
+    AddrOf(0 1 2) Break(0 1) Continue(0) Ret(0) InlineAsm(0) MacCall(0)
+    Struct(0) Repeat(0 1) Paren(0) Try(0) Yield(0) Yeet(0) IncludedBytes(0)
+    Err);
+spanless_eq_enum!(InlineAsmOperand; In(reg expr) Out(reg late expr)
+    InOut(reg late expr) SplitInOut(reg late in_expr out_expr) Const(anon_const)
+    Sym(sym));
+spanless_eq_enum!(ItemKind; ExternCrate(0) Use(0) Static(0 1 2) Const(0 1 2)
+    Fn(0) Mod(0 1) ForeignMod(0) GlobalAsm(0) TyAlias(0) Enum(0 1) Struct(0 1)
+    Union(0 1) Trait(0) TraitAlias(0 1) Impl(0) MacCall(0) MacroDef(0));
+spanless_eq_enum!(LitKind; Str(0 1) ByteStr(0) Byte(0) Char(0) Int(0 1)
+    Float(0 1) Bool(0) Err);
+spanless_eq_enum!(PatKind; Wild Ident(0 1 2) Struct(0 1 2 3) TupleStruct(0 1 2)
+    Or(0) Path(0 1) Tuple(0) Box(0) Ref(0 1) Lit(0) Range(0 1 2) Slice(0) Rest
+    Paren(0) MacCall(0));
+spanless_eq_enum!(TyKind; Slice(0) Array(0 1) Ptr(0) Rptr(0 1) BareFn(0) Never
+    Tup(0) Path(0 1) TraitObject(0 1) ImplTrait(0 1) Paren(0) Typeof(0) Infer
+    ImplicitSelf MacCall(0) Err CVarArgs);
+
+impl SpanlessEq for Ident {
+    fn eq(&self, other: &Self) -> bool {
+        self.as_str() == other.as_str()
+    }
+}
+
+impl SpanlessEq for RangeSyntax {
+    fn eq(&self, _other: &Self) -> bool {
+        match self {
+            RangeSyntax::DotDotDot | RangeSyntax::DotDotEq => true,
+        }
+    }
+}
+
+impl SpanlessEq for Param {
+    fn eq(&self, other: &Self) -> bool {
+        let Param {
+            attrs,
+            ty,
+            pat,
+            id,
+            span: _,
+            is_placeholder,
+        } = self;
+        let Param {
+            attrs: attrs2,
+            ty: ty2,
+            pat: pat2,
+            id: id2,
+            span: _,
+            is_placeholder: is_placeholder2,
+        } = other;
+        SpanlessEq::eq(id, id2)
+            && SpanlessEq::eq(is_placeholder, is_placeholder2)
+            && (matches!(ty.kind, TyKind::Err)
+                || matches!(ty2.kind, TyKind::Err)
+                || SpanlessEq::eq(attrs, attrs2)
+                    && SpanlessEq::eq(ty, ty2)
+                    && SpanlessEq::eq(pat, pat2))
+    }
+}
+
+impl SpanlessEq for TokenKind {
+    fn eq(&self, other: &Self) -> bool {
+        match (self, other) {
+            (TokenKind::Literal(this), TokenKind::Literal(other)) => SpanlessEq::eq(this, other),
+            (TokenKind::DotDotEq, _) | (TokenKind::DotDotDot, _) => match other {
+                TokenKind::DotDotEq | TokenKind::DotDotDot => true,
+                _ => false,
+            },
+            (TokenKind::Interpolated(this), TokenKind::Interpolated(other)) => {
+                match (this.as_ref(), other.as_ref()) {
+                    (Nonterminal::NtExpr(this), Nonterminal::NtExpr(other)) => {
+                        SpanlessEq::eq(this, other)
+                    }
+                    _ => this == other,
+                }
+            }
+            _ => self == other,
+        }
+    }
+}
+
+impl SpanlessEq for TokenStream {
+    fn eq(&self, other: &Self) -> bool {
+        let mut this_trees = self.trees();
+        let mut other_trees = other.trees();
+        loop {
+            let this = match this_trees.next() {
+                None => return other_trees.next().is_none(),
+                Some(tree) => tree,
+            };
+            let other = match other_trees.next() {
+                None => return false,
+                Some(tree) => tree,
+            };
+            if SpanlessEq::eq(this, other) {
+                continue;
+            }
+            if let (TokenTree::Token(this, _), TokenTree::Token(other, _)) = (this, other) {
+                if match (&this.kind, &other.kind) {
+                    (TokenKind::Literal(this),
TokenKind::Literal(other)) => { + SpanlessEq::eq(this, other) + } + (TokenKind::DocComment(_kind, style, symbol), TokenKind::Pound) => { + doc_comment(*style, *symbol, &mut other_trees) + } + (TokenKind::Pound, TokenKind::DocComment(_kind, style, symbol)) => { + doc_comment(*style, *symbol, &mut this_trees) + } + _ => false, + } { + continue; + } + } + return false; + } + } +} + +fn doc_comment<'a>( + style: AttrStyle, + unescaped: Symbol, + trees: &mut impl Iterator<Item = &'a TokenTree>, +) -> bool { + if match style { + AttrStyle::Outer => false, + AttrStyle::Inner => true, + } { + match trees.next() { + Some(TokenTree::Token( + Token { + kind: TokenKind::Not, + span: _, + }, + _spacing, + )) => {} + _ => return false, + } + } + let stream = match trees.next() { + Some(TokenTree::Delimited(_span, Delimiter::Bracket, stream)) => stream, + _ => return false, + }; + let mut trees = stream.trees(); + match trees.next() { + Some(TokenTree::Token( + Token { + kind: TokenKind::Ident(symbol, false), + span: _, + }, + _spacing, + )) if *symbol == sym::doc => {} + _ => return false, + } + match trees.next() { + Some(TokenTree::Token( + Token { + kind: TokenKind::Eq, + span: _, + }, + _spacing, + )) => {} + _ => return false, + } + match trees.next() { + Some(TokenTree::Token(token, _spacing)) => { + is_escaped_literal_token(token, unescaped) && trees.next().is_none() + } + _ => false, + } +} + +fn is_escaped_literal_token(token: &Token, unescaped: Symbol) -> bool { + match token { + Token { + kind: TokenKind::Literal(lit), + span: _, + } => match MetaItemLit::from_token_lit(*lit, DUMMY_SP) { + Ok(lit) => is_escaped_literal_meta_item_lit(&lit, unescaped), + Err(_) => false, + }, + Token { + kind: TokenKind::Interpolated(nonterminal), + span: _, + } => match nonterminal.as_ref() { + Nonterminal::NtExpr(expr) => match &expr.kind { + ExprKind::Lit(lit) => is_escaped_lit(lit, unescaped), + _ => false, + }, + _ => false, + }, + _ => false, + } +} + +fn is_escaped_literal_attr_args(value: &AttrArgsEq, unescaped: Symbol) -> bool { + match value { + AttrArgsEq::Ast(expr) => match &expr.kind { + ExprKind::Lit(lit) => is_escaped_lit(lit, unescaped), + _ => false, + }, + AttrArgsEq::Hir(lit) => is_escaped_literal_meta_item_lit(lit, unescaped), + } +} + +fn is_escaped_literal_meta_item_lit(lit: &MetaItemLit, unescaped: Symbol) -> bool { + match lit { + MetaItemLit { + token_lit: + Lit { + kind: token::LitKind::Str, + symbol: _, + suffix: None, + }, + kind, + span: _, + } => is_escaped_lit_kind(kind, unescaped), + _ => false, + } +} + +fn is_escaped_lit(lit: &Lit, unescaped: Symbol) -> bool { + match lit { + Lit { + kind: token::LitKind::Str, + symbol: _, + suffix: None, + } => match LitKind::from_token_lit(*lit) { + Ok(lit_kind) => is_escaped_lit_kind(&lit_kind, unescaped), + _ => false, + }, + _ => false, + } +} + +fn is_escaped_lit_kind(kind: &LitKind, unescaped: Symbol) -> bool { + match kind { + LitKind::Str(symbol, StrStyle::Cooked) => { + symbol.as_str().replace('\r', "") == unescaped.as_str().replace('\r', "") + } + _ => false, + } +} + +impl SpanlessEq for LazyAttrTokenStream { + fn eq(&self, other: &Self) -> bool { + let this = self.to_attr_token_stream(); + let other = other.to_attr_token_stream(); + SpanlessEq::eq(&this, &other) + } +} + +impl SpanlessEq for AttrKind { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (AttrKind::Normal(normal), AttrKind::Normal(normal2)) => { + SpanlessEq::eq(normal, normal2) + } + (AttrKind::DocComment(kind, symbol), AttrKind::DocComment(kind2, 
symbol2)) => { + SpanlessEq::eq(kind, kind2) && SpanlessEq::eq(symbol, symbol2) + } + (AttrKind::DocComment(kind, unescaped), AttrKind::Normal(normal2)) => { + match kind { + CommentKind::Line | CommentKind::Block => {} + } + let path = Path::from_ident(Ident::with_dummy_span(sym::doc)); + SpanlessEq::eq(&path, &normal2.item.path) + && match &normal2.item.args { + AttrArgs::Empty | AttrArgs::Delimited(_) => false, + AttrArgs::Eq(_span, value) => { + is_escaped_literal_attr_args(value, *unescaped) + } + } + } + (AttrKind::Normal(_), AttrKind::DocComment(..)) => SpanlessEq::eq(other, self), + } + } +} diff --git a/third_party/rust/syn/tests/common/mod.rs b/third_party/rust/syn/tests/common/mod.rs new file mode 100644 index 0000000000..2156530b7c --- /dev/null +++ b/third_party/rust/syn/tests/common/mod.rs @@ -0,0 +1,28 @@ +#![allow(dead_code)] +#![allow(clippy::module_name_repetitions, clippy::shadow_unrelated)] + +use rayon::ThreadPoolBuilder; +use std::env; + +pub mod eq; +pub mod parse; + +/// Read the `ABORT_AFTER_FAILURE` environment variable, and parse it. +pub fn abort_after() -> usize { + match env::var("ABORT_AFTER_FAILURE") { + Ok(s) => s.parse().expect("failed to parse ABORT_AFTER_FAILURE"), + Err(_) => usize::max_value(), + } +} + +/// Configure Rayon threadpool. +pub fn rayon_init() { + let stack_size = match env::var("RUST_MIN_STACK") { + Ok(s) => s.parse().expect("failed to parse RUST_MIN_STACK"), + Err(_) => 20 * 1024 * 1024, + }; + ThreadPoolBuilder::new() + .stack_size(stack_size) + .build_global() + .unwrap(); +} diff --git a/third_party/rust/syn/tests/common/parse.rs b/third_party/rust/syn/tests/common/parse.rs new file mode 100644 index 0000000000..636d0a37a0 --- /dev/null +++ b/third_party/rust/syn/tests/common/parse.rs @@ -0,0 +1,48 @@ +extern crate rustc_ast; +extern crate rustc_expand; +extern crate rustc_parse as parse; +extern crate rustc_session; +extern crate rustc_span; + +use rustc_ast::ast; +use rustc_ast::ptr::P; +use rustc_session::parse::ParseSess; +use rustc_span::source_map::FilePathMapping; +use rustc_span::FileName; +use std::panic; + +pub fn librustc_expr(input: &str) -> Option<P<ast::Expr>> { + match panic::catch_unwind(|| { + let sess = ParseSess::new(FilePathMapping::empty()); + let e = parse::new_parser_from_source_str( + &sess, + FileName::Custom("test_precedence".to_string()), + input.to_string(), + ) + .parse_expr(); + match e { + Ok(expr) => Some(expr), + Err(mut diagnostic) => { + diagnostic.emit(); + None + } + } + }) { + Ok(Some(e)) => Some(e), + Ok(None) => None, + Err(_) => { + errorf!("librustc panicked\n"); + None + } + } +} + +pub fn syn_expr(input: &str) -> Option<syn::Expr> { + match syn::parse_str(input) { + Ok(e) => Some(e), + Err(msg) => { + errorf!("syn failed to parse\n{:?}\n", msg); + None + } + } +} diff --git a/third_party/rust/syn/tests/debug/gen.rs b/third_party/rust/syn/tests/debug/gen.rs new file mode 100644 index 0000000000..cfd63d117b --- /dev/null +++ b/third_party/rust/syn/tests/debug/gen.rs @@ -0,0 +1,5640 @@ +// This file is @generated by syn-internal-codegen. +// It is not intended for manual editing. 
+ +use super::{Lite, RefCast}; +use std::fmt::{self, Debug, Display}; +impl Debug for Lite<syn::Abi> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("Abi"); + if let Some(val) = &_val.name { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::LitStr); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("name", Print::ref_cast(val)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::AngleBracketedGenericArguments> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("AngleBracketedGenericArguments"); + if let Some(val) = &_val.colon2_token { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Colon2); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("colon2_token", Print::ref_cast(val)); + } + if !_val.args.is_empty() { + formatter.field("args", Lite(&_val.args)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::Arm> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("Arm"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("pat", Lite(&_val.pat)); + if let Some(val) = &_val.guard { + #[derive(RefCast)] + #[repr(transparent)] + struct Print((syn::token::If, Box<syn::Expr>)); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(&_val.1), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("guard", Print::ref_cast(val)); + } + formatter.field("body", Lite(&_val.body)); + if let Some(val) = &_val.comma { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Comma); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("comma", Print::ref_cast(val)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::AttrStyle> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + match _val { + syn::AttrStyle::Outer => formatter.write_str("Outer"), + syn::AttrStyle::Inner(_val) => { + formatter.write_str("Inner")?; + Ok(()) + } + } + } +} +impl Debug for Lite<syn::Attribute> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("Attribute"); + formatter.field("style", Lite(&_val.style)); + formatter.field("path", Lite(&_val.path)); + formatter.field("tokens", Lite(&_val.tokens)); + formatter.finish() + } +} +impl Debug for Lite<syn::BareFnArg> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("BareFnArg"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if let Some(val) = &_val.name { + #[derive(RefCast)] + #[repr(transparent)] + struct Print((proc_macro2::Ident, syn::token::Colon)); + impl Debug for Print 
{ + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(&_val.0), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("name", Print::ref_cast(val)); + } + formatter.field("ty", Lite(&_val.ty)); + formatter.finish() + } +} +impl Debug for Lite<syn::BinOp> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + match _val { + syn::BinOp::Add(_val) => { + formatter.write_str("Add")?; + Ok(()) + } + syn::BinOp::Sub(_val) => { + formatter.write_str("Sub")?; + Ok(()) + } + syn::BinOp::Mul(_val) => { + formatter.write_str("Mul")?; + Ok(()) + } + syn::BinOp::Div(_val) => { + formatter.write_str("Div")?; + Ok(()) + } + syn::BinOp::Rem(_val) => { + formatter.write_str("Rem")?; + Ok(()) + } + syn::BinOp::And(_val) => { + formatter.write_str("And")?; + Ok(()) + } + syn::BinOp::Or(_val) => { + formatter.write_str("Or")?; + Ok(()) + } + syn::BinOp::BitXor(_val) => { + formatter.write_str("BitXor")?; + Ok(()) + } + syn::BinOp::BitAnd(_val) => { + formatter.write_str("BitAnd")?; + Ok(()) + } + syn::BinOp::BitOr(_val) => { + formatter.write_str("BitOr")?; + Ok(()) + } + syn::BinOp::Shl(_val) => { + formatter.write_str("Shl")?; + Ok(()) + } + syn::BinOp::Shr(_val) => { + formatter.write_str("Shr")?; + Ok(()) + } + syn::BinOp::Eq(_val) => { + formatter.write_str("Eq")?; + Ok(()) + } + syn::BinOp::Lt(_val) => { + formatter.write_str("Lt")?; + Ok(()) + } + syn::BinOp::Le(_val) => { + formatter.write_str("Le")?; + Ok(()) + } + syn::BinOp::Ne(_val) => { + formatter.write_str("Ne")?; + Ok(()) + } + syn::BinOp::Ge(_val) => { + formatter.write_str("Ge")?; + Ok(()) + } + syn::BinOp::Gt(_val) => { + formatter.write_str("Gt")?; + Ok(()) + } + syn::BinOp::AddEq(_val) => { + formatter.write_str("AddEq")?; + Ok(()) + } + syn::BinOp::SubEq(_val) => { + formatter.write_str("SubEq")?; + Ok(()) + } + syn::BinOp::MulEq(_val) => { + formatter.write_str("MulEq")?; + Ok(()) + } + syn::BinOp::DivEq(_val) => { + formatter.write_str("DivEq")?; + Ok(()) + } + syn::BinOp::RemEq(_val) => { + formatter.write_str("RemEq")?; + Ok(()) + } + syn::BinOp::BitXorEq(_val) => { + formatter.write_str("BitXorEq")?; + Ok(()) + } + syn::BinOp::BitAndEq(_val) => { + formatter.write_str("BitAndEq")?; + Ok(()) + } + syn::BinOp::BitOrEq(_val) => { + formatter.write_str("BitOrEq")?; + Ok(()) + } + syn::BinOp::ShlEq(_val) => { + formatter.write_str("ShlEq")?; + Ok(()) + } + syn::BinOp::ShrEq(_val) => { + formatter.write_str("ShrEq")?; + Ok(()) + } + } + } +} +impl Debug for Lite<syn::Binding> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("Binding"); + formatter.field("ident", Lite(&_val.ident)); + formatter.field("ty", Lite(&_val.ty)); + formatter.finish() + } +} +impl Debug for Lite<syn::Block> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("Block"); + if !_val.stmts.is_empty() { + formatter.field("stmts", Lite(&_val.stmts)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::BoundLifetimes> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("BoundLifetimes"); + if !_val.lifetimes.is_empty() { + formatter.field("lifetimes", Lite(&_val.lifetimes)); + } + formatter.finish() + } +} +impl Debug for 
Lite<syn::ConstParam> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ConstParam"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("ident", Lite(&_val.ident)); + formatter.field("ty", Lite(&_val.ty)); + if let Some(val) = &_val.eq_token { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Eq); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("eq_token", Print::ref_cast(val)); + } + if let Some(val) = &_val.default { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::Expr); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("default", Print::ref_cast(val)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::Constraint> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("Constraint"); + formatter.field("ident", Lite(&_val.ident)); + if !_val.bounds.is_empty() { + formatter.field("bounds", Lite(&_val.bounds)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::Data> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + match _val { + syn::Data::Struct(_val) => { + let mut formatter = formatter.debug_struct("Data::Struct"); + formatter.field("fields", Lite(&_val.fields)); + if let Some(val) = &_val.semi_token { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Semi); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("semi_token", Print::ref_cast(val)); + } + formatter.finish() + } + syn::Data::Enum(_val) => { + let mut formatter = formatter.debug_struct("Data::Enum"); + if !_val.variants.is_empty() { + formatter.field("variants", Lite(&_val.variants)); + } + formatter.finish() + } + syn::Data::Union(_val) => { + let mut formatter = formatter.debug_struct("Data::Union"); + formatter.field("fields", Lite(&_val.fields)); + formatter.finish() + } + } + } +} +impl Debug for Lite<syn::DataEnum> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("DataEnum"); + if !_val.variants.is_empty() { + formatter.field("variants", Lite(&_val.variants)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::DataStruct> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("DataStruct"); + formatter.field("fields", Lite(&_val.fields)); + if let Some(val) = &_val.semi_token { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Semi); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("semi_token", Print::ref_cast(val)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::DataUnion> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("DataUnion"); + 
formatter.field("fields", Lite(&_val.fields)); + formatter.finish() + } +} +impl Debug for Lite<syn::DeriveInput> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("DeriveInput"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("vis", Lite(&_val.vis)); + formatter.field("ident", Lite(&_val.ident)); + formatter.field("generics", Lite(&_val.generics)); + formatter.field("data", Lite(&_val.data)); + formatter.finish() + } +} +impl Debug for Lite<syn::Expr> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + match _val { + syn::Expr::Array(_val) => { + let mut formatter = formatter.debug_struct("Expr::Array"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if !_val.elems.is_empty() { + formatter.field("elems", Lite(&_val.elems)); + } + formatter.finish() + } + syn::Expr::Assign(_val) => { + let mut formatter = formatter.debug_struct("Expr::Assign"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("left", Lite(&_val.left)); + formatter.field("right", Lite(&_val.right)); + formatter.finish() + } + syn::Expr::AssignOp(_val) => { + let mut formatter = formatter.debug_struct("Expr::AssignOp"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("left", Lite(&_val.left)); + formatter.field("op", Lite(&_val.op)); + formatter.field("right", Lite(&_val.right)); + formatter.finish() + } + syn::Expr::Async(_val) => { + let mut formatter = formatter.debug_struct("Expr::Async"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if let Some(val) = &_val.capture { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Move); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("capture", Print::ref_cast(val)); + } + formatter.field("block", Lite(&_val.block)); + formatter.finish() + } + syn::Expr::Await(_val) => { + let mut formatter = formatter.debug_struct("Expr::Await"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("base", Lite(&_val.base)); + formatter.finish() + } + syn::Expr::Binary(_val) => { + let mut formatter = formatter.debug_struct("Expr::Binary"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("left", Lite(&_val.left)); + formatter.field("op", Lite(&_val.op)); + formatter.field("right", Lite(&_val.right)); + formatter.finish() + } + syn::Expr::Block(_val) => { + let mut formatter = formatter.debug_struct("Expr::Block"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if let Some(val) = &_val.label { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::Label); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("label", Print::ref_cast(val)); + } + formatter.field("block", Lite(&_val.block)); + formatter.finish() + } + syn::Expr::Box(_val) => { + let mut formatter = formatter.debug_struct("Expr::Box"); + if !_val.attrs.is_empty() { + formatter.field("attrs", 
Lite(&_val.attrs)); + } + formatter.field("expr", Lite(&_val.expr)); + formatter.finish() + } + syn::Expr::Break(_val) => { + let mut formatter = formatter.debug_struct("Expr::Break"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if let Some(val) = &_val.label { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::Lifetime); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("label", Print::ref_cast(val)); + } + if let Some(val) = &_val.expr { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(Box<syn::Expr>); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("expr", Print::ref_cast(val)); + } + formatter.finish() + } + syn::Expr::Call(_val) => { + let mut formatter = formatter.debug_struct("Expr::Call"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("func", Lite(&_val.func)); + if !_val.args.is_empty() { + formatter.field("args", Lite(&_val.args)); + } + formatter.finish() + } + syn::Expr::Cast(_val) => { + let mut formatter = formatter.debug_struct("Expr::Cast"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("expr", Lite(&_val.expr)); + formatter.field("ty", Lite(&_val.ty)); + formatter.finish() + } + syn::Expr::Closure(_val) => { + let mut formatter = formatter.debug_struct("Expr::Closure"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if let Some(val) = &_val.movability { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Static); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("movability", Print::ref_cast(val)); + } + if let Some(val) = &_val.asyncness { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Async); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("asyncness", Print::ref_cast(val)); + } + if let Some(val) = &_val.capture { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Move); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("capture", Print::ref_cast(val)); + } + if !_val.inputs.is_empty() { + formatter.field("inputs", Lite(&_val.inputs)); + } + formatter.field("output", Lite(&_val.output)); + formatter.field("body", Lite(&_val.body)); + formatter.finish() + } + syn::Expr::Continue(_val) => { + let mut formatter = formatter.debug_struct("Expr::Continue"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if let Some(val) = &_val.label { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::Lifetime); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + 
formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("label", Print::ref_cast(val)); + } + formatter.finish() + } + syn::Expr::Field(_val) => { + let mut formatter = formatter.debug_struct("Expr::Field"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("base", Lite(&_val.base)); + formatter.field("member", Lite(&_val.member)); + formatter.finish() + } + syn::Expr::ForLoop(_val) => { + let mut formatter = formatter.debug_struct("Expr::ForLoop"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if let Some(val) = &_val.label { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::Label); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("label", Print::ref_cast(val)); + } + formatter.field("pat", Lite(&_val.pat)); + formatter.field("expr", Lite(&_val.expr)); + formatter.field("body", Lite(&_val.body)); + formatter.finish() + } + syn::Expr::Group(_val) => { + let mut formatter = formatter.debug_struct("Expr::Group"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("expr", Lite(&_val.expr)); + formatter.finish() + } + syn::Expr::If(_val) => { + let mut formatter = formatter.debug_struct("Expr::If"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("cond", Lite(&_val.cond)); + formatter.field("then_branch", Lite(&_val.then_branch)); + if let Some(val) = &_val.else_branch { + #[derive(RefCast)] + #[repr(transparent)] + struct Print((syn::token::Else, Box<syn::Expr>)); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(&_val.1), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("else_branch", Print::ref_cast(val)); + } + formatter.finish() + } + syn::Expr::Index(_val) => { + let mut formatter = formatter.debug_struct("Expr::Index"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("expr", Lite(&_val.expr)); + formatter.field("index", Lite(&_val.index)); + formatter.finish() + } + syn::Expr::Let(_val) => { + let mut formatter = formatter.debug_struct("Expr::Let"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("pat", Lite(&_val.pat)); + formatter.field("expr", Lite(&_val.expr)); + formatter.finish() + } + syn::Expr::Lit(_val) => { + let mut formatter = formatter.debug_struct("Expr::Lit"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("lit", Lite(&_val.lit)); + formatter.finish() + } + syn::Expr::Loop(_val) => { + let mut formatter = formatter.debug_struct("Expr::Loop"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if let Some(val) = &_val.label { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::Label); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("label", Print::ref_cast(val)); + } 
+ formatter.field("body", Lite(&_val.body)); + formatter.finish() + } + syn::Expr::Macro(_val) => { + let mut formatter = formatter.debug_struct("Expr::Macro"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("mac", Lite(&_val.mac)); + formatter.finish() + } + syn::Expr::Match(_val) => { + let mut formatter = formatter.debug_struct("Expr::Match"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("expr", Lite(&_val.expr)); + if !_val.arms.is_empty() { + formatter.field("arms", Lite(&_val.arms)); + } + formatter.finish() + } + syn::Expr::MethodCall(_val) => { + let mut formatter = formatter.debug_struct("Expr::MethodCall"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("receiver", Lite(&_val.receiver)); + formatter.field("method", Lite(&_val.method)); + if let Some(val) = &_val.turbofish { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::MethodTurbofish); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("turbofish", Print::ref_cast(val)); + } + if !_val.args.is_empty() { + formatter.field("args", Lite(&_val.args)); + } + formatter.finish() + } + syn::Expr::Paren(_val) => { + let mut formatter = formatter.debug_struct("Expr::Paren"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("expr", Lite(&_val.expr)); + formatter.finish() + } + syn::Expr::Path(_val) => { + let mut formatter = formatter.debug_struct("Expr::Path"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if let Some(val) = &_val.qself { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::QSelf); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("qself", Print::ref_cast(val)); + } + formatter.field("path", Lite(&_val.path)); + formatter.finish() + } + syn::Expr::Range(_val) => { + let mut formatter = formatter.debug_struct("Expr::Range"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if let Some(val) = &_val.from { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(Box<syn::Expr>); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("from", Print::ref_cast(val)); + } + formatter.field("limits", Lite(&_val.limits)); + if let Some(val) = &_val.to { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(Box<syn::Expr>); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("to", Print::ref_cast(val)); + } + formatter.finish() + } + syn::Expr::Reference(_val) => { + let mut formatter = formatter.debug_struct("Expr::Reference"); + if 
!_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if let Some(val) = &_val.mutability { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Mut); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("mutability", Print::ref_cast(val)); + } + formatter.field("expr", Lite(&_val.expr)); + formatter.finish() + } + syn::Expr::Repeat(_val) => { + let mut formatter = formatter.debug_struct("Expr::Repeat"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("expr", Lite(&_val.expr)); + formatter.field("len", Lite(&_val.len)); + formatter.finish() + } + syn::Expr::Return(_val) => { + let mut formatter = formatter.debug_struct("Expr::Return"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if let Some(val) = &_val.expr { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(Box<syn::Expr>); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("expr", Print::ref_cast(val)); + } + formatter.finish() + } + syn::Expr::Struct(_val) => { + let mut formatter = formatter.debug_struct("Expr::Struct"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("path", Lite(&_val.path)); + if !_val.fields.is_empty() { + formatter.field("fields", Lite(&_val.fields)); + } + if let Some(val) = &_val.dot2_token { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Dot2); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("dot2_token", Print::ref_cast(val)); + } + if let Some(val) = &_val.rest { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(Box<syn::Expr>); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("rest", Print::ref_cast(val)); + } + formatter.finish() + } + syn::Expr::Try(_val) => { + let mut formatter = formatter.debug_struct("Expr::Try"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("expr", Lite(&_val.expr)); + formatter.finish() + } + syn::Expr::TryBlock(_val) => { + let mut formatter = formatter.debug_struct("Expr::TryBlock"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("block", Lite(&_val.block)); + formatter.finish() + } + syn::Expr::Tuple(_val) => { + let mut formatter = formatter.debug_struct("Expr::Tuple"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if !_val.elems.is_empty() { + formatter.field("elems", Lite(&_val.elems)); + } + formatter.finish() + } + syn::Expr::Type(_val) => { + let mut formatter = formatter.debug_struct("Expr::Type"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("expr", Lite(&_val.expr)); + formatter.field("ty", Lite(&_val.ty)); + formatter.finish() + } + syn::Expr::Unary(_val) => { + let mut formatter = 
formatter.debug_struct("Expr::Unary"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("op", Lite(&_val.op)); + formatter.field("expr", Lite(&_val.expr)); + formatter.finish() + } + syn::Expr::Unsafe(_val) => { + let mut formatter = formatter.debug_struct("Expr::Unsafe"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("block", Lite(&_val.block)); + formatter.finish() + } + syn::Expr::Verbatim(_val) => { + formatter.write_str("Verbatim")?; + formatter.write_str("(`")?; + Display::fmt(_val, formatter)?; + formatter.write_str("`)")?; + Ok(()) + } + syn::Expr::While(_val) => { + let mut formatter = formatter.debug_struct("Expr::While"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if let Some(val) = &_val.label { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::Label); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("label", Print::ref_cast(val)); + } + formatter.field("cond", Lite(&_val.cond)); + formatter.field("body", Lite(&_val.body)); + formatter.finish() + } + syn::Expr::Yield(_val) => { + let mut formatter = formatter.debug_struct("Expr::Yield"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if let Some(val) = &_val.expr { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(Box<syn::Expr>); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("expr", Print::ref_cast(val)); + } + formatter.finish() + } + _ => unreachable!(), + } + } +} +impl Debug for Lite<syn::ExprArray> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ExprArray"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if !_val.elems.is_empty() { + formatter.field("elems", Lite(&_val.elems)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::ExprAssign> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ExprAssign"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("left", Lite(&_val.left)); + formatter.field("right", Lite(&_val.right)); + formatter.finish() + } +} +impl Debug for Lite<syn::ExprAssignOp> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ExprAssignOp"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("left", Lite(&_val.left)); + formatter.field("op", Lite(&_val.op)); + formatter.field("right", Lite(&_val.right)); + formatter.finish() + } +} +impl Debug for Lite<syn::ExprAsync> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ExprAsync"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if let Some(val) = &_val.capture { + #[derive(RefCast)] + 
#[repr(transparent)] + struct Print(syn::token::Move); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("capture", Print::ref_cast(val)); + } + formatter.field("block", Lite(&_val.block)); + formatter.finish() + } +} +impl Debug for Lite<syn::ExprAwait> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ExprAwait"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("base", Lite(&_val.base)); + formatter.finish() + } +} +impl Debug for Lite<syn::ExprBinary> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ExprBinary"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("left", Lite(&_val.left)); + formatter.field("op", Lite(&_val.op)); + formatter.field("right", Lite(&_val.right)); + formatter.finish() + } +} +impl Debug for Lite<syn::ExprBlock> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ExprBlock"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if let Some(val) = &_val.label { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::Label); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("label", Print::ref_cast(val)); + } + formatter.field("block", Lite(&_val.block)); + formatter.finish() + } +} +impl Debug for Lite<syn::ExprBox> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ExprBox"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("expr", Lite(&_val.expr)); + formatter.finish() + } +} +impl Debug for Lite<syn::ExprBreak> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ExprBreak"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if let Some(val) = &_val.label { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::Lifetime); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("label", Print::ref_cast(val)); + } + if let Some(val) = &_val.expr { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(Box<syn::Expr>); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("expr", Print::ref_cast(val)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::ExprCall> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ExprCall"); + if 
!_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("func", Lite(&_val.func)); + if !_val.args.is_empty() { + formatter.field("args", Lite(&_val.args)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::ExprCast> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ExprCast"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("expr", Lite(&_val.expr)); + formatter.field("ty", Lite(&_val.ty)); + formatter.finish() + } +} +impl Debug for Lite<syn::ExprClosure> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ExprClosure"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if let Some(val) = &_val.movability { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Static); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("movability", Print::ref_cast(val)); + } + if let Some(val) = &_val.asyncness { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Async); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("asyncness", Print::ref_cast(val)); + } + if let Some(val) = &_val.capture { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Move); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("capture", Print::ref_cast(val)); + } + if !_val.inputs.is_empty() { + formatter.field("inputs", Lite(&_val.inputs)); + } + formatter.field("output", Lite(&_val.output)); + formatter.field("body", Lite(&_val.body)); + formatter.finish() + } +} +impl Debug for Lite<syn::ExprContinue> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ExprContinue"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if let Some(val) = &_val.label { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::Lifetime); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("label", Print::ref_cast(val)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::ExprField> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ExprField"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("base", Lite(&_val.base)); + formatter.field("member", Lite(&_val.member)); + formatter.finish() + } +} +impl Debug for Lite<syn::ExprForLoop> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ExprForLoop"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if let Some(val) = &_val.label { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::Label); + impl Debug for Print { 
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("label", Print::ref_cast(val)); + } + formatter.field("pat", Lite(&_val.pat)); + formatter.field("expr", Lite(&_val.expr)); + formatter.field("body", Lite(&_val.body)); + formatter.finish() + } +} +impl Debug for Lite<syn::ExprGroup> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ExprGroup"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("expr", Lite(&_val.expr)); + formatter.finish() + } +} +impl Debug for Lite<syn::ExprIf> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ExprIf"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("cond", Lite(&_val.cond)); + formatter.field("then_branch", Lite(&_val.then_branch)); + if let Some(val) = &_val.else_branch { + #[derive(RefCast)] + #[repr(transparent)] + struct Print((syn::token::Else, Box<syn::Expr>)); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(&_val.1), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("else_branch", Print::ref_cast(val)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::ExprIndex> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ExprIndex"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("expr", Lite(&_val.expr)); + formatter.field("index", Lite(&_val.index)); + formatter.finish() + } +} +impl Debug for Lite<syn::ExprLet> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ExprLet"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("pat", Lite(&_val.pat)); + formatter.field("expr", Lite(&_val.expr)); + formatter.finish() + } +} +impl Debug for Lite<syn::ExprLit> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ExprLit"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("lit", Lite(&_val.lit)); + formatter.finish() + } +} +impl Debug for Lite<syn::ExprLoop> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ExprLoop"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if let Some(val) = &_val.label { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::Label); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("label", Print::ref_cast(val)); + } + formatter.field("body", Lite(&_val.body)); + formatter.finish() + } +} +impl Debug for Lite<syn::ExprMacro> { + fn 
fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ExprMacro"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("mac", Lite(&_val.mac)); + formatter.finish() + } +} +impl Debug for Lite<syn::ExprMatch> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ExprMatch"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("expr", Lite(&_val.expr)); + if !_val.arms.is_empty() { + formatter.field("arms", Lite(&_val.arms)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::ExprMethodCall> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ExprMethodCall"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("receiver", Lite(&_val.receiver)); + formatter.field("method", Lite(&_val.method)); + if let Some(val) = &_val.turbofish { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::MethodTurbofish); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("turbofish", Print::ref_cast(val)); + } + if !_val.args.is_empty() { + formatter.field("args", Lite(&_val.args)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::ExprParen> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ExprParen"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("expr", Lite(&_val.expr)); + formatter.finish() + } +} +impl Debug for Lite<syn::ExprPath> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ExprPath"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if let Some(val) = &_val.qself { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::QSelf); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("qself", Print::ref_cast(val)); + } + formatter.field("path", Lite(&_val.path)); + formatter.finish() + } +} +impl Debug for Lite<syn::ExprRange> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ExprRange"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if let Some(val) = &_val.from { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(Box<syn::Expr>); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("from", Print::ref_cast(val)); + } + formatter.field("limits", Lite(&_val.limits)); + if let Some(val) = &_val.to { + #[derive(RefCast)] + 
#[repr(transparent)] + struct Print(Box<syn::Expr>); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("to", Print::ref_cast(val)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::ExprReference> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ExprReference"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if let Some(val) = &_val.mutability { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Mut); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("mutability", Print::ref_cast(val)); + } + formatter.field("expr", Lite(&_val.expr)); + formatter.finish() + } +} +impl Debug for Lite<syn::ExprRepeat> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ExprRepeat"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("expr", Lite(&_val.expr)); + formatter.field("len", Lite(&_val.len)); + formatter.finish() + } +} +impl Debug for Lite<syn::ExprReturn> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ExprReturn"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if let Some(val) = &_val.expr { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(Box<syn::Expr>); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("expr", Print::ref_cast(val)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::ExprStruct> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ExprStruct"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("path", Lite(&_val.path)); + if !_val.fields.is_empty() { + formatter.field("fields", Lite(&_val.fields)); + } + if let Some(val) = &_val.dot2_token { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Dot2); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("dot2_token", Print::ref_cast(val)); + } + if let Some(val) = &_val.rest { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(Box<syn::Expr>); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("rest", Print::ref_cast(val)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::ExprTry> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ExprTry"); + if !_val.attrs.is_empty() { + 
formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("expr", Lite(&_val.expr)); + formatter.finish() + } +} +impl Debug for Lite<syn::ExprTryBlock> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ExprTryBlock"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("block", Lite(&_val.block)); + formatter.finish() + } +} +impl Debug for Lite<syn::ExprTuple> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ExprTuple"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if !_val.elems.is_empty() { + formatter.field("elems", Lite(&_val.elems)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::ExprType> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ExprType"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("expr", Lite(&_val.expr)); + formatter.field("ty", Lite(&_val.ty)); + formatter.finish() + } +} +impl Debug for Lite<syn::ExprUnary> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ExprUnary"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("op", Lite(&_val.op)); + formatter.field("expr", Lite(&_val.expr)); + formatter.finish() + } +} +impl Debug for Lite<syn::ExprUnsafe> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ExprUnsafe"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("block", Lite(&_val.block)); + formatter.finish() + } +} +impl Debug for Lite<syn::ExprWhile> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ExprWhile"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if let Some(val) = &_val.label { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::Label); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("label", Print::ref_cast(val)); + } + formatter.field("cond", Lite(&_val.cond)); + formatter.field("body", Lite(&_val.body)); + formatter.finish() + } +} +impl Debug for Lite<syn::ExprYield> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ExprYield"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if let Some(val) = &_val.expr { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(Box<syn::Expr>); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("expr", Print::ref_cast(val)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::Field> { + fn fmt(&self, formatter: 
&mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("Field"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("vis", Lite(&_val.vis)); + if let Some(val) = &_val.ident { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(proc_macro2::Ident); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("ident", Print::ref_cast(val)); + } + if let Some(val) = &_val.colon_token { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Colon); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("colon_token", Print::ref_cast(val)); + } + formatter.field("ty", Lite(&_val.ty)); + formatter.finish() + } +} +impl Debug for Lite<syn::FieldPat> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("FieldPat"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("member", Lite(&_val.member)); + if let Some(val) = &_val.colon_token { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Colon); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("colon_token", Print::ref_cast(val)); + } + formatter.field("pat", Lite(&_val.pat)); + formatter.finish() + } +} +impl Debug for Lite<syn::FieldValue> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("FieldValue"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("member", Lite(&_val.member)); + if let Some(val) = &_val.colon_token { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Colon); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("colon_token", Print::ref_cast(val)); + } + formatter.field("expr", Lite(&_val.expr)); + formatter.finish() + } +} +impl Debug for Lite<syn::Fields> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + match _val { + syn::Fields::Named(_val) => { + let mut formatter = formatter.debug_struct("Fields::Named"); + if !_val.named.is_empty() { + formatter.field("named", Lite(&_val.named)); + } + formatter.finish() + } + syn::Fields::Unnamed(_val) => { + let mut formatter = formatter.debug_struct("Fields::Unnamed"); + if !_val.unnamed.is_empty() { + formatter.field("unnamed", Lite(&_val.unnamed)); + } + formatter.finish() + } + syn::Fields::Unit => formatter.write_str("Unit"), + } + } +} +impl Debug for Lite<syn::FieldsNamed> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("FieldsNamed"); + if !_val.named.is_empty() { + formatter.field("named", Lite(&_val.named)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::FieldsUnnamed> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = 
formatter.debug_struct("FieldsUnnamed"); + if !_val.unnamed.is_empty() { + formatter.field("unnamed", Lite(&_val.unnamed)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::File> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("File"); + if let Some(val) = &_val.shebang { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(String); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("shebang", Print::ref_cast(val)); + } + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if !_val.items.is_empty() { + formatter.field("items", Lite(&_val.items)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::FnArg> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + match _val { + syn::FnArg::Receiver(_val) => { + formatter.write_str("Receiver")?; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + syn::FnArg::Typed(_val) => { + formatter.write_str("Typed")?; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + } +} +impl Debug for Lite<syn::ForeignItem> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + match _val { + syn::ForeignItem::Fn(_val) => { + let mut formatter = formatter.debug_struct("ForeignItem::Fn"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("vis", Lite(&_val.vis)); + formatter.field("sig", Lite(&_val.sig)); + formatter.finish() + } + syn::ForeignItem::Static(_val) => { + let mut formatter = formatter.debug_struct("ForeignItem::Static"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("vis", Lite(&_val.vis)); + if let Some(val) = &_val.mutability { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Mut); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("mutability", Print::ref_cast(val)); + } + formatter.field("ident", Lite(&_val.ident)); + formatter.field("ty", Lite(&_val.ty)); + formatter.finish() + } + syn::ForeignItem::Type(_val) => { + let mut formatter = formatter.debug_struct("ForeignItem::Type"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("vis", Lite(&_val.vis)); + formatter.field("ident", Lite(&_val.ident)); + formatter.finish() + } + syn::ForeignItem::Macro(_val) => { + let mut formatter = formatter.debug_struct("ForeignItem::Macro"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("mac", Lite(&_val.mac)); + if let Some(val) = &_val.semi_token { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Semi); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("semi_token", Print::ref_cast(val)); + } + formatter.finish() + } + syn::ForeignItem::Verbatim(_val) => { + formatter.write_str("Verbatim")?; + formatter.write_str("(`")?; + Display::fmt(_val, formatter)?; + 
formatter.write_str("`)")?; + Ok(()) + } + _ => unreachable!(), + } + } +} +impl Debug for Lite<syn::ForeignItemFn> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ForeignItemFn"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("vis", Lite(&_val.vis)); + formatter.field("sig", Lite(&_val.sig)); + formatter.finish() + } +} +impl Debug for Lite<syn::ForeignItemMacro> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ForeignItemMacro"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("mac", Lite(&_val.mac)); + if let Some(val) = &_val.semi_token { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Semi); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("semi_token", Print::ref_cast(val)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::ForeignItemStatic> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ForeignItemStatic"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("vis", Lite(&_val.vis)); + if let Some(val) = &_val.mutability { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Mut); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("mutability", Print::ref_cast(val)); + } + formatter.field("ident", Lite(&_val.ident)); + formatter.field("ty", Lite(&_val.ty)); + formatter.finish() + } +} +impl Debug for Lite<syn::ForeignItemType> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ForeignItemType"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("vis", Lite(&_val.vis)); + formatter.field("ident", Lite(&_val.ident)); + formatter.finish() + } +} +impl Debug for Lite<syn::GenericArgument> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + match _val { + syn::GenericArgument::Lifetime(_val) => { + formatter.write_str("Lifetime")?; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + syn::GenericArgument::Type(_val) => { + formatter.write_str("Type")?; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + syn::GenericArgument::Const(_val) => { + formatter.write_str("Const")?; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + syn::GenericArgument::Binding(_val) => { + formatter.write_str("Binding")?; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + syn::GenericArgument::Constraint(_val) => { + formatter.write_str("Constraint")?; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + } +} +impl Debug for Lite<syn::GenericMethodArgument> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + match _val { + 
syn::GenericMethodArgument::Type(_val) => { + formatter.write_str("Type")?; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + syn::GenericMethodArgument::Const(_val) => { + formatter.write_str("Const")?; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + } +} +impl Debug for Lite<syn::GenericParam> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + match _val { + syn::GenericParam::Type(_val) => { + formatter.write_str("Type")?; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + syn::GenericParam::Lifetime(_val) => { + formatter.write_str("Lifetime")?; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + syn::GenericParam::Const(_val) => { + formatter.write_str("Const")?; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + } +} +impl Debug for Lite<syn::Generics> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("Generics"); + if let Some(val) = &_val.lt_token { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Lt); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("lt_token", Print::ref_cast(val)); + } + if !_val.params.is_empty() { + formatter.field("params", Lite(&_val.params)); + } + if let Some(val) = &_val.gt_token { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Gt); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("gt_token", Print::ref_cast(val)); + } + if let Some(val) = &_val.where_clause { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::WhereClause); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("where_clause", Print::ref_cast(val)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::ImplItem> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + match _val { + syn::ImplItem::Const(_val) => { + let mut formatter = formatter.debug_struct("ImplItem::Const"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("vis", Lite(&_val.vis)); + if let Some(val) = &_val.defaultness { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Default); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("defaultness", Print::ref_cast(val)); + } + formatter.field("ident", Lite(&_val.ident)); + formatter.field("ty", Lite(&_val.ty)); + formatter.field("expr", Lite(&_val.expr)); + formatter.finish() + } + syn::ImplItem::Method(_val) => { + let mut formatter = formatter.debug_struct("ImplItem::Method"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("vis", Lite(&_val.vis)); + if let Some(val) = &_val.defaultness 
{ + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Default); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("defaultness", Print::ref_cast(val)); + } + formatter.field("sig", Lite(&_val.sig)); + formatter.field("block", Lite(&_val.block)); + formatter.finish() + } + syn::ImplItem::Type(_val) => { + let mut formatter = formatter.debug_struct("ImplItem::Type"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("vis", Lite(&_val.vis)); + if let Some(val) = &_val.defaultness { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Default); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("defaultness", Print::ref_cast(val)); + } + formatter.field("ident", Lite(&_val.ident)); + formatter.field("generics", Lite(&_val.generics)); + formatter.field("ty", Lite(&_val.ty)); + formatter.finish() + } + syn::ImplItem::Macro(_val) => { + let mut formatter = formatter.debug_struct("ImplItem::Macro"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("mac", Lite(&_val.mac)); + if let Some(val) = &_val.semi_token { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Semi); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("semi_token", Print::ref_cast(val)); + } + formatter.finish() + } + syn::ImplItem::Verbatim(_val) => { + formatter.write_str("Verbatim")?; + formatter.write_str("(`")?; + Display::fmt(_val, formatter)?; + formatter.write_str("`)")?; + Ok(()) + } + _ => unreachable!(), + } + } +} +impl Debug for Lite<syn::ImplItemConst> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ImplItemConst"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("vis", Lite(&_val.vis)); + if let Some(val) = &_val.defaultness { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Default); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("defaultness", Print::ref_cast(val)); + } + formatter.field("ident", Lite(&_val.ident)); + formatter.field("ty", Lite(&_val.ty)); + formatter.field("expr", Lite(&_val.expr)); + formatter.finish() + } +} +impl Debug for Lite<syn::ImplItemMacro> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ImplItemMacro"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("mac", Lite(&_val.mac)); + if let Some(val) = &_val.semi_token { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Semi); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("semi_token", Print::ref_cast(val)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::ImplItemMethod> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ImplItemMethod"); + if 
!_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("vis", Lite(&_val.vis)); + if let Some(val) = &_val.defaultness { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Default); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("defaultness", Print::ref_cast(val)); + } + formatter.field("sig", Lite(&_val.sig)); + formatter.field("block", Lite(&_val.block)); + formatter.finish() + } +} +impl Debug for Lite<syn::ImplItemType> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ImplItemType"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("vis", Lite(&_val.vis)); + if let Some(val) = &_val.defaultness { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Default); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("defaultness", Print::ref_cast(val)); + } + formatter.field("ident", Lite(&_val.ident)); + formatter.field("generics", Lite(&_val.generics)); + formatter.field("ty", Lite(&_val.ty)); + formatter.finish() + } +} +impl Debug for Lite<syn::Index> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("Index"); + formatter.field("index", Lite(&_val.index)); + formatter.finish() + } +} +impl Debug for Lite<syn::Item> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + match _val { + syn::Item::Const(_val) => { + let mut formatter = formatter.debug_struct("Item::Const"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("vis", Lite(&_val.vis)); + formatter.field("ident", Lite(&_val.ident)); + formatter.field("ty", Lite(&_val.ty)); + formatter.field("expr", Lite(&_val.expr)); + formatter.finish() + } + syn::Item::Enum(_val) => { + let mut formatter = formatter.debug_struct("Item::Enum"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("vis", Lite(&_val.vis)); + formatter.field("ident", Lite(&_val.ident)); + formatter.field("generics", Lite(&_val.generics)); + if !_val.variants.is_empty() { + formatter.field("variants", Lite(&_val.variants)); + } + formatter.finish() + } + syn::Item::ExternCrate(_val) => { + let mut formatter = formatter.debug_struct("Item::ExternCrate"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("vis", Lite(&_val.vis)); + formatter.field("ident", Lite(&_val.ident)); + if let Some(val) = &_val.rename { + #[derive(RefCast)] + #[repr(transparent)] + struct Print((syn::token::As, proc_macro2::Ident)); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(&_val.1), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("rename", Print::ref_cast(val)); + } + formatter.finish() + } + syn::Item::Fn(_val) => { + let mut formatter = formatter.debug_struct("Item::Fn"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("vis", Lite(&_val.vis)); + formatter.field("sig", 
Lite(&_val.sig)); + formatter.field("block", Lite(&_val.block)); + formatter.finish() + } + syn::Item::ForeignMod(_val) => { + let mut formatter = formatter.debug_struct("Item::ForeignMod"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("abi", Lite(&_val.abi)); + if !_val.items.is_empty() { + formatter.field("items", Lite(&_val.items)); + } + formatter.finish() + } + syn::Item::Impl(_val) => { + let mut formatter = formatter.debug_struct("Item::Impl"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if let Some(val) = &_val.defaultness { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Default); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("defaultness", Print::ref_cast(val)); + } + if let Some(val) = &_val.unsafety { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Unsafe); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("unsafety", Print::ref_cast(val)); + } + formatter.field("generics", Lite(&_val.generics)); + if let Some(val) = &_val.trait_ { + #[derive(RefCast)] + #[repr(transparent)] + struct Print((Option<syn::token::Bang>, syn::Path, syn::token::For)); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt( + &( + { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(Option<syn::token::Bang>); + impl Debug for Print { + fn fmt( + &self, + formatter: &mut fmt::Formatter, + ) -> fmt::Result { + match &self.0 { + Some(_val) => { + formatter.write_str("Some")?; + Ok(()) + } + None => formatter.write_str("None"), + } + } + } + Print::ref_cast(&_val.0) + }, + Lite(&_val.1), + ), + formatter, + )?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("trait_", Print::ref_cast(val)); + } + formatter.field("self_ty", Lite(&_val.self_ty)); + if !_val.items.is_empty() { + formatter.field("items", Lite(&_val.items)); + } + formatter.finish() + } + syn::Item::Macro(_val) => { + let mut formatter = formatter.debug_struct("Item::Macro"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if let Some(val) = &_val.ident { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(proc_macro2::Ident); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("ident", Print::ref_cast(val)); + } + formatter.field("mac", Lite(&_val.mac)); + if let Some(val) = &_val.semi_token { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Semi); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("semi_token", Print::ref_cast(val)); + } + formatter.finish() + } + syn::Item::Macro2(_val) => { + let mut formatter = formatter.debug_struct("Item::Macro2"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("vis", Lite(&_val.vis)); + formatter.field("ident", Lite(&_val.ident)); + formatter.field("rules", 
Lite(&_val.rules)); + formatter.finish() + } + syn::Item::Mod(_val) => { + let mut formatter = formatter.debug_struct("Item::Mod"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("vis", Lite(&_val.vis)); + formatter.field("ident", Lite(&_val.ident)); + if let Some(val) = &_val.content { + #[derive(RefCast)] + #[repr(transparent)] + struct Print((syn::token::Brace, Vec<syn::Item>)); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(&_val.1), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("content", Print::ref_cast(val)); + } + if let Some(val) = &_val.semi { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Semi); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("semi", Print::ref_cast(val)); + } + formatter.finish() + } + syn::Item::Static(_val) => { + let mut formatter = formatter.debug_struct("Item::Static"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("vis", Lite(&_val.vis)); + if let Some(val) = &_val.mutability { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Mut); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("mutability", Print::ref_cast(val)); + } + formatter.field("ident", Lite(&_val.ident)); + formatter.field("ty", Lite(&_val.ty)); + formatter.field("expr", Lite(&_val.expr)); + formatter.finish() + } + syn::Item::Struct(_val) => { + let mut formatter = formatter.debug_struct("Item::Struct"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("vis", Lite(&_val.vis)); + formatter.field("ident", Lite(&_val.ident)); + formatter.field("generics", Lite(&_val.generics)); + formatter.field("fields", Lite(&_val.fields)); + if let Some(val) = &_val.semi_token { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Semi); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("semi_token", Print::ref_cast(val)); + } + formatter.finish() + } + syn::Item::Trait(_val) => { + let mut formatter = formatter.debug_struct("Item::Trait"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("vis", Lite(&_val.vis)); + if let Some(val) = &_val.unsafety { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Unsafe); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("unsafety", Print::ref_cast(val)); + } + if let Some(val) = &_val.auto_token { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Auto); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("auto_token", Print::ref_cast(val)); + } + formatter.field("ident", Lite(&_val.ident)); + formatter.field("generics", Lite(&_val.generics)); + if let Some(val) = &_val.colon_token { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Colon); + impl 
Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("colon_token", Print::ref_cast(val)); + } + if !_val.supertraits.is_empty() { + formatter.field("supertraits", Lite(&_val.supertraits)); + } + if !_val.items.is_empty() { + formatter.field("items", Lite(&_val.items)); + } + formatter.finish() + } + syn::Item::TraitAlias(_val) => { + let mut formatter = formatter.debug_struct("Item::TraitAlias"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("vis", Lite(&_val.vis)); + formatter.field("ident", Lite(&_val.ident)); + formatter.field("generics", Lite(&_val.generics)); + if !_val.bounds.is_empty() { + formatter.field("bounds", Lite(&_val.bounds)); + } + formatter.finish() + } + syn::Item::Type(_val) => { + let mut formatter = formatter.debug_struct("Item::Type"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("vis", Lite(&_val.vis)); + formatter.field("ident", Lite(&_val.ident)); + formatter.field("generics", Lite(&_val.generics)); + formatter.field("ty", Lite(&_val.ty)); + formatter.finish() + } + syn::Item::Union(_val) => { + let mut formatter = formatter.debug_struct("Item::Union"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("vis", Lite(&_val.vis)); + formatter.field("ident", Lite(&_val.ident)); + formatter.field("generics", Lite(&_val.generics)); + formatter.field("fields", Lite(&_val.fields)); + formatter.finish() + } + syn::Item::Use(_val) => { + let mut formatter = formatter.debug_struct("Item::Use"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("vis", Lite(&_val.vis)); + if let Some(val) = &_val.leading_colon { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Colon2); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("leading_colon", Print::ref_cast(val)); + } + formatter.field("tree", Lite(&_val.tree)); + formatter.finish() + } + syn::Item::Verbatim(_val) => { + formatter.write_str("Verbatim")?; + formatter.write_str("(`")?; + Display::fmt(_val, formatter)?; + formatter.write_str("`)")?; + Ok(()) + } + _ => unreachable!(), + } + } +} +impl Debug for Lite<syn::ItemConst> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ItemConst"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("vis", Lite(&_val.vis)); + formatter.field("ident", Lite(&_val.ident)); + formatter.field("ty", Lite(&_val.ty)); + formatter.field("expr", Lite(&_val.expr)); + formatter.finish() + } +} +impl Debug for Lite<syn::ItemEnum> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ItemEnum"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("vis", Lite(&_val.vis)); + formatter.field("ident", Lite(&_val.ident)); + formatter.field("generics", Lite(&_val.generics)); + if !_val.variants.is_empty() { + formatter.field("variants", Lite(&_val.variants)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::ItemExternCrate> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + 
let mut formatter = formatter.debug_struct("ItemExternCrate"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("vis", Lite(&_val.vis)); + formatter.field("ident", Lite(&_val.ident)); + if let Some(val) = &_val.rename { + #[derive(RefCast)] + #[repr(transparent)] + struct Print((syn::token::As, proc_macro2::Ident)); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(&_val.1), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("rename", Print::ref_cast(val)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::ItemFn> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ItemFn"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("vis", Lite(&_val.vis)); + formatter.field("sig", Lite(&_val.sig)); + formatter.field("block", Lite(&_val.block)); + formatter.finish() + } +} +impl Debug for Lite<syn::ItemForeignMod> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ItemForeignMod"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("abi", Lite(&_val.abi)); + if !_val.items.is_empty() { + formatter.field("items", Lite(&_val.items)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::ItemImpl> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ItemImpl"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if let Some(val) = &_val.defaultness { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Default); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("defaultness", Print::ref_cast(val)); + } + if let Some(val) = &_val.unsafety { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Unsafe); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("unsafety", Print::ref_cast(val)); + } + formatter.field("generics", Lite(&_val.generics)); + if let Some(val) = &_val.trait_ { + #[derive(RefCast)] + #[repr(transparent)] + struct Print((Option<syn::token::Bang>, syn::Path, syn::token::For)); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt( + &( + { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(Option<syn::token::Bang>); + impl Debug for Print { + fn fmt( + &self, + formatter: &mut fmt::Formatter, + ) -> fmt::Result { + match &self.0 { + Some(_val) => { + formatter.write_str("Some")?; + Ok(()) + } + None => formatter.write_str("None"), + } + } + } + Print::ref_cast(&_val.0) + }, + Lite(&_val.1), + ), + formatter, + )?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("trait_", Print::ref_cast(val)); + } + formatter.field("self_ty", Lite(&_val.self_ty)); + if !_val.items.is_empty() { + formatter.field("items", Lite(&_val.items)); + } + formatter.finish() + } +} +impl Debug for 
Lite<syn::ItemMacro> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ItemMacro"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if let Some(val) = &_val.ident { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(proc_macro2::Ident); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("ident", Print::ref_cast(val)); + } + formatter.field("mac", Lite(&_val.mac)); + if let Some(val) = &_val.semi_token { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Semi); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("semi_token", Print::ref_cast(val)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::ItemMacro2> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ItemMacro2"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("vis", Lite(&_val.vis)); + formatter.field("ident", Lite(&_val.ident)); + formatter.field("rules", Lite(&_val.rules)); + formatter.finish() + } +} +impl Debug for Lite<syn::ItemMod> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ItemMod"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("vis", Lite(&_val.vis)); + formatter.field("ident", Lite(&_val.ident)); + if let Some(val) = &_val.content { + #[derive(RefCast)] + #[repr(transparent)] + struct Print((syn::token::Brace, Vec<syn::Item>)); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(&_val.1), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("content", Print::ref_cast(val)); + } + if let Some(val) = &_val.semi { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Semi); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("semi", Print::ref_cast(val)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::ItemStatic> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ItemStatic"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("vis", Lite(&_val.vis)); + if let Some(val) = &_val.mutability { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Mut); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("mutability", Print::ref_cast(val)); + } + formatter.field("ident", Lite(&_val.ident)); + formatter.field("ty", Lite(&_val.ty)); + formatter.field("expr", Lite(&_val.expr)); + formatter.finish() + } +} +impl Debug for Lite<syn::ItemStruct> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = 
&self.value; + let mut formatter = formatter.debug_struct("ItemStruct"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("vis", Lite(&_val.vis)); + formatter.field("ident", Lite(&_val.ident)); + formatter.field("generics", Lite(&_val.generics)); + formatter.field("fields", Lite(&_val.fields)); + if let Some(val) = &_val.semi_token { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Semi); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("semi_token", Print::ref_cast(val)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::ItemTrait> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ItemTrait"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("vis", Lite(&_val.vis)); + if let Some(val) = &_val.unsafety { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Unsafe); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("unsafety", Print::ref_cast(val)); + } + if let Some(val) = &_val.auto_token { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Auto); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("auto_token", Print::ref_cast(val)); + } + formatter.field("ident", Lite(&_val.ident)); + formatter.field("generics", Lite(&_val.generics)); + if let Some(val) = &_val.colon_token { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Colon); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("colon_token", Print::ref_cast(val)); + } + if !_val.supertraits.is_empty() { + formatter.field("supertraits", Lite(&_val.supertraits)); + } + if !_val.items.is_empty() { + formatter.field("items", Lite(&_val.items)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::ItemTraitAlias> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ItemTraitAlias"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("vis", Lite(&_val.vis)); + formatter.field("ident", Lite(&_val.ident)); + formatter.field("generics", Lite(&_val.generics)); + if !_val.bounds.is_empty() { + formatter.field("bounds", Lite(&_val.bounds)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::ItemType> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ItemType"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("vis", Lite(&_val.vis)); + formatter.field("ident", Lite(&_val.ident)); + formatter.field("generics", Lite(&_val.generics)); + formatter.field("ty", Lite(&_val.ty)); + formatter.finish() + } +} +impl Debug for Lite<syn::ItemUnion> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ItemUnion"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + 
formatter.field("vis", Lite(&_val.vis)); + formatter.field("ident", Lite(&_val.ident)); + formatter.field("generics", Lite(&_val.generics)); + formatter.field("fields", Lite(&_val.fields)); + formatter.finish() + } +} +impl Debug for Lite<syn::ItemUse> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ItemUse"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("vis", Lite(&_val.vis)); + if let Some(val) = &_val.leading_colon { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Colon2); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("leading_colon", Print::ref_cast(val)); + } + formatter.field("tree", Lite(&_val.tree)); + formatter.finish() + } +} +impl Debug for Lite<syn::Label> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("Label"); + formatter.field("name", Lite(&_val.name)); + formatter.finish() + } +} +impl Debug for Lite<syn::Lifetime> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("Lifetime"); + formatter.field("ident", Lite(&_val.ident)); + formatter.finish() + } +} +impl Debug for Lite<syn::LifetimeDef> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("LifetimeDef"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("lifetime", Lite(&_val.lifetime)); + if let Some(val) = &_val.colon_token { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Colon); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("colon_token", Print::ref_cast(val)); + } + if !_val.bounds.is_empty() { + formatter.field("bounds", Lite(&_val.bounds)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::Lit> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + match _val { + syn::Lit::Str(_val) => write!(formatter, "{:?}", _val.value()), + syn::Lit::ByteStr(_val) => write!(formatter, "{:?}", _val.value()), + syn::Lit::Byte(_val) => write!(formatter, "{:?}", _val.value()), + syn::Lit::Char(_val) => write!(formatter, "{:?}", _val.value()), + syn::Lit::Int(_val) => write!(formatter, "{}", _val), + syn::Lit::Float(_val) => write!(formatter, "{}", _val), + syn::Lit::Bool(_val) => { + let mut formatter = formatter.debug_struct("Lit::Bool"); + formatter.field("value", Lite(&_val.value)); + formatter.finish() + } + syn::Lit::Verbatim(_val) => { + formatter.write_str("Verbatim")?; + formatter.write_str("(`")?; + Display::fmt(_val, formatter)?; + formatter.write_str("`)")?; + Ok(()) + } + } + } +} +impl Debug for Lite<syn::LitBool> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("LitBool"); + formatter.field("value", Lite(&_val.value)); + formatter.finish() + } +} +impl Debug for Lite<syn::LitByte> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + write!(formatter, "{:?}", _val.value()) + } +} +impl Debug for Lite<syn::LitByteStr> { + fn 
fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + write!(formatter, "{:?}", _val.value()) + } +} +impl Debug for Lite<syn::LitChar> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + write!(formatter, "{:?}", _val.value()) + } +} +impl Debug for Lite<syn::LitFloat> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + write!(formatter, "{}", _val) + } +} +impl Debug for Lite<syn::LitInt> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + write!(formatter, "{}", _val) + } +} +impl Debug for Lite<syn::LitStr> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + write!(formatter, "{:?}", _val.value()) + } +} +impl Debug for Lite<syn::Local> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("Local"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("pat", Lite(&_val.pat)); + if let Some(val) = &_val.init { + #[derive(RefCast)] + #[repr(transparent)] + struct Print((syn::token::Eq, Box<syn::Expr>)); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(&_val.1), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("init", Print::ref_cast(val)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::Macro> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("Macro"); + formatter.field("path", Lite(&_val.path)); + formatter.field("delimiter", Lite(&_val.delimiter)); + formatter.field("tokens", Lite(&_val.tokens)); + formatter.finish() + } +} +impl Debug for Lite<syn::MacroDelimiter> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + match _val { + syn::MacroDelimiter::Paren(_val) => { + formatter.write_str("Paren")?; + Ok(()) + } + syn::MacroDelimiter::Brace(_val) => { + formatter.write_str("Brace")?; + Ok(()) + } + syn::MacroDelimiter::Bracket(_val) => { + formatter.write_str("Bracket")?; + Ok(()) + } + } + } +} +impl Debug for Lite<syn::Member> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + match _val { + syn::Member::Named(_val) => { + formatter.write_str("Named")?; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + syn::Member::Unnamed(_val) => { + formatter.write_str("Unnamed")?; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + } +} +impl Debug for Lite<syn::Meta> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + match _val { + syn::Meta::Path(_val) => { + formatter.write_str("Path")?; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + syn::Meta::List(_val) => { + let mut formatter = formatter.debug_struct("Meta::List"); + formatter.field("path", Lite(&_val.path)); + if !_val.nested.is_empty() { + formatter.field("nested", Lite(&_val.nested)); + } + formatter.finish() + } + syn::Meta::NameValue(_val) => { + let mut formatter = formatter.debug_struct("Meta::NameValue"); + 
formatter.field("path", Lite(&_val.path)); + formatter.field("lit", Lite(&_val.lit)); + formatter.finish() + } + } + } +} +impl Debug for Lite<syn::MetaList> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("MetaList"); + formatter.field("path", Lite(&_val.path)); + if !_val.nested.is_empty() { + formatter.field("nested", Lite(&_val.nested)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::MetaNameValue> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("MetaNameValue"); + formatter.field("path", Lite(&_val.path)); + formatter.field("lit", Lite(&_val.lit)); + formatter.finish() + } +} +impl Debug for Lite<syn::MethodTurbofish> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("MethodTurbofish"); + if !_val.args.is_empty() { + formatter.field("args", Lite(&_val.args)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::NestedMeta> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + match _val { + syn::NestedMeta::Meta(_val) => { + formatter.write_str("Meta")?; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + syn::NestedMeta::Lit(_val) => { + formatter.write_str("Lit")?; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + } +} +impl Debug for Lite<syn::ParenthesizedGenericArguments> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("ParenthesizedGenericArguments"); + if !_val.inputs.is_empty() { + formatter.field("inputs", Lite(&_val.inputs)); + } + formatter.field("output", Lite(&_val.output)); + formatter.finish() + } +} +impl Debug for Lite<syn::Pat> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + match _val { + syn::Pat::Box(_val) => { + let mut formatter = formatter.debug_struct("Pat::Box"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("pat", Lite(&_val.pat)); + formatter.finish() + } + syn::Pat::Ident(_val) => { + let mut formatter = formatter.debug_struct("Pat::Ident"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if let Some(val) = &_val.by_ref { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Ref); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("by_ref", Print::ref_cast(val)); + } + if let Some(val) = &_val.mutability { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Mut); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("mutability", Print::ref_cast(val)); + } + formatter.field("ident", Lite(&_val.ident)); + if let Some(val) = &_val.subpat { + #[derive(RefCast)] + #[repr(transparent)] + struct Print((syn::token::At, Box<syn::Pat>)); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(&_val.1), formatter)?; + 
formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("subpat", Print::ref_cast(val)); + } + formatter.finish() + } + syn::Pat::Lit(_val) => { + let mut formatter = formatter.debug_struct("Pat::Lit"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("expr", Lite(&_val.expr)); + formatter.finish() + } + syn::Pat::Macro(_val) => { + let mut formatter = formatter.debug_struct("Pat::Macro"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("mac", Lite(&_val.mac)); + formatter.finish() + } + syn::Pat::Or(_val) => { + let mut formatter = formatter.debug_struct("Pat::Or"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if let Some(val) = &_val.leading_vert { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Or); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("leading_vert", Print::ref_cast(val)); + } + if !_val.cases.is_empty() { + formatter.field("cases", Lite(&_val.cases)); + } + formatter.finish() + } + syn::Pat::Path(_val) => { + let mut formatter = formatter.debug_struct("Pat::Path"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if let Some(val) = &_val.qself { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::QSelf); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("qself", Print::ref_cast(val)); + } + formatter.field("path", Lite(&_val.path)); + formatter.finish() + } + syn::Pat::Range(_val) => { + let mut formatter = formatter.debug_struct("Pat::Range"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("lo", Lite(&_val.lo)); + formatter.field("limits", Lite(&_val.limits)); + formatter.field("hi", Lite(&_val.hi)); + formatter.finish() + } + syn::Pat::Reference(_val) => { + let mut formatter = formatter.debug_struct("Pat::Reference"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if let Some(val) = &_val.mutability { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Mut); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("mutability", Print::ref_cast(val)); + } + formatter.field("pat", Lite(&_val.pat)); + formatter.finish() + } + syn::Pat::Rest(_val) => { + let mut formatter = formatter.debug_struct("Pat::Rest"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.finish() + } + syn::Pat::Slice(_val) => { + let mut formatter = formatter.debug_struct("Pat::Slice"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if !_val.elems.is_empty() { + formatter.field("elems", Lite(&_val.elems)); + } + formatter.finish() + } + syn::Pat::Struct(_val) => { + let mut formatter = formatter.debug_struct("Pat::Struct"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("path", Lite(&_val.path)); + if !_val.fields.is_empty() { + formatter.field("fields", Lite(&_val.fields)); + } + if let Some(val) = &_val.dot2_token { + 
#[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Dot2); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("dot2_token", Print::ref_cast(val)); + } + formatter.finish() + } + syn::Pat::Tuple(_val) => { + let mut formatter = formatter.debug_struct("Pat::Tuple"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if !_val.elems.is_empty() { + formatter.field("elems", Lite(&_val.elems)); + } + formatter.finish() + } + syn::Pat::TupleStruct(_val) => { + let mut formatter = formatter.debug_struct("Pat::TupleStruct"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("path", Lite(&_val.path)); + formatter.field("pat", Lite(&_val.pat)); + formatter.finish() + } + syn::Pat::Type(_val) => { + let mut formatter = formatter.debug_struct("Pat::Type"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("pat", Lite(&_val.pat)); + formatter.field("ty", Lite(&_val.ty)); + formatter.finish() + } + syn::Pat::Verbatim(_val) => { + formatter.write_str("Verbatim")?; + formatter.write_str("(`")?; + Display::fmt(_val, formatter)?; + formatter.write_str("`)")?; + Ok(()) + } + syn::Pat::Wild(_val) => { + let mut formatter = formatter.debug_struct("Pat::Wild"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.finish() + } + _ => unreachable!(), + } + } +} +impl Debug for Lite<syn::PatBox> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("PatBox"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("pat", Lite(&_val.pat)); + formatter.finish() + } +} +impl Debug for Lite<syn::PatIdent> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("PatIdent"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if let Some(val) = &_val.by_ref { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Ref); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("by_ref", Print::ref_cast(val)); + } + if let Some(val) = &_val.mutability { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Mut); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("mutability", Print::ref_cast(val)); + } + formatter.field("ident", Lite(&_val.ident)); + if let Some(val) = &_val.subpat { + #[derive(RefCast)] + #[repr(transparent)] + struct Print((syn::token::At, Box<syn::Pat>)); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(&_val.1), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("subpat", Print::ref_cast(val)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::PatLit> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("PatLit"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + 
} + formatter.field("expr", Lite(&_val.expr)); + formatter.finish() + } +} +impl Debug for Lite<syn::PatMacro> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("PatMacro"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("mac", Lite(&_val.mac)); + formatter.finish() + } +} +impl Debug for Lite<syn::PatOr> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("PatOr"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if let Some(val) = &_val.leading_vert { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Or); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("leading_vert", Print::ref_cast(val)); + } + if !_val.cases.is_empty() { + formatter.field("cases", Lite(&_val.cases)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::PatPath> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("PatPath"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if let Some(val) = &_val.qself { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::QSelf); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("qself", Print::ref_cast(val)); + } + formatter.field("path", Lite(&_val.path)); + formatter.finish() + } +} +impl Debug for Lite<syn::PatRange> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("PatRange"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("lo", Lite(&_val.lo)); + formatter.field("limits", Lite(&_val.limits)); + formatter.field("hi", Lite(&_val.hi)); + formatter.finish() + } +} +impl Debug for Lite<syn::PatReference> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("PatReference"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if let Some(val) = &_val.mutability { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Mut); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("mutability", Print::ref_cast(val)); + } + formatter.field("pat", Lite(&_val.pat)); + formatter.finish() + } +} +impl Debug for Lite<syn::PatRest> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("PatRest"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::PatSlice> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("PatSlice"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if !_val.elems.is_empty() { + 
formatter.field("elems", Lite(&_val.elems)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::PatStruct> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("PatStruct"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("path", Lite(&_val.path)); + if !_val.fields.is_empty() { + formatter.field("fields", Lite(&_val.fields)); + } + if let Some(val) = &_val.dot2_token { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Dot2); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("dot2_token", Print::ref_cast(val)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::PatTuple> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("PatTuple"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if !_val.elems.is_empty() { + formatter.field("elems", Lite(&_val.elems)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::PatTupleStruct> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("PatTupleStruct"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("path", Lite(&_val.path)); + formatter.field("pat", Lite(&_val.pat)); + formatter.finish() + } +} +impl Debug for Lite<syn::PatType> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("PatType"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("pat", Lite(&_val.pat)); + formatter.field("ty", Lite(&_val.ty)); + formatter.finish() + } +} +impl Debug for Lite<syn::PatWild> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("PatWild"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::Path> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("Path"); + if let Some(val) = &_val.leading_colon { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Colon2); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("leading_colon", Print::ref_cast(val)); + } + if !_val.segments.is_empty() { + formatter.field("segments", Lite(&_val.segments)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::PathArguments> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + match _val { + syn::PathArguments::None => formatter.write_str("None"), + syn::PathArguments::AngleBracketed(_val) => { + let mut formatter = formatter + .debug_struct("PathArguments::AngleBracketed"); + if let Some(val) = &_val.colon2_token { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Colon2); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("colon2_token", Print::ref_cast(val)); + } + if 
!_val.args.is_empty() { + formatter.field("args", Lite(&_val.args)); + } + formatter.finish() + } + syn::PathArguments::Parenthesized(_val) => { + let mut formatter = formatter + .debug_struct("PathArguments::Parenthesized"); + if !_val.inputs.is_empty() { + formatter.field("inputs", Lite(&_val.inputs)); + } + formatter.field("output", Lite(&_val.output)); + formatter.finish() + } + } + } +} +impl Debug for Lite<syn::PathSegment> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("PathSegment"); + formatter.field("ident", Lite(&_val.ident)); + formatter.field("arguments", Lite(&_val.arguments)); + formatter.finish() + } +} +impl Debug for Lite<syn::PredicateEq> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("PredicateEq"); + formatter.field("lhs_ty", Lite(&_val.lhs_ty)); + formatter.field("rhs_ty", Lite(&_val.rhs_ty)); + formatter.finish() + } +} +impl Debug for Lite<syn::PredicateLifetime> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("PredicateLifetime"); + formatter.field("lifetime", Lite(&_val.lifetime)); + if !_val.bounds.is_empty() { + formatter.field("bounds", Lite(&_val.bounds)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::PredicateType> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("PredicateType"); + if let Some(val) = &_val.lifetimes { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::BoundLifetimes); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("lifetimes", Print::ref_cast(val)); + } + formatter.field("bounded_ty", Lite(&_val.bounded_ty)); + if !_val.bounds.is_empty() { + formatter.field("bounds", Lite(&_val.bounds)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::QSelf> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("QSelf"); + formatter.field("ty", Lite(&_val.ty)); + formatter.field("position", Lite(&_val.position)); + if let Some(val) = &_val.as_token { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::As); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("as_token", Print::ref_cast(val)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::RangeLimits> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + match _val { + syn::RangeLimits::HalfOpen(_val) => { + formatter.write_str("HalfOpen")?; + Ok(()) + } + syn::RangeLimits::Closed(_val) => { + formatter.write_str("Closed")?; + Ok(()) + } + } + } +} +impl Debug for Lite<syn::Receiver> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("Receiver"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + if let Some(val) = &_val.reference { + #[derive(RefCast)] + #[repr(transparent)] + struct Print((syn::token::And, 
Option<syn::Lifetime>)); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt( + { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(Option<syn::Lifetime>); + impl Debug for Print { + fn fmt( + &self, + formatter: &mut fmt::Formatter, + ) -> fmt::Result { + match &self.0 { + Some(_val) => { + formatter.write_str("Some")?; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + None => formatter.write_str("None"), + } + } + } + Print::ref_cast(&_val.1) + }, + formatter, + )?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("reference", Print::ref_cast(val)); + } + if let Some(val) = &_val.mutability { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Mut); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("mutability", Print::ref_cast(val)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::ReturnType> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + match _val { + syn::ReturnType::Default => formatter.write_str("Default"), + syn::ReturnType::Type(_v0, _v1) => { + let mut formatter = formatter.debug_tuple("Type"); + formatter.field(Lite(_v1)); + formatter.finish() + } + } + } +} +impl Debug for Lite<syn::Signature> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("Signature"); + if let Some(val) = &_val.constness { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Const); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("constness", Print::ref_cast(val)); + } + if let Some(val) = &_val.asyncness { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Async); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("asyncness", Print::ref_cast(val)); + } + if let Some(val) = &_val.unsafety { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Unsafe); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("unsafety", Print::ref_cast(val)); + } + if let Some(val) = &_val.abi { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::Abi); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("abi", Print::ref_cast(val)); + } + formatter.field("ident", Lite(&_val.ident)); + formatter.field("generics", Lite(&_val.generics)); + if !_val.inputs.is_empty() { + formatter.field("inputs", Lite(&_val.inputs)); + } + if let Some(val) = &_val.variadic { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::Variadic); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + 
formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("variadic", Print::ref_cast(val)); + } + formatter.field("output", Lite(&_val.output)); + formatter.finish() + } +} +impl Debug for Lite<syn::Stmt> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + match _val { + syn::Stmt::Local(_val) => { + formatter.write_str("Local")?; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + syn::Stmt::Item(_val) => { + formatter.write_str("Item")?; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + syn::Stmt::Expr(_val) => { + formatter.write_str("Expr")?; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + syn::Stmt::Semi(_v0, _v1) => { + let mut formatter = formatter.debug_tuple("Semi"); + formatter.field(Lite(_v0)); + formatter.finish() + } + } + } +} +impl Debug for Lite<syn::TraitBound> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("TraitBound"); + if let Some(val) = &_val.paren_token { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Paren); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("paren_token", Print::ref_cast(val)); + } + formatter.field("modifier", Lite(&_val.modifier)); + if let Some(val) = &_val.lifetimes { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::BoundLifetimes); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("lifetimes", Print::ref_cast(val)); + } + formatter.field("path", Lite(&_val.path)); + formatter.finish() + } +} +impl Debug for Lite<syn::TraitBoundModifier> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + match _val { + syn::TraitBoundModifier::None => formatter.write_str("None"), + syn::TraitBoundModifier::Maybe(_val) => { + formatter.write_str("Maybe")?; + Ok(()) + } + } + } +} +impl Debug for Lite<syn::TraitItem> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + match _val { + syn::TraitItem::Const(_val) => { + let mut formatter = formatter.debug_struct("TraitItem::Const"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("ident", Lite(&_val.ident)); + formatter.field("ty", Lite(&_val.ty)); + if let Some(val) = &_val.default { + #[derive(RefCast)] + #[repr(transparent)] + struct Print((syn::token::Eq, syn::Expr)); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(&_val.1), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("default", Print::ref_cast(val)); + } + formatter.finish() + } + syn::TraitItem::Method(_val) => { + let mut formatter = formatter.debug_struct("TraitItem::Method"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("sig", Lite(&_val.sig)); + if let Some(val) = &_val.default { + #[derive(RefCast)] + 
#[repr(transparent)] + struct Print(syn::Block); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("default", Print::ref_cast(val)); + } + if let Some(val) = &_val.semi_token { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Semi); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("semi_token", Print::ref_cast(val)); + } + formatter.finish() + } + syn::TraitItem::Type(_val) => { + let mut formatter = formatter.debug_struct("TraitItem::Type"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("ident", Lite(&_val.ident)); + formatter.field("generics", Lite(&_val.generics)); + if let Some(val) = &_val.colon_token { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Colon); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("colon_token", Print::ref_cast(val)); + } + if !_val.bounds.is_empty() { + formatter.field("bounds", Lite(&_val.bounds)); + } + if let Some(val) = &_val.default { + #[derive(RefCast)] + #[repr(transparent)] + struct Print((syn::token::Eq, syn::Type)); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(&_val.1), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("default", Print::ref_cast(val)); + } + formatter.finish() + } + syn::TraitItem::Macro(_val) => { + let mut formatter = formatter.debug_struct("TraitItem::Macro"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("mac", Lite(&_val.mac)); + if let Some(val) = &_val.semi_token { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Semi); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("semi_token", Print::ref_cast(val)); + } + formatter.finish() + } + syn::TraitItem::Verbatim(_val) => { + formatter.write_str("Verbatim")?; + formatter.write_str("(`")?; + Display::fmt(_val, formatter)?; + formatter.write_str("`)")?; + Ok(()) + } + _ => unreachable!(), + } + } +} +impl Debug for Lite<syn::TraitItemConst> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("TraitItemConst"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("ident", Lite(&_val.ident)); + formatter.field("ty", Lite(&_val.ty)); + if let Some(val) = &_val.default { + #[derive(RefCast)] + #[repr(transparent)] + struct Print((syn::token::Eq, syn::Expr)); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(&_val.1), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("default", Print::ref_cast(val)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::TraitItemMacro> { + fn fmt(&self, formatter: &mut 
fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("TraitItemMacro"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("mac", Lite(&_val.mac)); + if let Some(val) = &_val.semi_token { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Semi); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("semi_token", Print::ref_cast(val)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::TraitItemMethod> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("TraitItemMethod"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("sig", Lite(&_val.sig)); + if let Some(val) = &_val.default { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::Block); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("default", Print::ref_cast(val)); + } + if let Some(val) = &_val.semi_token { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Semi); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("semi_token", Print::ref_cast(val)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::TraitItemType> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("TraitItemType"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("ident", Lite(&_val.ident)); + formatter.field("generics", Lite(&_val.generics)); + if let Some(val) = &_val.colon_token { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Colon); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("colon_token", Print::ref_cast(val)); + } + if !_val.bounds.is_empty() { + formatter.field("bounds", Lite(&_val.bounds)); + } + if let Some(val) = &_val.default { + #[derive(RefCast)] + #[repr(transparent)] + struct Print((syn::token::Eq, syn::Type)); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(&_val.1), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("default", Print::ref_cast(val)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::Type> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + match _val { + syn::Type::Array(_val) => { + let mut formatter = formatter.debug_struct("Type::Array"); + formatter.field("elem", Lite(&_val.elem)); + formatter.field("len", Lite(&_val.len)); + formatter.finish() + } + syn::Type::BareFn(_val) => { + let mut formatter = formatter.debug_struct("Type::BareFn"); + if let Some(val) = &_val.lifetimes { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::BoundLifetimes); + impl Debug for Print { + fn fmt(&self, formatter: &mut 
fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("lifetimes", Print::ref_cast(val)); + } + if let Some(val) = &_val.unsafety { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Unsafe); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("unsafety", Print::ref_cast(val)); + } + if let Some(val) = &_val.abi { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::Abi); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("abi", Print::ref_cast(val)); + } + if !_val.inputs.is_empty() { + formatter.field("inputs", Lite(&_val.inputs)); + } + if let Some(val) = &_val.variadic { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::Variadic); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("variadic", Print::ref_cast(val)); + } + formatter.field("output", Lite(&_val.output)); + formatter.finish() + } + syn::Type::Group(_val) => { + let mut formatter = formatter.debug_struct("Type::Group"); + formatter.field("elem", Lite(&_val.elem)); + formatter.finish() + } + syn::Type::ImplTrait(_val) => { + let mut formatter = formatter.debug_struct("Type::ImplTrait"); + if !_val.bounds.is_empty() { + formatter.field("bounds", Lite(&_val.bounds)); + } + formatter.finish() + } + syn::Type::Infer(_val) => { + let mut formatter = formatter.debug_struct("Type::Infer"); + formatter.finish() + } + syn::Type::Macro(_val) => { + let mut formatter = formatter.debug_struct("Type::Macro"); + formatter.field("mac", Lite(&_val.mac)); + formatter.finish() + } + syn::Type::Never(_val) => { + let mut formatter = formatter.debug_struct("Type::Never"); + formatter.finish() + } + syn::Type::Paren(_val) => { + let mut formatter = formatter.debug_struct("Type::Paren"); + formatter.field("elem", Lite(&_val.elem)); + formatter.finish() + } + syn::Type::Path(_val) => { + let mut formatter = formatter.debug_struct("Type::Path"); + if let Some(val) = &_val.qself { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::QSelf); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("qself", Print::ref_cast(val)); + } + formatter.field("path", Lite(&_val.path)); + formatter.finish() + } + syn::Type::Ptr(_val) => { + let mut formatter = formatter.debug_struct("Type::Ptr"); + if let Some(val) = &_val.const_token { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Const); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("const_token", Print::ref_cast(val)); + } + if let Some(val) = &_val.mutability { + #[derive(RefCast)] + 
#[repr(transparent)] + struct Print(syn::token::Mut); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("mutability", Print::ref_cast(val)); + } + formatter.field("elem", Lite(&_val.elem)); + formatter.finish() + } + syn::Type::Reference(_val) => { + let mut formatter = formatter.debug_struct("Type::Reference"); + if let Some(val) = &_val.lifetime { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::Lifetime); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("lifetime", Print::ref_cast(val)); + } + if let Some(val) = &_val.mutability { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Mut); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("mutability", Print::ref_cast(val)); + } + formatter.field("elem", Lite(&_val.elem)); + formatter.finish() + } + syn::Type::Slice(_val) => { + let mut formatter = formatter.debug_struct("Type::Slice"); + formatter.field("elem", Lite(&_val.elem)); + formatter.finish() + } + syn::Type::TraitObject(_val) => { + let mut formatter = formatter.debug_struct("Type::TraitObject"); + if let Some(val) = &_val.dyn_token { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Dyn); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("dyn_token", Print::ref_cast(val)); + } + if !_val.bounds.is_empty() { + formatter.field("bounds", Lite(&_val.bounds)); + } + formatter.finish() + } + syn::Type::Tuple(_val) => { + let mut formatter = formatter.debug_struct("Type::Tuple"); + if !_val.elems.is_empty() { + formatter.field("elems", Lite(&_val.elems)); + } + formatter.finish() + } + syn::Type::Verbatim(_val) => { + formatter.write_str("Verbatim")?; + formatter.write_str("(`")?; + Display::fmt(_val, formatter)?; + formatter.write_str("`)")?; + Ok(()) + } + _ => unreachable!(), + } + } +} +impl Debug for Lite<syn::TypeArray> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("TypeArray"); + formatter.field("elem", Lite(&_val.elem)); + formatter.field("len", Lite(&_val.len)); + formatter.finish() + } +} +impl Debug for Lite<syn::TypeBareFn> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("TypeBareFn"); + if let Some(val) = &_val.lifetimes { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::BoundLifetimes); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("lifetimes", Print::ref_cast(val)); + } + if let Some(val) = &_val.unsafety { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Unsafe); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("unsafety", 
Print::ref_cast(val)); + } + if let Some(val) = &_val.abi { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::Abi); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("abi", Print::ref_cast(val)); + } + if !_val.inputs.is_empty() { + formatter.field("inputs", Lite(&_val.inputs)); + } + if let Some(val) = &_val.variadic { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::Variadic); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("variadic", Print::ref_cast(val)); + } + formatter.field("output", Lite(&_val.output)); + formatter.finish() + } +} +impl Debug for Lite<syn::TypeGroup> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("TypeGroup"); + formatter.field("elem", Lite(&_val.elem)); + formatter.finish() + } +} +impl Debug for Lite<syn::TypeImplTrait> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("TypeImplTrait"); + if !_val.bounds.is_empty() { + formatter.field("bounds", Lite(&_val.bounds)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::TypeInfer> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("TypeInfer"); + formatter.finish() + } +} +impl Debug for Lite<syn::TypeMacro> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("TypeMacro"); + formatter.field("mac", Lite(&_val.mac)); + formatter.finish() + } +} +impl Debug for Lite<syn::TypeNever> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("TypeNever"); + formatter.finish() + } +} +impl Debug for Lite<syn::TypeParam> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("TypeParam"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("ident", Lite(&_val.ident)); + if let Some(val) = &_val.colon_token { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Colon); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("colon_token", Print::ref_cast(val)); + } + if !_val.bounds.is_empty() { + formatter.field("bounds", Lite(&_val.bounds)); + } + if let Some(val) = &_val.eq_token { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Eq); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("eq_token", Print::ref_cast(val)); + } + if let Some(val) = &_val.default { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::Type); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + 
let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("default", Print::ref_cast(val)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::TypeParamBound> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + match _val { + syn::TypeParamBound::Trait(_val) => { + formatter.write_str("Trait")?; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + syn::TypeParamBound::Lifetime(_val) => { + formatter.write_str("Lifetime")?; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + } +} +impl Debug for Lite<syn::TypeParen> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("TypeParen"); + formatter.field("elem", Lite(&_val.elem)); + formatter.finish() + } +} +impl Debug for Lite<syn::TypePath> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("TypePath"); + if let Some(val) = &_val.qself { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::QSelf); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("qself", Print::ref_cast(val)); + } + formatter.field("path", Lite(&_val.path)); + formatter.finish() + } +} +impl Debug for Lite<syn::TypePtr> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("TypePtr"); + if let Some(val) = &_val.const_token { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Const); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("const_token", Print::ref_cast(val)); + } + if let Some(val) = &_val.mutability { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Mut); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("mutability", Print::ref_cast(val)); + } + formatter.field("elem", Lite(&_val.elem)); + formatter.finish() + } +} +impl Debug for Lite<syn::TypeReference> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("TypeReference"); + if let Some(val) = &_val.lifetime { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::Lifetime); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("lifetime", Print::ref_cast(val)); + } + if let Some(val) = &_val.mutability { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Mut); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("mutability", Print::ref_cast(val)); + } + formatter.field("elem", 
Lite(&_val.elem)); + formatter.finish() + } +} +impl Debug for Lite<syn::TypeSlice> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("TypeSlice"); + formatter.field("elem", Lite(&_val.elem)); + formatter.finish() + } +} +impl Debug for Lite<syn::TypeTraitObject> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("TypeTraitObject"); + if let Some(val) = &_val.dyn_token { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::Dyn); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("dyn_token", Print::ref_cast(val)); + } + if !_val.bounds.is_empty() { + formatter.field("bounds", Lite(&_val.bounds)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::TypeTuple> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("TypeTuple"); + if !_val.elems.is_empty() { + formatter.field("elems", Lite(&_val.elems)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::UnOp> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + match _val { + syn::UnOp::Deref(_val) => { + formatter.write_str("Deref")?; + Ok(()) + } + syn::UnOp::Not(_val) => { + formatter.write_str("Not")?; + Ok(()) + } + syn::UnOp::Neg(_val) => { + formatter.write_str("Neg")?; + Ok(()) + } + } + } +} +impl Debug for Lite<syn::UseGlob> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("UseGlob"); + formatter.finish() + } +} +impl Debug for Lite<syn::UseGroup> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("UseGroup"); + if !_val.items.is_empty() { + formatter.field("items", Lite(&_val.items)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::UseName> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("UseName"); + formatter.field("ident", Lite(&_val.ident)); + formatter.finish() + } +} +impl Debug for Lite<syn::UsePath> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("UsePath"); + formatter.field("ident", Lite(&_val.ident)); + formatter.field("tree", Lite(&_val.tree)); + formatter.finish() + } +} +impl Debug for Lite<syn::UseRename> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("UseRename"); + formatter.field("ident", Lite(&_val.ident)); + formatter.field("rename", Lite(&_val.rename)); + formatter.finish() + } +} +impl Debug for Lite<syn::UseTree> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + match _val { + syn::UseTree::Path(_val) => { + formatter.write_str("Path")?; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + syn::UseTree::Name(_val) => { + formatter.write_str("Name")?; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + syn::UseTree::Rename(_val) => { + formatter.write_str("Rename")?; + 
formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + syn::UseTree::Glob(_val) => { + formatter.write_str("Glob")?; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + syn::UseTree::Group(_val) => { + formatter.write_str("Group")?; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + } +} +impl Debug for Lite<syn::Variadic> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("Variadic"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::Variant> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("Variant"); + if !_val.attrs.is_empty() { + formatter.field("attrs", Lite(&_val.attrs)); + } + formatter.field("ident", Lite(&_val.ident)); + formatter.field("fields", Lite(&_val.fields)); + if let Some(val) = &_val.discriminant { + #[derive(RefCast)] + #[repr(transparent)] + struct Print((syn::token::Eq, syn::Expr)); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + let _val = &self.0; + formatter.write_str("(")?; + Debug::fmt(Lite(&_val.1), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + formatter.field("discriminant", Print::ref_cast(val)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::VisCrate> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("VisCrate"); + formatter.finish() + } +} +impl Debug for Lite<syn::VisPublic> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("VisPublic"); + formatter.finish() + } +} +impl Debug for Lite<syn::VisRestricted> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("VisRestricted"); + if let Some(val) = &_val.in_token { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::In); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("in_token", Print::ref_cast(val)); + } + formatter.field("path", Lite(&_val.path)); + formatter.finish() + } +} +impl Debug for Lite<syn::Visibility> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + match _val { + syn::Visibility::Public(_val) => { + let mut formatter = formatter.debug_struct("Visibility::Public"); + formatter.finish() + } + syn::Visibility::Crate(_val) => { + let mut formatter = formatter.debug_struct("Visibility::Crate"); + formatter.finish() + } + syn::Visibility::Restricted(_val) => { + let mut formatter = formatter.debug_struct("Visibility::Restricted"); + if let Some(val) = &_val.in_token { + #[derive(RefCast)] + #[repr(transparent)] + struct Print(syn::token::In); + impl Debug for Print { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Some")?; + Ok(()) + } + } + formatter.field("in_token", Print::ref_cast(val)); + } + formatter.field("path", Lite(&_val.path)); + formatter.finish() + } + syn::Visibility::Inherited => 
formatter.write_str("Inherited"), + } + } +} +impl Debug for Lite<syn::WhereClause> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + let mut formatter = formatter.debug_struct("WhereClause"); + if !_val.predicates.is_empty() { + formatter.field("predicates", Lite(&_val.predicates)); + } + formatter.finish() + } +} +impl Debug for Lite<syn::WherePredicate> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let _val = &self.value; + match _val { + syn::WherePredicate::Type(_val) => { + formatter.write_str("Type")?; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + syn::WherePredicate::Lifetime(_val) => { + formatter.write_str("Lifetime")?; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + syn::WherePredicate::Eq(_val) => { + formatter.write_str("Eq")?; + formatter.write_str("(")?; + Debug::fmt(Lite(_val), formatter)?; + formatter.write_str(")")?; + Ok(()) + } + } + } +} diff --git a/third_party/rust/syn/tests/debug/mod.rs b/third_party/rust/syn/tests/debug/mod.rs new file mode 100644 index 0000000000..0a0991a924 --- /dev/null +++ b/third_party/rust/syn/tests/debug/mod.rs @@ -0,0 +1,125 @@ +#![allow( + clippy::no_effect_underscore_binding, + clippy::too_many_lines, + clippy::used_underscore_binding +)] + +#[rustfmt::skip] +mod gen; + +use proc_macro2::{Ident, Literal, TokenStream}; +use ref_cast::RefCast; +use std::fmt::{self, Debug}; +use std::ops::Deref; +use syn::punctuated::Punctuated; + +#[derive(RefCast)] +#[repr(transparent)] +pub struct Lite<T: ?Sized> { + value: T, +} + +#[allow(non_snake_case)] +pub fn Lite<T: ?Sized>(value: &T) -> &Lite<T> { + Lite::ref_cast(value) +} + +impl<T: ?Sized> Deref for Lite<T> { + type Target = T; + + fn deref(&self) -> &Self::Target { + &self.value + } +} + +impl Debug for Lite<bool> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + write!(formatter, "{}", self.value) + } +} + +impl Debug for Lite<u32> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + write!(formatter, "{}", self.value) + } +} + +impl Debug for Lite<usize> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + write!(formatter, "{}", self.value) + } +} + +impl Debug for Lite<String> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + write!(formatter, "{:?}", self.value) + } +} + +impl Debug for Lite<Ident> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + write!(formatter, "{:?}", self.value.to_string()) + } +} + +impl Debug for Lite<Literal> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + write!(formatter, "{}", self.value) + } +} + +impl Debug for Lite<TokenStream> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + let string = self.value.to_string(); + if string.len() <= 80 { + write!(formatter, "TokenStream(`{}`)", self.value) + } else { + formatter + .debug_tuple("TokenStream") + .field(&format_args!("`{}`", string)) + .finish() + } + } +} + +impl<'a, T> Debug for Lite<&'a T> +where + Lite<T>: Debug, +{ + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + Debug::fmt(Lite(self.value), formatter) + } +} + +impl<T> Debug for Lite<Box<T>> +where + Lite<T>: Debug, +{ + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + Debug::fmt(Lite(&*self.value), formatter) + } +} + +impl<T> Debug for Lite<Vec<T>> +where + Lite<T>: Debug, +{ + fn fmt(&self, 
formatter: &mut fmt::Formatter) -> fmt::Result { + formatter + .debug_list() + .entries(self.value.iter().map(Lite)) + .finish() + } +} + +impl<T, P> Debug for Lite<Punctuated<T, P>> +where + Lite<T>: Debug, +{ + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter + .debug_list() + .entries(self.value.iter().map(Lite)) + .finish() + } +} diff --git a/third_party/rust/syn/tests/macros/mod.rs b/third_party/rust/syn/tests/macros/mod.rs new file mode 100644 index 0000000000..5ca88b083b --- /dev/null +++ b/third_party/rust/syn/tests/macros/mod.rs @@ -0,0 +1,79 @@ +#![allow(unused_macros, unused_macro_rules)] + +#[path = "../debug/mod.rs"] +pub mod debug; + +use syn::parse::{Parse, Result}; + +macro_rules! errorf { + ($($tt:tt)*) => {{ + use ::std::io::Write; + let stderr = ::std::io::stderr(); + write!(stderr.lock(), $($tt)*).unwrap(); + }}; +} + +macro_rules! punctuated { + ($($e:expr,)+) => {{ + let mut seq = ::syn::punctuated::Punctuated::new(); + $( + seq.push($e); + )+ + seq + }}; + + ($($e:expr),+) => { + punctuated!($($e,)+) + }; +} + +macro_rules! snapshot { + ($($args:tt)*) => { + snapshot_impl!(() $($args)*) + }; +} + +macro_rules! snapshot_impl { + (($expr:ident) as $t:ty, @$snapshot:literal) => { + let $expr = crate::macros::Tokens::parse::<$t>($expr).unwrap(); + let debug = crate::macros::debug::Lite(&$expr); + if !cfg!(miri) { + insta::assert_debug_snapshot!(debug, @$snapshot); + } + }; + (($($expr:tt)*) as $t:ty, @$snapshot:literal) => {{ + let syntax_tree = crate::macros::Tokens::parse::<$t>($($expr)*).unwrap(); + let debug = crate::macros::debug::Lite(&syntax_tree); + if !cfg!(miri) { + insta::assert_debug_snapshot!(debug, @$snapshot); + } + syntax_tree + }}; + (($($expr:tt)*) , @$snapshot:literal) => {{ + let syntax_tree = $($expr)*; + let debug = crate::macros::debug::Lite(&syntax_tree); + if !cfg!(miri) { + insta::assert_debug_snapshot!(debug, @$snapshot); + } + syntax_tree + }}; + (($($expr:tt)*) $next:tt $($rest:tt)*) => { + snapshot_impl!(($($expr)* $next) $($rest)*) + }; +} + +pub trait Tokens { + fn parse<T: Parse>(self) -> Result<T>; +} + +impl<'a> Tokens for &'a str { + fn parse<T: Parse>(self) -> Result<T> { + syn::parse_str(self) + } +} + +impl Tokens for proc_macro2::TokenStream { + fn parse<T: Parse>(self) -> Result<T> { + syn::parse2(self) + } +} diff --git a/third_party/rust/syn/tests/regression.rs b/third_party/rust/syn/tests/regression.rs new file mode 100644 index 0000000000..fb2b25c89b --- /dev/null +++ b/third_party/rust/syn/tests/regression.rs @@ -0,0 +1,3 @@ +mod regression { + automod::dir!("tests/regression"); +} diff --git a/third_party/rust/syn/tests/regression/issue1108.rs b/third_party/rust/syn/tests/regression/issue1108.rs new file mode 100644 index 0000000000..4fd30c0c7a --- /dev/null +++ b/third_party/rust/syn/tests/regression/issue1108.rs @@ -0,0 +1,5 @@ +#[test] +fn issue1108() { + let data = "impl<x<>>::x for"; + _ = syn::parse_file(data); +} diff --git a/third_party/rust/syn/tests/regression/issue1235.rs b/third_party/rust/syn/tests/regression/issue1235.rs new file mode 100644 index 0000000000..8836030664 --- /dev/null +++ b/third_party/rust/syn/tests/regression/issue1235.rs @@ -0,0 +1,32 @@ +use proc_macro2::{Delimiter, Group}; +use quote::quote; + +#[test] +fn main() { + // Okay. Rustc allows top-level `static` with no value syntactically, but + // not semantically. Syn parses as Item::Verbatim. + let tokens = quote! 
{ + pub static FOO: usize; + pub static BAR: usize; + }; + let file = syn::parse2::<syn::File>(tokens).unwrap(); + println!("{:#?}", file); + + // Okay. + let inner = Group::new( + Delimiter::None, + quote!(static FOO: usize = 0; pub static BAR: usize = 0), + ); + let tokens = quote!(pub #inner;); + let file = syn::parse2::<syn::File>(tokens).unwrap(); + println!("{:#?}", file); + + // Formerly parser crash. + let inner = Group::new( + Delimiter::None, + quote!(static FOO: usize; pub static BAR: usize), + ); + let tokens = quote!(pub #inner;); + let file = syn::parse2::<syn::File>(tokens).unwrap(); + println!("{:#?}", file); +} diff --git a/third_party/rust/syn/tests/repo/mod.rs b/third_party/rust/syn/tests/repo/mod.rs new file mode 100644 index 0000000000..8418b87192 --- /dev/null +++ b/third_party/rust/syn/tests/repo/mod.rs @@ -0,0 +1,215 @@ +#![allow(clippy::manual_assert)] + +mod progress; + +use self::progress::Progress; +use anyhow::Result; +use flate2::read::GzDecoder; +use std::fs; +use std::path::Path; +use tar::Archive; +use walkdir::DirEntry; + +const REVISION: &str = "98ad6a5519651af36e246c0335c964dd52c554ba"; + +#[rustfmt::skip] +static EXCLUDE_FILES: &[&str] = &[ + // TODO: impl ~const T {} + // https://github.com/dtolnay/syn/issues/1051 + "src/test/ui/rfc-2632-const-trait-impl/syntax.rs", + + // Compile-fail expr parameter in const generic position: f::<1 + 2>() + "src/test/ui/const-generics/early/closing-args-token.rs", + "src/test/ui/const-generics/early/const-expression-parameter.rs", + + // Need at least one trait in impl Trait, no such type as impl 'static + "src/test/ui/type-alias-impl-trait/generic_type_does_not_live_long_enough.rs", + + // Deprecated anonymous parameter syntax in traits + "src/test/ui/issues/issue-13105.rs", + "src/test/ui/issues/issue-13775.rs", + "src/test/ui/issues/issue-34074.rs", + "src/test/ui/proc-macro/trait-fn-args-2015.rs", + "src/tools/rustfmt/tests/source/trait.rs", + "src/tools/rustfmt/tests/target/trait.rs", + + // Various extensions to Rust syntax made up by rust-analyzer + "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0012_type_item_where_clause.rs", + "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0040_crate_keyword_vis.rs", + "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0131_existential_type.rs", + "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0179_use_tree_abs_star.rs", + "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0188_const_param_default_path.rs", + "src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0015_use_tree.rs", + "src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0029_range_forms.rs", + "src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0051_parameter_attrs.rs", + "src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0055_dot_dot_dot.rs", + "src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0068_item_modifiers.rs", + "src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0031_block_inner_attrs.rs", + "src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0045_ambiguous_trait_object.rs", + "src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0046_mutable_const_item.rs", + + // Placeholder syntax for "throw expressions" + "src/test/pretty/yeet-expr.rs", + "src/test/ui/try-trait/yeet-for-option.rs", + "src/test/ui/try-trait/yeet-for-result.rs", + + // Excessive nesting + "src/test/ui/issues/issue-74564-if-expr-stack-overflow.rs", + + // Testing tools 
on invalid syntax + "src/test/run-make/translation/test.rs", + "src/test/ui/generics/issue-94432-garbage-ice.rs", + "src/tools/rustfmt/tests/coverage/target/comments.rs", + "src/tools/rustfmt/tests/parser/issue-4126/invalid.rs", + "src/tools/rustfmt/tests/parser/issue_4418.rs", + "src/tools/rustfmt/tests/parser/unclosed-delims/issue_4466.rs", + "src/tools/rustfmt/tests/source/configs/disable_all_formatting/true.rs", + "src/tools/rustfmt/tests/source/configs/spaces_around_ranges/false.rs", + "src/tools/rustfmt/tests/source/configs/spaces_around_ranges/true.rs", + "src/tools/rustfmt/tests/source/type.rs", + "src/tools/rustfmt/tests/target/configs/spaces_around_ranges/false.rs", + "src/tools/rustfmt/tests/target/configs/spaces_around_ranges/true.rs", + "src/tools/rustfmt/tests/target/type.rs", + + // Generated file containing a top-level expression, used with `include!` + "compiler/rustc_codegen_gcc/src/intrinsic/archs.rs", + + // Clippy lint lists represented as expressions + "src/tools/clippy/clippy_lints/src/lib.deprecated.rs", + "src/tools/clippy/clippy_lints/src/lib.register_all.rs", + "src/tools/clippy/clippy_lints/src/lib.register_cargo.rs", + "src/tools/clippy/clippy_lints/src/lib.register_complexity.rs", + "src/tools/clippy/clippy_lints/src/lib.register_correctness.rs", + "src/tools/clippy/clippy_lints/src/lib.register_internal.rs", + "src/tools/clippy/clippy_lints/src/lib.register_lints.rs", + "src/tools/clippy/clippy_lints/src/lib.register_nursery.rs", + "src/tools/clippy/clippy_lints/src/lib.register_pedantic.rs", + "src/tools/clippy/clippy_lints/src/lib.register_perf.rs", + "src/tools/clippy/clippy_lints/src/lib.register_restriction.rs", + "src/tools/clippy/clippy_lints/src/lib.register_style.rs", + "src/tools/clippy/clippy_lints/src/lib.register_suspicious.rs", + + // Not actually test cases + "src/test/ui/lint/expansion-time-include.rs", + "src/test/ui/macros/auxiliary/macro-comma-support.rs", + "src/test/ui/macros/auxiliary/macro-include-items-expr.rs", + "src/test/ui/macros/include-single-expr-helper.rs", + "src/test/ui/macros/include-single-expr-helper-1.rs", + "src/test/ui/parser/issues/auxiliary/issue-21146-inc.rs", +]; + +#[rustfmt::skip] +static EXCLUDE_DIRS: &[&str] = &[ + // Inputs that intentionally do not parse + "src/tools/rust-analyzer/crates/parser/test_data/parser/err", + "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err", + + // Inputs that lex but do not necessarily parse + "src/tools/rust-analyzer/crates/parser/test_data/lexer", + + // Inputs that used to crash rust-analyzer, but aren't necessarily supposed to parse + "src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures", + "src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures", +]; + +pub fn base_dir_filter(entry: &DirEntry) -> bool { + let path = entry.path(); + + let mut path_string = path.to_string_lossy(); + if cfg!(windows) { + path_string = path_string.replace('\\', "/").into(); + } + let path_string = if path_string == "tests/rust" { + return true; + } else if let Some(path) = path_string.strip_prefix("tests/rust/") { + path + } else { + panic!("unexpected path in Rust dist: {}", path_string); + }; + + if path.is_dir() { + return !EXCLUDE_DIRS.contains(&path_string); + } + + if path.extension().map_or(true, |e| e != "rs") { + return false; + } + + if path_string.starts_with("src/test/ui") || path_string.starts_with("src/test/rustdoc-ui") { + let stderr_path = path.with_extension("stderr"); + if stderr_path.exists() { + // Expected to fail in some way + 
return false; + } + } + + !EXCLUDE_FILES.contains(&path_string) +} + +#[allow(dead_code)] +pub fn edition(path: &Path) -> &'static str { + if path.ends_with("dyn-2015-no-warnings-without-lints.rs") { + "2015" + } else { + "2018" + } +} + +pub fn clone_rust() { + let needs_clone = match fs::read_to_string("tests/rust/COMMIT") { + Err(_) => true, + Ok(contents) => contents.trim() != REVISION, + }; + if needs_clone { + download_and_unpack().unwrap(); + } + let mut missing = String::new(); + let test_src = Path::new("tests/rust"); + for exclude in EXCLUDE_FILES { + if !test_src.join(exclude).is_file() { + missing += "\ntests/rust/"; + missing += exclude; + } + } + for exclude in EXCLUDE_DIRS { + if !test_src.join(exclude).is_dir() { + missing += "\ntests/rust/"; + missing += exclude; + missing += "/"; + } + } + if !missing.is_empty() { + panic!("excluded test file does not exist:{}\n", missing); + } +} + +fn download_and_unpack() -> Result<()> { + let url = format!( + "https://github.com/rust-lang/rust/archive/{}.tar.gz", + REVISION + ); + let response = reqwest::blocking::get(url)?.error_for_status()?; + let progress = Progress::new(response); + let decoder = GzDecoder::new(progress); + let mut archive = Archive::new(decoder); + let prefix = format!("rust-{}", REVISION); + + let tests_rust = Path::new("tests/rust"); + if tests_rust.exists() { + fs::remove_dir_all(tests_rust)?; + } + + for entry in archive.entries()? { + let mut entry = entry?; + let path = entry.path()?; + if path == Path::new("pax_global_header") { + continue; + } + let relative = path.strip_prefix(&prefix)?; + let out = tests_rust.join(relative); + entry.unpack(&out)?; + } + + fs::write("tests/rust/COMMIT", REVISION)?; + Ok(()) +} diff --git a/third_party/rust/syn/tests/repo/progress.rs b/third_party/rust/syn/tests/repo/progress.rs new file mode 100644 index 0000000000..28c8a44b12 --- /dev/null +++ b/third_party/rust/syn/tests/repo/progress.rs @@ -0,0 +1,37 @@ +use std::io::{Read, Result}; +use std::time::{Duration, Instant}; + +pub struct Progress<R> { + bytes: usize, + tick: Instant, + stream: R, +} + +impl<R> Progress<R> { + pub fn new(stream: R) -> Self { + Progress { + bytes: 0, + tick: Instant::now() + Duration::from_millis(2000), + stream, + } + } +} + +impl<R: Read> Read for Progress<R> { + fn read(&mut self, buf: &mut [u8]) -> Result<usize> { + let num = self.stream.read(buf)?; + self.bytes += num; + let now = Instant::now(); + if now > self.tick { + self.tick = now + Duration::from_millis(500); + errorf!("downloading... 
{} bytes\n", self.bytes); + } + Ok(num) + } +} + +impl<R> Drop for Progress<R> { + fn drop(&mut self) { + errorf!("done ({} bytes)\n", self.bytes); + } +} diff --git a/third_party/rust/syn/tests/test_asyncness.rs b/third_party/rust/syn/tests/test_asyncness.rs new file mode 100644 index 0000000000..0efef5976f --- /dev/null +++ b/third_party/rust/syn/tests/test_asyncness.rs @@ -0,0 +1,37 @@ +#[macro_use] +mod macros; + +use syn::{Expr, Item}; + +#[test] +fn test_async_fn() { + let input = "async fn process() {}"; + + snapshot!(input as Item, @r###" + Item::Fn { + vis: Inherited, + sig: Signature { + asyncness: Some, + ident: "process", + generics: Generics, + output: Default, + }, + block: Block, + } + "###); +} + +#[test] +fn test_async_closure() { + let input = "async || {}"; + + snapshot!(input as Expr, @r###" + Expr::Closure { + asyncness: Some, + output: Default, + body: Expr::Block { + block: Block, + }, + } + "###); +} diff --git a/third_party/rust/syn/tests/test_attribute.rs b/third_party/rust/syn/tests/test_attribute.rs new file mode 100644 index 0000000000..c26bd090ec --- /dev/null +++ b/third_party/rust/syn/tests/test_attribute.rs @@ -0,0 +1,336 @@ +#[macro_use] +mod macros; + +use syn::parse::Parser; +use syn::{Attribute, Meta}; + +#[test] +fn test_meta_item_word() { + let meta = test("#[foo]"); + + snapshot!(meta, @r###" + Path(Path { + segments: [ + PathSegment { + ident: "foo", + arguments: None, + }, + ], + }) + "###); +} + +#[test] +fn test_meta_item_name_value() { + let meta = test("#[foo = 5]"); + + snapshot!(meta, @r###" + Meta::NameValue { + path: Path { + segments: [ + PathSegment { + ident: "foo", + arguments: None, + }, + ], + }, + lit: 5, + } + "###); +} + +#[test] +fn test_meta_item_bool_value() { + let meta = test("#[foo = true]"); + + snapshot!(meta, @r###" + Meta::NameValue { + path: Path { + segments: [ + PathSegment { + ident: "foo", + arguments: None, + }, + ], + }, + lit: Lit::Bool { + value: true, + }, + } + "###); + + let meta = test("#[foo = false]"); + + snapshot!(meta, @r###" + Meta::NameValue { + path: Path { + segments: [ + PathSegment { + ident: "foo", + arguments: None, + }, + ], + }, + lit: Lit::Bool { + value: false, + }, + } + "###); +} + +#[test] +fn test_meta_item_list_lit() { + let meta = test("#[foo(5)]"); + + snapshot!(meta, @r###" + Meta::List { + path: Path { + segments: [ + PathSegment { + ident: "foo", + arguments: None, + }, + ], + }, + nested: [ + Lit(5), + ], + } + "###); +} + +#[test] +fn test_meta_item_list_word() { + let meta = test("#[foo(bar)]"); + + snapshot!(meta, @r###" + Meta::List { + path: Path { + segments: [ + PathSegment { + ident: "foo", + arguments: None, + }, + ], + }, + nested: [ + Meta(Path(Path { + segments: [ + PathSegment { + ident: "bar", + arguments: None, + }, + ], + })), + ], + } + "###); +} + +#[test] +fn test_meta_item_list_name_value() { + let meta = test("#[foo(bar = 5)]"); + + snapshot!(meta, @r###" + Meta::List { + path: Path { + segments: [ + PathSegment { + ident: "foo", + arguments: None, + }, + ], + }, + nested: [ + Meta(Meta::NameValue { + path: Path { + segments: [ + PathSegment { + ident: "bar", + arguments: None, + }, + ], + }, + lit: 5, + }), + ], + } + "###); +} + +#[test] +fn test_meta_item_list_bool_value() { + let meta = test("#[foo(bar = true)]"); + + snapshot!(meta, @r###" + Meta::List { + path: Path { + segments: [ + PathSegment { + ident: "foo", + arguments: None, + }, + ], + }, + nested: [ + Meta(Meta::NameValue { + path: Path { + segments: [ + PathSegment { + ident: "bar", + arguments: 
None, + }, + ], + }, + lit: Lit::Bool { + value: true, + }, + }), + ], + } + "###); +} + +#[test] +fn test_meta_item_multiple() { + let meta = test("#[foo(word, name = 5, list(name2 = 6), word2)]"); + + snapshot!(meta, @r###" + Meta::List { + path: Path { + segments: [ + PathSegment { + ident: "foo", + arguments: None, + }, + ], + }, + nested: [ + Meta(Path(Path { + segments: [ + PathSegment { + ident: "word", + arguments: None, + }, + ], + })), + Meta(Meta::NameValue { + path: Path { + segments: [ + PathSegment { + ident: "name", + arguments: None, + }, + ], + }, + lit: 5, + }), + Meta(Meta::List { + path: Path { + segments: [ + PathSegment { + ident: "list", + arguments: None, + }, + ], + }, + nested: [ + Meta(Meta::NameValue { + path: Path { + segments: [ + PathSegment { + ident: "name2", + arguments: None, + }, + ], + }, + lit: 6, + }), + ], + }), + Meta(Path(Path { + segments: [ + PathSegment { + ident: "word2", + arguments: None, + }, + ], + })), + ], + } + "###); +} + +#[test] +fn test_bool_lit() { + let meta = test("#[foo(true)]"); + + snapshot!(meta, @r###" + Meta::List { + path: Path { + segments: [ + PathSegment { + ident: "foo", + arguments: None, + }, + ], + }, + nested: [ + Lit(Lit::Bool { + value: true, + }), + ], + } + "###); +} + +#[test] +fn test_negative_lit() { + let meta = test("#[form(min = -1, max = 200)]"); + + snapshot!(meta, @r###" + Meta::List { + path: Path { + segments: [ + PathSegment { + ident: "form", + arguments: None, + }, + ], + }, + nested: [ + Meta(Meta::NameValue { + path: Path { + segments: [ + PathSegment { + ident: "min", + arguments: None, + }, + ], + }, + lit: -1, + }), + Meta(Meta::NameValue { + path: Path { + segments: [ + PathSegment { + ident: "max", + arguments: None, + }, + ], + }, + lit: 200, + }), + ], + } + "###); +} + +fn test(input: &str) -> Meta { + let attrs = Attribute::parse_outer.parse_str(input).unwrap(); + + assert_eq!(attrs.len(), 1); + let attr = attrs.into_iter().next().unwrap(); + + attr.parse_meta().unwrap() +} diff --git a/third_party/rust/syn/tests/test_derive_input.rs b/third_party/rust/syn/tests/test_derive_input.rs new file mode 100644 index 0000000000..1eff01186a --- /dev/null +++ b/third_party/rust/syn/tests/test_derive_input.rs @@ -0,0 +1,894 @@ +#![allow(clippy::assertions_on_result_states, clippy::too_many_lines)] + +#[macro_use] +mod macros; + +use quote::quote; +use syn::{Data, DeriveInput}; + +#[test] +fn test_unit() { + let input = quote! { + struct Unit; + }; + + snapshot!(input as DeriveInput, @r###" + DeriveInput { + vis: Inherited, + ident: "Unit", + generics: Generics, + data: Data::Struct { + fields: Unit, + semi_token: Some, + }, + } + "###); +} + +#[test] +fn test_struct() { + let input = quote! 
{ + #[derive(Debug, Clone)] + pub struct Item { + pub ident: Ident, + pub attrs: Vec<Attribute> + } + }; + + snapshot!(input as DeriveInput, @r###" + DeriveInput { + attrs: [ + Attribute { + style: Outer, + path: Path { + segments: [ + PathSegment { + ident: "derive", + arguments: None, + }, + ], + }, + tokens: TokenStream(`(Debug , Clone)`), + }, + ], + vis: Visibility::Public, + ident: "Item", + generics: Generics, + data: Data::Struct { + fields: Fields::Named { + named: [ + Field { + vis: Visibility::Public, + ident: Some("ident"), + colon_token: Some, + ty: Type::Path { + path: Path { + segments: [ + PathSegment { + ident: "Ident", + arguments: None, + }, + ], + }, + }, + }, + Field { + vis: Visibility::Public, + ident: Some("attrs"), + colon_token: Some, + ty: Type::Path { + path: Path { + segments: [ + PathSegment { + ident: "Vec", + arguments: PathArguments::AngleBracketed { + args: [ + Type(Type::Path { + path: Path { + segments: [ + PathSegment { + ident: "Attribute", + arguments: None, + }, + ], + }, + }), + ], + }, + }, + ], + }, + }, + }, + ], + }, + }, + } + "###); + + snapshot!(input.attrs[0].parse_meta().unwrap(), @r###" + Meta::List { + path: Path { + segments: [ + PathSegment { + ident: "derive", + arguments: None, + }, + ], + }, + nested: [ + Meta(Path(Path { + segments: [ + PathSegment { + ident: "Debug", + arguments: None, + }, + ], + })), + Meta(Path(Path { + segments: [ + PathSegment { + ident: "Clone", + arguments: None, + }, + ], + })), + ], + } + "###); +} + +#[test] +fn test_union() { + let input = quote! { + union MaybeUninit<T> { + uninit: (), + value: T + } + }; + + snapshot!(input as DeriveInput, @r###" + DeriveInput { + vis: Inherited, + ident: "MaybeUninit", + generics: Generics { + lt_token: Some, + params: [ + Type(TypeParam { + ident: "T", + }), + ], + gt_token: Some, + }, + data: Data::Union { + fields: FieldsNamed { + named: [ + Field { + vis: Inherited, + ident: Some("uninit"), + colon_token: Some, + ty: Type::Tuple, + }, + Field { + vis: Inherited, + ident: Some("value"), + colon_token: Some, + ty: Type::Path { + path: Path { + segments: [ + PathSegment { + ident: "T", + arguments: None, + }, + ], + }, + }, + }, + ], + }, + }, + } + "###); +} + +#[test] +#[cfg(feature = "full")] +fn test_enum() { + let input = quote! { + /// See the std::result module documentation for details. 
+ #[must_use] + pub enum Result<T, E> { + Ok(T), + Err(E), + Surprise = 0isize, + + // Smuggling data into a proc_macro_derive, + // in the style of https://github.com/dtolnay/proc-macro-hack + ProcMacroHack = (0, "data").0 + } + }; + + snapshot!(input as DeriveInput, @r###" + DeriveInput { + attrs: [ + Attribute { + style: Outer, + path: Path { + segments: [ + PathSegment { + ident: "doc", + arguments: None, + }, + ], + }, + tokens: TokenStream(`= r" See the std::result module documentation for details."`), + }, + Attribute { + style: Outer, + path: Path { + segments: [ + PathSegment { + ident: "must_use", + arguments: None, + }, + ], + }, + tokens: TokenStream(``), + }, + ], + vis: Visibility::Public, + ident: "Result", + generics: Generics { + lt_token: Some, + params: [ + Type(TypeParam { + ident: "T", + }), + Type(TypeParam { + ident: "E", + }), + ], + gt_token: Some, + }, + data: Data::Enum { + variants: [ + Variant { + ident: "Ok", + fields: Fields::Unnamed { + unnamed: [ + Field { + vis: Inherited, + ty: Type::Path { + path: Path { + segments: [ + PathSegment { + ident: "T", + arguments: None, + }, + ], + }, + }, + }, + ], + }, + }, + Variant { + ident: "Err", + fields: Fields::Unnamed { + unnamed: [ + Field { + vis: Inherited, + ty: Type::Path { + path: Path { + segments: [ + PathSegment { + ident: "E", + arguments: None, + }, + ], + }, + }, + }, + ], + }, + }, + Variant { + ident: "Surprise", + fields: Unit, + discriminant: Some(Expr::Lit { + lit: 0isize, + }), + }, + Variant { + ident: "ProcMacroHack", + fields: Unit, + discriminant: Some(Expr::Field { + base: Expr::Tuple { + elems: [ + Expr::Lit { + lit: 0, + }, + Expr::Lit { + lit: "data", + }, + ], + }, + member: Unnamed(Index { + index: 0, + }), + }), + }, + ], + }, + } + "###); + + let meta_items: Vec<_> = input + .attrs + .into_iter() + .map(|attr| attr.parse_meta().unwrap()) + .collect(); + + snapshot!(meta_items, @r###" + [ + Meta::NameValue { + path: Path { + segments: [ + PathSegment { + ident: "doc", + arguments: None, + }, + ], + }, + lit: " See the std::result module documentation for details.", + }, + Path(Path { + segments: [ + PathSegment { + ident: "must_use", + arguments: None, + }, + ], + }), + ] + "###); +} + +#[test] +fn test_attr_with_path() { + let input = quote! { + #[::attr_args::identity + fn main() { assert_eq!(foo(), "Hello, world!"); }] + struct Dummy; + }; + + snapshot!(input as DeriveInput, @r###" + DeriveInput { + attrs: [ + Attribute { + style: Outer, + path: Path { + leading_colon: Some, + segments: [ + PathSegment { + ident: "attr_args", + arguments: None, + }, + PathSegment { + ident: "identity", + arguments: None, + }, + ], + }, + tokens: TokenStream(`fn main () { assert_eq ! (foo () , "Hello, world!") ; }`), + }, + ], + vis: Inherited, + ident: "Dummy", + generics: Generics, + data: Data::Struct { + fields: Unit, + semi_token: Some, + }, + } + "###); + + assert!(input.attrs[0].parse_meta().is_err()); +} + +#[test] +fn test_attr_with_non_mod_style_path() { + let input = quote! 
{ + #[inert <T>] + struct S; + }; + + snapshot!(input as DeriveInput, @r###" + DeriveInput { + attrs: [ + Attribute { + style: Outer, + path: Path { + segments: [ + PathSegment { + ident: "inert", + arguments: None, + }, + ], + }, + tokens: TokenStream(`< T >`), + }, + ], + vis: Inherited, + ident: "S", + generics: Generics, + data: Data::Struct { + fields: Unit, + semi_token: Some, + }, + } + "###); + + assert!(input.attrs[0].parse_meta().is_err()); +} + +#[test] +fn test_attr_with_mod_style_path_with_self() { + let input = quote! { + #[foo::self] + struct S; + }; + + snapshot!(input as DeriveInput, @r###" + DeriveInput { + attrs: [ + Attribute { + style: Outer, + path: Path { + segments: [ + PathSegment { + ident: "foo", + arguments: None, + }, + PathSegment { + ident: "self", + arguments: None, + }, + ], + }, + tokens: TokenStream(``), + }, + ], + vis: Inherited, + ident: "S", + generics: Generics, + data: Data::Struct { + fields: Unit, + semi_token: Some, + }, + } + "###); + + snapshot!(input.attrs[0].parse_meta().unwrap(), @r###" + Path(Path { + segments: [ + PathSegment { + ident: "foo", + arguments: None, + }, + PathSegment { + ident: "self", + arguments: None, + }, + ], + }) + "###); +} + +#[test] +fn test_pub_restricted() { + // Taken from tests/rust/src/test/ui/resolve/auxiliary/privacy-struct-ctor.rs + let input = quote! { + pub(in m) struct Z(pub(in m::n) u8); + }; + + snapshot!(input as DeriveInput, @r###" + DeriveInput { + vis: Visibility::Restricted { + in_token: Some, + path: Path { + segments: [ + PathSegment { + ident: "m", + arguments: None, + }, + ], + }, + }, + ident: "Z", + generics: Generics, + data: Data::Struct { + fields: Fields::Unnamed { + unnamed: [ + Field { + vis: Visibility::Restricted { + in_token: Some, + path: Path { + segments: [ + PathSegment { + ident: "m", + arguments: None, + }, + PathSegment { + ident: "n", + arguments: None, + }, + ], + }, + }, + ty: Type::Path { + path: Path { + segments: [ + PathSegment { + ident: "u8", + arguments: None, + }, + ], + }, + }, + }, + ], + }, + semi_token: Some, + }, + } + "###); +} + +#[test] +fn test_vis_crate() { + let input = quote! { + crate struct S; + }; + + snapshot!(input as DeriveInput, @r###" + DeriveInput { + vis: Visibility::Crate, + ident: "S", + generics: Generics, + data: Data::Struct { + fields: Unit, + semi_token: Some, + }, + } + "###); +} + +#[test] +fn test_pub_restricted_crate() { + let input = quote! { + pub(crate) struct S; + }; + + snapshot!(input as DeriveInput, @r###" + DeriveInput { + vis: Visibility::Restricted { + path: Path { + segments: [ + PathSegment { + ident: "crate", + arguments: None, + }, + ], + }, + }, + ident: "S", + generics: Generics, + data: Data::Struct { + fields: Unit, + semi_token: Some, + }, + } + "###); +} + +#[test] +fn test_pub_restricted_super() { + let input = quote! { + pub(super) struct S; + }; + + snapshot!(input as DeriveInput, @r###" + DeriveInput { + vis: Visibility::Restricted { + path: Path { + segments: [ + PathSegment { + ident: "super", + arguments: None, + }, + ], + }, + }, + ident: "S", + generics: Generics, + data: Data::Struct { + fields: Unit, + semi_token: Some, + }, + } + "###); +} + +#[test] +fn test_pub_restricted_in_super() { + let input = quote! 
{ + pub(in super) struct S; + }; + + snapshot!(input as DeriveInput, @r###" + DeriveInput { + vis: Visibility::Restricted { + in_token: Some, + path: Path { + segments: [ + PathSegment { + ident: "super", + arguments: None, + }, + ], + }, + }, + ident: "S", + generics: Generics, + data: Data::Struct { + fields: Unit, + semi_token: Some, + }, + } + "###); +} + +#[test] +fn test_fields_on_unit_struct() { + let input = quote! { + struct S; + }; + + snapshot!(input as DeriveInput, @r###" + DeriveInput { + vis: Inherited, + ident: "S", + generics: Generics, + data: Data::Struct { + fields: Unit, + semi_token: Some, + }, + } + "###); + + let data = match input.data { + Data::Struct(data) => data, + _ => panic!("expected a struct"), + }; + + assert_eq!(0, data.fields.iter().count()); +} + +#[test] +fn test_fields_on_named_struct() { + let input = quote! { + struct S { + foo: i32, + pub bar: String, + } + }; + + snapshot!(input as DeriveInput, @r###" + DeriveInput { + vis: Inherited, + ident: "S", + generics: Generics, + data: Data::Struct { + fields: Fields::Named { + named: [ + Field { + vis: Inherited, + ident: Some("foo"), + colon_token: Some, + ty: Type::Path { + path: Path { + segments: [ + PathSegment { + ident: "i32", + arguments: None, + }, + ], + }, + }, + }, + Field { + vis: Visibility::Public, + ident: Some("bar"), + colon_token: Some, + ty: Type::Path { + path: Path { + segments: [ + PathSegment { + ident: "String", + arguments: None, + }, + ], + }, + }, + }, + ], + }, + }, + } + "###); + + let data = match input.data { + Data::Struct(data) => data, + _ => panic!("expected a struct"), + }; + + snapshot!(data.fields.into_iter().collect::<Vec<_>>(), @r###" + [ + Field { + vis: Inherited, + ident: Some("foo"), + colon_token: Some, + ty: Type::Path { + path: Path { + segments: [ + PathSegment { + ident: "i32", + arguments: None, + }, + ], + }, + }, + }, + Field { + vis: Visibility::Public, + ident: Some("bar"), + colon_token: Some, + ty: Type::Path { + path: Path { + segments: [ + PathSegment { + ident: "String", + arguments: None, + }, + ], + }, + }, + }, + ] + "###); +} + +#[test] +fn test_fields_on_tuple_struct() { + let input = quote! { + struct S(i32, pub String); + }; + + snapshot!(input as DeriveInput, @r###" + DeriveInput { + vis: Inherited, + ident: "S", + generics: Generics, + data: Data::Struct { + fields: Fields::Unnamed { + unnamed: [ + Field { + vis: Inherited, + ty: Type::Path { + path: Path { + segments: [ + PathSegment { + ident: "i32", + arguments: None, + }, + ], + }, + }, + }, + Field { + vis: Visibility::Public, + ty: Type::Path { + path: Path { + segments: [ + PathSegment { + ident: "String", + arguments: None, + }, + ], + }, + }, + }, + ], + }, + semi_token: Some, + }, + } + "###); + + let data = match input.data { + Data::Struct(data) => data, + _ => panic!("expected a struct"), + }; + + snapshot!(data.fields.iter().collect::<Vec<_>>(), @r###" + [ + Field { + vis: Inherited, + ty: Type::Path { + path: Path { + segments: [ + PathSegment { + ident: "i32", + arguments: None, + }, + ], + }, + }, + }, + Field { + vis: Visibility::Public, + ty: Type::Path { + path: Path { + segments: [ + PathSegment { + ident: "String", + arguments: None, + }, + ], + }, + }, + }, + ] + "###); +} + +#[test] +fn test_ambiguous_crate() { + let input = quote! { + // The field type is `(crate::X)` not `crate (::X)`. 
+ struct S(crate::X); + }; + + snapshot!(input as DeriveInput, @r###" + DeriveInput { + vis: Inherited, + ident: "S", + generics: Generics, + data: Data::Struct { + fields: Fields::Unnamed { + unnamed: [ + Field { + vis: Inherited, + ty: Type::Path { + path: Path { + segments: [ + PathSegment { + ident: "crate", + arguments: None, + }, + PathSegment { + ident: "X", + arguments: None, + }, + ], + }, + }, + }, + ], + }, + semi_token: Some, + }, + } + "###); +} diff --git a/third_party/rust/syn/tests/test_expr.rs b/third_party/rust/syn/tests/test_expr.rs new file mode 100644 index 0000000000..e5b151fd80 --- /dev/null +++ b/third_party/rust/syn/tests/test_expr.rs @@ -0,0 +1,306 @@ +#[macro_use] +mod macros; + +use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree}; +use quote::quote; +use std::iter::FromIterator; +use syn::{Expr, ExprRange}; + +#[test] +fn test_expr_parse() { + let tokens = quote!(..100u32); + snapshot!(tokens as Expr, @r###" + Expr::Range { + limits: HalfOpen, + to: Some(Expr::Lit { + lit: 100u32, + }), + } + "###); + + let tokens = quote!(..100u32); + snapshot!(tokens as ExprRange, @r###" + ExprRange { + limits: HalfOpen, + to: Some(Expr::Lit { + lit: 100u32, + }), + } + "###); +} + +#[test] +fn test_await() { + // Must not parse as Expr::Field. + let tokens = quote!(fut.await); + + snapshot!(tokens as Expr, @r###" + Expr::Await { + base: Expr::Path { + path: Path { + segments: [ + PathSegment { + ident: "fut", + arguments: None, + }, + ], + }, + }, + } + "###); +} + +#[rustfmt::skip] +#[test] +fn test_tuple_multi_index() { + let expected = snapshot!("tuple.0.0" as Expr, @r###" + Expr::Field { + base: Expr::Field { + base: Expr::Path { + path: Path { + segments: [ + PathSegment { + ident: "tuple", + arguments: None, + }, + ], + }, + }, + member: Unnamed(Index { + index: 0, + }), + }, + member: Unnamed(Index { + index: 0, + }), + } + "###); + + for &input in &[ + "tuple .0.0", + "tuple. 0.0", + "tuple.0 .0", + "tuple.0. 0", + "tuple . 0 . 0", + ] { + assert_eq!(expected, syn::parse_str(input).unwrap()); + } + + for tokens in vec![ + quote!(tuple.0.0), + quote!(tuple .0.0), + quote!(tuple. 0.0), + quote!(tuple.0 .0), + quote!(tuple.0. 0), + quote!(tuple . 0 . 0), + ] { + assert_eq!(expected, syn::parse2(tokens).unwrap()); + } +} + +#[test] +fn test_macro_variable_func() { + // mimics the token stream corresponding to `$fn()` + let tokens = TokenStream::from_iter(vec![ + TokenTree::Group(Group::new(Delimiter::None, quote! { f })), + TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())), + ]); + + snapshot!(tokens as Expr, @r###" + Expr::Call { + func: Expr::Group { + expr: Expr::Path { + path: Path { + segments: [ + PathSegment { + ident: "f", + arguments: None, + }, + ], + }, + }, + }, + } + "###); + + let tokens = TokenStream::from_iter(vec![ + TokenTree::Punct(Punct::new('#', Spacing::Alone)), + TokenTree::Group(Group::new(Delimiter::Bracket, quote! { outside })), + TokenTree::Group(Group::new(Delimiter::None, quote! 
{ #[inside] f })), + TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())), + ]); + + snapshot!(tokens as Expr, @r###" + Expr::Call { + attrs: [ + Attribute { + style: Outer, + path: Path { + segments: [ + PathSegment { + ident: "outside", + arguments: None, + }, + ], + }, + tokens: TokenStream(``), + }, + ], + func: Expr::Group { + expr: Expr::Path { + attrs: [ + Attribute { + style: Outer, + path: Path { + segments: [ + PathSegment { + ident: "inside", + arguments: None, + }, + ], + }, + tokens: TokenStream(``), + }, + ], + path: Path { + segments: [ + PathSegment { + ident: "f", + arguments: None, + }, + ], + }, + }, + }, + } + "###); +} + +#[test] +fn test_macro_variable_macro() { + // mimics the token stream corresponding to `$macro!()` + let tokens = TokenStream::from_iter(vec![ + TokenTree::Group(Group::new(Delimiter::None, quote! { m })), + TokenTree::Punct(Punct::new('!', Spacing::Alone)), + TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())), + ]); + + snapshot!(tokens as Expr, @r###" + Expr::Macro { + mac: Macro { + path: Path { + segments: [ + PathSegment { + ident: "m", + arguments: None, + }, + ], + }, + delimiter: Paren, + tokens: TokenStream(``), + }, + } + "###); +} + +#[test] +fn test_macro_variable_struct() { + // mimics the token stream corresponding to `$struct {}` + let tokens = TokenStream::from_iter(vec![ + TokenTree::Group(Group::new(Delimiter::None, quote! { S })), + TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())), + ]); + + snapshot!(tokens as Expr, @r###" + Expr::Struct { + path: Path { + segments: [ + PathSegment { + ident: "S", + arguments: None, + }, + ], + }, + } + "###); +} + +#[test] +fn test_macro_variable_match_arm() { + // mimics the token stream corresponding to `match v { _ => $expr }` + let tokens = TokenStream::from_iter(vec![ + TokenTree::Ident(Ident::new("match", Span::call_site())), + TokenTree::Ident(Ident::new("v", Span::call_site())), + TokenTree::Group(Group::new( + Delimiter::Brace, + TokenStream::from_iter(vec![ + TokenTree::Punct(Punct::new('_', Spacing::Alone)), + TokenTree::Punct(Punct::new('=', Spacing::Joint)), + TokenTree::Punct(Punct::new('>', Spacing::Alone)), + TokenTree::Group(Group::new(Delimiter::None, quote! { #[a] () })), + ]), + )), + ]); + + snapshot!(tokens as Expr, @r###" + Expr::Match { + expr: Expr::Path { + path: Path { + segments: [ + PathSegment { + ident: "v", + arguments: None, + }, + ], + }, + }, + arms: [ + Arm { + pat: Pat::Wild, + body: Expr::Group { + expr: Expr::Tuple { + attrs: [ + Attribute { + style: Outer, + path: Path { + segments: [ + PathSegment { + ident: "a", + arguments: None, + }, + ], + }, + tokens: TokenStream(``), + }, + ], + }, + }, + }, + ], + } + "###); +} + +// https://github.com/dtolnay/syn/issues/1019 +#[test] +fn test_closure_vs_rangefull() { + #[rustfmt::skip] // rustfmt bug: https://github.com/rust-lang/rustfmt/issues/4808 + let tokens = quote!(|| .. 
.method()); + snapshot!(tokens as Expr, @r###" + Expr::MethodCall { + receiver: Expr::Closure { + output: Default, + body: Expr::Range { + limits: HalfOpen, + }, + }, + method: "method", + } + "###); +} + +#[test] +fn test_postfix_operator_after_cast() { + syn::parse_str::<Expr>("|| &x as T[0]").unwrap_err(); + syn::parse_str::<Expr>("|| () as ()()").unwrap_err(); +} diff --git a/third_party/rust/syn/tests/test_generics.rs b/third_party/rust/syn/tests/test_generics.rs new file mode 100644 index 0000000000..330f02f4dc --- /dev/null +++ b/third_party/rust/syn/tests/test_generics.rs @@ -0,0 +1,285 @@ +#![allow(clippy::too_many_lines)] + +#[macro_use] +mod macros; + +use quote::quote; +use syn::{DeriveInput, ItemFn, TypeParamBound, WhereClause, WherePredicate}; + +#[test] +fn test_split_for_impl() { + let input = quote! { + struct S<'a, 'b: 'a, #[may_dangle] T: 'a = ()> where T: Debug; + }; + + snapshot!(input as DeriveInput, @r###" + DeriveInput { + vis: Inherited, + ident: "S", + generics: Generics { + lt_token: Some, + params: [ + Lifetime(LifetimeDef { + lifetime: Lifetime { + ident: "a", + }, + }), + Lifetime(LifetimeDef { + lifetime: Lifetime { + ident: "b", + }, + colon_token: Some, + bounds: [ + Lifetime { + ident: "a", + }, + ], + }), + Type(TypeParam { + attrs: [ + Attribute { + style: Outer, + path: Path { + segments: [ + PathSegment { + ident: "may_dangle", + arguments: None, + }, + ], + }, + tokens: TokenStream(``), + }, + ], + ident: "T", + colon_token: Some, + bounds: [ + Lifetime(Lifetime { + ident: "a", + }), + ], + eq_token: Some, + default: Some(Type::Tuple), + }), + ], + gt_token: Some, + where_clause: Some(WhereClause { + predicates: [ + Type(PredicateType { + bounded_ty: Type::Path { + path: Path { + segments: [ + PathSegment { + ident: "T", + arguments: None, + }, + ], + }, + }, + bounds: [ + Trait(TraitBound { + modifier: None, + path: Path { + segments: [ + PathSegment { + ident: "Debug", + arguments: None, + }, + ], + }, + }), + ], + }), + ], + }), + }, + data: Data::Struct { + fields: Unit, + semi_token: Some, + }, + } + "###); + + let generics = input.generics; + let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); + + let generated = quote! { + impl #impl_generics MyTrait for Test #ty_generics #where_clause {} + }; + let expected = quote! { + impl<'a, 'b: 'a, #[may_dangle] T: 'a> MyTrait + for Test<'a, 'b, T> + where + T: Debug + {} + }; + assert_eq!(generated.to_string(), expected.to_string()); + + let turbofish = ty_generics.as_turbofish(); + let generated = quote! { + Test #turbofish + }; + let expected = quote! 
{ + Test::<'a, 'b, T> + }; + assert_eq!(generated.to_string(), expected.to_string()); +} + +#[test] +fn test_ty_param_bound() { + let tokens = quote!('a); + snapshot!(tokens as TypeParamBound, @r###" + Lifetime(Lifetime { + ident: "a", + }) + "###); + + let tokens = quote!('_); + snapshot!(tokens as TypeParamBound, @r###" + Lifetime(Lifetime { + ident: "_", + }) + "###); + + let tokens = quote!(Debug); + snapshot!(tokens as TypeParamBound, @r###" + Trait(TraitBound { + modifier: None, + path: Path { + segments: [ + PathSegment { + ident: "Debug", + arguments: None, + }, + ], + }, + }) + "###); + + let tokens = quote!(?Sized); + snapshot!(tokens as TypeParamBound, @r###" + Trait(TraitBound { + modifier: Maybe, + path: Path { + segments: [ + PathSegment { + ident: "Sized", + arguments: None, + }, + ], + }, + }) + "###); +} + +#[test] +fn test_fn_precedence_in_where_clause() { + // This should parse as two separate bounds, `FnOnce() -> i32` and `Send` - not + // `FnOnce() -> (i32 + Send)`. + let input = quote! { + fn f<G>() + where + G: FnOnce() -> i32 + Send, + { + } + }; + + snapshot!(input as ItemFn, @r###" + ItemFn { + vis: Inherited, + sig: Signature { + ident: "f", + generics: Generics { + lt_token: Some, + params: [ + Type(TypeParam { + ident: "G", + }), + ], + gt_token: Some, + where_clause: Some(WhereClause { + predicates: [ + Type(PredicateType { + bounded_ty: Type::Path { + path: Path { + segments: [ + PathSegment { + ident: "G", + arguments: None, + }, + ], + }, + }, + bounds: [ + Trait(TraitBound { + modifier: None, + path: Path { + segments: [ + PathSegment { + ident: "FnOnce", + arguments: PathArguments::Parenthesized { + output: Type( + Type::Path { + path: Path { + segments: [ + PathSegment { + ident: "i32", + arguments: None, + }, + ], + }, + }, + ), + }, + }, + ], + }, + }), + Trait(TraitBound { + modifier: None, + path: Path { + segments: [ + PathSegment { + ident: "Send", + arguments: None, + }, + ], + }, + }), + ], + }), + ], + }), + }, + output: Default, + }, + block: Block, + } + "###); + + let where_clause = input.sig.generics.where_clause.as_ref().unwrap(); + assert_eq!(where_clause.predicates.len(), 1); + + let predicate = match &where_clause.predicates[0] { + WherePredicate::Type(pred) => pred, + _ => panic!("wrong predicate kind"), + }; + + assert_eq!(predicate.bounds.len(), 2, "{:#?}", predicate.bounds); + + let first_bound = &predicate.bounds[0]; + assert_eq!(quote!(#first_bound).to_string(), "FnOnce () -> i32"); + + let second_bound = &predicate.bounds[1]; + assert_eq!(quote!(#second_bound).to_string(), "Send"); +} + +#[test] +fn test_where_clause_at_end_of_input() { + let input = quote! 
{ + where + }; + + snapshot!(input as WhereClause, @"WhereClause"); + + assert_eq!(input.predicates.len(), 0); +} diff --git a/third_party/rust/syn/tests/test_grouping.rs b/third_party/rust/syn/tests/test_grouping.rs new file mode 100644 index 0000000000..9eb7eee9b7 --- /dev/null +++ b/third_party/rust/syn/tests/test_grouping.rs @@ -0,0 +1,52 @@ +#[macro_use] +mod macros; + +use proc_macro2::{Delimiter, Group, Literal, Punct, Spacing, TokenStream, TokenTree}; +use std::iter::FromIterator; +use syn::Expr; + +#[test] +fn test_grouping() { + let tokens: TokenStream = TokenStream::from_iter(vec![ + TokenTree::Literal(Literal::i32_suffixed(1)), + TokenTree::Punct(Punct::new('+', Spacing::Alone)), + TokenTree::Group(Group::new( + Delimiter::None, + TokenStream::from_iter(vec![ + TokenTree::Literal(Literal::i32_suffixed(2)), + TokenTree::Punct(Punct::new('+', Spacing::Alone)), + TokenTree::Literal(Literal::i32_suffixed(3)), + ]), + )), + TokenTree::Punct(Punct::new('*', Spacing::Alone)), + TokenTree::Literal(Literal::i32_suffixed(4)), + ]); + + assert_eq!(tokens.to_string(), "1i32 + 2i32 + 3i32 * 4i32"); + + snapshot!(tokens as Expr, @r###" + Expr::Binary { + left: Expr::Lit { + lit: 1i32, + }, + op: Add, + right: Expr::Binary { + left: Expr::Group { + expr: Expr::Binary { + left: Expr::Lit { + lit: 2i32, + }, + op: Add, + right: Expr::Lit { + lit: 3i32, + }, + }, + }, + op: Mul, + right: Expr::Lit { + lit: 4i32, + }, + }, + } + "###); +} diff --git a/third_party/rust/syn/tests/test_ident.rs b/third_party/rust/syn/tests/test_ident.rs new file mode 100644 index 0000000000..ee01bfcc9f --- /dev/null +++ b/third_party/rust/syn/tests/test_ident.rs @@ -0,0 +1,85 @@ +use proc_macro2::{Ident, Span, TokenStream}; +use std::str::FromStr; +use syn::Result; + +fn parse(s: &str) -> Result<Ident> { + syn::parse2(TokenStream::from_str(s).unwrap()) +} + +fn new(s: &str) -> Ident { + Ident::new(s, Span::call_site()) +} + +#[test] +fn ident_parse() { + parse("String").unwrap(); +} + +#[test] +fn ident_parse_keyword() { + parse("abstract").unwrap_err(); +} + +#[test] +fn ident_parse_empty() { + parse("").unwrap_err(); +} + +#[test] +fn ident_parse_lifetime() { + parse("'static").unwrap_err(); +} + +#[test] +fn ident_parse_underscore() { + parse("_").unwrap_err(); +} + +#[test] +fn ident_parse_number() { + parse("255").unwrap_err(); +} + +#[test] +fn ident_parse_invalid() { + parse("a#").unwrap_err(); +} + +#[test] +fn ident_new() { + new("String"); +} + +#[test] +fn ident_new_keyword() { + new("abstract"); +} + +#[test] +#[should_panic(expected = "use Option<Ident>")] +fn ident_new_empty() { + new(""); +} + +#[test] +#[should_panic(expected = "not a valid Ident")] +fn ident_new_lifetime() { + new("'static"); +} + +#[test] +fn ident_new_underscore() { + new("_"); +} + +#[test] +#[should_panic(expected = "use Literal instead")] +fn ident_new_number() { + new("255"); +} + +#[test] +#[should_panic(expected = "\"a#\" is not a valid Ident")] +fn ident_new_invalid() { + new("a#"); +} diff --git a/third_party/rust/syn/tests/test_item.rs b/third_party/rust/syn/tests/test_item.rs new file mode 100644 index 0000000000..96df4b1aad --- /dev/null +++ b/third_party/rust/syn/tests/test_item.rs @@ -0,0 +1,336 @@ +#[macro_use] +mod macros; + +use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree}; +use quote::quote; +use std::iter::FromIterator; +use syn::{Item, ItemTrait}; + +#[test] +fn test_macro_variable_attr() { + // mimics the token stream corresponding to `$attr fn f() {}` + let tokens = 
TokenStream::from_iter(vec![ + TokenTree::Group(Group::new(Delimiter::None, quote! { #[test] })), + TokenTree::Ident(Ident::new("fn", Span::call_site())), + TokenTree::Ident(Ident::new("f", Span::call_site())), + TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())), + TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())), + ]); + + snapshot!(tokens as Item, @r###" + Item::Fn { + attrs: [ + Attribute { + style: Outer, + path: Path { + segments: [ + PathSegment { + ident: "test", + arguments: None, + }, + ], + }, + tokens: TokenStream(``), + }, + ], + vis: Inherited, + sig: Signature { + ident: "f", + generics: Generics, + output: Default, + }, + block: Block, + } + "###); +} + +#[test] +fn test_negative_impl() { + // Rustc parses all of the following. + + #[cfg(any())] + impl ! {} + let tokens = quote! { + impl ! {} + }; + snapshot!(tokens as Item, @r###" + Item::Impl { + generics: Generics, + self_ty: Type::Never, + } + "###); + + #[cfg(any())] + #[rustfmt::skip] + impl !Trait {} + let tokens = quote! { + impl !Trait {} + }; + snapshot!(tokens as Item, @r###" + Item::Impl { + generics: Generics, + self_ty: Verbatim(`! Trait`), + } + "###); + + #[cfg(any())] + impl !Trait for T {} + let tokens = quote! { + impl !Trait for T {} + }; + snapshot!(tokens as Item, @r###" + Item::Impl { + generics: Generics, + trait_: Some(( + Some, + Path { + segments: [ + PathSegment { + ident: "Trait", + arguments: None, + }, + ], + }, + )), + self_ty: Type::Path { + path: Path { + segments: [ + PathSegment { + ident: "T", + arguments: None, + }, + ], + }, + }, + } + "###); + + #[cfg(any())] + #[rustfmt::skip] + impl !! {} + let tokens = quote! { + impl !! {} + }; + snapshot!(tokens as Item, @r###" + Item::Impl { + generics: Generics, + self_ty: Verbatim(`! !`), + } + "###); +} + +#[test] +fn test_macro_variable_impl() { + // mimics the token stream corresponding to `impl $trait for $ty {}` + let tokens = TokenStream::from_iter(vec![ + TokenTree::Ident(Ident::new("impl", Span::call_site())), + TokenTree::Group(Group::new(Delimiter::None, quote!(Trait))), + TokenTree::Ident(Ident::new("for", Span::call_site())), + TokenTree::Group(Group::new(Delimiter::None, quote!(Type))), + TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())), + ]); + + snapshot!(tokens as Item, @r###" + Item::Impl { + generics: Generics, + trait_: Some(( + None, + Path { + segments: [ + PathSegment { + ident: "Trait", + arguments: None, + }, + ], + }, + )), + self_ty: Type::Group { + elem: Type::Path { + path: Path { + segments: [ + PathSegment { + ident: "Type", + arguments: None, + }, + ], + }, + }, + }, + } + "###); +} + +#[test] +fn test_supertraits() { + // Rustc parses all of the following. 
+ + #[rustfmt::skip] + let tokens = quote!(trait Trait where {}); + snapshot!(tokens as ItemTrait, @r###" + ItemTrait { + vis: Inherited, + ident: "Trait", + generics: Generics { + where_clause: Some(WhereClause), + }, + } + "###); + + #[rustfmt::skip] + let tokens = quote!(trait Trait: where {}); + snapshot!(tokens as ItemTrait, @r###" + ItemTrait { + vis: Inherited, + ident: "Trait", + generics: Generics { + where_clause: Some(WhereClause), + }, + colon_token: Some, + } + "###); + + #[rustfmt::skip] + let tokens = quote!(trait Trait: Sized where {}); + snapshot!(tokens as ItemTrait, @r###" + ItemTrait { + vis: Inherited, + ident: "Trait", + generics: Generics { + where_clause: Some(WhereClause), + }, + colon_token: Some, + supertraits: [ + Trait(TraitBound { + modifier: None, + path: Path { + segments: [ + PathSegment { + ident: "Sized", + arguments: None, + }, + ], + }, + }), + ], + } + "###); + + #[rustfmt::skip] + let tokens = quote!(trait Trait: Sized + where {}); + snapshot!(tokens as ItemTrait, @r###" + ItemTrait { + vis: Inherited, + ident: "Trait", + generics: Generics { + where_clause: Some(WhereClause), + }, + colon_token: Some, + supertraits: [ + Trait(TraitBound { + modifier: None, + path: Path { + segments: [ + PathSegment { + ident: "Sized", + arguments: None, + }, + ], + }, + }), + ], + } + "###); +} + +#[test] +fn test_type_empty_bounds() { + #[rustfmt::skip] + let tokens = quote! { + trait Foo { + type Bar: ; + } + }; + + snapshot!(tokens as ItemTrait, @r###" + ItemTrait { + vis: Inherited, + ident: "Foo", + generics: Generics, + items: [ + TraitItem::Type { + ident: "Bar", + generics: Generics, + colon_token: Some, + }, + ], + } + "###); +} + +#[test] +fn test_impl_visibility() { + let tokens = quote! { + pub default unsafe impl union {} + }; + + snapshot!(tokens as Item, @"Verbatim(`pub default unsafe impl union { }`)"); +} + +#[test] +fn test_impl_type_parameter_defaults() { + #[cfg(any())] + impl<T = ()> () {} + let tokens = quote! { + impl<T = ()> () {} + }; + snapshot!(tokens as Item, @r###" + Item::Impl { + generics: Generics { + lt_token: Some, + params: [ + Type(TypeParam { + ident: "T", + eq_token: Some, + default: Some(Type::Tuple), + }), + ], + gt_token: Some, + }, + self_ty: Type::Tuple, + }"###); +} + +#[test] +fn test_impl_trait_trailing_plus() { + let tokens = quote! { + fn f() -> impl Sized + {} + }; + + snapshot!(tokens as Item, @r###" + Item::Fn { + vis: Inherited, + sig: Signature { + ident: "f", + generics: Generics, + output: Type( + Type::ImplTrait { + bounds: [ + Trait(TraitBound { + modifier: None, + path: Path { + segments: [ + PathSegment { + ident: "Sized", + arguments: None, + }, + ], + }, + }), + ], + }, + ), + }, + block: Block, + } + "###); +} diff --git a/third_party/rust/syn/tests/test_iterators.rs b/third_party/rust/syn/tests/test_iterators.rs new file mode 100644 index 0000000000..0ab0fb9147 --- /dev/null +++ b/third_party/rust/syn/tests/test_iterators.rs @@ -0,0 +1,68 @@ +use syn::punctuated::{Pair, Punctuated}; +use syn::Token; + +#[macro_use] +mod macros; + +macro_rules! 
check_exact_size_iterator { + ($iter:expr) => {{ + let iter = $iter; + let size_hint = iter.size_hint(); + let len = iter.len(); + let count = iter.count(); + assert_eq!(len, count); + assert_eq!(size_hint, (count, Some(count))); + }}; +} + +#[test] +fn pairs() { + let mut p: Punctuated<_, Token![,]> = punctuated!(2, 3, 4); + + check_exact_size_iterator!(p.pairs()); + check_exact_size_iterator!(p.pairs_mut()); + check_exact_size_iterator!(p.into_pairs()); + + let mut p: Punctuated<_, Token![,]> = punctuated!(2, 3, 4); + + assert_eq!(p.pairs().next_back().map(Pair::into_value), Some(&4)); + assert_eq!( + p.pairs_mut().next_back().map(Pair::into_value), + Some(&mut 4) + ); + assert_eq!(p.into_pairs().next_back().map(Pair::into_value), Some(4)); +} + +#[test] +fn iter() { + let mut p: Punctuated<_, Token![,]> = punctuated!(2, 3, 4); + + check_exact_size_iterator!(p.iter()); + check_exact_size_iterator!(p.iter_mut()); + check_exact_size_iterator!(p.into_iter()); + + let mut p: Punctuated<_, Token![,]> = punctuated!(2, 3, 4); + + assert_eq!(p.iter().next_back(), Some(&4)); + assert_eq!(p.iter_mut().next_back(), Some(&mut 4)); + assert_eq!(p.into_iter().next_back(), Some(4)); +} + +#[test] +fn may_dangle() { + let p: Punctuated<_, Token![,]> = punctuated!(2, 3, 4); + for element in &p { + if *element == 2 { + drop(p); + break; + } + } + + let mut p: Punctuated<_, Token![,]> = punctuated!(2, 3, 4); + for element in &mut p { + if *element == 2 { + drop(p); + break; + } + } +} diff --git a/third_party/rust/syn/tests/test_lit.rs b/third_party/rust/syn/tests/test_lit.rs new file mode 100644 index 0000000000..ebe077c617 --- /dev/null +++ b/third_party/rust/syn/tests/test_lit.rs @@ -0,0 +1,266 @@ +#![allow(clippy::float_cmp, clippy::non_ascii_literal)] + +#[macro_use] +mod macros; + +use proc_macro2::{Delimiter, Group, Literal, Span, TokenStream, TokenTree}; +use quote::ToTokens; +use std::iter::FromIterator; +use std::str::FromStr; +use syn::{Lit, LitFloat, LitInt, LitStr}; + +fn lit(s: &str) -> Lit { + match TokenStream::from_str(s) + .unwrap() + .into_iter() + .next() + .unwrap() + { + TokenTree::Literal(lit) => Lit::new(lit), + _ => panic!(), + } +} + +#[test] +fn strings() { + fn test_string(s: &str, value: &str) { + match lit(s) { + Lit::Str(lit) => { + assert_eq!(lit.value(), value); + let again = lit.into_token_stream().to_string(); + if again != s { + test_string(&again, value); + } + } + wrong => panic!("{:?}", wrong), + } + } + + test_string("\"a\"", "a"); + test_string("\"\\n\"", "\n"); + test_string("\"\\r\"", "\r"); + test_string("\"\\t\"", "\t"); + test_string("\"🐕\"", "🐕"); // NOTE: This is an emoji + test_string("\"\\\"\"", "\""); + test_string("\"'\"", "'"); + test_string("\"\"", ""); + test_string("\"\\u{1F415}\"", "\u{1F415}"); + test_string("\"\\u{1_2__3_}\"", "\u{123}"); + test_string( + "\"contains\nnewlines\\\nescaped newlines\"", + "contains\nnewlinesescaped newlines", + ); + test_string("r\"raw\nstring\\\nhere\"", "raw\nstring\\\nhere"); + test_string("\"...\"q", "..."); + test_string("r\"...\"q", "..."); + test_string("r##\"...\"##q", "..."); +} + +#[test] +fn byte_strings() { + fn test_byte_string(s: &str, value: &[u8]) { + match lit(s) { + Lit::ByteStr(lit) => { + assert_eq!(lit.value(), value); + let again = lit.into_token_stream().to_string(); + if again != s { + test_byte_string(&again, value); + } + } + wrong => panic!("{:?}", wrong), + } + } + + test_byte_string("b\"a\"", b"a"); + test_byte_string("b\"\\n\"", b"\n"); + test_byte_string("b\"\\r\"", b"\r"); + 
test_byte_string("b\"\\t\"", b"\t"); + test_byte_string("b\"\\\"\"", b"\""); + test_byte_string("b\"'\"", b"'"); + test_byte_string("b\"\"", b""); + test_byte_string( + "b\"contains\nnewlines\\\nescaped newlines\"", + b"contains\nnewlinesescaped newlines", + ); + test_byte_string("br\"raw\nstring\\\nhere\"", b"raw\nstring\\\nhere"); + test_byte_string("b\"...\"q", b"..."); + test_byte_string("br\"...\"q", b"..."); + test_byte_string("br##\"...\"##q", b"..."); +} + +#[test] +fn bytes() { + fn test_byte(s: &str, value: u8) { + match lit(s) { + Lit::Byte(lit) => { + assert_eq!(lit.value(), value); + let again = lit.into_token_stream().to_string(); + assert_eq!(again, s); + } + wrong => panic!("{:?}", wrong), + } + } + + test_byte("b'a'", b'a'); + test_byte("b'\\n'", b'\n'); + test_byte("b'\\r'", b'\r'); + test_byte("b'\\t'", b'\t'); + test_byte("b'\\''", b'\''); + test_byte("b'\"'", b'"'); + test_byte("b'a'q", b'a'); +} + +#[test] +fn chars() { + fn test_char(s: &str, value: char) { + match lit(s) { + Lit::Char(lit) => { + assert_eq!(lit.value(), value); + let again = lit.into_token_stream().to_string(); + if again != s { + test_char(&again, value); + } + } + wrong => panic!("{:?}", wrong), + } + } + + test_char("'a'", 'a'); + test_char("'\\n'", '\n'); + test_char("'\\r'", '\r'); + test_char("'\\t'", '\t'); + test_char("'🐕'", '🐕'); // NOTE: This is an emoji + test_char("'\\''", '\''); + test_char("'\"'", '"'); + test_char("'\\u{1F415}'", '\u{1F415}'); + test_char("'a'q", 'a'); +} + +#[test] +fn ints() { + fn test_int(s: &str, value: u64, suffix: &str) { + match lit(s) { + Lit::Int(lit) => { + assert_eq!(lit.base10_digits().parse::<u64>().unwrap(), value); + assert_eq!(lit.suffix(), suffix); + let again = lit.into_token_stream().to_string(); + if again != s { + test_int(&again, value, suffix); + } + } + wrong => panic!("{:?}", wrong), + } + } + + test_int("5", 5, ""); + test_int("5u32", 5, "u32"); + test_int("0E", 0, "E"); + test_int("0ECMA", 0, "ECMA"); + test_int("0o0A", 0, "A"); + test_int("5_0", 50, ""); + test_int("5_____0_____", 50, ""); + test_int("0x7f", 127, ""); + test_int("0x7F", 127, ""); + test_int("0b1001", 9, ""); + test_int("0o73", 59, ""); + test_int("0x7Fu8", 127, "u8"); + test_int("0b1001i8", 9, "i8"); + test_int("0o73u32", 59, "u32"); + test_int("0x__7___f_", 127, ""); + test_int("0x__7___F_", 127, ""); + test_int("0b_1_0__01", 9, ""); + test_int("0o_7__3", 59, ""); + test_int("0x_7F__u8", 127, "u8"); + test_int("0b__10__0_1i8", 9, "i8"); + test_int("0o__7__________________3u32", 59, "u32"); + test_int("0e1\u{5c5}", 0, "e1\u{5c5}"); +} + +#[test] +fn floats() { + fn test_float(s: &str, value: f64, suffix: &str) { + match lit(s) { + Lit::Float(lit) => { + assert_eq!(lit.base10_digits().parse::<f64>().unwrap(), value); + assert_eq!(lit.suffix(), suffix); + let again = lit.into_token_stream().to_string(); + if again != s { + test_float(&again, value, suffix); + } + } + wrong => panic!("{:?}", wrong), + } + } + + test_float("5.5", 5.5, ""); + test_float("5.5E12", 5.5e12, ""); + test_float("5.5e12", 5.5e12, ""); + test_float("1.0__3e-12", 1.03e-12, ""); + test_float("1.03e+12", 1.03e12, ""); + test_float("9e99e99", 9e99, "e99"); + test_float("1e_0", 1.0, ""); + test_float("0.0ECMA", 0.0, "ECMA"); +} + +#[test] +fn negative() { + let span = Span::call_site(); + assert_eq!("-1", LitInt::new("-1", span).to_string()); + assert_eq!("-1i8", LitInt::new("-1i8", span).to_string()); + assert_eq!("-1i16", LitInt::new("-1i16", span).to_string()); + assert_eq!("-1i32", LitInt::new("-1i32", 
span).to_string()); + assert_eq!("-1i64", LitInt::new("-1i64", span).to_string()); + assert_eq!("-1.5", LitFloat::new("-1.5", span).to_string()); + assert_eq!("-1.5f32", LitFloat::new("-1.5f32", span).to_string()); + assert_eq!("-1.5f64", LitFloat::new("-1.5f64", span).to_string()); +} + +#[test] +fn suffix() { + fn get_suffix(token: &str) -> String { + let lit = syn::parse_str::<Lit>(token).unwrap(); + match lit { + Lit::Str(lit) => lit.suffix().to_owned(), + Lit::ByteStr(lit) => lit.suffix().to_owned(), + Lit::Byte(lit) => lit.suffix().to_owned(), + Lit::Char(lit) => lit.suffix().to_owned(), + Lit::Int(lit) => lit.suffix().to_owned(), + Lit::Float(lit) => lit.suffix().to_owned(), + _ => unimplemented!(), + } + } + + assert_eq!(get_suffix("\"\"s"), "s"); + assert_eq!(get_suffix("r\"\"r"), "r"); + assert_eq!(get_suffix("b\"\"b"), "b"); + assert_eq!(get_suffix("br\"\"br"), "br"); + assert_eq!(get_suffix("r#\"\"#r"), "r"); + assert_eq!(get_suffix("'c'c"), "c"); + assert_eq!(get_suffix("b'b'b"), "b"); + assert_eq!(get_suffix("1i32"), "i32"); + assert_eq!(get_suffix("1_i32"), "i32"); + assert_eq!(get_suffix("1.0f32"), "f32"); + assert_eq!(get_suffix("1.0_f32"), "f32"); +} + +#[test] +fn test_deep_group_empty() { + let tokens = TokenStream::from_iter(vec![TokenTree::Group(Group::new( + Delimiter::None, + TokenStream::from_iter(vec![TokenTree::Group(Group::new( + Delimiter::None, + TokenStream::from_iter(vec![TokenTree::Literal(Literal::string("hi"))]), + ))]), + ))]); + + snapshot!(tokens as Lit, @r#""hi""# ); +} + +#[test] +fn test_error() { + let err = syn::parse_str::<LitStr>("...").unwrap_err(); + assert_eq!("expected string literal", err.to_string()); + + let err = syn::parse_str::<LitStr>("5").unwrap_err(); + assert_eq!("expected string literal", err.to_string()); +} diff --git a/third_party/rust/syn/tests/test_meta.rs b/third_party/rust/syn/tests/test_meta.rs new file mode 100644 index 0000000000..9b3f30deeb --- /dev/null +++ b/third_party/rust/syn/tests/test_meta.rs @@ -0,0 +1,378 @@ +#![allow(clippy::shadow_unrelated, clippy::too_many_lines)] + +#[macro_use] +mod macros; + +use syn::{Meta, MetaList, MetaNameValue, NestedMeta}; + +#[test] +fn test_parse_meta_item_word() { + let input = "hello"; + + snapshot!(input as Meta, @r###" + Path(Path { + segments: [ + PathSegment { + ident: "hello", + arguments: None, + }, + ], + }) + "###); +} + +#[test] +fn test_parse_meta_name_value() { + let input = "foo = 5"; + let (inner, meta) = (input, input); + + snapshot!(inner as MetaNameValue, @r###" + MetaNameValue { + path: Path { + segments: [ + PathSegment { + ident: "foo", + arguments: None, + }, + ], + }, + lit: 5, + } + "###); + + snapshot!(meta as Meta, @r###" + Meta::NameValue { + path: Path { + segments: [ + PathSegment { + ident: "foo", + arguments: None, + }, + ], + }, + lit: 5, + } + "###); + + assert_eq!(meta, inner.into()); +} + +#[test] +fn test_parse_meta_name_value_with_keyword() { + let input = "static = 5"; + let (inner, meta) = (input, input); + + snapshot!(inner as MetaNameValue, @r###" + MetaNameValue { + path: Path { + segments: [ + PathSegment { + ident: "static", + arguments: None, + }, + ], + }, + lit: 5, + } + "###); + + snapshot!(meta as Meta, @r###" + Meta::NameValue { + path: Path { + segments: [ + PathSegment { + ident: "static", + arguments: None, + }, + ], + }, + lit: 5, + } + "###); + + assert_eq!(meta, inner.into()); +} + +#[test] +fn test_parse_meta_name_value_with_bool() { + let input = "true = 5"; + let (inner, meta) = (input, input); + + snapshot!(inner as 
MetaNameValue, @r###" + MetaNameValue { + path: Path { + segments: [ + PathSegment { + ident: "true", + arguments: None, + }, + ], + }, + lit: 5, + } + "###); + + snapshot!(meta as Meta, @r###" + Meta::NameValue { + path: Path { + segments: [ + PathSegment { + ident: "true", + arguments: None, + }, + ], + }, + lit: 5, + } + "###); + + assert_eq!(meta, inner.into()); +} + +#[test] +fn test_parse_meta_item_list_lit() { + let input = "foo(5)"; + let (inner, meta) = (input, input); + + snapshot!(inner as MetaList, @r###" + MetaList { + path: Path { + segments: [ + PathSegment { + ident: "foo", + arguments: None, + }, + ], + }, + nested: [ + Lit(5), + ], + } + "###); + + snapshot!(meta as Meta, @r###" + Meta::List { + path: Path { + segments: [ + PathSegment { + ident: "foo", + arguments: None, + }, + ], + }, + nested: [ + Lit(5), + ], + } + "###); + + assert_eq!(meta, inner.into()); +} + +#[test] +fn test_parse_meta_item_multiple() { + let input = "foo(word, name = 5, list(name2 = 6), word2)"; + let (inner, meta) = (input, input); + + snapshot!(inner as MetaList, @r###" + MetaList { + path: Path { + segments: [ + PathSegment { + ident: "foo", + arguments: None, + }, + ], + }, + nested: [ + Meta(Path(Path { + segments: [ + PathSegment { + ident: "word", + arguments: None, + }, + ], + })), + Meta(Meta::NameValue { + path: Path { + segments: [ + PathSegment { + ident: "name", + arguments: None, + }, + ], + }, + lit: 5, + }), + Meta(Meta::List { + path: Path { + segments: [ + PathSegment { + ident: "list", + arguments: None, + }, + ], + }, + nested: [ + Meta(Meta::NameValue { + path: Path { + segments: [ + PathSegment { + ident: "name2", + arguments: None, + }, + ], + }, + lit: 6, + }), + ], + }), + Meta(Path(Path { + segments: [ + PathSegment { + ident: "word2", + arguments: None, + }, + ], + })), + ], + } + "###); + + snapshot!(meta as Meta, @r###" + Meta::List { + path: Path { + segments: [ + PathSegment { + ident: "foo", + arguments: None, + }, + ], + }, + nested: [ + Meta(Path(Path { + segments: [ + PathSegment { + ident: "word", + arguments: None, + }, + ], + })), + Meta(Meta::NameValue { + path: Path { + segments: [ + PathSegment { + ident: "name", + arguments: None, + }, + ], + }, + lit: 5, + }), + Meta(Meta::List { + path: Path { + segments: [ + PathSegment { + ident: "list", + arguments: None, + }, + ], + }, + nested: [ + Meta(Meta::NameValue { + path: Path { + segments: [ + PathSegment { + ident: "name2", + arguments: None, + }, + ], + }, + lit: 6, + }), + ], + }), + Meta(Path(Path { + segments: [ + PathSegment { + ident: "word2", + arguments: None, + }, + ], + })), + ], + } + "###); + + assert_eq!(meta, inner.into()); +} + +#[test] +fn test_parse_nested_meta() { + let input = "5"; + snapshot!(input as NestedMeta, @"Lit(5)"); + + let input = "list(name2 = 6)"; + snapshot!(input as NestedMeta, @r###" + Meta(Meta::List { + path: Path { + segments: [ + PathSegment { + ident: "list", + arguments: None, + }, + ], + }, + nested: [ + Meta(Meta::NameValue { + path: Path { + segments: [ + PathSegment { + ident: "name2", + arguments: None, + }, + ], + }, + lit: 6, + }), + ], + }) + "###); +} + +#[test] +fn test_parse_path() { + let input = "::serde::Serialize"; + snapshot!(input as Meta, @r###" + Path(Path { + leading_colon: Some, + segments: [ + PathSegment { + ident: "serde", + arguments: None, + }, + PathSegment { + ident: "Serialize", + arguments: None, + }, + ], + }) + "###); + + let input = "::serde::Serialize"; + snapshot!(input as NestedMeta, @r###" + Meta(Path(Path { + leading_colon: 
Some, + segments: [ + PathSegment { + ident: "serde", + arguments: None, + }, + PathSegment { + ident: "Serialize", + arguments: None, + }, + ], + })) + "###); +} diff --git a/third_party/rust/syn/tests/test_parse_buffer.rs b/third_party/rust/syn/tests/test_parse_buffer.rs new file mode 100644 index 0000000000..cc23e9ba6c --- /dev/null +++ b/third_party/rust/syn/tests/test_parse_buffer.rs @@ -0,0 +1,92 @@ +#![allow(clippy::non_ascii_literal)] + +use proc_macro2::{Delimiter, Group, Punct, Spacing, TokenStream, TokenTree}; +use std::iter::FromIterator; +use syn::parse::{discouraged::Speculative, Parse, ParseStream, Parser, Result}; +use syn::{parenthesized, Token}; + +#[test] +#[should_panic(expected = "Fork was not derived from the advancing parse stream")] +fn smuggled_speculative_cursor_between_sources() { + struct BreakRules; + impl Parse for BreakRules { + fn parse(input1: ParseStream) -> Result<Self> { + let nested = |input2: ParseStream| { + input1.advance_to(input2); + Ok(Self) + }; + nested.parse_str("") + } + } + + syn::parse_str::<BreakRules>("").unwrap(); +} + +#[test] +#[should_panic(expected = "Fork was not derived from the advancing parse stream")] +fn smuggled_speculative_cursor_between_brackets() { + struct BreakRules; + impl Parse for BreakRules { + fn parse(input: ParseStream) -> Result<Self> { + let a; + let b; + parenthesized!(a in input); + parenthesized!(b in input); + a.advance_to(&b); + Ok(Self) + } + } + + syn::parse_str::<BreakRules>("()()").unwrap(); +} + +#[test] +#[should_panic(expected = "Fork was not derived from the advancing parse stream")] +fn smuggled_speculative_cursor_into_brackets() { + struct BreakRules; + impl Parse for BreakRules { + fn parse(input: ParseStream) -> Result<Self> { + let a; + parenthesized!(a in input); + input.advance_to(&a); + Ok(Self) + } + } + + syn::parse_str::<BreakRules>("()").unwrap(); +} + +#[test] +fn trailing_empty_none_group() { + fn parse(input: ParseStream) -> Result<()> { + input.parse::<Token![+]>()?; + + let content; + parenthesized!(content in input); + content.parse::<Token![+]>()?; + + Ok(()) + } + + // `+ ( + <Ø Ø> ) <Ø <Ø Ø> Ø>` + let tokens = TokenStream::from_iter(vec![ + TokenTree::Punct(Punct::new('+', Spacing::Alone)), + TokenTree::Group(Group::new( + Delimiter::Parenthesis, + TokenStream::from_iter(vec![ + TokenTree::Punct(Punct::new('+', Spacing::Alone)), + TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())), + ]), + )), + TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())), + TokenTree::Group(Group::new( + Delimiter::None, + TokenStream::from_iter(vec![TokenTree::Group(Group::new( + Delimiter::None, + TokenStream::new(), + ))]), + )), + ]); + + parse.parse2(tokens).unwrap(); +} diff --git a/third_party/rust/syn/tests/test_parse_stream.rs b/third_party/rust/syn/tests/test_parse_stream.rs new file mode 100644 index 0000000000..cc14fa032e --- /dev/null +++ b/third_party/rust/syn/tests/test_parse_stream.rs @@ -0,0 +1,12 @@ +use syn::ext::IdentExt; +use syn::parse::ParseStream; +use syn::{Ident, Token}; + +#[test] +fn test_peek() { + _ = |input: ParseStream| { + _ = input.peek(Ident); + _ = input.peek(Ident::peek_any); + _ = input.peek(Token![::]); + }; +} diff --git a/third_party/rust/syn/tests/test_pat.rs b/third_party/rust/syn/tests/test_pat.rs new file mode 100644 index 0000000000..695e4736ee --- /dev/null +++ b/third_party/rust/syn/tests/test_pat.rs @@ -0,0 +1,67 @@ +#[macro_use] +mod macros; + +use proc_macro2::{Delimiter, Group, TokenStream, TokenTree}; +use quote::quote; +use 
std::iter::FromIterator; +use syn::{Item, Pat, Stmt}; + +#[test] +fn test_pat_ident() { + match syn::parse2(quote!(self)).unwrap() { + Pat::Ident(_) => (), + value => panic!("expected PatIdent, got {:?}", value), + } +} + +#[test] +fn test_pat_path() { + match syn::parse2(quote!(self::CONST)).unwrap() { + Pat::Path(_) => (), + value => panic!("expected PatPath, got {:?}", value), + } +} + +#[test] +fn test_leading_vert() { + // https://github.com/rust-lang/rust/blob/1.43.0/src/test/ui/or-patterns/remove-leading-vert.rs + + syn::parse_str::<Item>("fn f() {}").unwrap(); + syn::parse_str::<Item>("fn fun1(| A: E) {}").unwrap_err(); + syn::parse_str::<Item>("fn fun2(|| A: E) {}").unwrap_err(); + + syn::parse_str::<Stmt>("let | () = ();").unwrap(); + syn::parse_str::<Stmt>("let (| A): E;").unwrap(); + syn::parse_str::<Stmt>("let (|| A): (E);").unwrap_err(); + syn::parse_str::<Stmt>("let (| A,): (E,);").unwrap(); + syn::parse_str::<Stmt>("let [| A]: [E; 1];").unwrap(); + syn::parse_str::<Stmt>("let [|| A]: [E; 1];").unwrap_err(); + syn::parse_str::<Stmt>("let TS(| A): TS;").unwrap(); + syn::parse_str::<Stmt>("let TS(|| A): TS;").unwrap_err(); + syn::parse_str::<Stmt>("let NS { f: | A }: NS;").unwrap(); + syn::parse_str::<Stmt>("let NS { f: || A }: NS;").unwrap_err(); +} + +#[test] +fn test_group() { + let group = Group::new(Delimiter::None, quote!(Some(_))); + let tokens = TokenStream::from_iter(vec![TokenTree::Group(group)]); + + snapshot!(tokens as Pat, @r###" + Pat::TupleStruct { + path: Path { + segments: [ + PathSegment { + ident: "Some", + arguments: None, + }, + ], + }, + pat: PatTuple { + elems: [ + Pat::Wild, + ], + }, + } + "###); +} diff --git a/third_party/rust/syn/tests/test_path.rs b/third_party/rust/syn/tests/test_path.rs new file mode 100644 index 0000000000..c732eff70c --- /dev/null +++ b/third_party/rust/syn/tests/test_path.rs @@ -0,0 +1,126 @@ +#[macro_use] +mod macros; + +use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree}; +use quote::{quote, ToTokens}; +use std::iter::FromIterator; +use syn::{parse_quote, Expr, Type, TypePath}; + +#[test] +fn parse_interpolated_leading_component() { + // mimics the token stream corresponding to `$mod::rest` + let tokens = TokenStream::from_iter(vec![ + TokenTree::Group(Group::new(Delimiter::None, quote! 
{ first })), + TokenTree::Punct(Punct::new(':', Spacing::Joint)), + TokenTree::Punct(Punct::new(':', Spacing::Alone)), + TokenTree::Ident(Ident::new("rest", Span::call_site())), + ]); + + snapshot!(tokens.clone() as Expr, @r###" + Expr::Path { + path: Path { + segments: [ + PathSegment { + ident: "first", + arguments: None, + }, + PathSegment { + ident: "rest", + arguments: None, + }, + ], + }, + } + "###); + + snapshot!(tokens as Type, @r###" + Type::Path { + path: Path { + segments: [ + PathSegment { + ident: "first", + arguments: None, + }, + PathSegment { + ident: "rest", + arguments: None, + }, + ], + }, + } + "###); +} + +#[test] +fn print_incomplete_qpath() { + // qpath with `as` token + let mut ty: TypePath = parse_quote!(<Self as A>::Q); + snapshot!(ty.to_token_stream(), @r###" + TokenStream(`< Self as A > :: Q`) + "###); + assert!(ty.path.segments.pop().is_some()); + snapshot!(ty.to_token_stream(), @r###" + TokenStream(`< Self as A > ::`) + "###); + assert!(ty.path.segments.pop().is_some()); + snapshot!(ty.to_token_stream(), @r###" + TokenStream(`< Self >`) + "###); + assert!(ty.path.segments.pop().is_none()); + + // qpath without `as` token + let mut ty: TypePath = parse_quote!(<Self>::A::B); + snapshot!(ty.to_token_stream(), @r###" + TokenStream(`< Self > :: A :: B`) + "###); + assert!(ty.path.segments.pop().is_some()); + snapshot!(ty.to_token_stream(), @r###" + TokenStream(`< Self > :: A ::`) + "###); + assert!(ty.path.segments.pop().is_some()); + snapshot!(ty.to_token_stream(), @r###" + TokenStream(`< Self > ::`) + "###); + assert!(ty.path.segments.pop().is_none()); + + // normal path + let mut ty: TypePath = parse_quote!(Self::A::B); + snapshot!(ty.to_token_stream(), @r###" + TokenStream(`Self :: A :: B`) + "###); + assert!(ty.path.segments.pop().is_some()); + snapshot!(ty.to_token_stream(), @r###" + TokenStream(`Self :: A ::`) + "###); + assert!(ty.path.segments.pop().is_some()); + snapshot!(ty.to_token_stream(), @r###" + TokenStream(`Self ::`) + "###); + assert!(ty.path.segments.pop().is_some()); + snapshot!(ty.to_token_stream(), @r###" + TokenStream(``) + "###); + assert!(ty.path.segments.pop().is_none()); +} + +#[test] +fn parse_parenthesized_path_arguments_with_disambiguator() { + #[rustfmt::skip] + let tokens = quote!(FnOnce::() -> !); + snapshot!(tokens as Type, @r###" + Type::Path { + path: Path { + segments: [ + PathSegment { + ident: "FnOnce", + arguments: PathArguments::Parenthesized { + output: Type( + Type::Never, + ), + }, + }, + ], + }, + } + "###); +} diff --git a/third_party/rust/syn/tests/test_precedence.rs b/third_party/rust/syn/tests/test_precedence.rs new file mode 100644 index 0000000000..dbcd74f163 --- /dev/null +++ b/third_party/rust/syn/tests/test_precedence.rs @@ -0,0 +1,460 @@ +#![cfg(not(syn_disable_nightly_tests))] +#![cfg(not(miri))] +#![recursion_limit = "1024"] +#![feature(rustc_private)] +#![allow( + clippy::explicit_deref_methods, + clippy::manual_assert, + clippy::match_wildcard_for_single_variants, + clippy::too_many_lines +)] + +//! The tests in this module do the following: +//! +//! 1. Parse a given expression in both `syn` and `librustc`. +//! 2. Fold over the expression adding brackets around each subexpression (with +//! some complications - see the `syn_brackets` and `librustc_brackets` +//! methods). +//! 3. Serialize the `syn` expression back into a string, and re-parse it with +//! `librustc`. +//! 4. Respan all of the expressions, replacing the spans with the default +//! spans. +//! 5. 
Compare the expressions with one another, if they are not equal fail. + +extern crate rustc_ast; +extern crate rustc_data_structures; +extern crate rustc_span; +extern crate thin_vec; + +use crate::common::eq::SpanlessEq; +use crate::common::parse; +use quote::quote; +use rayon::iter::{IntoParallelIterator, ParallelIterator}; +use regex::Regex; +use rustc_ast::ast; +use rustc_ast::ptr::P; +use rustc_span::edition::Edition; +use std::fs; +use std::process; +use std::sync::atomic::{AtomicUsize, Ordering}; +use walkdir::{DirEntry, WalkDir}; + +#[macro_use] +mod macros; + +#[allow(dead_code)] +mod common; + +mod repo; + +/// Test some pre-set expressions chosen by us. +#[test] +fn test_simple_precedence() { + const EXPRS: &[&str] = &[ + "1 + 2 * 3 + 4", + "1 + 2 * ( 3 + 4 )", + "{ for i in r { } *some_ptr += 1; }", + "{ loop { break 5; } }", + "{ if true { () }.mthd() }", + "{ for i in unsafe { 20 } { } }", + ]; + + let mut failed = 0; + + for input in EXPRS { + let expr = if let Some(expr) = parse::syn_expr(input) { + expr + } else { + failed += 1; + continue; + }; + + let pf = match test_expressions(Edition::Edition2018, vec![expr]) { + (1, 0) => "passed", + (0, 1) => { + failed += 1; + "failed" + } + _ => unreachable!(), + }; + errorf!("=== {}: {}\n", input, pf); + } + + if failed > 0 { + panic!("Failed {} tests", failed); + } +} + +/// Test expressions from rustc, like in `test_round_trip`. +#[test] +fn test_rustc_precedence() { + common::rayon_init(); + repo::clone_rust(); + let abort_after = common::abort_after(); + if abort_after == 0 { + panic!("Skipping all precedence tests"); + } + + let passed = AtomicUsize::new(0); + let failed = AtomicUsize::new(0); + + // 2018 edition is hard + let edition_regex = Regex::new(r"\b(async|try)[!(]").unwrap(); + + WalkDir::new("tests/rust") + .sort_by(|a, b| a.file_name().cmp(b.file_name())) + .into_iter() + .filter_entry(repo::base_dir_filter) + .collect::<Result<Vec<DirEntry>, walkdir::Error>>() + .unwrap() + .into_par_iter() + .for_each(|entry| { + let path = entry.path(); + if path.is_dir() { + return; + } + + let content = fs::read_to_string(path).unwrap(); + let content = edition_regex.replace_all(&content, "_$0"); + + let (l_passed, l_failed) = match syn::parse_file(&content) { + Ok(file) => { + let edition = repo::edition(path).parse().unwrap(); + let exprs = collect_exprs(file); + test_expressions(edition, exprs) + } + Err(msg) => { + errorf!("syn failed to parse\n{:?}\n", msg); + (0, 1) + } + }; + + errorf!( + "=== {}: {} passed | {} failed\n", + path.display(), + l_passed, + l_failed + ); + + passed.fetch_add(l_passed, Ordering::Relaxed); + let prev_failed = failed.fetch_add(l_failed, Ordering::Relaxed); + + if prev_failed + l_failed >= abort_after { + process::exit(1); + } + }); + + let passed = passed.load(Ordering::Relaxed); + let failed = failed.load(Ordering::Relaxed); + + errorf!("\n===== Precedence Test Results =====\n"); + errorf!("{} passed | {} failed\n", passed, failed); + + if failed > 0 { + panic!("{} failures", failed); + } +} + +fn test_expressions(edition: Edition, exprs: Vec<syn::Expr>) -> (usize, usize) { + let mut passed = 0; + let mut failed = 0; + + rustc_span::create_session_if_not_set_then(edition, |_| { + for expr in exprs { + let raw = quote!(#expr).to_string(); + + let librustc_ast = if let Some(e) = librustc_parse_and_rewrite(&raw) { + e + } else { + failed += 1; + errorf!("\nFAIL - librustc failed to parse raw\n"); + continue; + }; + + let syn_expr = syn_brackets(expr); + let syn_ast = if let Some(e) = 
parse::librustc_expr("e!(#syn_expr).to_string()) { + e + } else { + failed += 1; + errorf!("\nFAIL - librustc failed to parse bracketed\n"); + continue; + }; + + if SpanlessEq::eq(&syn_ast, &librustc_ast) { + passed += 1; + } else { + failed += 1; + errorf!("\nFAIL\n{:?}\n!=\n{:?}\n", syn_ast, librustc_ast); + } + } + }); + + (passed, failed) +} + +fn librustc_parse_and_rewrite(input: &str) -> Option<P<ast::Expr>> { + parse::librustc_expr(input).and_then(librustc_brackets) +} + +/// Wrap every expression which is not already wrapped in parens with parens, to +/// reveal the precedence of the parsed expressions, and produce a stringified +/// form of the resulting expression. +/// +/// This method operates on librustc objects. +fn librustc_brackets(mut librustc_expr: P<ast::Expr>) -> Option<P<ast::Expr>> { + use rustc_ast::ast::{ + Attribute, Block, BorrowKind, Expr, ExprField, ExprKind, GenericArg, Local, LocalKind, Pat, + Stmt, StmtKind, StructExpr, StructRest, Ty, + }; + use rustc_ast::mut_visit::{noop_visit_generic_arg, noop_visit_local, MutVisitor}; + use rustc_data_structures::map_in_place::MapInPlace; + use rustc_span::DUMMY_SP; + use std::mem; + use std::ops::DerefMut; + use thin_vec::ThinVec; + + struct BracketsVisitor { + failed: bool, + } + + fn flat_map_field<T: MutVisitor>(mut f: ExprField, vis: &mut T) -> Vec<ExprField> { + if f.is_shorthand { + noop_visit_expr(&mut f.expr, vis); + } else { + vis.visit_expr(&mut f.expr); + } + vec![f] + } + + fn flat_map_stmt<T: MutVisitor>(stmt: Stmt, vis: &mut T) -> Vec<Stmt> { + let kind = match stmt.kind { + // Don't wrap toplevel expressions in statements. + StmtKind::Expr(mut e) => { + noop_visit_expr(&mut e, vis); + StmtKind::Expr(e) + } + StmtKind::Semi(mut e) => { + noop_visit_expr(&mut e, vis); + StmtKind::Semi(e) + } + s => s, + }; + + vec![Stmt { kind, ..stmt }] + } + + fn noop_visit_expr<T: MutVisitor>(e: &mut Expr, vis: &mut T) { + use rustc_ast::mut_visit::{noop_visit_expr, visit_attrs}; + match &mut e.kind { + ExprKind::AddrOf(BorrowKind::Raw, ..) => {} + ExprKind::Struct(expr) => { + let StructExpr { + qself, + path, + fields, + rest, + } = expr.deref_mut(); + vis.visit_qself(qself); + vis.visit_path(path); + fields.flat_map_in_place(|field| flat_map_field(field, vis)); + if let StructRest::Base(rest) = rest { + vis.visit_expr(rest); + } + vis.visit_id(&mut e.id); + vis.visit_span(&mut e.span); + visit_attrs(&mut e.attrs, vis); + } + _ => noop_visit_expr(e, vis), + } + } + + impl MutVisitor for BracketsVisitor { + fn visit_expr(&mut self, e: &mut P<Expr>) { + match e.kind { + ExprKind::ConstBlock(..) => {} + _ => noop_visit_expr(e, self), + } + match e.kind { + ExprKind::If(..) | ExprKind::Block(..) | ExprKind::Let(..) => {} + _ => { + let inner = mem::replace( + e, + P(Expr { + id: ast::DUMMY_NODE_ID, + kind: ExprKind::Err, + span: DUMMY_SP, + attrs: ThinVec::new(), + tokens: None, + }), + ); + e.kind = ExprKind::Paren(inner); + } + } + } + + fn visit_generic_arg(&mut self, arg: &mut GenericArg) { + match arg { + // Don't wrap unbraced const generic arg as that's invalid syntax. + GenericArg::Const(anon_const) => { + if let ExprKind::Block(..) 
= &mut anon_const.value.kind { + noop_visit_expr(&mut anon_const.value, self); + } + } + _ => noop_visit_generic_arg(arg, self), + } + } + + fn visit_block(&mut self, block: &mut P<Block>) { + self.visit_id(&mut block.id); + block + .stmts + .flat_map_in_place(|stmt| flat_map_stmt(stmt, self)); + self.visit_span(&mut block.span); + } + + fn visit_local(&mut self, local: &mut P<Local>) { + match local.kind { + LocalKind::InitElse(..) => {} + _ => noop_visit_local(local, self), + } + } + + // We don't want to look at expressions that might appear in patterns or + // types yet. We'll look into comparing those in the future. For now + // focus on expressions appearing in other places. + fn visit_pat(&mut self, pat: &mut P<Pat>) { + _ = pat; + } + + fn visit_ty(&mut self, ty: &mut P<Ty>) { + _ = ty; + } + + fn visit_attribute(&mut self, attr: &mut Attribute) { + _ = attr; + } + } + + let mut folder = BracketsVisitor { failed: false }; + folder.visit_expr(&mut librustc_expr); + if folder.failed { + None + } else { + Some(librustc_expr) + } +} + +/// Wrap every expression which is not already wrapped in parens with parens, to +/// reveal the precedence of the parsed expressions, and produce a stringified +/// form of the resulting expression. +fn syn_brackets(syn_expr: syn::Expr) -> syn::Expr { + use syn::fold::{fold_expr, fold_generic_argument, fold_generic_method_argument, Fold}; + use syn::{token, Expr, ExprParen, GenericArgument, GenericMethodArgument, Pat, Stmt, Type}; + + struct ParenthesizeEveryExpr; + impl Fold for ParenthesizeEveryExpr { + fn fold_expr(&mut self, expr: Expr) -> Expr { + match expr { + Expr::Group(_) => unreachable!(), + Expr::If(..) | Expr::Unsafe(..) | Expr::Block(..) | Expr::Let(..) => { + fold_expr(self, expr) + } + _ => Expr::Paren(ExprParen { + attrs: Vec::new(), + expr: Box::new(fold_expr(self, expr)), + paren_token: token::Paren::default(), + }), + } + } + + fn fold_generic_argument(&mut self, arg: GenericArgument) -> GenericArgument { + match arg { + GenericArgument::Const(arg) => GenericArgument::Const(match arg { + Expr::Block(_) => fold_expr(self, arg), + // Don't wrap unbraced const generic arg as that's invalid syntax. + _ => arg, + }), + _ => fold_generic_argument(self, arg), + } + } + + fn fold_generic_method_argument( + &mut self, + arg: GenericMethodArgument, + ) -> GenericMethodArgument { + match arg { + GenericMethodArgument::Const(arg) => GenericMethodArgument::Const(match arg { + Expr::Block(_) => fold_expr(self, arg), + // Don't wrap unbraced const generic arg as that's invalid syntax. + _ => arg, + }), + _ => fold_generic_method_argument(self, arg), + } + } + + fn fold_stmt(&mut self, stmt: Stmt) -> Stmt { + match stmt { + // Don't wrap toplevel expressions in statements. + Stmt::Expr(e) => Stmt::Expr(fold_expr(self, e)), + Stmt::Semi(e, semi) => { + if let Expr::Verbatim(_) = e { + Stmt::Semi(e, semi) + } else { + Stmt::Semi(fold_expr(self, e), semi) + } + } + s => s, + } + } + + // We don't want to look at expressions that might appear in patterns or + // types yet. We'll look into comparing those in the future. For now + // focus on expressions appearing in other places. + fn fold_pat(&mut self, pat: Pat) -> Pat { + pat + } + + fn fold_type(&mut self, ty: Type) -> Type { + ty + } + } + + let mut folder = ParenthesizeEveryExpr; + folder.fold_expr(syn_expr) +} + +/// Walk through a crate collecting all expressions we can find in it. 
+fn collect_exprs(file: syn::File) -> Vec<syn::Expr> { + use syn::fold::Fold; + use syn::punctuated::Punctuated; + use syn::{token, ConstParam, Expr, ExprTuple, Path}; + + struct CollectExprs(Vec<Expr>); + impl Fold for CollectExprs { + fn fold_expr(&mut self, expr: Expr) -> Expr { + match expr { + Expr::Verbatim(_) => {} + _ => self.0.push(expr), + } + + Expr::Tuple(ExprTuple { + attrs: vec![], + elems: Punctuated::new(), + paren_token: token::Paren::default(), + }) + } + + fn fold_path(&mut self, path: Path) -> Path { + // Skip traversing into const generic path arguments + path + } + + fn fold_const_param(&mut self, const_param: ConstParam) -> ConstParam { + const_param + } + } + + let mut folder = CollectExprs(vec![]); + folder.fold_file(file); + folder.0 +} diff --git a/third_party/rust/syn/tests/test_receiver.rs b/third_party/rust/syn/tests/test_receiver.rs new file mode 100644 index 0000000000..923df96ba9 --- /dev/null +++ b/third_party/rust/syn/tests/test_receiver.rs @@ -0,0 +1,127 @@ +use syn::{parse_quote, FnArg, Receiver, TraitItemMethod}; + +#[test] +fn test_by_value() { + let TraitItemMethod { sig, .. } = parse_quote! { + fn by_value(self: Self); + }; + match sig.receiver() { + Some(FnArg::Typed(_)) => (), + value => panic!("expected FnArg::Typed, got {:?}", value), + } +} + +#[test] +fn test_by_mut_value() { + let TraitItemMethod { sig, .. } = parse_quote! { + fn by_mut(mut self: Self); + }; + match sig.receiver() { + Some(FnArg::Typed(_)) => (), + value => panic!("expected FnArg::Typed, got {:?}", value), + } +} + +#[test] +fn test_by_ref() { + let TraitItemMethod { sig, .. } = parse_quote! { + fn by_ref(self: &Self); + }; + match sig.receiver() { + Some(FnArg::Typed(_)) => (), + value => panic!("expected FnArg::Typed, got {:?}", value), + } +} + +#[test] +fn test_by_box() { + let TraitItemMethod { sig, .. } = parse_quote! { + fn by_box(self: Box<Self>); + }; + match sig.receiver() { + Some(FnArg::Typed(_)) => (), + value => panic!("expected FnArg::Typed, got {:?}", value), + } +} + +#[test] +fn test_by_pin() { + let TraitItemMethod { sig, .. } = parse_quote! { + fn by_pin(self: Pin<Self>); + }; + match sig.receiver() { + Some(FnArg::Typed(_)) => (), + value => panic!("expected FnArg::Typed, got {:?}", value), + } +} + +#[test] +fn test_explicit_type() { + let TraitItemMethod { sig, .. } = parse_quote! { + fn explicit_type(self: Pin<MyType>); + }; + match sig.receiver() { + Some(FnArg::Typed(_)) => (), + value => panic!("expected FnArg::Typed, got {:?}", value), + } +} + +#[test] +fn test_value_shorthand() { + let TraitItemMethod { sig, .. } = parse_quote! { + fn value_shorthand(self); + }; + match sig.receiver() { + Some(FnArg::Receiver(Receiver { + reference: None, + mutability: None, + .. + })) => (), + value => panic!("expected FnArg::Receiver without ref/mut, got {:?}", value), + } +} + +#[test] +fn test_mut_value_shorthand() { + let TraitItemMethod { sig, .. } = parse_quote! { + fn mut_value_shorthand(mut self); + }; + match sig.receiver() { + Some(FnArg::Receiver(Receiver { + reference: None, + mutability: Some(_), + .. + })) => (), + value => panic!("expected FnArg::Receiver with mut, got {:?}", value), + } +} + +#[test] +fn test_ref_shorthand() { + let TraitItemMethod { sig, .. } = parse_quote! { + fn ref_shorthand(&self); + }; + match sig.receiver() { + Some(FnArg::Receiver(Receiver { + reference: Some(_), + mutability: None, + .. 
+ })) => (), + value => panic!("expected FnArg::Receiver with ref, got {:?}", value), + } +} + +#[test] +fn test_ref_mut_shorthand() { + let TraitItemMethod { sig, .. } = parse_quote! { + fn ref_mut_shorthand(&mut self); + }; + match sig.receiver() { + Some(FnArg::Receiver(Receiver { + reference: Some(_), + mutability: Some(_), + .. + })) => (), + value => panic!("expected FnArg::Receiver with ref+mut, got {:?}", value), + } +} diff --git a/third_party/rust/syn/tests/test_round_trip.rs b/third_party/rust/syn/tests/test_round_trip.rs new file mode 100644 index 0000000000..9a5801d449 --- /dev/null +++ b/third_party/rust/syn/tests/test_round_trip.rs @@ -0,0 +1,241 @@ +#![cfg(not(syn_disable_nightly_tests))] +#![cfg(not(miri))] +#![recursion_limit = "1024"] +#![feature(rustc_private)] +#![allow(clippy::manual_assert)] + +extern crate rustc_ast; +extern crate rustc_data_structures; +extern crate rustc_error_messages; +extern crate rustc_errors; +extern crate rustc_expand; +extern crate rustc_parse as parse; +extern crate rustc_session; +extern crate rustc_span; + +use crate::common::eq::SpanlessEq; +use quote::quote; +use rayon::iter::{IntoParallelIterator, ParallelIterator}; +use rustc_ast::ast::{ + AngleBracketedArg, AngleBracketedArgs, Crate, GenericArg, GenericParamKind, Generics, + WhereClause, +}; +use rustc_ast::mut_visit::{self, MutVisitor}; +use rustc_error_messages::{DiagnosticMessage, LazyFallbackBundle}; +use rustc_errors::{translation, Diagnostic, PResult}; +use rustc_session::parse::ParseSess; +use rustc_span::source_map::FilePathMapping; +use rustc_span::FileName; +use std::fs; +use std::panic; +use std::path::Path; +use std::process; +use std::sync::atomic::{AtomicUsize, Ordering}; +use std::time::Instant; +use walkdir::{DirEntry, WalkDir}; + +#[macro_use] +mod macros; + +#[allow(dead_code)] +mod common; + +mod repo; + +#[test] +fn test_round_trip() { + common::rayon_init(); + repo::clone_rust(); + let abort_after = common::abort_after(); + if abort_after == 0 { + panic!("Skipping all round_trip tests"); + } + + let failed = AtomicUsize::new(0); + + WalkDir::new("tests/rust") + .sort_by(|a, b| a.file_name().cmp(b.file_name())) + .into_iter() + .filter_entry(repo::base_dir_filter) + .collect::<Result<Vec<DirEntry>, walkdir::Error>>() + .unwrap() + .into_par_iter() + .for_each(|entry| { + let path = entry.path(); + if !path.is_dir() { + test(path, &failed, abort_after); + } + }); + + let failed = failed.load(Ordering::Relaxed); + if failed > 0 { + panic!("{} failures", failed); + } +} + +fn test(path: &Path, failed: &AtomicUsize, abort_after: usize) { + let content = fs::read_to_string(path).unwrap(); + + let start = Instant::now(); + let (krate, elapsed) = match syn::parse_file(&content) { + Ok(krate) => (krate, start.elapsed()), + Err(msg) => { + errorf!("=== {}: syn failed to parse\n{:?}\n", path.display(), msg); + let prev_failed = failed.fetch_add(1, Ordering::Relaxed); + if prev_failed + 1 >= abort_after { + process::exit(1); + } + return; + } + }; + let back = quote!(#krate).to_string(); + let edition = repo::edition(path).parse().unwrap(); + + rustc_span::create_session_if_not_set_then(edition, |_| { + let equal = match panic::catch_unwind(|| { + let sess = ParseSess::new(FilePathMapping::empty()); + let before = match librustc_parse(content, &sess) { + Ok(before) => before, + Err(diagnostic) => { + errorf!( + "=== {}: ignore - librustc failed to parse original content: {}\n", + path.display(), + translate_message(&diagnostic), + ); + diagnostic.cancel(); + return 
Err(true); + } + }; + let after = match librustc_parse(back, &sess) { + Ok(after) => after, + Err(mut diagnostic) => { + errorf!("=== {}: librustc failed to parse", path.display()); + diagnostic.emit(); + return Err(false); + } + }; + Ok((before, after)) + }) { + Err(_) => { + errorf!("=== {}: ignoring librustc panic\n", path.display()); + true + } + Ok(Err(equal)) => equal, + Ok(Ok((mut before, mut after))) => { + normalize(&mut before); + normalize(&mut after); + if SpanlessEq::eq(&before, &after) { + errorf!( + "=== {}: pass in {}ms\n", + path.display(), + elapsed.as_secs() * 1000 + u64::from(elapsed.subsec_nanos()) / 1_000_000 + ); + true + } else { + errorf!( + "=== {}: FAIL\nbefore: {:#?}\nafter: {:#?}\n", + path.display(), + before, + after, + ); + false + } + } + }; + if !equal { + let prev_failed = failed.fetch_add(1, Ordering::Relaxed); + if prev_failed + 1 >= abort_after { + process::exit(1); + } + } + }); +} + +fn librustc_parse(content: String, sess: &ParseSess) -> PResult<Crate> { + static COUNTER: AtomicUsize = AtomicUsize::new(0); + let counter = COUNTER.fetch_add(1, Ordering::Relaxed); + let name = FileName::Custom(format!("test_round_trip{}", counter)); + parse::parse_crate_from_source_str(name, content, sess) +} + +fn translate_message(diagnostic: &Diagnostic) -> String { + thread_local! { + static FLUENT_BUNDLE: LazyFallbackBundle = { + let resources = rustc_error_messages::DEFAULT_LOCALE_RESOURCES; + let with_directionality_markers = false; + rustc_error_messages::fallback_fluent_bundle(resources, with_directionality_markers) + }; + } + + let message = &diagnostic.message[0].0; + let args = translation::to_fluent_args(diagnostic.args()); + + let (identifier, attr) = match message { + DiagnosticMessage::Str(msg) | DiagnosticMessage::Eager(msg) => return msg.clone(), + DiagnosticMessage::FluentIdentifier(identifier, attr) => (identifier, attr), + }; + + FLUENT_BUNDLE.with(|fluent_bundle| { + let message = fluent_bundle + .get_message(identifier) + .expect("missing diagnostic in fluent bundle"); + let value = match attr { + Some(attr) => message + .get_attribute(attr) + .expect("missing attribute in fluent message") + .value(), + None => message.value().expect("missing value in fluent message"), + }; + + let mut err = Vec::new(); + let translated = fluent_bundle.format_pattern(value, Some(&args), &mut err); + assert!(err.is_empty()); + translated.into_owned() + }) +} + +fn normalize(krate: &mut Crate) { + struct NormalizeVisitor; + + impl MutVisitor for NormalizeVisitor { + fn visit_angle_bracketed_parameter_data(&mut self, e: &mut AngleBracketedArgs) { + #[derive(Ord, PartialOrd, Eq, PartialEq)] + enum Group { + Lifetimes, + TypesAndConsts, + Constraints, + } + e.args.sort_by_key(|arg| match arg { + AngleBracketedArg::Arg(arg) => match arg { + GenericArg::Lifetime(_) => Group::Lifetimes, + GenericArg::Type(_) | GenericArg::Const(_) => Group::TypesAndConsts, + }, + AngleBracketedArg::Constraint(_) => Group::Constraints, + }); + mut_visit::noop_visit_angle_bracketed_parameter_data(e, self); + } + + fn visit_generics(&mut self, e: &mut Generics) { + #[derive(Ord, PartialOrd, Eq, PartialEq)] + enum Group { + Lifetimes, + TypesAndConsts, + } + e.params.sort_by_key(|param| match param.kind { + GenericParamKind::Lifetime => Group::Lifetimes, + GenericParamKind::Type { .. } | GenericParamKind::Const { .. 
} => { + Group::TypesAndConsts + } + }); + mut_visit::noop_visit_generics(e, self); + } + + fn visit_where_clause(&mut self, e: &mut WhereClause) { + if e.predicates.is_empty() { + e.has_where_token = false; + } + } + } + + NormalizeVisitor.visit_crate(krate); +} diff --git a/third_party/rust/syn/tests/test_shebang.rs b/third_party/rust/syn/tests/test_shebang.rs new file mode 100644 index 0000000000..dc26b9aab3 --- /dev/null +++ b/third_party/rust/syn/tests/test_shebang.rs @@ -0,0 +1,59 @@ +#[macro_use] +mod macros; + +#[test] +fn test_basic() { + let content = "#!/usr/bin/env rustx\nfn main() {}"; + let file = syn::parse_file(content).unwrap(); + snapshot!(file, @r###" + File { + shebang: Some("#!/usr/bin/env rustx"), + items: [ + Item::Fn { + vis: Inherited, + sig: Signature { + ident: "main", + generics: Generics, + output: Default, + }, + block: Block, + }, + ], + } + "###); +} + +#[test] +fn test_comment() { + let content = "#!//am/i/a/comment\n[allow(dead_code)] fn main() {}"; + let file = syn::parse_file(content).unwrap(); + snapshot!(file, @r###" + File { + attrs: [ + Attribute { + style: Inner, + path: Path { + segments: [ + PathSegment { + ident: "allow", + arguments: None, + }, + ], + }, + tokens: TokenStream(`(dead_code)`), + }, + ], + items: [ + Item::Fn { + vis: Inherited, + sig: Signature { + ident: "main", + generics: Generics, + output: Default, + }, + block: Block, + }, + ], + } + "###); +} diff --git a/third_party/rust/syn/tests/test_should_parse.rs b/third_party/rust/syn/tests/test_should_parse.rs new file mode 100644 index 0000000000..180d859916 --- /dev/null +++ b/third_party/rust/syn/tests/test_should_parse.rs @@ -0,0 +1,45 @@ +macro_rules! should_parse { + ($name:ident, { $($in:tt)* }) => { + #[test] + fn $name() { + // Make sure we can parse the file! 
+ syn::parse_file(stringify!($($in)*)).unwrap(); + } + } +} + +should_parse!(generic_associated_type, { + impl Foo { + type Item = &'a i32; + fn foo<'a>(&'a self) -> Self::Item<'a> {} + } +}); + +#[rustfmt::skip] +should_parse!(const_generics_use, { + type X = Foo<5>; + type Y = Foo<"foo">; + type Z = Foo<X>; + type W = Foo<{ X + 10 }>; +}); + +should_parse!(trailing_plus_type, { + type A = Box<Foo>; + type A = Box<Foo + 'a>; + type A = Box<'a + Foo>; +}); + +should_parse!(generic_associated_type_where, { + trait Foo { + type Item; + fn foo<T>(&self, t: T) -> Self::Item<T>; + } +}); + +should_parse!(match_with_block_expr, { + fn main() { + match false { + _ => {}.a(), + } + } +}); diff --git a/third_party/rust/syn/tests/test_size.rs b/third_party/rust/syn/tests/test_size.rs new file mode 100644 index 0000000000..32c6edaed6 --- /dev/null +++ b/third_party/rust/syn/tests/test_size.rs @@ -0,0 +1,29 @@ +#![cfg(target_pointer_width = "64")] + +use std::mem; +use syn::{Expr, Item, Lit, Pat, Type}; + +#[test] +fn test_expr_size() { + assert_eq!(mem::size_of::<Expr>(), 272); +} + +#[test] +fn test_item_size() { + assert_eq!(mem::size_of::<Item>(), 320); +} + +#[test] +fn test_type_size() { + assert_eq!(mem::size_of::<Type>(), 288); +} + +#[test] +fn test_pat_size() { + assert_eq!(mem::size_of::<Pat>(), 144); +} + +#[test] +fn test_lit_size() { + assert_eq!(mem::size_of::<Lit>(), 32); +} diff --git a/third_party/rust/syn/tests/test_stmt.rs b/third_party/rust/syn/tests/test_stmt.rs new file mode 100644 index 0000000000..f444e5b49e --- /dev/null +++ b/third_party/rust/syn/tests/test_stmt.rs @@ -0,0 +1,93 @@ +#![allow(clippy::assertions_on_result_states, clippy::non_ascii_literal)] + +#[macro_use] +mod macros; + +use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree}; +use quote::quote; +use std::iter::FromIterator; +use syn::Stmt; + +#[test] +fn test_raw_operator() { + let stmt = syn::parse_str::<Stmt>("let _ = &raw const x;").unwrap(); + + snapshot!(stmt, @r###" + Local(Local { + pat: Pat::Wild, + init: Some(Verbatim(`& raw const x`)), + }) + "###); +} + +#[test] +fn test_raw_variable() { + let stmt = syn::parse_str::<Stmt>("let _ = &raw;").unwrap(); + + snapshot!(stmt, @r###" + Local(Local { + pat: Pat::Wild, + init: Some(Expr::Reference { + expr: Expr::Path { + path: Path { + segments: [ + PathSegment { + ident: "raw", + arguments: None, + }, + ], + }, + }, + }), + }) + "###); +} + +#[test] +fn test_raw_invalid() { + assert!(syn::parse_str::<Stmt>("let _ = &raw x;").is_err()); +} + +#[test] +fn test_none_group() { + // <Ø async fn f() {} Ø> + let tokens = TokenStream::from_iter(vec![TokenTree::Group(Group::new( + Delimiter::None, + TokenStream::from_iter(vec![ + TokenTree::Ident(Ident::new("async", Span::call_site())), + TokenTree::Ident(Ident::new("fn", Span::call_site())), + TokenTree::Ident(Ident::new("f", Span::call_site())), + TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())), + TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())), + ]), + ))]); + + snapshot!(tokens as Stmt, @r###" + Item(Item::Fn { + vis: Inherited, + sig: Signature { + asyncness: Some, + ident: "f", + generics: Generics, + output: Default, + }, + block: Block, + }) + "###); +} + +#[test] +fn test_let_dot_dot() { + let tokens = quote! { + let .. 
= 10; + }; + + snapshot!(tokens as Stmt, @r###" + Local(Local { + pat: Pat::Rest, + init: Some(Expr::Lit { + lit: 10, + }), + }) + "###); +} diff --git a/third_party/rust/syn/tests/test_token_trees.rs b/third_party/rust/syn/tests/test_token_trees.rs new file mode 100644 index 0000000000..5b00448af8 --- /dev/null +++ b/third_party/rust/syn/tests/test_token_trees.rs @@ -0,0 +1,30 @@ +#[macro_use] +mod macros; + +use proc_macro2::TokenStream; +use quote::quote; +use syn::Lit; + +#[test] +fn test_struct() { + let input = " + #[derive(Debug, Clone)] + pub struct Item { + pub ident: Ident, + pub attrs: Vec<Attribute>, + } + "; + + snapshot!(input as TokenStream, @r###" + TokenStream( + `# [derive (Debug , Clone)] pub struct Item { pub ident : Ident , pub attrs : Vec < Attribute >, }`, + ) + "###); +} + +#[test] +fn test_literal_mangling() { + let code = "0_4"; + let parsed: Lit = syn::parse_str(code).unwrap(); + assert_eq!(code, quote!(#parsed).to_string()); +} diff --git a/third_party/rust/syn/tests/test_ty.rs b/third_party/rust/syn/tests/test_ty.rs new file mode 100644 index 0000000000..335cafa2ac --- /dev/null +++ b/third_party/rust/syn/tests/test_ty.rs @@ -0,0 +1,352 @@ +#[macro_use] +mod macros; + +use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree}; +use quote::quote; +use std::iter::FromIterator; +use syn::Type; + +#[test] +fn test_mut_self() { + syn::parse_str::<Type>("fn(mut self)").unwrap(); + syn::parse_str::<Type>("fn(mut self,)").unwrap(); + syn::parse_str::<Type>("fn(mut self: ())").unwrap(); + syn::parse_str::<Type>("fn(mut self: ...)").unwrap_err(); + syn::parse_str::<Type>("fn(mut self: mut self)").unwrap_err(); + syn::parse_str::<Type>("fn(mut self::T)").unwrap_err(); +} + +#[test] +fn test_macro_variable_type() { + // mimics the token stream corresponding to `$ty<T>` + let tokens = TokenStream::from_iter(vec![ + TokenTree::Group(Group::new(Delimiter::None, quote! { ty })), + TokenTree::Punct(Punct::new('<', Spacing::Alone)), + TokenTree::Ident(Ident::new("T", Span::call_site())), + TokenTree::Punct(Punct::new('>', Spacing::Alone)), + ]); + + snapshot!(tokens as Type, @r###" + Type::Path { + path: Path { + segments: [ + PathSegment { + ident: "ty", + arguments: PathArguments::AngleBracketed { + args: [ + Type(Type::Path { + path: Path { + segments: [ + PathSegment { + ident: "T", + arguments: None, + }, + ], + }, + }), + ], + }, + }, + ], + }, + } + "###); + + // mimics the token stream corresponding to `$ty::<T>` + let tokens = TokenStream::from_iter(vec![ + TokenTree::Group(Group::new(Delimiter::None, quote! { ty })), + TokenTree::Punct(Punct::new(':', Spacing::Joint)), + TokenTree::Punct(Punct::new(':', Spacing::Alone)), + TokenTree::Punct(Punct::new('<', Spacing::Alone)), + TokenTree::Ident(Ident::new("T", Span::call_site())), + TokenTree::Punct(Punct::new('>', Spacing::Alone)), + ]); + + snapshot!(tokens as Type, @r###" + Type::Path { + path: Path { + segments: [ + PathSegment { + ident: "ty", + arguments: PathArguments::AngleBracketed { + colon2_token: Some, + args: [ + Type(Type::Path { + path: Path { + segments: [ + PathSegment { + ident: "T", + arguments: None, + }, + ], + }, + }), + ], + }, + }, + ], + }, + } + "###); +} + +#[test] +fn test_group_angle_brackets() { + // mimics the token stream corresponding to `Option<$ty>` + let tokens = TokenStream::from_iter(vec![ + TokenTree::Ident(Ident::new("Option", Span::call_site())), + TokenTree::Punct(Punct::new('<', Spacing::Alone)), + TokenTree::Group(Group::new(Delimiter::None, quote! 
{ Vec<u8> })), + TokenTree::Punct(Punct::new('>', Spacing::Alone)), + ]); + + snapshot!(tokens as Type, @r###" + Type::Path { + path: Path { + segments: [ + PathSegment { + ident: "Option", + arguments: PathArguments::AngleBracketed { + args: [ + Type(Type::Group { + elem: Type::Path { + path: Path { + segments: [ + PathSegment { + ident: "Vec", + arguments: PathArguments::AngleBracketed { + args: [ + Type(Type::Path { + path: Path { + segments: [ + PathSegment { + ident: "u8", + arguments: None, + }, + ], + }, + }), + ], + }, + }, + ], + }, + }, + }), + ], + }, + }, + ], + }, + } + "###); +} + +#[test] +fn test_group_colons() { + // mimics the token stream corresponding to `$ty::Item` + let tokens = TokenStream::from_iter(vec![ + TokenTree::Group(Group::new(Delimiter::None, quote! { Vec<u8> })), + TokenTree::Punct(Punct::new(':', Spacing::Joint)), + TokenTree::Punct(Punct::new(':', Spacing::Alone)), + TokenTree::Ident(Ident::new("Item", Span::call_site())), + ]); + + snapshot!(tokens as Type, @r###" + Type::Path { + path: Path { + segments: [ + PathSegment { + ident: "Vec", + arguments: PathArguments::AngleBracketed { + args: [ + Type(Type::Path { + path: Path { + segments: [ + PathSegment { + ident: "u8", + arguments: None, + }, + ], + }, + }), + ], + }, + }, + PathSegment { + ident: "Item", + arguments: None, + }, + ], + }, + } + "###); + + let tokens = TokenStream::from_iter(vec![ + TokenTree::Group(Group::new(Delimiter::None, quote! { [T] })), + TokenTree::Punct(Punct::new(':', Spacing::Joint)), + TokenTree::Punct(Punct::new(':', Spacing::Alone)), + TokenTree::Ident(Ident::new("Element", Span::call_site())), + ]); + + snapshot!(tokens as Type, @r###" + Type::Path { + qself: Some(QSelf { + ty: Type::Slice { + elem: Type::Path { + path: Path { + segments: [ + PathSegment { + ident: "T", + arguments: None, + }, + ], + }, + }, + }, + position: 0, + }), + path: Path { + leading_colon: Some, + segments: [ + PathSegment { + ident: "Element", + arguments: None, + }, + ], + }, + } + "###); +} + +#[test] +fn test_trait_object() { + let tokens = quote!(dyn for<'a> Trait<'a> + 'static); + snapshot!(tokens as Type, @r###" + Type::TraitObject { + dyn_token: Some, + bounds: [ + Trait(TraitBound { + modifier: None, + lifetimes: Some(BoundLifetimes { + lifetimes: [ + LifetimeDef { + lifetime: Lifetime { + ident: "a", + }, + }, + ], + }), + path: Path { + segments: [ + PathSegment { + ident: "Trait", + arguments: PathArguments::AngleBracketed { + args: [ + Lifetime(Lifetime { + ident: "a", + }), + ], + }, + }, + ], + }, + }), + Lifetime(Lifetime { + ident: "static", + }), + ], + } + "###); + + let tokens = quote!(dyn 'a + Trait); + snapshot!(tokens as Type, @r###" + Type::TraitObject { + dyn_token: Some, + bounds: [ + Lifetime(Lifetime { + ident: "a", + }), + Trait(TraitBound { + modifier: None, + path: Path { + segments: [ + PathSegment { + ident: "Trait", + arguments: None, + }, + ], + }, + }), + ], + } + "###); + + // None of the following are valid Rust types. 
+ syn::parse_str::<Type>("for<'a> dyn Trait<'a>").unwrap_err(); + syn::parse_str::<Type>("dyn for<'a> 'a + Trait").unwrap_err(); +} + +#[test] +fn test_trailing_plus() { + #[rustfmt::skip] + let tokens = quote!(impl Trait +); + snapshot!(tokens as Type, @r###" + Type::ImplTrait { + bounds: [ + Trait(TraitBound { + modifier: None, + path: Path { + segments: [ + PathSegment { + ident: "Trait", + arguments: None, + }, + ], + }, + }), + ], + } + "###); + + #[rustfmt::skip] + let tokens = quote!(dyn Trait +); + snapshot!(tokens as Type, @r###" + Type::TraitObject { + dyn_token: Some, + bounds: [ + Trait(TraitBound { + modifier: None, + path: Path { + segments: [ + PathSegment { + ident: "Trait", + arguments: None, + }, + ], + }, + }), + ], + } + "###); + + #[rustfmt::skip] + let tokens = quote!(Trait +); + snapshot!(tokens as Type, @r###" + Type::TraitObject { + bounds: [ + Trait(TraitBound { + modifier: None, + path: Path { + segments: [ + PathSegment { + ident: "Trait", + arguments: None, + }, + ], + }, + }), + ], + } + "###); +} diff --git a/third_party/rust/syn/tests/test_visibility.rs b/third_party/rust/syn/tests/test_visibility.rs new file mode 100644 index 0000000000..7b2c00ba34 --- /dev/null +++ b/third_party/rust/syn/tests/test_visibility.rs @@ -0,0 +1,148 @@ +#[macro_use] +mod macros; + +use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree}; +use std::iter::FromIterator; +use syn::parse::{Parse, ParseStream}; +use syn::{DeriveInput, Result, Visibility}; + +#[derive(Debug)] +struct VisRest { + vis: Visibility, + rest: TokenStream, +} + +impl Parse for VisRest { + fn parse(input: ParseStream) -> Result<Self> { + Ok(VisRest { + vis: input.parse()?, + rest: input.parse()?, + }) + } +} + +macro_rules! assert_vis_parse { + ($input:expr, Ok($p:pat)) => { + assert_vis_parse!($input, Ok($p) + ""); + }; + + ($input:expr, Ok($p:pat) + $rest:expr) => { + let expected = $rest.parse::<TokenStream>().unwrap(); + let parse: VisRest = syn::parse_str($input).unwrap(); + + match parse.vis { + $p => {} + _ => panic!("Expected {}, got {:?}", stringify!($p), parse.vis), + } + + // NOTE: Round-trips through `to_string` to avoid potential whitespace + // diffs. 
+ assert_eq!(parse.rest.to_string(), expected.to_string()); + }; + + ($input:expr, Err) => { + syn::parse2::<VisRest>($input.parse().unwrap()).unwrap_err(); + }; +} + +#[test] +fn test_pub() { + assert_vis_parse!("pub", Ok(Visibility::Public(_))); +} + +#[test] +fn test_crate() { + assert_vis_parse!("crate", Ok(Visibility::Crate(_))); +} + +#[test] +fn test_inherited() { + assert_vis_parse!("", Ok(Visibility::Inherited)); +} + +#[test] +fn test_in() { + assert_vis_parse!("pub(in foo::bar)", Ok(Visibility::Restricted(_))); +} + +#[test] +fn test_pub_crate() { + assert_vis_parse!("pub(crate)", Ok(Visibility::Restricted(_))); +} + +#[test] +fn test_pub_self() { + assert_vis_parse!("pub(self)", Ok(Visibility::Restricted(_))); +} + +#[test] +fn test_pub_super() { + assert_vis_parse!("pub(super)", Ok(Visibility::Restricted(_))); +} + +#[test] +fn test_missing_in() { + assert_vis_parse!("pub(foo::bar)", Ok(Visibility::Public(_)) + "(foo::bar)"); +} + +#[test] +fn test_missing_in_path() { + assert_vis_parse!("pub(in)", Err); +} + +#[test] +fn test_crate_path() { + assert_vis_parse!( + "pub(crate::A, crate::B)", + Ok(Visibility::Public(_)) + "(crate::A, crate::B)" + ); +} + +#[test] +fn test_junk_after_in() { + assert_vis_parse!("pub(in some::path @@garbage)", Err); +} + +#[test] +fn test_empty_group_vis() { + // mimics `struct S { $vis $field: () }` where $vis is empty + let tokens = TokenStream::from_iter(vec![ + TokenTree::Ident(Ident::new("struct", Span::call_site())), + TokenTree::Ident(Ident::new("S", Span::call_site())), + TokenTree::Group(Group::new( + Delimiter::Brace, + TokenStream::from_iter(vec![ + TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())), + TokenTree::Group(Group::new( + Delimiter::None, + TokenStream::from_iter(vec![TokenTree::Ident(Ident::new( + "f", + Span::call_site(), + ))]), + )), + TokenTree::Punct(Punct::new(':', Spacing::Alone)), + TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())), + ]), + )), + ]); + + snapshot!(tokens as DeriveInput, @r###" + DeriveInput { + vis: Inherited, + ident: "S", + generics: Generics, + data: Data::Struct { + fields: Fields::Named { + named: [ + Field { + vis: Inherited, + ident: Some("f"), + colon_token: Some, + ty: Type::Tuple, + }, + ], + }, + }, + } + "###); +} diff --git a/third_party/rust/syn/tests/zzz_stable.rs b/third_party/rust/syn/tests/zzz_stable.rs new file mode 100644 index 0000000000..a1a670d9ed --- /dev/null +++ b/third_party/rust/syn/tests/zzz_stable.rs @@ -0,0 +1,33 @@ +#![cfg(syn_disable_nightly_tests)] + +use std::io::{self, Write}; +use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor}; + +const MSG: &str = "\ +‖ +‖ WARNING: +‖ This is not a nightly compiler so not all tests were able to +‖ run. Syn includes tests that compare Syn's parser against the +‖ compiler's parser, which requires access to unstable librustc +‖ data structures and a nightly compiler. 
+‖ +"; + +#[test] +fn notice() -> io::Result<()> { + let header = "WARNING"; + let index_of_header = MSG.find(header).unwrap(); + let before = &MSG[..index_of_header]; + let after = &MSG[index_of_header + header.len()..]; + + let mut stderr = StandardStream::stderr(ColorChoice::Auto); + stderr.set_color(ColorSpec::new().set_fg(Some(Color::Yellow)))?; + write!(&mut stderr, "{}", before)?; + stderr.set_color(ColorSpec::new().set_bold(true).set_fg(Some(Color::Yellow)))?; + write!(&mut stderr, "{}", header)?; + stderr.set_color(ColorSpec::new().set_fg(Some(Color::Yellow)))?; + write!(&mut stderr, "{}", after)?; + stderr.reset()?; + + Ok(()) +} diff --git a/third_party/rust/sync-guid/.cargo-checksum.json b/third_party/rust/sync-guid/.cargo-checksum.json new file mode 100644 index 0000000000..d3532cd1c8 --- /dev/null +++ b/third_party/rust/sync-guid/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{"Cargo.toml":"3fc60ab9c3584d8f1951345d877a977ab080f9e44454237bd10fc8bbaf584478","src/lib.rs":"b8bc0f7eeeb61e8d5df4ce85dfd83f7c1aa9a66735212a3ef21fb6ef21387871","src/rusqlite_support.rs":"827d314605d8c741efdf238a0780a891c88bc56026a3e6dcfa534772a4852fb3","src/serde_support.rs":"519b5eb59ca7be555d522f2186909db969069dc9586a5fe4047d4ec176b2368a"},"package":null}
\ No newline at end of file diff --git a/third_party/rust/sync-guid/Cargo.toml b/third_party/rust/sync-guid/Cargo.toml new file mode 100644 index 0000000000..8f142d7d87 --- /dev/null +++ b/third_party/rust/sync-guid/Cargo.toml @@ -0,0 +1,46 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g., crates.io) dependencies. +# +# If you are reading this file be aware that the original Cargo.toml +# will likely look very different (and much more reasonable). +# See Cargo.toml.orig for the original contents. + +[package] +edition = "2021" +name = "sync-guid" +version = "0.1.0" +authors = ["Thom Chiovoloni <tchiovoloni@mozilla.com>"] +license = "MPL-2.0" + +[dependencies.base64] +version = "0.13" +optional = true + +[dependencies.rand] +version = "0.8" +optional = true + +[dependencies.rusqlite] +version = "0.28.0" +features = ["bundled"] +optional = true + +[dependencies.serde] +version = "1" +optional = true + +[dev-dependencies] +serde_test = "1" + +[features] +default = ["serde_support"] +random = [ + "rand", + "base64", +] +rusqlite_support = ["rusqlite"] +serde_support = ["serde"] diff --git a/third_party/rust/sync-guid/src/lib.rs b/third_party/rust/sync-guid/src/lib.rs new file mode 100644 index 0000000000..f35f407f30 --- /dev/null +++ b/third_party/rust/sync-guid/src/lib.rs @@ -0,0 +1,482 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#![allow(unknown_lints)] +#![warn(rust_2018_idioms)] +// (It's tempting to avoid the utf8 checks, but they're easy to get wrong, so) +#![deny(unsafe_code)] +#[cfg(feature = "serde_support")] +mod serde_support; + +#[cfg(feature = "rusqlite_support")] +mod rusqlite_support; + +use std::{ + cmp::Ordering, + fmt, + hash::{Hash, Hasher}, + ops, str, +}; + +/// This is a type intended to be used to represent the guids used by sync. It +/// has several benefits over using a `String`: +/// +/// 1. It's more explicit about what is being stored, and could prevent bugs +/// where a Guid is passed to a function expecting text. +/// +/// 2. Guids are guaranteed to be immutable. +/// +/// 3. It's optimized for the guids commonly used by sync. In particular, short guids +/// (including the guids which would meet `PlacesUtils.isValidGuid`) do not incur +/// any heap allocation, and are stored inline. +#[derive(Clone)] +pub struct Guid(Repr); + +// The internal representation of a GUID. Most Sync GUIDs are 12 bytes, +// and contain only base64url characters; we can store them on the stack +// without a heap allocation. However, arbitrary ascii guids of up to length 64 +// are possible, in which case we fall back to a heap-allocated string. +// +// This is separate only because making `Guid` an enum would expose the +// internals. +#[derive(Clone)] +enum Repr { + // see FastGuid for invariants + Fast(FastGuid), + + // invariants: + // - _0.len() > MAX_FAST_GUID_LEN + Slow(String), +} + +/// Invariants: +/// +/// - `len <= MAX_FAST_GUID_LEN`. +/// - `data[0..len]` encodes valid utf8. +/// - `data[len..].iter().all(|&b| b == b'\0')` +/// +/// Note: None of these are required for memory safety, just correctness. 
+#[derive(Clone)] +struct FastGuid { + len: u8, + data: [u8; MAX_FAST_GUID_LEN], +} + +// This is the maximum length (experimentally determined) we can make it before +// `Repr::Fast` is larger than `Guid::Slow` on 32 bit systems. The important +// thing is really that it's not too big, and is above 12 bytes. +const MAX_FAST_GUID_LEN: usize = 14; + +impl FastGuid { + #[inline] + fn from_slice(bytes: &[u8]) -> Self { + // Checked by the caller, so debug_assert is fine. + debug_assert!( + can_use_fast(bytes), + "Bug: Caller failed to check can_use_fast: {:?}", + bytes + ); + let mut data = [0u8; MAX_FAST_GUID_LEN]; + data[0..bytes.len()].copy_from_slice(bytes); + FastGuid { + len: bytes.len() as u8, + data, + } + } + + #[inline] + fn as_str(&self) -> &str { + // Note: we only use debug_assert! to enusre valid utf8-ness, so this need + str::from_utf8(self.bytes()).expect("Invalid fast guid bytes!") + } + + #[inline] + fn len(&self) -> usize { + self.len as usize + } + + #[inline] + fn bytes(&self) -> &[u8] { + &self.data[0..self.len()] + } +} + +// Returns: +// - true to use Repr::Fast +// - false to use Repr::Slow +#[inline] +fn can_use_fast<T: ?Sized + AsRef<[u8]>>(bytes: &T) -> bool { + let bytes = bytes.as_ref(); + // This is fine as a debug_assert since we'll still panic if it's ever used + // in such a way where it would matter. + debug_assert!(str::from_utf8(bytes).is_ok()); + bytes.len() <= MAX_FAST_GUID_LEN +} + +impl Guid { + /// Create a guid from a `str`. + #[inline] + pub fn new(s: &str) -> Self { + Guid::from_slice(s.as_ref()) + } + + /// Create an empty guid. Usable as a constant. + #[inline] + pub const fn empty() -> Self { + Guid(Repr::Fast(FastGuid { + len: 0, + data: [0u8; MAX_FAST_GUID_LEN], + })) + } + + /// Create a random guid (of 12 base64url characters). Requires the `random` + /// feature. + #[cfg(feature = "random")] + pub fn random() -> Self { + let bytes: [u8; 9] = rand::random(); + + // Note: only first 12 bytes are used, but remaining are required to + // build the FastGuid + let mut output = [0u8; MAX_FAST_GUID_LEN]; + + let bytes_written = + base64::encode_config_slice(bytes, base64::URL_SAFE_NO_PAD, &mut output[..12]); + + debug_assert!(bytes_written == 12); + + Guid(Repr::Fast(FastGuid { + len: 12, + data: output, + })) + } + + /// Convert `b` into a `Guid`. + #[inline] + pub fn from_string(s: String) -> Self { + Guid::from_vec(s.into_bytes()) + } + + /// Convert `b` into a `Guid`. + #[inline] + pub fn from_slice(b: &[u8]) -> Self { + if can_use_fast(b) { + Guid(Repr::Fast(FastGuid::from_slice(b))) + } else { + Guid::new_slow(b.into()) + } + } + + /// Convert `v` to a `Guid`, consuming it. + #[inline] + pub fn from_vec(v: Vec<u8>) -> Self { + if can_use_fast(&v) { + Guid(Repr::Fast(FastGuid::from_slice(&v))) + } else { + Guid::new_slow(v) + } + } + + /// Get the data backing this `Guid` as a `&[u8]`. + #[inline] + pub fn as_bytes(&self) -> &[u8] { + match &self.0 { + Repr::Fast(rep) => rep.bytes(), + Repr::Slow(rep) => rep.as_ref(), + } + } + + /// Get the data backing this `Guid` as a `&str`. + #[inline] + pub fn as_str(&self) -> &str { + match &self.0 { + Repr::Fast(rep) => rep.as_str(), + Repr::Slow(rep) => rep.as_ref(), + } + } + + /// Convert this `Guid` into a `String`, consuming it in the process. + #[inline] + pub fn into_string(self) -> String { + match self.0 { + Repr::Fast(rep) => rep.as_str().into(), + Repr::Slow(rep) => rep, + } + } + + /// Returns true for Guids that are deemed valid by the sync server. 
+ /// See https://github.com/mozilla-services/server-syncstorage/blob/d92ef07877aebd05b92f87f6ade341d6a55bffc8/syncstorage/bso.py#L24 + pub fn is_valid_for_sync_server(&self) -> bool { + !self.is_empty() + && self.len() <= 64 + && self + .bytes() + .all(|b| (b' '..=b'~').contains(&b) && b != b',') + } + + /// Returns true for Guids that are valid places guids, and false for all others. + pub fn is_valid_for_places(&self) -> bool { + self.len() == 12 && self.bytes().all(Guid::is_valid_places_byte) + } + + /// Returns true if the byte `b` is a valid base64url byte. + #[inline] + pub fn is_valid_places_byte(b: u8) -> bool { + BASE64URL_BYTES[b as usize] == 1 + } + + #[cold] + fn new_slow(v: Vec<u8>) -> Self { + assert!( + !can_use_fast(&v), + "Could use fast for guid (len = {})", + v.len() + ); + Guid(Repr::Slow( + String::from_utf8(v).expect("Invalid slow guid bytes!"), + )) + } +} + +// This is used to implement the places tests. +const BASE64URL_BYTES: [u8; 256] = [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, + 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, + 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, +]; + +impl Ord for Guid { + fn cmp(&self, other: &Self) -> Ordering { + self.as_bytes().cmp(other.as_bytes()) + } +} + +impl PartialOrd for Guid { + fn partial_cmp(&self, other: &Self) -> Option<Ordering> { + Some(self.cmp(other)) + } +} + +impl PartialEq for Guid { + fn eq(&self, other: &Self) -> bool { + self.as_bytes() == other.as_bytes() + } +} + +impl Eq for Guid {} + +impl Hash for Guid { + fn hash<H: Hasher>(&self, state: &mut H) { + self.as_bytes().hash(state); + } +} + +impl<'a> From<&'a str> for Guid { + #[inline] + fn from(s: &'a str) -> Guid { + Guid::from_slice(s.as_ref()) + } +} +impl<'a> From<&'a &str> for Guid { + #[inline] + fn from(s: &'a &str) -> Guid { + Guid::from_slice(s.as_ref()) + } +} + +impl<'a> From<&'a [u8]> for Guid { + #[inline] + fn from(s: &'a [u8]) -> Guid { + Guid::from_slice(s) + } +} + +impl From<String> for Guid { + #[inline] + fn from(s: String) -> Guid { + Guid::from_string(s) + } +} + +impl From<Vec<u8>> for Guid { + #[inline] + fn from(v: Vec<u8>) -> Guid { + Guid::from_vec(v) + } +} + +impl From<Guid> for String { + #[inline] + fn from(guid: Guid) -> String { + guid.into_string() + } +} + +impl From<Guid> for Vec<u8> { + #[inline] + fn from(guid: Guid) -> Vec<u8> { + guid.into_string().into_bytes() + } +} + +impl AsRef<str> for Guid { + #[inline] + fn as_ref(&self) -> &str { + self.as_str() + } +} + +impl AsRef<[u8]> for Guid { + #[inline] + fn as_ref(&self) -> &[u8] { + self.as_bytes() + } +} + +impl ops::Deref for Guid { + type Target = str; + #[inline] + fn deref(&self) -> &str { + self.as_str() + } +} + +// The default Debug impl is pretty unhelpful here. 
+impl fmt::Debug for Guid { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "Guid({:?})", self.as_str()) + } +} + +impl fmt::Display for Guid { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Display::fmt(self.as_str(), f) + } +} + +impl std::default::Default for Guid { + /// Create a default guid by calling `Guid::empty()` + #[inline] + fn default() -> Self { + Guid::empty() + } +} + +macro_rules! impl_guid_eq { + ($($other: ty),+) => {$( + // This macro is used for items with and without lifetimes. + #[allow(clippy::extra_unused_lifetimes)] + impl<'a> PartialEq<$other> for Guid { + #[inline] + fn eq(&self, other: &$other) -> bool { + PartialEq::eq(AsRef::<[u8]>::as_ref(self), AsRef::<[u8]>::as_ref(other)) + } + } + + #[allow(clippy::extra_unused_lifetimes)] + impl<'a> PartialEq<Guid> for $other { + #[inline] + fn eq(&self, other: &Guid) -> bool { + PartialEq::eq(AsRef::<[u8]>::as_ref(self), AsRef::<[u8]>::as_ref(other)) + } + } + )+} +} + +// Implement direct comparison with some common types from the stdlib. +impl_guid_eq![str, &'a str, String, [u8], &'a [u8], Vec<u8>]; + +#[cfg(test)] +mod test { + use super::*; + + #[test] + fn test_base64url_bytes() { + let mut expect = [0u8; 256]; + for b in b'0'..=b'9' { + expect[b as usize] = 1; + } + for b in b'a'..=b'z' { + expect[b as usize] = 1; + } + for b in b'A'..=b'Z' { + expect[b as usize] = 1; + } + expect[b'_' as usize] = 1; + expect[b'-' as usize] = 1; + assert_eq!(&BASE64URL_BYTES[..], &expect[..]); + } + + #[test] + fn test_valid_for_places() { + assert!(Guid::from("aaaabbbbcccc").is_valid_for_places()); + assert!(Guid::from_slice(b"09_az-AZ_09-").is_valid_for_places()); + assert!(!Guid::from("aaaabbbbccccd").is_valid_for_places()); // too long + assert!(!Guid::from("aaaabbbbccc").is_valid_for_places()); // too short + assert!(!Guid::from("aaaabbbbccc=").is_valid_for_places()); // right length, bad character + assert!(!Guid::empty().is_valid_for_places()); // empty isn't valid to insert. + } + + #[test] + fn test_valid_for_sync_server() { + assert!(!Guid::empty().is_valid_for_sync_server()); // empty isn't valid remotely. + } + + #[allow(clippy::cmp_owned)] // See clippy note below. + #[test] + fn test_comparison() { + assert_eq!(Guid::from("abcdabcdabcd"), "abcdabcdabcd"); + assert_ne!(Guid::from("abcdabcdabcd".to_string()), "ABCDabcdabcd"); + + assert_eq!(Guid::from("abcdabcdabcd"), &b"abcdabcdabcd"[..]); // b"abcdabcdabcd" has type &[u8; 12]... + assert_ne!(Guid::from(&b"abcdabcdabcd"[..]), &b"ABCDabcdabcd"[..]); + + assert_eq!( + Guid::from(b"abcdabcdabcd"[..].to_owned()), + "abcdabcdabcd".to_string() + ); + assert_ne!(Guid::from("abcdabcdabcd"), "ABCDabcdabcd".to_string()); + + assert_eq!( + Guid::from("abcdabcdabcd1234"), + Vec::from(b"abcdabcdabcd1234".as_ref()) + ); + assert_ne!( + Guid::from("abcdabcdabcd4321"), + Vec::from(b"ABCDabcdabcd4321".as_ref()) + ); + + // order by data instead of length + // hrmph - clippy in 1.54-nightly complains about the below: + // 'error: this creates an owned instance just for comparison' + // '... help: try: `*"aaaaaa"`' + // and suggests a change that's wrong - so we've ignored the lint above. + assert!(Guid::from("zzz") > Guid::from("aaaaaa")); + assert!(Guid::from("ThisIsASolowGuid") < Guid::from("zzz")); + assert!(Guid::from("ThisIsASolowGuid") > Guid::from("AnotherSlowGuid")); + } + + #[cfg(feature = "random")] + #[test] + fn test_random() { + use std::collections::HashSet; + // Used to verify uniqueness within our sample of 1000. 
Could cause + // random failures, but desktop has the same test, and it's never caused + // a problem AFAIK. + let mut seen: HashSet<String> = HashSet::new(); + for _ in 0..1000 { + let g = Guid::random(); + assert_eq!(g.len(), 12); + assert!(g.is_valid_for_places()); + let decoded = base64::decode_config(&g, base64::URL_SAFE_NO_PAD).unwrap(); + assert_eq!(decoded.len(), 9); + let no_collision = seen.insert(g.clone().into_string()); + assert!(no_collision, "{}", g); + } + } +} diff --git a/third_party/rust/sync-guid/src/rusqlite_support.rs b/third_party/rust/sync-guid/src/rusqlite_support.rs new file mode 100644 index 0000000000..c4c0f2f042 --- /dev/null +++ b/third_party/rust/sync-guid/src/rusqlite_support.rs @@ -0,0 +1,23 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#![cfg(feature = "rusqlite_support")] + +use crate::Guid; +use rusqlite::{ + self, + types::{FromSql, FromSqlResult, ToSql, ToSqlOutput, ValueRef}, +}; + +impl ToSql for Guid { + fn to_sql(&self) -> rusqlite::Result<ToSqlOutput<'_>> { + Ok(ToSqlOutput::from(self.as_str())) + } +} + +impl FromSql for Guid { + fn column_result(value: ValueRef<'_>) -> FromSqlResult<Self> { + value.as_str().map(Guid::from) + } +} diff --git a/third_party/rust/sync-guid/src/serde_support.rs b/third_party/rust/sync-guid/src/serde_support.rs new file mode 100644 index 0000000000..50220ffe12 --- /dev/null +++ b/third_party/rust/sync-guid/src/serde_support.rs @@ -0,0 +1,61 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#![cfg(feature = "serde_support")] + +use std::fmt; + +use serde::{ + de::{self, Deserialize, Deserializer, Visitor}, + ser::{Serialize, Serializer}, +}; + +use crate::Guid; + +struct GuidVisitor; +impl<'de> Visitor<'de> for GuidVisitor { + type Value = Guid; + #[inline] + fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { + formatter.write_str("a sync guid") + } + #[inline] + fn visit_str<E: de::Error>(self, s: &str) -> Result<Self::Value, E> { + Ok(Guid::from_slice(s.as_ref())) + } +} + +impl<'de> Deserialize<'de> for Guid { + #[inline] + fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> + where + D: Deserializer<'de>, + { + deserializer.deserialize_str(GuidVisitor) + } +} + +impl Serialize for Guid { + #[inline] + fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { + serializer.serialize_str(self.as_str()) + } +} + +#[cfg(test)] +mod test { + use super::*; + use serde_test::{assert_tokens, Token}; + #[test] + fn test_ser_de() { + let guid = Guid::from("asdffdsa12344321"); + assert_tokens(&guid, &[Token::Str("asdffdsa12344321")]); + + let guid = Guid::from(""); + assert_tokens(&guid, &[Token::Str("")]); + + let guid = Guid::from(&b"abcd43211234"[..]); + assert_tokens(&guid, &[Token::Str("abcd43211234")]); + } +} diff --git a/third_party/rust/sync15/.cargo-checksum.json b/third_party/rust/sync15/.cargo-checksum.json new file mode 100644 index 0000000000..1d4ab35f5f --- /dev/null +++ b/third_party/rust/sync15/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{"Cargo.toml":"0102c944a52a98d1b7c6ba7a5f3ec69e28e5a05f0c58a743f0c5ba020988122a","README.md":"6d4ff5b079ac5340d18fa127f583e7ad793c5a2328b8ecd12c3fc723939804f2","src/bso/content.rs":"d2d650f4932e9a7068a25dd7df0085b92cd8976a0635320e6ae306d5a425075c","src/bso/crypto.rs":"27602dcccb37d3a55620ee4e16b705da455d49af575de115c7c79c0178eb1d6d","src/bso/mod.rs":"09e723dc7e99295ecafdcadffaf604d66ea27cf2b7f1fd9ab3cac4f4698ff6a7","src/bso/test_utils.rs":"4ec5a2df5e1c0ec14dc770681e959bdcef6ef04f6fde435999197f46a8ae4831","src/client/coll_state.rs":"4301526b987532cd7ebd5d089980c9524aa29360e08fa0c1f5437696ed354822","src/client/coll_update.rs":"627e2266c5c8f1c5e0bc83061fa14c1287c7d17c78b47882543521f75543b499","src/client/collection_keys.rs":"c27b2277a3a52033b58ab01490fc2ea7007494195dd5e6dc2c6931a4ca96795a","src/client/mod.rs":"8f588d4a035cf79d96f2500f06d5651c1a7c566127c456ffa5429811ddce3fd6","src/client/request.rs":"8841524e37d8195867bdf6ba98c75f610cf47a4644adeebd6372cc6713f2260a","src/client/state.rs":"4e31193ef2471c1dfabf1c6a391bcb95e14ddb45855786a4194ff187d5c9347c","src/client/status.rs":"f445a8765dac9789444e23b5145148413407bb1d18a15ef56682243997f591bf","src/client/storage_client.rs":"8de72d4ba3ca4f68c8e1898466de83a2b543545a18679800cb4f7fbda2dc3183","src/client/sync.rs":"a04478c4e31bbf39f593decfc4844bfb7e678907031f0a93d1e005cf0047ebb4","src/client/sync_multiple.rs":"3729d4afd90ab1bd9982a3506252c99d8f37619cc1792ef4feba352ad01a7192","src/client/token.rs":"b268759d31e0fe17e0e2a428694cd9a317fcfbdd52f023d5d8c7cc6f00f1a102","src/client/util.rs":"71cc70ee41f821f53078675e636e9fad9c6046fa1a989e37f5487e340a2277d6","src/client_types.rs":"3c3cac1540b92482f43660d9e43bdde8481c4cc1a98253a68c80e791231f5976","src/clients_engine/engine.rs":"0eaa078978c95fc96284b48a7458ebff49f0aa70b1148ef019b9350b5ba4d0d4","src/clients_engine/mod.rs":"461729e6f89b66b2cbd89b041a03d4d6a8ba582284ed4f3015cb13e1a0c6da97","src/clients_engine/record.rs":"69357413571d688eea3a5207f9b88088cde285b9373c7bd4ea1e018dbc823dd2","src/clients_eng
ine/ser.rs":"9796e44ed7daf04f22afbb51238ac25fd0de1438b72181351b4ca29fd70fd429","src/device_type.rs":"fe217453f19b374abcc10e9f503b25f4f712b555497bebe5aefcf2e9b258d28e","src/enc_payload.rs":"aa3eea7df49b24cd59831680a47c417b73a3e36e6b0f3f4baf14ca66bd68be6b","src/engine/bridged_engine.rs":"5c0748358a7b3040442bc81122400ab7b26acf728b194dbeee8a2166dc653e11","src/engine/changeset.rs":"949c520e508da0158b2df9fa20235f942ad8096cebbfc942b0fae41c35830c6b","src/engine/mod.rs":"cb638c2170c3785785877dfd059f26fbbfeae5edc599745dd861e4c299d310ad","src/engine/request.rs":"5923025fb9550178339f880a1bf8526d8e853e7a0b2bce6d9d687cc808ac0085","src/engine/sync_engine.rs":"9a5c6993dcceec8f82ead97a6d1840c7ed7dc6b326f7234c77f18848b6baf836","src/error.rs":"a45cfe02e6301f473c34678b694943c1a04308b8c292c6e0448bf495194c3b5e","src/key_bundle.rs":"ff8b10b95add934ecbc434b37ed089805886828ed159fd38bd692d1f01d06f7f","src/lib.rs":"eca1fa801820238141c4badeeec45d430aaec8b2ce088446ef94456d51014889","src/record_types.rs":"02bb3d352fb808131d298f9b90d9c95b7e9e0138b97c5401f3b9fdacc5562f44","src/server_timestamp.rs":"0020f31971ccbfc485894cabc3087459d42252b86d7de07f2136997864b0373b","src/telemetry.rs":"35e0313a052f16326e451e3d6e371337c1d71a471f32234ad9649fc1fa9f2237"},"package":null}
\ No newline at end of file diff --git a/third_party/rust/sync15/Cargo.toml b/third_party/rust/sync15/Cargo.toml new file mode 100644 index 0000000000..895a5c3816 --- /dev/null +++ b/third_party/rust/sync15/Cargo.toml @@ -0,0 +1,87 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g., crates.io) dependencies. +# +# If you are reading this file be aware that the original Cargo.toml +# will likely look very different (and much more reasonable). +# See Cargo.toml.orig for the original contents. + +[package] +edition = "2021" +name = "sync15" +version = "0.1.0" +authors = ["application-services@mozilla.com"] +exclude = [ + "/android", + "/ios", +] +readme = "README.md" +license = "MPL-2.0" + +[dependencies] +anyhow = "1.0" +ffi-support = "0.4" +lazy_static = "1.4" +log = "0.4" +serde_derive = "1" +serde_json = "1" +thiserror = "1.0" + +[dependencies.base16] +version = "0.2" +optional = true + +[dependencies.base64] +version = "0.13" +optional = true + +[dependencies.error-support] +path = "../support/error" + +[dependencies.interrupt-support] +path = "../support/interrupt" + +[dependencies.rc_crypto] +path = "../support/rc_crypto" +features = ["hawk"] +optional = true + +[dependencies.serde] +version = "1" +features = ["derive"] + +[dependencies.sync-guid] +path = "../support/guid" +features = ["random"] + +[dependencies.url] +version = "2.1" +optional = true + +[dependencies.viaduct] +path = "../viaduct" +optional = true + +[dev-dependencies.env_logger] +version = "0.7" +default-features = false + +[features] +crypto = [ + "rc_crypto", + "base16", + "base64", +] +default = ["sync-engine"] +random-guid = ["sync-guid/random"] +standalone-sync = ["sync-client"] +sync-client = [ + "sync-engine", + "crypto", + "viaduct", + "url", +] +sync-engine = ["random-guid"] diff --git a/third_party/rust/sync15/README.md b/third_party/rust/sync15/README.md new file mode 100644 index 0000000000..a638435908 --- /dev/null +++ b/third_party/rust/sync15/README.md @@ -0,0 +1,128 @@ +# Low-level sync-1.5 helper component + +This component contains utility code to be shared between different +data stores that want to sync against a Firefox Sync v1.5 sync server. +It handles things like encrypting/decrypting records, obtaining and +using storage node auth tokens, and so-on. + +There are 2 key concepts to understand here - the implementation itself, and +a rust trait for a "syncable store" where component-specific logic lives - but +before we dive into them, some preamble might help put things into context. + +## Nomenclature + +* The term "store" is generally used as the interface to the database - ie, the + thing that gets and saves items. It can also be seen as supplying the API + used by most consumers of the component. Note that the "places" component + is alone in using the term "api" for this object. + +* The term "engine" (or ideally, "sync engine") is used for the thing that + actually does the syncing for a store. Sync engines implement the SyncEngine + trait - the trait is either implemented directly by a store, or a new object + that has a reference to a store. + +## Introduction and History + +For many years Sync has worked exclusively against a "sync v1.5 server". This +[is a REST API described here](https://mozilla-services.readthedocs.io/en/latest/storage/apis-1.5.html). 
+The important part is that the API is conceptually quite simple - there are
+arbitrary "collections" containing "records" indexed by a GUID, and lacking
+traditional database concepts like joins. Because the record is encrypted,
+there's very little scope for the server to be much smarter. Thus it's
+reasonably easy to create a fairly generic abstraction over the API that can be
+easily reused.
+
+Back in the deep past, we found ourselves with 2 different components that
+needed to sync against a sync v1.5 server. The apps using these components
+didn't have schedulers or any UI for choosing what to sync - so these
+components just looked at the existing state of the engines on the server and
+synced if they were enabled.
+
+This was also pre-megazord - the idea was that apps could choose from a "menu"
+of components to include - so we didn't really want to bind these components
+together. Therefore, there was no concept of "sync all" - instead, each of the
+components had to be synced individually. So this component started out as more
+of a "library" than a "component" which individual components could reuse - and
+each of these components was a "syncable store" (ie, a store which could supply
+a "sync engine").
+
+Fast forward to Fenix and we needed a UI for managing all the engines supported
+there, and a single "sync now" experience etc - so we also have a sync_manager
+component - [see its README for more](../components/sync_manager/README.md).
+But even though it exists, there are still some parts of this component that
+reflect these early days - for example, it's still possible to sync just a
+single component using sync15 (ie, without going via the "sync manager"),
+although this isn't used and should be removed - the "sync manager" allows you
+to choose which engines to sync, so that should be used exclusively.
+
+## Metadata
+
+There's some metadata associated with a sync. Some of the metadata is "global"
+to the app (eg, the enabled state of engines, information about what servers to
+use, etc) and some is specific to an engine (eg, the timestamp of the
+server's collection for this engine, guids for the collections, etc).
+
+We made the decision early on that no storage should be done by this
+component:
+
+* The "global" metadata should be stored by the application - but because it
+  doesn't need to interpret the data, we do this with an opaque string (that
+  is JSON, but the app should never assume or introspect that)
+
+* Each engine should store its own metadata, so we don't end up in the
+  situation where, say, a database is moved between profiles causing the
+  metadata to refer to a completely different data set. So each engine
+  stores its metadata in the same database as the data itself, so if the
+  database is moved or copied, the metadata comes with it.
+
+## Sync Implementation
+
+The core implementation does all of the interaction with things like the
+tokenserver, the `meta/global` and `info/collections` collections, etc. It
+does all network interaction (ie, individual engines don't need to interact with
+the network at all), tracks things like whether the server is asking us to
+"backoff" due to operational concerns, manages encryption keys and the
+encryption itself, etc.
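+
+To make this concrete before walking through the flow, the sketch below shows
+the rough shape of what an engine sees. It is a deliberately simplified,
+hypothetical illustration - the trait and type names are made up here and do
+not match the real `SyncEngine` trait (described in the next section):
+
+```rust
+// Hypothetical, simplified sketch - not the real sync15 API.
+// The point: the core implementation owns all networking and crypto, so an
+// engine only ever sees already-decrypted payloads, returns payloads to
+// upload, and persists a little metadata alongside its own data.
+struct CleartextRecord {
+    id: String,
+    payload: String, // cleartext JSON
+}
+
+trait MiniEngine {
+    /// Apply records downloaded (and decrypted) by the core implementation,
+    /// returning the records that should be uploaded in response.
+    fn apply_incoming(&mut self, incoming: Vec<CleartextRecord>) -> Vec<CleartextRecord>;
+    /// Persist the server timestamp of the upload as engine metadata.
+    fn sync_finished(&mut self, new_last_modified: f64);
+}
+```
+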
+The general flow of a sync - which interacts with the `SyncEngine` trait - is:
+
+* Does all pre-sync setup, such as checking `meta/global`, and whether the
+  sync IDs on the server match the sync IDs we last saw (ie, to check whether
+  something drastic has happened since we last synced)
+* Asks the engine about how to formulate the URL query params to obtain the
+  records the engine cares about. In most cases, this will simply be "records
+  since the last modified timestamp of the last sync".
+* Downloads and decrypts these records.
+* Passes these records to the engine for processing, and obtains records that
+  should be uploaded to the server.
+* Encrypts these outgoing records and uploads them.
+* Tells the engine about the result of the upload (ie, the last-modified
+  timestamp of the POST so it can be saved as engine metadata)
+
+As above, the sync15 component really only deals with a single engine at a time.
+See the "sync manager" for how multiple engines are managed (but the tl;dr is
+that the "sync manager" leans on this very heavily, but knows about multiple
+engines and manages shared state).
+
+## The `SyncEngine` trait
+
+The SyncEngine trait is where all logic specific to a collection lives. A "sync
+engine" implements (or provides) this trait to do the actual syncing.
+
+For <handwave> reasons, it actually lives in the
+[sync-traits helper](https://github.com/mozilla/application-services/blob/main/components/support/sync15-traits/src/engine.rs)
+but for the purposes of this document, you should consider it as owned by sync15.
+
+This is actually quite a simple trait - at a high level, it's really just
+concerned with:
+
+* Get or set some metadata the sync15 component has decided should be saved or
+  fetched.
+
+* In a normal sync, take some "incoming" records, process them, and return
+  the "outgoing" records we should send to the server.
+
+* In some edge-cases, either "wipe" (ie, actually delete everything, which
+  almost never happens) or "reset" (ie, pretend this engine has never before
+  been synced)
+
+And that's it!
diff --git a/third_party/rust/sync15/src/bso/content.rs b/third_party/rust/sync15/src/bso/content.rs
new file mode 100644
index 0000000000..f7aa6f608b
--- /dev/null
+++ b/third_party/rust/sync15/src/bso/content.rs
@@ -0,0 +1,388 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ */
+
+//! This module enhances the IncomingBso and OutgoingBso records to deal with
+//! arbitrary <T> types, which we call "content".
+//! It can:
+//! * Parse JSON into some <T> while handling tombstones and invalid json.
+//! * Turn arbitrary <T> objects with an `id` field into an OutgoingBso.
+
+use super::{IncomingBso, IncomingContent, IncomingKind, OutgoingBso, OutgoingEnvelope};
+use crate::Guid;
+use error_support::report_error;
+use serde::Serialize;
+
+// The only errors we return here are serde errors.
+type Result<T> = std::result::Result<T, serde_json::Error>;
+
+impl<T> IncomingContent<T> {
+ /// Returns Some(content) if [self.kind] is [IncomingKind::Content], None otherwise.
+ pub fn content(self) -> Option<T> {
+ match self.kind {
+ IncomingKind::Content(t) => Some(t),
+ _ => None,
+ }
+ }
+}
+
+// We don't want to force our T to be Debug, but we can be Debug if T is.
+impl<T: std::fmt::Debug> std::fmt::Debug for IncomingKind<T> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + IncomingKind::Content(r) => { + write!(f, "IncomingKind::Content<{:?}>", r) + } + IncomingKind::Tombstone => write!(f, "IncomingKind::Tombstone"), + IncomingKind::Malformed => write!(f, "IncomingKind::Malformed"), + } + } +} + +impl IncomingBso { + /// Convert an [IncomingBso] to an [IncomingContent] possibly holding a T. + pub fn into_content<T: for<'de> serde::Deserialize<'de>>(self) -> IncomingContent<T> { + match serde_json::from_str(&self.payload) { + Ok(json) => { + // We got a good serde_json::Value, see if it's a <T>. + let kind = json_to_kind(json, &self.envelope.id); + IncomingContent { + envelope: self.envelope, + kind, + } + } + Err(e) => { + // payload isn't valid json. + log::warn!("Invalid incoming cleartext {}: {}", self.envelope.id, e); + IncomingContent { + envelope: self.envelope, + kind: IncomingKind::Malformed, + } + } + } + } +} + +impl OutgoingBso { + /// Creates a new tombstone record. + /// Not all collections expect tombstones. + pub fn new_tombstone(envelope: OutgoingEnvelope) -> Self { + Self { + envelope, + payload: serde_json::json!({"deleted": true}).to_string(), + } + } + + /// Creates a outgoing record from some <T>, which can be made into a JSON object + /// with a valid `id`. This is the most convenient way to create an outgoing + /// item from a <T> when the default envelope is suitable. + /// Will panic if there's no good `id` in the json. + pub fn from_content_with_id<T>(record: T) -> Result<Self> + where + T: Serialize, + { + let (json, id) = content_with_id_to_json(record)?; + Ok(Self { + envelope: id.into(), + payload: serde_json::to_string(&json)?, + }) + } + + /// Create an Outgoing record with an explicit envelope. Will panic if the + /// payload has an ID but it doesn't match the envelope. + pub fn from_content<T>(envelope: OutgoingEnvelope, record: T) -> Result<Self> + where + T: Serialize, + { + let json = content_to_json(record, &envelope.id)?; + Ok(Self { + envelope, + payload: serde_json::to_string(&json)?, + }) + } +} + +// Helpers for packing and unpacking serde objects to and from a <T>. In particular: +// * Helping deal complications around raw json payload not having 'id' (the envelope is +// canonical) but needing it to exist when dealing with serde locally. +// For example, a record on the server after being decrypted looks like: +// `{"id": "a-guid", payload: {"field": "value"}}` +// But the `T` for this typically looks like `struct T { id: Guid, field: String}` +// So before we try and deserialize this record into a T, we copy the `id` field +// from the envelope into the payload, and when serializing from a T we do the +// reverse (ie, ensure the `id` in the payload is removed and placed in the envelope) +// * Tombstones. + +// Deserializing json into a T +fn json_to_kind<T>(mut json: serde_json::Value, id: &Guid) -> IncomingKind<T> +where + T: for<'de> serde::Deserialize<'de>, +{ + // It's possible that the payload does not carry 'id', but <T> always does - so grab it from the + // envelope and put it into the json before deserializing the record. + if let serde_json::Value::Object(ref mut map) = json { + if map.contains_key("deleted") { + return IncomingKind::Tombstone; + } + match map.get("id") { + Some(serde_json::Value::String(content_id)) => { + // It exists in the payload! We treat a mismatch as malformed. 
+ if content_id != id { + log::trace!( + "malformed incoming record: envelope id: {} payload id: {}", + content_id, + id + ); + report_error!( + "incoming-invalid-mismatched-ids", + "Envelope and payload don't agree on the ID" + ); + return IncomingKind::Malformed; + } + if !id.is_valid_for_sync_server() { + log::trace!("malformed incoming record: id is not valid: {}", id); + report_error!( + "incoming-invalid-bad-payload-id", + "ID in the payload is invalid" + ); + return IncomingKind::Malformed; + } + } + Some(v) => { + // It exists in the payload but is not a string - they can't possibly be + // the same as the envelope uses a String, so must be malformed. + log::trace!("malformed incoming record: id is not a string: {}", v); + report_error!("incoming-invalid-wrong_type", "ID is not a string"); + return IncomingKind::Malformed; + } + None => { + // Doesn't exist in the payload - add it before trying to deser a T. + if !id.is_valid_for_sync_server() { + log::trace!("malformed incoming record: id is not valid: {}", id); + report_error!( + "incoming-invalid-bad-envelope-id", + "ID in envelope is not valid" + ); + return IncomingKind::Malformed; + } + map.insert("id".to_string(), id.to_string().into()); + } + } + }; + match serde_json::from_value(json) { + Ok(v) => IncomingKind::Content(v), + Err(e) => { + report_error!("invalid-incoming-content", "Invalid incoming T: {}", e); + IncomingKind::Malformed + } + } +} + +// Serializing <T> into json with special handling of `id` (the `id` from the payload +// is used as the envelope ID) +fn content_with_id_to_json<T>(record: T) -> Result<(serde_json::Value, Guid)> +where + T: Serialize, +{ + let mut json = serde_json::to_value(record)?; + let id = match json.as_object_mut() { + Some(ref mut map) => { + match map.get("id").as_ref().and_then(|v| v.as_str()) { + Some(id) => { + let id: Guid = id.into(); + assert!(id.is_valid_for_sync_server(), "record's ID is invalid"); + id + } + // In practice, this is a "static" error and not influenced by runtime behavior + None => panic!("record does not have an ID in the payload"), + } + } + None => panic!("record is not a json object"), + }; + Ok((json, id)) +} + +// Serializing <T> into json with special handling of `id` (if `id` in serialized +// JSON already exists, we panic if it doesn't match the envelope. 
If the serialized +// content does not have an `id`, it is added from the envelope) +// is used as the envelope ID) +fn content_to_json<T>(record: T, id: &Guid) -> Result<serde_json::Value> +where + T: Serialize, +{ + let mut payload = serde_json::to_value(record)?; + if let Some(ref mut map) = payload.as_object_mut() { + if let Some(content_id) = map.get("id").as_ref().and_then(|v| v.as_str()) { + assert_eq!(content_id, id); + assert!(id.is_valid_for_sync_server(), "record's ID is invalid"); + } else { + map.insert("id".to_string(), serde_json::Value::String(id.to_string())); + } + }; + Ok(payload) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::bso::IncomingBso; + use serde::{Deserialize, Serialize}; + use serde_json::json; + + #[derive(Default, Debug, PartialEq, Serialize, Deserialize)] + struct TestStruct { + id: Guid, + data: u32, + } + #[test] + fn test_content_deser() { + env_logger::try_init().ok(); + let json = json!({ + "id": "test", + "payload": json!({"data": 1}).to_string(), + }); + let incoming: IncomingBso = serde_json::from_value(json).unwrap(); + assert_eq!(incoming.envelope.id, "test"); + let record = incoming.into_content::<TestStruct>().content().unwrap(); + let expected = TestStruct { + id: Guid::new("test"), + data: 1, + }; + assert_eq!(record, expected); + } + + #[test] + fn test_content_deser_empty_id() { + env_logger::try_init().ok(); + let json = json!({ + "id": "", + "payload": json!({"data": 1}).to_string(), + }); + let incoming: IncomingBso = serde_json::from_value(json).unwrap(); + // The envelope has an invalid ID, but it's not handled until we try and deserialize + // it into a T + assert_eq!(incoming.envelope.id, ""); + let content = incoming.into_content::<TestStruct>(); + assert!(matches!(content.kind, IncomingKind::Malformed)); + } + + #[test] + fn test_content_deser_invalid() { + env_logger::try_init().ok(); + // And a non-empty but still invalid guid. + let json = json!({ + "id": "X".repeat(65), + "payload": json!({"data": 1}).to_string(), + }); + let incoming: IncomingBso = serde_json::from_value(json).unwrap(); + let content = incoming.into_content::<TestStruct>(); + assert!(matches!(content.kind, IncomingKind::Malformed)); + } + + #[test] + fn test_content_deser_not_string() { + env_logger::try_init().ok(); + // A non-string id. + let json = json!({ + "id": "0", + "payload": json!({"id": 0, "data": 1}).to_string(), + }); + let incoming: IncomingBso = serde_json::from_value(json).unwrap(); + let content = incoming.into_content::<serde_json::Value>(); + assert!(matches!(content.kind, IncomingKind::Malformed)); + } + + #[test] + fn test_content_ser_with_id() { + env_logger::try_init().ok(); + // When serializing, expect the ID to be in the top-level payload (ie, + // in the envelope) but should not appear in the cleartext `payload` part of + // the payload. + let val = TestStruct { + id: Guid::new("test"), + data: 1, + }; + let outgoing = OutgoingBso::from_content_with_id(val).unwrap(); + + // The envelope should have our ID. + assert_eq!(outgoing.envelope.id, Guid::new("test")); + + // and make sure `cleartext` part of the payload the data and the id. 
+ let ct_value = serde_json::from_str::<serde_json::Value>(&outgoing.payload).unwrap(); + assert_eq!(ct_value, json!({"data": 1, "id": "test"})); + } + + #[test] + fn test_content_ser_with_envelope() { + env_logger::try_init().ok(); + // When serializing, expect the ID to be in the top-level payload (ie, + // in the envelope) but should not appear in the cleartext `payload` + let val = TestStruct { + id: Guid::new("test"), + data: 1, + }; + let envelope: OutgoingEnvelope = Guid::new("test").into(); + let outgoing = OutgoingBso::from_content(envelope, val).unwrap(); + + // The envelope should have our ID. + assert_eq!(outgoing.envelope.id, Guid::new("test")); + + // and make sure `cleartext` part of the payload has data and the id. + let ct_value = serde_json::from_str::<serde_json::Value>(&outgoing.payload).unwrap(); + assert_eq!(ct_value, json!({"data": 1, "id": "test"})); + } + + #[test] + #[should_panic] + fn test_content_ser_no_ids() { + env_logger::try_init().ok(); + #[derive(Serialize)] + struct StructWithNoId { + data: u32, + } + let val = StructWithNoId { data: 1 }; + let _ = OutgoingBso::from_content_with_id(val); + } + + #[test] + #[should_panic] + fn test_content_ser_not_object() { + env_logger::try_init().ok(); + let _ = OutgoingBso::from_content_with_id(json!("string")); + } + + #[test] + #[should_panic] + fn test_content_ser_mismatched_ids() { + env_logger::try_init().ok(); + let val = TestStruct { + id: Guid::new("test"), + data: 1, + }; + let envelope: OutgoingEnvelope = Guid::new("different").into(); + let _ = OutgoingBso::from_content(envelope, val); + } + + #[test] + #[should_panic] + fn test_content_empty_id() { + env_logger::try_init().ok(); + let val = TestStruct { + id: Guid::new(""), + data: 1, + }; + let _ = OutgoingBso::from_content_with_id(val); + } + + #[test] + #[should_panic] + fn test_content_invalid_id() { + env_logger::try_init().ok(); + let val = TestStruct { + id: Guid::new(&"X".repeat(65)), + data: 1, + }; + let _ = OutgoingBso::from_content_with_id(val); + } +} diff --git a/third_party/rust/sync15/src/bso/crypto.rs b/third_party/rust/sync15/src/bso/crypto.rs new file mode 100644 index 0000000000..d572c4692b --- /dev/null +++ b/third_party/rust/sync15/src/bso/crypto.rs @@ -0,0 +1,197 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +//! Support for "encrypted bso"s, as received by the storage servers. +//! This module decrypts them into IncomingBso's suitable for use by the +//! engines. +use super::{IncomingBso, IncomingEnvelope, OutgoingBso, OutgoingEnvelope}; +use crate::error; +use crate::key_bundle::KeyBundle; +use crate::EncryptedPayload; +use serde::{de::DeserializeOwned, Deserialize, Serialize}; + +// The BSO implementation we use for encrypted payloads. +// Note that this is almost identical to the IncomingBso implementations, except +// instead of a String payload we use an EncryptedPayload. Obviously we *could* +// just use a String payload and transform it into an EncryptedPayload - any maybe we +// should - but this is marginally optimal in terms of deserialization. 
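+//
+// On the wire an encrypted bso still looks like any other bso - a JSON object whose
+// `payload` is itself a JSON string - it's just that the string parses into an
+// EncryptedPayload, eg:
+// `{"id": "keys", "modified": 1661564513.74, "payload": "{\"IV\": \"...\", \"hmac\": \"...\", \"ciphertext\": \"...\"}"}`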
+#[derive(Deserialize, Debug)] +pub struct IncomingEncryptedBso { + #[serde(flatten)] + pub envelope: IncomingEnvelope, + #[serde( + with = "as_json", + bound(deserialize = "EncryptedPayload: DeserializeOwned") + )] + pub(crate) payload: EncryptedPayload, +} + +impl IncomingEncryptedBso { + pub fn new(envelope: IncomingEnvelope, payload: EncryptedPayload) -> Self { + Self { envelope, payload } + } + /// Decrypt a BSO, consuming it into a clear-text version. + pub fn into_decrypted(self, key: &KeyBundle) -> error::Result<IncomingBso> { + Ok(IncomingBso::new(self.envelope, self.payload.decrypt(key)?)) + } +} + +#[derive(Serialize, Debug)] +pub struct OutgoingEncryptedBso { + #[serde(flatten)] + pub envelope: OutgoingEnvelope, + #[serde(with = "as_json", bound(serialize = "EncryptedPayload: Serialize"))] + payload: EncryptedPayload, +} + +impl OutgoingEncryptedBso { + pub fn new(envelope: OutgoingEnvelope, payload: EncryptedPayload) -> Self { + Self { envelope, payload } + } + + #[inline] + pub fn serialized_payload_len(&self) -> usize { + self.payload.serialized_len() + } +} + +impl OutgoingBso { + pub fn into_encrypted(self, key: &KeyBundle) -> error::Result<OutgoingEncryptedBso> { + Ok(OutgoingEncryptedBso { + envelope: self.envelope, + payload: EncryptedPayload::from_cleartext(key, self.payload)?, + }) + } +} + +// The BSOs we write to the servers expect a "payload" attribute which is a JSON serialized +// string, rather than the JSON representation of the object. +// ie, the serialized object is expected to look like: +// `{"id": "some-guid", "payload": "{\"IV\": ... }"}` <-- payload is a string. +// However, if we just serialize it directly, we end up with: +// `{"id": "some-guid", "payload": {"IV": ... }}` <-- payload is an object. +// The magic here means we can serialize and deserialize directly into/from the object, correctly +// working with the payload as a string, instead of needing to explicitly stringify/parse the +// payload as an extra step. +// +// This would work for any <T>, but we only use it for EncryptedPayload - the way our cleartext +// BSOs work mean it's not necessary there as they define the payload as a String - ie, they do +// explicitly end up doing 2 JSON operations as an ergonomic design choice. +mod as_json { + use serde::de::{self, Deserialize, DeserializeOwned, Deserializer}; + use serde::ser::{self, Serialize, Serializer}; + + pub fn serialize<T, S>(t: &T, serializer: S) -> Result<S::Ok, S::Error> + where + T: Serialize, + S: Serializer, + { + let j = serde_json::to_string(t).map_err(ser::Error::custom)?; + serializer.serialize_str(&j) + } + + pub fn deserialize<'de, T, D>(deserializer: D) -> Result<T, D::Error> + where + T: DeserializeOwned, + D: Deserializer<'de>, + { + let j = String::deserialize(deserializer)?; + serde_json::from_str(&j).map_err(de::Error::custom) + } +} + +// Lots of stuff for testing the sizes of encrypted records, because the servers have +// certain limits in terms of max-POST sizes, forcing us to chunk uploads, but +// we need to calculate based on encrypted record size rather than the raw <T> size. +// +// This is a little cludgey but I couldn't think of another way to have easy deserialization +// without a bunch of wrapper types, while still only serializing a single time in the +// postqueue. +#[cfg(test)] +impl OutgoingEncryptedBso { + /// Return the length of the serialized payload. 
+ pub fn payload_serialized_len(&self) -> usize { + self.payload.serialized_len() + } + + // self.payload is private, but tests want to create funky things. + // XXX - test only, but test in another crate :( + //#[cfg(test)] + pub fn make_test_bso(ciphertext: String) -> Self { + Self { + envelope: OutgoingEnvelope { + id: "".into(), + sortindex: None, + ttl: None, + }, + payload: EncryptedPayload { + iv: "".into(), + hmac: "".into(), + ciphertext, + }, + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::bso::OutgoingEnvelope; + + #[test] + fn test_deserialize_enc() { + let serialized = r#"{ + "id": "1234", + "collection": "passwords", + "modified": 12344321.0, + "payload": "{\"IV\": \"aaaaa\", \"hmac\": \"bbbbb\", \"ciphertext\": \"ccccc\"}" + }"#; + let record: IncomingEncryptedBso = serde_json::from_str(serialized).unwrap(); + assert_eq!(&record.envelope.id, "1234"); + assert_eq!((record.envelope.modified.0 - 12_344_321_000).abs(), 0); + assert_eq!(record.envelope.sortindex, None); + assert_eq!(&record.payload.iv, "aaaaa"); + assert_eq!(&record.payload.hmac, "bbbbb"); + assert_eq!(&record.payload.ciphertext, "ccccc"); + } + + #[test] + fn test_deserialize_autofields() { + let serialized = r#"{ + "id": "1234", + "collection": "passwords", + "modified": 12344321.0, + "sortindex": 100, + "ttl": 99, + "payload": "{\"IV\": \"aaaaa\", \"hmac\": \"bbbbb\", \"ciphertext\": \"ccccc\"}" + }"#; + let record: IncomingEncryptedBso = serde_json::from_str(serialized).unwrap(); + assert_eq!(record.envelope.sortindex, Some(100)); + assert_eq!(record.envelope.ttl, Some(99)); + } + + #[test] + fn test_serialize_enc() { + let goal = r#"{"id":"1234","payload":"{\"IV\":\"aaaaa\",\"hmac\":\"bbbbb\",\"ciphertext\":\"ccccc\"}"}"#; + let record = OutgoingEncryptedBso { + envelope: OutgoingEnvelope { + id: "1234".into(), + ..Default::default() + }, + payload: EncryptedPayload { + iv: "aaaaa".into(), + hmac: "bbbbb".into(), + ciphertext: "ccccc".into(), + }, + }; + let actual = serde_json::to_string(&record).unwrap(); + assert_eq!(actual, goal); + + let val_str_payload: serde_json::Value = serde_json::from_str(goal).unwrap(); + assert_eq!( + val_str_payload["payload"].as_str().unwrap().len(), + record.payload.serialized_len() + ) + } +} diff --git a/third_party/rust/sync15/src/bso/mod.rs b/third_party/rust/sync15/src/bso/mod.rs new file mode 100644 index 0000000000..251c11fb3b --- /dev/null +++ b/third_party/rust/sync15/src/bso/mod.rs @@ -0,0 +1,204 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +/// This module defines our core "bso" abstractions. +/// In the terminology of this crate: +/// * "bso" is an acronym for "basic storage object" and used extensively in the sync server docs. +/// the record always has a well-defined "envelope" with metadata (eg, the ID of the record, +/// the server timestamp of the resource, etc) and a field called `payload`. +/// A bso is serialized to and from JSON. +/// * There's a "cleartext" bso: +/// ** The payload is a String, which itself is JSON encoded (ie, this string `payload` is +/// always double JSON encoded in a server record) +/// ** This supplies helper methods for working with the "content" (some arbitrary <T>) in the +/// payload. 
+/// * There's an "encrypted" bso +/// ** The payload is an [crate::enc_payload::EncryptedPayload] +/// ** Only clients use this; as soon as practical we decrypt and as late as practical we encrypt +/// to and from encrypted bsos. +/// ** The encrypted bsos etc are all in the [crypto] module and require the `crypto` feature. +/// +/// Let's look at some real-world examples: +/// # meta/global +/// A "bso" (ie, record with an "envelope" and a "payload" with a JSON string) - but the payload +/// is cleartext. +/// ```json +/// { +/// "id":"global", +/// "modified":1661564513.50, +/// "payload": "{\"syncID\":\"p1z5_oDdOfLF\",\"storageVersion\":5,\"engines\":{\"passwords\":{\"version\":1,\"syncID\":\"6Y6JJkB074cF\"} /* snip */},\"declined\":[]}" +/// }``` +/// +/// # encrypted bsos: +/// Encrypted BSOs are still a "bso" (ie, a record with a field names `payload` which is a string) +/// but the payload is in the form of an EncryptedPayload. +/// For example, crypto/keys: +/// ```json +/// { +/// "id":"keys", +/// "modified":1661564513.74, +/// "payload":"{\"IV\":\"snip-base-64==\",\"hmac\":\"snip-hex\",\"ciphertext\":\"snip-base64==\"}" +/// }``` +/// (Note that as described above, most code working with bsos *do not* use that `payload` +/// directly, but instead a decrypted cleartext bso. +/// +/// Note all collection responses are the same shape as `crypto/keys` - a `payload` field with a +/// JSON serialized EncryptedPayload, it's just that the final <T> content differs for each +/// collection (eg, tabs and bookmarks have quite different <T>s JSON-encoded in the +/// String payload.) +/// +/// For completeness, some other "non-BSO" records - no "id", "modified" or "payload" fields in +/// the response, just plain-old clear-text JSON. +/// # Example +/// ## `info/collections` +/// ```json +/// { +/// "bookmarks":1661564648.65, +/// "meta":1661564513.50, +/// "addons":1661564649.09, +/// "clients":1661564643.57, +/// ... +/// }``` +/// ## `info/configuration` +/// ```json +/// { +/// "max_post_bytes":2097152, +/// "max_post_records":100, +/// "max_record_payload_bytes":2097152, +/// ... +/// }``` +/// +/// Given our definitions above, these are not any kind of "bso", so are +/// not relevant to this module +use crate::{Guid, ServerTimestamp}; +use serde::{Deserialize, Serialize}; + +#[cfg(feature = "crypto")] +mod crypto; +#[cfg(feature = "crypto")] +pub use crypto::{IncomingEncryptedBso, OutgoingEncryptedBso}; + +mod content; + +// A feature for this would be ideal, but (a) the module is small and (b) it +// doesn't really fit the "features" model for sync15 to have a dev-dependency +// against itself but with a different feature set. +pub mod test_utils; + +/// An envelope for an incoming item. Envelopes carry all the metadata for +/// a Sync BSO record (`id`, `modified`, `sortindex`), *but not* the payload +/// itself. +#[derive(Debug, Clone, Deserialize)] +pub struct IncomingEnvelope { + /// The ID of the record. + pub id: Guid, + // If we don't give it a default, a small handful of tests fail. + // XXX - we should probably fix the tests and kill this? + #[serde(default = "ServerTimestamp::default")] + pub modified: ServerTimestamp, + pub sortindex: Option<i32>, + pub ttl: Option<u32>, +} + +/// An envelope for an outgoing item. This is conceptually identical to +/// [IncomingEnvelope], but omits fields that are only set by the server, +/// like `modified`. +#[derive(Debug, Default, Clone, Serialize)] +pub struct OutgoingEnvelope { + /// The ID of the record. 
+ pub id: Guid, + #[serde(skip_serializing_if = "Option::is_none")] + pub sortindex: Option<i32>, + #[serde(skip_serializing_if = "Option::is_none")] + pub ttl: Option<u32>, +} + +/// Allow an outgoing envelope to be constructed with just a guid when default +/// values for the other fields are OK. +impl From<Guid> for OutgoingEnvelope { + fn from(id: Guid) -> Self { + OutgoingEnvelope { + id, + ..Default::default() + } + } +} + +/// IncomingBso's can come from: +/// * Directly from the server (ie, some records aren't encrypted, such as meta/global) +/// * From environments where the encryption is done externally (eg, Rust syncing in Desktop +/// Firefox has the encryption/decryption done by Firefox and the cleartext BSOs are passed in. +/// * Read from the server as an EncryptedBso; see EncryptedBso description above. +#[derive(Deserialize, Debug)] +pub struct IncomingBso { + #[serde(flatten)] + pub envelope: IncomingEnvelope, + // payload is public for some edge-cases in some components, but in general, + // you should use into_content<> to get a record out of it. + pub payload: String, +} + +impl IncomingBso { + pub fn new(envelope: IncomingEnvelope, payload: String) -> Self { + Self { envelope, payload } + } +} + +#[derive(Serialize, Debug)] +pub struct OutgoingBso { + #[serde(flatten)] + pub envelope: OutgoingEnvelope, + // payload is public for some edge-cases in some components, but in general, + // you should use into_content<> to get a record out of it. + pub payload: String, +} + +impl OutgoingBso { + /// Most consumers will use `self.from_content` and `self.from_content_with_id` + /// but this exists for the few consumers for whom that doesn't make sense. + pub fn new<T: Serialize>( + envelope: OutgoingEnvelope, + val: &T, + ) -> Result<Self, serde_json::Error> { + Ok(Self { + envelope, + payload: serde_json::to_string(&val)?, + }) + } +} + +/// We also have the concept of "content", which helps work with a `T` which +/// is represented inside the payload. Real-world examples of a `T` include +/// Bookmarks or Tabs. +/// See the content module for the implementations. +/// +/// So this all flows together in the following way: +/// * Incoming encrypted data: +/// EncryptedIncomingBso -> IncomingBso -> [specific engine] -> IncomingContent<T> +/// * Incoming cleartext data: +/// IncomingBso -> IncomingContent<T> +/// (Note that incoming cleartext only happens for a few collections managed by +/// the sync client and never by specific engines - engine BSOs are always encryted) +/// * Outgoing encrypted data: +/// OutgoingBso (created in the engine) -> [this crate] -> EncryptedOutgoingBso +/// * Outgoing cleartext data: just an OutgoingBso with no conversions needed. + +/// [IncomingContent] is the result of converting an [IncomingBso] into +/// some <T> - it consumes the Bso, so you get the envelope, and the [IncomingKind] +/// which reflects the state of parsing the json. +#[derive(Debug)] +pub struct IncomingContent<T> { + pub envelope: IncomingEnvelope, + pub kind: IncomingKind<T>, +} + +/// The "kind" of incoming content after deserializing it. +pub enum IncomingKind<T> { + /// A good, live T. + Content(T), + /// A record that used to be a T but has been replaced with a tombstone. + Tombstone, + /// Either not JSON, or can't be made into a T. 
+ Malformed, +} diff --git a/third_party/rust/sync15/src/bso/test_utils.rs b/third_party/rust/sync15/src/bso/test_utils.rs new file mode 100644 index 0000000000..55735afda2 --- /dev/null +++ b/third_party/rust/sync15/src/bso/test_utils.rs @@ -0,0 +1,61 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +//! Utilities for tests to make IncomingBsos and Content from test data. +use super::{IncomingBso, IncomingEnvelope, OutgoingBso}; +use crate::{Guid, ServerTimestamp}; + +/// Tests often want an IncomingBso to test, and the easiest way is often to +/// create an OutgoingBso convert it back to an incoming. +impl OutgoingBso { + // These functions would ideally consume `self` and avoid the clones, but + // this is more convenient for some tests and the extra overhead doesn't + // really matter for tests. + /// When a test has an [OutgoingBso] and wants it as an [IncomingBso] + pub fn to_test_incoming(&self) -> IncomingBso { + self.to_test_incoming_ts(ServerTimestamp::default()) + } + + /// When a test has an [OutgoingBso] and wants it as an [IncomingBso] with a specific timestamp. + pub fn to_test_incoming_ts(&self, ts: ServerTimestamp) -> IncomingBso { + IncomingBso { + envelope: IncomingEnvelope { + id: self.envelope.id.clone(), + modified: ts, + sortindex: self.envelope.sortindex, + ttl: self.envelope.ttl, + }, + payload: self.payload.clone(), + } + } + + /// When a test has an [OutgoingBso] and wants it as an [IncomingBso] with a specific T. + pub fn to_test_incoming_t<T: for<'de> serde::Deserialize<'de>>(&self) -> T { + self.to_test_incoming().into_content().content().unwrap() + } +} + +/// Helpers to create an IncomingBso from some T +impl IncomingBso { + /// When a test has an T and wants it as an [IncomingBso] + pub fn from_test_content<T: serde::Serialize>(json: T) -> Self { + // Go via an OutgoingBso + OutgoingBso::from_content_with_id(json) + .unwrap() + .to_test_incoming() + } + + /// When a test has an T and wants it as an [IncomingBso] with a specific timestamp. + pub fn from_test_content_ts<T: serde::Serialize>(json: T, ts: ServerTimestamp) -> Self { + // Go via an OutgoingBso + OutgoingBso::from_content_with_id(json) + .unwrap() + .to_test_incoming_ts(ts) + } + + /// When a test wants a new incoming tombstone. + pub fn new_test_tombstone(guid: Guid) -> Self { + OutgoingBso::new_tombstone(guid.into()).to_test_incoming() + } +} diff --git a/third_party/rust/sync15/src/client/coll_state.rs b/third_party/rust/sync15/src/client/coll_state.rs new file mode 100644 index 0000000000..53e3071f92 --- /dev/null +++ b/third_party/rust/sync15/src/client/coll_state.rs @@ -0,0 +1,353 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +use super::request::InfoConfiguration; +use super::{CollectionKeys, GlobalState}; +use crate::engine::{CollSyncIds, EngineSyncAssociation, SyncEngine}; +use crate::error; +use crate::KeyBundle; +use crate::ServerTimestamp; + +/// Holds state for a collection necessary to perform a sync of it. Lives for the lifetime +/// of a single sync. 
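+/// (Assembled by `LocalCollStateMachine` below from the parsed `meta/global`,
+/// `crypto/keys` and `info/collections` server state.)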
+#[derive(Debug, Clone)] +pub struct CollState { + // Info about the server configuration/capabilities + pub config: InfoConfiguration, + // from meta/global, used for XIUS when we POST outgoing record based on this state. + pub last_modified: ServerTimestamp, + pub key: KeyBundle, +} + +/// This mini state-machine helps build a CollState +#[derive(Debug)] +pub enum LocalCollState { + /// The state is unknown, with the EngineSyncAssociation the collection + /// reports. + Unknown { assoc: EngineSyncAssociation }, + + /// The engine has been declined. This is a "terminal" state. + Declined, + + /// There's no such collection in meta/global. We could possibly update + /// meta/global, but currently all known collections are there by default, + /// so this is, basically, an error condition. + NoSuchCollection, + + /// Either the global or collection sync ID has changed - we will reset the engine. + SyncIdChanged { ids: CollSyncIds }, + + /// The collection is ready to sync. + Ready { coll_state: CollState }, +} + +pub struct LocalCollStateMachine<'state> { + global_state: &'state GlobalState, + root_key: &'state KeyBundle, +} + +impl<'state> LocalCollStateMachine<'state> { + fn advance( + &self, + from: LocalCollState, + engine: &dyn SyncEngine, + ) -> error::Result<LocalCollState> { + let name = &engine.collection_name().to_string(); + let meta_global = &self.global_state.global; + match from { + LocalCollState::Unknown { assoc } => { + if meta_global.declined.contains(name) { + return Ok(LocalCollState::Declined); + } + match meta_global.engines.get(name) { + Some(engine_meta) => match assoc { + EngineSyncAssociation::Disconnected => Ok(LocalCollState::SyncIdChanged { + ids: CollSyncIds { + global: meta_global.sync_id.clone(), + coll: engine_meta.sync_id.clone(), + }, + }), + EngineSyncAssociation::Connected(ref ids) + if ids.global == meta_global.sync_id + && ids.coll == engine_meta.sync_id => + { + // We are done - build the CollState + let coll_keys = CollectionKeys::from_encrypted_payload( + self.global_state.keys.clone(), + self.global_state.keys_timestamp, + self.root_key, + )?; + let key = coll_keys.key_for_collection(name).clone(); + let name = engine.collection_name(); + let config = self.global_state.config.clone(); + let last_modified = self + .global_state + .collections + .get(name.as_ref()) + .cloned() + .unwrap_or_default(); + let coll_state = CollState { + config, + last_modified, + key, + }; + Ok(LocalCollState::Ready { coll_state }) + } + _ => Ok(LocalCollState::SyncIdChanged { + ids: CollSyncIds { + global: meta_global.sync_id.clone(), + coll: engine_meta.sync_id.clone(), + }, + }), + }, + None => Ok(LocalCollState::NoSuchCollection), + } + } + + LocalCollState::Declined => unreachable!("can't advance from declined"), + + LocalCollState::NoSuchCollection => unreachable!("the collection is unknown"), + + LocalCollState::SyncIdChanged { ids } => { + let assoc = EngineSyncAssociation::Connected(ids); + log::info!("Resetting {} engine", engine.collection_name()); + engine.reset(&assoc)?; + Ok(LocalCollState::Unknown { assoc }) + } + + LocalCollState::Ready { .. } => unreachable!("can't advance from ready"), + } + } + + // A little whimsy - a portmanteau of far and fast + fn run_and_run_as_farst_as_you_can( + &mut self, + engine: &dyn SyncEngine, + ) -> error::Result<Option<CollState>> { + let mut s = LocalCollState::Unknown { + assoc: engine.get_sync_assoc()?, + }; + // This is a simple state machine and should never take more than + // 10 goes around. 
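+ // The typical path is Unknown -> Ready when the engine's sync IDs already
+ // match meta/global, or Unknown -> SyncIdChanged -> Unknown -> Ready when a
+ // reset was needed.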
+ let mut count = 0; + loop { + log::trace!("LocalCollState in {:?}", s); + match s { + LocalCollState::Ready { coll_state } => return Ok(Some(coll_state)), + LocalCollState::Declined | LocalCollState::NoSuchCollection => return Ok(None), + _ => { + count += 1; + if count > 10 { + log::warn!("LocalCollStateMachine appears to be looping"); + return Ok(None); + } + // should we have better loop detection? Our limit of 10 + // goes is probably OK for now, but not really ideal. + s = self.advance(s, engine)?; + } + }; + } + } + + pub fn get_state( + engine: &dyn SyncEngine, + global_state: &'state GlobalState, + root_key: &'state KeyBundle, + ) -> error::Result<Option<CollState>> { + let mut gingerbread_man = Self { + global_state, + root_key, + }; + gingerbread_man.run_and_run_as_farst_as_you_can(engine) + } +} + +#[cfg(test)] +mod tests { + use super::super::request::{InfoCollections, InfoConfiguration}; + use super::super::CollectionKeys; + use super::*; + use crate::engine::CollectionRequest; + use crate::engine::{IncomingChangeset, OutgoingChangeset}; + use crate::record_types::{MetaGlobalEngine, MetaGlobalRecord}; + use crate::{telemetry, CollectionName}; + use anyhow::Result; + use std::cell::{Cell, RefCell}; + use std::collections::HashMap; + use sync_guid::Guid; + + fn get_global_state(root_key: &KeyBundle) -> GlobalState { + let keys = CollectionKeys::new_random() + .unwrap() + .to_encrypted_payload(root_key) + .unwrap(); + GlobalState { + config: InfoConfiguration::default(), + collections: InfoCollections::new(HashMap::new()), + global: MetaGlobalRecord { + sync_id: "syncIDAAAAAA".into(), + storage_version: 5usize, + engines: vec![( + "bookmarks", + MetaGlobalEngine { + version: 1usize, + sync_id: "syncIDBBBBBB".into(), + }, + )] + .into_iter() + .map(|(key, value)| (key.to_owned(), value)) + .collect(), + declined: vec![], + }, + global_timestamp: ServerTimestamp::default(), + keys, + keys_timestamp: ServerTimestamp::default(), + } + } + + struct TestSyncEngine { + collection_name: &'static str, + assoc: Cell<EngineSyncAssociation>, + num_resets: RefCell<usize>, + } + + impl TestSyncEngine { + fn new(collection_name: &'static str, assoc: EngineSyncAssociation) -> Self { + Self { + collection_name, + assoc: Cell::new(assoc), + num_resets: RefCell::new(0), + } + } + fn get_num_resets(&self) -> usize { + *self.num_resets.borrow() + } + } + + impl SyncEngine for TestSyncEngine { + fn collection_name(&self) -> CollectionName { + self.collection_name.into() + } + + fn apply_incoming( + &self, + _inbound: Vec<IncomingChangeset>, + _telem: &mut telemetry::Engine, + ) -> Result<OutgoingChangeset> { + unreachable!("these tests shouldn't call these"); + } + + fn sync_finished( + &self, + _new_timestamp: ServerTimestamp, + _records_synced: Vec<Guid>, + ) -> Result<()> { + unreachable!("these tests shouldn't call these"); + } + + fn get_collection_requests( + &self, + _server_timestamp: ServerTimestamp, + ) -> Result<Vec<CollectionRequest>> { + unreachable!("these tests shouldn't call these"); + } + + fn get_sync_assoc(&self) -> Result<EngineSyncAssociation> { + Ok(self.assoc.replace(EngineSyncAssociation::Disconnected)) + } + + fn reset(&self, new_assoc: &EngineSyncAssociation) -> Result<()> { + self.assoc.replace(new_assoc.clone()); + *self.num_resets.borrow_mut() += 1; + Ok(()) + } + + fn wipe(&self) -> Result<()> { + unreachable!("these tests shouldn't call these"); + } + } + + #[test] + fn test_unknown() { + let root_key = KeyBundle::new_random().expect("should work"); + let gs = 
get_global_state(&root_key); + let engine = TestSyncEngine::new("unknown", EngineSyncAssociation::Disconnected); + let cs = LocalCollStateMachine::get_state(&engine, &gs, &root_key).expect("should work"); + assert!(cs.is_none(), "unknown collection name can't sync"); + assert_eq!(engine.get_num_resets(), 0); + } + + #[test] + fn test_known_no_state() { + let root_key = KeyBundle::new_random().expect("should work"); + let gs = get_global_state(&root_key); + let engine = TestSyncEngine::new("bookmarks", EngineSyncAssociation::Disconnected); + let cs = LocalCollStateMachine::get_state(&engine, &gs, &root_key).expect("should work"); + assert!(cs.is_some(), "collection can sync"); + assert_eq!( + engine.assoc.replace(EngineSyncAssociation::Disconnected), + EngineSyncAssociation::Connected(CollSyncIds { + global: "syncIDAAAAAA".into(), + coll: "syncIDBBBBBB".into(), + }) + ); + assert_eq!(engine.get_num_resets(), 1); + } + + #[test] + fn test_known_wrong_state() { + let root_key = KeyBundle::new_random().expect("should work"); + let gs = get_global_state(&root_key); + let engine = TestSyncEngine::new( + "bookmarks", + EngineSyncAssociation::Connected(CollSyncIds { + global: "syncIDXXXXXX".into(), + coll: "syncIDYYYYYY".into(), + }), + ); + let cs = LocalCollStateMachine::get_state(&engine, &gs, &root_key).expect("should work"); + assert!(cs.is_some(), "collection can sync"); + assert_eq!( + engine.assoc.replace(EngineSyncAssociation::Disconnected), + EngineSyncAssociation::Connected(CollSyncIds { + global: "syncIDAAAAAA".into(), + coll: "syncIDBBBBBB".into(), + }) + ); + assert_eq!(engine.get_num_resets(), 1); + } + + #[test] + fn test_known_good_state() { + let root_key = KeyBundle::new_random().expect("should work"); + let gs = get_global_state(&root_key); + let engine = TestSyncEngine::new( + "bookmarks", + EngineSyncAssociation::Connected(CollSyncIds { + global: "syncIDAAAAAA".into(), + coll: "syncIDBBBBBB".into(), + }), + ); + let cs = LocalCollStateMachine::get_state(&engine, &gs, &root_key).expect("should work"); + assert!(cs.is_some(), "collection can sync"); + assert_eq!(engine.get_num_resets(), 0); + } + + #[test] + fn test_declined() { + let root_key = KeyBundle::new_random().expect("should work"); + let mut gs = get_global_state(&root_key); + gs.global.declined.push("bookmarks".to_string()); + let engine = TestSyncEngine::new( + "bookmarks", + EngineSyncAssociation::Connected(CollSyncIds { + global: "syncIDAAAAAA".into(), + coll: "syncIDBBBBBB".into(), + }), + ); + let cs = LocalCollStateMachine::get_state(&engine, &gs, &root_key).expect("should work"); + assert!(cs.is_none(), "declined collection can sync"); + assert_eq!(engine.get_num_resets(), 0); + } +} diff --git a/third_party/rust/sync15/src/client/coll_update.rs b/third_party/rust/sync15/src/client/coll_update.rs new file mode 100644 index 0000000000..d09b1c2eb4 --- /dev/null +++ b/third_party/rust/sync15/src/client/coll_update.rs @@ -0,0 +1,127 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +use super::{ + request::{NormalResponseHandler, UploadInfo}, + CollState, Sync15ClientResponse, Sync15StorageClient, +}; +use crate::bso::OutgoingEncryptedBso; +use crate::engine::{CollectionRequest, IncomingChangeset, OutgoingChangeset}; +use crate::error::{self, Error, Result}; +use crate::{CollectionName, KeyBundle, ServerTimestamp}; + +pub fn encrypt_outgoing( + o: OutgoingChangeset, + key: &KeyBundle, +) -> Result<Vec<OutgoingEncryptedBso>> { + o.changes + .into_iter() + .map(|change| change.into_encrypted(key)) + .collect() +} + +pub fn fetch_incoming( + client: &Sync15StorageClient, + state: &CollState, + collection_request: CollectionRequest, +) -> Result<IncomingChangeset> { + let collection = collection_request.collection.clone(); + let (records, timestamp) = match client.get_encrypted_records(collection_request)? { + Sync15ClientResponse::Success { + record, + last_modified, + .. + } => (record, last_modified), + other => return Err(other.create_storage_error()), + }; + let mut result = IncomingChangeset::new(collection, timestamp); + result.changes.reserve(records.len()); + for record in records { + // if we see a HMAC error, we've made an explicit decision to + // NOT handle it here, but restart the global state machine. + // That should cause us to re-read crypto/keys and things should + // work (although if for some reason crypto/keys was updated but + // not all storage was wiped we are probably screwed.) + result.changes.push(record.into_decrypted(&state.key)?); + } + Ok(result) +} + +pub struct CollectionUpdate<'a> { + client: &'a Sync15StorageClient, + state: &'a CollState, + collection: CollectionName, + xius: ServerTimestamp, + to_update: Vec<OutgoingEncryptedBso>, + fully_atomic: bool, +} + +impl<'a> CollectionUpdate<'a> { + pub fn new( + client: &'a Sync15StorageClient, + state: &'a CollState, + collection: CollectionName, + xius: ServerTimestamp, + records: Vec<OutgoingEncryptedBso>, + fully_atomic: bool, + ) -> CollectionUpdate<'a> { + CollectionUpdate { + client, + state, + collection, + xius, + to_update: records, + fully_atomic, + } + } + + pub fn new_from_changeset( + client: &'a Sync15StorageClient, + state: &'a CollState, + changeset: OutgoingChangeset, + fully_atomic: bool, + ) -> Result<CollectionUpdate<'a>> { + let collection = changeset.collection.clone(); + let to_update = encrypt_outgoing(changeset, &state.key)?; + Ok(CollectionUpdate::new( + client, + state, + collection, + state.last_modified, + to_update, + fully_atomic, + )) + } + + /// Returns a list of the IDs that failed if allowed_dropped_records is true, otherwise + /// returns an empty vec. 
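+    // A minimal usage sketch of this type, kept as a comment; `client`, `coll_state`
+    // and `outgoing` are hypothetical values owned by the caller:
+    //
+    //     let update = CollectionUpdate::new_from_changeset(&client, &coll_state, outgoing, false)?;
+    //     let info = update.upload()?;
+    //     log::debug!("uploaded {}, dropped {}", info.successful_ids.len(), info.failed_ids.len());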
+ pub fn upload(self) -> error::Result<UploadInfo> { + let mut failed = vec![]; + let mut q = self.client.new_post_queue( + &self.collection, + &self.state.config, + self.xius, + NormalResponseHandler::new(!self.fully_atomic), + )?; + + for record in self.to_update.into_iter() { + let enqueued = q.enqueue(&record)?; + if !enqueued && self.fully_atomic { + return Err(Error::RecordTooLargeError); + } + } + + q.flush(true)?; + let mut info = q.completed_upload_info(); + info.failed_ids.append(&mut failed); + if self.fully_atomic { + assert_eq!( + info.failed_ids.len(), + 0, + "Bug: Should have failed by now if we aren't allowing dropped records" + ); + } + Ok(info) + } +} diff --git a/third_party/rust/sync15/src/client/collection_keys.rs b/third_party/rust/sync15/src/client/collection_keys.rs new file mode 100644 index 0000000000..f51894f756 --- /dev/null +++ b/third_party/rust/sync15/src/client/collection_keys.rs @@ -0,0 +1,61 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +use crate::error::Result; +use crate::record_types::CryptoKeysRecord; +use crate::{EncryptedPayload, KeyBundle, ServerTimestamp}; +use std::collections::HashMap; + +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct CollectionKeys { + pub timestamp: ServerTimestamp, + pub default: KeyBundle, + pub collections: HashMap<String, KeyBundle>, +} + +impl CollectionKeys { + pub fn new_random() -> Result<CollectionKeys> { + let default = KeyBundle::new_random()?; + Ok(CollectionKeys { + timestamp: ServerTimestamp(0), + default, + collections: HashMap::new(), + }) + } + + pub fn from_encrypted_payload( + record: EncryptedPayload, + timestamp: ServerTimestamp, + root_key: &KeyBundle, + ) -> Result<CollectionKeys> { + let keys: CryptoKeysRecord = record.decrypt_into(root_key)?; + Ok(CollectionKeys { + timestamp, + default: KeyBundle::from_base64(&keys.default[0], &keys.default[1])?, + collections: keys + .collections + .into_iter() + .map(|kv| Ok((kv.0, KeyBundle::from_base64(&kv.1[0], &kv.1[1])?))) + .collect::<Result<HashMap<String, KeyBundle>>>()?, + }) + } + + pub fn to_encrypted_payload(&self, root_key: &KeyBundle) -> Result<EncryptedPayload> { + let record = CryptoKeysRecord { + id: "keys".into(), + collection: "crypto".into(), + default: self.default.to_b64_array(), + collections: self + .collections + .iter() + .map(|kv| (kv.0.clone(), kv.1.to_b64_array())) + .collect(), + }; + EncryptedPayload::from_cleartext_payload(root_key, &record) + } + + pub fn key_for_collection<'a>(&'a self, collection: &str) -> &'a KeyBundle { + self.collections.get(collection).unwrap_or(&self.default) + } +} diff --git a/third_party/rust/sync15/src/client/mod.rs b/third_party/rust/sync15/src/client/mod.rs new file mode 100644 index 0000000000..c49486bfe6 --- /dev/null +++ b/third_party/rust/sync15/src/client/mod.rs @@ -0,0 +1,39 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +//! A module for everything needed to be a "sync client" - ie, a device which +//! can perform a full sync of any number of collections, including managing +//! the server state. +//! +//! In general, the client is responsible for all communication with the sync server, +//! 
including ensuring the state is correct, and encrypting/decrypting all records +//! to and from the server. However, the actual syncing of the collections is +//! delegated to an external [crate::engine](Sync Engine). +//! +//! One exception is that the "sync client" owns one sync engine - the +//! [crate::clients_engine], which is managed internally. +mod coll_state; +mod coll_update; +mod collection_keys; +mod request; +mod state; +mod status; +mod storage_client; +mod sync; +mod sync_multiple; +mod token; +mod util; + +pub(crate) use coll_state::{CollState, LocalCollStateMachine}; +pub(crate) use coll_update::{fetch_incoming, CollectionUpdate}; +pub(crate) use collection_keys::CollectionKeys; +pub(crate) use request::InfoConfiguration; +pub(crate) use state::GlobalState; +pub use status::{ServiceStatus, SyncResult}; +pub use storage_client::{ + SetupStorageClient, Sync15ClientResponse, Sync15StorageClient, Sync15StorageClientInit, +}; +pub use sync_multiple::{ + sync_multiple, sync_multiple_with_command_processor, MemoryCachedState, SyncRequestInfo, +}; diff --git a/third_party/rust/sync15/src/client/request.rs b/third_party/rust/sync15/src/client/request.rs new file mode 100644 index 0000000000..c69b630c8d --- /dev/null +++ b/third_party/rust/sync15/src/client/request.rs @@ -0,0 +1,1199 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +use super::storage_client::Sync15ClientResponse; +use crate::bso::OutgoingEncryptedBso; +use crate::error::{self, Error as ErrorKind, Result}; +use crate::ServerTimestamp; +use serde_derive::*; +use std::collections::HashMap; +use std::default::Default; +use std::ops::Deref; +use sync_guid::Guid; +use viaduct::status_codes; + +/// Manages a pair of (byte, count) limits for a PostQueue, such as +/// (max_post_bytes, max_post_records) or (max_total_bytes, max_total_records). +#[derive(Debug, Clone)] +struct LimitTracker { + max_bytes: usize, + max_records: usize, + cur_bytes: usize, + cur_records: usize, +} + +impl LimitTracker { + pub fn new(max_bytes: usize, max_records: usize) -> LimitTracker { + LimitTracker { + max_bytes, + max_records, + cur_bytes: 0, + cur_records: 0, + } + } + + pub fn clear(&mut self) { + self.cur_records = 0; + self.cur_bytes = 0; + } + + pub fn can_add_record(&self, payload_size: usize) -> bool { + // Desktop does the cur_bytes check as exclusive, but we shouldn't see any servers that + // don't have https://github.com/mozilla-services/server-syncstorage/issues/73 + self.cur_records < self.max_records && self.cur_bytes + payload_size <= self.max_bytes + } + + pub fn can_never_add(&self, record_size: usize) -> bool { + record_size >= self.max_bytes + } + + pub fn record_added(&mut self, record_size: usize) { + assert!( + self.can_add_record(record_size), + "LimitTracker::record_added caller must check can_add_record" + ); + self.cur_records += 1; + self.cur_bytes += record_size; + } +} + +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct InfoConfiguration { + /// The maximum size in bytes of the overall HTTP request body that will be accepted by the + /// server. + #[serde(default = "default_max_request_bytes")] + pub max_request_bytes: usize, + + /// The maximum number of records that can be uploaded to a collection in a single POST request. 
+ #[serde(default = "usize::max_value")] + pub max_post_records: usize, + + /// The maximum combined size in bytes of the record payloads that can be uploaded to a + /// collection in a single POST request. + #[serde(default = "usize::max_value")] + pub max_post_bytes: usize, + + /// The maximum total number of records that can be uploaded to a collection as part of a + /// batched upload. + #[serde(default = "usize::max_value")] + pub max_total_records: usize, + + /// The maximum total combined size in bytes of the record payloads that can be uploaded to a + /// collection as part of a batched upload. + #[serde(default = "usize::max_value")] + pub max_total_bytes: usize, + + /// The maximum size of an individual BSO payload, in bytes. + #[serde(default = "default_max_record_payload_bytes")] + pub max_record_payload_bytes: usize, +} + +// This is annoying but seems to be the only way to do it... +fn default_max_request_bytes() -> usize { + 260 * 1024 +} +fn default_max_record_payload_bytes() -> usize { + 256 * 1024 +} + +impl Default for InfoConfiguration { + #[inline] + fn default() -> InfoConfiguration { + InfoConfiguration { + max_request_bytes: default_max_request_bytes(), + max_record_payload_bytes: default_max_record_payload_bytes(), + max_post_records: usize::max_value(), + max_post_bytes: usize::max_value(), + max_total_records: usize::max_value(), + max_total_bytes: usize::max_value(), + } + } +} + +#[derive(Clone, Debug, Default, Deserialize, Serialize)] +pub struct InfoCollections(pub(crate) HashMap<String, ServerTimestamp>); + +impl InfoCollections { + pub fn new(collections: HashMap<String, ServerTimestamp>) -> InfoCollections { + InfoCollections(collections) + } +} + +impl Deref for InfoCollections { + type Target = HashMap<String, ServerTimestamp>; + + fn deref(&self) -> &HashMap<String, ServerTimestamp> { + &self.0 + } +} + +#[derive(Debug, Clone, Deserialize)] +pub struct UploadResult { + batch: Option<String>, + /// Maps record id => why failed + #[serde(default = "HashMap::new")] + pub failed: HashMap<Guid, String>, + /// Vec of ids + #[serde(default = "Vec::new")] + pub success: Vec<Guid>, +} + +pub type PostResponse = Sync15ClientResponse<UploadResult>; + +#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)] +pub enum BatchState { + Unsupported, + NoBatch, + InBatch(String), +} + +#[derive(Debug)] +pub struct PostQueue<Post, OnResponse> { + poster: Post, + on_response: OnResponse, + post_limits: LimitTracker, + batch_limits: LimitTracker, + max_payload_bytes: usize, + max_request_bytes: usize, + queued: Vec<u8>, + batch: BatchState, + last_modified: ServerTimestamp, +} + +pub trait BatchPoster { + /// Note: Last argument (reference to the batch poster) is provided for the purposes of testing + /// Important: Poster should not report non-success HTTP statuses as errors!! + fn post<P, O>( + &self, + body: Vec<u8>, + xius: ServerTimestamp, + batch: Option<String>, + commit: bool, + queue: &PostQueue<P, O>, + ) -> Result<PostResponse>; +} + +// We don't just use a FnMut here since we want to override it in mocking for RefCell<TestType>, +// which we can't do for FnMut since neither FnMut nor RefCell are defined here. Also, this +// is somewhat better for documentation. 
+pub trait PostResponseHandler { + fn handle_response(&mut self, r: PostResponse, mid_batch: bool) -> Result<()>; +} + +#[derive(Debug, Clone)] +pub(crate) struct NormalResponseHandler { + pub failed_ids: Vec<Guid>, + pub successful_ids: Vec<Guid>, + pub allow_failed: bool, + pub pending_failed: Vec<Guid>, + pub pending_success: Vec<Guid>, +} + +impl NormalResponseHandler { + pub fn new(allow_failed: bool) -> NormalResponseHandler { + NormalResponseHandler { + failed_ids: vec![], + successful_ids: vec![], + pending_failed: vec![], + pending_success: vec![], + allow_failed, + } + } +} + +impl PostResponseHandler for NormalResponseHandler { + fn handle_response(&mut self, r: PostResponse, mid_batch: bool) -> error::Result<()> { + match r { + Sync15ClientResponse::Success { record, .. } => { + if !record.failed.is_empty() && !self.allow_failed { + return Err(ErrorKind::RecordUploadFailed); + } + for id in record.success.iter() { + self.pending_success.push(id.clone()); + } + for kv in record.failed.iter() { + self.pending_failed.push(kv.0.clone()); + } + if !mid_batch { + self.successful_ids.append(&mut self.pending_success); + self.failed_ids.append(&mut self.pending_failed); + } + Ok(()) + } + _ => Err(r.create_storage_error()), + } + } +} + +impl<Poster, OnResponse> PostQueue<Poster, OnResponse> +where + Poster: BatchPoster, + OnResponse: PostResponseHandler, +{ + pub fn new( + config: &InfoConfiguration, + ts: ServerTimestamp, + poster: Poster, + on_response: OnResponse, + ) -> PostQueue<Poster, OnResponse> { + PostQueue { + poster, + on_response, + last_modified: ts, + post_limits: LimitTracker::new(config.max_post_bytes, config.max_post_records), + batch_limits: LimitTracker::new(config.max_total_bytes, config.max_total_records), + batch: BatchState::NoBatch, + max_payload_bytes: config.max_record_payload_bytes, + max_request_bytes: config.max_request_bytes, + queued: Vec::new(), + } + } + + #[inline] + fn in_batch(&self) -> bool { + !matches!(&self.batch, BatchState::Unsupported | BatchState::NoBatch) + } + + pub fn enqueue(&mut self, record: &OutgoingEncryptedBso) -> Result<bool> { + let payload_length = record.serialized_payload_len(); + + if self.post_limits.can_never_add(payload_length) + || self.batch_limits.can_never_add(payload_length) + || payload_length >= self.max_payload_bytes + { + log::warn!( + "Single record too large to submit to server ({} b)", + payload_length + ); + return Ok(false); + } + + // Write directly into `queued` but undo if necessary (the vast majority of the time + // it won't be necessary). If we hit a problem we need to undo that, but the only error + // case we have to worry about right now is in flush() + let item_start = self.queued.len(); + + // This is conservative but can't hurt. + self.queued.reserve(payload_length + 2); + + // Either the first character in an array, or a comma separating + // it from the previous item. + let c = if self.queued.is_empty() { b'[' } else { b',' }; + self.queued.push(c); + + // This unwrap is fine, since serde_json's failure case is HashMaps that have non-object + // keys, which is impossible. If you decide to change this part, you *need* to call + // `self.queued.truncate(item_start)` here in the failure case! + serde_json::to_writer(&mut self.queued, &record).unwrap(); + + let item_end = self.queued.len(); + + debug_assert!( + item_end >= payload_length, + "EncryptedPayload::serialized_len is bugged" + ); + + // The + 1 is only relevant for the final record, which will have a trailing ']'. 
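+        // Worked example with illustrative numbers: if the record serializes to 100 bytes,
+        // then together with the leading '[' or ',' pushed above, item_end - item_start is
+        // 101 and item_len below comes out at 102; the extra byte stands in for the trailing
+        // ']' that flush() appends, so the check against max_request_bytes sees the full
+        // on-the-wire size.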
+ let item_len = item_end - item_start + 1; + + if item_len >= self.max_request_bytes { + self.queued.truncate(item_start); + log::warn!( + "Single record too large to submit to server ({} b)", + item_len + ); + return Ok(false); + } + + let can_post_record = self.post_limits.can_add_record(payload_length); + let can_batch_record = self.batch_limits.can_add_record(payload_length); + let can_send_record = self.queued.len() < self.max_request_bytes; + + if !can_post_record || !can_send_record || !can_batch_record { + log::debug!( + "PostQueue flushing! (can_post = {}, can_send = {}, can_batch = {})", + can_post_record, + can_send_record, + can_batch_record + ); + // "unwrite" the record. + self.queued.truncate(item_start); + // Flush whatever we have queued. + self.flush(!can_batch_record)?; + // And write it again. + let c = if self.queued.is_empty() { b'[' } else { b',' }; + self.queued.push(c); + serde_json::to_writer(&mut self.queued, &record).unwrap(); + } + + self.post_limits.record_added(payload_length); + self.batch_limits.record_added(payload_length); + + Ok(true) + } + + pub fn flush(&mut self, want_commit: bool) -> Result<()> { + if self.queued.is_empty() { + assert!( + !self.in_batch(), + "Bug: Somehow we're in a batch but have no queued records" + ); + // Nothing to do! + return Ok(()); + } + + self.queued.push(b']'); + let batch_id = match &self.batch { + // Not the first post and we know we have no batch semantics. + BatchState::Unsupported => None, + // First commit in possible batch + BatchState::NoBatch => Some("true".into()), + // In a batch and we have a batch id. + BatchState::InBatch(ref s) => Some(s.clone()), + }; + + log::info!( + "Posting {} records of {} bytes", + self.post_limits.cur_records, + self.queued.len() + ); + + let is_commit = want_commit && batch_id.is_some(); + // Weird syntax for calling a function object that is a property. + let resp_or_error = self.poster.post( + self.queued.clone(), + self.last_modified, + batch_id, + is_commit, + self, + ); + + self.queued.truncate(0); + + if want_commit || self.batch == BatchState::Unsupported { + self.batch_limits.clear(); + } + self.post_limits.clear(); + + let resp = resp_or_error?; + + let (status, last_modified, record) = match resp { + Sync15ClientResponse::Success { + status, + last_modified, + ref record, + .. + } => (status, last_modified, record), + _ => { + self.on_response.handle_response(resp, !want_commit)?; + // on_response() should always fail! + unreachable!(); + } + }; + + if want_commit || self.batch == BatchState::Unsupported { + self.last_modified = last_modified; + } + + if want_commit { + log::debug!("Committed batch {:?}", self.batch); + self.batch = BatchState::NoBatch; + self.on_response.handle_response(resp, false)?; + return Ok(()); + } + + if status != status_codes::ACCEPTED { + if self.in_batch() { + return Err(ErrorKind::ServerBatchProblem( + "Server responded non-202 success code while a batch was in progress", + )); + } + self.last_modified = last_modified; + self.batch = BatchState::Unsupported; + self.batch_limits.clear(); + self.on_response.handle_response(resp, false)?; + return Ok(()); + } + + let batch_id = record + .batch + .as_ref() + .ok_or({ + ErrorKind::ServerBatchProblem("Invalid server response: 202 without a batch ID") + })? 
+ .clone(); + + match &self.batch { + BatchState::Unsupported => { + log::warn!("Server changed its mind about supporting batching mid-batch..."); + } + + BatchState::InBatch(ref cur_id) => { + if cur_id != &batch_id { + return Err(ErrorKind::ServerBatchProblem( + "Invalid server response: 202 without a batch ID", + )); + } + } + _ => {} + } + + // Can't change this in match arms without NLL + self.batch = BatchState::InBatch(batch_id); + self.last_modified = last_modified; + + self.on_response.handle_response(resp, true)?; + + Ok(()) + } +} + +#[derive(Clone)] +pub struct UploadInfo { + pub successful_ids: Vec<Guid>, + pub failed_ids: Vec<Guid>, + pub modified_timestamp: ServerTimestamp, +} + +impl<Poster> PostQueue<Poster, NormalResponseHandler> { + // TODO: should take by move + pub fn completed_upload_info(&mut self) -> UploadInfo { + let mut result = UploadInfo { + successful_ids: Vec::with_capacity(self.on_response.successful_ids.len()), + failed_ids: Vec::with_capacity( + self.on_response.failed_ids.len() + + self.on_response.pending_failed.len() + + self.on_response.pending_success.len(), + ), + modified_timestamp: self.last_modified, + }; + + result + .successful_ids + .append(&mut self.on_response.successful_ids); + + result.failed_ids.append(&mut self.on_response.failed_ids); + result + .failed_ids + .append(&mut self.on_response.pending_failed); + result + .failed_ids + .append(&mut self.on_response.pending_success); + + result + } +} + +#[cfg(test)] +mod test { + use super::*; + use crate::bso::{IncomingEncryptedBso, OutgoingEncryptedBso, OutgoingEnvelope}; + use crate::EncryptedPayload; + use lazy_static::lazy_static; + use std::cell::RefCell; + use std::collections::VecDeque; + use std::rc::Rc; + + #[derive(Debug, Clone)] + struct PostedData { + body: String, + _xius: ServerTimestamp, + batch: Option<String>, + commit: bool, + payload_bytes: usize, + records: usize, + } + + impl PostedData { + fn records_as_json(&self) -> Vec<serde_json::Value> { + let values = + serde_json::from_str::<serde_json::Value>(&self.body).expect("Posted invalid json"); + // Check that they actually deserialize as what we want + let records_or_err = + serde_json::from_value::<Vec<IncomingEncryptedBso>>(values.clone()); + records_or_err.expect("Failed to deserialize data"); + serde_json::from_value(values).unwrap() + } + } + + #[derive(Debug, Clone)] + struct BatchInfo { + id: Option<String>, + posts: Vec<PostedData>, + bytes: usize, + records: usize, + } + + #[derive(Debug, Clone)] + struct TestPoster { + all_posts: Vec<PostedData>, + responses: VecDeque<PostResponse>, + batches: Vec<BatchInfo>, + cur_batch: Option<BatchInfo>, + cfg: InfoConfiguration, + } + + type TestPosterRef = Rc<RefCell<TestPoster>>; + impl TestPoster { + pub fn new<T>(cfg: &InfoConfiguration, responses: T) -> TestPosterRef + where + T: Into<VecDeque<PostResponse>>, + { + Rc::new(RefCell::new(TestPoster { + all_posts: vec![], + responses: responses.into(), + batches: vec![], + cur_batch: None, + cfg: cfg.clone(), + })) + } + // Adds &mut + fn do_post<T, O>( + &mut self, + body: &[u8], + xius: ServerTimestamp, + batch: Option<String>, + commit: bool, + queue: &PostQueue<T, O>, + ) -> Sync15ClientResponse<UploadResult> { + let mut post = PostedData { + body: String::from_utf8(body.into()).expect("Posted invalid utf8..."), + batch: batch.clone(), + _xius: xius, + commit, + payload_bytes: 0, + records: 0, + }; + + assert!(body.len() <= self.cfg.max_request_bytes); + + let (num_records, record_payload_bytes) = { + let recs = 
post.records_as_json(); + assert!(recs.len() <= self.cfg.max_post_records); + assert!(recs.len() <= self.cfg.max_total_records); + let payload_bytes: usize = recs + .iter() + .map(|r| { + let len = r["payload"] + .as_str() + .expect("Non string payload property") + .len(); + assert!(len <= self.cfg.max_record_payload_bytes); + len + }) + .sum(); + assert!(payload_bytes <= self.cfg.max_post_bytes); + assert!(payload_bytes <= self.cfg.max_total_bytes); + + assert_eq!(queue.post_limits.cur_bytes, payload_bytes); + assert_eq!(queue.post_limits.cur_records, recs.len()); + (recs.len(), payload_bytes) + }; + post.payload_bytes = record_payload_bytes; + post.records = num_records; + + self.all_posts.push(post.clone()); + let response = self.responses.pop_front().unwrap(); + + let record = match response { + Sync15ClientResponse::Success { ref record, .. } => record, + _ => { + panic!("only success codes are used in this test"); + } + }; + + if self.cur_batch.is_none() { + assert!( + batch.is_none() || batch == Some("true".into()), + "We shouldn't be in a batch now" + ); + self.cur_batch = Some(BatchInfo { + id: record.batch.clone(), + posts: vec![], + records: 0, + bytes: 0, + }); + } else { + assert_eq!( + batch, + self.cur_batch.as_ref().unwrap().id, + "We're in a batch but got the wrong batch id" + ); + } + + { + let batch = self.cur_batch.as_mut().unwrap(); + batch.posts.push(post); + batch.records += num_records; + batch.bytes += record_payload_bytes; + + assert!(batch.bytes <= self.cfg.max_total_bytes); + assert!(batch.records <= self.cfg.max_total_records); + + assert_eq!(batch.records, queue.batch_limits.cur_records); + assert_eq!(batch.bytes, queue.batch_limits.cur_bytes); + } + + if commit || record.batch.is_none() { + let batch = self.cur_batch.take().unwrap(); + self.batches.push(batch); + } + + response + } + + fn do_handle_response(&mut self, _: PostResponse, mid_batch: bool) { + assert_eq!(mid_batch, self.cur_batch.is_some()); + } + } + impl BatchPoster for TestPosterRef { + fn post<T, O>( + &self, + body: Vec<u8>, + xius: ServerTimestamp, + batch: Option<String>, + commit: bool, + queue: &PostQueue<T, O>, + ) -> Result<PostResponse> { + Ok(self.borrow_mut().do_post(&body, xius, batch, commit, queue)) + } + } + + impl PostResponseHandler for TestPosterRef { + fn handle_response(&mut self, r: PostResponse, mid_batch: bool) -> Result<()> { + self.borrow_mut().do_handle_response(r, mid_batch); + Ok(()) + } + } + + type MockedPostQueue = PostQueue<TestPosterRef, TestPosterRef>; + + fn pq_test_setup( + cfg: InfoConfiguration, + lm: i64, + resps: Vec<PostResponse>, + ) -> (MockedPostQueue, TestPosterRef) { + let tester = TestPoster::new(&cfg, resps); + let pq = PostQueue::new(&cfg, ServerTimestamp(lm), tester.clone(), tester.clone()); + (pq, tester) + } + + fn fake_response<'a, T: Into<Option<&'a str>>>(status: u16, lm: i64, batch: T) -> PostResponse { + assert!(status_codes::is_success_code(status)); + Sync15ClientResponse::Success { + status, + last_modified: ServerTimestamp(lm), + record: UploadResult { + batch: batch.into().map(Into::into), + failed: HashMap::new(), + success: vec![], + }, + route: "test/path".into(), + } + } + + lazy_static! 
{ + // ~40b + static ref PAYLOAD_OVERHEAD: usize = { + let payload = EncryptedPayload { + iv: "".into(), + hmac: "".into(), + ciphertext: "".into() + }; + serde_json::to_string(&payload).unwrap().len() + }; + // ~80b + static ref TOTAL_RECORD_OVERHEAD: usize = { + let val = serde_json::to_value(OutgoingEncryptedBso::new(OutgoingEnvelope { + id: "".into(), + sortindex: None, + ttl: None, + }, + EncryptedPayload { + iv: "".into(), + hmac: "".into(), + ciphertext: "".into() + }, + )).unwrap(); + serde_json::to_string(&val).unwrap().len() + }; + // There's some subtlety in how we calulate this having to do with the fact that + // the quotes in the payload are escaped but the escape chars count to the request len + // and *not* to the payload len (the payload len check happens after json parsing the + // top level object). + static ref NON_PAYLOAD_OVERHEAD: usize = { + *TOTAL_RECORD_OVERHEAD - *PAYLOAD_OVERHEAD + }; + } + + // Actual record size (for max_request_len) will be larger by some amount + fn make_record(payload_size: usize) -> OutgoingEncryptedBso { + assert!(payload_size > *PAYLOAD_OVERHEAD); + let ciphertext_len = payload_size - *PAYLOAD_OVERHEAD; + OutgoingEncryptedBso::new( + OutgoingEnvelope { + id: "".into(), + sortindex: None, + ttl: None, + }, + EncryptedPayload { + iv: "".into(), + hmac: "".into(), + ciphertext: "x".repeat(ciphertext_len), + }, + ) + } + + fn request_bytes_for_payloads(payloads: &[usize]) -> usize { + 1 + payloads + .iter() + .map(|&size| size + 1 + *NON_PAYLOAD_OVERHEAD) + .sum::<usize>() + } + + #[test] + fn test_pq_basic() { + let cfg = InfoConfiguration { + max_request_bytes: 1000, + max_record_payload_bytes: 1000, + ..InfoConfiguration::default() + }; + let time = 11_111_111_000; + let (mut pq, tester) = pq_test_setup( + cfg, + time, + vec![fake_response(status_codes::OK, time + 100_000, None)], + ); + + pq.enqueue(&make_record(100)).unwrap(); + pq.flush(true).unwrap(); + + let t = tester.borrow(); + assert!(t.cur_batch.is_none()); + assert_eq!(t.all_posts.len(), 1); + assert_eq!(t.batches.len(), 1); + assert_eq!(t.batches[0].posts.len(), 1); + assert_eq!(t.batches[0].records, 1); + assert_eq!(t.batches[0].bytes, 100); + assert_eq!( + t.batches[0].posts[0].body.len(), + request_bytes_for_payloads(&[100]) + ); + } + + #[test] + fn test_pq_max_request_bytes_no_batch() { + let cfg = InfoConfiguration { + max_request_bytes: 250, + ..InfoConfiguration::default() + }; + let time = 11_111_111_000; + let (mut pq, tester) = pq_test_setup( + cfg, + time, + vec![ + fake_response(status_codes::OK, time + 100_000, None), + fake_response(status_codes::OK, time + 200_000, None), + ], + ); + + // Note that the total record overhead is around 85 bytes + let payload_size = 100 - *NON_PAYLOAD_OVERHEAD; + pq.enqueue(&make_record(payload_size)).unwrap(); // total size == 102; [r] + pq.enqueue(&make_record(payload_size)).unwrap(); // total size == 203; [r,r] + pq.enqueue(&make_record(payload_size)).unwrap(); // too big, 2nd post. 
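+        // To spell out the arithmetic: each record above costs payload_size plus
+        // *NON_PAYLOAD_OVERHEAD plus a separator, i.e. 101 request bytes, plus one byte for
+        // the opening '['. Two records therefore fit in 203 bytes, but a third would push the
+        // request to 304, over max_request_bytes == 250, which is why it lands in a second POST.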
+ pq.flush(true).unwrap(); + + let t = tester.borrow(); + assert!(t.cur_batch.is_none()); + assert_eq!(t.all_posts.len(), 2); + assert_eq!(t.batches.len(), 2); + assert_eq!(t.batches[0].posts.len(), 1); + assert_eq!(t.batches[0].records, 2); + assert_eq!(t.batches[0].bytes, payload_size * 2); + assert_eq!(t.batches[0].posts[0].batch, Some("true".into())); + assert_eq!( + t.batches[0].posts[0].body.len(), + request_bytes_for_payloads(&[payload_size, payload_size]) + ); + + assert_eq!(t.batches[1].posts.len(), 1); + assert_eq!(t.batches[1].records, 1); + assert_eq!(t.batches[1].bytes, payload_size); + // We know at this point that the server does not support batching. + assert_eq!(t.batches[1].posts[0].batch, None); + assert!(!t.batches[1].posts[0].commit); + assert_eq!( + t.batches[1].posts[0].body.len(), + request_bytes_for_payloads(&[payload_size]) + ); + } + + #[test] + fn test_pq_max_record_payload_bytes_no_batch() { + let cfg = InfoConfiguration { + max_record_payload_bytes: 150, + max_request_bytes: 350, + ..InfoConfiguration::default() + }; + let time = 11_111_111_000; + let (mut pq, tester) = pq_test_setup( + cfg, + time, + vec![ + fake_response(status_codes::OK, time + 100_000, None), + fake_response(status_codes::OK, time + 200_000, None), + ], + ); + + // Note that the total record overhead is around 85 bytes + let payload_size = 100 - *NON_PAYLOAD_OVERHEAD; + pq.enqueue(&make_record(payload_size)).unwrap(); // total size == 102; [r] + let enqueued = pq.enqueue(&make_record(151)).unwrap(); // still 102 + assert!(!enqueued, "Should not have fit"); + pq.enqueue(&make_record(payload_size)).unwrap(); + pq.flush(true).unwrap(); + + let t = tester.borrow(); + assert!(t.cur_batch.is_none()); + assert_eq!(t.all_posts.len(), 1); + assert_eq!(t.batches.len(), 1); + assert_eq!(t.batches[0].posts.len(), 1); + assert_eq!(t.batches[0].records, 2); + assert_eq!(t.batches[0].bytes, payload_size * 2); + assert_eq!( + t.batches[0].posts[0].body.len(), + request_bytes_for_payloads(&[payload_size, payload_size]) + ); + } + + #[test] + fn test_pq_single_batch() { + let cfg = InfoConfiguration::default(); + let time = 11_111_111_000; + let (mut pq, tester) = pq_test_setup( + cfg, + time, + vec![fake_response( + status_codes::ACCEPTED, + time + 100_000, + Some("1234"), + )], + ); + + let payload_size = 100 - *NON_PAYLOAD_OVERHEAD; + pq.enqueue(&make_record(payload_size)).unwrap(); + pq.enqueue(&make_record(payload_size)).unwrap(); + pq.enqueue(&make_record(payload_size)).unwrap(); + pq.flush(true).unwrap(); + + let t = tester.borrow(); + assert!(t.cur_batch.is_none()); + assert_eq!(t.all_posts.len(), 1); + assert_eq!(t.batches.len(), 1); + assert_eq!(t.batches[0].id.as_ref().unwrap(), "1234"); + assert_eq!(t.batches[0].posts.len(), 1); + assert_eq!(t.batches[0].records, 3); + assert_eq!(t.batches[0].bytes, payload_size * 3); + assert!(t.batches[0].posts[0].commit); + assert_eq!( + t.batches[0].posts[0].body.len(), + request_bytes_for_payloads(&[payload_size, payload_size, payload_size]) + ); + } + + #[test] + fn test_pq_multi_post_batch_bytes() { + let cfg = InfoConfiguration { + max_post_bytes: 200, + ..InfoConfiguration::default() + }; + let time = 11_111_111_000; + let (mut pq, tester) = pq_test_setup( + cfg, + time, + vec![ + fake_response(status_codes::ACCEPTED, time, Some("1234")), + fake_response(status_codes::ACCEPTED, time + 100_000, Some("1234")), + ], + ); + + pq.enqueue(&make_record(100)).unwrap(); + pq.enqueue(&make_record(100)).unwrap(); + // POST + pq.enqueue(&make_record(100)).unwrap(); 
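+        // With max_post_bytes == 200 the two 100-byte payloads above fill the first post, so
+        // the third enqueue flushes it (batch opened with "true", the mock answers with id
+        // "1234"); flush(true) below then uploads the remaining record with commit == true,
+        // which is exactly what the assertions check.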
+ pq.flush(true).unwrap(); // COMMIT + + let t = tester.borrow(); + assert!(t.cur_batch.is_none()); + assert_eq!(t.all_posts.len(), 2); + assert_eq!(t.batches.len(), 1); + assert_eq!(t.batches[0].posts.len(), 2); + assert_eq!(t.batches[0].records, 3); + assert_eq!(t.batches[0].bytes, 300); + + assert_eq!(t.batches[0].posts[0].batch.as_ref().unwrap(), "true"); + assert_eq!(t.batches[0].posts[0].records, 2); + assert_eq!(t.batches[0].posts[0].payload_bytes, 200); + assert!(!t.batches[0].posts[0].commit); + assert_eq!( + t.batches[0].posts[0].body.len(), + request_bytes_for_payloads(&[100, 100]) + ); + + assert_eq!(t.batches[0].posts[1].batch.as_ref().unwrap(), "1234"); + assert_eq!(t.batches[0].posts[1].records, 1); + assert_eq!(t.batches[0].posts[1].payload_bytes, 100); + assert!(t.batches[0].posts[1].commit); + assert_eq!( + t.batches[0].posts[1].body.len(), + request_bytes_for_payloads(&[100]) + ); + } + + #[test] + fn test_pq_multi_post_batch_records() { + let cfg = InfoConfiguration { + max_post_records: 3, + ..InfoConfiguration::default() + }; + let time = 11_111_111_000; + let (mut pq, tester) = pq_test_setup( + cfg, + time, + vec![ + fake_response(status_codes::ACCEPTED, time, Some("1234")), + fake_response(status_codes::ACCEPTED, time, Some("1234")), + fake_response(status_codes::ACCEPTED, time + 100_000, Some("1234")), + ], + ); + + pq.enqueue(&make_record(100)).unwrap(); + pq.enqueue(&make_record(100)).unwrap(); + pq.enqueue(&make_record(100)).unwrap(); + // POST + pq.enqueue(&make_record(100)).unwrap(); + pq.enqueue(&make_record(100)).unwrap(); + pq.enqueue(&make_record(100)).unwrap(); + // POST + pq.enqueue(&make_record(100)).unwrap(); + pq.flush(true).unwrap(); // COMMIT + + let t = tester.borrow(); + assert!(t.cur_batch.is_none()); + assert_eq!(t.all_posts.len(), 3); + assert_eq!(t.batches.len(), 1); + assert_eq!(t.batches[0].posts.len(), 3); + assert_eq!(t.batches[0].records, 7); + assert_eq!(t.batches[0].bytes, 700); + + assert_eq!(t.batches[0].posts[0].batch.as_ref().unwrap(), "true"); + assert_eq!(t.batches[0].posts[0].records, 3); + assert_eq!(t.batches[0].posts[0].payload_bytes, 300); + assert!(!t.batches[0].posts[0].commit); + assert_eq!( + t.batches[0].posts[0].body.len(), + request_bytes_for_payloads(&[100, 100, 100]) + ); + + assert_eq!(t.batches[0].posts[1].batch.as_ref().unwrap(), "1234"); + assert_eq!(t.batches[0].posts[1].records, 3); + assert_eq!(t.batches[0].posts[1].payload_bytes, 300); + assert!(!t.batches[0].posts[1].commit); + assert_eq!( + t.batches[0].posts[1].body.len(), + request_bytes_for_payloads(&[100, 100, 100]) + ); + + assert_eq!(t.batches[0].posts[2].batch.as_ref().unwrap(), "1234"); + assert_eq!(t.batches[0].posts[2].records, 1); + assert_eq!(t.batches[0].posts[2].payload_bytes, 100); + assert!(t.batches[0].posts[2].commit); + assert_eq!( + t.batches[0].posts[2].body.len(), + request_bytes_for_payloads(&[100]) + ); + } + + #[test] + #[allow(clippy::cognitive_complexity)] + fn test_pq_multi_post_multi_batch_records() { + let cfg = InfoConfiguration { + max_post_records: 3, + max_total_records: 5, + ..InfoConfiguration::default() + }; + let time = 11_111_111_000; + let (mut pq, tester) = pq_test_setup( + cfg, + time, + vec![ + fake_response(status_codes::ACCEPTED, time, Some("1234")), + fake_response(status_codes::ACCEPTED, time + 100_000, Some("1234")), + fake_response(status_codes::ACCEPTED, time + 100_000, Some("abcd")), + fake_response(status_codes::ACCEPTED, time + 200_000, Some("abcd")), + ], + ); + + pq.enqueue(&make_record(100)).unwrap(); 
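+        // With max_post_records == 3 and max_total_records == 5, the nine 100-byte records in
+        // this test should group as posts of 3 + 2 (committing batch "1234") and then 3 + 1
+        // (committing batch "abcd"), matching the four fake responses above.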
+ pq.enqueue(&make_record(100)).unwrap(); + pq.enqueue(&make_record(100)).unwrap(); + // POST + pq.enqueue(&make_record(100)).unwrap(); + pq.enqueue(&make_record(100)).unwrap(); + // POST + COMMIT + pq.enqueue(&make_record(100)).unwrap(); + pq.enqueue(&make_record(100)).unwrap(); + pq.enqueue(&make_record(100)).unwrap(); + // POST + pq.enqueue(&make_record(100)).unwrap(); + pq.flush(true).unwrap(); // COMMIT + + let t = tester.borrow(); + assert!(t.cur_batch.is_none()); + assert_eq!(t.all_posts.len(), 4); + assert_eq!(t.batches.len(), 2); + assert_eq!(t.batches[0].posts.len(), 2); + assert_eq!(t.batches[1].posts.len(), 2); + + assert_eq!(t.batches[0].records, 5); + assert_eq!(t.batches[1].records, 4); + + assert_eq!(t.batches[0].bytes, 500); + assert_eq!(t.batches[1].bytes, 400); + + assert_eq!(t.batches[0].posts[0].batch.as_ref().unwrap(), "true"); + assert_eq!(t.batches[0].posts[0].records, 3); + assert_eq!(t.batches[0].posts[0].payload_bytes, 300); + assert!(!t.batches[0].posts[0].commit); + assert_eq!( + t.batches[0].posts[0].body.len(), + request_bytes_for_payloads(&[100, 100, 100]) + ); + + assert_eq!(t.batches[0].posts[1].batch.as_ref().unwrap(), "1234"); + assert_eq!(t.batches[0].posts[1].records, 2); + assert_eq!(t.batches[0].posts[1].payload_bytes, 200); + assert!(t.batches[0].posts[1].commit); + assert_eq!( + t.batches[0].posts[1].body.len(), + request_bytes_for_payloads(&[100, 100]) + ); + + assert_eq!(t.batches[1].posts[0].batch.as_ref().unwrap(), "true"); + assert_eq!(t.batches[1].posts[0].records, 3); + assert_eq!(t.batches[1].posts[0].payload_bytes, 300); + assert!(!t.batches[1].posts[0].commit); + assert_eq!( + t.batches[1].posts[0].body.len(), + request_bytes_for_payloads(&[100, 100, 100]) + ); + + assert_eq!(t.batches[1].posts[1].batch.as_ref().unwrap(), "abcd"); + assert_eq!(t.batches[1].posts[1].records, 1); + assert_eq!(t.batches[1].posts[1].payload_bytes, 100); + assert!(t.batches[1].posts[1].commit); + assert_eq!( + t.batches[1].posts[1].body.len(), + request_bytes_for_payloads(&[100]) + ); + } + + #[test] + #[allow(clippy::cognitive_complexity)] + fn test_pq_multi_post_multi_batch_bytes() { + let cfg = InfoConfiguration { + max_post_bytes: 300, + max_total_bytes: 500, + ..InfoConfiguration::default() + }; + let time = 11_111_111_000; + let (mut pq, tester) = pq_test_setup( + cfg, + time, + vec![ + fake_response(status_codes::ACCEPTED, time, Some("1234")), + fake_response(status_codes::ACCEPTED, time + 100_000, Some("1234")), // should commit + fake_response(status_codes::ACCEPTED, time + 100_000, Some("abcd")), + fake_response(status_codes::ACCEPTED, time + 200_000, Some("abcd")), // should commit + ], + ); + + pq.enqueue(&make_record(100)).unwrap(); + pq.enqueue(&make_record(100)).unwrap(); + pq.enqueue(&make_record(100)).unwrap(); + assert_eq!(pq.last_modified.0, time); + // POST + pq.enqueue(&make_record(100)).unwrap(); + pq.enqueue(&make_record(100)).unwrap(); + // POST + COMMIT + pq.enqueue(&make_record(100)).unwrap(); + assert_eq!(pq.last_modified.0, time + 100_000); + pq.enqueue(&make_record(100)).unwrap(); + pq.enqueue(&make_record(100)).unwrap(); + + // POST + pq.enqueue(&make_record(100)).unwrap(); + assert_eq!(pq.last_modified.0, time + 100_000); + pq.flush(true).unwrap(); // COMMIT + + assert_eq!(pq.last_modified.0, time + 200_000); + + let t = tester.borrow(); + assert!(t.cur_batch.is_none()); + assert_eq!(t.all_posts.len(), 4); + assert_eq!(t.batches.len(), 2); + assert_eq!(t.batches[0].posts.len(), 2); + assert_eq!(t.batches[1].posts.len(), 2); + + 
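+        // The byte-based limits (max_post_bytes == 300, max_total_bytes == 500) produce the
+        // same shape as the record-based test above: batch "1234" carries 300 + 200 payload
+        // bytes and batch "abcd" carries 300 + 100, as the remaining assertions show.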
assert_eq!(t.batches[0].records, 5); + assert_eq!(t.batches[1].records, 4); + + assert_eq!(t.batches[0].bytes, 500); + assert_eq!(t.batches[1].bytes, 400); + + assert_eq!(t.batches[0].posts[0].batch.as_ref().unwrap(), "true"); + assert_eq!(t.batches[0].posts[0].records, 3); + assert_eq!(t.batches[0].posts[0].payload_bytes, 300); + assert!(!t.batches[0].posts[0].commit); + assert_eq!( + t.batches[0].posts[0].body.len(), + request_bytes_for_payloads(&[100, 100, 100]) + ); + + assert_eq!(t.batches[0].posts[1].batch.as_ref().unwrap(), "1234"); + assert_eq!(t.batches[0].posts[1].records, 2); + assert_eq!(t.batches[0].posts[1].payload_bytes, 200); + assert!(t.batches[0].posts[1].commit); + assert_eq!( + t.batches[0].posts[1].body.len(), + request_bytes_for_payloads(&[100, 100]) + ); + + assert_eq!(t.batches[1].posts[0].batch.as_ref().unwrap(), "true"); + assert_eq!(t.batches[1].posts[0].records, 3); + assert_eq!(t.batches[1].posts[0].payload_bytes, 300); + assert!(!t.batches[1].posts[0].commit); + assert_eq!( + t.batches[1].posts[0].body.len(), + request_bytes_for_payloads(&[100, 100, 100]) + ); + + assert_eq!(t.batches[1].posts[1].batch.as_ref().unwrap(), "abcd"); + assert_eq!(t.batches[1].posts[1].records, 1); + assert_eq!(t.batches[1].posts[1].payload_bytes, 100); + assert!(t.batches[1].posts[1].commit); + assert_eq!( + t.batches[1].posts[1].body.len(), + request_bytes_for_payloads(&[100]) + ); + } + + // TODO: Test + // + // - error cases!!! We don't test our handling of server errors at all! + // - mixed bytes/record limits + // + // A lot of these have good examples in test_postqueue.js on deskftop sync +} diff --git a/third_party/rust/sync15/src/client/state.rs b/third_party/rust/sync15/src/client/state.rs new file mode 100644 index 0000000000..78e9a6a718 --- /dev/null +++ b/third_party/rust/sync15/src/client/state.rs @@ -0,0 +1,1089 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +use std::collections::{HashMap, HashSet}; + +use super::request::{InfoCollections, InfoConfiguration}; +use super::storage_client::{SetupStorageClient, Sync15ClientResponse}; +use super::CollectionKeys; +use crate::bso::OutgoingEncryptedBso; +use crate::error::{self, Error as ErrorKind, ErrorResponse}; +use crate::record_types::{MetaGlobalEngine, MetaGlobalRecord}; +use crate::EncryptedPayload; +use crate::{Guid, KeyBundle, ServerTimestamp}; +use interrupt_support::Interruptee; +use serde_derive::*; + +use self::SetupState::*; + +const STORAGE_VERSION: usize = 5; + +/// Maps names to storage versions for engines to include in a fresh +/// `meta/global` record. We include engines that we don't implement +/// because they'll be disabled on other clients if we omit them +/// (bug 1479929). +const DEFAULT_ENGINES: &[(&str, usize)] = &[ + ("passwords", 1), + ("clients", 1), + ("addons", 1), + ("addresses", 1), + ("bookmarks", 2), + ("creditcards", 1), + ("forms", 1), + ("history", 1), + ("prefs", 2), + ("tabs", 1), +]; + +// Declined engines to include in a fresh `meta/global` record. +const DEFAULT_DECLINED: &[&str] = &[]; + +/// State that we require the app to persist to storage for us. +/// It's a little unfortunate we need this, because it's only tracking +/// "declined engines", and even then, only needed in practice when there's +/// no meta/global so we need to create one. 
It's extra unfortunate because we +/// want to move away from "globally declined" engines anyway, moving towards +/// allowing engines to be enabled or disabled per client rather than globally. +/// +/// Apps are expected to treat this as opaque, so we support serializing it. +/// Note that this structure is *not* used to *change* the declined engines +/// list - that will be done in the future, but the API exposed for that +/// purpose will also take a mutable PersistedGlobalState. +#[derive(Debug, Serialize, Deserialize)] +#[serde(tag = "schema_version")] +pub enum PersistedGlobalState { + /// V1 was when we persisted the entire GlobalState, keys and all! + + /// V2 is just tracking the globally declined list. + /// None means "I've no idea" and theoretically should only happen on the + /// very first sync for an app. + V2 { declined: Option<Vec<String>> }, +} + +impl Default for PersistedGlobalState { + #[inline] + fn default() -> PersistedGlobalState { + PersistedGlobalState::V2 { declined: None } + } +} + +#[derive(Debug, Default, Clone, PartialEq)] +pub(crate) struct EngineChangesNeeded { + pub local_resets: HashSet<String>, + pub remote_wipes: HashSet<String>, +} + +#[derive(Debug, Default, Clone, PartialEq)] +struct RemoteEngineState { + info_collections: HashSet<String>, + declined: HashSet<String>, +} + +#[derive(Debug, Default, Clone, PartialEq)] +struct EngineStateInput { + local_declined: HashSet<String>, + remote: Option<RemoteEngineState>, + user_changes: HashMap<String, bool>, +} + +#[derive(Debug, Default, Clone, PartialEq)] +struct EngineStateOutput { + // The new declined. + declined: HashSet<String>, + // Which engines need resets or wipes. + changes_needed: EngineChangesNeeded, +} + +fn compute_engine_states(input: EngineStateInput) -> EngineStateOutput { + use super::util::*; + log::debug!("compute_engine_states: input {:?}", input); + let (must_enable, must_disable) = partition_by_value(&input.user_changes); + let have_remote = input.remote.is_some(); + let RemoteEngineState { + info_collections, + declined: remote_declined, + } = input.remote.clone().unwrap_or_default(); + + let both_declined_and_remote = set_intersection(&info_collections, &remote_declined); + if !both_declined_and_remote.is_empty() { + // Should we wipe these too? + log::warn!( + "Remote state contains engines which are in both info/collections and meta/global's declined: {:?}", + both_declined_and_remote, + ); + } + + let most_recent_declined_list = if have_remote { + &remote_declined + } else { + &input.local_declined + }; + + let result_declined = set_difference( + &set_union(most_recent_declined_list, &must_disable), + &must_enable, + ); + + let output = EngineStateOutput { + changes_needed: EngineChangesNeeded { + // Anything now declined which wasn't in our declined list before gets a reset. + local_resets: set_difference(&result_declined, &input.local_declined), + // Anything remote that we just declined gets a wipe. In the future + // we might want to consider wiping things in both remote declined + // and info/collections, but we'll let other clients pick up their + // own mess for now. + remote_wipes: set_intersection(&info_collections, &must_disable), + }, + declined: result_declined, + }; + // No PII here and this helps debug problems. 
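+    // Worked example with hypothetical inputs (assuming partition_by_value splits
+    // user_changes into enables and disables): remote declined {"passwords"}, local declined
+    // {"passwords"}, info/collections {"bookmarks", "history"}, user_changes
+    // {passwords: true, history: false}. Then must_enable == {"passwords"}, must_disable ==
+    // {"history"}, the new declined set is {"history"}, local_resets is {"history"} (newly
+    // declined locally) and remote_wipes is {"history"} (declined but still live on the server).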
+ log::debug!("compute_engine_states: output {:?}", output); + output +} + +impl PersistedGlobalState { + fn set_declined(&mut self, new_declined: Vec<String>) { + match self { + Self::V2 { ref mut declined } => *declined = Some(new_declined), + } + } + pub(crate) fn get_declined(&self) -> &[String] { + match self { + Self::V2 { declined: Some(d) } => d, + Self::V2 { declined: None } => &[], + } + } +} + +/// Holds global Sync state, including server upload limits, the +/// last-fetched collection modified times, `meta/global` record, and +/// an encrypted copy of the crypto/keys resource (avoids keeping them +/// in memory longer than necessary; avoids key mismatches by ensuring the same KeyBundle +/// is used for both the keys and encrypted payloads.) +#[derive(Debug, Clone)] +pub struct GlobalState { + pub config: InfoConfiguration, + pub collections: InfoCollections, + pub global: MetaGlobalRecord, + pub global_timestamp: ServerTimestamp, + pub keys: EncryptedPayload, + pub keys_timestamp: ServerTimestamp, +} + +/// Creates a fresh `meta/global` record, using the default engine selections, +/// and declined engines from our PersistedGlobalState. +fn new_global(pgs: &PersistedGlobalState) -> MetaGlobalRecord { + let sync_id = Guid::random(); + let mut engines: HashMap<String, _> = HashMap::new(); + for (name, version) in DEFAULT_ENGINES.iter() { + let sync_id = Guid::random(); + engines.insert( + (*name).to_string(), + MetaGlobalEngine { + version: *version, + sync_id, + }, + ); + } + // We only need our PersistedGlobalState to fill out a new meta/global - if + // we previously saw a meta/global then we would have updated it with what + // it was at the time. + let declined = match pgs { + PersistedGlobalState::V2 { declined: Some(d) } => d.clone(), + _ => DEFAULT_DECLINED.iter().map(ToString::to_string).collect(), + }; + + MetaGlobalRecord { + sync_id, + storage_version: STORAGE_VERSION, + engines, + declined, + } +} + +fn fixup_meta_global(global: &mut MetaGlobalRecord) -> bool { + let mut changed_any = false; + for &(name, version) in DEFAULT_ENGINES.iter() { + let had_engine = global.engines.contains_key(name); + let should_have_engine = !global.declined.iter().any(|c| c == name); + if had_engine != should_have_engine { + if should_have_engine { + log::debug!("SyncID for engine {:?} was missing", name); + global.engines.insert( + name.to_string(), + MetaGlobalEngine { + version, + sync_id: Guid::random(), + }, + ); + } else { + log::debug!("SyncID for engine {:?} was present, but shouldn't be", name); + global.engines.remove(name); + } + changed_any = true; + } + } + changed_any +} + +pub struct SetupStateMachine<'a> { + client: &'a dyn SetupStorageClient, + root_key: &'a KeyBundle, + pgs: &'a mut PersistedGlobalState, + // `allowed_states` is designed so that we can arrange for the concept of + // a "fast" sync - so we decline to advance if we need to setup from scratch. + // The idea is that if we need to sync before going to sleep we should do + // it as fast as possible. However, in practice this isn't going to do + // what we expect - a "fast sync" that finds lots to do is almost certainly + // going to take longer than a "full sync" that finds nothing to do. + // We should almost certainly remove this and instead allow for a "time + // budget", after which we get interrupted. Later... 
+ allowed_states: Vec<&'static str>, + sequence: Vec<&'static str>, + engine_updates: Option<&'a HashMap<String, bool>>, + interruptee: &'a dyn Interruptee, + pub(crate) changes_needed: Option<EngineChangesNeeded>, +} + +impl<'a> SetupStateMachine<'a> { + /// Creates a state machine for a "classic" Sync 1.5 client that supports + /// all states, including uploading a fresh `meta/global` and `crypto/keys` + /// after a node reassignment. + pub fn for_full_sync( + client: &'a dyn SetupStorageClient, + root_key: &'a KeyBundle, + pgs: &'a mut PersistedGlobalState, + engine_updates: Option<&'a HashMap<String, bool>>, + interruptee: &'a dyn Interruptee, + ) -> SetupStateMachine<'a> { + SetupStateMachine::with_allowed_states( + client, + root_key, + pgs, + interruptee, + engine_updates, + vec![ + "Initial", + "InitialWithConfig", + "InitialWithInfo", + "InitialWithMetaGlobal", + "Ready", + "FreshStartRequired", + "WithPreviousState", + ], + ) + } + + fn with_allowed_states( + client: &'a dyn SetupStorageClient, + root_key: &'a KeyBundle, + pgs: &'a mut PersistedGlobalState, + interruptee: &'a dyn Interruptee, + engine_updates: Option<&'a HashMap<String, bool>>, + allowed_states: Vec<&'static str>, + ) -> SetupStateMachine<'a> { + SetupStateMachine { + client, + root_key, + pgs, + sequence: Vec::new(), + allowed_states, + engine_updates, + interruptee, + changes_needed: None, + } + } + + fn advance(&mut self, from: SetupState) -> error::Result<SetupState> { + match from { + // Fetch `info/configuration` with current server limits, and + // `info/collections` with collection last modified times. + Initial => { + let config = match self.client.fetch_info_configuration()? { + Sync15ClientResponse::Success { record, .. } => record, + Sync15ClientResponse::Error(ErrorResponse::NotFound { .. }) => { + InfoConfiguration::default() + } + other => return Err(other.create_storage_error()), + }; + Ok(InitialWithConfig { config }) + } + + // XXX - we could consider combining these Initial* states, because we don't + // attempt to support filling in "missing" global state - *any* 404 in them + // means `FreshStart`. + // IOW, in all cases, they either `Err()`, move to `FreshStartRequired`, or + // advance to a specific next state. + InitialWithConfig { config } => { + match self.client.fetch_info_collections()? { + Sync15ClientResponse::Success { + record: collections, + .. + } => Ok(InitialWithInfo { + config, + collections, + }), + // If the server doesn't have a `crypto/keys`, start over + // and reupload our `meta/global` and `crypto/keys`. + Sync15ClientResponse::Error(ErrorResponse::NotFound { .. }) => { + Ok(FreshStartRequired { config }) + } + other => Err(other.create_storage_error()), + } + } + + InitialWithInfo { + config, + collections, + } => { + match self.client.fetch_meta_global()? { + Sync15ClientResponse::Success { + record: mut global, + last_modified: mut global_timestamp, + .. + } => { + // If the server has a newer storage version, we can't + // sync until our client is updated. + if global.storage_version > STORAGE_VERSION { + return Err(ErrorKind::ClientUpgradeRequired); + } + + // If the server has an older storage version, wipe and + // reupload. + if global.storage_version < STORAGE_VERSION { + Ok(FreshStartRequired { config }) + } else { + log::info!("Have info/collections and meta/global. 
Computing new engine states"); + let initial_global_declined: HashSet<String> = + global.declined.iter().cloned().collect(); + let result = compute_engine_states(EngineStateInput { + local_declined: self.pgs.get_declined().iter().cloned().collect(), + user_changes: self.engine_updates.cloned().unwrap_or_default(), + remote: Some(RemoteEngineState { + declined: initial_global_declined.clone(), + info_collections: collections.keys().cloned().collect(), + }), + }); + // Persist the new declined. + self.pgs + .set_declined(result.declined.iter().cloned().collect()); + // If the declined engines differ from remote, fix that. + let fixed_declined = if result.declined != initial_global_declined { + global.declined = result.declined.iter().cloned().collect(); + log::info!( + "Uploading new declined {:?} to meta/global with timestamp {:?}", + global.declined, + global_timestamp, + ); + true + } else { + false + }; + // If there are missing syncIds, we need to fix those as well + let fixed_ids = if fixup_meta_global(&mut global) { + log::info!( + "Uploading corrected meta/global with timestamp {:?}", + global_timestamp, + ); + true + } else { + false + }; + + if fixed_declined || fixed_ids { + global_timestamp = + self.client.put_meta_global(global_timestamp, &global)?; + log::debug!("new global_timestamp: {:?}", global_timestamp); + } + // Update the set of changes needed. + if self.changes_needed.is_some() { + // Should never happen (we prevent state machine + // loops elsewhere) but if it did, the info is stale + // anyway. + log::warn!("Already have a set of changes needed, Overwriting..."); + } + self.changes_needed = Some(result.changes_needed); + Ok(InitialWithMetaGlobal { + config, + collections, + global, + global_timestamp, + }) + } + } + Sync15ClientResponse::Error(ErrorResponse::NotFound { .. }) => { + Ok(FreshStartRequired { config }) + } + other => Err(other.create_storage_error()), + } + } + + InitialWithMetaGlobal { + config, + collections, + global, + global_timestamp, + } => { + // Now try and get keys etc - if we fresh-start we'll re-use declined. + match self.client.fetch_crypto_keys()? { + Sync15ClientResponse::Success { + record, + last_modified, + .. + } => { + // Note that collection/keys is itself a bso, so the + // json body also carries the timestamp. If they aren't + // identical something has screwed up and we should die. + assert_eq!(last_modified, record.envelope.modified); + let state = GlobalState { + config, + collections, + global, + global_timestamp, + keys: record.payload, + keys_timestamp: last_modified, + }; + Ok(Ready { state }) + } + // If the server doesn't have a `crypto/keys`, start over + // and reupload our `meta/global` and `crypto/keys`. + Sync15ClientResponse::Error(ErrorResponse::NotFound { .. }) => { + Ok(FreshStartRequired { config }) + } + other => Err(other.create_storage_error()), + } + } + + // We've got old state that's likely to be OK. + // We keep things simple here - if there's evidence of a new/missing + // meta/global or new/missing keys we just restart from scratch. + WithPreviousState { old_state } => match self.client.fetch_info_collections()? { + Sync15ClientResponse::Success { + record: collections, + .. 
+ } => Ok( + if self.engine_updates.is_none() + && is_same_timestamp(old_state.global_timestamp, &collections, "meta") + && is_same_timestamp(old_state.keys_timestamp, &collections, "crypto") + { + Ready { + state: GlobalState { + collections, + ..old_state + }, + } + } else { + InitialWithConfig { + config: old_state.config, + } + }, + ), + _ => Ok(InitialWithConfig { + config: old_state.config, + }), + }, + + Ready { state } => Ok(Ready { state }), + + FreshStartRequired { config } => { + // Wipe the server. + log::info!("Fresh start: wiping remote"); + self.client.wipe_all_remote()?; + + // Upload a fresh `meta/global`... + log::info!("Uploading meta/global"); + let computed = compute_engine_states(EngineStateInput { + local_declined: self.pgs.get_declined().iter().cloned().collect(), + user_changes: self.engine_updates.cloned().unwrap_or_default(), + remote: None, + }); + self.pgs + .set_declined(computed.declined.iter().cloned().collect()); + + self.changes_needed = Some(computed.changes_needed); + + let new_global = new_global(self.pgs); + + self.client + .put_meta_global(ServerTimestamp::default(), &new_global)?; + + // ...And a fresh `crypto/keys`. + let new_keys = CollectionKeys::new_random()?.to_encrypted_payload(self.root_key)?; + let bso = OutgoingEncryptedBso::new(Guid::new("keys").into(), new_keys); + self.client + .put_crypto_keys(ServerTimestamp::default(), &bso)?; + + // TODO(lina): Can we pass along server timestamps from the PUTs + // above, and avoid re-fetching the `m/g` and `c/k` we just + // uploaded? + // OTOH(mark): restarting the state machine keeps life simple and rare. + Ok(InitialWithConfig { config }) + } + } + } + + /// Runs through the state machine to the ready state. + pub fn run_to_ready(&mut self, state: Option<GlobalState>) -> error::Result<GlobalState> { + let mut s = match state { + Some(old_state) => WithPreviousState { old_state }, + None => Initial, + }; + loop { + self.interruptee.err_if_interrupted()?; + let label = &s.label(); + log::trace!("global state: {:?}", label); + match s { + Ready { state } => { + self.sequence.push(label); + return Ok(state); + } + // If we already started over once before, we're likely in a + // cycle, and should try again later. Intermediate states + // aren't a problem, just the initial ones. + FreshStartRequired { .. } | WithPreviousState { .. } | Initial => { + if self.sequence.contains(label) { + // Is this really the correct error? + return Err(ErrorKind::SetupRace); + } + } + _ => { + if !self.allowed_states.contains(label) { + return Err(ErrorKind::SetupRequired); + } + } + }; + self.sequence.push(label); + s = self.advance(s)?; + } + } +} + +/// States in the remote setup process. +/// TODO(lina): Add link once #56 is merged. +#[derive(Debug)] +#[allow(clippy::large_enum_variant)] +enum SetupState { + // These "Initial" states are only ever used when starting from scratch. + Initial, + InitialWithConfig { + config: InfoConfiguration, + }, + InitialWithInfo { + config: InfoConfiguration, + collections: InfoCollections, + }, + InitialWithMetaGlobal { + config: InfoConfiguration, + collections: InfoCollections, + global: MetaGlobalRecord, + global_timestamp: ServerTimestamp, + }, + WithPreviousState { + old_state: GlobalState, + }, + Ready { + state: GlobalState, + }, + FreshStartRequired { + config: InfoConfiguration, + }, +} + +impl SetupState { + fn label(&self) -> &'static str { + match self { + Initial { .. } => "Initial", + InitialWithConfig { .. } => "InitialWithConfig", + InitialWithInfo { .. 
} => "InitialWithInfo", + InitialWithMetaGlobal { .. } => "InitialWithMetaGlobal", + Ready { .. } => "Ready", + WithPreviousState { .. } => "WithPreviousState", + FreshStartRequired { .. } => "FreshStartRequired", + } + } +} + +/// Whether we should skip fetching an item. Used when we already have timestamps +/// and want to check if we should reuse our existing state. The state's fairly +/// cheap to recreate and very bad to use if it is wrong, so we insist on the +/// *exact* timestamp matching and not a simple "later than" check. +fn is_same_timestamp(local: ServerTimestamp, collections: &InfoCollections, key: &str) -> bool { + collections.get(key).map_or(false, |ts| local == *ts) +} + +#[cfg(test)] +mod tests { + use super::*; + + use crate::bso::{IncomingEncryptedBso, IncomingEnvelope}; + use interrupt_support::NeverInterrupts; + + struct InMemoryClient { + info_configuration: error::Result<Sync15ClientResponse<InfoConfiguration>>, + info_collections: error::Result<Sync15ClientResponse<InfoCollections>>, + meta_global: error::Result<Sync15ClientResponse<MetaGlobalRecord>>, + crypto_keys: error::Result<Sync15ClientResponse<IncomingEncryptedBso>>, + } + + impl SetupStorageClient for InMemoryClient { + fn fetch_info_configuration( + &self, + ) -> error::Result<Sync15ClientResponse<InfoConfiguration>> { + match &self.info_configuration { + Ok(client_response) => Ok(client_response.clone()), + Err(_) => Ok(Sync15ClientResponse::Error(ErrorResponse::ServerError { + status: 500, + route: "test/path".into(), + })), + } + } + + fn fetch_info_collections(&self) -> error::Result<Sync15ClientResponse<InfoCollections>> { + match &self.info_collections { + Ok(collections) => Ok(collections.clone()), + Err(_) => Ok(Sync15ClientResponse::Error(ErrorResponse::ServerError { + status: 500, + route: "test/path".into(), + })), + } + } + + fn fetch_meta_global(&self) -> error::Result<Sync15ClientResponse<MetaGlobalRecord>> { + match &self.meta_global { + Ok(global) => Ok(global.clone()), + // TODO(lina): Special handling for 404s, we want to ensure we + // handle missing keys and other server errors correctly. + Err(_) => Ok(Sync15ClientResponse::Error(ErrorResponse::ServerError { + status: 500, + route: "test/path".into(), + })), + } + } + + fn put_meta_global( + &self, + xius: ServerTimestamp, + global: &MetaGlobalRecord, + ) -> error::Result<ServerTimestamp> { + // Ensure that the meta/global record we uploaded is "fixed up" + assert!(DEFAULT_ENGINES + .iter() + .filter(|e| e.0 != "logins") + .all(|&(k, _v)| global.engines.contains_key(k))); + assert!(!global.engines.contains_key("logins")); + assert_eq!(global.declined, vec!["logins".to_string()]); + // return a different timestamp. + Ok(ServerTimestamp(xius.0 + 1)) + } + + fn fetch_crypto_keys(&self) -> error::Result<Sync15ClientResponse<IncomingEncryptedBso>> { + match &self.crypto_keys { + Ok(Sync15ClientResponse::Success { + status, + record, + last_modified, + route, + }) => Ok(Sync15ClientResponse::Success { + status: *status, + record: IncomingEncryptedBso::new( + record.envelope.clone(), + record.payload.clone(), + ), + last_modified: *last_modified, + route: route.clone(), + }), + // TODO(lina): Same as above, for 404s. 
+ _ => Ok(Sync15ClientResponse::Error(ErrorResponse::ServerError { + status: 500, + route: "test/path".into(), + })), + } + } + + fn put_crypto_keys( + &self, + xius: ServerTimestamp, + _keys: &OutgoingEncryptedBso, + ) -> error::Result<()> { + assert_eq!(xius, ServerTimestamp(888_800)); + Err(ErrorKind::StorageHttpError(ErrorResponse::ServerError { + status: 500, + route: "crypto/keys".to_string(), + })) + } + + fn wipe_all_remote(&self) -> error::Result<()> { + Ok(()) + } + } + + #[allow(clippy::unnecessary_wraps)] + fn mocked_success_ts<T>(t: T, ts: i64) -> error::Result<Sync15ClientResponse<T>> { + Ok(Sync15ClientResponse::Success { + status: 200, + record: t, + last_modified: ServerTimestamp(ts), + route: "test/path".into(), + }) + } + + fn mocked_success<T>(t: T) -> error::Result<Sync15ClientResponse<T>> { + mocked_success_ts(t, 0) + } + + fn mocked_success_keys( + keys: CollectionKeys, + root_key: &KeyBundle, + ) -> error::Result<Sync15ClientResponse<IncomingEncryptedBso>> { + let timestamp = keys.timestamp; + let payload = keys.to_encrypted_payload(root_key).unwrap(); + let bso = IncomingEncryptedBso::new( + IncomingEnvelope { + id: Guid::new("keys"), + modified: timestamp, + sortindex: None, + ttl: None, + }, + payload, + ); + Ok(Sync15ClientResponse::Success { + status: 200, + record: bso, + last_modified: timestamp, + route: "test/path".into(), + }) + } + + #[test] + fn test_state_machine_ready_from_empty() { + let _ = env_logger::try_init(); + let root_key = KeyBundle::new_random().unwrap(); + let keys = CollectionKeys { + timestamp: ServerTimestamp(123_400), + default: KeyBundle::new_random().unwrap(), + collections: HashMap::new(), + }; + let mg = MetaGlobalRecord { + sync_id: "syncIDAAAAAA".into(), + storage_version: 5usize, + engines: vec![( + "bookmarks", + MetaGlobalEngine { + version: 1usize, + sync_id: "syncIDBBBBBB".into(), + }, + )] + .into_iter() + .map(|(key, value)| (key.to_owned(), value)) + .collect(), + // We ensure that the record we upload doesn't have a logins record. + declined: vec!["logins".to_string()], + }; + let client = InMemoryClient { + info_configuration: mocked_success(InfoConfiguration::default()), + info_collections: mocked_success(InfoCollections::new( + vec![("meta", 123_456), ("crypto", 145_000)] + .into_iter() + .map(|(key, value)| (key.to_owned(), ServerTimestamp(value))) + .collect(), + )), + meta_global: mocked_success_ts(mg, 999_000), + crypto_keys: mocked_success_keys(keys, &root_key), + }; + let mut pgs = PersistedGlobalState::V2 { declined: None }; + + let mut state_machine = + SetupStateMachine::for_full_sync(&client, &root_key, &mut pgs, None, &NeverInterrupts); + assert!( + state_machine.run_to_ready(None).is_ok(), + "Should drive state machine to ready" + ); + assert_eq!( + state_machine.sequence, + vec![ + "Initial", + "InitialWithConfig", + "InitialWithInfo", + "InitialWithMetaGlobal", + "Ready", + ], + "Should cycle through all states" + ); + } + + #[test] + fn test_from_previous_state_declined() { + let _ = env_logger::try_init(); + // The state-machine sequence where we didn't use the previous state + // (ie, where the state machine restarted) + let sm_seq_restarted = vec![ + "WithPreviousState", + "InitialWithConfig", + "InitialWithInfo", + "InitialWithMetaGlobal", + "Ready", + ]; + // The state-machine sequence where we used the previous state. + let sm_seq_used_previous = vec!["WithPreviousState", "Ready"]; + + // do the actual test. 
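+        // (Descriptive note: `do_test` below drives a full-sync state machine
+        // starting from the supplied `old_state` and asserts that exactly the
+        // expected sequence of state labels was visited.)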
+ fn do_test( + client: &dyn SetupStorageClient, + root_key: &KeyBundle, + pgs: &mut PersistedGlobalState, + engine_updates: Option<&HashMap<String, bool>>, + old_state: GlobalState, + expected_states: &[&str], + ) { + let mut state_machine = SetupStateMachine::for_full_sync( + client, + root_key, + pgs, + engine_updates, + &NeverInterrupts, + ); + assert!( + state_machine.run_to_ready(Some(old_state)).is_ok(), + "Should drive state machine to ready" + ); + assert_eq!(state_machine.sequence, expected_states); + } + + // and all the complicated setup... + let ts_metaglobal = 123_456; + let ts_keys = 145_000; + let root_key = KeyBundle::new_random().unwrap(); + let keys = CollectionKeys { + timestamp: ServerTimestamp(ts_keys + 1), + default: KeyBundle::new_random().unwrap(), + collections: HashMap::new(), + }; + let mg = MetaGlobalRecord { + sync_id: "syncIDAAAAAA".into(), + storage_version: 5usize, + engines: vec![( + "bookmarks", + MetaGlobalEngine { + version: 1usize, + sync_id: "syncIDBBBBBB".into(), + }, + )] + .into_iter() + .map(|(key, value)| (key.to_owned(), value)) + .collect(), + // We ensure that the record we upload doesn't have a logins record. + declined: vec!["logins".to_string()], + }; + let collections = InfoCollections::new( + vec![("meta", ts_metaglobal), ("crypto", ts_keys)] + .into_iter() + .map(|(key, value)| (key.to_owned(), ServerTimestamp(value))) + .collect(), + ); + let client = InMemoryClient { + info_configuration: mocked_success(InfoConfiguration::default()), + info_collections: mocked_success(collections.clone()), + meta_global: mocked_success_ts(mg.clone(), ts_metaglobal), + crypto_keys: mocked_success_keys(keys.clone(), &root_key), + }; + + // First a test where the "previous" global state is OK to reuse. + { + let mut pgs = PersistedGlobalState::V2 { declined: None }; + // A "previous" global state. + let old_state = GlobalState { + config: InfoConfiguration::default(), + collections: collections.clone(), + global: mg.clone(), + global_timestamp: ServerTimestamp(ts_metaglobal), + keys: keys + .to_encrypted_payload(&root_key) + .expect("should always work in this test"), + keys_timestamp: ServerTimestamp(ts_keys), + }; + do_test( + &client, + &root_key, + &mut pgs, + None, + old_state, + &sm_seq_used_previous, + ); + } + + // Now where the meta/global record on the server is later. + { + let mut pgs = PersistedGlobalState::V2 { declined: None }; + // A "previous" global state. + let old_state = GlobalState { + config: InfoConfiguration::default(), + collections: collections.clone(), + global: mg.clone(), + global_timestamp: ServerTimestamp(999_999), + keys: keys + .to_encrypted_payload(&root_key) + .expect("should always work in this test"), + keys_timestamp: ServerTimestamp(ts_keys), + }; + do_test( + &client, + &root_key, + &mut pgs, + None, + old_state, + &sm_seq_restarted, + ); + } + + // Where keys on the server is later. + { + let mut pgs = PersistedGlobalState::V2 { declined: None }; + // A "previous" global state. + let old_state = GlobalState { + config: InfoConfiguration::default(), + collections: collections.clone(), + global: mg.clone(), + global_timestamp: ServerTimestamp(ts_metaglobal), + keys: keys + .to_encrypted_payload(&root_key) + .expect("should always work in this test"), + keys_timestamp: ServerTimestamp(999_999), + }; + do_test( + &client, + &root_key, + &mut pgs, + None, + old_state, + &sm_seq_restarted, + ); + } + + // Where there are engine-state changes. 
+ { + let mut pgs = PersistedGlobalState::V2 { declined: None }; + // A "previous" global state. + let old_state = GlobalState { + config: InfoConfiguration::default(), + collections, + global: mg, + global_timestamp: ServerTimestamp(ts_metaglobal), + keys: keys + .to_encrypted_payload(&root_key) + .expect("should always work in this test"), + keys_timestamp: ServerTimestamp(ts_keys), + }; + let mut engine_updates = HashMap::<String, bool>::new(); + engine_updates.insert("logins".to_string(), false); + do_test( + &client, + &root_key, + &mut pgs, + Some(&engine_updates), + old_state, + &sm_seq_restarted, + ); + let declined = match pgs { + PersistedGlobalState::V2 { declined: d } => d, + }; + // and check we now consider logins as declined. + assert_eq!(declined, Some(vec!["logins".to_string()])); + } + } + + fn string_set(s: &[&str]) -> HashSet<String> { + s.iter().map(ToString::to_string).collect() + } + fn string_map<T: Clone>(s: &[(&str, T)]) -> HashMap<String, T> { + s.iter().map(|v| (v.0.to_string(), v.1.clone())).collect() + } + #[test] + fn test_engine_states() { + assert_eq!( + compute_engine_states(EngineStateInput { + local_declined: string_set(&["foo", "bar"]), + remote: None, + user_changes: Default::default(), + }), + EngineStateOutput { + declined: string_set(&["foo", "bar"]), + // No wipes, no resets + changes_needed: Default::default(), + } + ); + assert_eq!( + compute_engine_states(EngineStateInput { + local_declined: string_set(&["foo", "bar"]), + remote: Some(RemoteEngineState { + declined: string_set(&["foo"]), + info_collections: string_set(&["bar"]) + }), + user_changes: Default::default(), + }), + EngineStateOutput { + // Now we have `foo`. + declined: string_set(&["foo"]), + // No wipes, no resets, should just be a local update. + changes_needed: Default::default(), + } + ); + assert_eq!( + compute_engine_states(EngineStateInput { + local_declined: string_set(&["foo", "bar"]), + remote: Some(RemoteEngineState { + declined: string_set(&["foo", "bar", "quux"]), + info_collections: string_set(&[]) + }), + user_changes: Default::default(), + }), + EngineStateOutput { + // Now we have `foo`. + declined: string_set(&["foo", "bar", "quux"]), + changes_needed: EngineChangesNeeded { + // Should reset `quux`. + local_resets: string_set(&["quux"]), + // No wipes, though. + remote_wipes: string_set(&[]), + } + } + ); + assert_eq!( + compute_engine_states(EngineStateInput { + local_declined: string_set(&["bar", "baz"]), + remote: Some(RemoteEngineState { + declined: string_set(&["bar", "baz",]), + info_collections: string_set(&["quux"]) + }), + // Change a declined engine to undeclined. + user_changes: string_map(&[("bar", true)]), + }), + EngineStateOutput { + declined: string_set(&["baz"]), + // No wipes, just undecline it. + changes_needed: Default::default() + } + ); + assert_eq!( + compute_engine_states(EngineStateInput { + local_declined: string_set(&["bar", "baz"]), + remote: Some(RemoteEngineState { + declined: string_set(&["bar", "baz"]), + info_collections: string_set(&["foo"]) + }), + // Change an engine which exists remotely to declined. + user_changes: string_map(&[("foo", false)]), + }), + EngineStateOutput { + declined: string_set(&["baz", "bar", "foo"]), + // No wipes, just undecline it. + changes_needed: EngineChangesNeeded { + // Should reset our local foo + local_resets: string_set(&["foo"]), + // And wipe the server. 
+ remote_wipes: string_set(&["foo"]), + } + } + ); + } +} diff --git a/third_party/rust/sync15/src/client/status.rs b/third_party/rust/sync15/src/client/status.rs new file mode 100644 index 0000000000..407efeec12 --- /dev/null +++ b/third_party/rust/sync15/src/client/status.rs @@ -0,0 +1,106 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +use crate::error::{Error, ErrorResponse}; +use crate::telemetry::SyncTelemetryPing; +use std::collections::HashMap; +use std::time::{Duration, SystemTime}; + +/// The general status of sync - should probably be moved to the "sync manager" +/// once we have one! +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum ServiceStatus { + /// Everything is fine. + Ok, + /// Some general network issue. + NetworkError, + /// Some apparent issue with the servers. + ServiceError, + /// Some external FxA action needs to be taken. + AuthenticationError, + /// We declined to do anything for backoff or rate-limiting reasons. + BackedOff, + /// We were interrupted. + Interrupted, + /// Something else - you need to check the logs for more details. May + /// or may not be transient, we really don't know. + OtherError, +} + +impl ServiceStatus { + // This is a bit naive and probably will not survive in this form in the + // SyncManager - eg, we'll want to handle backoff etc. + pub fn from_err(err: &Error) -> ServiceStatus { + match err { + // HTTP based errors. + Error::TokenserverHttpError(status) => { + // bit of a shame the tokenserver is different to storage... + if *status == 401 { + ServiceStatus::AuthenticationError + } else { + ServiceStatus::ServiceError + } + } + // BackoffError is also from the tokenserver. + Error::BackoffError(_) => ServiceStatus::ServiceError, + Error::StorageHttpError(ref e) => match e { + ErrorResponse::Unauthorized { .. } => ServiceStatus::AuthenticationError, + _ => ServiceStatus::ServiceError, + }, + + // Network errors. + Error::RequestError(_) | Error::UnexpectedStatus(_) | Error::HawkError(_) => { + ServiceStatus::NetworkError + } + + Error::Interrupted(_) => ServiceStatus::Interrupted, + _ => ServiceStatus::OtherError, + } + } +} + +/// The result of a sync request. This too is from the "sync manager", but only +/// has a fraction of the things it will have when we actually build that. +#[derive(Debug)] +pub struct SyncResult { + /// The general health. + pub service_status: ServiceStatus, + + /// The set of declined engines, if we know them. + pub declined: Option<Vec<String>>, + + /// The result of the sync. + pub result: Result<(), Error>, + + /// The result for each engine. + /// Note that we expect the `String` to be replaced with an enum later. + pub engine_results: HashMap<String, Result<(), Error>>, + + pub telemetry: SyncTelemetryPing, + + pub next_sync_after: Option<std::time::SystemTime>, +} + +// If `r` has a BackoffError, then returns the later backoff value. 
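+// (Sketch, based on `set_sync_after` below: this helper is folded over the
+// top-level result and every per-engine result, so the furthest-out backoff
+// requested by any error wins, e.g.
+//   let base = now + backoff_duration;
+//   let when = engine_results.values().fold(advance_backoff(base, &result), advance_backoff);
+// )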
+fn advance_backoff(cur_best: SystemTime, r: &Result<(), Error>) -> SystemTime { + if let Err(e) = r { + if let Some(time) = e.get_backoff() { + return std::cmp::max(time, cur_best); + } + } + cur_best +} + +impl SyncResult { + pub(crate) fn set_sync_after(&mut self, backoff_duration: Duration) { + let now = SystemTime::now(); + let toplevel = advance_backoff(now + backoff_duration, &self.result); + let sync_after = self.engine_results.values().fold(toplevel, advance_backoff); + if sync_after <= now { + self.next_sync_after = None; + } else { + self.next_sync_after = Some(sync_after); + } + } +} diff --git a/third_party/rust/sync15/src/client/storage_client.rs b/third_party/rust/sync15/src/client/storage_client.rs new file mode 100644 index 0000000000..7b8f650d24 --- /dev/null +++ b/third_party/rust/sync15/src/client/storage_client.rs @@ -0,0 +1,605 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +use super::request::{ + BatchPoster, InfoCollections, InfoConfiguration, PostQueue, PostResponse, PostResponseHandler, +}; +use super::token; +use crate::bso::{IncomingBso, IncomingEncryptedBso, OutgoingBso, OutgoingEncryptedBso}; +use crate::engine::{CollectionPost, CollectionRequest}; +use crate::error::{self, Error, ErrorResponse}; +use crate::record_types::MetaGlobalRecord; +use crate::{CollectionName, Guid, ServerTimestamp}; +use serde_json::Value; +use std::str::FromStr; +use std::sync::atomic::{AtomicU32, Ordering}; +use url::Url; +use viaduct::{ + header_names::{self, AUTHORIZATION}, + Method, Request, Response, +}; + +/// A response from a GET request on a Sync15StorageClient, encapsulating all +/// the variants users of this client needs to care about. +#[derive(Debug, Clone)] +pub enum Sync15ClientResponse<T> { + Success { + status: u16, + record: T, + last_modified: ServerTimestamp, + route: String, + }, + Error(ErrorResponse), +} + +fn parse_seconds(seconds_str: &str) -> Option<u32> { + let secs = seconds_str.parse::<f64>().ok()?.ceil(); + // Note: u32 doesn't impl TryFrom<f64> :( + if !secs.is_finite() || secs < 0.0 || secs > f64::from(u32::max_value()) { + log::warn!("invalid backoff value: {}", secs); + None + } else { + Some(secs as u32) + } +} + +impl<T> Sync15ClientResponse<T> { + pub fn from_response(resp: Response, backoff_listener: &BackoffListener) -> error::Result<Self> + where + for<'a> T: serde::de::Deserialize<'a>, + { + let route: String = resp.url.path().into(); + // Android seems to respect retry_after even on success requests, so we + // will too if it's present. This also lets us handle both backoff-like + // properties in the same place. 
+ let retry_after = resp + .headers + .get(header_names::RETRY_AFTER) + .and_then(parse_seconds); + + let backoff = resp + .headers + .get(header_names::X_WEAVE_BACKOFF) + .and_then(parse_seconds); + + if let Some(b) = backoff { + backoff_listener.note_backoff(b); + } + if let Some(ra) = retry_after { + backoff_listener.note_retry_after(ra); + } + + Ok(if resp.is_success() { + let record: T = resp.json()?; + let last_modified = resp + .headers + .get(header_names::X_LAST_MODIFIED) + .and_then(|s| ServerTimestamp::from_str(s).ok()) + .ok_or(Error::MissingServerTimestamp)?; + log::info!( + "Successful request to \"{}\", incoming x-last-modified={:?}", + route, + last_modified + ); + + Sync15ClientResponse::Success { + status: resp.status, + record, + last_modified, + route, + } + } else { + let status = resp.status; + log::info!("Request \"{}\" was an error (status={})", route, status); + match status { + 404 => Sync15ClientResponse::Error(ErrorResponse::NotFound { route }), + 401 => Sync15ClientResponse::Error(ErrorResponse::Unauthorized { route }), + 412 => Sync15ClientResponse::Error(ErrorResponse::PreconditionFailed { route }), + 500..=600 => { + Sync15ClientResponse::Error(ErrorResponse::ServerError { route, status }) + } + _ => Sync15ClientResponse::Error(ErrorResponse::RequestFailed { route, status }), + } + }) + } + + pub fn create_storage_error(self) -> Error { + let inner = match self { + Sync15ClientResponse::Success { status, route, .. } => { + // This should never happen as callers are expected to have + // already special-cased this response, so warn if it does. + // (or maybe we could panic?) + log::warn!("Converting success response into an error"); + ErrorResponse::RequestFailed { status, route } + } + Sync15ClientResponse::Error(e) => e, + }; + Error::StorageHttpError(inner) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct Sync15StorageClientInit { + pub key_id: String, + pub access_token: String, + pub tokenserver_url: Url, +} + +/// A trait containing the methods required to run through the setup state +/// machine. This is factored out into a separate trait to make mocking +/// easier. 
+pub trait SetupStorageClient { + fn fetch_info_configuration(&self) -> error::Result<Sync15ClientResponse<InfoConfiguration>>; + fn fetch_info_collections(&self) -> error::Result<Sync15ClientResponse<InfoCollections>>; + fn fetch_meta_global(&self) -> error::Result<Sync15ClientResponse<MetaGlobalRecord>>; + fn fetch_crypto_keys(&self) -> error::Result<Sync15ClientResponse<IncomingEncryptedBso>>; + + fn put_meta_global( + &self, + xius: ServerTimestamp, + global: &MetaGlobalRecord, + ) -> error::Result<ServerTimestamp>; + fn put_crypto_keys( + &self, + xius: ServerTimestamp, + keys: &OutgoingEncryptedBso, + ) -> error::Result<()>; + fn wipe_all_remote(&self) -> error::Result<()>; +} + +#[derive(Debug, Default)] +pub struct BackoffState { + pub backoff_secs: AtomicU32, + pub retry_after_secs: AtomicU32, +} + +pub(crate) type BackoffListener = std::sync::Arc<BackoffState>; + +pub(crate) fn new_backoff_listener() -> BackoffListener { + std::sync::Arc::new(BackoffState::default()) +} + +impl BackoffState { + pub fn note_backoff(&self, noted: u32) { + super::util::atomic_update_max(&self.backoff_secs, noted) + } + + pub fn note_retry_after(&self, noted: u32) { + super::util::atomic_update_max(&self.retry_after_secs, noted) + } + + pub fn get_backoff_secs(&self) -> u32 { + self.backoff_secs.load(Ordering::SeqCst) + } + + pub fn get_retry_after_secs(&self) -> u32 { + self.retry_after_secs.load(Ordering::SeqCst) + } + + pub fn get_required_wait(&self, ignore_soft_backoff: bool) -> Option<std::time::Duration> { + let bo = self.get_backoff_secs(); + let ra = self.get_retry_after_secs(); + let secs = u64::from(if ignore_soft_backoff { ra } else { bo.max(ra) }); + if secs > 0 { + Some(std::time::Duration::from_secs(secs)) + } else { + None + } + } + + pub fn reset(&self) { + self.backoff_secs.store(0, Ordering::SeqCst); + self.retry_after_secs.store(0, Ordering::SeqCst); + } +} + +// meta/global is a clear-text Bso (ie, there's a String `payload` which has a MetaGlobalRecord) +// We don't use the 'content' helpers here because we want json errors to be fatal here +// (ie, we don't need tombstones and can't just skip a malformed record) +type IncMetaGlobalBso = IncomingBso; +type OutMetaGlobalBso = OutgoingBso; + +#[derive(Debug)] +pub struct Sync15StorageClient { + tsc: token::TokenProvider, + pub(crate) backoff: BackoffListener, +} + +impl SetupStorageClient for Sync15StorageClient { + fn fetch_info_configuration(&self) -> error::Result<Sync15ClientResponse<InfoConfiguration>> { + self.relative_storage_request(Method::Get, "info/configuration") + } + + fn fetch_info_collections(&self) -> error::Result<Sync15ClientResponse<InfoCollections>> { + self.relative_storage_request(Method::Get, "info/collections") + } + + fn fetch_meta_global(&self) -> error::Result<Sync15ClientResponse<MetaGlobalRecord>> { + let got: Sync15ClientResponse<IncMetaGlobalBso> = + self.relative_storage_request(Method::Get, "storage/meta/global")?; + Ok(match got { + Sync15ClientResponse::Success { + record, + last_modified, + route, + status, + } => { + log::debug!( + "Got meta global with modified = {}; last-modified = {}", + record.envelope.modified, + last_modified + ); + Sync15ClientResponse::Success { + record: serde_json::from_str(&record.payload)?, + last_modified, + route, + status, + } + } + Sync15ClientResponse::Error(e) => Sync15ClientResponse::Error(e), + }) + } + + fn fetch_crypto_keys(&self) -> error::Result<Sync15ClientResponse<IncomingEncryptedBso>> { + self.relative_storage_request(Method::Get, 
"storage/crypto/keys") + } + + fn put_meta_global( + &self, + xius: ServerTimestamp, + global: &MetaGlobalRecord, + ) -> error::Result<ServerTimestamp> { + let bso = OutMetaGlobalBso::new(Guid::new("global").into(), global)?; + self.put("storage/meta/global", xius, &bso) + } + + fn put_crypto_keys( + &self, + xius: ServerTimestamp, + keys: &OutgoingEncryptedBso, + ) -> error::Result<()> { + self.put("storage/crypto/keys", xius, keys)?; + Ok(()) + } + + fn wipe_all_remote(&self) -> error::Result<()> { + let s = self.tsc.api_endpoint()?; + let url = Url::parse(&s)?; + + let req = self.build_request(Method::Delete, url)?; + match self.exec_request::<Value>(req, false) { + Ok(Sync15ClientResponse::Error(ErrorResponse::NotFound { .. })) + | Ok(Sync15ClientResponse::Success { .. }) => Ok(()), + Ok(resp) => Err(resp.create_storage_error()), + Err(e) => Err(e), + } + } +} + +impl Sync15StorageClient { + pub fn new(init_params: Sync15StorageClientInit) -> error::Result<Sync15StorageClient> { + rc_crypto::ensure_initialized(); + let tsc = token::TokenProvider::new( + init_params.tokenserver_url, + init_params.access_token, + init_params.key_id, + )?; + Ok(Sync15StorageClient { + tsc, + backoff: new_backoff_listener(), + }) + } + + pub fn get_encrypted_records( + &self, + collection_request: CollectionRequest, + ) -> error::Result<Sync15ClientResponse<Vec<IncomingEncryptedBso>>> { + self.collection_request(Method::Get, collection_request) + } + + #[inline] + fn authorized(&self, req: Request) -> error::Result<Request> { + let hawk_header_value = self.tsc.authorization(&req)?; + Ok(req.header(AUTHORIZATION, hawk_header_value)?) + } + + // TODO: probably want a builder-like API to do collection requests (e.g. something + // that occupies roughly the same conceptual role as the Collection class in desktop) + fn build_request(&self, method: Method, url: Url) -> error::Result<Request> { + self.authorized(Request::new(method, url).header(header_names::ACCEPT, "application/json")?) + } + + fn relative_storage_request<P, T>( + &self, + method: Method, + relative_path: P, + ) -> error::Result<Sync15ClientResponse<T>> + where + P: AsRef<str>, + for<'a> T: serde::de::Deserialize<'a>, + { + let s = self.tsc.api_endpoint()? + "/"; + let url = Url::parse(&s)?.join(relative_path.as_ref())?; + self.exec_request(self.build_request(method, url)?, false) + } + + fn exec_request<T>( + &self, + req: Request, + require_success: bool, + ) -> error::Result<Sync15ClientResponse<T>> + where + for<'a> T: serde::de::Deserialize<'a>, + { + log::trace!( + "request: {} {} ({:?})", + req.method, + req.url.path(), + req.url.query() + ); + let resp = req.send()?; + + let result = Sync15ClientResponse::from_response(resp, &self.backoff)?; + match result { + Sync15ClientResponse::Success { .. 
} => Ok(result), + _ => { + if require_success { + Err(result.create_storage_error()) + } else { + Ok(result) + } + } + } + } + + fn collection_request<T>( + &self, + method: Method, + r: CollectionRequest, + ) -> error::Result<Sync15ClientResponse<T>> + where + for<'a> T: serde::de::Deserialize<'a>, + { + let url = build_collection_request_url(Url::parse(&self.tsc.api_endpoint()?)?, r)?; + self.exec_request(self.build_request(method, url)?, false) + } + + pub fn new_post_queue<'a, F: PostResponseHandler>( + &'a self, + coll: &'a CollectionName, + config: &InfoConfiguration, + ts: ServerTimestamp, + on_response: F, + ) -> error::Result<PostQueue<PostWrapper<'a>, F>> { + let pw = PostWrapper { client: self, coll }; + Ok(PostQueue::new(config, ts, pw, on_response)) + } + + fn put<P, B>( + &self, + relative_path: P, + xius: ServerTimestamp, + body: &B, + ) -> error::Result<ServerTimestamp> + where + P: AsRef<str>, + B: serde::ser::Serialize, + { + let s = self.tsc.api_endpoint()? + "/"; + let url = Url::parse(&s)?.join(relative_path.as_ref())?; + + let req = self + .build_request(Method::Put, url)? + .json(body) + .header(header_names::X_IF_UNMODIFIED_SINCE, format!("{}", xius))?; + + let resp = self.exec_request::<Value>(req, true)?; + // Note: we pass `true` for require_success, so this panic never happens. + if let Sync15ClientResponse::Success { last_modified, .. } = resp { + Ok(last_modified) + } else { + unreachable!("Error returned exec_request when `require_success` was true"); + } + } + + pub fn hashed_uid(&self) -> error::Result<String> { + self.tsc.hashed_uid() + } + + pub(crate) fn wipe_remote_engine(&self, engine: &str) -> error::Result<()> { + let s = self.tsc.api_endpoint()? + "/"; + let url = Url::parse(&s)?.join(&format!("storage/{}", engine))?; + log::debug!("Wiping: {:?}", url); + let req = self.build_request(Method::Delete, url)?; + match self.exec_request::<Value>(req, false) { + Ok(Sync15ClientResponse::Error(ErrorResponse::NotFound { .. })) + | Ok(Sync15ClientResponse::Success { .. }) => Ok(()), + Ok(resp) => Err(resp.create_storage_error()), + Err(e) => Err(e), + } + } +} + +pub struct PostWrapper<'a> { + client: &'a Sync15StorageClient, + coll: &'a CollectionName, +} + +impl<'a> BatchPoster for PostWrapper<'a> { + fn post<T, O>( + &self, + bytes: Vec<u8>, + xius: ServerTimestamp, + batch: Option<String>, + commit: bool, + _: &PostQueue<T, O>, + ) -> error::Result<PostResponse> { + let r = CollectionPost::new(self.coll.clone()) + .batch(batch) + .commit(commit); + let url = build_collection_post_url(Url::parse(&self.client.tsc.api_endpoint()?)?, r)?; + + let req = self + .client + .build_request(Method::Post, url)? + .header(header_names::CONTENT_TYPE, "application/json")? + .header(header_names::X_IF_UNMODIFIED_SINCE, format!("{}", xius))? + .body(bytes); + self.client.exec_request(req, false) + } +} + +fn build_collection_url(mut base_url: Url, collection: CollectionName) -> error::Result<Url> { + base_url + .path_segments_mut() + .map_err(|_| Error::UnacceptableUrl("Storage server URL is not a base".to_string()))? + .extend(&["storage", &collection]); + + // This is strange but just accessing query_pairs_mut makes you have + // a trailing question mark on your url. I don't think anything bad + // would happen here, but I don't know, and also, it looks dumb so + // I'd rather not have it. 
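+    // e.g. "https://example.com/sync/storage/foo?" becomes
+    // "https://example.com/sync/storage/foo" (the URL shape the tests below expect).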
+ if base_url.query() == Some("") { + base_url.set_query(None); + } + Ok(base_url) +} + +fn build_collection_request_url(mut base_url: Url, r: CollectionRequest) -> error::Result<Url> { + let mut pairs = base_url.query_pairs_mut(); + if r.full { + pairs.append_pair("full", "1"); + } + if let Some(ids) = &r.ids { + // Most ids are 12 characters, and we comma separate them, so 13. + let mut buf = String::with_capacity(ids.len() * 13); + for (i, id) in ids.iter().enumerate() { + if i > 0 { + buf.push(','); + } + buf.push_str(id.as_str()); + } + pairs.append_pair("ids", &buf); + } + if let Some(ts) = r.older { + pairs.append_pair("older", &ts.to_string()); + } + if let Some(ts) = r.newer { + pairs.append_pair("newer", &ts.to_string()); + } + if let Some(l) = r.limit { + pairs.append_pair("sort", l.order.as_str()); + pairs.append_pair("limit", &l.num.to_string()); + } + pairs.finish(); + drop(pairs); + build_collection_url(base_url, r.collection) +} + +#[cfg(feature = "sync-client")] +fn build_collection_post_url(mut base_url: Url, r: CollectionPost) -> error::Result<Url> { + let mut pairs = base_url.query_pairs_mut(); + if let Some(batch) = &r.batch { + pairs.append_pair("batch", batch); + } + if r.commit { + pairs.append_pair("commit", "true"); + } + pairs.finish(); + drop(pairs); + build_collection_url(base_url, r.collection) +} + +#[cfg(test)] +mod test { + use super::*; + #[test] + fn test_send() { + fn ensure_send<T: Send>() {} + // Compile will fail if not send. + ensure_send::<Sync15StorageClient>(); + } + + #[test] + fn test_parse_seconds() { + assert_eq!(parse_seconds("1"), Some(1)); + assert_eq!(parse_seconds("1.4"), Some(2)); + assert_eq!(parse_seconds("1.5"), Some(2)); + assert_eq!(parse_seconds("3600.0"), Some(3600)); + assert_eq!(parse_seconds("3600"), Some(3600)); + assert_eq!(parse_seconds("-1"), None); + assert_eq!(parse_seconds("inf"), None); + assert_eq!(parse_seconds("-inf"), None); + assert_eq!(parse_seconds("one-thousand"), None); + assert_eq!(parse_seconds("4294967295"), Some(4294967295)); + assert_eq!(parse_seconds("4294967296"), None); + } + + #[test] + fn test_query_building() { + use crate::engine::RequestOrder; + let base = Url::parse("https://example.com/sync").unwrap(); + + let empty = + build_collection_request_url(base.clone(), CollectionRequest::new("foo".into())) + .unwrap(); + assert_eq!(empty.as_str(), "https://example.com/sync/storage/foo"); + + let idreq = build_collection_request_url( + base.clone(), + CollectionRequest::new("wutang".into()) + .full() + .ids(&["rza", "gza"]), + ) + .unwrap(); + assert_eq!( + idreq.as_str(), + "https://example.com/sync/storage/wutang?full=1&ids=rza%2Cgza" + ); + + let complex = build_collection_request_url( + base, + CollectionRequest::new("specific".into()) + .full() + .limit(10, RequestOrder::Oldest) + .older_than(ServerTimestamp(9_876_540)) + .newer_than(ServerTimestamp(1_234_560)), + ) + .unwrap(); + assert_eq!(complex.as_str(), + "https://example.com/sync/storage/specific?full=1&older=9876.54&newer=1234.56&sort=oldest&limit=10"); + } + + #[cfg(feature = "sync-client")] + #[test] + fn test_post_query_building() { + let base = Url::parse("https://example.com/sync").unwrap(); + + let empty = + build_collection_post_url(base.clone(), CollectionPost::new("foo".into())).unwrap(); + assert_eq!(empty.as_str(), "https://example.com/sync/storage/foo"); + let batch_start = build_collection_post_url( + base.clone(), + CollectionPost::new("bar".into()) + .batch(Some("true".into())) + .commit(false), + ) + .unwrap(); + assert_eq!( 
+ batch_start.as_str(), + "https://example.com/sync/storage/bar?batch=true" + ); + let batch_commit = build_collection_post_url( + base, + CollectionPost::new("asdf".into()) + .batch(Some("1234abc".into())) + .commit(true), + ) + .unwrap(); + assert_eq!( + batch_commit.as_str(), + "https://example.com/sync/storage/asdf?batch=1234abc&commit=true" + ); + } +} diff --git a/third_party/rust/sync15/src/client/sync.rs b/third_party/rust/sync15/src/client/sync.rs new file mode 100644 index 0000000000..55bcb491e9 --- /dev/null +++ b/third_party/rust/sync15/src/client/sync.rs @@ -0,0 +1,94 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +use super::{CollectionUpdate, GlobalState, LocalCollStateMachine, Sync15StorageClient}; +use crate::clients_engine; +use crate::engine::{IncomingChangeset, SyncEngine}; +use crate::error::Error; +use crate::telemetry; +use crate::KeyBundle; +use interrupt_support::Interruptee; + +#[allow(clippy::too_many_arguments)] +pub fn synchronize_with_clients_engine( + client: &Sync15StorageClient, + global_state: &GlobalState, + root_sync_key: &KeyBundle, + clients: Option<&clients_engine::Engine<'_>>, + engine: &dyn SyncEngine, + fully_atomic: bool, + telem_engine: &mut telemetry::Engine, + interruptee: &dyn Interruptee, +) -> Result<(), Error> { + let collection = engine.collection_name(); + log::info!("Syncing collection {}", collection); + + // our global state machine is ready - get the collection machine going. + let coll_state = match LocalCollStateMachine::get_state(engine, global_state, root_sync_key)? { + Some(coll_state) => coll_state, + None => { + // XXX - this is either "error" or "declined". + log::warn!( + "can't setup for the {} collection - hopefully it works later", + collection + ); + return Ok(()); + } + }; + + if let Some(clients) = clients { + engine.prepare_for_sync(&|| clients.get_client_data())?; + } + + let collection_requests = engine.get_collection_requests(coll_state.last_modified)?; + let incoming = if collection_requests.is_empty() { + log::info!("skipping incoming for {} - not needed.", collection); + vec![IncomingChangeset::new(collection, coll_state.last_modified)] + } else { + assert_eq!(collection_requests.last().unwrap().collection, collection); + + let count = collection_requests.len(); + collection_requests + .into_iter() + .enumerate() + .map(|(idx, collection_request)| { + interruptee.err_if_interrupted()?; + let incoming_changes = + super::fetch_incoming(client, &coll_state, collection_request)?; + + log::info!( + "Downloaded {} remote changes (request {} of {})", + incoming_changes.changes.len(), + idx, + count, + ); + Ok(incoming_changes) + }) + .collect::<Result<Vec<_>, Error>>()? + }; + + let outgoing = engine.apply_incoming(incoming, telem_engine)?; + interruptee.err_if_interrupted()?; + log::info!("Uploading {} outgoing changes", outgoing.changes.len()); + let upload_info = + CollectionUpdate::new_from_changeset(client, &coll_state, outgoing, fully_atomic)? + .upload()?; + log::info!( + "Upload success ({} records success, {} records failed)", + upload_info.successful_ids.len(), + upload_info.failed_ids.len() + ); + + // ideally we'd report this per-batch, but for now, let's just report it + // as a total. 
+ let mut telem_outgoing = telemetry::EngineOutgoing::new(); + telem_outgoing.sent(upload_info.successful_ids.len() + upload_info.failed_ids.len()); + telem_outgoing.failed(upload_info.failed_ids.len()); + telem_engine.outgoing(telem_outgoing); + + engine.sync_finished(upload_info.modified_timestamp, upload_info.successful_ids)?; + + log::info!("Sync finished!"); + Ok(()) +} diff --git a/third_party/rust/sync15/src/client/sync_multiple.rs b/third_party/rust/sync15/src/client/sync_multiple.rs new file mode 100644 index 0000000000..79ddceff3c --- /dev/null +++ b/third_party/rust/sync15/src/client/sync_multiple.rs @@ -0,0 +1,493 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +// This helps you perform a sync of multiple engines and helps you manage +// global and local state between syncs. + +use super::state::{EngineChangesNeeded, GlobalState, PersistedGlobalState, SetupStateMachine}; +use super::status::{ServiceStatus, SyncResult}; +use super::storage_client::{BackoffListener, Sync15StorageClient, Sync15StorageClientInit}; +use crate::clients_engine::{self, CommandProcessor, CLIENTS_TTL_REFRESH}; +use crate::engine::{EngineSyncAssociation, SyncEngine}; +use crate::error::Error; +use crate::telemetry; +use crate::KeyBundle; +use interrupt_support::Interruptee; +use std::collections::HashMap; +use std::mem; +use std::result; +use std::time::{Duration, SystemTime}; + +/// Info about the client to use. We reuse the client unless +/// we discover the client_init has changed, in which case we re-create one. +#[derive(Debug)] +struct ClientInfo { + // the client_init used to create `client`. + client_init: Sync15StorageClientInit, + // the client (our tokenserver state machine state, and our http library's state) + client: Sync15StorageClient, +} + +impl ClientInfo { + fn new(ci: &Sync15StorageClientInit) -> Result<Self, Error> { + Ok(Self { + client_init: ci.clone(), + client: Sync15StorageClient::new(ci.clone())?, + }) + } +} + +/// Info we want callers to engine *in memory* for us so that subsequent +/// syncs are faster. This should never be persisted to storage as it holds +/// sensitive information, such as the sync decryption keys. +#[derive(Debug, Default)] +pub struct MemoryCachedState { + last_client_info: Option<ClientInfo>, + last_global_state: Option<GlobalState>, + // These are just engined in memory, as persisting an invalid value far in the + // future has the potential to break sync for good. + next_sync_after: Option<SystemTime>, + next_client_refresh_after: Option<SystemTime>, +} + +impl MemoryCachedState { + // Called we notice the cached state is stale. + pub fn clear_sensitive_info(&mut self) { + self.last_client_info = None; + self.last_global_state = None; + // Leave the backoff time, as there's no reason to think it's not still + // true. + } + pub fn get_next_sync_after(&self) -> Option<SystemTime> { + self.next_sync_after + } + pub fn should_refresh_client(&self) -> bool { + match self.next_client_refresh_after { + Some(t) => SystemTime::now() > t, + None => true, + } + } + pub fn note_client_refresh(&mut self) { + self.next_client_refresh_after = + Some(SystemTime::now() + Duration::from_secs(CLIENTS_TTL_REFRESH)); + } +} + +/// Sync multiple engines +/// * `engines` - The engines to sync +/// * `persisted_global_state` - The global state to use, or None if never +/// before provided. 
At the end of the sync, and even when the sync fails, +/// the value in this cell should be persisted to permanent storage and +/// provided next time the sync is called. +/// * `last_client_info` - The client state to use, or None if never before +/// provided. At the end of the sync, the value should be persisted +/// *in memory only* - it should not be persisted to disk. +/// * `storage_init` - Information about how the sync http client should be +/// configured. +/// * `root_sync_key` - The KeyBundle used for encryption. +/// +/// Returns a map, keyed by name and holding an error value - if any engine +/// fails, the sync will continue on to other engines, but the error will be +/// places in this map. The absence of a name in the map implies the engine +/// succeeded. +pub fn sync_multiple( + engines: &[&dyn SyncEngine], + persisted_global_state: &mut Option<String>, + mem_cached_state: &mut MemoryCachedState, + storage_init: &Sync15StorageClientInit, + root_sync_key: &KeyBundle, + interruptee: &dyn Interruptee, + req_info: Option<SyncRequestInfo<'_>>, +) -> SyncResult { + sync_multiple_with_command_processor( + None, + engines, + persisted_global_state, + mem_cached_state, + storage_init, + root_sync_key, + interruptee, + req_info, + ) +} + +/// Like `sync_multiple`, but specifies an optional command processor to handle +/// commands from the clients collection. This function is called by the sync +/// manager, which provides its own processor. +#[allow(clippy::too_many_arguments)] +pub fn sync_multiple_with_command_processor( + command_processor: Option<&dyn CommandProcessor>, + engines: &[&dyn SyncEngine], + persisted_global_state: &mut Option<String>, + mem_cached_state: &mut MemoryCachedState, + storage_init: &Sync15StorageClientInit, + root_sync_key: &KeyBundle, + interruptee: &dyn Interruptee, + req_info: Option<SyncRequestInfo<'_>>, +) -> SyncResult { + log::info!("Syncing {} engines", engines.len()); + let mut sync_result = SyncResult { + service_status: ServiceStatus::OtherError, + result: Ok(()), + declined: None, + next_sync_after: None, + engine_results: HashMap::with_capacity(engines.len()), + telemetry: telemetry::SyncTelemetryPing::new(), + }; + let backoff = super::storage_client::new_backoff_listener(); + let req_info = req_info.unwrap_or_default(); + let driver = SyncMultipleDriver { + command_processor, + engines, + storage_init, + interruptee, + engines_to_state_change: req_info.engines_to_state_change, + backoff: backoff.clone(), + root_sync_key, + result: &mut sync_result, + persisted_global_state, + mem_cached_state, + saw_auth_error: false, + ignore_soft_backoff: req_info.is_user_action, + }; + match driver.sync() { + Ok(()) => { + log::debug!( + "sync was successful, final status={:?}", + sync_result.service_status + ); + } + Err(e) => { + log::warn!( + "sync failed: {}, final status={:?}", + e, + sync_result.service_status, + ); + sync_result.result = Err(e); + } + } + // Respect `backoff` value when computing the next sync time even if we were + // ignoring it during the sync + sync_result.set_sync_after(backoff.get_required_wait(false).unwrap_or_default()); + mem_cached_state.next_sync_after = sync_result.next_sync_after; + log::trace!("Sync result: {:?}", sync_result); + sync_result +} + +/// This is essentially a bag of information that the sync manager knows, but +/// otherwise we won't. It should probably be rethought if it gains many more +/// fields. 
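+/// (A hypothetical construction, for illustration only:
+/// `SyncRequestInfo { engines_to_state_change: Some(&changes), is_user_action: true }`,
+/// where `changes` is a `HashMap<String, bool>` mapping engine names to their
+/// newly requested enabled state.)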
+#[derive(Debug, Default)] +pub struct SyncRequestInfo<'a> { + pub engines_to_state_change: Option<&'a HashMap<String, bool>>, + pub is_user_action: bool, +} + +// The sync multiple driver +struct SyncMultipleDriver<'info, 'res, 'pgs, 'mcs> { + command_processor: Option<&'info dyn CommandProcessor>, + engines: &'info [&'info dyn SyncEngine], + storage_init: &'info Sync15StorageClientInit, + root_sync_key: &'info KeyBundle, + interruptee: &'info dyn Interruptee, + backoff: BackoffListener, + engines_to_state_change: Option<&'info HashMap<String, bool>>, + result: &'res mut SyncResult, + persisted_global_state: &'pgs mut Option<String>, + mem_cached_state: &'mcs mut MemoryCachedState, + ignore_soft_backoff: bool, + saw_auth_error: bool, +} + +impl<'info, 'res, 'pgs, 'mcs> SyncMultipleDriver<'info, 'res, 'pgs, 'mcs> { + /// The actual worker for sync_multiple. + fn sync(mut self) -> result::Result<(), Error> { + log::info!("Loading/initializing persisted state"); + let mut pgs = self.prepare_persisted_state(); + + log::info!("Preparing client info"); + let client_info = self.prepare_client_info()?; + + if self.was_interrupted() { + return Ok(()); + } + + log::info!("Entering sync state machine"); + // Advance the state machine to the point where it can perform a full + // sync. This may involve uploading meta/global, crypto/keys etc. + let mut global_state = self.run_state_machine(&client_info, &mut pgs)?; + + if self.was_interrupted() { + return Ok(()); + } + + // Set the service status to OK here - we may adjust it based on an individual + // engine failing. + self.result.service_status = ServiceStatus::Ok; + + let clients_engine = if let Some(command_processor) = self.command_processor { + log::info!("Synchronizing clients engine"); + let should_refresh = self.mem_cached_state.should_refresh_client(); + let mut engine = clients_engine::Engine::new(command_processor, self.interruptee); + if let Err(e) = engine.sync( + &client_info.client, + &global_state, + self.root_sync_key, + should_refresh, + ) { + // Record telemetry with the error just in case... + let mut telem_sync = telemetry::SyncTelemetry::new(); + let mut telem_engine = telemetry::Engine::new("clients"); + telem_engine.failure(&e); + telem_sync.engine(telem_engine); + self.result.service_status = ServiceStatus::from_err(&e); + + // ...And bail, because a clients engine sync failure is fatal. + return Err(e); + } + // We don't record telemetry for successful clients engine + // syncs, since we only keep client records in memory, we + // expect the counts to be the same most times, and a + // failure aborts the entire sync. 
+ if self.was_interrupted() { + return Ok(()); + } + self.mem_cached_state.note_client_refresh(); + Some(engine) + } else { + None + }; + + log::info!("Synchronizing engines"); + + let telem_sync = + self.sync_engines(&client_info, &mut global_state, clients_engine.as_ref()); + self.result.telemetry.sync(telem_sync); + + log::info!("Finished syncing engines."); + + if !self.saw_auth_error { + log::trace!("Updating persisted global state"); + self.mem_cached_state.last_client_info = Some(client_info); + self.mem_cached_state.last_global_state = Some(global_state); + } + + Ok(()) + } + + fn was_interrupted(&mut self) -> bool { + if self.interruptee.was_interrupted() { + log::info!("Interrupted, bailing out"); + self.result.service_status = ServiceStatus::Interrupted; + true + } else { + false + } + } + + fn sync_engines( + &mut self, + client_info: &ClientInfo, + global_state: &mut GlobalState, + clients: Option<&clients_engine::Engine<'_>>, + ) -> telemetry::SyncTelemetry { + let mut telem_sync = telemetry::SyncTelemetry::new(); + for engine in self.engines { + let name = engine.collection_name(); + if self + .backoff + .get_required_wait(self.ignore_soft_backoff) + .is_some() + { + log::warn!("Got backoff, bailing out of sync early"); + break; + } + if global_state.global.declined.iter().any(|e| e == &*name) { + log::info!("The {} engine is declined. Skipping", name); + continue; + } + log::info!("Syncing {} engine!", name); + + let mut telem_engine = telemetry::Engine::new(&*name); + let result = super::sync::synchronize_with_clients_engine( + &client_info.client, + global_state, + self.root_sync_key, + clients, + *engine, + true, + &mut telem_engine, + self.interruptee, + ); + + match result { + Ok(()) => log::info!("Sync of {} was successful!", name), + Err(ref e) => { + log::warn!("Sync of {} failed! {:?}", name, e); + let this_status = ServiceStatus::from_err(e); + // The only error which forces us to discard our state is an + // auth error. + self.saw_auth_error = + self.saw_auth_error || this_status == ServiceStatus::AuthenticationError; + telem_engine.failure(e); + // If the failure from the engine looks like anything other than + // a "engine error" we don't bother trying the others. + if this_status != ServiceStatus::OtherError { + telem_sync.engine(telem_engine); + self.result.engine_results.insert(name.into(), result); + self.result.service_status = this_status; + break; + } + } + } + telem_sync.engine(telem_engine); + self.result.engine_results.insert(name.into(), result); + if self.was_interrupted() { + break; + } + } + telem_sync + } + + fn run_state_machine( + &mut self, + client_info: &ClientInfo, + pgs: &mut PersistedGlobalState, + ) -> result::Result<GlobalState, Error> { + let last_state = mem::replace(&mut self.mem_cached_state.last_global_state, None); + + let mut state_machine = SetupStateMachine::for_full_sync( + &client_info.client, + self.root_sync_key, + pgs, + self.engines_to_state_change, + self.interruptee, + ); + + log::info!("Advancing state machine to ready (full)"); + let res = state_machine.run_to_ready(last_state); + // Grab this now even though we don't need it until later to avoid a + // lifetime issue + let changes = state_machine.changes_needed.take(); + // The state machine might have updated our persisted_global_state, so + // update the caller's repr of it. 
+ *self.persisted_global_state = Some(serde_json::to_string(&pgs)?); + + // Now that we've gone through the state machine, engine the declined list in + // the sync_result + self.result.declined = Some(pgs.get_declined().to_vec()); + log::debug!( + "Declined engines list after state machine set to: {:?}", + self.result.declined, + ); + + if let Some(c) = changes { + self.wipe_or_reset_engines(c, &client_info.client)?; + } + let state = match res { + Err(e) => { + self.result.service_status = ServiceStatus::from_err(&e); + return Err(e); + } + Ok(state) => state, + }; + self.result.telemetry.uid(client_info.client.hashed_uid()?); + // As for client_info, put None back now so we start from scratch on error. + self.mem_cached_state.last_global_state = None; + Ok(state) + } + + fn wipe_or_reset_engines( + &mut self, + changes: EngineChangesNeeded, + client: &Sync15StorageClient, + ) -> result::Result<(), Error> { + if changes.local_resets.is_empty() && changes.remote_wipes.is_empty() { + return Ok(()); + } + for e in &changes.remote_wipes { + log::info!("Engine {:?} just got disabled locally, wiping server", e); + client.wipe_remote_engine(e)?; + } + + for s in self.engines { + let name = s.collection_name(); + if changes.local_resets.contains(&*name) { + log::info!("Resetting engine {}, as it was declined remotely", name); + s.reset(&EngineSyncAssociation::Disconnected)?; + } + } + + Ok(()) + } + + fn prepare_client_info(&mut self) -> result::Result<ClientInfo, Error> { + let mut client_info = match mem::replace(&mut self.mem_cached_state.last_client_info, None) + { + Some(client_info) => { + // if our storage_init has changed it probably means the user has + // changed, courtesy of the 'kid' in the structure. Thus, we can't + // reuse the client or the memory cached state. We do keep the disk + // state as currently that's only the declined list. + if client_info.client_init != *self.storage_init { + log::info!("Discarding all state as the account might have changed"); + *self.mem_cached_state = MemoryCachedState::default(); + ClientInfo::new(self.storage_init)? + } else { + log::debug!("Reusing memory-cached client_info"); + // we can reuse it (which should be the common path) + client_info + } + } + None => { + log::debug!("mem_cached_state was stale or missing, need setup"); + // We almost certainly have no other state here, but to be safe, we + // throw away any memory state we do have. + self.mem_cached_state.clear_sensitive_info(); + ClientInfo::new(self.storage_init)? + } + }; + // Ensure we use the correct listener here rather than on all the branches + // above, since it seems less error prone. + client_info.client.backoff = self.backoff.clone(); + Ok(client_info) + } + + fn prepare_persisted_state(&mut self) -> PersistedGlobalState { + // Note that any failure to use a persisted state means we also decline + // to use our memory cached state, so that we fully rebuild that + // persisted state for next time. + match self.persisted_global_state { + Some(persisted_string) if !persisted_string.is_empty() => { + match serde_json::from_str::<PersistedGlobalState>(persisted_string) { + Ok(state) => { + log::trace!("Read persisted state: {:?}", state); + // Note that we don't set `result.declined` from the + // data in state - it remains None, which explicitly + // indicates "we don't have updated info". 
+ state + } + _ => { + // Don't log the error since it might contain sensitive + // info (although currently it only contains the declined engines list) + error_support::report_error!( + "sync15-prepare-persisted-state", + "Failed to parse PersistedGlobalState from JSON! Falling back to default" + ); + *self.mem_cached_state = MemoryCachedState::default(); + PersistedGlobalState::default() + } + } + } + _ => { + log::info!( + "The application didn't give us persisted state - \ + this is only expected on the very first run for a given user." + ); + *self.mem_cached_state = MemoryCachedState::default(); + PersistedGlobalState::default() + } + } + } +} diff --git a/third_party/rust/sync15/src/client/token.rs b/third_party/rust/sync15/src/client/token.rs new file mode 100644 index 0000000000..b416c0c12a --- /dev/null +++ b/third_party/rust/sync15/src/client/token.rs @@ -0,0 +1,602 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +use crate::error::{self, Error as ErrorKind, Result}; +use crate::ServerTimestamp; +use rc_crypto::hawk; +use serde_derive::*; +use std::borrow::{Borrow, Cow}; +use std::cell::RefCell; +use std::fmt; +use std::time::{Duration, SystemTime}; +use url::Url; +use viaduct::{header_names, Request}; + +const RETRY_AFTER_DEFAULT_MS: u64 = 10000; + +// The TokenserverToken is the token as received directly from the token server +// and deserialized from JSON. +#[derive(Deserialize, Clone, PartialEq, Eq)] +struct TokenserverToken { + id: String, + key: String, + api_endpoint: String, + uid: u64, + duration: u64, + hashed_fxa_uid: String, +} + +impl std::fmt::Debug for TokenserverToken { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("TokenserverToken") + .field("api_endpoint", &self.api_endpoint) + .field("uid", &self.uid) + .field("duration", &self.duration) + .field("hashed_fxa_uid", &self.hashed_fxa_uid) + .finish() + } +} + +// The struct returned by the TokenFetcher - the token itself and the +// server timestamp. +struct TokenFetchResult { + token: TokenserverToken, + server_timestamp: ServerTimestamp, +} + +// The trait for fetching tokens - we'll provide a "real" implementation but +// tests will re-implement it. +trait TokenFetcher { + fn fetch_token(&self) -> crate::Result<TokenFetchResult>; + // We allow the trait to tell us what the time is so tests can get funky. + fn now(&self) -> SystemTime; +} + +// Our "real" token fetcher, implementing the TokenFetcher trait, which hits +// the token server +#[derive(Debug)] +struct TokenServerFetcher { + // The stuff needed to fetch a token. + server_url: Url, + access_token: String, + key_id: String, +} + +fn fixup_server_url(mut url: Url) -> url::Url { + // The given `url` is the end-point as returned by .well-known/fxa-client-configuration, + // or as directly specified by self-hosters. As a result, it may or may not have + // the sync 1.5 suffix of "/1.0/sync/1.5", so add it on here if it does not. + if url.as_str().ends_with("1.0/sync/1.5") { + // ok! + } else if url.as_str().ends_with("1.0/sync/1.5/") { + // Shouldn't ever be Err() here, but the result is `Result<PathSegmentsMut, ()>` + // and I don't want to unwrap or add a new error type just for PathSegmentsMut failing. 
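+        // e.g. "https://example.com/1.0/sync/1.5/" becomes "https://example.com/1.0/sync/1.5".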
+ if let Ok(mut path) = url.path_segments_mut() { + path.pop(); + } + } else { + // We deliberately don't use `.join()` here in order to preserve all path components. + // For example, "http://example.com/token" should produce "http://example.com/token/1.0/sync/1.5" + // but using `.join()` would produce "http://example.com/1.0/sync/1.5". + if let Ok(mut path) = url.path_segments_mut() { + path.pop_if_empty(); + path.extend(&["1.0", "sync", "1.5"]); + } + }; + url +} + +impl TokenServerFetcher { + fn new(base_url: Url, access_token: String, key_id: String) -> TokenServerFetcher { + TokenServerFetcher { + server_url: fixup_server_url(base_url), + access_token, + key_id, + } + } +} + +impl TokenFetcher for TokenServerFetcher { + fn fetch_token(&self) -> Result<TokenFetchResult> { + log::debug!("Fetching token from {}", self.server_url); + let resp = Request::get(self.server_url.clone()) + .header( + header_names::AUTHORIZATION, + format!("Bearer {}", self.access_token), + )? + .header(header_names::X_KEYID, self.key_id.clone())? + .send()?; + + if !resp.is_success() { + log::warn!("Non-success status when fetching token: {}", resp.status); + // TODO: the body should be JSON and contain a status parameter we might need? + log::trace!(" Response body {}", resp.text()); + // XXX - shouldn't we "chain" these errors - ie, a BackoffError could + // have a TokenserverHttpError as its cause? + if let Some(res) = resp.headers.get_as::<f64, _>(header_names::RETRY_AFTER) { + let ms = res + .ok() + .map_or(RETRY_AFTER_DEFAULT_MS, |f| (f * 1000f64) as u64); + let when = self.now() + Duration::from_millis(ms); + return Err(ErrorKind::BackoffError(when)); + } + let status = resp.status; + return Err(ErrorKind::TokenserverHttpError(status)); + } + + let token: TokenserverToken = resp.json()?; + let server_timestamp = resp + .headers + .try_get::<ServerTimestamp, _>(header_names::X_TIMESTAMP) + .ok_or(ErrorKind::MissingServerTimestamp)?; + Ok(TokenFetchResult { + token, + server_timestamp, + }) + } + + fn now(&self) -> SystemTime { + SystemTime::now() + } +} + +// The context stored by our TokenProvider when it has a TokenState::Token +// state. +struct TokenContext { + token: TokenserverToken, + credentials: hawk::Credentials, + server_timestamp: ServerTimestamp, + valid_until: SystemTime, +} + +// hawk::Credentials doesn't implement debug -_- +impl fmt::Debug for TokenContext { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> ::std::result::Result<(), fmt::Error> { + f.debug_struct("TokenContext") + .field("token", &self.token) + .field("credentials", &"(omitted)") + .field("server_timestamp", &self.server_timestamp) + .field("valid_until", &self.valid_until) + .finish() + } +} + +impl TokenContext { + fn new( + token: TokenserverToken, + credentials: hawk::Credentials, + server_timestamp: ServerTimestamp, + valid_until: SystemTime, + ) -> Self { + Self { + token, + credentials, + server_timestamp, + valid_until, + } + } + + fn is_valid(&self, now: SystemTime) -> bool { + // We could consider making the duration a little shorter - if it + // only has 1 second validity there seems a reasonable chance it will + // have expired by the time it gets presented to the remote that wants + // it. + // Either way though, we will eventually need to handle a token being + // rejected as a non-fatal error and recover, so maybe we don't care? 
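+        // As a concrete (illustrative) example: a token with `duration: 300`
+        // gets `valid_until = fetch_time + 300s` in `fetch_context()` later in
+        // this file, so this check passes for roughly five minutes of local
+        // clock time.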
+ now < self.valid_until + } + + fn authorization(&self, req: &Request) -> Result<String> { + let url = &req.url; + + let path_and_query = match url.query() { + None => Cow::from(url.path()), + Some(qs) => Cow::from(format!("{}?{}", url.path(), qs)), + }; + + let host = url + .host_str() + .ok_or_else(|| ErrorKind::UnacceptableUrl("Storage URL has no host".into()))?; + + // Known defaults exist for https? (among others), so this should be impossible + let port = url.port_or_known_default().ok_or_else(|| { + ErrorKind::UnacceptableUrl( + "Storage URL has no port and no default port is known for the protocol".into(), + ) + })?; + + let header = + hawk::RequestBuilder::new(req.method.as_str(), host, port, path_and_query.borrow()) + .request() + .make_header(&self.credentials)?; + + Ok(format!("Hawk {}", header)) + } +} + +// The state our TokenProvider holds to reflect the state of the token. +#[derive(Debug)] +enum TokenState { + // We've never fetched a token. + NoToken, + // Have a token and last we checked it remained valid. + Token(TokenContext), + // We failed to fetch a token. First elt is the error, second elt is + // the api_endpoint we had before we failed to fetch a new token (or + // None if the very first attempt at fetching a token failed) + Failed(Option<error::Error>, Option<String>), + // Previously failed and told to back-off for SystemTime duration. Second + // elt is the api_endpoint we had before we hit the backoff error. + // XXX - should we roll Backoff and Failed together? + Backoff(SystemTime, Option<String>), + // api_endpoint changed - we are never going to get a token nor move out + // of this state. + NodeReassigned, +} + +/// The generic TokenProvider implementation - long lived and fetches tokens +/// on demand (eg, when first needed, or when an existing one expires.) +#[derive(Debug)] +struct TokenProviderImpl<TF: TokenFetcher> { + fetcher: TF, + // Our token state (ie, whether we have a token, and if not, why not) + current_state: RefCell<TokenState>, +} + +impl<TF: TokenFetcher> TokenProviderImpl<TF> { + fn new(fetcher: TF) -> Self { + // We check this at the real entrypoint of the application, but tests + // can/do bypass that, so check this here too. + rc_crypto::ensure_initialized(); + TokenProviderImpl { + fetcher, + current_state: RefCell::new(TokenState::NoToken), + } + } + + // Uses our fetcher to grab a new token and if successfull, derives other + // info from that token into a usable TokenContext. + fn fetch_context(&self) -> Result<TokenContext> { + let result = self.fetcher.fetch_token()?; + let token = result.token; + let valid_until = SystemTime::now() + Duration::from_secs(token.duration); + + let credentials = hawk::Credentials { + id: token.id.clone(), + key: hawk::Key::new(token.key.as_bytes(), hawk::SHA256)?, + }; + + Ok(TokenContext::new( + token, + credentials, + result.server_timestamp, + valid_until, + )) + } + + // Attempt to fetch a new token and return a new state reflecting that + // operation. If it worked a TokenState will be returned, but errors may + // cause other states. 
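+    // Summarising the match in the body below:
+    //
+    //     Ok(tc), endpoint unchanged (or no previous endpoint) -> TokenState::Token(tc)
+    //     Ok(tc), endpoint changed                              -> TokenState::NodeReassigned
+    //     Err(ErrorKind::BackoffError(when))                    -> TokenState::Backoff(when, previous_endpoint)
+    //     any other Err(e)                                      -> TokenState::Failed(Some(e), previous_endpoint)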
+ fn fetch_token(&self, previous_endpoint: Option<&str>) -> TokenState { + match self.fetch_context() { + Ok(tc) => { + // We got a new token - check that the endpoint is the same + // as a previous endpoint we saw (if any) + match previous_endpoint { + Some(prev) => { + if prev == tc.token.api_endpoint { + TokenState::Token(tc) + } else { + log::warn!( + "api_endpoint changed from {} to {}", + prev, + tc.token.api_endpoint + ); + TokenState::NodeReassigned + } + } + None => { + // Never had an api_endpoint in the past, so this is OK. + TokenState::Token(tc) + } + } + } + Err(e) => { + // Early to avoid nll issues... + if let ErrorKind::BackoffError(be) = e { + return TokenState::Backoff(be, previous_endpoint.map(ToString::to_string)); + } + TokenState::Failed(Some(e), previous_endpoint.map(ToString::to_string)) + } + } + } + + // Given the state we are currently in, return a new current state. + // Returns None if the current state should be used (eg, if we are + // holding a token that remains valid) or Some() if the state has changed + // (which may have changed to a state with a token or an error state) + fn advance_state(&self, state: &TokenState) -> Option<TokenState> { + match state { + TokenState::NoToken => Some(self.fetch_token(None)), + TokenState::Failed(_, existing_endpoint) => { + Some(self.fetch_token(existing_endpoint.as_ref().map(String::as_str))) + } + TokenState::Token(existing_context) => { + if existing_context.is_valid(self.fetcher.now()) { + None + } else { + Some(self.fetch_token(Some(existing_context.token.api_endpoint.as_str()))) + } + } + TokenState::Backoff(ref until, ref existing_endpoint) => { + if let Ok(remaining) = until.duration_since(self.fetcher.now()) { + log::debug!("enforcing existing backoff - {:?} remains", remaining); + None + } else { + // backoff period is over + Some(self.fetch_token(existing_endpoint.as_ref().map(String::as_str))) + } + } + TokenState::NodeReassigned => { + // We never leave this state. + None + } + } + } + + fn with_token<T, F>(&self, func: F) -> Result<T> + where + F: FnOnce(&TokenContext) -> Result<T>, + { + // first get a mutable ref to our existing state, advance to the + // state we will use, then re-stash that state for next time. + let state: &mut TokenState = &mut self.current_state.borrow_mut(); + if let Some(new_state) = self.advance_state(state) { + *state = new_state; + } + + // Now re-fetch the state we should use for this call - if it's + // anything other than TokenState::Token we will fail. + match state { + TokenState::NoToken => { + // it should be impossible to get here. + panic!("Can't be in NoToken state after advancing"); + } + TokenState::Token(ref token_context) => { + // make the call. + func(token_context) + } + TokenState::Failed(e, _) => { + // We swap the error out of the state enum and return it. + Err(e.take().unwrap()) + } + TokenState::NodeReassigned => { + // this is unrecoverable. 
+ Err(ErrorKind::StorageResetError) + } + TokenState::Backoff(ref remaining, _) => Err(ErrorKind::BackoffError(*remaining)), + } + } + + fn hashed_uid(&self) -> Result<String> { + self.with_token(|ctx| Ok(ctx.token.hashed_fxa_uid.clone())) + } + + fn authorization(&self, req: &Request) -> Result<String> { + self.with_token(|ctx| ctx.authorization(req)) + } + + fn api_endpoint(&self) -> Result<String> { + self.with_token(|ctx| Ok(ctx.token.api_endpoint.clone())) + } + // TODO: we probably want a "drop_token/context" type method so that when + // using a token with some validity fails the caller can force a new one + // (in which case the new token request will probably fail with a 401) +} + +// The public concrete object exposed by this module +#[derive(Debug)] +pub struct TokenProvider { + imp: TokenProviderImpl<TokenServerFetcher>, +} + +impl TokenProvider { + pub fn new(url: Url, access_token: String, key_id: String) -> Result<Self> { + let fetcher = TokenServerFetcher::new(url, access_token, key_id); + Ok(Self { + imp: TokenProviderImpl::new(fetcher), + }) + } + + pub fn hashed_uid(&self) -> Result<String> { + self.imp.hashed_uid() + } + + pub fn authorization(&self, req: &Request) -> Result<String> { + self.imp.authorization(req) + } + + pub fn api_endpoint(&self) -> Result<String> { + self.imp.api_endpoint() + } +} + +#[cfg(test)] +mod tests { + use super::*; + use std::cell::Cell; + + struct TestFetcher<FF, FN> + where + FF: Fn() -> Result<TokenFetchResult>, + FN: Fn() -> SystemTime, + { + fetch: FF, + now: FN, + } + impl<FF, FN> TokenFetcher for TestFetcher<FF, FN> + where + FF: Fn() -> Result<TokenFetchResult>, + FN: Fn() -> SystemTime, + { + fn fetch_token(&self) -> Result<TokenFetchResult> { + (self.fetch)() + } + fn now(&self) -> SystemTime { + (self.now)() + } + } + + fn make_tsc<FF, FN>(fetch: FF, now: FN) -> TokenProviderImpl<TestFetcher<FF, FN>> + where + FF: Fn() -> Result<TokenFetchResult>, + FN: Fn() -> SystemTime, + { + let fetcher: TestFetcher<FF, FN> = TestFetcher { fetch, now }; + TokenProviderImpl::new(fetcher) + } + + #[test] + fn test_endpoint() { + // Use a cell to avoid the closure having a mutable ref to this scope. + let counter: Cell<u32> = Cell::new(0); + let fetch = || { + counter.set(counter.get() + 1); + Ok(TokenFetchResult { + token: TokenserverToken { + id: "id".to_string(), + key: "key".to_string(), + api_endpoint: "api_endpoint".to_string(), + uid: 1, + duration: 1000, + hashed_fxa_uid: "hash".to_string(), + }, + server_timestamp: ServerTimestamp(0i64), + }) + }; + + let tsc = make_tsc(fetch, SystemTime::now); + + let e = tsc.api_endpoint().expect("should work"); + assert_eq!(e, "api_endpoint".to_string()); + assert_eq!(counter.get(), 1); + + let e2 = tsc.api_endpoint().expect("should work"); + assert_eq!(e2, "api_endpoint".to_string()); + // should not have re-fetched. + assert_eq!(counter.get(), 1); + } + + #[test] + fn test_backoff() { + let counter: Cell<u32> = Cell::new(0); + let fetch = || { + counter.set(counter.get() + 1); + let when = SystemTime::now() + Duration::from_millis(10000); + Err(ErrorKind::BackoffError(when)) + }; + let now: Cell<SystemTime> = Cell::new(SystemTime::now()); + let tsc = make_tsc(fetch, || now.get()); + + tsc.api_endpoint().expect_err("should bail"); + // XXX - check error type. + assert_eq!(counter.get(), 1); + // try and get another token - should not re-fetch as backoff is still + // in progress. + tsc.api_endpoint().expect_err("should bail"); + assert_eq!(counter.get(), 1); + + // Advance the clock. 
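+        // (20 seconds is well past the 10-second backoff the mock returned, so
+        // the backoff window has expired and advance_state will fetch again.)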
+ now.set(now.get() + Duration::new(20, 0)); + + // Our token fetch mock is still returning a backoff error, so we + // still fail, but should have re-hit the fetch function. + tsc.api_endpoint().expect_err("should bail"); + assert_eq!(counter.get(), 2); + } + + #[test] + fn test_validity() { + let counter: Cell<u32> = Cell::new(0); + let fetch = || { + counter.set(counter.get() + 1); + Ok(TokenFetchResult { + token: TokenserverToken { + id: "id".to_string(), + key: "key".to_string(), + api_endpoint: "api_endpoint".to_string(), + uid: 1, + duration: 10, + hashed_fxa_uid: "hash".to_string(), + }, + server_timestamp: ServerTimestamp(0i64), + }) + }; + let now: Cell<SystemTime> = Cell::new(SystemTime::now()); + let tsc = make_tsc(fetch, || now.get()); + + tsc.api_endpoint().expect("should get a valid token"); + assert_eq!(counter.get(), 1); + + // try and get another token - should not re-fetch as the old one + // remains valid. + tsc.api_endpoint().expect("should reuse existing token"); + assert_eq!(counter.get(), 1); + + // Advance the clock. + now.set(now.get() + Duration::new(20, 0)); + + // We should discard our token and fetch a new one. + tsc.api_endpoint().expect("should re-fetch"); + assert_eq!(counter.get(), 2); + } + + #[test] + fn test_server_url() { + assert_eq!( + fixup_server_url( + Url::parse("https://token.services.mozilla.com/1.0/sync/1.5").unwrap() + ) + .as_str(), + "https://token.services.mozilla.com/1.0/sync/1.5" + ); + assert_eq!( + fixup_server_url( + Url::parse("https://token.services.mozilla.com/1.0/sync/1.5/").unwrap() + ) + .as_str(), + "https://token.services.mozilla.com/1.0/sync/1.5" + ); + assert_eq!( + fixup_server_url(Url::parse("https://token.services.mozilla.com").unwrap()).as_str(), + "https://token.services.mozilla.com/1.0/sync/1.5" + ); + assert_eq!( + fixup_server_url(Url::parse("https://token.services.mozilla.com/").unwrap()).as_str(), + "https://token.services.mozilla.com/1.0/sync/1.5" + ); + assert_eq!( + fixup_server_url( + Url::parse("https://selfhosted.example.com/token/1.0/sync/1.5").unwrap() + ) + .as_str(), + "https://selfhosted.example.com/token/1.0/sync/1.5" + ); + assert_eq!( + fixup_server_url( + Url::parse("https://selfhosted.example.com/token/1.0/sync/1.5/").unwrap() + ) + .as_str(), + "https://selfhosted.example.com/token/1.0/sync/1.5" + ); + assert_eq!( + fixup_server_url(Url::parse("https://selfhosted.example.com/token/").unwrap()).as_str(), + "https://selfhosted.example.com/token/1.0/sync/1.5" + ); + assert_eq!( + fixup_server_url(Url::parse("https://selfhosted.example.com/token").unwrap()).as_str(), + "https://selfhosted.example.com/token/1.0/sync/1.5" + ); + } +} diff --git a/third_party/rust/sync15/src/client/util.rs b/third_party/rust/sync15/src/client/util.rs new file mode 100644 index 0000000000..01fff77afa --- /dev/null +++ b/third_party/rust/sync15/src/client/util.rs @@ -0,0 +1,102 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +use std::collections::{HashMap, HashSet}; +use std::sync::atomic::{AtomicU32, Ordering}; + +/// Finds the maximum of the current value and the argument `val`, and sets the +/// new value to the result. 
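+///
+/// A minimal illustration of the intended behaviour (values are arbitrary):
+///
+///     use std::sync::atomic::AtomicU32;
+///     let v = AtomicU32::new(3);
+///     atomic_update_max(&v, 5); // v is now 5
+///     atomic_update_max(&v, 2); // v is unchanged (still 5)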
+/// +/// Note: `AtomicFoo::fetch_max` is unstable, and can't really be implemented as +/// a single atomic operation from outside the stdlib ;-; +pub(crate) fn atomic_update_max(v: &AtomicU32, new: u32) { + // For loads (and the compare_exchange_weak second ordering argument) this + // is too strong, we could probably get away with Acquire (or maybe Relaxed + // because we don't need the result?). In either case, this fn isn't called + // from a hot spot so whatever. + let mut cur = v.load(Ordering::SeqCst); + while cur < new { + // we're already handling the failure case so there's no reason not to + // use _weak here. + match v.compare_exchange_weak(cur, new, Ordering::SeqCst, Ordering::SeqCst) { + Ok(_) => { + // Success. + break; + } + Err(new_cur) => { + // Interrupted, keep trying. + cur = new_cur + } + } + } +} + +// Slight wrappers around the builtin methods for doing this. +pub(crate) fn set_union(a: &HashSet<String>, b: &HashSet<String>) -> HashSet<String> { + a.union(b).cloned().collect() +} + +pub(crate) fn set_difference(a: &HashSet<String>, b: &HashSet<String>) -> HashSet<String> { + a.difference(b).cloned().collect() +} + +pub(crate) fn set_intersection(a: &HashSet<String>, b: &HashSet<String>) -> HashSet<String> { + a.intersection(b).cloned().collect() +} + +pub(crate) fn partition_by_value(v: &HashMap<String, bool>) -> (HashSet<String>, HashSet<String>) { + let mut true_: HashSet<String> = HashSet::new(); + let mut false_: HashSet<String> = HashSet::new(); + for (s, val) in v { + if *val { + true_.insert(s.clone()); + } else { + false_.insert(s.clone()); + } + } + (true_, false_) +} + +#[cfg(test)] +mod test { + use super::*; + + #[test] + fn test_set_ops() { + fn hash_set(s: &[&str]) -> HashSet<String> { + s.iter() + .copied() + .map(ToOwned::to_owned) + .collect::<HashSet<_>>() + } + + assert_eq!( + set_union(&hash_set(&["a", "b", "c"]), &hash_set(&["b", "d"])), + hash_set(&["a", "b", "c", "d"]), + ); + + assert_eq!( + set_difference(&hash_set(&["a", "b", "c"]), &hash_set(&["b", "d"])), + hash_set(&["a", "c"]), + ); + assert_eq!( + set_intersection(&hash_set(&["a", "b", "c"]), &hash_set(&["b", "d"])), + hash_set(&["b"]), + ); + let m: HashMap<String, bool> = [ + ("foo", true), + ("bar", true), + ("baz", false), + ("quux", false), + ] + .iter() + .copied() + .map(|(a, b)| (a.to_owned(), b)) + .collect(); + assert_eq!( + partition_by_value(&m), + (hash_set(&["foo", "bar"]), hash_set(&["baz", "quux"])), + ); + } +} diff --git a/third_party/rust/sync15/src/client_types.rs b/third_party/rust/sync15/src/client_types.rs new file mode 100644 index 0000000000..2966a190e2 --- /dev/null +++ b/third_party/rust/sync15/src/client_types.rs @@ -0,0 +1,129 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +//! This module has to be here because of some hard-to-avoid hacks done for the +//! tabs engine... See issue #2590 + +use crate::DeviceType; +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; + +/// Argument to Store::prepare_for_sync. See comment there for more info. Only +/// really intended to be used by tabs engine. +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct ClientData { + pub local_client_id: String, + /// A hashmap of records in the `clients` collection. Key is the id of the record in + /// that collection, which may or may not be the device's fxa_device_id. 
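+    /// For example (hypothetical ids), after a sync this might contain an
+    /// entry like `"deviceAAAAAA" -> RemoteClient { device_name: "Laptop", .. }`,
+    /// where the key is that record's id in the clients collection.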
+ pub recent_clients: HashMap<String, RemoteClient>, +} + +/// Information about a remote client in the clients collection. +#[derive(Clone, Debug, Eq, Hash, PartialEq, Serialize, Deserialize)] +pub struct RemoteClient { + pub fxa_device_id: Option<String>, + pub device_name: String, + #[serde(default)] + pub device_type: DeviceType, +} + +#[cfg(test)] +mod client_types_tests { + use super::*; + + #[test] + fn test_remote_client() { + // Missing `device_type` gets DeviceType::Unknown. + let dt = serde_json::from_str::<RemoteClient>("{\"device_name\": \"foo\"}").unwrap(); + assert_eq!(dt.device_type, DeviceType::Unknown); + // But reserializes as null. + assert_eq!( + serde_json::to_string(&dt).unwrap(), + "{\"fxa_device_id\":null,\"device_name\":\"foo\",\"device_type\":null}" + ); + + // explicit null is also unknown. + assert_eq!( + serde_json::from_str::<RemoteClient>( + "{\"device_name\": \"foo\", \"device_type\": null}", + ) + .unwrap() + .device_type, + DeviceType::Unknown + ); + + // Unknown device_type string deserializes as DeviceType::Unknown. + let dt = serde_json::from_str::<RemoteClient>( + "{\"device_name\": \"foo\", \"device_type\": \"foo\"}", + ) + .unwrap(); + assert_eq!(dt.device_type, DeviceType::Unknown); + // The None gets re-serialized as null. + assert_eq!( + serde_json::to_string(&dt).unwrap(), + "{\"fxa_device_id\":null,\"device_name\":\"foo\",\"device_type\":null}" + ); + + // DeviceType::Unknown gets serialized as null. + let dt = RemoteClient { + device_name: "bar".to_string(), + fxa_device_id: None, + device_type: DeviceType::Unknown, + }; + assert_eq!( + serde_json::to_string(&dt).unwrap(), + "{\"fxa_device_id\":null,\"device_name\":\"bar\",\"device_type\":null}" + ); + + // DeviceType::Desktop gets serialized as "desktop". + let dt = RemoteClient { + device_name: "bar".to_string(), + fxa_device_id: Some("fxa".to_string()), + device_type: DeviceType::Desktop, + }; + assert_eq!( + serde_json::to_string(&dt).unwrap(), + "{\"fxa_device_id\":\"fxa\",\"device_name\":\"bar\",\"device_type\":\"desktop\"}" + ); + } + + #[test] + fn test_client_data() { + let client_data = ClientData { + local_client_id: "my-device".to_string(), + recent_clients: HashMap::from([ + ( + "my-device".to_string(), + RemoteClient { + fxa_device_id: None, + device_name: "my device".to_string(), + device_type: DeviceType::Unknown, + }, + ), + ( + "device-no-tabs".to_string(), + RemoteClient { + fxa_device_id: None, + device_name: "device with no tabs".to_string(), + device_type: DeviceType::Unknown, + }, + ), + ( + "device-with-a-tab".to_string(), + RemoteClient { + fxa_device_id: None, + device_name: "device with a tab".to_string(), + device_type: DeviceType::Desktop, + }, + ), + ]), + }; + //serialize + let client_data_ser = serde_json::to_string(&client_data).unwrap(); + println!("SER: {}", client_data_ser); + // deserialize + let client_data_des: ClientData = serde_json::from_str(&client_data_ser).unwrap(); + assert_eq!(client_data_des, client_data); + } +} diff --git a/third_party/rust/sync15/src/clients_engine/engine.rs b/third_party/rust/sync15/src/clients_engine/engine.rs new file mode 100644 index 0000000000..eb746d5516 --- /dev/null +++ b/third_party/rust/sync15/src/clients_engine/engine.rs @@ -0,0 +1,804 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +use std::collections::{HashMap, HashSet}; + +use crate::bso::{IncomingKind, OutgoingBso, OutgoingEnvelope}; +use crate::client::{ + CollState, CollectionKeys, CollectionUpdate, GlobalState, InfoConfiguration, + Sync15StorageClient, +}; +use crate::client_types::{ClientData, RemoteClient}; +use crate::engine::{CollectionRequest, IncomingChangeset, OutgoingChangeset}; +use crate::{error::Result, Guid, KeyBundle}; +use interrupt_support::Interruptee; + +use super::{ + record::{ClientRecord, CommandRecord}, + ser::shrink_to_fit, + Command, CommandProcessor, CommandStatus, CLIENTS_TTL, +}; + +const COLLECTION_NAME: &str = "clients"; + +/// The driver for the clients engine. Internal; split out from the `Engine` +/// struct to make testing easier. +struct Driver<'a> { + command_processor: &'a dyn CommandProcessor, + interruptee: &'a dyn Interruptee, + config: &'a InfoConfiguration, + recent_clients: HashMap<String, RemoteClient>, +} + +impl<'a> Driver<'a> { + fn new( + command_processor: &'a dyn CommandProcessor, + interruptee: &'a dyn Interruptee, + config: &'a InfoConfiguration, + ) -> Driver<'a> { + Driver { + command_processor, + interruptee, + config, + recent_clients: HashMap::new(), + } + } + + fn note_recent_client(&mut self, client: &ClientRecord) { + self.recent_clients.insert(client.id.clone(), client.into()); + } + + fn sync( + &mut self, + inbound: IncomingChangeset, + should_refresh_client: bool, + ) -> Result<OutgoingChangeset> { + self.interruptee.err_if_interrupted()?; + let outgoing_commands = self.command_processor.fetch_outgoing_commands()?; + + let mut has_own_client_record = false; + let mut changes = Vec::new(); + + for bso in inbound.changes { + self.interruptee.err_if_interrupted()?; + + let content = bso.into_content(); + + let client: ClientRecord = match content.kind { + IncomingKind::Malformed => { + log::debug!("Error unpacking record"); + continue; + } + IncomingKind::Tombstone => { + log::debug!("Record has been deleted; skipping..."); + continue; + } + IncomingKind::Content(client) => client, + }; + + if client.id == self.command_processor.settings().fxa_device_id { + log::debug!("Found my record on the server"); + // If we see our own client record, apply any incoming commands, + // remove them from the list, and reupload the record. Any + // commands that we don't understand also go back in the list. + // https://github.com/mozilla/application-services/issues/1800 + // tracks if that's the right thing to do. + has_own_client_record = true; + let mut current_client_record = self.current_client_record(); + for c in &client.commands { + let status = match c.as_command() { + Some(command) => self.command_processor.apply_incoming_command(command)?, + None => CommandStatus::Unsupported, + }; + match status { + CommandStatus::Applied => {} + CommandStatus::Ignored => { + log::debug!("Ignored command {:?}", c); + } + CommandStatus::Unsupported => { + log::warn!("Don't know how to apply command {:?}", c); + current_client_record.commands.push(c.clone()); + } + } + } + + // The clients collection has a hard limit on the payload size, + // after which the server starts rejecting our records. Large + // command lists can cause us to exceed this, so we truncate + // the list. + shrink_to_fit( + &mut current_client_record.commands, + self.memcache_max_record_payload_size(), + )?; + + // Add the new client record to our map of recently synced + // clients, so that downstream consumers like synced tabs can + // access them. 
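+                // (These records are what `Engine::get_client_data()` later
+                // exposes to consumers via `ClientData::recent_clients`.)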
+                self.note_recent_client(&current_client_record);
+
+                // We periodically upload our own client record, even if it
+                // doesn't change, to keep it fresh.
+                if should_refresh_client || client != current_client_record {
+                    log::debug!("Will update our client record on the server");
+                    let envelope = OutgoingEnvelope {
+                        id: content.envelope.id,
+                        ttl: Some(CLIENTS_TTL),
+                        ..Default::default()
+                    };
+                    changes.push(OutgoingBso::from_content(envelope, current_client_record)?);
+                }
+            } else {
+                // Add the other client to our map of recently synced clients.
+                self.note_recent_client(&client);
+
+                // Bail if we don't have any outgoing commands to write into
+                // the other client's record.
+                if outgoing_commands.is_empty() {
+                    continue;
+                }
+
+                // Determine if we have new commands that aren't already in the
+                // client's command list.
+                let current_commands: HashSet<Command> = client
+                    .commands
+                    .iter()
+                    .filter_map(|c| c.as_command())
+                    .collect();
+                let mut new_outgoing_commands = outgoing_commands
+                    .difference(&current_commands)
+                    .cloned()
+                    .collect::<Vec<_>>();
+                // Sort, to ensure deterministic ordering for tests.
+                new_outgoing_commands.sort();
+                let mut new_client = client.clone();
+                new_client
+                    .commands
+                    .extend(new_outgoing_commands.into_iter().map(CommandRecord::from));
+                if new_client.commands.len() == client.commands.len() {
+                    continue;
+                }
+
+                // Hooray, we added new commands! Make sure the record still
+                // fits in the maximum record size, or the server will reject
+                // our upload.
+                shrink_to_fit(
+                    &mut new_client.commands,
+                    self.memcache_max_record_payload_size(),
+                )?;
+
+                let envelope = OutgoingEnvelope {
+                    id: content.envelope.id,
+                    ttl: Some(CLIENTS_TTL),
+                    ..Default::default()
+                };
+                changes.push(OutgoingBso::from_content(envelope, new_client)?);
+            }
+        }
+
+        // Upload a record for our own client, if we didn't replace it already.
+        if !has_own_client_record {
+            let current_client_record = self.current_client_record();
+            self.note_recent_client(&current_client_record);
+            let envelope = OutgoingEnvelope {
+                id: Guid::new(&current_client_record.id),
+                ttl: Some(CLIENTS_TTL),
+                ..Default::default()
+            };
+            changes.push(OutgoingBso::from_content(envelope, current_client_record)?);
+        }
+
+        Ok(OutgoingChangeset::new(COLLECTION_NAME.into(), changes))
+    }
+
+    /// Builds a fresh client record for this device.
+    fn current_client_record(&self) -> ClientRecord {
+        let settings = self.command_processor.settings();
+        ClientRecord {
+            id: settings.fxa_device_id.clone(),
+            name: settings.device_name.clone(),
+            typ: settings.device_type,
+            commands: Vec::new(),
+            fxa_device_id: Some(settings.fxa_device_id.clone()),
+            version: None,
+            protocols: vec!["1.5".into()],
+            form_factor: None,
+            os: None,
+            app_package: None,
+            application: None,
+            device: None,
+        }
+    }
+
+    fn max_record_payload_size(&self) -> usize {
+        let payload_max = self.config.max_record_payload_bytes;
+        if payload_max <= self.config.max_post_bytes {
+            self.config.max_post_bytes.saturating_sub(4096)
+        } else {
+            payload_max
+        }
+    }
+
+    /// Collections stored in memcached ("tabs", "clients" or "meta") have a
+    /// different max size than ones stored in the normal storage server db.
+    /// In practice, the real limit here is 1M (bug 1300451 comment 40), but
+    /// there's overhead involved that is hard to calculate on the client, so we
+    /// use 512k to be safe (at the recommendation of the server team). Note
+    /// that if the server reports a lower limit (via info/configuration), we
+    /// respect that limit instead. See also bug 1403052.
+ /// XXX - the above comment is stale and refers to the world before the + /// move to spanner and the rust sync server. + fn memcache_max_record_payload_size(&self) -> usize { + self.max_record_payload_size().min(512 * 1024) + } +} + +pub struct Engine<'a> { + pub command_processor: &'a dyn CommandProcessor, + pub interruptee: &'a dyn Interruptee, + pub recent_clients: HashMap<String, RemoteClient>, +} + +impl<'a> Engine<'a> { + /// Creates a new clients engine that delegates to the given command + /// processor to apply incoming commands. + pub fn new<'b>( + command_processor: &'b dyn CommandProcessor, + interruptee: &'b dyn Interruptee, + ) -> Engine<'b> { + Engine { + command_processor, + interruptee, + recent_clients: HashMap::new(), + } + } + + /// Syncs the clients collection. This works a little differently than + /// other collections: + /// + /// 1. It can't be disabled or declined. + /// 2. The sync ID and last sync time aren't meaningful, since we always + /// fetch all client records on every sync. As such, the + /// `LocalCollStateMachine` that we use for other engines doesn't + /// apply to it. + /// 3. It doesn't persist state directly, but relies on the sync manager + /// to persist device settings, and process commands. + /// 4. Failing to sync the clients collection is fatal, and aborts the + /// sync. + /// + /// For these reasons, we implement this engine directly in the `sync15` + /// crate, and provide a specialized `sync` method instead of implementing + /// `sync15::Store`. + pub fn sync( + &mut self, + storage_client: &Sync15StorageClient, + global_state: &GlobalState, + root_sync_key: &KeyBundle, + should_refresh_client: bool, + ) -> Result<()> { + log::info!("Syncing collection clients"); + + let coll_keys = CollectionKeys::from_encrypted_payload( + global_state.keys.clone(), + global_state.keys_timestamp, + root_sync_key, + )?; + let coll_state = CollState { + config: global_state.config.clone(), + last_modified: global_state + .collections + .get(COLLECTION_NAME) + .cloned() + .unwrap_or_default(), + key: coll_keys.key_for_collection(COLLECTION_NAME).clone(), + }; + + let inbound = self.fetch_incoming(storage_client, &coll_state)?; + + let mut driver = Driver::new( + self.command_processor, + self.interruptee, + &global_state.config, + ); + + let outgoing = driver.sync(inbound, should_refresh_client)?; + self.recent_clients = driver.recent_clients; + + self.interruptee.err_if_interrupted()?; + let upload_info = + CollectionUpdate::new_from_changeset(storage_client, &coll_state, outgoing, true)? + .upload()?; + + log::info!( + "Upload success ({} records success, {} records failed)", + upload_info.successful_ids.len(), + upload_info.failed_ids.len() + ); + + log::info!("Finished syncing clients"); + Ok(()) + } + + fn fetch_incoming( + &self, + storage_client: &Sync15StorageClient, + coll_state: &CollState, + ) -> Result<IncomingChangeset> { + // Note that, unlike other stores, we always fetch the full collection + // on every sync, so `inbound` will return all clients, not just the + // ones that changed since the last sync. + let coll_request = CollectionRequest::new(COLLECTION_NAME.into()).full(); + + self.interruptee.err_if_interrupted()?; + let inbound = crate::client::fetch_incoming(storage_client, coll_state, coll_request)?; + + Ok(inbound) + } + + pub fn local_client_id(&self) -> String { + // Bit dirty but it's the easiest way to reach to our own + // device ID without refactoring the whole sync manager crate. 
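+        // (This is also the value used as this client's record id in the
+        // clients collection - see `current_client_record()` earlier in this
+        // file.)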
+ self.command_processor.settings().fxa_device_id.clone() + } + + pub fn get_client_data(&self) -> ClientData { + ClientData { + local_client_id: self.local_client_id(), + recent_clients: self.recent_clients.clone(), + } + } +} + +#[cfg(test)] +mod tests { + use super::super::{CommandStatus, DeviceType, Settings}; + use super::*; + use crate::bso::IncomingBso; + use crate::ServerTimestamp; + use anyhow::Result; + use interrupt_support::NeverInterrupts; + use serde_json::{json, Value}; + use std::iter::zip; + + struct TestProcessor { + settings: Settings, + outgoing_commands: HashSet<Command>, + } + + impl CommandProcessor for TestProcessor { + fn settings(&self) -> &Settings { + &self.settings + } + + fn apply_incoming_command(&self, command: Command) -> Result<CommandStatus> { + Ok(if let Command::Reset(name) = command { + if name == "forms" { + CommandStatus::Unsupported + } else { + CommandStatus::Applied + } + } else { + CommandStatus::Ignored + }) + } + + fn fetch_outgoing_commands(&self) -> Result<HashSet<Command>> { + Ok(self.outgoing_commands.clone()) + } + } + + fn inbound_from_clients(clients: Value) -> IncomingChangeset { + if let Value::Array(clients) = clients { + let changes = clients + .into_iter() + .map(IncomingBso::from_test_content) + .collect(); + IncomingChangeset { + changes, + timestamp: ServerTimestamp(0), + collection: COLLECTION_NAME.into(), + } + } else { + unreachable!("`clients` must be an array of client records") + } + } + + #[test] + fn test_clients_sync() { + let processor = TestProcessor { + settings: Settings { + fxa_device_id: "deviceAAAAAA".into(), + device_name: "Laptop".into(), + device_type: DeviceType::Desktop, + }, + outgoing_commands: [ + Command::Wipe("bookmarks".into()), + Command::Reset("history".into()), + ] + .iter() + .cloned() + .collect(), + }; + + let config = InfoConfiguration::default(); + + let mut driver = Driver::new(&processor, &NeverInterrupts, &config); + + let inbound = inbound_from_clients(json!([{ + "id": "deviceBBBBBB", + "name": "iPhone", + "type": "mobile", + "commands": [{ + "command": "resetEngine", + "args": ["history"], + }], + "fxaDeviceId": "iPhooooooone", + "protocols": ["1.5"], + "device": "iPhone", + }, { + "id": "deviceCCCCCC", + "name": "Fenix", + "type": "mobile", + "commands": [], + "fxaDeviceId": "deviceCCCCCC", + }, { + "id": "deviceAAAAAA", + "name": "Laptop with a different name", + "type": "desktop", + "commands": [{ + "command": "wipeEngine", + "args": ["logins"] + }, { + "command": "displayURI", + "args": ["http://example.com", "Fennec", "Example page"], + "flowID": "flooooooooow", + }, { + "command": "resetEngine", + "args": ["forms"], + }, { + "command": "logout", + "args": [], + }], + "fxaDeviceId": "deviceAAAAAA", + }])); + + // Passing false for `should_refresh_client` - it should be ignored + // because we've changed the commands. + let mut outgoing = driver.sync(inbound, false).expect("Should sync clients"); + outgoing + .changes + .sort_by(|a, b| a.envelope.id.cmp(&b.envelope.id)); + + // Make sure the list of recently synced remote clients is correct. 
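+        // (All three ids appear in the `inbound` payload above; "deviceAAAAAA"
+        // is this device's own record.)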
+ let expected_ids = &["deviceAAAAAA", "deviceBBBBBB", "deviceCCCCCC"]; + let mut actual_ids = driver.recent_clients.keys().collect::<Vec<&String>>(); + actual_ids.sort(); + assert_eq!(actual_ids, expected_ids); + + let expected_remote_clients = &[ + RemoteClient { + fxa_device_id: Some("deviceAAAAAA".to_string()), + device_name: "Laptop".into(), + device_type: DeviceType::Desktop, + }, + RemoteClient { + fxa_device_id: Some("iPhooooooone".to_string()), + device_name: "iPhone".into(), + device_type: DeviceType::Mobile, + }, + RemoteClient { + fxa_device_id: Some("deviceCCCCCC".to_string()), + device_name: "Fenix".into(), + device_type: DeviceType::Mobile, + }, + ]; + let actual_remote_clients = expected_ids + .iter() + .filter_map(|&id| driver.recent_clients.get(id)) + .cloned() + .collect::<Vec<RemoteClient>>(); + assert_eq!(actual_remote_clients, expected_remote_clients); + + let expected = json!([{ + "id": "deviceAAAAAA", + "name": "Laptop", + "type": "desktop", + "commands": [{ + "command": "displayURI", + "args": ["http://example.com", "Fennec", "Example page"], + "flowID": "flooooooooow", + }, { + "command": "resetEngine", + "args": ["forms"], + }, { + "command": "logout", + "args": [], + }], + "fxaDeviceId": "deviceAAAAAA", + "protocols": ["1.5"], + }, { + "id": "deviceBBBBBB", + "name": "iPhone", + "type": "mobile", + "commands": [{ + "command": "resetEngine", + "args": ["history"], + }, { + "command": "wipeEngine", + "args": ["bookmarks"], + }], + "fxaDeviceId": "iPhooooooone", + "protocols": ["1.5"], + "device": "iPhone", + }, { + "id": "deviceCCCCCC", + "name": "Fenix", + "type": "mobile", + "commands": [{ + "command": "wipeEngine", + "args": ["bookmarks"], + }, { + "command": "resetEngine", + "args": ["history"], + }], + "fxaDeviceId": "deviceCCCCCC", + }]); + // turn outgoing into an incoming payload. + let incoming = IncomingChangeset { + changes: outgoing + .changes + .into_iter() + .map(|c| OutgoingBso::to_test_incoming(&c)) + .collect(), + timestamp: ServerTimestamp::default(), + collection: outgoing.collection, + }; + if let Value::Array(expected) = expected { + for (incoming_cleartext, exp_client) in zip(incoming.changes, expected) { + let incoming_client: ClientRecord = + incoming_cleartext.into_content().content().unwrap(); + assert_eq!(incoming_client, serde_json::from_value(exp_client).unwrap()); + } + } else { + unreachable!("`expected_clients` must be an array of client records") + } + } + + #[test] + fn test_clients_sync_bad_incoming_record_skipped() { + let processor = TestProcessor { + settings: Settings { + fxa_device_id: "deviceAAAAAA".into(), + device_name: "Laptop".into(), + device_type: DeviceType::Desktop, + }, + outgoing_commands: [].iter().cloned().collect(), + }; + + let config = InfoConfiguration::default(); + + let mut driver = Driver::new(&processor, &NeverInterrupts, &config); + + let inbound = inbound_from_clients(json!([{ + "id": "deviceBBBBBB", + "name": "iPhone", + "type": "mobile", + "commands": [{ + "command": "resetEngine", + "args": ["history"], + }], + "fxaDeviceId": "iPhooooooone", + "protocols": ["1.5"], + "device": "iPhone", + }, { + "id": "garbage", + "garbage": "value", + }, { + "id": "deviceCCCCCC", + "deleted": true, + "name": "Fenix", + "type": "mobile", + "commands": [], + "fxaDeviceId": "deviceCCCCCC", + }])); + + driver.sync(inbound, false).expect("Should sync clients"); + + // Make sure the list of recently synced remote clients is correct. 
+ let expected_ids = &["deviceAAAAAA", "deviceBBBBBB"]; + let mut actual_ids = driver.recent_clients.keys().collect::<Vec<&String>>(); + actual_ids.sort(); + assert_eq!(actual_ids, expected_ids); + + let expected_remote_clients = &[ + RemoteClient { + fxa_device_id: Some("deviceAAAAAA".to_string()), + device_name: "Laptop".into(), + device_type: DeviceType::Desktop, + }, + RemoteClient { + fxa_device_id: Some("iPhooooooone".to_string()), + device_name: "iPhone".into(), + device_type: DeviceType::Mobile, + }, + ]; + let actual_remote_clients = expected_ids + .iter() + .filter_map(|&id| driver.recent_clients.get(id)) + .cloned() + .collect::<Vec<RemoteClient>>(); + assert_eq!(actual_remote_clients, expected_remote_clients); + } + + #[test] + fn test_clients_sync_explicit_refresh() { + let processor = TestProcessor { + settings: Settings { + fxa_device_id: "deviceAAAAAA".into(), + device_name: "Laptop".into(), + device_type: DeviceType::Desktop, + }, + outgoing_commands: [].iter().cloned().collect(), + }; + + let config = InfoConfiguration::default(); + + let mut driver = Driver::new(&processor, &NeverInterrupts, &config); + + let test_clients = json!([{ + "id": "deviceBBBBBB", + "name": "iPhone", + "type": "mobile", + "commands": [{ + "command": "resetEngine", + "args": ["history"], + }], + "fxaDeviceId": "iPhooooooone", + "protocols": ["1.5"], + "device": "iPhone", + }, { + "id": "deviceAAAAAA", + "name": "Laptop", + "type": "desktop", + "commands": [], + "fxaDeviceId": "deviceAAAAAA", + "protocols": ["1.5"], + }]); + + let outgoing = driver + .sync(inbound_from_clients(test_clients.clone()), false) + .expect("Should sync clients"); + // should be no outgoing changes. + assert_eq!(outgoing.changes.len(), 0); + + // Make sure the list of recently synced remote clients is correct and + // still includes our record we didn't update. + let expected_ids = &["deviceAAAAAA", "deviceBBBBBB"]; + let mut actual_ids = driver.recent_clients.keys().collect::<Vec<&String>>(); + actual_ids.sort(); + assert_eq!(actual_ids, expected_ids); + + // Do it again - still no changes, but force a refresh. + let outgoing = driver + .sync(inbound_from_clients(test_clients), true) + .expect("Should sync clients"); + assert_eq!(outgoing.changes.len(), 1); + + // Do it again - but this time with our own client record needing + // some change. + let inbound = inbound_from_clients(json!([{ + "id": "deviceAAAAAA", + "name": "Laptop with New Name", + "type": "desktop", + "commands": [], + "fxaDeviceId": "deviceAAAAAA", + "protocols": ["1.5"], + }])); + let outgoing = driver.sync(inbound, false).expect("Should sync clients"); + // should still be outgoing because the name changed. 
+ assert_eq!(outgoing.changes.len(), 1); + } + + #[test] + fn test_fresh_client_record() { + let processor = TestProcessor { + settings: Settings { + fxa_device_id: "deviceAAAAAA".into(), + device_name: "Laptop".into(), + device_type: DeviceType::Desktop, + }, + outgoing_commands: HashSet::new(), + }; + + let config = InfoConfiguration::default(); + + let mut driver = Driver::new(&processor, &NeverInterrupts, &config); + + let clients = json!([{ + "id": "deviceBBBBBB", + "name": "iPhone", + "type": "mobile", + "commands": [{ + "command": "resetEngine", + "args": ["history"], + }], + "fxaDeviceId": "iPhooooooone", + "protocols": ["1.5"], + "device": "iPhone", + }]); + + let inbound = if let Value::Array(clients) = clients { + let changes = clients + .into_iter() + .map(IncomingBso::from_test_content) + .collect(); + IncomingChangeset { + changes, + timestamp: ServerTimestamp(0), + collection: COLLECTION_NAME.into(), + } + } else { + unreachable!("`clients` must be an array of client records") + }; + + // Passing false here for should_refresh_client, but it should be + // ignored as we don't have an existing record yet. + let mut outgoing = driver.sync(inbound, false).expect("Should sync clients"); + outgoing + .changes + .sort_by(|a, b| a.envelope.id.cmp(&b.envelope.id)); + + // Make sure the list of recently synced remote clients is correct. + let expected_ids = &["deviceAAAAAA", "deviceBBBBBB"]; + let mut actual_ids = driver.recent_clients.keys().collect::<Vec<&String>>(); + actual_ids.sort(); + assert_eq!(actual_ids, expected_ids); + + let expected_remote_clients = &[ + RemoteClient { + fxa_device_id: Some("deviceAAAAAA".to_string()), + device_name: "Laptop".into(), + device_type: DeviceType::Desktop, + }, + RemoteClient { + fxa_device_id: Some("iPhooooooone".to_string()), + device_name: "iPhone".into(), + device_type: DeviceType::Mobile, + }, + ]; + let actual_remote_clients = expected_ids + .iter() + .filter_map(|&id| driver.recent_clients.get(id)) + .cloned() + .collect::<Vec<RemoteClient>>(); + assert_eq!(actual_remote_clients, expected_remote_clients); + + let expected = json!([{ + "id": "deviceAAAAAA", + "name": "Laptop", + "type": "desktop", + "fxaDeviceId": "deviceAAAAAA", + "protocols": ["1.5"], + "ttl": CLIENTS_TTL, + }]); + if let Value::Array(expected) = expected { + // turn outgoing into an incoming payload. + let incoming = IncomingChangeset { + changes: outgoing + .changes + .into_iter() + .map(|c| OutgoingBso::to_test_incoming(&c)) + .collect(), + timestamp: ServerTimestamp::default(), + collection: outgoing.collection, + }; + for (incoming_cleartext, record) in zip(incoming.changes, expected) { + let incoming_client: ClientRecord = + incoming_cleartext.into_content().content().unwrap(); + assert_eq!(incoming_client, serde_json::from_value(record).unwrap()); + } + } else { + unreachable!("`expected_clients` must be an array of client records") + } + } +} diff --git a/third_party/rust/sync15/src/clients_engine/mod.rs b/third_party/rust/sync15/src/clients_engine/mod.rs new file mode 100644 index 0000000000..7346712dc7 --- /dev/null +++ b/third_party/rust/sync15/src/clients_engine/mod.rs @@ -0,0 +1,93 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +//! The client engine is a [crate::engine](Sync Engine) used to manage the +//! "clients" collection. The clients engine manages the client record for +//! 
"this device, and also manages "commands". +//! In short, commands target one or more engines and instruct them to +//! perform various operations - such as wiping all local data. +//! These commands are used very rarely - currently the only command used +//! in practice is for bookmarks to wipe all their data, which is sent when +//! a desktop device restores all bookmarks from a backup. In this scenario, +//! desktop will delete all local bookmarks then replace them with the backed +//! up set, which without a "wipe" command would almost certainly cause other +//! connected devices to "resurrect" the deleted bookmarks. +use std::collections::HashSet; + +mod engine; +mod record; +mod ser; + +use crate::DeviceType; +use anyhow::Result; +pub use engine::Engine; + +// These are what desktop uses. +const CLIENTS_TTL: u32 = 15_552_000; // 180 days +pub(crate) const CLIENTS_TTL_REFRESH: u64 = 604_800; // 7 days + +/// A command processor applies incoming commands like wipes and resets for all +/// stores, and returns commands to send to other clients. It also manages +/// settings like the device name and type, which is stored in the special +/// `clients` collection. +/// +/// In practice, this trait only has one implementation, in the sync manager. +/// It's split this way because the clients engine depends on internal `sync15` +/// structures, and can't be implemented as a syncable store...but `sync15` +/// doesn't know anything about multiple engines. This lets the sync manager +/// provide its own implementation for handling wipe and reset commands for all +/// the engines that it manages. +pub trait CommandProcessor { + fn settings(&self) -> &Settings; + + /// Fetches commands to send to other clients. An error return value means + /// commands couldn't be fetched, and halts the sync. + fn fetch_outgoing_commands(&self) -> Result<HashSet<Command>>; + + /// Applies a command sent to this client from another client. This method + /// should return a `CommandStatus` indicating whether the command was + /// processed. + /// + /// An error return value means the sync manager encountered an error + /// applying the command, and halts the sync to prevent unexpected behavior + /// (for example, merging local and remote bookmarks, when we were told to + /// wipe our local bookmarks). + fn apply_incoming_command(&self, command: Command) -> Result<CommandStatus>; +} + +/// Indicates if a command was applied successfully, ignored, or not supported. +/// Applied and ignored commands are removed from our client record, and never +/// retried. Unsupported commands are put back into our record, and retried on +/// subsequent syncs. This is to handle clients adding support for new data +/// types. +#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)] +pub enum CommandStatus { + Applied, + Ignored, + Unsupported, +} + +/// Information about this device to include in its client record. This should +/// be persisted across syncs, as part of the sync manager state. +#[derive(Clone, Debug, Eq, Hash, PartialEq)] +pub struct Settings { + /// The FxA device ID of this client, also used as this client's record ID + /// in the clients collection. + pub fxa_device_id: String, + /// The name of this client. This should match the client's name in the + /// FxA device manager. + pub device_name: String, + /// The type of this client: mobile, tablet, desktop, or other. 
+ pub device_type: DeviceType, +} + +#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)] +pub enum Command { + /// Erases all local data for a specific engine. + Wipe(String), + /// Resets local sync state for all engines. + ResetAll, + /// Resets local sync state for a specific engine. + Reset(String), +} diff --git a/third_party/rust/sync15/src/clients_engine/record.rs b/third_party/rust/sync15/src/clients_engine/record.rs new file mode 100644 index 0000000000..fe17c12861 --- /dev/null +++ b/third_party/rust/sync15/src/clients_engine/record.rs @@ -0,0 +1,124 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +use serde_derive::*; + +use super::Command; + +/// The serialized form of a client record. +#[derive(Clone, Debug, Eq, Deserialize, Hash, PartialEq, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct ClientRecord { + #[serde(rename = "id")] + pub id: String, + + pub name: String, + + #[serde(rename = "type")] + pub typ: crate::DeviceType, + + #[serde(default, skip_serializing_if = "Vec::is_empty")] + pub commands: Vec<CommandRecord>, + + #[serde(default, skip_serializing_if = "Option::is_none")] + pub fxa_device_id: Option<String>, + + /// `version`, `protocols`, `formfactor`, `os`, `appPackage`, `application`, + /// and `device` are unused and optional in all implementations (Desktop, + /// iOS, and Fennec), but we round-trip them. + + #[serde(default, skip_serializing_if = "Option::is_none")] + pub version: Option<String>, + + #[serde(default, skip_serializing_if = "Vec::is_empty")] + pub protocols: Vec<String>, + + #[serde( + default, + rename = "formfactor", + skip_serializing_if = "Option::is_none" + )] + pub form_factor: Option<String>, + + #[serde(default, skip_serializing_if = "Option::is_none")] + pub os: Option<String>, + + #[serde(default, skip_serializing_if = "Option::is_none")] + pub app_package: Option<String>, + + #[serde(default, skip_serializing_if = "Option::is_none")] + pub application: Option<String>, + + /// The model of the device, like "iPhone" or "iPod touch" on iOS. Note + /// that this is _not_ the client ID (`id`) or the FxA device ID + /// (`fxa_device_id`). + #[serde(default, skip_serializing_if = "Option::is_none")] + pub device: Option<String>, +} + +impl From<&ClientRecord> for crate::RemoteClient { + fn from(record: &ClientRecord) -> crate::RemoteClient { + crate::RemoteClient { + fxa_device_id: record.fxa_device_id.clone(), + device_name: record.name.clone(), + device_type: record.typ, + } + } +} + +/// The serialized form of a client command. +#[derive(Clone, Debug, Eq, Deserialize, Hash, PartialEq, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct CommandRecord { + /// The command name. This is a string, not an enum, because we want to + /// round-trip commands that we don't support yet. + #[serde(rename = "command")] + pub name: String, + + /// Extra, command-specific arguments. Note that we must send an empty + /// array if the command expects no arguments. + #[serde(default)] + pub args: Vec<String>, + + /// Some commands, like repair, send a "flow ID" that other cliennts can + /// record in their telemetry. We don't currently send commands with + /// flow IDs, but we round-trip them. 
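+    ///
+    /// For illustration, a serialized wipe command carrying a flow ID looks
+    /// like this (values hypothetical):
+    ///
+    ///     {"command": "wipeEngine", "args": ["bookmarks"], "flowID": "flooooooooow"}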
+ #[serde(default, rename = "flowID", skip_serializing_if = "Option::is_none")] + pub flow_id: Option<String>, +} + +impl CommandRecord { + /// Converts a serialized command into one that we can apply. Returns `None` + /// if we don't support the command. + pub fn as_command(&self) -> Option<Command> { + match self.name.as_str() { + "wipeEngine" => self.args.get(0).map(|e| Command::Wipe(e.into())), + "resetEngine" => self.args.get(0).map(|e| Command::Reset(e.into())), + "resetAll" => Some(Command::ResetAll), + _ => None, + } + } +} + +impl From<Command> for CommandRecord { + fn from(command: Command) -> CommandRecord { + match command { + Command::Wipe(engine) => CommandRecord { + name: "wipeEngine".into(), + args: vec![engine], + flow_id: None, + }, + Command::Reset(engine) => CommandRecord { + name: "resetEngine".into(), + args: vec![engine], + flow_id: None, + }, + Command::ResetAll => CommandRecord { + name: "resetAll".into(), + args: Vec::new(), + flow_id: None, + }, + } + } +} diff --git a/third_party/rust/sync15/src/clients_engine/ser.rs b/third_party/rust/sync15/src/clients_engine/ser.rs new file mode 100644 index 0000000000..2e7b0817b8 --- /dev/null +++ b/third_party/rust/sync15/src/clients_engine/ser.rs @@ -0,0 +1,125 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +use crate::error::Result; +use serde::Serialize; +use std::io::{self, Write}; + +/// A writer that counts the number of bytes it's asked to write, and discards +/// the data. Used to calculate the serialized size of the commands list. +#[derive(Clone, Copy, Default)] +pub struct WriteCount(usize); + +impl WriteCount { + #[inline] + pub fn len(self) -> usize { + self.0 + } +} + +impl Write for WriteCount { + #[inline] + fn write(&mut self, buf: &[u8]) -> io::Result<usize> { + self.0 += buf.len(); + Ok(buf.len()) + } + + #[inline] + fn flush(&mut self) -> io::Result<()> { + Ok(()) + } +} + +/// Returns the size of the given value, in bytes, when serialized to JSON. +fn compute_serialized_size<T: Serialize>(value: &T) -> Result<usize> { + let mut w = WriteCount::default(); + serde_json::to_writer(&mut w, value)?; + Ok(w.len()) +} + +/// Truncates `list` to fit within `payload_size_max_bytes` when serialized to +/// JSON. +pub fn shrink_to_fit<T: Serialize>(list: &mut Vec<T>, payload_size_max_bytes: usize) -> Result<()> { + let size = compute_serialized_size(&list)?; + // See bug 535326 comment 8 for an explanation of the estimation + match ((payload_size_max_bytes / 4) * 3).checked_sub(1500) { + Some(max_serialized_size) => { + if size > max_serialized_size { + // Estimate a little more than the direct fraction to maximize packing + let cutoff = (list.len() * max_serialized_size - 1) / size + 1; + list.truncate(cutoff + 1); + // Keep dropping off the last entry until the data fits. + while compute_serialized_size(&list)? 
> max_serialized_size { + if list.pop().is_none() { + break; + } + } + } + Ok(()) + } + None => { + list.clear(); + Ok(()) + } + } +} + +#[cfg(test)] +mod tests { + use super::super::record::CommandRecord; + use super::*; + + #[test] + fn test_compute_serialized_size() { + assert_eq!(compute_serialized_size(&1).unwrap(), 1); + assert_eq!(compute_serialized_size(&"hi").unwrap(), 4); + assert_eq!( + compute_serialized_size(&["hi", "hello", "bye"]).unwrap(), + 20 + ); + } + + #[test] + fn test_shrink_to_fit() { + let mut commands = vec![ + CommandRecord { + name: "wipeEngine".into(), + args: vec!["bookmarks".into()], + flow_id: Some("flow".into()), + }, + CommandRecord { + name: "resetEngine".into(), + args: vec!["history".into()], + flow_id: Some("flow".into()), + }, + CommandRecord { + name: "logout".into(), + args: Vec::new(), + flow_id: None, + }, + ]; + + // 4096 bytes is enough to fit all three commands. + shrink_to_fit(&mut commands, 4096).unwrap(); + assert_eq!(commands.len(), 3); + + let sizes = commands + .iter() + .map(|c| compute_serialized_size(c).unwrap()) + .collect::<Vec<_>>(); + assert_eq!(sizes, &[61, 60, 30]); + + // `logout` won't fit within 2168 bytes. + shrink_to_fit(&mut commands, 2168).unwrap(); + assert_eq!(commands.len(), 2); + + // `resetEngine` won't fit within 2084 bytes. + shrink_to_fit(&mut commands, 2084).unwrap(); + assert_eq!(commands.len(), 1); + + // `wipeEngine` won't fit at all. + shrink_to_fit(&mut commands, 1024).unwrap(); + assert!(commands.is_empty()); + } +} diff --git a/third_party/rust/sync15/src/device_type.rs b/third_party/rust/sync15/src/device_type.rs new file mode 100644 index 0000000000..8afea0dd00 --- /dev/null +++ b/third_party/rust/sync15/src/device_type.rs @@ -0,0 +1,152 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +//! This type is strictly owned by FxA, but is defined in this crate because of +//! some hard-to-avoid hacks done for the tabs engine... See issue #2590. +//! +//! Thus, fxa-client ends up taking a dep on this crate, which is roughly +//! the opposite of reality. + +use serde::{Deserialize, Deserializer, Serialize, Serializer}; + +/// Enumeration for the different types of device. +/// +/// Firefox Accounts and the broader Sync universe separates devices into broad categories for +/// various purposes, such as distinguishing a desktop PC from a mobile phone. +/// +/// A special variant in this enum, `DeviceType::Unknown` is used to capture +/// the string values we don't recognise. It also has a custom serde serializer and deserializer +/// which implements the following semantics: +/// * deserializing a `DeviceType` which uses a string value we don't recognise or null will return +/// `DeviceType::Unknown` rather than returning an error. +/// * serializing `DeviceType::Unknown` will serialize `null`. +/// +/// This has a few important implications: +/// * In general, `Option<DeviceType>` should be avoided, and a plain `DeviceType` used instead, +/// because in that case, `None` would be semantically identical to `DeviceType::Unknown` and +/// as mentioned above, `null` already deserializes as `DeviceType::Unknown`. +/// * Any unknown device types can not be round-tripped via this enum - eg, if you deserialize +/// a struct holding a `DeviceType` string value we don't recognize, then re-serialize it, the +/// original string value is lost. 
We don't consider this a problem because in practice, we only +/// upload records with *this* device's type, not the type of other devices, and it's reasonable +/// to assume that this module knows about all valid device types for the device type it is +/// deployed on. +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] +pub enum DeviceType { + Desktop, + Mobile, + Tablet, + VR, + TV, + // See docstrings above re how Unknown is serialized and deserialized. + Unknown, +} + +impl Default for DeviceType { + fn default() -> Self { + DeviceType::Unknown + } +} + +impl<'de> Deserialize<'de> for DeviceType { + fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> + where + D: Deserializer<'de>, + { + Ok(match String::deserialize(deserializer) { + Ok(s) => match s.as_str() { + "desktop" => DeviceType::Desktop, + "mobile" => DeviceType::Mobile, + "tablet" => DeviceType::Tablet, + "vr" => DeviceType::VR, + "tv" => DeviceType::TV, + // There's a vague possibility that desktop might serialize "phone" for mobile + // devices - https://searchfox.org/mozilla-central/rev/a156a65ced2dae5913ae35a68e9445b8ee7ca457/services/sync/modules/engines/clients.js#292 + "phone" => DeviceType::Mobile, + // Everything else is Unknown. + _ => DeviceType::Unknown, + }, + // Anything other than a string is "unknown" - this isn't ideal - we really only want + // to handle null and, eg, a number probably should be an error, but meh. + Err(_) => DeviceType::Unknown, + }) + } +} +impl Serialize for DeviceType { + fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error> + where + S: Serializer, + { + match self { + // It's unfortunate we need to duplicate the strings here... + DeviceType::Desktop => s.serialize_unit_variant("DeviceType", 0, "desktop"), + DeviceType::Mobile => s.serialize_unit_variant("DeviceType", 1, "mobile"), + DeviceType::Tablet => s.serialize_unit_variant("DeviceType", 2, "tablet"), + DeviceType::VR => s.serialize_unit_variant("DeviceType", 3, "vr"), + DeviceType::TV => s.serialize_unit_variant("DeviceType", 4, "tv"), + // This is the important bit - Unknown -> None + DeviceType::Unknown => s.serialize_none(), + } + } +} + +#[cfg(test)] +mod device_type_tests { + use super::*; + + #[test] + fn test_serde_ser() { + assert_eq!( + serde_json::to_string(&DeviceType::Desktop).unwrap(), + "\"desktop\"" + ); + assert_eq!( + serde_json::to_string(&DeviceType::Mobile).unwrap(), + "\"mobile\"" + ); + assert_eq!( + serde_json::to_string(&DeviceType::Tablet).unwrap(), + "\"tablet\"" + ); + assert_eq!(serde_json::to_string(&DeviceType::VR).unwrap(), "\"vr\""); + assert_eq!(serde_json::to_string(&DeviceType::TV).unwrap(), "\"tv\""); + assert_eq!(serde_json::to_string(&DeviceType::Unknown).unwrap(), "null"); + } + + #[test] + fn test_serde_de() { + assert!(matches!( + serde_json::from_str::<DeviceType>("\"desktop\"").unwrap(), + DeviceType::Desktop + )); + assert!(matches!( + serde_json::from_str::<DeviceType>("\"mobile\"").unwrap(), + DeviceType::Mobile + )); + assert!(matches!( + serde_json::from_str::<DeviceType>("\"tablet\"").unwrap(), + DeviceType::Tablet + )); + assert!(matches!( + serde_json::from_str::<DeviceType>("\"vr\"").unwrap(), + DeviceType::VR + )); + assert!(matches!( + serde_json::from_str::<DeviceType>("\"tv\"").unwrap(), + DeviceType::TV + )); + assert!(matches!( + serde_json::from_str::<DeviceType>("\"something-else\"").unwrap(), + DeviceType::Unknown, + )); + assert!(matches!( + serde_json::from_str::<DeviceType>("null").unwrap(), + DeviceType::Unknown, + )); + assert!(matches!( + 
serde_json::from_str::<DeviceType>("99").unwrap(), + DeviceType::Unknown, + )); + } +} diff --git a/third_party/rust/sync15/src/enc_payload.rs b/third_party/rust/sync15/src/enc_payload.rs new file mode 100644 index 0000000000..2adc031f70 --- /dev/null +++ b/third_party/rust/sync15/src/enc_payload.rs @@ -0,0 +1,110 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +use crate::error; +use crate::key_bundle::KeyBundle; +use lazy_static::lazy_static; +use serde::{Deserialize, Serialize}; + +/// A representation of an encrypted payload. Used as the payload in EncryptedBso and +/// also anywhere else the sync keys might be used to encrypt/decrypt, such as send-tab payloads. +#[derive(Deserialize, Serialize, Clone, Debug)] +pub struct EncryptedPayload { + #[serde(rename = "IV")] + pub iv: String, + pub hmac: String, + pub ciphertext: String, +} + +impl EncryptedPayload { + #[inline] + pub fn serialized_len(&self) -> usize { + (*EMPTY_ENCRYPTED_PAYLOAD_SIZE) + self.ciphertext.len() + self.hmac.len() + self.iv.len() + } + + pub fn decrypt(&self, key: &KeyBundle) -> error::Result<String> { + key.decrypt(&self.ciphertext, &self.iv, &self.hmac) + } + + pub fn decrypt_into<T>(&self, key: &KeyBundle) -> error::Result<T> + where + for<'a> T: Deserialize<'a>, + { + Ok(serde_json::from_str(&self.decrypt(key)?)?) + } + + pub fn from_cleartext(key: &KeyBundle, cleartext: String) -> error::Result<Self> { + let (enc_base64, iv_base64, hmac_base16) = + key.encrypt_bytes_rand_iv(cleartext.as_bytes())?; + Ok(EncryptedPayload { + iv: iv_base64, + hmac: hmac_base16, + ciphertext: enc_base64, + }) + } + + pub fn from_cleartext_payload<T: Serialize>( + key: &KeyBundle, + cleartext_payload: &T, + ) -> error::Result<Self> { + Self::from_cleartext(key, serde_json::to_string(cleartext_payload)?) + } +} + +// Our "postqueue", which chunks records for upload, needs to know this value. +// It's tricky to determine at compile time, so do it once at at runtime. +lazy_static! { + // The number of bytes taken up by padding in a EncryptedPayload. 
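// In other words, this constant is the length of the JSON produced for an
// EncryptedPayload whose three fields are empty strings - the fixed overhead of
// the braces, quotes and the "IV"/"hmac"/"ciphertext" keys - and
// serialized_len() above adds the lengths of the actual base64/base16 field
// values on top of it.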
+ static ref EMPTY_ENCRYPTED_PAYLOAD_SIZE: usize = serde_json::to_string( + &EncryptedPayload { iv: "".into(), hmac: "".into(), ciphertext: "".into() } + ).unwrap().len(); +} + +#[cfg(test)] +mod tests { + use super::*; + use serde_json::json; + + #[derive(Serialize, Deserialize, Debug)] + struct TestStruct { + id: String, + age: u32, + meta: String, + } + + #[test] + fn test_roundtrip_crypt_record() { + let key = KeyBundle::new_random().unwrap(); + let payload_json = json!({ "id": "aaaaaaaaaaaa", "age": 105, "meta": "data" }); + let payload = + EncryptedPayload::from_cleartext(&key, serde_json::to_string(&payload_json).unwrap()) + .unwrap(); + + let record = payload.decrypt_into::<TestStruct>(&key).unwrap(); + assert_eq!(record.id, "aaaaaaaaaaaa"); + assert_eq!(record.age, 105); + assert_eq!(record.meta, "data"); + + // While we're here, check on EncryptedPayload::serialized_len + let val_rec = serde_json::to_string(&serde_json::to_value(&payload).unwrap()).unwrap(); + assert_eq!(payload.serialized_len(), val_rec.len()); + } + + #[test] + fn test_record_bad_hmac() { + let key1 = KeyBundle::new_random().unwrap(); + let json = json!({ "id": "aaaaaaaaaaaa", "deleted": true, }); + + let payload = + EncryptedPayload::from_cleartext(&key1, serde_json::to_string(&json).unwrap()).unwrap(); + + let key2 = KeyBundle::new_random().unwrap(); + let e = payload + .decrypt(&key2) + .expect_err("Should fail because wrong keybundle"); + + // Note: ErrorKind isn't PartialEq, so. + assert!(matches!(e, error::Error::CryptoError(_))); + } +} diff --git a/third_party/rust/sync15/src/engine/bridged_engine.rs b/third_party/rust/sync15/src/engine/bridged_engine.rs new file mode 100644 index 0000000000..c12780ba04 --- /dev/null +++ b/third_party/rust/sync15/src/engine/bridged_engine.rs @@ -0,0 +1,119 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +use anyhow::Result; + +use crate::bso::{IncomingBso, OutgoingBso}; +use crate::Guid; + +/// A BridgedEngine acts as a bridge between application-services, rust +/// implemented sync engines and sync engines as defined by Desktop Firefox. +/// +/// [Desktop Firefox has an abstract implementation of a Sync +/// Engine](https://searchfox.org/mozilla-central/source/services/sync/modules/engines.js) +/// with a number of functions each engine is expected to override. Engines +/// implemented in Rust use a different shape (specifically, the +/// [SyncEngine](crate::SyncEngine) trait), so this BridgedEngine trait adapts +/// between the 2. +pub trait BridgedEngine: Send + Sync { + /// Returns the last sync time, in milliseconds, for this engine's + /// collection. This is called before each sync, to determine the lower + /// bound for new records to fetch from the server. + fn last_sync(&self) -> Result<i64>; + + /// Sets the last sync time, in milliseconds. This is called throughout + /// the sync, to fast-forward the stored last sync time to match the + /// timestamp on the uploaded records. + fn set_last_sync(&self, last_sync_millis: i64) -> Result<()>; + + /// Returns the sync ID for this engine's collection. This is only used in + /// tests. + fn sync_id(&self) -> Result<Option<String>>; + + /// Resets the sync ID for this engine's collection, returning the new ID. + /// As a side effect, implementations should reset all local Sync state, + /// as in `reset`. 
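// (That is, reset_sync_id() behaves like reset() plus generating, persisting
// and returning a fresh sync ID for this collection.)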
+ fn reset_sync_id(&self) -> Result<String>; + + /// Ensures that the locally stored sync ID for this engine's collection + /// matches the `new_sync_id` from the server. If the two don't match, + /// implementations should reset all local Sync state, as in `reset`. + /// This method returns the assigned sync ID, which can be either the + /// `new_sync_id`, or a different one if the engine wants to force other + /// devices to reset their Sync state for this collection the next time they + /// sync. + fn ensure_current_sync_id(&self, new_sync_id: &str) -> Result<String>; + + /// Tells the tabs engine about recent FxA devices. A bit of a leaky abstration as it only + /// makes sense for tabs. + /// The arg is a json serialized `ClientData` struct. + fn prepare_for_sync(&self, _client_data: &str) -> Result<()> { + Ok(()) + } + + /// Indicates that the engine is about to start syncing. This is called + /// once per sync, and always before `store_incoming`. + fn sync_started(&self) -> Result<()>; + + /// Stages a batch of incoming Sync records. This is called multiple + /// times per sync, once for each batch. Implementations can use the + /// signal to check if the operation was aborted, and cancel any + /// pending work. + fn store_incoming(&self, incoming_records: Vec<IncomingBso>) -> Result<()>; + + /// Applies all staged records, reconciling changes on both sides and + /// resolving conflicts. Returns a list of records to upload. + fn apply(&self) -> Result<ApplyResults>; + + /// Indicates that the given record IDs were uploaded successfully to the + /// server. This is called multiple times per sync, once for each batch + /// upload. + fn set_uploaded(&self, server_modified_millis: i64, ids: &[Guid]) -> Result<()>; + + /// Indicates that all records have been uploaded. At this point, any record + /// IDs marked for upload that haven't been passed to `set_uploaded`, can be + /// assumed to have failed: for example, because the server rejected a record + /// with an invalid TTL or sort index. + fn sync_finished(&self) -> Result<()>; + + /// Resets all local Sync state, including any change flags, mirrors, and + /// the last sync time, such that the next sync is treated as a first sync + /// with all new local data. Does not erase any local user data. + fn reset(&self) -> Result<()>; + + /// Erases all local user data for this collection, and any Sync metadata. + /// This method is destructive, and unused for most collections. + fn wipe(&self) -> Result<()>; +} + +// TODO: We should replace this with OutgoingChangeset to reduce the number +// of types engines need to deal with. +#[derive(Debug, Default)] +pub struct ApplyResults { + /// List of records + pub records: Vec<OutgoingBso>, + /// The number of incoming records whose contents were merged because they + /// changed on both sides. None indicates we aren't reporting this + /// information. + pub num_reconciled: Option<usize>, +} + +impl ApplyResults { + pub fn new(records: Vec<OutgoingBso>, num_reconciled: impl Into<Option<usize>>) -> Self { + Self { + records, + num_reconciled: num_reconciled.into(), + } + } +} + +// Shorthand for engines that don't care. 
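For orientation, here is a minimal, hypothetical sketch of how a bridge might drive a BridgedEngine through one sync. It is not taken from this crate; it assumes the crate is consumed as `sync15` with the relevant features enabled, and that the caller performs the actual upload of the returned records and collects their IDs. The function names are invented for illustration.

use anyhow::Result;
use sync15::bso::{IncomingBso, OutgoingBso};
use sync15::engine::BridgedEngine;
use sync15::Guid;

/// Stage the downloaded records and ask the engine what to upload.
fn drive_one_sync(
    engine: &dyn BridgedEngine,
    incoming: Vec<IncomingBso>,
) -> Result<Vec<OutgoingBso>> {
    engine.sync_started()?;
    // In a real bridge this is called once per downloaded batch.
    engine.store_incoming(incoming)?;
    // Reconcile both sides; `records` is what the caller should upload.
    let results = engine.apply()?;
    Ok(results.records)
}

/// Tell the engine how the upload went, then let it finish up.
fn finish_one_sync(
    engine: &dyn BridgedEngine,
    server_modified_millis: i64,
    uploaded_ids: &[Guid],
) -> Result<()> {
    engine.set_uploaded(server_modified_millis, uploaded_ids)?;
    engine.sync_finished()
}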
+impl From<Vec<OutgoingBso>> for ApplyResults { + fn from(records: Vec<OutgoingBso>) -> Self { + Self { + records, + num_reconciled: None, + } + } +} diff --git a/third_party/rust/sync15/src/engine/changeset.rs b/third_party/rust/sync15/src/engine/changeset.rs new file mode 100644 index 0000000000..c5a4e65fcd --- /dev/null +++ b/third_party/rust/sync15/src/engine/changeset.rs @@ -0,0 +1,52 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +use crate::bso::{IncomingBso, OutgoingBso}; +use crate::{CollectionName, ServerTimestamp}; + +// Incoming and Outgoing changesets are almost identical except for the timestamp. +// Separate types still helps avoid confusion with that timestamp, so they're split. +#[derive(Debug)] +pub struct IncomingChangeset { + pub changes: Vec<IncomingBso>, + /// The server timestamp of the collection. + pub timestamp: ServerTimestamp, + pub collection: CollectionName, +} + +impl IncomingChangeset { + #[inline] + pub fn new(collection: CollectionName, timestamp: ServerTimestamp) -> Self { + Self::new_with_changes(collection, timestamp, Vec::new()) + } + + #[inline] + pub fn new_with_changes( + collection: CollectionName, + timestamp: ServerTimestamp, + changes: Vec<IncomingBso>, + ) -> Self { + Self { + changes, + timestamp, + collection, + } + } +} + +#[derive(Debug)] +pub struct OutgoingChangeset { + pub changes: Vec<OutgoingBso>, + pub collection: CollectionName, +} + +impl OutgoingChangeset { + #[inline] + pub fn new(collection: CollectionName, changes: Vec<OutgoingBso>) -> Self { + Self { + collection, + changes, + } + } +} diff --git a/third_party/rust/sync15/src/engine/mod.rs b/third_party/rust/sync15/src/engine/mod.rs new file mode 100644 index 0000000000..427b779c9b --- /dev/null +++ b/third_party/rust/sync15/src/engine/mod.rs @@ -0,0 +1,38 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +//! This module is used by crates which need to implement a "sync engine". +//! At a high-level, a "sync engine" is code which knows how to take records +//! from a sync server, apply and reconcile them with the local data, then +//! provide records which should be uploaded to the server. +//! +//! Note that the "sync engine" does not itself talk to the server, nor does +//! it manage the state of the remote server, nor does it do any of the +//! encryption/decryption - that is the responsbility of the "sync client", as +//! implemented in the [client] module (or in some cases, implemented externally) +//! +//! There are currently 2 types of engine: +//! * Code which implements the [crate::engine::sync_engine::SyncEngine] +//! trait. These are the "original" Rust engines, designed to be used with +//! the [crate::client](sync client) +//! * Code which implements the [crate::engine::bridged_engine::BridgedEngine] +//! trait. These engines are a "bridge" between the Desktop JS Sync world and +//! this rust code. +//! While these engines end up doing the same thing, the difference is due to +//! implementation differences between the Desktop Sync client and the Rust +//! client. +//! We intend merging these engines - the first step will be to merge the +//! types and payload management used by these traits, then to combine the +//! 
requirements into a single trait that captures both use-cases. +mod bridged_engine; +mod changeset; +mod request; +mod sync_engine; + +pub use bridged_engine::{ApplyResults, BridgedEngine}; +pub use changeset::{IncomingChangeset, OutgoingChangeset}; +#[cfg(feature = "sync-client")] +pub(crate) use request::CollectionPost; +pub use request::{CollectionRequest, RequestOrder}; +pub use sync_engine::{CollSyncIds, EngineSyncAssociation, SyncEngine, SyncEngineId}; diff --git a/third_party/rust/sync15/src/engine/request.rs b/third_party/rust/sync15/src/engine/request.rs new file mode 100644 index 0000000000..7d634bb5e7 --- /dev/null +++ b/third_party/rust/sync15/src/engine/request.rs @@ -0,0 +1,125 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ +use crate::{CollectionName, Guid, ServerTimestamp}; +#[derive(Debug, Default, Clone, PartialEq, Eq)] +pub struct CollectionRequest { + pub collection: CollectionName, + pub full: bool, + pub ids: Option<Vec<Guid>>, + + pub limit: Option<RequestLimit>, + pub older: Option<ServerTimestamp>, + pub newer: Option<ServerTimestamp>, +} + +impl CollectionRequest { + #[inline] + pub fn new(collection: CollectionName) -> CollectionRequest { + CollectionRequest { + collection, + ..Default::default() + } + } + + #[inline] + pub fn ids<V>(mut self, v: V) -> CollectionRequest + where + V: IntoIterator, + V::Item: Into<Guid>, + { + self.ids = Some(v.into_iter().map(|id| id.into()).collect()); + self + } + + #[inline] + pub fn full(mut self) -> CollectionRequest { + self.full = true; + self + } + + #[inline] + pub fn older_than(mut self, ts: ServerTimestamp) -> CollectionRequest { + self.older = Some(ts); + self + } + + #[inline] + pub fn newer_than(mut self, ts: ServerTimestamp) -> CollectionRequest { + self.newer = Some(ts); + self + } + + #[inline] + pub fn limit(mut self, num: usize, order: RequestOrder) -> CollectionRequest { + self.limit = Some(RequestLimit { num, order }); + self + } +} + +// This is just used interally - consumers just provide the content, not request params. +#[cfg(feature = "sync-client")] +#[derive(Debug, Default, Clone, PartialEq, Eq)] +pub(crate) struct CollectionPost { + pub collection: CollectionName, + pub commit: bool, + pub batch: Option<String>, +} + +#[cfg(feature = "sync-client")] +impl CollectionPost { + #[inline] + pub fn new(collection: CollectionName) -> Self { + Self { + collection, + ..Default::default() + } + } + + #[inline] + pub fn batch(mut self, batch: Option<String>) -> Self { + self.batch = batch; + self + } + + #[inline] + pub fn commit(mut self, v: bool) -> Self { + self.commit = v; + self + } +} + +// Asking for the order of records only makes sense if you are limiting them +// in some way - consumers don't care about the order otherwise as everything +// is processed as a set. 
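For illustration, a hypothetical caller might build such a limited, ordered request like this (assuming the `sync-engine` feature; the collection name and limit are made up):

use sync15::engine::{CollectionRequest, RequestOrder};
use sync15::ServerTimestamp;

// Ask for full "bookmarks" records changed since `since`, newest first,
// capped at 500 records. Specifying a limit requires picking an order.
fn bookmarks_request(since: ServerTimestamp) -> CollectionRequest {
    CollectionRequest::new("bookmarks".into())
        .full()
        .newer_than(since)
        .limit(500, RequestOrder::Newest)
}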
+#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)] +pub enum RequestOrder { + Oldest, + Newest, + Index, +} + +impl RequestOrder { + #[inline] + pub fn as_str(self) -> &'static str { + match self { + RequestOrder::Oldest => "oldest", + RequestOrder::Newest => "newest", + RequestOrder::Index => "index", + } + } +} + +impl std::fmt::Display for RequestOrder { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_str(self.as_str()) + } +} + +// If you specify a numerical limit you must provide the order so backfilling +// is possible (ie, so you know which ones you got!) +#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)] +pub struct RequestLimit { + pub(crate) num: usize, + pub(crate) order: RequestOrder, +} diff --git a/third_party/rust/sync15/src/engine/sync_engine.rs b/third_party/rust/sync15/src/engine/sync_engine.rs new file mode 100644 index 0000000000..79b9b1a522 --- /dev/null +++ b/third_party/rust/sync15/src/engine/sync_engine.rs @@ -0,0 +1,235 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +use super::{CollectionRequest, IncomingChangeset, OutgoingChangeset}; +use crate::client_types::ClientData; +use crate::{telemetry, CollectionName, Guid, ServerTimestamp}; +use anyhow::Result; +use std::fmt; + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct CollSyncIds { + pub global: Guid, + pub coll: Guid, +} + +/// Defines how an engine is associated with a particular set of records +/// on a sync storage server. It's either disconnected, or believes it is +/// connected with a specific set of GUIDs. If the server and the engine don't +/// agree on the exact GUIDs, the engine will assume something radical happened +/// so it can't believe anything it thinks it knows about the state of the +/// server (ie, it will "reset" then do a full reconcile) +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum EngineSyncAssociation { + /// This store is disconnected (although it may be connected in the future). + Disconnected, + /// Sync is connected, and has the following sync IDs. + Connected(CollSyncIds), +} + +/// The concrete `SyncEngine` implementations +#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub enum SyncEngineId { + // Note that we've derived PartialOrd etc, which uses lexicographic ordering + // of the variants. We leverage that such that the higher priority engines + // are listed first. + // This order matches desktop. + Passwords, + Tabs, + Bookmarks, + Addresses, + CreditCards, + History, +} + +impl SyncEngineId { + // Iterate over all possible engines. Note that we've made a policy decision + // that this should enumerate in "order" as defined by PartialCmp, and tests + // enforce this. + pub fn iter() -> impl Iterator<Item = SyncEngineId> { + [ + Self::Passwords, + Self::Tabs, + Self::Bookmarks, + Self::Addresses, + Self::CreditCards, + Self::History, + ] + .into_iter() + } + + // Get the string identifier for this engine. This must match the strings in SyncEngineSelection. 
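// For example, SyncEngineId::Passwords.name() is "passwords", and
// SyncEngineId::try_from("passwords") gives back Ok(SyncEngineId::Passwords).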
+ pub fn name(&self) -> &'static str { + match self { + Self::Passwords => "passwords", + Self::History => "history", + Self::Bookmarks => "bookmarks", + Self::Tabs => "tabs", + Self::Addresses => "addresses", + Self::CreditCards => "creditcards", + } + } +} + +impl fmt::Display for SyncEngineId { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.name()) + } +} + +impl TryFrom<&str> for SyncEngineId { + type Error = String; + + fn try_from(value: &str) -> std::result::Result<Self, Self::Error> { + match value { + "passwords" => Ok(Self::Passwords), + "history" => Ok(Self::History), + "bookmarks" => Ok(Self::Bookmarks), + "tabs" => Ok(Self::Tabs), + "addresses" => Ok(Self::Addresses), + "creditcards" => Ok(Self::CreditCards), + _ => Err(value.into()), + } + } +} + +/// A "sync engine" is a thing that knows how to sync. It's often implemented +/// by a "store" (which is the generic term responsible for all storage +/// associated with a component, including storage required for sync.) +/// +/// Low-level engine functionality. Engines that need custom reconciliation +/// logic should use this. +/// +/// Different engines will produce errors of different types. To accommodate +/// this, we force them all to return anyhow::Error. +pub trait SyncEngine { + fn collection_name(&self) -> CollectionName; + + /// Prepares the engine for syncing. The tabs engine currently uses this to + /// store the current list of clients, which it uses to look up device names + /// and types. + /// + /// Note that this method is only called by `sync_multiple`, and only if a + /// command processor is registered. In particular, `prepare_for_sync` will + /// not be called if the store is synced using `sync::synchronize` or + /// `sync_multiple::sync_multiple`. It _will_ be called if the store is + /// synced via the Sync Manager. + /// + /// TODO(issue #2590): This is pretty cludgey and will be hard to extend for + /// any case other than the tabs case. We should find another way to support + /// tabs... + fn prepare_for_sync(&self, _get_client_data: &dyn Fn() -> ClientData) -> Result<()> { + Ok(()) + } + + /// Tells the engine what the local encryption key is for the data managed + /// by the engine. This is only used by collections that store data + /// encrypted locally and is unrelated to the encryption used by Sync. + /// The intent is that for such collections, this key can be used to + /// decrypt local data before it is re-encrypted by Sync and sent to the + /// storage servers, and similarly, data from the storage servers will be + /// decrypted by Sync, then encrypted by the local encryption key before + /// being added to the local database. + /// + /// The expectation is that the key value is being maintained by the + /// embedding application in some secure way suitable for the environment + /// in which the app is running - eg, the OS "keychain". The value of the + /// key is implementation dependent - it is expected that the engine and + /// embedding application already have some external agreement about how + /// to generate keys and in what form they are exchanged. Finally, there's + /// an assumption that sync engines are short-lived and only live for a + /// single sync - this means that sync doesn't hold on to the key for an + /// extended period. + /// + /// This will panic if called by an engine that doesn't have explicit + /// support for local encryption keys as that implies a degree of confusion + /// which shouldn't be possible to ignore. 
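// (Only engines whose local store is itself encrypted - with a key the
// embedding app holds in something like the OS keychain - override this;
// everything else keeps the default below, which deliberately panics.)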
+ fn set_local_encryption_key(&mut self, _key: &str) -> Result<()> { + unimplemented!("This engine does not support local encryption"); + } + + /// `inbound` is a vector to support the case where + /// `get_collection_requests` returned multiple requests. The changesets are + /// in the same order as the requests were -- e.g. if `vec![req_a, req_b]` + /// was returned from `get_collection_requests`, `inbound` will have the + /// results from `req_a` as its first index, and those from `req_b` as it's + /// second. + fn apply_incoming( + &self, + inbound: Vec<IncomingChangeset>, + telem: &mut telemetry::Engine, + ) -> Result<OutgoingChangeset>; + + fn sync_finished( + &self, + new_timestamp: ServerTimestamp, + records_synced: Vec<Guid>, + ) -> Result<()>; + + /// The engine is responsible for building collection requests. Engines + /// typically will store a lastModified timestamp and use that to build a + /// request saying "give me full records since that date" - however, other + /// engines might do something fancier. This could even later be extended to + /// handle "backfills" etc + /// + /// To support more advanced use cases, multiple requests can be returned + /// here - either from the same or different collections. The vast majority + /// of engines will just want to return zero or one item in their vector + /// (zero is a valid optimization when the server timestamp is the same as + /// the engine last saw, one when it is not) + /// + /// Important: In the case when more than one collection is requested, it's + /// assumed the last one is the "canonical" one. (That is, it must be for + /// "this" collection, its timestamp is used to represent the sync, etc). + /// (Note that multiple collection request support is currently unused, so + /// it might make sense to delete it - if we need it later, we may find a + /// better shape for our use-case) + fn get_collection_requests( + &self, + server_timestamp: ServerTimestamp, + ) -> Result<Vec<CollectionRequest>>; + + /// Get persisted sync IDs. If they don't match the global state we'll be + /// `reset()` with the new IDs. + fn get_sync_assoc(&self) -> Result<EngineSyncAssociation>; + + /// Reset the engine (and associated store) without wiping local data, + /// ready for a "first sync". + /// `assoc` defines how this store is to be associated with sync. + fn reset(&self, assoc: &EngineSyncAssociation) -> Result<()>; + + fn wipe(&self) -> Result<()>; +} + +#[cfg(test)] +mod test { + use super::*; + use std::iter::zip; + + #[test] + fn test_engine_priority() { + fn sorted(mut engines: Vec<SyncEngineId>) -> Vec<SyncEngineId> { + engines.sort(); + engines + } + assert_eq!( + vec![SyncEngineId::Passwords, SyncEngineId::Tabs], + sorted(vec![SyncEngineId::Passwords, SyncEngineId::Tabs]) + ); + assert_eq!( + vec![SyncEngineId::Passwords, SyncEngineId::Tabs], + sorted(vec![SyncEngineId::Tabs, SyncEngineId::Passwords]) + ); + } + + #[test] + fn test_engine_enum_order() { + let unsorted = SyncEngineId::iter().collect::<Vec<SyncEngineId>>(); + let mut sorted = SyncEngineId::iter().collect::<Vec<SyncEngineId>>(); + sorted.sort(); + + // iterating should supply identical elements in each. 
+ assert!(zip(unsorted, sorted).fold(true, |acc, (a, b)| acc && (a == b))) + } +} diff --git a/third_party/rust/sync15/src/error.rs b/third_party/rust/sync15/src/error.rs new file mode 100644 index 0000000000..e4a043b8ac --- /dev/null +++ b/third_party/rust/sync15/src/error.rs @@ -0,0 +1,138 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +use interrupt_support::Interrupted; + +/// This enum is to discriminate `StorageHttpError`, and not used as an error. +#[cfg(feature = "sync-client")] +#[derive(Debug, Clone)] +pub enum ErrorResponse { + NotFound { route: String }, + // 401 + Unauthorized { route: String }, + // 412 + PreconditionFailed { route: String }, + // 5XX + ServerError { route: String, status: u16 }, // TODO: info for "retry-after" and backoff handling etc here. + // Other HTTP responses. + RequestFailed { route: String, status: u16 }, +} + +pub type Result<T> = std::result::Result<T, Error>; + +#[derive(Debug, thiserror::Error)] +pub enum Error { + #[cfg(feature = "crypto")] + #[error("Key {0} had wrong length, got {1}, expected {2}")] + BadKeyLength(&'static str, usize, usize), + + #[cfg(feature = "crypto")] + #[error("SHA256 HMAC Mismatch error")] + HmacMismatch, + + #[cfg(feature = "crypto")] + #[error("Crypto/NSS error: {0}")] + CryptoError(#[from] rc_crypto::Error), + + #[cfg(feature = "crypto")] + #[error("Base64 decode error: {0}")] + Base64Decode(#[from] base64::DecodeError), + + #[error("JSON error: {0}")] + JsonError(#[from] serde_json::Error), + + #[error("Bad cleartext UTF8: {0}")] + BadCleartextUtf8(#[from] std::string::FromUtf8Error), + + #[cfg(feature = "crypto")] + #[error("HAWK error: {0}")] + HawkError(#[from] rc_crypto::hawk::Error), + + // + // Errors specific to this module. + // + #[cfg(feature = "sync-client")] + #[error("HTTP status {0} when requesting a token from the tokenserver")] + TokenserverHttpError(u16), + + #[cfg(feature = "sync-client")] + #[error("HTTP storage error: {0:?}")] + StorageHttpError(ErrorResponse), + + #[cfg(feature = "sync-client")] + #[error("Server requested backoff. Retry after {0:?}")] + BackoffError(std::time::SystemTime), + + #[cfg(feature = "sync-client")] + #[error("Outgoing record is too large to upload")] + RecordTooLargeError, + + // Do we want to record the concrete problems? + #[cfg(feature = "sync-client")] + #[error("Not all records were successfully uploaded")] + RecordUploadFailed, + + /// Used for things like a node reassignment or an unexpected syncId + /// implying the app needs to "reset" its understanding of remote storage. 
+ #[cfg(feature = "sync-client")] + #[error("The server has reset the storage for this account")] + StorageResetError, + + #[cfg(feature = "sync-client")] + #[error("Unacceptable URL: {0}")] + UnacceptableUrl(String), + + #[cfg(feature = "sync-client")] + #[error("Missing server timestamp header in request")] + MissingServerTimestamp, + + #[cfg(feature = "sync-client")] + #[error("Unexpected server behavior during batch upload: {0}")] + ServerBatchProblem(&'static str), + + #[cfg(feature = "sync-client")] + #[error("It appears some other client is also trying to setup storage; try again later")] + SetupRace, + + #[cfg(feature = "sync-client")] + #[error("Client upgrade required; server storage version too new")] + ClientUpgradeRequired, + + // This means that our global state machine needs to enter a state (such as + // "FreshStartNeeded", but the allowed_states don't include that state.) + // It typically means we are trying to do a "fast" or "read-only" sync. + #[cfg(feature = "sync-client")] + #[error("Our storage needs setting up and we can't currently do it")] + SetupRequired, + + #[cfg(feature = "sync-client")] + #[error("Store error: {0}")] + StoreError(#[from] anyhow::Error), + + #[cfg(feature = "sync-client")] + #[error("Network error: {0}")] + RequestError(#[from] viaduct::Error), + + #[cfg(feature = "sync-client")] + #[error("Unexpected HTTP status: {0}")] + UnexpectedStatus(#[from] viaduct::UnexpectedStatus), + + #[cfg(feature = "sync-client")] + #[error("URL parse error: {0}")] + MalformedUrl(#[from] url::ParseError), + + #[error("The operation was interrupted.")] + Interrupted(#[from] Interrupted), +} + +#[cfg(feature = "sync-client")] +impl Error { + pub(crate) fn get_backoff(&self) -> Option<std::time::SystemTime> { + if let Error::BackoffError(time) = self { + Some(*time) + } else { + None + } + } +} diff --git a/third_party/rust/sync15/src/key_bundle.rs b/third_party/rust/sync15/src/key_bundle.rs new file mode 100644 index 0000000000..25d8ccc5ca --- /dev/null +++ b/third_party/rust/sync15/src/key_bundle.rs @@ -0,0 +1,224 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +use crate::error::{Error, Result}; +use rc_crypto::{ + aead::{self, OpeningKey, SealingKey}, + rand, +}; + +#[derive(Clone, PartialEq, Eq, Hash)] +pub struct KeyBundle { + enc_key: Vec<u8>, + mac_key: Vec<u8>, +} + +impl std::fmt::Debug for KeyBundle { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("KeyBundle").finish() + } +} + +impl KeyBundle { + /// Construct a key bundle from the already-decoded encrypt and hmac keys. + /// Panics (asserts) if they aren't both 32 bytes. 
+ pub fn new(enc: Vec<u8>, mac: Vec<u8>) -> Result<KeyBundle> { + if enc.len() != 32 { + error_support::report_error!( + "sync15-key-bundle", + "Bad key length (enc_key): {} != 32", + enc.len() + ); + return Err(Error::BadKeyLength("enc_key", enc.len(), 32)); + } + if mac.len() != 32 { + error_support::report_error!( + "sync15-key-bundle", + "Bad key length (mac_key): {} != 32", + mac.len() + ); + return Err(Error::BadKeyLength("mac_key", mac.len(), 32)); + } + Ok(KeyBundle { + enc_key: enc, + mac_key: mac, + }) + } + + pub fn new_random() -> Result<KeyBundle> { + let mut buffer = [0u8; 64]; + rand::fill(&mut buffer)?; + KeyBundle::from_ksync_bytes(&buffer) + } + + pub fn from_ksync_bytes(ksync: &[u8]) -> Result<KeyBundle> { + if ksync.len() != 64 { + error_support::report_error!( + "sync15-key-bundle", + "Bad key length (kSync): {} != 64", + ksync.len() + ); + return Err(Error::BadKeyLength("kSync", ksync.len(), 64)); + } + Ok(KeyBundle { + enc_key: ksync[0..32].into(), + mac_key: ksync[32..64].into(), + }) + } + + pub fn from_ksync_base64(ksync: &str) -> Result<KeyBundle> { + let bytes = base64::decode_config(ksync, base64::URL_SAFE_NO_PAD)?; + KeyBundle::from_ksync_bytes(&bytes) + } + + pub fn from_base64(enc: &str, mac: &str) -> Result<KeyBundle> { + let enc_bytes = base64::decode(enc)?; + let mac_bytes = base64::decode(mac)?; + KeyBundle::new(enc_bytes, mac_bytes) + } + + #[inline] + pub fn encryption_key(&self) -> &[u8] { + &self.enc_key + } + + #[inline] + pub fn hmac_key(&self) -> &[u8] { + &self.mac_key + } + + #[inline] + pub fn to_b64_array(&self) -> [String; 2] { + [base64::encode(&self.enc_key), base64::encode(&self.mac_key)] + } + + /// Decrypt the provided ciphertext with the given iv, and decodes the + /// result as a utf8 string. + pub fn decrypt(&self, enc_base64: &str, iv_base64: &str, hmac_base16: &str) -> Result<String> { + // Decode the expected_hmac into bytes to avoid issues if a client happens to encode + // this as uppercase. This shouldn't happen in practice, but doing it this way is more + // robust and avoids an allocation. + let mut decoded_hmac = vec![0u8; 32]; + if base16::decode_slice(hmac_base16, &mut decoded_hmac).is_err() { + log::warn!("Garbage HMAC verification string: contained non base16 characters"); + return Err(Error::HmacMismatch); + } + let iv = base64::decode(iv_base64)?; + let ciphertext_bytes = base64::decode(enc_base64)?; + let key_bytes = [self.encryption_key(), self.hmac_key()].concat(); + let key = OpeningKey::new(&aead::LEGACY_SYNC_AES_256_CBC_HMAC_SHA256, &key_bytes)?; + let nonce = aead::Nonce::try_assume_unique_for_key( + &aead::LEGACY_SYNC_AES_256_CBC_HMAC_SHA256, + &iv, + )?; + let ciphertext_and_hmac = [ciphertext_bytes, decoded_hmac].concat(); + let cleartext_bytes = aead::open(&key, nonce, aead::Aad::empty(), &ciphertext_and_hmac)?; + let cleartext = String::from_utf8(cleartext_bytes)?; + Ok(cleartext) + } + + /// Encrypt using the provided IV. 
+ pub fn encrypt_bytes_with_iv( + &self, + cleartext_bytes: &[u8], + iv: &[u8], + ) -> Result<(String, String)> { + let key_bytes = [self.encryption_key(), self.hmac_key()].concat(); + let key = SealingKey::new(&aead::LEGACY_SYNC_AES_256_CBC_HMAC_SHA256, &key_bytes)?; + let nonce = + aead::Nonce::try_assume_unique_for_key(&aead::LEGACY_SYNC_AES_256_CBC_HMAC_SHA256, iv)?; + let ciphertext_and_hmac = aead::seal(&key, nonce, aead::Aad::empty(), cleartext_bytes)?; + let ciphertext_len = ciphertext_and_hmac.len() - key.algorithm().tag_len(); + // Do the string conversions here so we don't have to split and copy to 2 vectors. + let (ciphertext, hmac_signature) = ciphertext_and_hmac.split_at(ciphertext_len); + let enc_base64 = base64::encode(ciphertext); + let hmac_base16 = base16::encode_lower(&hmac_signature); + Ok((enc_base64, hmac_base16)) + } + + /// Generate a random iv and encrypt with it. Return both the encrypted bytes + /// and the generated iv. + pub fn encrypt_bytes_rand_iv( + &self, + cleartext_bytes: &[u8], + ) -> Result<(String, String, String)> { + let mut iv = [0u8; 16]; + rand::fill(&mut iv)?; + let (enc_base64, hmac_base16) = self.encrypt_bytes_with_iv(cleartext_bytes, &iv)?; + let iv_base64 = base64::encode(iv); + Ok((enc_base64, iv_base64, hmac_base16)) + } + + pub fn encrypt_with_iv(&self, cleartext: &str, iv: &[u8]) -> Result<(String, String)> { + self.encrypt_bytes_with_iv(cleartext.as_bytes(), iv) + } + + pub fn encrypt_rand_iv(&self, cleartext: &str) -> Result<(String, String, String)> { + self.encrypt_bytes_rand_iv(cleartext.as_bytes()) + } +} + +#[cfg(test)] +mod test { + use super::*; + + const HMAC_B16: &str = "b1e6c18ac30deb70236bc0d65a46f7a4dce3b8b0e02cf92182b914e3afa5eebc"; + const IV_B64: &str = "GX8L37AAb2FZJMzIoXlX8w=="; + const HMAC_KEY_B64: &str = "MMntEfutgLTc8FlTLQFms8/xMPmCldqPlq/QQXEjx70="; + const ENC_KEY_B64: &str = "9K/wLdXdw+nrTtXo4ZpECyHFNr4d7aYHqeg3KW9+m6Q="; + + const CIPHERTEXT_B64_PIECES: &[&str] = &[ + "NMsdnRulLwQsVcwxKW9XwaUe7ouJk5Wn80QhbD80l0HEcZGCynh45qIbeYBik0lgcHbK", + "mlIxTJNwU+OeqipN+/j7MqhjKOGIlvbpiPQQLC6/ffF2vbzL0nzMUuSyvaQzyGGkSYM2", + "xUFt06aNivoQTvU2GgGmUK6MvadoY38hhW2LCMkoZcNfgCqJ26lO1O0sEO6zHsk3IVz6", + "vsKiJ2Hq6VCo7hu123wNegmujHWQSGyf8JeudZjKzfi0OFRRvvm4QAKyBWf0MgrW1F8S", + "FDnVfkq8amCB7NhdwhgLWbN+21NitNwWYknoEWe1m6hmGZDgDT32uxzWxCV8QqqrpH/Z", + "ggViEr9uMgoy4lYaWqP7G5WKvvechc62aqnsNEYhH26A5QgzmlNyvB+KPFvPsYzxDnSC", + "jOoRSLx7GG86wT59QZw=", + ]; + + const CLEARTEXT_B64_PIECES: &[&str] = &[ + "eyJpZCI6IjVxUnNnWFdSSlpYciIsImhpc3RVcmkiOiJmaWxlOi8vL1VzZXJzL2phc29u", + "L0xpYnJhcnkvQXBwbGljYXRpb24lMjBTdXBwb3J0L0ZpcmVmb3gvUHJvZmlsZXMva3Nn", + "ZDd3cGsuTG9jYWxTeW5jU2VydmVyL3dlYXZlL2xvZ3MvIiwidGl0bGUiOiJJbmRleCBv", + "ZiBmaWxlOi8vL1VzZXJzL2phc29uL0xpYnJhcnkvQXBwbGljYXRpb24gU3VwcG9ydC9G", + "aXJlZm94L1Byb2ZpbGVzL2tzZ2Q3d3BrLkxvY2FsU3luY1NlcnZlci93ZWF2ZS9sb2dz", + "LyIsInZpc2l0cyI6W3siZGF0ZSI6MTMxOTE0OTAxMjM3MjQyNSwidHlwZSI6MX1dfQ==", + ]; + + #[test] + fn test_decrypt() { + let key_bundle = KeyBundle::from_base64(ENC_KEY_B64, HMAC_KEY_B64).unwrap(); + let ciphertext = CIPHERTEXT_B64_PIECES.join(""); + let s = key_bundle.decrypt(&ciphertext, IV_B64, HMAC_B16).unwrap(); + + let cleartext = + String::from_utf8(base64::decode(CLEARTEXT_B64_PIECES.join("")).unwrap()).unwrap(); + assert_eq!(&cleartext, &s); + } + + #[test] + fn test_encrypt() { + let key_bundle = KeyBundle::from_base64(ENC_KEY_B64, HMAC_KEY_B64).unwrap(); + let iv = base64::decode(IV_B64).unwrap(); + + let cleartext_bytes = 
base64::decode(CLEARTEXT_B64_PIECES.join("")).unwrap(); + let (enc_base64, _hmac_base16) = key_bundle + .encrypt_bytes_with_iv(&cleartext_bytes, &iv) + .unwrap(); + + let expect_ciphertext = CIPHERTEXT_B64_PIECES.join(""); + + assert_eq!(&enc_base64, &expect_ciphertext); + + let (enc_base64_2, iv_base64_2, hmac_base16_2) = + key_bundle.encrypt_bytes_rand_iv(&cleartext_bytes).unwrap(); + assert_ne!(&enc_base64_2, &expect_ciphertext); + + let s = key_bundle + .decrypt(&enc_base64_2, &iv_base64_2, &hmac_base16_2) + .unwrap(); + assert_eq!(&cleartext_bytes, &s.as_bytes()); + } +} diff --git a/third_party/rust/sync15/src/lib.rs b/third_party/rust/sync15/src/lib.rs new file mode 100644 index 0000000000..21fcf60cb5 --- /dev/null +++ b/third_party/rust/sync15/src/lib.rs @@ -0,0 +1,48 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#![allow(unknown_lints, clippy::implicit_hasher)] +#![warn(rust_2018_idioms)] + +pub mod bso; +#[cfg(feature = "sync-client")] +pub mod client; +// Type to describe device types +mod device_type; +// Types to describe client records +mod client_types; +// Note that `clients_engine` should probably be in `sync_client`, but let's not make +// things too nested at this stage... +#[cfg(feature = "sync-client")] +pub mod clients_engine; +#[cfg(feature = "crypto")] +mod enc_payload; +#[cfg(feature = "sync-engine")] +pub mod engine; +mod error; +#[cfg(feature = "crypto")] +mod key_bundle; +mod record_types; +mod server_timestamp; +pub mod telemetry; + +pub use crate::client_types::{ClientData, RemoteClient}; +pub use crate::device_type::DeviceType; +pub use crate::error::{Error, Result}; +#[cfg(feature = "crypto")] +pub use enc_payload::EncryptedPayload; +#[cfg(feature = "crypto")] +pub use key_bundle::KeyBundle; +pub use server_timestamp::ServerTimestamp; +pub use sync_guid::Guid; + +// Collection names are almost always `static, so we use a `Cow`: +// * Either a `String` or a `'static &str` can be `.into()`'d into one of these. +// * Cloning one with a `'static &str` is extremely cheap and doesn't allocate memory. +pub type CollectionName = std::borrow::Cow<'static, str>; + +// For skip_serializing_if +fn skip_if_default<T: PartialEq + Default>(v: &T) -> bool { + *v == T::default() +} diff --git a/third_party/rust/sync15/src/record_types.rs b/third_party/rust/sync15/src/record_types.rs new file mode 100644 index 0000000000..f8f1449fdc --- /dev/null +++ b/third_party/rust/sync15/src/record_types.rs @@ -0,0 +1,51 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +use serde_derive::*; +use std::collections::HashMap; +use sync_guid::Guid; + +// Known record formats. 
+ +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct MetaGlobalEngine { + pub version: usize, + #[serde(rename = "syncID")] + pub sync_id: Guid, +} + +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct MetaGlobalRecord { + #[serde(rename = "syncID")] + pub sync_id: Guid, + #[serde(rename = "storageVersion")] + pub storage_version: usize, + #[serde(default)] + pub engines: HashMap<String, MetaGlobalEngine>, + #[serde(default)] + pub declined: Vec<String>, +} + +#[derive(Deserialize, Serialize, Clone, Debug, Eq, PartialEq)] +pub struct CryptoKeysRecord { + pub id: Guid, + pub collection: String, + pub default: [String; 2], + pub collections: HashMap<String, [String; 2]>, +} + +#[cfg(test)] +#[test] +fn test_deserialize_meta_global() { + let record = serde_json::json!({ + "syncID": "abcd1234abcd", + "storageVersion": 1, + }) + .to_string(); + let r = serde_json::from_str::<MetaGlobalRecord>(&record).unwrap(); + assert_eq!(r.sync_id, "abcd1234abcd"); + assert_eq!(r.storage_version, 1); + assert_eq!(r.engines.len(), 0); + assert_eq!(r.declined.len(), 0); +} diff --git a/third_party/rust/sync15/src/server_timestamp.rs b/third_party/rust/sync15/src/server_timestamp.rs new file mode 100644 index 0000000000..7dd7a7fc42 --- /dev/null +++ b/third_party/rust/sync15/src/server_timestamp.rs @@ -0,0 +1,122 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ +use std::time::Duration; + +/// Typesafe way to manage server timestamps without accidentally mixing them up with +/// local ones. +#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Default)] +pub struct ServerTimestamp(pub i64); + +impl ServerTimestamp { + pub fn from_float_seconds(ts: f64) -> Self { + let rf = (ts * 1000.0).round(); + if !rf.is_finite() || rf < 0.0 || rf >= i64::max_value() as f64 { + error_support::report_error!("sync15-illegal-timestamp", "Illegal timestamp: {}", ts); + ServerTimestamp(0) + } else { + ServerTimestamp(rf as i64) + } + } + + pub fn from_millis(ts: i64) -> Self { + // Catch it in tests, but just complain and replace with 0 otherwise. + debug_assert!(ts >= 0, "Bad timestamp: {}", ts); + if ts >= 0 { + Self(ts) + } else { + error_support::report_error!( + "sync15-illegal-timestamp", + "Illegal timestamp, substituting 0: {}", + ts + ); + Self(0) + } + } +} + +// This lets us use these in hyper header! blocks. +impl std::str::FromStr for ServerTimestamp { + type Err = std::num::ParseFloatError; + fn from_str(s: &str) -> Result<Self, Self::Err> { + let val = f64::from_str(s)?; + Ok(Self::from_float_seconds(val)) + } +} + +impl std::fmt::Display for ServerTimestamp { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.0 as f64 / 1000.0) + } +} + +impl ServerTimestamp { + pub const EPOCH: ServerTimestamp = ServerTimestamp(0); + + /// Returns None if `other` is later than `self` (Duration may not represent + /// negative timespans in rust). + #[inline] + pub fn duration_since(self, other: ServerTimestamp) -> Option<Duration> { + let delta = self.0 - other.0; + if delta < 0 { + None + } else { + Some(Duration::from_millis(delta as u64)) + } + } + + /// Get the milliseconds for the timestamp. 
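// For example, ServerTimestamp::from_float_seconds(123.456).as_millis() is
// 123_456, and "123.456".parse::<ServerTimestamp>() produces the same value.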
+ #[inline] + pub fn as_millis(self) -> i64 { + self.0 + } +} + +impl serde::ser::Serialize for ServerTimestamp { + fn serialize<S: serde::ser::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { + serializer.serialize_f64(self.0 as f64 / 1000.0) + } +} + +impl<'de> serde::de::Deserialize<'de> for ServerTimestamp { + fn deserialize<D: serde::de::Deserializer<'de>>(d: D) -> Result<Self, D::Error> { + f64::deserialize(d).map(Self::from_float_seconds) + } +} + +#[cfg(test)] +mod test { + use super::*; + + #[test] + fn test_server_timestamp() { + let t0 = ServerTimestamp(10_300_150); + let t1 = ServerTimestamp(10_100_050); + assert!(t1.duration_since(t0).is_none()); + assert!(t0.duration_since(t1).is_some()); + let dur = t0.duration_since(t1).unwrap(); + assert_eq!(dur.as_secs(), 200); + assert_eq!(dur.subsec_nanos(), 100_000_000); + } + + #[test] + fn test_serde() { + let ts = ServerTimestamp(123_456); + + // test serialize + let ser = serde_json::to_string(&ts).unwrap(); + assert_eq!("123.456".to_string(), ser); + + // test deserialize of float + let ts: ServerTimestamp = serde_json::from_str(&ser).unwrap(); + assert_eq!(ServerTimestamp(123_456), ts); + + // test deserialize of whole number + let ts: ServerTimestamp = serde_json::from_str("123").unwrap(); + assert_eq!(ServerTimestamp(123_000), ts); + + // test deserialize of negative number + let ts: ServerTimestamp = serde_json::from_str("-123").unwrap(); + assert_eq!(ServerTimestamp(0), ts); + } +} diff --git a/third_party/rust/sync15/src/telemetry.rs b/third_party/rust/sync15/src/telemetry.rs new file mode 100644 index 0000000000..1d5519c937 --- /dev/null +++ b/third_party/rust/sync15/src/telemetry.rs @@ -0,0 +1,824 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +//! Manage recording sync telemetry. Assumes some external telemetry +//! library/code which manages submitting. + +use crate::error::Error; +#[cfg(feature = "sync-client")] +use crate::error::ErrorResponse; + +use std::collections::HashMap; +use std::time; + +use serde::{ser, Serialize, Serializer}; + +// A test helper, used by the many test modules below. +#[cfg(test)] +fn assert_json<T: ?Sized>(v: &T, expected: serde_json::Value) +where + T: serde::Serialize, +{ + assert_eq!( + serde_json::to_value(v).expect("should get a value"), + expected + ); +} + +/// What we record for 'when' and 'took' in a telemetry record. +#[derive(Debug, Serialize)] +struct WhenTook { + when: f64, + #[serde(skip_serializing_if = "crate::skip_if_default")] + took: u64, +} + +/// What we track while recording 'when' and 'took. It serializes as a WhenTook, +/// except when .finished() hasn't been called, in which case it panics. +#[derive(Debug)] +enum Stopwatch { + Started(time::SystemTime, time::Instant), + Finished(WhenTook), +} + +impl Default for Stopwatch { + fn default() -> Self { + Stopwatch::new() + } +} + +impl Stopwatch { + fn new() -> Self { + Stopwatch::Started(time::SystemTime::now(), time::Instant::now()) + } + + // For tests we don't want real timestamps because we test against literals. 
+ #[cfg(test)] + fn finished(&self) -> Self { + Stopwatch::Finished(WhenTook { when: 0.0, took: 0 }) + } + + #[cfg(not(test))] + fn finished(&self) -> Self { + match self { + Stopwatch::Started(st, si) => { + let std = st.duration_since(time::UNIX_EPOCH).unwrap_or_default(); + let when = std.as_secs() as f64; // we don't want sub-sec accuracy. Do we need to write a float? + + let sid = si.elapsed(); + let took = sid.as_secs() * 1000 + (u64::from(sid.subsec_nanos()) / 1_000_000); + Stopwatch::Finished(WhenTook { when, took }) + } + _ => { + unreachable!("can't finish twice"); + } + } + } +} + +impl Serialize for Stopwatch { + fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error> + where + S: Serializer, + { + match self { + Stopwatch::Started(_, _) => Err(ser::Error::custom("StopWatch has not been finished")), + Stopwatch::Finished(c) => c.serialize(serializer), + } + } +} + +#[cfg(test)] +mod stopwatch_tests { + use super::*; + + // A wrapper struct because we flatten - this struct should serialize with + // 'when' and 'took' keys (but with no 'sw'.) + #[derive(Debug, Serialize)] + struct WT { + #[serde(flatten)] + sw: Stopwatch, + } + + #[test] + fn test_not_finished() { + let wt = WT { + sw: Stopwatch::new(), + }; + serde_json::to_string(&wt).expect_err("unfinished stopwatch should fail"); + } + + #[test] + fn test() { + assert_json( + &WT { + sw: Stopwatch::Finished(WhenTook { when: 1.0, took: 1 }), + }, + serde_json::json!({"when": 1.0, "took": 1}), + ); + assert_json( + &WT { + sw: Stopwatch::Finished(WhenTook { when: 1.0, took: 0 }), + }, + serde_json::json!({"when": 1.0}), + ); + } +} + +/// A generic "Event" - suitable for all kinds of pings (although this module +/// only cares about the sync ping) +#[derive(Debug, Serialize)] +pub struct Event { + // We use static str references as we expect values to be literals. + object: &'static str, + + method: &'static str, + + // Maybe "value" should be a string? + #[serde(skip_serializing_if = "Option::is_none")] + value: Option<&'static str>, + + // we expect the keys to be literals but values are real strings. 
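// e.g. Event::new("engine", "error").value("network").extra("reason", msg) -
// the builder methods below also enforce the length limits asserted there.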
+ #[serde(skip_serializing_if = "Option::is_none")] + extra: Option<HashMap<&'static str, String>>, +} + +impl Event { + pub fn new(object: &'static str, method: &'static str) -> Self { + assert!(object.len() <= 20); + assert!(method.len() <= 20); + Self { + object, + method, + value: None, + extra: None, + } + } + + pub fn value(mut self, v: &'static str) -> Self { + assert!(v.len() <= 80); + self.value = Some(v); + self + } + + pub fn extra(mut self, key: &'static str, val: String) -> Self { + assert!(key.len() <= 15); + assert!(val.len() <= 85); + match self.extra { + None => self.extra = Some(HashMap::new()), + Some(ref e) => assert!(e.len() < 10), + } + self.extra.as_mut().unwrap().insert(key, val); + self + } +} + +#[cfg(test)] +mod test_events { + use super::*; + + #[test] + #[should_panic] + fn test_invalid_length_ctor() { + Event::new("A very long object value", "Method"); + } + + #[test] + #[should_panic] + fn test_invalid_length_extra_key() { + Event::new("O", "M").extra("A very long key value", "v".to_string()); + } + + #[test] + #[should_panic] + fn test_invalid_length_extra_val() { + let l = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ + abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"; + Event::new("O", "M").extra("k", l.to_string()); + } + + #[test] + #[should_panic] + fn test_too_many_extras() { + let l = "abcdefghijk"; + let mut e = Event::new("Object", "Method"); + for i in 0..l.len() { + e = e.extra(&l[i..=i], "v".to_string()); + } + } + + #[test] + fn test_json() { + assert_json( + &Event::new("Object", "Method").value("Value"), + serde_json::json!({"object": "Object", "method": "Method", "value": "Value"}), + ); + + assert_json( + &Event::new("Object", "Method").extra("one", "one".to_string()), + serde_json::json!({"object": "Object", + "method": "Method", + "extra": {"one": "one"} + }), + ) + } +} + +/// A Sync failure. 
+#[derive(Debug, Serialize)] +#[serde(tag = "name")] +pub enum SyncFailure { + #[serde(rename = "shutdownerror")] + Shutdown, + + #[serde(rename = "othererror")] + Other { error: String }, + + #[serde(rename = "unexpectederror")] + Unexpected { error: String }, + + #[serde(rename = "autherror")] + Auth { from: &'static str }, + + #[serde(rename = "httperror")] + Http { code: u16 }, +} + +#[cfg(test)] +mod test { + use super::*; + + #[test] + fn reprs() { + assert_json( + &SyncFailure::Shutdown, + serde_json::json!({"name": "shutdownerror"}), + ); + + assert_json( + &SyncFailure::Other { + error: "dunno".to_string(), + }, + serde_json::json!({"name": "othererror", "error": "dunno"}), + ); + + assert_json( + &SyncFailure::Unexpected { + error: "dunno".to_string(), + }, + serde_json::json!({"name": "unexpectederror", "error": "dunno"}), + ); + + assert_json( + &SyncFailure::Auth { from: "FxA" }, + serde_json::json!({"name": "autherror", "from": "FxA"}), + ); + + assert_json( + &SyncFailure::Http { code: 500 }, + serde_json::json!({"name": "httperror", "code": 500}), + ); + } +} + +/// Incoming record for an engine's sync +#[derive(Debug, Default, Serialize)] +pub struct EngineIncoming { + #[serde(skip_serializing_if = "crate::skip_if_default")] + applied: u32, + + #[serde(skip_serializing_if = "crate::skip_if_default")] + failed: u32, + + #[serde(rename = "newFailed")] + #[serde(skip_serializing_if = "crate::skip_if_default")] + new_failed: u32, + + #[serde(skip_serializing_if = "crate::skip_if_default")] + reconciled: u32, +} + +impl EngineIncoming { + pub fn new() -> Self { + Self { + ..Default::default() + } + } + + // A helper used via skip_serializing_if + fn is_empty(inc: &Option<Self>) -> bool { + match inc { + Some(a) => a.applied == 0 && a.failed == 0 && a.new_failed == 0 && a.reconciled == 0, + None => true, + } + } + + /// Increment the value of `applied` by `n`. + #[inline] + pub fn applied(&mut self, n: u32) { + self.applied += n; + } + + /// Increment the value of `failed` by `n`. + #[inline] + pub fn failed(&mut self, n: u32) { + self.failed += n; + } + + /// Increment the value of `new_failed` by `n`. + #[inline] + pub fn new_failed(&mut self, n: u32) { + self.new_failed += n; + } + + /// Increment the value of `reconciled` by `n`. + #[inline] + pub fn reconciled(&mut self, n: u32) { + self.reconciled += n; + } + + /// Get the value of `applied`. Mostly useful for testing. + #[inline] + pub fn get_applied(&self) -> u32 { + self.applied + } + + /// Get the value of `failed`. Mostly useful for testing. + #[inline] + pub fn get_failed(&self) -> u32 { + self.failed + } + + /// Get the value of `new_failed`. Mostly useful for testing. + #[inline] + pub fn get_new_failed(&self) -> u32 { + self.new_failed + } + + /// Get the value of `reconciled`. Mostly useful for testing. + #[inline] + pub fn get_reconciled(&self) -> u32 { + self.reconciled + } +} + +/// Outgoing record for an engine's sync +#[derive(Debug, Default, Serialize)] +pub struct EngineOutgoing { + #[serde(skip_serializing_if = "crate::skip_if_default")] + sent: usize, + + #[serde(skip_serializing_if = "crate::skip_if_default")] + failed: usize, +} + +impl EngineOutgoing { + pub fn new() -> Self { + EngineOutgoing { + ..Default::default() + } + } + + #[inline] + pub fn sent(&mut self, n: usize) { + self.sent += n; + } + + #[inline] + pub fn failed(&mut self, n: usize) { + self.failed += n; + } +} + +/// One engine's sync. 
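// Typical flow, as exercised by the engine_tests module below (illustrative
// engine name and counts):
//     let mut incoming = EngineIncoming::new();
//     incoming.applied(10);
//     let mut engine = Engine::new("bookmarks");
//     engine.incoming(incoming);
//     sync_telemetry.engine(engine); // finishes the stopwatch and records it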
+#[derive(Debug, Serialize)] +pub struct Engine { + name: String, + + #[serde(flatten)] + when_took: Stopwatch, + + #[serde(skip_serializing_if = "EngineIncoming::is_empty")] + incoming: Option<EngineIncoming>, + + #[serde(skip_serializing_if = "Vec::is_empty")] + outgoing: Vec<EngineOutgoing>, // one for each batch posted. + + #[serde(skip_serializing_if = "Option::is_none")] + #[serde(rename = "failureReason")] + failure: Option<SyncFailure>, + + #[serde(skip_serializing_if = "Option::is_none")] + validation: Option<Validation>, +} + +impl Engine { + pub fn new(name: impl Into<String>) -> Self { + Self { + name: name.into(), + when_took: Stopwatch::new(), + incoming: None, + outgoing: Vec::new(), + failure: None, + validation: None, + } + } + + pub fn incoming(&mut self, inc: EngineIncoming) { + assert!(self.incoming.is_none()); + self.incoming = Some(inc); + } + + pub fn outgoing(&mut self, out: EngineOutgoing) { + self.outgoing.push(out); + } + + pub fn failure(&mut self, err: impl Into<SyncFailure>) { + // Currently we take the first error, under the assumption that the + // first is the most important and all others stem from that. + let failure = err.into(); + if self.failure.is_none() { + self.failure = Some(failure); + } else { + log::warn!( + "engine already has recorded a failure of {:?} - ignoring {:?}", + &self.failure, + &failure + ); + } + } + + pub fn validation(&mut self, v: Validation) { + assert!(self.validation.is_none()); + self.validation = Some(v); + } + + fn finished(&mut self) { + self.when_took = self.when_took.finished(); + } +} + +#[derive(Debug, Default, Serialize)] +pub struct Validation { + version: u32, + + #[serde(skip_serializing_if = "Vec::is_empty")] + problems: Vec<Problem>, + + #[serde(skip_serializing_if = "Option::is_none")] + #[serde(rename = "failureReason")] + failure: Option<SyncFailure>, +} + +impl Validation { + pub fn with_version(version: u32) -> Validation { + Validation { + version, + ..Validation::default() + } + } + + pub fn problem(&mut self, name: &'static str, count: usize) -> &mut Self { + if count > 0 { + self.problems.push(Problem { name, count }); + } + self + } +} + +#[derive(Debug, Default, Serialize)] +pub struct Problem { + name: &'static str, + #[serde(skip_serializing_if = "crate::skip_if_default")] + count: usize, +} + +#[cfg(test)] +mod engine_tests { + use super::*; + + #[test] + fn test_engine() { + let mut e = Engine::new("test_engine"); + e.finished(); + assert_json(&e, serde_json::json!({"name": "test_engine", "when": 0.0})); + } + + #[test] + fn test_engine_not_finished() { + let e = Engine::new("test_engine"); + serde_json::to_value(e).expect_err("unfinished stopwatch should fail"); + } + + #[test] + fn test_incoming() { + let mut i = EngineIncoming::new(); + i.applied(1); + i.failed(2); + let mut e = Engine::new("TestEngine"); + e.incoming(i); + e.finished(); + assert_json( + &e, + serde_json::json!({"name": "TestEngine", "when": 0.0, "incoming": {"applied": 1, "failed": 2}}), + ); + } + + #[test] + fn test_outgoing() { + let mut o = EngineOutgoing::new(); + o.sent(2); + o.failed(1); + let mut e = Engine::new("TestEngine"); + e.outgoing(o); + e.finished(); + assert_json( + &e, + serde_json::json!({"name": "TestEngine", "when": 0.0, "outgoing": [{"sent": 2, "failed": 1}]}), + ); + } + + #[test] + fn test_failure() { + let mut e = Engine::new("TestEngine"); + e.failure(SyncFailure::Http { code: 500 }); + e.finished(); + assert_json( + &e, + serde_json::json!({"name": "TestEngine", + "when": 0.0, + "failureReason": 
{"name": "httperror", "code": 500} + }), + ); + } + + #[test] + fn test_raw() { + let mut e = Engine::new("TestEngine"); + let mut inc = EngineIncoming::new(); + inc.applied(10); + e.incoming(inc); + let mut out = EngineOutgoing::new(); + out.sent(1); + e.outgoing(out); + e.failure(SyncFailure::Http { code: 500 }); + e.finished(); + + assert_eq!(e.outgoing.len(), 1); + assert_eq!(e.incoming.as_ref().unwrap().applied, 10); + assert_eq!(e.outgoing[0].sent, 1); + assert!(e.failure.is_some()); + serde_json::to_string(&e).expect("should get json"); + } +} + +/// A single sync. May have many engines, may have its own failure. +#[derive(Debug, Serialize, Default)] +pub struct SyncTelemetry { + #[serde(flatten)] + when_took: Stopwatch, + + #[serde(skip_serializing_if = "Vec::is_empty")] + engines: Vec<Engine>, + + #[serde(skip_serializing_if = "Option::is_none")] + #[serde(rename = "failureReason")] + failure: Option<SyncFailure>, +} + +impl SyncTelemetry { + pub fn new() -> Self { + Default::default() + } + + pub fn engine(&mut self, mut e: Engine) { + e.finished(); + self.engines.push(e); + } + + pub fn failure(&mut self, failure: SyncFailure) { + assert!(self.failure.is_none()); + self.failure = Some(failure); + } + + // Note that unlike other 'finished' methods, this isn't private - someone + // needs to explicitly call this before handling the json payload to + // whatever ends up submitting it. + pub fn finished(&mut self) { + self.when_took = self.when_took.finished(); + } +} + +#[cfg(test)] +mod sync_tests { + use super::*; + + #[test] + fn test_accum() { + let mut s = SyncTelemetry::new(); + let mut inc = EngineIncoming::new(); + inc.applied(10); + let mut e = Engine::new("test_engine"); + e.incoming(inc); + e.failure(SyncFailure::Http { code: 500 }); + e.finished(); + s.engine(e); + s.finished(); + + assert_json( + &s, + serde_json::json!({ + "when": 0.0, + "engines": [{ + "name":"test_engine", + "when":0.0, + "incoming": { + "applied": 10 + }, + "failureReason": { + "name": "httperror", + "code": 500 + } + }] + }), + ); + } + + #[test] + fn test_multi_engine() { + let mut inc_e1 = EngineIncoming::new(); + inc_e1.applied(1); + let mut e1 = Engine::new("test_engine"); + e1.incoming(inc_e1); + + let mut inc_e2 = EngineIncoming::new(); + inc_e2.failed(1); + let mut e2 = Engine::new("test_engine_2"); + e2.incoming(inc_e2); + let mut out_e2 = EngineOutgoing::new(); + out_e2.sent(1); + e2.outgoing(out_e2); + + let mut s = SyncTelemetry::new(); + s.engine(e1); + s.engine(e2); + s.failure(SyncFailure::Http { code: 500 }); + s.finished(); + assert_json( + &s, + serde_json::json!({ + "when": 0.0, + "engines": [{ + "name": "test_engine", + "when": 0.0, + "incoming": { + "applied": 1 + } + },{ + "name": "test_engine_2", + "when": 0.0, + "incoming": { + "failed": 1 + }, + "outgoing": [{ + "sent": 1 + }] + }], + "failureReason": { + "name": "httperror", + "code": 500 + } + }), + ); + } +} + +/// The Sync ping payload, as documented at +/// https://firefox-source-docs.mozilla.org/toolkit/components/telemetry/telemetry/data/sync-ping.html. +/// May have many syncs, may have many events. However, due to the architecture +/// of apps which use these components, this payload is almost certainly not +/// suitable for submitting directly. 
For example, we will always return a +/// payload with exactly 1 sync, and it will not know certain other fields +/// in the payload, such as the *hashed* FxA device ID (see +/// https://searchfox.org/mozilla-central/rev/c3ebaf6de2d481c262c04bb9657eaf76bf47e2ac/services/sync/modules/browserid_identity.js#185 +/// for an example of how the device ID is constructed). The intention is that +/// consumers of this will use this to create a "real" payload - eg, accumulating +/// until some threshold number of syncs is reached, and contributing +/// additional data which only the consumer knows. +#[derive(Debug, Serialize, Default)] +pub struct SyncTelemetryPing { + version: u32, + + uid: Option<String>, + + #[serde(skip_serializing_if = "Vec::is_empty")] + events: Vec<Event>, + + #[serde(skip_serializing_if = "Vec::is_empty")] + syncs: Vec<SyncTelemetry>, +} + +impl SyncTelemetryPing { + pub fn new() -> Self { + Self { + version: 1, + ..Default::default() + } + } + + pub fn uid(&mut self, uid: String) { + if let Some(ref existing) = self.uid { + if *existing != uid { + log::warn!("existing uid ${} being replaced by {}", existing, uid); + } + } + self.uid = Some(uid); + } + + pub fn sync(&mut self, mut s: SyncTelemetry) { + s.finished(); + self.syncs.push(s); + } + + pub fn event(&mut self, e: Event) { + self.events.push(e); + } +} + +ffi_support::implement_into_ffi_by_json!(SyncTelemetryPing); + +#[cfg(test)] +mod ping_tests { + use super::*; + #[test] + fn test_ping() { + let engine = Engine::new("test"); + let mut s = SyncTelemetry::new(); + s.engine(engine); + let mut p = SyncTelemetryPing::new(); + p.uid("user-id".into()); + p.sync(s); + let event = Event::new("foo", "bar"); + p.event(event); + assert_json( + &p, + serde_json::json!({ + "events": [{ + "method": "bar", "object": "foo" + }], + "syncs": [{ + "engines": [{ + "name": "test", "when": 0.0 + }], + "when": 0.0 + }], + "uid": "user-id", + "version": 1 + }), + ); + } +} + +impl<'a> From<&'a Error> for SyncFailure { + fn from(e: &Error) -> SyncFailure { + match e { + #[cfg(feature = "sync-client")] + Error::TokenserverHttpError(status) => { + if *status == 401 { + SyncFailure::Auth { + from: "tokenserver", + } + } else { + SyncFailure::Http { code: *status } + } + } + #[cfg(feature = "sync-client")] + Error::BackoffError(_) => SyncFailure::Http { code: 503 }, + #[cfg(feature = "sync-client")] + Error::StorageHttpError(ref e) => match e { + ErrorResponse::NotFound { .. } => SyncFailure::Http { code: 404 }, + ErrorResponse::Unauthorized { .. } => SyncFailure::Auth { from: "storage" }, + ErrorResponse::PreconditionFailed { .. } => SyncFailure::Http { code: 412 }, + ErrorResponse::ServerError { status, .. } => SyncFailure::Http { code: *status }, + ErrorResponse::RequestFailed { status, .. 
} => SyncFailure::Http { code: *status }, + }, + #[cfg(feature = "crypto")] + Error::CryptoError(ref e) => SyncFailure::Unexpected { + error: e.to_string(), + }, + #[cfg(feature = "sync-client")] + Error::RequestError(ref e) => SyncFailure::Unexpected { + error: e.to_string(), + }, + #[cfg(feature = "sync-client")] + Error::UnexpectedStatus(ref e) => SyncFailure::Http { code: e.status }, + Error::Interrupted(ref e) => SyncFailure::Unexpected { + error: e.to_string(), + }, + e => SyncFailure::Other { + error: e.to_string(), + }, + } + } +} diff --git a/third_party/rust/synstructure/.cargo-checksum.json b/third_party/rust/synstructure/.cargo-checksum.json new file mode 100644 index 0000000000..bb02a4ca35 --- /dev/null +++ b/third_party/rust/synstructure/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{"Cargo.toml":"b9f3ce31a9ac80dc24f4afac7108b7cff44399f8d7503dc4addea4d256431a73","LICENSE":"219920e865eee70b7dcfc948a86b099e7f4fe2de01bcca2ca9a20c0a033f2b59","README.md":"a528e7356db49ea813c3290dd4f6b15d8e6c0a870cfc07a2df0f3d1381c575bf","src/lib.rs":"b8d9885399b22a5ee92b51e4ca757427da07775e940b25e1573180d9e8faf7d0","src/macros.rs":"e7cf1808faf5dac5ca25bd40ad99e95c2aab4f9899bd9327898761ea86271f7c"},"package":"f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f"}
\ No newline at end of file diff --git a/third_party/rust/synstructure/Cargo.toml b/third_party/rust/synstructure/Cargo.toml new file mode 100644 index 0000000000..03398567e6 --- /dev/null +++ b/third_party/rust/synstructure/Cargo.toml @@ -0,0 +1,44 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g., crates.io) dependencies. +# +# If you are reading this file be aware that the original Cargo.toml +# will likely look very different (and much more reasonable). +# See Cargo.toml.orig for the original contents. + +[package] +edition = "2018" +name = "synstructure" +version = "0.12.6" +authors = ["Nika Layzell <nika@thelayzells.com>"] +include = ["src/**/*", "Cargo.toml", "README.md", "LICENSE"] +description = "Helper methods and macros for custom derives" +documentation = "https://docs.rs/synstructure" +readme = "README.md" +keywords = ["syn", "macros", "derive", "expand_substructure", "enum"] +license = "MIT" +repository = "https://github.com/mystor/synstructure" +[dependencies.proc-macro2] +version = "1" +default-features = false + +[dependencies.quote] +version = "1" +default-features = false + +[dependencies.syn] +version = "1" +features = ["derive", "parsing", "printing", "clone-impls", "visit", "extra-traits"] +default-features = false + +[dependencies.unicode-xid] +version = "0.2" +[dev-dependencies.synstructure_test_traits] +version = "0.1" + +[features] +default = ["proc-macro"] +proc-macro = ["proc-macro2/proc-macro", "syn/proc-macro", "quote/proc-macro"] diff --git a/third_party/rust/synstructure/LICENSE b/third_party/rust/synstructure/LICENSE new file mode 100644 index 0000000000..f78f1c15d1 --- /dev/null +++ b/third_party/rust/synstructure/LICENSE @@ -0,0 +1,7 @@ +Copyright 2016 Nika Layzell + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
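Before the vendored README and source of synstructure below, a brief orientation may help readers of this patch. The sketch that follows is not part of the imported sources; it only illustrates how a downstream proc-macro crate typically consumes synstructure 0.12 (`decl_derive!`, `fold`, `add_bounds`, `gen_impl`), assuming a hypothetical consumer crate configured with `proc-macro = true` and the dependency versions from the Cargo.toml above. The trait name `CountFields` and the derive function are placeholders, not anything defined in this patch.

```rust
// Hypothetical consumer crate (not part of this patch), assuming a
// Cargo.toml with `proc-macro = true` and dependencies on
// synstructure "0.12", quote "1", and proc-macro2 "1".

use quote::quote;

// The derive counts the fields bound in each variant of the input type.
// `CountFields` is a placeholder trait assumed to exist at the call site:
//     pub trait CountFields { fn count_fields(&self) -> usize; }
fn count_fields_derive(mut s: synstructure::Structure) -> proc_macro2::TokenStream {
    // Counting fields does not require the field types to implement the
    // trait, so skip the default `T: CountFields` where-clause bounds.
    s.add_bounds(synstructure::AddBounds::None);

    // Fold over every bound field of every variant, adding 1 per field;
    // `fold` emits one match arm per variant.
    let body = s.fold(0usize, |acc, _bi| quote!(#acc + 1));

    // Wrap the match in a generated impl block for the deriving type.
    s.gen_impl(quote! {
        gen impl CountFields for @Self {
            fn count_fields(&self) -> usize {
                match *self { #body }
            }
        }
    })
}

// Expands to the actual #[proc_macro_derive(CountFields)] entry point.
synstructure::decl_derive!([CountFields] => count_fields_derive);
```

Derived on something like `enum A<T> { B(i32, T), C(i32) }`, this expands to a single `match` with one arm per variant, the same shape as the `WalkFields` and `Interest` expansions shown in the README and module documentation below.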
diff --git a/third_party/rust/synstructure/README.md b/third_party/rust/synstructure/README.md new file mode 100644 index 0000000000..b72b7bd9d5 --- /dev/null +++ b/third_party/rust/synstructure/README.md @@ -0,0 +1,159 @@ +# synstructure + +[![Latest Version](https://img.shields.io/crates/v/synstructure.svg)](https://crates.io/crates/synstructure) +[![Documentation](https://docs.rs/synstructure/badge.svg)](https://docs.rs/synstructure) +[![Build Status](https://travis-ci.org/mystor/synstructure.svg?branch=master)](https://travis-ci.org/mystor/synstructure) +[![Rustc Version 1.31+](https://img.shields.io/badge/rustc-1.31+-lightgray.svg)](https://blog.rust-lang.org/2018/12/06/Rust-1.31-and-rust-2018.html) + +> NOTE: What follows is an exerpt from the module level documentation. For full +> details read the docs on [docs.rs](https://docs.rs/synstructure/) + +This crate provides helper types for matching against enum variants, and +extracting bindings to each of the fields in the deriving Struct or Enum in +a generic way. + +If you are writing a `#[derive]` which needs to perform some operation on +every field, then you have come to the right place! + +# Example: `WalkFields` +### Trait Implementation +```rust +pub trait WalkFields: std::any::Any { + fn walk_fields(&self, walk: &mut FnMut(&WalkFields)); +} +impl WalkFields for i32 { + fn walk_fields(&self, _walk: &mut FnMut(&WalkFields)) {} +} +``` + +### Custom Derive +```rust +#[macro_use] +extern crate synstructure; +#[macro_use] +extern crate quote; +extern crate proc_macro2; + +fn walkfields_derive(s: synstructure::Structure) -> proc_macro2::TokenStream { + let body = s.each(|bi| quote!{ + walk(#bi) + }); + + s.bound_impl(quote!(example_traits::WalkFields), quote!{ + fn walk_fields(&self, walk: &mut FnMut(&example_traits::WalkFields)) { + match *self { #body } + } + }) +} +decl_derive!([WalkFields] => walkfields_derive); + +/* + * Test Case + */ +fn main() { + test_derive! 
{ + walkfields_derive { + enum A<T> { + B(i32, T), + C(i32), + } + } + expands to { + #[allow(non_upper_case_globals)] + const _DERIVE_example_traits_WalkFields_FOR_A: () = { + extern crate example_traits; + impl<T> example_traits::WalkFields for A<T> + where T: example_traits::WalkFields + { + fn walk_fields(&self, walk: &mut FnMut(&example_traits::WalkFields)) { + match *self { + A::B(ref __binding_0, ref __binding_1,) => { + { walk(__binding_0) } + { walk(__binding_1) } + } + A::C(ref __binding_0,) => { + { walk(__binding_0) } + } + } + } + } + }; + } + } +} +``` + +# Example: `Interest` +### Trait Implementation +```rust +pub trait Interest { + fn interesting(&self) -> bool; +} +impl Interest for i32 { + fn interesting(&self) -> bool { *self > 0 } +} +``` + +### Custom Derive +```rust +#[macro_use] +extern crate synstructure; +#[macro_use] +extern crate quote; +extern crate proc_macro2; + +fn interest_derive(mut s: synstructure::Structure) -> proc_macro2::TokenStream { + let body = s.fold(false, |acc, bi| quote!{ + #acc || example_traits::Interest::interesting(#bi) + }); + + s.bound_impl(quote!(example_traits::Interest), quote!{ + fn interesting(&self) -> bool { + match *self { + #body + } + } + }) +} +decl_derive!([Interest] => interest_derive); + +/* + * Test Case + */ +fn main() { + test_derive!{ + interest_derive { + enum A<T> { + B(i32, T), + C(i32), + } + } + expands to { + #[allow(non_upper_case_globals)] + const _DERIVE_example_traits_Interest_FOR_A: () = { + extern crate example_traits; + impl<T> example_traits::Interest for A<T> + where T: example_traits::Interest + { + fn interesting(&self) -> bool { + match *self { + A::B(ref __binding_0, ref __binding_1,) => { + false || + example_traits::Interest::interesting(__binding_0) || + example_traits::Interest::interesting(__binding_1) + } + A::C(ref __binding_0,) => { + false || + example_traits::Interest::interesting(__binding_0) + } + } + } + } + }; + } + } +} +``` + +For more example usage, consider investigating the `abomonation_derive` crate, +which makes use of this crate, and is fairly simple. diff --git a/third_party/rust/synstructure/src/lib.rs b/third_party/rust/synstructure/src/lib.rs new file mode 100644 index 0000000000..2cd2b88011 --- /dev/null +++ b/third_party/rust/synstructure/src/lib.rs @@ -0,0 +1,2488 @@ +//! This crate provides helper types for matching against enum variants, and +//! extracting bindings to each of the fields in the deriving Struct or Enum in +//! a generic way. +//! +//! If you are writing a `#[derive]` which needs to perform some operation on +//! every field, then you have come to the right place! +//! +//! # Example: `WalkFields` +//! ### Trait Implementation +//! ``` +//! pub trait WalkFields: std::any::Any { +//! fn walk_fields(&self, walk: &mut FnMut(&WalkFields)); +//! } +//! impl WalkFields for i32 { +//! fn walk_fields(&self, _walk: &mut FnMut(&WalkFields)) {} +//! } +//! ``` +//! +//! ### Custom Derive +//! ``` +//! # use quote::quote; +//! fn walkfields_derive(s: synstructure::Structure) -> proc_macro2::TokenStream { +//! let body = s.each(|bi| quote!{ +//! walk(#bi) +//! }); +//! +//! s.gen_impl(quote! { +//! extern crate synstructure_test_traits; +//! +//! gen impl synstructure_test_traits::WalkFields for @Self { +//! fn walk_fields(&self, walk: &mut FnMut(&synstructure_test_traits::WalkFields)) { +//! match *self { #body } +//! } +//! } +//! }) +//! } +//! # const _IGNORE: &'static str = stringify!( +//! synstructure::decl_derive!([WalkFields] => walkfields_derive); +//! 
# ); +//! +//! /* +//! * Test Case +//! */ +//! fn main() { +//! synstructure::test_derive! { +//! walkfields_derive { +//! enum A<T> { +//! B(i32, T), +//! C(i32), +//! } +//! } +//! expands to { +//! #[allow(non_upper_case_globals)] +//! const _DERIVE_synstructure_test_traits_WalkFields_FOR_A: () = { +//! extern crate synstructure_test_traits; +//! impl<T> synstructure_test_traits::WalkFields for A<T> +//! where T: synstructure_test_traits::WalkFields +//! { +//! fn walk_fields(&self, walk: &mut FnMut(&synstructure_test_traits::WalkFields)) { +//! match *self { +//! A::B(ref __binding_0, ref __binding_1,) => { +//! { walk(__binding_0) } +//! { walk(__binding_1) } +//! } +//! A::C(ref __binding_0,) => { +//! { walk(__binding_0) } +//! } +//! } +//! } +//! } +//! }; +//! } +//! } +//! } +//! ``` +//! +//! # Example: `Interest` +//! ### Trait Implementation +//! ``` +//! pub trait Interest { +//! fn interesting(&self) -> bool; +//! } +//! impl Interest for i32 { +//! fn interesting(&self) -> bool { *self > 0 } +//! } +//! ``` +//! +//! ### Custom Derive +//! ``` +//! # use quote::quote; +//! fn interest_derive(mut s: synstructure::Structure) -> proc_macro2::TokenStream { +//! let body = s.fold(false, |acc, bi| quote!{ +//! #acc || synstructure_test_traits::Interest::interesting(#bi) +//! }); +//! +//! s.gen_impl(quote! { +//! extern crate synstructure_test_traits; +//! gen impl synstructure_test_traits::Interest for @Self { +//! fn interesting(&self) -> bool { +//! match *self { +//! #body +//! } +//! } +//! } +//! }) +//! } +//! # const _IGNORE: &'static str = stringify!( +//! synstructure::decl_derive!([Interest] => interest_derive); +//! # ); +//! +//! /* +//! * Test Case +//! */ +//! fn main() { +//! synstructure::test_derive!{ +//! interest_derive { +//! enum A<T> { +//! B(i32, T), +//! C(i32), +//! } +//! } +//! expands to { +//! #[allow(non_upper_case_globals)] +//! const _DERIVE_synstructure_test_traits_Interest_FOR_A: () = { +//! extern crate synstructure_test_traits; +//! impl<T> synstructure_test_traits::Interest for A<T> +//! where T: synstructure_test_traits::Interest +//! { +//! fn interesting(&self) -> bool { +//! match *self { +//! A::B(ref __binding_0, ref __binding_1,) => { +//! false || +//! synstructure_test_traits::Interest::interesting(__binding_0) || +//! synstructure_test_traits::Interest::interesting(__binding_1) +//! } +//! A::C(ref __binding_0,) => { +//! false || +//! synstructure_test_traits::Interest::interesting(__binding_0) +//! } +//! } +//! } +//! } +//! }; +//! } +//! } +//! } +//! ``` +//! +//! For more example usage, consider investigating the `abomonation_derive` crate, +//! which makes use of this crate, and is fairly simple. + +#[cfg(all( + not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))), + feature = "proc-macro" +))] +extern crate proc_macro; + +use std::collections::HashSet; + +use syn::parse::{ParseStream, Parser}; +use syn::visit::{self, Visit}; +use syn::{ + braced, punctuated, token, Attribute, Data, DeriveInput, Error, Expr, Field, Fields, + FieldsNamed, FieldsUnnamed, GenericParam, Generics, Ident, PredicateType, Result, Token, + TraitBound, Type, TypeMacro, TypeParamBound, TypePath, WhereClause, WherePredicate, +}; + +use quote::{format_ident, quote_spanned, ToTokens}; +// re-export the quote! macro so we can depend on it being around in our macro's +// implementations. 
+#[doc(hidden)] +pub use quote::quote; + +use unicode_xid::UnicodeXID; + +use proc_macro2::{Span, TokenStream, TokenTree}; + +// NOTE: This module has documentation hidden, as it only exports macros (which +// always appear in the root of the crate) and helper methods / re-exports used +// in the implementation of those macros. +#[doc(hidden)] +pub mod macros; + +/// Changes how bounds are added +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub enum AddBounds { + /// Add for fields and generics + Both, + /// Fields only + Fields, + /// Generics only + Generics, + /// None + None, + #[doc(hidden)] + __Nonexhaustive, +} + +/// The type of binding to use when generating a pattern. +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub enum BindStyle { + /// `x` + Move, + /// `mut x` + MoveMut, + /// `ref x` + Ref, + /// `ref mut x` + RefMut, +} + +impl ToTokens for BindStyle { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + BindStyle::Move => {} + BindStyle::MoveMut => quote_spanned!(Span::call_site() => mut).to_tokens(tokens), + BindStyle::Ref => quote_spanned!(Span::call_site() => ref).to_tokens(tokens), + BindStyle::RefMut => quote_spanned!(Span::call_site() => ref mut).to_tokens(tokens), + } + } +} + +// Internal method for merging seen_generics arrays together. +fn generics_fuse(res: &mut Vec<bool>, new: &[bool]) { + for (i, &flag) in new.iter().enumerate() { + if i == res.len() { + res.push(false); + } + if flag { + res[i] = true; + } + } +} + +// Internal method for extracting the set of generics which have been matched. +fn fetch_generics<'a>(set: &[bool], generics: &'a Generics) -> Vec<&'a Ident> { + let mut tys = vec![]; + for (&seen, param) in set.iter().zip(generics.params.iter()) { + if seen { + if let GenericParam::Type(tparam) = param { + tys.push(&tparam.ident) + } + } + } + tys +} + +// Internal method for sanitizing an identifier for hygiene purposes. +fn sanitize_ident(s: &str) -> Ident { + let mut res = String::with_capacity(s.len()); + for mut c in s.chars() { + if !UnicodeXID::is_xid_continue(c) { + c = '_' + } + // Deduplicate consecutive _ characters. + if res.ends_with('_') && c == '_' { + continue; + } + res.push(c); + } + Ident::new(&res, Span::call_site()) +} + +// Internal method to merge two Generics objects together intelligently. +fn merge_generics(into: &mut Generics, from: &Generics) -> Result<()> { + // Try to add the param into `into`, and merge parmas with identical names. + for p in &from.params { + for op in &into.params { + match (op, p) { + (GenericParam::Type(otp), GenericParam::Type(tp)) => { + // NOTE: This is only OK because syn ignores the span for equality purposes. + if otp.ident == tp.ident { + return Err(Error::new_spanned( + p, + format!( + "Attempted to merge conflicting generic parameters: {} and {}", + quote!(#op), + quote!(#p) + ), + )); + } + } + (GenericParam::Lifetime(olp), GenericParam::Lifetime(lp)) => { + // NOTE: This is only OK because syn ignores the span for equality purposes. + if olp.lifetime == lp.lifetime { + return Err(Error::new_spanned( + p, + format!( + "Attempted to merge conflicting generic parameters: {} and {}", + quote!(#op), + quote!(#p) + ), + )); + } + } + // We don't support merging Const parameters, because that wouldn't make much sense. + _ => (), + } + } + into.params.push(p.clone()); + } + + // Add any where clauses from the input generics object. 
+ if let Some(from_clause) = &from.where_clause { + into.make_where_clause() + .predicates + .extend(from_clause.predicates.iter().cloned()); + } + + Ok(()) +} + +/// Helper method which does the same thing as rustc 1.20's +/// `Option::get_or_insert_with`. This method is used to keep backwards +/// compatibility with rustc 1.15. +fn get_or_insert_with<T, F>(opt: &mut Option<T>, f: F) -> &mut T +where + F: FnOnce() -> T, +{ + if opt.is_none() { + *opt = Some(f()); + } + + match opt { + Some(v) => v, + None => unreachable!(), + } +} + +/// Information about a specific binding. This contains both an `Ident` +/// reference to the given field, and the syn `&'a Field` descriptor for that +/// field. +/// +/// This type supports `quote::ToTokens`, so can be directly used within the +/// `quote!` macro. It expands to a reference to the matched field. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct BindingInfo<'a> { + /// The name which this BindingInfo will bind to. + pub binding: Ident, + + /// The type of binding which this BindingInfo will create. + pub style: BindStyle, + + field: &'a Field, + + // These are used to determine which type parameters are avaliable. + generics: &'a Generics, + seen_generics: Vec<bool>, + // The original index of the binding + // this will not change when .filter() is called + index: usize, +} + +impl<'a> ToTokens for BindingInfo<'a> { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.binding.to_tokens(tokens); + } +} + +impl<'a> BindingInfo<'a> { + /// Returns a reference to the underlying `syn` AST node which this + /// `BindingInfo` references + pub fn ast(&self) -> &'a Field { + self.field + } + + /// Generates the pattern fragment for this field binding. + /// + /// # Example + /// ``` + /// # use synstructure::*; + /// let di: syn::DeriveInput = syn::parse_quote! { + /// enum A { + /// B{ a: i32, b: i32 }, + /// C(u32), + /// } + /// }; + /// let s = Structure::new(&di); + /// + /// assert_eq!( + /// s.variants()[0].bindings()[0].pat().to_string(), + /// quote! { + /// ref __binding_0 + /// }.to_string() + /// ); + /// ``` + pub fn pat(&self) -> TokenStream { + let BindingInfo { binding, style, .. } = self; + quote!(#style #binding) + } + + /// Returns a list of the type parameters which are referenced in this + /// field's type. + /// + /// # Caveat + /// + /// If the field contains any macros in type position, all parameters will + /// be considered bound. This is because we cannot determine which type + /// parameters are bound by type macros. + /// + /// # Example + /// ``` + /// # use synstructure::*; + /// let di: syn::DeriveInput = syn::parse_quote! { + /// struct A<T, U> { + /// a: Option<T>, + /// b: U, + /// } + /// }; + /// let mut s = Structure::new(&di); + /// + /// assert_eq!( + /// s.variants()[0].bindings()[0].referenced_ty_params(), + /// &["e::format_ident!("T")] + /// ); + /// ``` + pub fn referenced_ty_params(&self) -> Vec<&'a Ident> { + fetch_generics(&self.seen_generics, self.generics) + } +} + +/// This type is similar to `syn`'s `Variant` type, however each of the fields +/// are references rather than owned. When this is used as the AST for a real +/// variant, this struct simply borrows the fields of the `syn::Variant`, +/// however this type may also be used as the sole variant for a struct. 
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct VariantAst<'a> { + pub attrs: &'a [Attribute], + pub ident: &'a Ident, + pub fields: &'a Fields, + pub discriminant: &'a Option<(token::Eq, Expr)>, +} + +/// A wrapper around a `syn::DeriveInput`'s variant which provides utilities +/// for destructuring `Variant`s with `match` expressions. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct VariantInfo<'a> { + pub prefix: Option<&'a Ident>, + bindings: Vec<BindingInfo<'a>>, + ast: VariantAst<'a>, + generics: &'a Generics, + // The original length of `bindings` before any `.filter()` calls + original_length: usize, +} + +/// Helper function used by the VariantInfo constructor. Walks all of the types +/// in `field` and returns a list of the type parameters from `ty_params` which +/// are referenced in the field. +fn get_ty_params(field: &Field, generics: &Generics) -> Vec<bool> { + // Helper type. Discovers all identifiers inside of the visited type, + // and calls a callback with them. + struct BoundTypeLocator<'a> { + result: Vec<bool>, + generics: &'a Generics, + } + + impl<'a> Visit<'a> for BoundTypeLocator<'a> { + // XXX: This also (intentionally) captures paths like T::SomeType. Is + // this desirable? + fn visit_ident(&mut self, id: &Ident) { + for (idx, i) in self.generics.params.iter().enumerate() { + if let GenericParam::Type(tparam) = i { + if tparam.ident == *id { + self.result[idx] = true; + } + } + } + } + + fn visit_type_macro(&mut self, x: &'a TypeMacro) { + // If we see a type_mac declaration, then we can't know what type parameters + // it might be binding, so we presume it binds all of them. + for r in &mut self.result { + *r = true; + } + visit::visit_type_macro(self, x) + } + } + + let mut btl = BoundTypeLocator { + result: vec![false; generics.params.len()], + generics, + }; + + btl.visit_type(&field.ty); + + btl.result +} + +impl<'a> VariantInfo<'a> { + fn new(ast: VariantAst<'a>, prefix: Option<&'a Ident>, generics: &'a Generics) -> Self { + let bindings = match ast.fields { + Fields::Unit => vec![], + Fields::Unnamed(FieldsUnnamed { + unnamed: fields, .. + }) + | Fields::Named(FieldsNamed { named: fields, .. }) => { + fields + .into_iter() + .enumerate() + .map(|(i, field)| { + BindingInfo { + // XXX: This has to be call_site to avoid privacy + // when deriving on private fields. + binding: format_ident!("__binding_{}", i), + style: BindStyle::Ref, + field, + generics, + seen_generics: get_ty_params(field, generics), + index: i, + } + }) + .collect::<Vec<_>>() + } + }; + + let original_length = bindings.len(); + VariantInfo { + prefix, + bindings, + ast, + generics, + original_length, + } + } + + /// Returns a slice of the bindings in this Variant. + pub fn bindings(&self) -> &[BindingInfo<'a>] { + &self.bindings + } + + /// Returns a mut slice of the bindings in this Variant. + pub fn bindings_mut(&mut self) -> &mut [BindingInfo<'a>] { + &mut self.bindings + } + + /// Returns a `VariantAst` object which contains references to the + /// underlying `syn` AST node which this `Variant` was created from. + pub fn ast(&self) -> VariantAst<'a> { + self.ast + } + + /// True if any bindings were omitted due to a `filter` call. + pub fn omitted_bindings(&self) -> bool { + self.original_length != self.bindings.len() + } + + /// Generates the match-arm pattern which could be used to match against this Variant. + /// + /// # Example + /// ``` + /// # use synstructure::*; + /// let di: syn::DeriveInput = syn::parse_quote! 
{ + /// enum A { + /// B(i32, i32), + /// C(u32), + /// } + /// }; + /// let s = Structure::new(&di); + /// + /// assert_eq!( + /// s.variants()[0].pat().to_string(), + /// quote!{ + /// A::B(ref __binding_0, ref __binding_1,) + /// }.to_string() + /// ); + /// ``` + pub fn pat(&self) -> TokenStream { + let mut t = TokenStream::new(); + if let Some(prefix) = self.prefix { + prefix.to_tokens(&mut t); + quote!(::).to_tokens(&mut t); + } + self.ast.ident.to_tokens(&mut t); + match self.ast.fields { + Fields::Unit => { + assert!(self.bindings.is_empty()); + } + Fields::Unnamed(..) => token::Paren(Span::call_site()).surround(&mut t, |t| { + let mut expected_index = 0; + for binding in &self.bindings { + while expected_index < binding.index { + quote!(_,).to_tokens(t); + expected_index += 1; + } + binding.pat().to_tokens(t); + quote!(,).to_tokens(t); + expected_index += 1; + } + if expected_index != self.original_length { + quote!(..).to_tokens(t); + } + }), + Fields::Named(..) => token::Brace(Span::call_site()).surround(&mut t, |t| { + for binding in &self.bindings { + binding.field.ident.to_tokens(t); + quote!(:).to_tokens(t); + binding.pat().to_tokens(t); + quote!(,).to_tokens(t); + } + if self.omitted_bindings() { + quote!(..).to_tokens(t); + } + }), + } + t + } + + /// Generates the token stream required to construct the current variant. + /// + /// The init array initializes each of the fields in the order they are + /// written in `variant.ast().fields`. + /// + /// # Example + /// ``` + /// # use synstructure::*; + /// let di: syn::DeriveInput = syn::parse_quote! { + /// enum A { + /// B(usize, usize), + /// C{ v: usize }, + /// } + /// }; + /// let s = Structure::new(&di); + /// + /// assert_eq!( + /// s.variants()[0].construct(|_, i| quote!(#i)).to_string(), + /// + /// quote!{ + /// A::B(0usize, 1usize,) + /// }.to_string() + /// ); + /// + /// assert_eq!( + /// s.variants()[1].construct(|_, i| quote!(#i)).to_string(), + /// + /// quote!{ + /// A::C{ v: 0usize, } + /// }.to_string() + /// ); + /// ``` + pub fn construct<F, T>(&self, mut func: F) -> TokenStream + where + F: FnMut(&Field, usize) -> T, + T: ToTokens, + { + let mut t = TokenStream::new(); + if let Some(prefix) = self.prefix { + quote!(#prefix ::).to_tokens(&mut t); + } + self.ast.ident.to_tokens(&mut t); + + match &self.ast.fields { + Fields::Unit => (), + Fields::Unnamed(FieldsUnnamed { unnamed, .. }) => { + token::Paren::default().surround(&mut t, |t| { + for (i, field) in unnamed.into_iter().enumerate() { + func(field, i).to_tokens(t); + quote!(,).to_tokens(t); + } + }) + } + Fields::Named(FieldsNamed { named, .. }) => { + token::Brace::default().surround(&mut t, |t| { + for (i, field) in named.into_iter().enumerate() { + field.ident.to_tokens(t); + quote!(:).to_tokens(t); + func(field, i).to_tokens(t); + quote!(,).to_tokens(t); + } + }) + } + } + t + } + + /// Runs the passed-in function once for each bound field, passing in a `BindingInfo`. + /// and generating a `match` arm which evaluates the returned tokens. + /// + /// This method will ignore fields which are ignored through the `filter` + /// method. + /// + /// # Example + /// ``` + /// # use synstructure::*; + /// let di: syn::DeriveInput = syn::parse_quote! 
{ + /// enum A { + /// B(i32, i32), + /// C(u32), + /// } + /// }; + /// let s = Structure::new(&di); + /// + /// assert_eq!( + /// s.variants()[0].each(|bi| quote!(println!("{:?}", #bi))).to_string(), + /// + /// quote!{ + /// A::B(ref __binding_0, ref __binding_1,) => { + /// { println!("{:?}", __binding_0) } + /// { println!("{:?}", __binding_1) } + /// } + /// }.to_string() + /// ); + /// ``` + pub fn each<F, R>(&self, mut f: F) -> TokenStream + where + F: FnMut(&BindingInfo<'_>) -> R, + R: ToTokens, + { + let pat = self.pat(); + let mut body = TokenStream::new(); + for binding in &self.bindings { + token::Brace::default().surround(&mut body, |body| { + f(binding).to_tokens(body); + }); + } + quote!(#pat => { #body }) + } + + /// Runs the passed-in function once for each bound field, passing in the + /// result of the previous call, and a `BindingInfo`. generating a `match` + /// arm which evaluates to the resulting tokens. + /// + /// This method will ignore fields which are ignored through the `filter` + /// method. + /// + /// # Example + /// ``` + /// # use synstructure::*; + /// let di: syn::DeriveInput = syn::parse_quote! { + /// enum A { + /// B(i32, i32), + /// C(u32), + /// } + /// }; + /// let s = Structure::new(&di); + /// + /// assert_eq!( + /// s.variants()[0].fold(quote!(0), |acc, bi| quote!(#acc + #bi)).to_string(), + /// + /// quote!{ + /// A::B(ref __binding_0, ref __binding_1,) => { + /// 0 + __binding_0 + __binding_1 + /// } + /// }.to_string() + /// ); + /// ``` + pub fn fold<F, I, R>(&self, init: I, mut f: F) -> TokenStream + where + F: FnMut(TokenStream, &BindingInfo<'_>) -> R, + I: ToTokens, + R: ToTokens, + { + let pat = self.pat(); + let body = self.bindings.iter().fold(quote!(#init), |i, bi| { + let r = f(i, bi); + quote!(#r) + }); + quote!(#pat => { #body }) + } + + /// Filter the bindings created by this `Variant` object. This has 2 effects: + /// + /// * The bindings will no longer appear in match arms generated by methods + /// on this `Variant` or its subobjects. + /// + /// * Impl blocks created with the `bound_impl` or `unsafe_bound_impl` + /// method only consider type parameters referenced in the types of + /// non-filtered fields. + /// + /// # Example + /// ``` + /// # use synstructure::*; + /// let di: syn::DeriveInput = syn::parse_quote! { + /// enum A { + /// B{ a: i32, b: i32 }, + /// C{ a: u32 }, + /// } + /// }; + /// let mut s = Structure::new(&di); + /// + /// s.variants_mut()[0].filter(|bi| { + /// bi.ast().ident == Some(quote::format_ident!("b")) + /// }); + /// + /// assert_eq!( + /// s.each(|bi| quote!(println!("{:?}", #bi))).to_string(), + /// + /// quote!{ + /// A::B{ b: ref __binding_1, .. } => { + /// { println!("{:?}", __binding_1) } + /// } + /// A::C{ a: ref __binding_0, } => { + /// { println!("{:?}", __binding_0) } + /// } + /// }.to_string() + /// ); + /// ``` + pub fn filter<F>(&mut self, f: F) -> &mut Self + where + F: FnMut(&BindingInfo<'_>) -> bool, + { + self.bindings.retain(f); + self + } + + /// Remove the binding at the given index. + /// + /// # Panics + /// + /// Panics if the index is out of range. + pub fn remove_binding(&mut self, idx: usize) -> &mut Self { + self.bindings.remove(idx); + self + } + + /// Updates the `BindStyle` for each of the passed-in fields by calling the + /// passed-in function for each `BindingInfo`. + /// + /// # Example + /// ``` + /// # use synstructure::*; + /// let di: syn::DeriveInput = syn::parse_quote! 
{ + /// enum A { + /// B(i32, i32), + /// C(u32), + /// } + /// }; + /// let mut s = Structure::new(&di); + /// + /// s.variants_mut()[0].bind_with(|bi| BindStyle::RefMut); + /// + /// assert_eq!( + /// s.each(|bi| quote!(println!("{:?}", #bi))).to_string(), + /// + /// quote!{ + /// A::B(ref mut __binding_0, ref mut __binding_1,) => { + /// { println!("{:?}", __binding_0) } + /// { println!("{:?}", __binding_1) } + /// } + /// A::C(ref __binding_0,) => { + /// { println!("{:?}", __binding_0) } + /// } + /// }.to_string() + /// ); + /// ``` + pub fn bind_with<F>(&mut self, mut f: F) -> &mut Self + where + F: FnMut(&BindingInfo<'_>) -> BindStyle, + { + for binding in &mut self.bindings { + binding.style = f(&binding); + } + self + } + + /// Updates the binding name for each fo the passed-in fields by calling the + /// passed-in function for each `BindingInfo`. + /// + /// The function will be called with the `BindingInfo` and its index in the + /// enclosing variant. + /// + /// The default name is `__binding_{}` where `{}` is replaced with an + /// increasing number. + /// + /// # Example + /// ``` + /// # use synstructure::*; + /// let di: syn::DeriveInput = syn::parse_quote! { + /// enum A { + /// B{ a: i32, b: i32 }, + /// C{ a: u32 }, + /// } + /// }; + /// let mut s = Structure::new(&di); + /// + /// s.variants_mut()[0].binding_name(|bi, i| bi.ident.clone().unwrap()); + /// + /// assert_eq!( + /// s.each(|bi| quote!(println!("{:?}", #bi))).to_string(), + /// + /// quote!{ + /// A::B{ a: ref a, b: ref b, } => { + /// { println!("{:?}", a) } + /// { println!("{:?}", b) } + /// } + /// A::C{ a: ref __binding_0, } => { + /// { println!("{:?}", __binding_0) } + /// } + /// }.to_string() + /// ); + /// ``` + pub fn binding_name<F>(&mut self, mut f: F) -> &mut Self + where + F: FnMut(&Field, usize) -> Ident, + { + for (it, binding) in self.bindings.iter_mut().enumerate() { + binding.binding = f(binding.field, it); + } + self + } + + /// Returns a list of the type parameters which are referenced in this + /// field's type. + /// + /// # Caveat + /// + /// If the field contains any macros in type position, all parameters will + /// be considered bound. This is because we cannot determine which type + /// parameters are bound by type macros. + /// + /// # Example + /// ``` + /// # use synstructure::*; + /// let di: syn::DeriveInput = syn::parse_quote! { + /// struct A<T, U> { + /// a: Option<T>, + /// b: U, + /// } + /// }; + /// let mut s = Structure::new(&di); + /// + /// assert_eq!( + /// s.variants()[0].bindings()[0].referenced_ty_params(), + /// &["e::format_ident!("T")] + /// ); + /// ``` + pub fn referenced_ty_params(&self) -> Vec<&'a Ident> { + let mut flags = Vec::new(); + for binding in &self.bindings { + generics_fuse(&mut flags, &binding.seen_generics); + } + fetch_generics(&flags, self.generics) + } +} + +/// A wrapper around a `syn::DeriveInput` which provides utilities for creating +/// custom derive trait implementations. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct Structure<'a> { + variants: Vec<VariantInfo<'a>>, + omitted_variants: bool, + underscore_const: bool, + ast: &'a DeriveInput, + extra_impl: Vec<GenericParam>, + extra_predicates: Vec<WherePredicate>, + add_bounds: AddBounds, +} + +impl<'a> Structure<'a> { + /// Create a new `Structure` with the variants and fields from the passed-in + /// `DeriveInput`. + /// + /// # Panics + /// + /// This method will panic if the provided AST node represents an untagged + /// union. 
+ pub fn new(ast: &'a DeriveInput) -> Self { + Self::try_new(ast).expect("Unable to create synstructure::Structure") + } + + /// Create a new `Structure` with the variants and fields from the passed-in + /// `DeriveInput`. + /// + /// Unlike `Structure::new`, this method does not panic if the provided AST + /// node represents an untagged union. + pub fn try_new(ast: &'a DeriveInput) -> Result<Self> { + let variants = match &ast.data { + Data::Enum(data) => (&data.variants) + .into_iter() + .map(|v| { + VariantInfo::new( + VariantAst { + attrs: &v.attrs, + ident: &v.ident, + fields: &v.fields, + discriminant: &v.discriminant, + }, + Some(&ast.ident), + &ast.generics, + ) + }) + .collect::<Vec<_>>(), + Data::Struct(data) => { + // SAFETY NOTE: Normally putting an `Expr` in static storage + // wouldn't be safe, because it could contain `Term` objects + // which use thread-local interning. However, this static always + // contains the value `None`. Thus, it will never contain any + // unsafe values. + struct UnsafeMakeSync(Option<(token::Eq, Expr)>); + unsafe impl Sync for UnsafeMakeSync {} + static NONE_DISCRIMINANT: UnsafeMakeSync = UnsafeMakeSync(None); + + vec![VariantInfo::new( + VariantAst { + attrs: &ast.attrs, + ident: &ast.ident, + fields: &data.fields, + discriminant: &NONE_DISCRIMINANT.0, + }, + None, + &ast.generics, + )] + } + Data::Union(_) => { + return Err(Error::new_spanned( + ast, + "unexpected unsupported untagged union", + )); + } + }; + + Ok(Structure { + variants, + omitted_variants: false, + underscore_const: false, + ast, + extra_impl: vec![], + extra_predicates: vec![], + add_bounds: AddBounds::Both, + }) + } + + /// Returns a slice of the variants in this Structure. + pub fn variants(&self) -> &[VariantInfo<'a>] { + &self.variants + } + + /// Returns a mut slice of the variants in this Structure. + pub fn variants_mut(&mut self) -> &mut [VariantInfo<'a>] { + &mut self.variants + } + + /// Returns a reference to the underlying `syn` AST node which this + /// `Structure` was created from. + pub fn ast(&self) -> &'a DeriveInput { + self.ast + } + + /// True if any variants were omitted due to a `filter_variants` call. + pub fn omitted_variants(&self) -> bool { + self.omitted_variants + } + + /// Runs the passed-in function once for each bound field, passing in a `BindingInfo`. + /// and generating `match` arms which evaluate the returned tokens. + /// + /// This method will ignore variants or fields which are ignored through the + /// `filter` and `filter_variant` methods. + /// + /// # Example + /// ``` + /// # use synstructure::*; + /// let di: syn::DeriveInput = syn::parse_quote! 
{ + /// enum A { + /// B(i32, i32), + /// C(u32), + /// } + /// }; + /// let s = Structure::new(&di); + /// + /// assert_eq!( + /// s.each(|bi| quote!(println!("{:?}", #bi))).to_string(), + /// + /// quote!{ + /// A::B(ref __binding_0, ref __binding_1,) => { + /// { println!("{:?}", __binding_0) } + /// { println!("{:?}", __binding_1) } + /// } + /// A::C(ref __binding_0,) => { + /// { println!("{:?}", __binding_0) } + /// } + /// }.to_string() + /// ); + /// ``` + pub fn each<F, R>(&self, mut f: F) -> TokenStream + where + F: FnMut(&BindingInfo<'_>) -> R, + R: ToTokens, + { + let mut t = TokenStream::new(); + for variant in &self.variants { + variant.each(&mut f).to_tokens(&mut t); + } + if self.omitted_variants { + quote!(_ => {}).to_tokens(&mut t); + } + t + } + + /// Runs the passed-in function once for each bound field, passing in the + /// result of the previous call, and a `BindingInfo`. generating `match` + /// arms which evaluate to the resulting tokens. + /// + /// This method will ignore variants or fields which are ignored through the + /// `filter` and `filter_variant` methods. + /// + /// If a variant has been ignored, it will return the `init` value. + /// + /// # Example + /// ``` + /// # use synstructure::*; + /// let di: syn::DeriveInput = syn::parse_quote! { + /// enum A { + /// B(i32, i32), + /// C(u32), + /// } + /// }; + /// let s = Structure::new(&di); + /// + /// assert_eq!( + /// s.fold(quote!(0), |acc, bi| quote!(#acc + #bi)).to_string(), + /// + /// quote!{ + /// A::B(ref __binding_0, ref __binding_1,) => { + /// 0 + __binding_0 + __binding_1 + /// } + /// A::C(ref __binding_0,) => { + /// 0 + __binding_0 + /// } + /// }.to_string() + /// ); + /// ``` + pub fn fold<F, I, R>(&self, init: I, mut f: F) -> TokenStream + where + F: FnMut(TokenStream, &BindingInfo<'_>) -> R, + I: ToTokens, + R: ToTokens, + { + let mut t = TokenStream::new(); + for variant in &self.variants { + variant.fold(&init, &mut f).to_tokens(&mut t); + } + if self.omitted_variants { + quote!(_ => { #init }).to_tokens(&mut t); + } + t + } + + /// Runs the passed-in function once for each variant, passing in a + /// `VariantInfo`. and generating `match` arms which evaluate the returned + /// tokens. + /// + /// This method will ignore variants and not bind fields which are ignored + /// through the `filter` and `filter_variant` methods. + /// + /// # Example + /// ``` + /// # use synstructure::*; + /// let di: syn::DeriveInput = syn::parse_quote! { + /// enum A { + /// B(i32, i32), + /// C(u32), + /// } + /// }; + /// let s = Structure::new(&di); + /// + /// assert_eq!( + /// s.each_variant(|v| { + /// let name = &v.ast().ident; + /// quote!(println!(stringify!(#name))) + /// }).to_string(), + /// + /// quote!{ + /// A::B(ref __binding_0, ref __binding_1,) => { + /// println!(stringify!(B)) + /// } + /// A::C(ref __binding_0,) => { + /// println!(stringify!(C)) + /// } + /// }.to_string() + /// ); + /// ``` + pub fn each_variant<F, R>(&self, mut f: F) -> TokenStream + where + F: FnMut(&VariantInfo<'_>) -> R, + R: ToTokens, + { + let mut t = TokenStream::new(); + for variant in &self.variants { + let pat = variant.pat(); + let body = f(variant); + quote!(#pat => { #body }).to_tokens(&mut t); + } + if self.omitted_variants { + quote!(_ => {}).to_tokens(&mut t); + } + t + } + + /// Filter the bindings created by this `Structure` object. This has 2 effects: + /// + /// * The bindings will no longer appear in match arms generated by methods + /// on this `Structure` or its subobjects. 
+ /// + /// * Impl blocks created with the `bound_impl` or `unsafe_bound_impl` + /// method only consider type parameters referenced in the types of + /// non-filtered fields. + /// + /// # Example + /// ``` + /// # use synstructure::*; + /// let di: syn::DeriveInput = syn::parse_quote! { + /// enum A { + /// B{ a: i32, b: i32 }, + /// C{ a: u32 }, + /// } + /// }; + /// let mut s = Structure::new(&di); + /// + /// s.filter(|bi| { + /// bi.ast().ident == Some(quote::format_ident!("a")) + /// }); + /// + /// assert_eq!( + /// s.each(|bi| quote!(println!("{:?}", #bi))).to_string(), + /// + /// quote!{ + /// A::B{ a: ref __binding_0, .. } => { + /// { println!("{:?}", __binding_0) } + /// } + /// A::C{ a: ref __binding_0, } => { + /// { println!("{:?}", __binding_0) } + /// } + /// }.to_string() + /// ); + /// ``` + pub fn filter<F>(&mut self, mut f: F) -> &mut Self + where + F: FnMut(&BindingInfo<'_>) -> bool, + { + for variant in &mut self.variants { + variant.filter(&mut f); + } + self + } + + /// Specify additional where predicate bounds which should be generated by + /// impl-generating functions such as `gen_impl`, `bound_impl`, and + /// `unsafe_bound_impl`. + /// + /// # Example + /// ``` + /// # use synstructure::*; + /// let di: syn::DeriveInput = syn::parse_quote! { + /// enum A<T, U> { + /// B(T), + /// C(Option<U>), + /// } + /// }; + /// let mut s = Structure::new(&di); + /// + /// // Add an additional where predicate. + /// s.add_where_predicate(syn::parse_quote!(T: std::fmt::Display)); + /// + /// assert_eq!( + /// s.bound_impl(quote!(krate::Trait), quote!{ + /// fn a() {} + /// }).to_string(), + /// quote!{ + /// #[allow(non_upper_case_globals)] + /// #[doc(hidden)] + /// const _DERIVE_krate_Trait_FOR_A: () = { + /// extern crate krate; + /// impl<T, U> krate::Trait for A<T, U> + /// where T: std::fmt::Display, + /// T: krate::Trait, + /// Option<U>: krate::Trait, + /// U: krate::Trait + /// { + /// fn a() {} + /// } + /// }; + /// }.to_string() + /// ); + /// ``` + pub fn add_where_predicate(&mut self, pred: WherePredicate) -> &mut Self { + self.extra_predicates.push(pred); + self + } + + /// Specify which bounds should be generated by impl-generating functions + /// such as `gen_impl`, `bound_impl`, and `unsafe_bound_impl`. + /// + /// The default behaviour is to generate both field and generic bounds from + /// type parameters. + /// + /// # Example + /// ``` + /// # use synstructure::*; + /// let di: syn::DeriveInput = syn::parse_quote! { + /// enum A<T, U> { + /// B(T), + /// C(Option<U>), + /// } + /// }; + /// let mut s = Structure::new(&di); + /// + /// // Limit bounds to only generics. + /// s.add_bounds(AddBounds::Generics); + /// + /// assert_eq!( + /// s.bound_impl(quote!(krate::Trait), quote!{ + /// fn a() {} + /// }).to_string(), + /// quote!{ + /// #[allow(non_upper_case_globals)] + /// #[doc(hidden)] + /// const _DERIVE_krate_Trait_FOR_A: () = { + /// extern crate krate; + /// impl<T, U> krate::Trait for A<T, U> + /// where T: krate::Trait, + /// U: krate::Trait + /// { + /// fn a() {} + /// } + /// }; + /// }.to_string() + /// ); + /// ``` + pub fn add_bounds(&mut self, mode: AddBounds) -> &mut Self { + self.add_bounds = mode; + self + } + + /// Filter the variants matched by this `Structure` object. 
This has 2 effects: + /// + /// * Match arms destructuring these variants will no longer be generated by + /// methods on this `Structure` + /// + /// * Impl blocks created with the `bound_impl` or `unsafe_bound_impl` + /// method only consider type parameters referenced in the types of + /// fields in non-fitered variants. + /// + /// # Example + /// ``` + /// # use synstructure::*; + /// let di: syn::DeriveInput = syn::parse_quote! { + /// enum A { + /// B(i32, i32), + /// C(u32), + /// } + /// }; + /// + /// let mut s = Structure::new(&di); + /// + /// s.filter_variants(|v| v.ast().ident != "B"); + /// + /// assert_eq!( + /// s.each(|bi| quote!(println!("{:?}", #bi))).to_string(), + /// + /// quote!{ + /// A::C(ref __binding_0,) => { + /// { println!("{:?}", __binding_0) } + /// } + /// _ => {} + /// }.to_string() + /// ); + /// ``` + pub fn filter_variants<F>(&mut self, f: F) -> &mut Self + where + F: FnMut(&VariantInfo<'_>) -> bool, + { + let before_len = self.variants.len(); + self.variants.retain(f); + if self.variants.len() != before_len { + self.omitted_variants = true; + } + self + } + + /// Remove the variant at the given index. + /// + /// # Panics + /// + /// Panics if the index is out of range. + pub fn remove_variant(&mut self, idx: usize) -> &mut Self { + self.variants.remove(idx); + self.omitted_variants = true; + self + } + + /// Updates the `BindStyle` for each of the passed-in fields by calling the + /// passed-in function for each `BindingInfo`. + /// + /// # Example + /// ``` + /// # use synstructure::*; + /// let di: syn::DeriveInput = syn::parse_quote! { + /// enum A { + /// B(i32, i32), + /// C(u32), + /// } + /// }; + /// let mut s = Structure::new(&di); + /// + /// s.bind_with(|bi| BindStyle::RefMut); + /// + /// assert_eq!( + /// s.each(|bi| quote!(println!("{:?}", #bi))).to_string(), + /// + /// quote!{ + /// A::B(ref mut __binding_0, ref mut __binding_1,) => { + /// { println!("{:?}", __binding_0) } + /// { println!("{:?}", __binding_1) } + /// } + /// A::C(ref mut __binding_0,) => { + /// { println!("{:?}", __binding_0) } + /// } + /// }.to_string() + /// ); + /// ``` + pub fn bind_with<F>(&mut self, mut f: F) -> &mut Self + where + F: FnMut(&BindingInfo<'_>) -> BindStyle, + { + for variant in &mut self.variants { + variant.bind_with(&mut f); + } + self + } + + /// Updates the binding name for each fo the passed-in fields by calling the + /// passed-in function for each `BindingInfo`. + /// + /// The function will be called with the `BindingInfo` and its index in the + /// enclosing variant. + /// + /// The default name is `__binding_{}` where `{}` is replaced with an + /// increasing number. + /// + /// # Example + /// ``` + /// # use synstructure::*; + /// let di: syn::DeriveInput = syn::parse_quote! 
{ + /// enum A { + /// B{ a: i32, b: i32 }, + /// C{ a: u32 }, + /// } + /// }; + /// let mut s = Structure::new(&di); + /// + /// s.binding_name(|bi, i| bi.ident.clone().unwrap()); + /// + /// assert_eq!( + /// s.each(|bi| quote!(println!("{:?}", #bi))).to_string(), + /// + /// quote!{ + /// A::B{ a: ref a, b: ref b, } => { + /// { println!("{:?}", a) } + /// { println!("{:?}", b) } + /// } + /// A::C{ a: ref a, } => { + /// { println!("{:?}", a) } + /// } + /// }.to_string() + /// ); + /// ``` + pub fn binding_name<F>(&mut self, mut f: F) -> &mut Self + where + F: FnMut(&Field, usize) -> Ident, + { + for variant in &mut self.variants { + variant.binding_name(&mut f); + } + self + } + + /// Returns a list of the type parameters which are refrenced in the types + /// of non-filtered fields / variants. + /// + /// # Caveat + /// + /// If the struct contains any macros in type position, all parameters will + /// be considered bound. This is because we cannot determine which type + /// parameters are bound by type macros. + /// + /// # Example + /// ``` + /// # use synstructure::*; + /// let di: syn::DeriveInput = syn::parse_quote! { + /// enum A<T, U> { + /// B(T, i32), + /// C(Option<U>), + /// } + /// }; + /// let mut s = Structure::new(&di); + /// + /// s.filter_variants(|v| v.ast().ident != "C"); + /// + /// assert_eq!( + /// s.referenced_ty_params(), + /// &["e::format_ident!("T")] + /// ); + /// ``` + pub fn referenced_ty_params(&self) -> Vec<&'a Ident> { + let mut flags = Vec::new(); + for variant in &self.variants { + for binding in &variant.bindings { + generics_fuse(&mut flags, &binding.seen_generics); + } + } + fetch_generics(&flags, &self.ast.generics) + } + + /// Adds an `impl<>` generic parameter. + /// This can be used when the trait to be derived needs some extra generic parameters. + /// + /// # Example + /// ``` + /// # use synstructure::*; + /// let di: syn::DeriveInput = syn::parse_quote! { + /// enum A<T, U> { + /// B(T), + /// C(Option<U>), + /// } + /// }; + /// let mut s = Structure::new(&di); + /// let generic: syn::GenericParam = syn::parse_quote!(X: krate::AnotherTrait); + /// + /// assert_eq!( + /// s.add_impl_generic(generic) + /// .bound_impl(quote!(krate::Trait<X>), + /// quote!{ + /// fn a() {} + /// } + /// ).to_string(), + /// quote!{ + /// #[allow(non_upper_case_globals)] + /// #[doc(hidden)] + /// const _DERIVE_krate_Trait_X_FOR_A: () = { + /// extern crate krate; + /// impl<T, U, X: krate::AnotherTrait> krate::Trait<X> for A<T, U> + /// where T : krate :: Trait < X >, + /// Option<U>: krate::Trait<X>, + /// U: krate::Trait<X> + /// { + /// fn a() {} + /// } + /// }; + /// }.to_string() + /// ); + /// ``` + pub fn add_impl_generic(&mut self, param: GenericParam) -> &mut Self { + self.extra_impl.push(param); + self + } + + /// Add trait bounds for a trait with the given path for each type parmaeter + /// referenced in the types of non-filtered fields. + /// + /// # Caveat + /// + /// If the method contains any macros in type position, all parameters will + /// be considered bound. This is because we cannot determine which type + /// parameters are bound by type macros. + pub fn add_trait_bounds( + &self, + bound: &TraitBound, + where_clause: &mut Option<WhereClause>, + mode: AddBounds, + ) { + // If we have any explicit where predicates, make sure to add them first. 
+ if !self.extra_predicates.is_empty() { + let clause = get_or_insert_with(&mut *where_clause, || WhereClause { + where_token: Default::default(), + predicates: punctuated::Punctuated::new(), + }); + clause + .predicates + .extend(self.extra_predicates.iter().cloned()); + } + + let mut seen = HashSet::new(); + let mut pred = |ty: Type| { + if !seen.contains(&ty) { + seen.insert(ty.clone()); + + // Add a predicate. + let clause = get_or_insert_with(&mut *where_clause, || WhereClause { + where_token: Default::default(), + predicates: punctuated::Punctuated::new(), + }); + clause.predicates.push(WherePredicate::Type(PredicateType { + lifetimes: None, + bounded_ty: ty, + colon_token: Default::default(), + bounds: Some(punctuated::Pair::End(TypeParamBound::Trait(bound.clone()))) + .into_iter() + .collect(), + })); + } + }; + + for variant in &self.variants { + for binding in &variant.bindings { + match mode { + AddBounds::Both | AddBounds::Fields => { + for &seen in &binding.seen_generics { + if seen { + pred(binding.ast().ty.clone()); + break; + } + } + } + _ => {} + } + + match mode { + AddBounds::Both | AddBounds::Generics => { + for param in binding.referenced_ty_params() { + pred(Type::Path(TypePath { + qself: None, + path: (*param).clone().into(), + })); + } + } + _ => {} + } + } + } + } + + /// Configure whether to use `const _` instead of a generated const name in + /// code generated by `gen_impl` and `bound_impl`. + /// + /// This syntax is only supported by Rust 1.37 and later versions. + /// + /// Defaults to `false` for backwards compatibility reasons. + /// + /// # Example + /// + /// ``` + /// # use synstructure::*; + /// let di: syn::DeriveInput = syn::parse_quote! { + /// struct MyStruct; + /// }; + /// let mut s = Structure::new(&di); + /// + /// assert_eq!( + /// s.underscore_const(true) + /// .gen_impl(quote! { gen impl Trait for @Self { } }) + /// .to_string(), + /// quote! { + /// const _: () = { + /// impl Trait for MyStruct { } + /// }; + /// } + /// .to_string() + /// ); + /// + /// assert_eq!( + /// s.underscore_const(false) + /// .gen_impl(quote! { gen impl Trait for @Self { } }) + /// .to_string(), + /// quote! { + /// #[allow(non_upper_case_globals)] + /// const _DERIVE_Trait_FOR_MyStruct: () = { + /// impl Trait for MyStruct { } + /// }; + /// } + /// .to_string() + /// ); + /// ``` + pub fn underscore_const(&mut self, enabled: bool) -> &mut Self { + self.underscore_const = enabled; + self + } + + /// > NOTE: This method's features are superseded by `Structure::gen_impl`. + /// + /// Creates an `impl` block with the required generic type fields filled in + /// to implement the trait `path`. + /// + /// This method also adds where clauses to the impl requiring that all + /// referenced type parameters implement the trait `path`. + /// + /// # Hygiene and Paths + /// + /// This method wraps the impl block inside of a `const` (see the example + /// below). In this scope, the first segment of the passed-in path is + /// `extern crate`-ed in. If you don't want to generate that `extern crate` + /// item, use a global path. + /// + /// This means that if you are implementing `my_crate::Trait`, you simply + /// write `s.bound_impl(quote!(my_crate::Trait), quote!(...))`, and for the + /// entirety of the definition, you can refer to your crate as `my_crate`. + /// + /// # Caveat + /// + /// If the method contains any macros in type position, all parameters will + /// be considered bound.
This is because we cannot determine which type + /// parameters are bound by type macros. + /// + /// # Panics + /// + /// Panics if the path string parameter is not a valid `TraitBound`. + /// + /// # Example + /// ``` + /// # use synstructure::*; + /// let di: syn::DeriveInput = syn::parse_quote! { + /// enum A<T, U> { + /// B(T), + /// C(Option<U>), + /// } + /// }; + /// let mut s = Structure::new(&di); + /// + /// s.filter_variants(|v| v.ast().ident != "B"); + /// + /// assert_eq!( + /// s.bound_impl(quote!(krate::Trait), quote!{ + /// fn a() {} + /// }).to_string(), + /// quote!{ + /// #[allow(non_upper_case_globals)] + /// #[doc(hidden)] + /// const _DERIVE_krate_Trait_FOR_A: () = { + /// extern crate krate; + /// impl<T, U> krate::Trait for A<T, U> + /// where Option<U>: krate::Trait, + /// U: krate::Trait + /// { + /// fn a() {} + /// } + /// }; + /// }.to_string() + /// ); + /// ``` + pub fn bound_impl<P: ToTokens, B: ToTokens>(&self, path: P, body: B) -> TokenStream { + self.impl_internal( + path.into_token_stream(), + body.into_token_stream(), + quote!(), + None, + ) + } + + /// > NOTE: This method's features are superseded by `Structure::gen_impl`. + /// + /// Creates an `impl` block with the required generic type fields filled in + /// to implement the unsafe trait `path`. + /// + /// This method also adds where clauses to the impl requiring that all + /// referenced type parameters implement the trait `path`. + /// + /// # Hygiene and Paths + /// + /// This method wraps the impl block inside of a `const` (see the example + /// below). In this scope, the first segment of the passed-in path is + /// `extern crate`-ed in. If you don't want to generate that `extern crate` + /// item, use a global path. + /// + /// This means that if you are implementing `my_crate::Trait`, you simply + /// write `s.bound_impl(quote!(my_crate::Trait), quote!(...))`, and for the + /// entirety of the definition, you can refer to your crate as `my_crate`. + /// + /// # Caveat + /// + /// If the method contains any macros in type position, all parameters will + /// be considered bound. This is because we cannot determine which type + /// parameters are bound by type macros. + /// + /// # Panics + /// + /// Panics if the path string parameter is not a valid `TraitBound`. + /// + /// # Example + /// ``` + /// # use synstructure::*; + /// let di: syn::DeriveInput = syn::parse_quote! { + /// enum A<T, U> { + /// B(T), + /// C(Option<U>), + /// } + /// }; + /// let mut s = Structure::new(&di); + /// + /// s.filter_variants(|v| v.ast().ident != "B"); + /// + /// assert_eq!( + /// s.unsafe_bound_impl(quote!(krate::Trait), quote!{ + /// fn a() {} + /// }).to_string(), + /// quote!{ + /// #[allow(non_upper_case_globals)] + /// #[doc(hidden)] + /// const _DERIVE_krate_Trait_FOR_A: () = { + /// extern crate krate; + /// unsafe impl<T, U> krate::Trait for A<T, U> + /// where Option<U>: krate::Trait, + /// U: krate::Trait + /// { + /// fn a() {} + /// } + /// }; + /// }.to_string() + /// ); + /// ``` + pub fn unsafe_bound_impl<P: ToTokens, B: ToTokens>(&self, path: P, body: B) -> TokenStream { + self.impl_internal( + path.into_token_stream(), + body.into_token_stream(), + quote!(unsafe), + None, + ) + } + + /// > NOTE: This method's features are superseded by `Structure::gen_impl`. + /// + /// Creates an `impl` block with the required generic type fields filled in + /// to implement the trait `path`. + /// + /// This method will not add any where clauses to the impl.
+ /// + /// # Hygiene and Paths + /// + /// This method wraps the impl block inside of a `const` (see the example + /// below). In this scope, the first segment of the passed-in path is + /// `extern crate`-ed in. If you don't want to generate that `extern crate` + /// item, use a global path. + /// + /// This means that if you are implementing `my_crate::Trait`, you simply + /// write `s.bound_impl(quote!(my_crate::Trait), quote!(...))`, and for the + /// entirety of the definition, you can refer to your crate as `my_crate`. + /// + /// # Panics + /// + /// Panics if the path string parameter is not a valid `TraitBound`. + /// + /// # Example + /// ``` + /// # use synstructure::*; + /// let di: syn::DeriveInput = syn::parse_quote! { + /// enum A<T, U> { + /// B(T), + /// C(Option<U>), + /// } + /// }; + /// let mut s = Structure::new(&di); + /// + /// s.filter_variants(|v| v.ast().ident != "B"); + /// + /// assert_eq!( + /// s.unbound_impl(quote!(krate::Trait), quote!{ + /// fn a() {} + /// }).to_string(), + /// quote!{ + /// #[allow(non_upper_case_globals)] + /// #[doc(hidden)] + /// const _DERIVE_krate_Trait_FOR_A: () = { + /// extern crate krate; + /// impl<T, U> krate::Trait for A<T, U> { + /// fn a() {} + /// } + /// }; + /// }.to_string() + /// ); + /// ``` + pub fn unbound_impl<P: ToTokens, B: ToTokens>(&self, path: P, body: B) -> TokenStream { + self.impl_internal( + path.into_token_stream(), + body.into_token_stream(), + quote!(), + Some(AddBounds::None), + ) + } + + /// > NOTE: This method's features are superseded by `Structure::gen_impl`. + /// + /// Creates an `impl` block with the required generic type fields filled in + /// to implement the unsafe trait `path`. + /// + /// This method will not add any where clauses to the impl. + /// + /// # Hygiene and Paths + /// + /// This method wraps the impl block inside of a `const` (see the example + /// below). In this scope, the first segment of the passed-in path is + /// `extern crate`-ed in. If you don't want to generate that `extern crate` + /// item, use a global path. + /// + /// This means that if you are implementing `my_crate::Trait`, you simply + /// write `s.bound_impl(quote!(my_crate::Trait), quote!(...))`, and for the + /// entirety of the definition, you can refer to your crate as `my_crate`. + /// + /// # Panics + /// + /// Panics if the path string parameter is not a valid `TraitBound`. + /// + /// # Example + /// ``` + /// # use synstructure::*; + /// let di: syn::DeriveInput = syn::parse_quote!
{ + /// enum A<T, U> { + /// B(T), + /// C(Option<U>), + /// } + /// }; + /// let mut s = Structure::new(&di); + /// + /// s.filter_variants(|v| v.ast().ident != "B"); + /// + /// assert_eq!( + /// s.unsafe_unbound_impl(quote!(krate::Trait), quote!{ + /// fn a() {} + /// }).to_string(), + /// quote!{ + /// #[allow(non_upper_case_globals)] + /// #[doc(hidden)] + /// const _DERIVE_krate_Trait_FOR_A: () = { + /// extern crate krate; + /// unsafe impl<T, U> krate::Trait for A<T, U> { + /// fn a() {} + /// } + /// }; + /// }.to_string() + /// ); + /// ``` + #[deprecated] + pub fn unsafe_unbound_impl<P: ToTokens, B: ToTokens>(&self, path: P, body: B) -> TokenStream { + self.impl_internal( + path.into_token_stream(), + body.into_token_stream(), + quote!(unsafe), + Some(AddBounds::None), + ) + } + + fn impl_internal( + &self, + path: TokenStream, + body: TokenStream, + safety: TokenStream, + mode: Option<AddBounds>, + ) -> TokenStream { + let mode = mode.unwrap_or(self.add_bounds); + let name = &self.ast.ident; + let mut gen_clone = self.ast.generics.clone(); + gen_clone.params.extend(self.extra_impl.clone().into_iter()); + let (impl_generics, _, _) = gen_clone.split_for_impl(); + let (_, ty_generics, where_clause) = self.ast.generics.split_for_impl(); + + let bound = syn::parse2::<TraitBound>(path) + .expect("`path` argument must be a valid rust trait bound"); + + let mut where_clause = where_clause.cloned(); + self.add_trait_bounds(&bound, &mut where_clause, mode); + + // This function is smart. If a global path is passed, no extern crate + // statement will be generated, however, a relative path will cause the + // crate which it is relative to to be imported within the current + // scope. + let mut extern_crate = quote!(); + if bound.path.leading_colon.is_none() { + if let Some(seg) = bound.path.segments.first() { + let seg = &seg.ident; + extern_crate = quote! { extern crate #seg; }; + } + } + + let generated = quote! { + #extern_crate + #safety impl #impl_generics #bound for #name #ty_generics #where_clause { + #body + } + }; + + if self.underscore_const { + quote! { + const _: () = { #generated }; + } + } else { + let dummy_const: Ident = sanitize_ident(&format!( + "_DERIVE_{}_FOR_{}", + (&bound).into_token_stream(), + name.into_token_stream(), + )); + quote! { + #[allow(non_upper_case_globals)] + #[doc(hidden)] + const #dummy_const: () = { + #generated + }; + } + } + } + + /// Generate an impl block for the given struct. This impl block will + /// automatically use hygiene tricks to avoid polluting the caller's + /// namespace, and will automatically add trait bounds for generic type + /// parameters. + /// + /// # Syntax + /// + /// This function accepts its arguments as a `TokenStream`. The recommended way + /// to call this function is passing the result of invoking the `quote!` + /// macro to it. + /// + /// ```ignore + /// s.gen_impl(quote! { + /// // You can write any items which you want to import into scope here. + /// // For example, you may want to include an `extern crate` for the + /// // crate which implements your trait. These items will only be + /// // visible to the code you generate, and won't be exposed to the + /// // consuming crate + /// extern crate krate; + /// + /// // You can also add `use` statements here to bring types or traits + /// // into scope. + /// // + /// // WARNING: Try not to use common names here, because the stable + /// // version of syn does not support hygiene and you could accidentally + /// // shadow types from the caller crate. 
+ /// use krate::Trait as MyTrait; + /// + /// // The actual impl block is a `gen impl` or `gen unsafe impl` block. + /// // You can use `@Self` to refer to the structure's type. + /// gen impl MyTrait for @Self { + /// fn f(&self) { ... } + /// } + /// }) + /// ``` + /// + /// The most common usage of this method involves loading the crate the + /// target trait comes from with `extern crate`, and then invoking a `gen + /// impl` block. + /// + /// # Hygiene + /// + /// This method tries to handle hygiene intelligently for both stable and + /// unstable proc-macro implementations, however there are visible + /// differences. + /// + /// The output of every `gen_impl` function is wrapped in a dummy `const` + /// value, to ensure that it is given its own scope, and any values brought + /// into scope are not leaked to the calling crate. + /// + /// By default, the above invocation may generate an output like the + /// following: + /// + /// ```ignore + /// const _DERIVE_krate_Trait_FOR_Struct: () = { + /// extern crate krate; + /// use krate::Trait as MyTrait; + /// impl<T> MyTrait for Struct<T> where T: MyTrait { + /// fn f(&self) { ... } + /// } + /// }; + /// ``` + /// + /// The `Structure` may also be configured with the [`underscore_const`] method + /// to generate `const _` instead. + /// + /// ```ignore + /// const _: () = { + /// extern crate krate; + /// use krate::Trait as MyTrait; + /// impl<T> MyTrait for Struct<T> where T: MyTrait { + /// fn f(&self) { ... } + /// } + /// }; + /// ``` + /// + /// ### Using the `std` crate + /// + /// If you are using `quote!()` to implement your trait, with the + /// `proc-macro2/nightly` feature, `std` isn't considered to be in scope for + /// your macro. This means that if you use types from `std` in your + /// procedural macro, you'll want to explicitly load it with an `extern + /// crate std;`. + /// + /// ### Absolute paths + /// + /// You should generally avoid using absolute paths in your generated code, + /// as they will resolve very differently when using the stable and nightly + /// versions of `proc-macro2`. Instead, load the crates you need to use + /// explicitly with `extern crate` and refer to them through those local names. + /// + /// # Trait Bounds + /// + /// This method will automatically add trait bounds for any type parameters + /// which are referenced within the types of non-ignored fields. + /// + /// Additional type parameters may be added with the generics syntax after + /// the `impl` keyword. + /// + /// ### Type Macro Caveat + /// + /// If the method contains any macros in type position, all parameters will + /// be considered bound. This is because we cannot determine which type + /// parameters are bound by type macros. + /// + /// # Errors + /// + /// This function will generate a `compile_error!` if additional type + /// parameters added by `impl<..>` conflict with generic type parameters on + /// the original struct. + /// + /// # Panics + /// + /// This function will panic if the input `TokenStream` is not well-formed. + /// + /// # Example Usage + /// + /// ``` + /// # use synstructure::*; + /// let di: syn::DeriveInput = syn::parse_quote! { + /// enum A<T, U> { + /// B(T), + /// C(Option<U>), + /// } + /// }; + /// let mut s = Structure::new(&di); + /// + /// s.filter_variants(|v| v.ast().ident != "B"); + /// + /// assert_eq!( + /// s.gen_impl(quote!
{ + /// extern crate krate; + /// gen impl krate::Trait for @Self { + /// fn a() {} + /// } + /// }).to_string(), + /// quote!{ + /// #[allow(non_upper_case_globals)] + /// const _DERIVE_krate_Trait_FOR_A: () = { + /// extern crate krate; + /// impl<T, U> krate::Trait for A<T, U> + /// where + /// Option<U>: krate::Trait, + /// U: krate::Trait + /// { + /// fn a() {} + /// } + /// }; + /// }.to_string() + /// ); + /// + /// // NOTE: You can also add extra generics after the impl + /// assert_eq!( + /// s.gen_impl(quote! { + /// extern crate krate; + /// gen impl<X: krate::OtherTrait> krate::Trait<X> for @Self + /// where + /// X: Send + Sync, + /// { + /// fn a() {} + /// } + /// }).to_string(), + /// quote!{ + /// #[allow(non_upper_case_globals)] + /// const _DERIVE_krate_Trait_X_FOR_A: () = { + /// extern crate krate; + /// impl<X: krate::OtherTrait, T, U> krate::Trait<X> for A<T, U> + /// where + /// X: Send + Sync, + /// Option<U>: krate::Trait<X>, + /// U: krate::Trait<X> + /// { + /// fn a() {} + /// } + /// }; + /// }.to_string() + /// ); + /// + /// // NOTE: you can generate multiple traits with a single call + /// assert_eq!( + /// s.gen_impl(quote! { + /// extern crate krate; + /// + /// gen impl krate::Trait for @Self { + /// fn a() {} + /// } + /// + /// gen impl krate::OtherTrait for @Self { + /// fn b() {} + /// } + /// }).to_string(), + /// quote!{ + /// #[allow(non_upper_case_globals)] + /// const _DERIVE_krate_Trait_FOR_A: () = { + /// extern crate krate; + /// impl<T, U> krate::Trait for A<T, U> + /// where + /// Option<U>: krate::Trait, + /// U: krate::Trait + /// { + /// fn a() {} + /// } + /// + /// impl<T, U> krate::OtherTrait for A<T, U> + /// where + /// Option<U>: krate::OtherTrait, + /// U: krate::OtherTrait + /// { + /// fn b() {} + /// } + /// }; + /// }.to_string() + /// ); + /// ``` + /// + /// Use `add_bounds` to change which bounds are generated. + pub fn gen_impl(&self, cfg: TokenStream) -> TokenStream { + Parser::parse2( + |input: ParseStream<'_>| -> Result<TokenStream> { self.gen_impl_parse(input, true) }, + cfg, + ) + .expect("Failed to parse gen_impl") + } + + fn gen_impl_parse(&self, input: ParseStream<'_>, wrap: bool) -> Result<TokenStream> { + fn parse_prefix(input: ParseStream<'_>) -> Result<Option<Token![unsafe]>> { + if input.parse::<Ident>()? != "gen" { + return Err(input.error("Expected keyword `gen`")); + } + let safety = input.parse::<Option<Token![unsafe]>>()?; + let _ = input.parse::<Token![impl]>()?; + Ok(safety) + } + + let mut before = vec![]; + loop { + if parse_prefix(&input.fork()).is_ok() { + break; + } + before.push(input.parse::<TokenTree>()?); + } + + // Parse the prefix "for real" + let safety = parse_prefix(input)?; + + // optional `<>` + let mut generics = input.parse::<Generics>()?; + + // @bound + let bound = input.parse::<TraitBound>()?; + + // `for @Self` + let _ = input.parse::<Token![for]>()?; + let _ = input.parse::<Token![@]>()?; + let _ = input.parse::<Token![Self]>()?; + + // optional `where ...` + generics.where_clause = input.parse()?; + + // Body of the impl + let body; + braced!(body in input); + let body = body.parse::<TokenStream>()?; + + // Try to parse the next entry in sequence. If this fails, we'll fall + // back to just parsing the entire rest of the TokenStream. + let maybe_next_impl = self.gen_impl_parse(&input.fork(), false); + + // Eat tokens to the end. Whether or not our speculative nested parse + // succeeded, we're going to want to consume the rest of our input. 
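+ // If the speculative parse above succeeded, `after` is replaced below with
+ // the token stream generated for the remaining `gen impl` blocks; otherwise
+ // the leftover tokens are emitted verbatim after this impl.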
+ let mut after = input.parse::<TokenStream>()?; + if let Ok(stream) = maybe_next_impl { + after = stream; + } + assert!(input.is_empty(), "Should've consumed the rest of our input"); + + /* Codegen Logic */ + let name = &self.ast.ident; + + // Add the generics from the original struct in, and then add any + // additional trait bounds which we need on the type. + if let Err(err) = merge_generics(&mut generics, &self.ast.generics) { + // Report the merge error as a `compile_error!`, as it may be + // triggerable by an end-user. + return Ok(err.to_compile_error()); + } + + self.add_trait_bounds(&bound, &mut generics.where_clause, self.add_bounds); + let (impl_generics, _, where_clause) = generics.split_for_impl(); + let (_, ty_generics, _) = self.ast.generics.split_for_impl(); + + let generated = quote! { + #(#before)* + #safety impl #impl_generics #bound for #name #ty_generics #where_clause { + #body + } + #after + }; + + if wrap { + if self.underscore_const { + Ok(quote! { + const _: () = { #generated }; + }) + } else { + let dummy_const: Ident = sanitize_ident(&format!( + "_DERIVE_{}_FOR_{}", + (&bound).into_token_stream(), + name.into_token_stream(), + )); + Ok(quote! { + #[allow(non_upper_case_globals)] + const #dummy_const: () = { + #generated + }; + }) + } + } else { + Ok(generated) + } + } +} + +/// Dumps an unpretty version of a tokenstream. Takes any type which implements +/// `Display`. +/// +/// This is mostly useful for visualizing the output of a procedural macro, as +/// it makes it marginally more readable. It is used in the implementation of +/// `test_derive!` to unprettily print the output. +/// +/// # Stability +/// +/// The stability of the output of this function is not guaranteed. Do not +/// assert that the output of this function does not change between minor +/// versions. +/// +/// # Example +/// +/// ``` +/// # use quote::quote; +/// assert_eq!( +/// synstructure::unpretty_print(quote! { +/// #[allow(non_upper_case_globals)] +/// const _DERIVE_krate_Trait_FOR_A: () = { +/// extern crate krate; +/// impl<T, U> krate::Trait for A<T, U> +/// where +/// Option<U>: krate::Trait, +/// U: krate::Trait +/// { +/// fn a() {} +/// } +/// }; +/// }), +/// "# [ +/// allow ( +/// non_upper_case_globals ) +/// ] +/// const _DERIVE_krate_Trait_FOR_A : ( +/// ) +/// = { +/// extern crate krate ; +/// impl < T , U > krate :: Trait for A < T , U > where Option < U > : krate :: Trait , U : krate :: Trait { +/// fn a ( +/// ) +/// { +/// } +/// } +/// } +/// ; +/// " +/// ) +/// ``` +pub fn unpretty_print<T: std::fmt::Display>(ts: T) -> String { + let mut res = String::new(); + + let raw_s = ts.to_string(); + let mut s = &raw_s[..]; + let mut indent = 0; + while let Some(i) = s.find(&['(', '{', '[', ')', '}', ']', ';'][..]) { + match &s[i..=i] { + "(" | "{" | "[" => indent += 1, + ")" | "}" | "]" => indent -= 1, + _ => {} + } + res.push_str(&s[..=i]); + res.push('\n'); + for _ in 0..indent { + res.push_str(" "); + } + s = trim_start_matches(&s[i + 1..], ' '); + } + res.push_str(s); + res +} + +/// `trim_left_matches` has been deprecated in favor of `trim_start_matches`. +/// This helper silences the warning, as we need to continue using +/// `trim_left_matches` for rust 1.15 support. +#[allow(deprecated)] +fn trim_start_matches(s: &str, c: char) -> &str { + s.trim_left_matches(c) +} + +/// Helper trait describing values which may be returned by macro implementation +/// methods used by this crate's macros. 
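The relationship between the `MacroResult` forms is easiest to see in a small sketch. This is not part of the vendored source; `derive_infallible` and `derive_fallible` are hypothetical names used only for illustration:

```rust
use proc_macro2::TokenStream;
use quote::quote;
use synstructure::Structure;

// Plain `TokenStream` already implements `MacroResult`, so a derive that
// cannot fail simply returns its generated tokens.
fn derive_infallible(s: Structure) -> TokenStream {
    s.each(|bi| quote!(drop(#bi)))
}

// `syn::Result<TokenStream>` implements `MacroResult` as well; when wired up
// through `decl_derive!`, `into_stream()` turns an `Err` into a
// `compile_error!` invocation at the user's call site.
fn derive_fallible(s: Structure) -> syn::Result<TokenStream> {
    if s.variants().is_empty() {
        return Err(syn::Error::new_spanned(
            &s.ast().ident,
            "expected at least one variant",
        ));
    }
    Ok(s.each(|bi| quote!(drop(#bi))))
}
```

Returning `Result` is usually preferable for derives that validate their input, since the error is reported as a diagnostic at the use site instead of a panic inside the macro.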
+pub trait MacroResult { + /// Convert this result into a `Result` for further processing / validation. + fn into_result(self) -> Result<TokenStream>; + + /// Convert this result into a `proc_macro::TokenStream`, ready to return + /// from a native `proc_macro` implementation. + /// + /// If `into_result()` would return an `Err`, this method should instead + /// generate a `compile_error!` invocation to nicely report the error. + /// + /// *This method is available if `synstructure` is built with the + /// `"proc-macro"` feature.* + #[cfg(all( + not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))), + feature = "proc-macro" + ))] + fn into_stream(self) -> proc_macro::TokenStream + where + Self: Sized, + { + match self.into_result() { + Ok(ts) => ts.into(), + Err(err) => err.to_compile_error().into(), + } + } +} + +#[cfg(all( + not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))), + feature = "proc-macro" +))] +impl MacroResult for proc_macro::TokenStream { + fn into_result(self) -> Result<TokenStream> { + Ok(self.into()) + } + + fn into_stream(self) -> proc_macro::TokenStream { + self + } +} + +impl MacroResult for TokenStream { + fn into_result(self) -> Result<TokenStream> { + Ok(self) + } +} + +impl<T: MacroResult> MacroResult for Result<T> { + fn into_result(self) -> Result<TokenStream> { + match self { + Ok(v) => v.into_result(), + Err(err) => Err(err), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + // Regression test for #48 + #[test] + fn test_each_enum() { + let di: syn::DeriveInput = syn::parse_quote! { + enum A { + Foo(usize, bool), + Bar(bool, usize), + Baz(usize, bool, usize), + Quux(bool, usize, bool) + } + }; + let mut s = Structure::new(&di); + + s.filter(|bi| bi.ast().ty.to_token_stream().to_string() == "bool"); + + assert_eq!( + s.each(|bi| quote!(do_something(#bi))).to_string(), + quote! { + A::Foo(_, ref __binding_1,) => { { do_something(__binding_1) } } + A::Bar(ref __binding_0, ..) => { { do_something(__binding_0) } } + A::Baz(_, ref __binding_1, ..) => { { do_something(__binding_1) } } + A::Quux(ref __binding_0, _, ref __binding_2,) => { + { + do_something(__binding_0) + } + { + do_something(__binding_2) + } + } + } + .to_string() + ); + } +} diff --git a/third_party/rust/synstructure/src/macros.rs b/third_party/rust/synstructure/src/macros.rs new file mode 100644 index 0000000000..5b6557b7c7 --- /dev/null +++ b/third_party/rust/synstructure/src/macros.rs @@ -0,0 +1,250 @@ +//! This module provides two utility macros for testing custom derives. They can +//! be used together to eliminate some of the boilerplate required in order to +//! declare and test custom derive implementations. + +// Re-exports used by the decl_derive! and test_derive! +pub use proc_macro2::TokenStream as TokenStream2; +pub use syn::{parse_str, DeriveInput}; + +#[cfg(all( + not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))), + feature = "proc-macro" +))] +pub use proc_macro::TokenStream; +#[cfg(all( + not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))), + feature = "proc-macro" +))] +pub use syn::parse; + +/// The `decl_derive!` macro declares a custom derive wrapper. It will parse the +/// incoming `TokenStream` into a `synstructure::Structure` object, and pass it +/// into the inner function. 
+/// +/// Your inner function should take a `synstructure::Structure` by value, and +/// return a type implementing `synstructure::MacroResult`, for example: +/// +/// ``` +/// fn derive_simple(input: synstructure::Structure) -> proc_macro2::TokenStream { +/// unimplemented!() +/// } +/// +/// fn derive_result(input: synstructure::Structure) +/// -> syn::Result<proc_macro2::TokenStream> +/// { +/// unimplemented!() +/// } +/// ``` +/// +/// # Usage +/// +/// ### Without Attributes +/// ``` +/// fn derive_interesting(_input: synstructure::Structure) -> proc_macro2::TokenStream { +/// quote::quote! { ... } +/// } +/// +/// # const _IGNORE: &'static str = stringify! { +/// decl_derive!([Interesting] => derive_interesting); +/// # }; +/// ``` +/// +/// ### With Attributes +/// ``` +/// # fn main() {} +/// fn derive_interesting(_input: synstructure::Structure) -> proc_macro2::TokenStream { +/// quote::quote! { ... } +/// } +/// +/// # const _IGNORE: &'static str = stringify! { +/// decl_derive!([Interesting, attributes(interesting_ignore)] => derive_interesting); +/// # }; +/// ``` +/// +/// ### Decl Attributes & Doc Comments +/// ``` +/// # fn main() {} +/// fn derive_interesting(_input: synstructure::Structure) -> proc_macro2::TokenStream { +/// quote::quote! { ... } +/// } +/// +/// # const _IGNORE: &'static str = stringify! { +/// decl_derive! { +/// [Interesting] => +/// #[allow(some_lint)] +/// /// Documentation Comments +/// derive_interesting +/// } +/// # }; +/// ``` +/// +/// *This macro is available if `synstructure` is built with the `"proc-macro"` +/// feature.* +#[cfg(all( + not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))), + feature = "proc-macro" +))] +#[macro_export] +macro_rules! decl_derive { + // XXX: Switch to using this variant everywhere? + ([$derives:ident $($derive_t:tt)*] => $(#[$($attrs:tt)*])* $inner:path) => { + #[proc_macro_derive($derives $($derive_t)*)] + #[allow(non_snake_case)] + $(#[$($attrs)*])* + pub fn $derives( + i: $crate::macros::TokenStream + ) -> $crate::macros::TokenStream { + match $crate::macros::parse::<$crate::macros::DeriveInput>(i) { + Ok(p) => { + match $crate::Structure::try_new(&p) { + Ok(s) => $crate::MacroResult::into_stream($inner(s)), + Err(e) => e.to_compile_error().into(), + } + } + Err(e) => e.to_compile_error().into(), + } + } + }; +} + +/// The `decl_attribute!` macro declares a custom attribute wrapper. It will +/// parse the incoming `TokenStream` into a `synstructure::Structure` object, +/// and pass it into the inner function. +/// +/// Your inner function should have the following type: +/// +/// ``` +/// fn attribute( +/// attr: proc_macro2::TokenStream, +/// structure: synstructure::Structure, +/// ) -> proc_macro2::TokenStream { +/// unimplemented!() +/// } +/// ``` +/// +/// # Usage +/// +/// ``` +/// fn attribute_interesting( +/// _attr: proc_macro2::TokenStream, +/// _structure: synstructure::Structure, +/// ) -> proc_macro2::TokenStream { +/// quote::quote! { ... } +/// } +/// +/// # const _IGNORE: &'static str = stringify! { +/// decl_attribute!([interesting] => attribute_interesting); +/// # }; +/// ``` +/// +/// *This macro is available if `synstructure` is built with the `"proc-macro"` +/// feature.* +#[cfg(all( + not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))), + feature = "proc-macro" +))] +#[macro_export] +macro_rules! 
decl_attribute { + ([$attribute:ident] => $(#[$($attrs:tt)*])* $inner:path) => { + #[proc_macro_attribute] + $(#[$($attrs)*])* + pub fn $attribute( + attr: $crate::macros::TokenStream, + i: $crate::macros::TokenStream, + ) -> $crate::macros::TokenStream { + match $crate::macros::parse::<$crate::macros::DeriveInput>(i) { + Ok(p) => match $crate::Structure::try_new(&p) { + Ok(s) => $crate::MacroResult::into_stream($inner(attr.into(), s)), + Err(e) => e.to_compile_error().into(), + }, + Err(e) => e.to_compile_error().into(), + } + } + }; +} + +/// Run a test on a custom derive. This macro expands both the original struct +/// and the expansion to ensure that they compile correctly, and confirms that +/// feeding the original struct into the named derive will produce the written +/// output. +/// +/// You can add `no_build` to the end of the macro invocation to disable +/// checking that the written code compiles. This is useful in contexts where +/// the procedural macro cannot depend on the crate where it is used during +/// tests. +/// +/// # Usage +/// +/// ``` +/// fn test_derive_example(_s: synstructure::Structure) +/// -> Result<proc_macro2::TokenStream, syn::Error> +/// { +/// Ok(quote::quote! { const YOUR_OUTPUT: &'static str = "here"; }) +/// } +/// +/// fn main() { +/// synstructure::test_derive!{ +/// test_derive_example { +/// struct A; +/// } +/// expands to { +/// const YOUR_OUTPUT: &'static str = "here"; +/// } +/// } +/// } +/// ``` +#[macro_export] +macro_rules! test_derive { + ($name:path { $($i:tt)* } expands to { $($o:tt)* }) => { + { + #[allow(dead_code)] + fn ensure_compiles() { + $($i)* + $($o)* + } + + $crate::test_derive!($name { $($i)* } expands to { $($o)* } no_build); + } + }; + + ($name:path { $($i:tt)* } expands to { $($o:tt)* } no_build) => { + { + let i = stringify!( $($i)* ); + let parsed = $crate::macros::parse_str::<$crate::macros::DeriveInput>(i) + .expect(concat!( + "Failed to parse input to `#[derive(", + stringify!($name), + ")]`", + )); + + let raw_res = $name($crate::Structure::new(&parsed)); + let res = $crate::MacroResult::into_result(raw_res) + .expect(concat!( + "Procedural macro failed for `#[derive(", + stringify!($name), + ")]`", + )); + + let expected = stringify!( $($o)* ) + .parse::<$crate::macros::TokenStream2>() + .expect("output should be a valid TokenStream"); + let mut expected_toks = $crate::macros::TokenStream2::from(expected); + if res.to_string() != expected_toks.to_string() { + panic!("\ +test_derive failed: +expected: +``` +{} +``` + +got: +``` +{} +```\n", + $crate::unpretty_print(&expected_toks), + $crate::unpretty_print(&res), + ); + } + } + }; +} |
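Taken together, a consumer proc-macro crate typically wires these pieces up roughly as follows. This is an illustrative sketch, not code from this vendored source: the trait `mycrate::WalkFields`, the derive name, and the generated method body are placeholders, and the code must live in a crate built with `proc-macro = true`.

```rust
use quote::quote;
use synstructure::{decl_derive, Structure};

// One match arm per non-filtered variant; `#body` expands to the arms, and
// each binding is visited through the `walker` callback.
fn walk_fields_derive(s: Structure) -> proc_macro2::TokenStream {
    let body = s.each(|bi| quote!(walker(#bi)));

    s.gen_impl(quote! {
        extern crate mycrate;
        gen impl mycrate::WalkFields for @Self {
            fn walk_fields(&self, walker: &mut dyn FnMut(&dyn mycrate::WalkFields)) {
                match *self { #body }
            }
        }
    })
}

// Exposes `#[derive(WalkFields)]`; parse failures and `Structure::try_new`
// errors are reported as `compile_error!` through `MacroResult::into_stream`.
decl_derive!([WalkFields] => walk_fields_derive);
```

A matching `test_derive!` invocation can then assert on the exact tokens `walk_fields_derive` produces, with `no_build` added when the generated code cannot be compiled from the macro crate's own tests.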