path: root/vendor/syn
author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 12:11:28 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 12:11:28 +0000
commit     94a0819fe3a0d679c3042a77bfe6a2afc505daea (patch)
tree       2b827afe6a05f3538db3f7803a88c4587fe85648 /vendor/syn
parent     Adding upstream version 1.64.0+dfsg1. (diff)
download   rustc-94a0819fe3a0d679c3042a77bfe6a2afc505daea.tar.xz
           rustc-94a0819fe3a0d679c3042a77bfe6a2afc505daea.zip
Adding upstream version 1.66.0+dfsg1. (upstream/1.66.0+dfsg1)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'vendor/syn')
-rw-r--r--  vendor/syn/.cargo-checksum.json        2
-rw-r--r--  vendor/syn/Cargo.toml                 13
-rw-r--r--  vendor/syn/benches/file.rs            36
-rw-r--r--  vendor/syn/benches/rust.rs            11
-rw-r--r--  vendor/syn/src/buffer.rs             199
-rw-r--r--  vendor/syn/src/expr.rs                21
-rw-r--r--  vendor/syn/src/gen/clone.rs            2
-rw-r--r--  vendor/syn/src/gen/debug.rs           10
-rw-r--r--  vendor/syn/src/gen/eq.rs               6
-rw-r--r--  vendor/syn/src/gen/fold.rs             6
-rw-r--r--  vendor/syn/src/gen/hash.rs             6
-rw-r--r--  vendor/syn/src/gen/visit.rs            6
-rw-r--r--  vendor/syn/src/gen/visit_mut.rs        6
-rw-r--r--  vendor/syn/src/item.rs                14
-rw-r--r--  vendor/syn/src/lib.rs                 13
-rw-r--r--  vendor/syn/src/lit.rs                  2
-rw-r--r--  vendor/syn/src/parse_macro_input.rs    4
-rw-r--r--  vendor/syn/src/pat.rs                 35
-rw-r--r--  vendor/syn/src/path.rs                46
-rw-r--r--  vendor/syn/src/stmt.rs                 6
-rw-r--r--  vendor/syn/src/ty.rs                  53
-rw-r--r--  vendor/syn/tests/.gitignore            1
-rw-r--r--  vendor/syn/tests/common/eq.rs        109
-rw-r--r--  vendor/syn/tests/debug/gen.rs         12
-rw-r--r--  vendor/syn/tests/regression/issue1108.rs  2
-rw-r--r--  vendor/syn/tests/repo/mod.rs          77
-rw-r--r--  vendor/syn/tests/test_derive_input.rs  2
-rw-r--r--  vendor/syn/tests/test_parse_stream.rs  8
-rw-r--r--  vendor/syn/tests/test_precedence.rs   19
-rw-r--r--  vendor/syn/tests/test_size.rs          8
-rw-r--r--  vendor/syn/tests/test_stmt.rs          2
31 files changed, 403 insertions, 334 deletions
diff --git a/vendor/syn/.cargo-checksum.json b/vendor/syn/.cargo-checksum.json
index 8ea280f0f..8b90dae0f 100644
--- a/vendor/syn/.cargo-checksum.json
+++ b/vendor/syn/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"5b13c4f4b16314a07b7c6a3cc3e726579bf0c24b5b45fa2ec48ab966ccaa0e45","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"b1546652aefba564455c1ebbf0f276450d4fdb19755e08bfa03c13c8bab241fc","benches/file.rs":"af4671030b7fbc3a5dbc37b492e80fcae2893ad7a64dd43dac12cf5906c32301","benches/rust.rs":"22dfbffc39f8d091ef74cca849e7e7c69e9a47f2a06f7dec91c0382401055c14","build.rs":"b815649fd2929d3debd93a58f5da2fb8eba506047a6a5ba538347305828a87b0","src/attr.rs":"234d9cebe2c5e92cd0f5e1117bf5755037e2e905788a337000a65d4bd82b63aa","src/await.rs":"8aa22e3c201cb2bdb6b4817fa00901f308ab06817607aa7b884c58c957705969","src/bigint.rs":"efc7f64959980653d73fe4f8bc2a3a2904dc05f45b02c6dc15cd316fa3d7c338","src/buffer.rs":"24a3ba4928a5b2265199c29b84610b6f988bc422076ad07dfa30e91b6d72314f","src/custom_keyword.rs":"5c706fc3611e73d16b8c019d7ecb848a86b1ccfcd9e556f80bb6e6a4abe058a8","src/custom_punctuation.rs":"8a666298e774b0d326642f0f73284f6677d0d0a7c9e4a712c9c98d010b4d8a2c","src/data.rs":"75d2c2b5d6a01bf8a6fa2845e41663d8045a78b4b191f1a1bd7c93619d20017a","src/derive.rs":"ee24a202be2d36ccdff576dd9cd765e94b33ef2286946e6725d75b08e777d462","src/discouraged.rs":"6c6a9298f8d24f578da119557bc588f3bd928f7b79fca27d6bdfe3e786dd005f","src/error.rs":"e548cc5b7c6f742ab6c19788755980594c4cb8086f99e6709f1cbc982961102d","src/export.rs":"0cf50d70c32d5fddba8b1193032df62e560237c113df3e86ba26b565cc82838e","src/expr.rs":"0d441100457567b7b18372fdcbbe833d7516b5a43f17f1cfc4d7f354141b8206","src/ext.rs":"1f648cff1d705a1cea64b32b77482b97a82d2fe0aaf63b40cade91e5c02dc969","src/file.rs":"f86697655222ae294215114f4eae8e6b0b5e2a935d6c479ff8f8f889c4efd2e2","src/gen/clone.rs":"c43199af10b9963476a1ef6b9e0540363a8383c611f8999463235a0939817da8","src/gen/debug.rs":"12e07500a09d1d15b7a148d9155af357c7ac9b65ac100906cf0fac604403274e","src/gen/eq.rs":"e0928a9f4e81a7ede04853d1837abccaf29dd4ffb56d864fad2f4c3a4c76f1b4","src/gen/fold.rs":"43b34e7a951c180b65c6dd97c380f067ea91776f52e70f61555bd8a4120170b1","src/gen/hash.rs":"b66425846386e2168990b3bee2461f5c01695acd8c4b6360619a07108b260f3d","src/gen/visit.rs":"d559f661ab4a4c5b058af91ef244f30f906b6be82d1dd61bf0d058236f8c6a35","src/gen/visit_mut.rs":"fe384074919cdb1a6f54b93f8a9c768da7d3751e5bbfaf8c6c7133a7ebd390ae","src/gen_helper.rs":"ea6c66388365971db6a2fc86cbb208f7eacde77e245bc8623f27a3642a3d7741","src/generics.rs":"46ed41bf116448822ddfefcb62e803fd33264ca8ba672efc0612674d85b6dd11","src/group.rs":"166f0fbb365471ffa3e4f554b72c2b460cbf7e3a1f9bec6c01ef6bbbcd751041","src/ident.rs":"2443e43561abea7eea577b141422258237a663499c839923d8a5ca6fea2470db","src/item.rs":"2745d8bc068f821fc7dc8f480aceac1d10adc578a0b8b7317eb78f5c9048c68c","src/lib.rs":"81f5b18a8474d6a919af2134ea263f940584c347a2592cbc5e7f31987d3c5e15","src/lifetime.rs":"b18862ef1e690037a4f308ea897debad7bc5038584e3b26c6d8809752ea0e3c2","src/lit.rs":"9134ff103d943cfabdbfae56e78881680f91f9902172b890884a05c58131602a","src/lookahead.rs":"e2c2b6d55906421e83dab51463b58bc6dcb582f1bff9303c8b62afefb8d71e5f","src/mac.rs":"004cb89f9697564f6c9ee837e08ead68463ef946fb4c13c6c105adf2ba364b2b","src/macros.rs":"936f503c2fcde602f05220954ecaf87625c6138d0af13d33d56c7b6530110084","src/op.rs":"9d499022902743a6a0a19223b356449a979b90e60552d0446497d72750e646a4","src/parse.rs":"7b2f8caddf25a5734cbcdf7cbf043cbf9afbc07b484966cd59ddfcec9f970fb3","src/parse_macro_input.rs":"88929a1a7e5e72aa2d0b3459e52d8975afea856d159047ba4ab02ecbc5878a9c","src/parse_quote.rs":"d7d996f13
82c68b5fbfd4b7327ce1d389cd43c3bb3c4f382a35994d0bb79d8ab","src/pat.rs":"1e0223ca92c160e07b17a593bb93dd5d451fc9b7014012b403377a4f0f21bfce","src/path.rs":"1ad8b8335628f67a013b7ce2f662aa2feab4583ff67bc959a9dea1dcb51be8a3","src/print.rs":"da6529c1d9d21aaf6c835f66b4e67eacb7cf91a10eb5e9a2143b49bf99b3b5e1","src/punctuated.rs":"f687c23bd3ae512e7412c28ac68030d3bc7a384d1ca8b3da6620e364b0cbbb78","src/reserved.rs":"e70e028bd55cfa43e23cab4ba29e4dc53a3d91eff685ef2b6e57efc2b87a3428","src/sealed.rs":"896a495a5340eec898527f18bd4ddca408ea03ea0ee3af30074ff48deace778d","src/span.rs":"748c51c6feb223c26d3b1701f5bb98aee823666c775c98106cfa24fe29d8cec1","src/spanned.rs":"3ca016a943637653ab98e373dfb826a120f3c159867346fa38a844439944eb39","src/stmt.rs":"8115bc96090022baad91660d7e5e986664c3f1fbd2f112d1c5d1d77e5c3f227e","src/thread.rs":"815eca6bd64f4eef7c447f0809e84108f5428ff50225224b373efd8fbb696874","src/token.rs":"5e423a696f80e281c322f37c87577f9fdc28607e9c007e24896a2b12da62d5ad","src/tt.rs":"32402645b6e82ef1e882945721b59b5fb7b0ee337d1972876362ecacef643d0f","src/ty.rs":"5c05b000d4884334bed729c3345732fbbb4136a75df9dd5002c493ebb6cd091b","src/verbatim.rs":"802a97df997432f18cac6e6200ff6ea29fb2474986005e0fcdbc2b65197f87f7","src/whitespace.rs":"e63dd0aa3d34029f17766a8b09c1a6e4479e36c552c8b7023d710a399333aace","tests/.gitignore":"22e782449a3c216db3f7215d5fb8882e316768e40beeec3833aae419ad8941db","tests/common/eq.rs":"fc4b0ced43acb32eedecb1a161bfe12148aacdd7bf57c926da55880b70b529f7","tests/common/mod.rs":"432ad35577f836a20b517d8c26ed994ac25fe73ef2f461c67688b61b99762015","tests/common/parse.rs":"81580f23583723f7a2a337c4d13ebc021057cd825562fb4e474caa7cc641fed9","tests/debug/gen.rs":"4937074a11fe8266431d05177435bad28340763d408d0a4c45bd0c0acc90a86a","tests/debug/mod.rs":"3a6bb799f478101f71c84c6f1a854a58afe2f9db43c39017909346ca20262d94","tests/macros/mod.rs":"aff805b35cfd55aef6a1359ff747e4023afcb08d69d86aff4c19465d29dda088","tests/regression.rs":"f962ebf24007f631f7e702e34e142d07581da7c9a36321ac142cafed1a0afc69","tests/regression/issue1108.rs":"f32db35244a674e22ff824ca9e5bbec2184e287b59f022db68c418b5878a2edc","tests/repo/mod.rs":"947b678f50df8716ef5c946885a3096e91ab2ec4ce2251cea83d6e6d2ab82eb5","tests/repo/progress.rs":"c08d0314a7f3ecf760d471f27da3cd2a500aeb9f1c8331bffb2aa648f9fabf3f","tests/test_asyncness.rs":"cff01db49d28ab23b0b258bc6c0a5cc4071be4fe7248eef344a5d79d2fb649b7","tests/test_attribute.rs":"0ffd99384e1a52ae17d9fed5c4053e411e8f9018decef07ffa621d1faa7329d8","tests/test_derive_input.rs":"63f0b4d56f3f421e0bb523ce2924afe9e349f0ccefda52ccb8e1e5f342e6d525","tests/test_expr.rs":"a639728866a063b590430965a4840c01755e398b89be12d8d09b0aa97837ecac","tests/test_generics.rs":"54b7d2afc19aa6e9049585f4c8f7d3f0c29ac3bd11a2c769e9df76f18a4f5ecb","tests/test_grouping.rs":"6276c3c73bba649dec5c97904ad2492879f918bc887a2c425d095c654ca0d925","tests/test_ident.rs":"9eb53d1e21edf23e7c9e14dc74dcc2b2538e9221e19dbcc0a44e3acc2e90f3f6","tests/test_item.rs":"a3642c80066f1e7787becfd0278af90a6b7968d6c1249e25e81663aa454cfb2a","tests/test_iterators.rs":"53ed6078d37550bd6765d2411e3660be401aef8a31a407350cc064a7d08c7c33","tests/test_lit.rs":"19740ea9cd4a980bcab9b0dcaa4b032bb6ebb137fa5e4237140b97da1d9679fa","tests/test_meta.rs":"65d4586d131f6cac66694ca5e936748ec4e7f7423af6d8da509240e6be14800b","tests/test_parse_buffer.rs":"68d857f776396d064fcc0023c37093c2fbf75ee68e8241d4014d00d1423c18e9","tests/test_parse_stream.rs":"2f449a2c41a3dee6fd14bee24e1666a453cb808eda17332fd91afd127fcdd2a6","tests/test_pat.rs":"d4465f4fc3fd5d6e534ba8efabe1e0ed6da89de4ac7c96effa6bfb880c428
7cf","tests/test_path.rs":"71092a5ae2c9143b92a8fe15a92d39958b3c28bd4d4275cfb2d22cbdd53ada07","tests/test_precedence.rs":"1069d979cec0e6c650bebb58be272d23080ce89e83d194dfa2718d22912e481d","tests/test_receiver.rs":"084eca59984b9a18651da52f2c4407355da3de1335916a12477652999e2d01cc","tests/test_round_trip.rs":"b47662e35be2729f28bacdbbea20f1879c111889430e735a7bcb5f2a5c0b9e5c","tests/test_shebang.rs":"f5772cadad5b56e3112cb16308b779f92bce1c3a48091fc9933deb2276a69331","tests/test_should_parse.rs":"1d3535698a446e2755bfc360676bdb161841a1f454cdef6e7556c6d06a95c89d","tests/test_size.rs":"1aa0dd489bd844a4a9cf32a5310abd66dad1dae3ffb24fe1288b62a26bfdd8dc","tests/test_stmt.rs":"1c39e26e262673aee7747338f0dbca01f5fc4f07b198b11414a614f36d3f9587","tests/test_token_trees.rs":"43e56a701817e3c3bfd0cae54a457dd7a38ccb3ca19da41e2b995fdf20e6ed18","tests/test_ty.rs":"f71d7f7f1c038aaabea8dd4c03c0d5752c76d570f8b4885a81659825bbb4d576","tests/test_visibility.rs":"7456fcb3a6634db509748aededff9c2d8b242d511a3e5ee3022e40b232892704","tests/zzz_stable.rs":"2a862e59cb446235ed99aec0e6ada8e16d3ecc30229b29d825b7c0bbc2602989"},"package":"c50aef8a904de4c23c788f104b7dddc7d6f79c647c7c8ce4cc8f73eb0ca773dd"} \ No newline at end of file
+{"files":{"Cargo.toml":"8366f3b0e0c3a589f43424b1837bb43aa8b4dd224184d355ad38a63bac915210","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"b1546652aefba564455c1ebbf0f276450d4fdb19755e08bfa03c13c8bab241fc","benches/file.rs":"3d737ef3878f6e242b003af9bd539e565f98439a12ee44d9548d84e3fdd7af0c","benches/rust.rs":"11ac9fe898a7bf1bd63e8a8cc9c08bd795b01f0248215cff99afaaf28ce87fab","build.rs":"b815649fd2929d3debd93a58f5da2fb8eba506047a6a5ba538347305828a87b0","src/attr.rs":"234d9cebe2c5e92cd0f5e1117bf5755037e2e905788a337000a65d4bd82b63aa","src/await.rs":"8aa22e3c201cb2bdb6b4817fa00901f308ab06817607aa7b884c58c957705969","src/bigint.rs":"efc7f64959980653d73fe4f8bc2a3a2904dc05f45b02c6dc15cd316fa3d7c338","src/buffer.rs":"2b48296087f096c9630e1e4b03a00ae703407d1b352902b3197370b2f56c62ff","src/custom_keyword.rs":"5c706fc3611e73d16b8c019d7ecb848a86b1ccfcd9e556f80bb6e6a4abe058a8","src/custom_punctuation.rs":"8a666298e774b0d326642f0f73284f6677d0d0a7c9e4a712c9c98d010b4d8a2c","src/data.rs":"75d2c2b5d6a01bf8a6fa2845e41663d8045a78b4b191f1a1bd7c93619d20017a","src/derive.rs":"ee24a202be2d36ccdff576dd9cd765e94b33ef2286946e6725d75b08e777d462","src/discouraged.rs":"6c6a9298f8d24f578da119557bc588f3bd928f7b79fca27d6bdfe3e786dd005f","src/error.rs":"e548cc5b7c6f742ab6c19788755980594c4cb8086f99e6709f1cbc982961102d","src/export.rs":"0cf50d70c32d5fddba8b1193032df62e560237c113df3e86ba26b565cc82838e","src/expr.rs":"5eea3828f3291b0ce5463ed5f0c23fc8a39aeceae68a3247ae02ae467dd35a98","src/ext.rs":"1f648cff1d705a1cea64b32b77482b97a82d2fe0aaf63b40cade91e5c02dc969","src/file.rs":"f86697655222ae294215114f4eae8e6b0b5e2a935d6c479ff8f8f889c4efd2e2","src/gen/clone.rs":"76e89fe155fedf43bc4a252af7e35319b82ce455f584bad8698fdc3f9b7f5d4e","src/gen/debug.rs":"4b05e474e864ce6bf1a5a6ab48ee6c0ecdf41a0d750237990cf2e31963bc1208","src/gen/eq.rs":"79f84836fdcd5cfa352f38055dab7c3246c7757650946c1c701234b11021652a","src/gen/fold.rs":"fcd6a05c8c8e0c36e7ede8593002528b553c8b648fbed452106fd6a8a8c9212a","src/gen/hash.rs":"575e8beae303c1eabda12bf76cbd82672268c502a8ebb8517aab18b40fdbc44e","src/gen/visit.rs":"ced9f6c17d2b3eb3553faab710cb2b3d44d6bca7d1862c8c5da09c3d45debecb","src/gen/visit_mut.rs":"966ea340c53461bf8a1c6bed3c882e4ab8b8907fd18ac35531266f7891ae5f46","src/gen_helper.rs":"ea6c66388365971db6a2fc86cbb208f7eacde77e245bc8623f27a3642a3d7741","src/generics.rs":"46ed41bf116448822ddfefcb62e803fd33264ca8ba672efc0612674d85b6dd11","src/group.rs":"166f0fbb365471ffa3e4f554b72c2b460cbf7e3a1f9bec6c01ef6bbbcd751041","src/ident.rs":"2443e43561abea7eea577b141422258237a663499c839923d8a5ca6fea2470db","src/item.rs":"419c4d6135a7ca7b8f94b5ba038b6af8fcb3939ae807153a19e3c82e9b01e0b7","src/lib.rs":"7875551b227d19f083115d48a83e8c35e3e6d31dbd749bdd03556e2762f7d4fd","src/lifetime.rs":"b18862ef1e690037a4f308ea897debad7bc5038584e3b26c6d8809752ea0e3c2","src/lit.rs":"fc06ddd523f7f9971d8abdb4c8d5d51030ffb3d6810615d5575ae210a7800695","src/lookahead.rs":"e2c2b6d55906421e83dab51463b58bc6dcb582f1bff9303c8b62afefb8d71e5f","src/mac.rs":"004cb89f9697564f6c9ee837e08ead68463ef946fb4c13c6c105adf2ba364b2b","src/macros.rs":"936f503c2fcde602f05220954ecaf87625c6138d0af13d33d56c7b6530110084","src/op.rs":"9d499022902743a6a0a19223b356449a979b90e60552d0446497d72750e646a4","src/parse.rs":"7b2f8caddf25a5734cbcdf7cbf043cbf9afbc07b484966cd59ddfcec9f970fb3","src/parse_macro_input.rs":"a5d16859b782bb6a2754c1066468a2f1ea05b57390caa32175bb84064973be7b","src/parse_quote.rs":"d7d996f13
82c68b5fbfd4b7327ce1d389cd43c3bb3c4f382a35994d0bb79d8ab","src/pat.rs":"b2de04ae6c01df50eab9d1c3908287aca8424adc2007b926c7bcf74d1f64d40a","src/path.rs":"269d5d8b0c21eaf96e1c49bcb1ec2a03175a8adcc103c142e550b3f5e79825d8","src/print.rs":"da6529c1d9d21aaf6c835f66b4e67eacb7cf91a10eb5e9a2143b49bf99b3b5e1","src/punctuated.rs":"f687c23bd3ae512e7412c28ac68030d3bc7a384d1ca8b3da6620e364b0cbbb78","src/reserved.rs":"e70e028bd55cfa43e23cab4ba29e4dc53a3d91eff685ef2b6e57efc2b87a3428","src/sealed.rs":"896a495a5340eec898527f18bd4ddca408ea03ea0ee3af30074ff48deace778d","src/span.rs":"748c51c6feb223c26d3b1701f5bb98aee823666c775c98106cfa24fe29d8cec1","src/spanned.rs":"3ca016a943637653ab98e373dfb826a120f3c159867346fa38a844439944eb39","src/stmt.rs":"601a6914f1e0bf97ae0d31d474a531d195b8c251a4ded11aa8746ac0018d367b","src/thread.rs":"815eca6bd64f4eef7c447f0809e84108f5428ff50225224b373efd8fbb696874","src/token.rs":"5e423a696f80e281c322f37c87577f9fdc28607e9c007e24896a2b12da62d5ad","src/tt.rs":"32402645b6e82ef1e882945721b59b5fb7b0ee337d1972876362ecacef643d0f","src/ty.rs":"7e678749af18fc84ae9220435e467e520de05eea66adeeed3b5d634cd744561c","src/verbatim.rs":"802a97df997432f18cac6e6200ff6ea29fb2474986005e0fcdbc2b65197f87f7","src/whitespace.rs":"e63dd0aa3d34029f17766a8b09c1a6e4479e36c552c8b7023d710a399333aace","tests/common/eq.rs":"953f5db261a3334eba1d37df4247463e9234c7988da04f43028b5273d24bf2da","tests/common/mod.rs":"432ad35577f836a20b517d8c26ed994ac25fe73ef2f461c67688b61b99762015","tests/common/parse.rs":"81580f23583723f7a2a337c4d13ebc021057cd825562fb4e474caa7cc641fed9","tests/debug/gen.rs":"1b7f875344cb04a7dd3df62deac2f410a9d107c097986e68006d87465f5f5306","tests/debug/mod.rs":"3a6bb799f478101f71c84c6f1a854a58afe2f9db43c39017909346ca20262d94","tests/macros/mod.rs":"aff805b35cfd55aef6a1359ff747e4023afcb08d69d86aff4c19465d29dda088","tests/regression.rs":"f962ebf24007f631f7e702e34e142d07581da7c9a36321ac142cafed1a0afc69","tests/regression/issue1108.rs":"adcc55a42239d344da74216ed85fc14153ddd6ca4dec4872d8339604ba78c185","tests/repo/mod.rs":"1ea18f9430e75cabc4b23b826544c2bf2f950b679a04b237a11e17aabc16e2e9","tests/repo/progress.rs":"c08d0314a7f3ecf760d471f27da3cd2a500aeb9f1c8331bffb2aa648f9fabf3f","tests/test_asyncness.rs":"cff01db49d28ab23b0b258bc6c0a5cc4071be4fe7248eef344a5d79d2fb649b7","tests/test_attribute.rs":"0ffd99384e1a52ae17d9fed5c4053e411e8f9018decef07ffa621d1faa7329d8","tests/test_derive_input.rs":"62bb86aaaaf730187a46ff700a8e3b2d1a163039b109b6a483aa44ed2b6806fe","tests/test_expr.rs":"a639728866a063b590430965a4840c01755e398b89be12d8d09b0aa97837ecac","tests/test_generics.rs":"54b7d2afc19aa6e9049585f4c8f7d3f0c29ac3bd11a2c769e9df76f18a4f5ecb","tests/test_grouping.rs":"6276c3c73bba649dec5c97904ad2492879f918bc887a2c425d095c654ca0d925","tests/test_ident.rs":"9eb53d1e21edf23e7c9e14dc74dcc2b2538e9221e19dbcc0a44e3acc2e90f3f6","tests/test_item.rs":"a3642c80066f1e7787becfd0278af90a6b7968d6c1249e25e81663aa454cfb2a","tests/test_iterators.rs":"53ed6078d37550bd6765d2411e3660be401aef8a31a407350cc064a7d08c7c33","tests/test_lit.rs":"19740ea9cd4a980bcab9b0dcaa4b032bb6ebb137fa5e4237140b97da1d9679fa","tests/test_meta.rs":"65d4586d131f6cac66694ca5e936748ec4e7f7423af6d8da509240e6be14800b","tests/test_parse_buffer.rs":"68d857f776396d064fcc0023c37093c2fbf75ee68e8241d4014d00d1423c18e9","tests/test_parse_stream.rs":"bf1db6fab7ac396fa61012faccbe6ffbc9c3d795ed2900be75e91c5b09b0c62f","tests/test_pat.rs":"d4465f4fc3fd5d6e534ba8efabe1e0ed6da89de4ac7c96effa6bfb880c4287cf","tests/test_path.rs":"71092a5ae2c9143b92a8fe15a92d39958b3c28bd4d4275cfb2d22cbdd53
ada07","tests/test_precedence.rs":"736eee861c4c7a3d7d4387d2fb1b5eced1541790d34974f72b0a5532797e73c3","tests/test_receiver.rs":"084eca59984b9a18651da52f2c4407355da3de1335916a12477652999e2d01cc","tests/test_round_trip.rs":"b47662e35be2729f28bacdbbea20f1879c111889430e735a7bcb5f2a5c0b9e5c","tests/test_shebang.rs":"f5772cadad5b56e3112cb16308b779f92bce1c3a48091fc9933deb2276a69331","tests/test_should_parse.rs":"1d3535698a446e2755bfc360676bdb161841a1f454cdef6e7556c6d06a95c89d","tests/test_size.rs":"697906d892ab8186eebdf6bc7696fde7a42376d50bee846ba69f031bdb847e01","tests/test_stmt.rs":"0601fc32131b5501dfcdc4b4248d46bf21e0a98a49eb19439e1a46869dfb30b7","tests/test_token_trees.rs":"43e56a701817e3c3bfd0cae54a457dd7a38ccb3ca19da41e2b995fdf20e6ed18","tests/test_ty.rs":"f71d7f7f1c038aaabea8dd4c03c0d5752c76d570f8b4885a81659825bbb4d576","tests/test_visibility.rs":"7456fcb3a6634db509748aededff9c2d8b242d511a3e5ee3022e40b232892704","tests/zzz_stable.rs":"2a862e59cb446235ed99aec0e6ada8e16d3ecc30229b29d825b7c0bbc2602989"},"package":"3fcd952facd492f9be3ef0d0b7032a6e442ee9b361d4acc2b1d0c4aaa5f613a1"} \ No newline at end of file
diff --git a/vendor/syn/Cargo.toml b/vendor/syn/Cargo.toml
index acd07cc81..7b1412593 100644
--- a/vendor/syn/Cargo.toml
+++ b/vendor/syn/Cargo.toml
@@ -13,7 +13,7 @@
edition = "2018"
rust-version = "1.31"
name = "syn"
-version = "1.0.98"
+version = "1.0.102"
authors = ["David Tolnay <dtolnay@gmail.com>"]
include = [
"/benches/**",
@@ -28,7 +28,14 @@ include = [
description = "Parser for Rust source code"
documentation = "https://docs.rs/syn"
readme = "README.md"
-categories = ["development-tools::procedural-macro-helpers"]
+keywords = [
+ "macros",
+ "syn",
+]
+categories = [
+ "development-tools::procedural-macro-helpers",
+ "parser-implementations",
+]
license = "MIT OR Apache-2.0"
repository = "https://github.com/dtolnay/syn"
@@ -65,7 +72,7 @@ required-features = [
]
[dependencies.proc-macro2]
-version = "1.0.39"
+version = "1.0.46"
default-features = false
[dependencies.quote]
diff --git a/vendor/syn/benches/file.rs b/vendor/syn/benches/file.rs
index 86204df2d..bd4a247df 100644
--- a/vendor/syn/benches/file.rs
+++ b/vendor/syn/benches/file.rs
@@ -1,8 +1,12 @@
-// $ cargo bench --features full --bench file
+// $ cargo bench --features full,test --bench file
#![feature(rustc_private, test)]
#![recursion_limit = "1024"]
-#![allow(clippy::missing_panics_doc, clippy::must_use_candidate)]
+#![allow(
+ clippy::items_after_statements,
+ clippy::missing_panics_doc,
+ clippy::must_use_candidate
+)]
extern crate test;
@@ -15,17 +19,37 @@ mod common;
#[path = "../tests/repo/mod.rs"]
pub mod repo;
-use proc_macro2::TokenStream;
+use proc_macro2::{Span, TokenStream};
use std::fs;
use std::str::FromStr;
+use syn::parse::{ParseStream, Parser};
use test::Bencher;
const FILE: &str = "tests/rust/library/core/src/str/mod.rs";
-#[bench]
-fn parse_file(b: &mut Bencher) {
+fn get_tokens() -> TokenStream {
repo::clone_rust();
let content = fs::read_to_string(FILE).unwrap();
- let tokens = TokenStream::from_str(&content).unwrap();
+ TokenStream::from_str(&content).unwrap()
+}
+
+#[bench]
+fn baseline(b: &mut Bencher) {
+ let tokens = get_tokens();
+ b.iter(|| drop(tokens.clone()));
+}
+
+#[bench]
+fn create_token_buffer(b: &mut Bencher) {
+ let tokens = get_tokens();
+ fn immediate_fail(_input: ParseStream) -> syn::Result<()> {
+ Err(syn::Error::new(Span::call_site(), ""))
+ }
+ b.iter(|| immediate_fail.parse2(tokens.clone()));
+}
+
+#[bench]
+fn parse_file(b: &mut Bencher) {
+ let tokens = get_tokens();
b.iter(|| syn::parse2::<syn::File>(tokens.clone()));
}
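The two new benchmarks above split the cost of parsing into stages: `baseline` measures cloning the TokenStream, `create_token_buffer` measures building syn's internal token buffer (by invoking a parser function that fails immediately), and `parse_file` measures the full parse. A minimal standalone sketch of the same idea, assuming syn 1.x with the `full` feature and proc-macro2 as dependencies (the input string and function names below are illustrative, not part of the patch):

    use proc_macro2::{Span, TokenStream};
    use std::str::FromStr;
    use syn::parse::{ParseStream, Parser};

    fn main() -> syn::Result<()> {
        let tokens = TokenStream::from_str("fn answer() -> u32 { 42 }").unwrap();

        // A parser that fails before consuming anything: running it still
        // forces construction of the internal TokenBuffer, which is what the
        // `create_token_buffer` benchmark isolates.
        fn immediate_fail(_input: ParseStream) -> syn::Result<()> {
            Err(syn::Error::new(Span::call_site(), ""))
        }
        assert!(immediate_fail.parse2(tokens.clone()).is_err());

        // Full parse, as in the `parse_file` benchmark.
        let _file: syn::File = syn::parse2(tokens)?;
        Ok(())
    }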
diff --git a/vendor/syn/benches/rust.rs b/vendor/syn/benches/rust.rs
index 5e4fba182..e3f8f550a 100644
--- a/vendor/syn/benches/rust.rs
+++ b/vendor/syn/benches/rust.rs
@@ -1,7 +1,7 @@
-// $ cargo bench --features full --bench rust
+// $ cargo bench --features full,test --bench rust
//
// Syn only, useful for profiling:
-// $ RUSTFLAGS='--cfg syn_only' cargo build --release --features full --bench rust
+// $ RUSTFLAGS='--cfg syn_only' cargo build --release --features full,test --bench rust
#![cfg_attr(not(syn_only), feature(rustc_private))]
#![recursion_limit = "1024"]
@@ -46,7 +46,7 @@ mod librustc_parse {
use rustc_data_structures::sync::Lrc;
use rustc_error_messages::FluentBundle;
- use rustc_errors::{emitter::Emitter, Diagnostic, Handler};
+ use rustc_errors::{emitter::Emitter, translation::Translate, Diagnostic, Handler};
use rustc_session::parse::ParseSess;
use rustc_span::source_map::{FilePathMapping, SourceMap};
use rustc_span::{edition::Edition, FileName};
@@ -59,6 +59,9 @@ mod librustc_parse {
fn source_map(&self) -> Option<&Lrc<SourceMap>> {
None
}
+ }
+
+ impl Translate for SilentEmitter {
fn fluent_bundle(&self) -> Option<&Lrc<FluentBundle>> {
None
}
@@ -88,7 +91,7 @@ mod librustc_parse {
#[cfg(not(syn_only))]
mod read_from_disk {
pub fn bench(content: &str) -> Result<(), ()> {
- let _ = content;
+ _ = content;
Ok(())
}
}
diff --git a/vendor/syn/src/buffer.rs b/vendor/syn/src/buffer.rs
index 2cb6690f0..161b614c8 100644
--- a/vendor/syn/src/buffer.rs
+++ b/vendor/syn/src/buffer.rs
@@ -15,20 +15,17 @@ use crate::proc_macro as pm;
use crate::Lifetime;
use proc_macro2::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
use std::marker::PhantomData;
-use std::ptr;
-use std::slice;
/// Internal type which is used instead of `TokenTree` to represent a token tree
/// within a `TokenBuffer`.
enum Entry {
// Mimicking types from proc-macro.
- Group(Group, TokenBuffer),
+ // Group entries contain the offset to the matching End entry.
+ Group(Group, usize),
Ident(Ident),
Punct(Punct),
Literal(Literal),
- // End entries contain a raw pointer to the entry from the containing
- // token tree, or null if this is the outermost level.
- End(*const Entry),
+ End,
}
/// A buffer that can be efficiently traversed multiple times, unlike
@@ -37,76 +34,29 @@ enum Entry {
///
/// *This type is available only if Syn is built with the `"parsing"` feature.*
pub struct TokenBuffer {
- // NOTE: Do not implement clone on this - there are raw pointers inside
- // these entries which will be messed up. Moving the `TokenBuffer` itself is
- // safe as the data pointed to won't be moved.
- ptr: *const Entry,
- len: usize,
-}
-
-impl Drop for TokenBuffer {
- fn drop(&mut self) {
- unsafe {
- let slice = slice::from_raw_parts_mut(self.ptr as *mut Entry, self.len);
- let _ = Box::from_raw(slice);
- }
- }
+ // NOTE: Do not implement clone on this - while the current design could be
+ // cloned, other designs which could be desirable may not be cloneable.
+ entries: Box<[Entry]>,
}
impl TokenBuffer {
- // NOTE: Do not mutate the Vec returned from this function once it returns;
- // the address of its backing memory must remain stable.
- fn inner_new(stream: TokenStream, up: *const Entry) -> TokenBuffer {
- // Build up the entries list, recording the locations of any Groups
- // in the list to be processed later.
- let mut entries = Vec::new();
- let mut groups = Vec::new();
+ fn recursive_new(entries: &mut Vec<Entry>, stream: TokenStream) {
for tt in stream {
match tt {
- TokenTree::Ident(sym) => {
- entries.push(Entry::Ident(sym));
- }
- TokenTree::Punct(op) => {
- entries.push(Entry::Punct(op));
- }
- TokenTree::Literal(l) => {
- entries.push(Entry::Literal(l));
- }
- TokenTree::Group(g) => {
- // Record the index of the interesting entry, and store an
- // `End(null)` there temporarily.
- groups.push((entries.len(), g));
- entries.push(Entry::End(ptr::null()));
+ TokenTree::Ident(ident) => entries.push(Entry::Ident(ident)),
+ TokenTree::Punct(punct) => entries.push(Entry::Punct(punct)),
+ TokenTree::Literal(literal) => entries.push(Entry::Literal(literal)),
+ TokenTree::Group(group) => {
+ let group_start_index = entries.len();
+ entries.push(Entry::End); // we replace this below
+ Self::recursive_new(entries, group.stream());
+ let group_end_index = entries.len();
+ entries.push(Entry::End);
+ let group_end_offset = group_end_index - group_start_index;
+ entries[group_start_index] = Entry::Group(group, group_end_offset);
}
}
}
- // Add an `End` entry to the end with a reference to the enclosing token
- // stream which was passed in.
- entries.push(Entry::End(up));
-
- // NOTE: This is done to ensure that we don't accidentally modify the
- // length of the backing buffer. The backing buffer must remain at a
- // constant address after this point, as we are going to store a raw
- // pointer into it.
- let entries = entries.into_boxed_slice();
- let len = entries.len();
- // Convert boxed slice into a pointer to the first element early, to
- // avoid invalidating pointers into this slice when we move the Box.
- // See https://github.com/rust-lang/unsafe-code-guidelines/issues/326
- let entries = Box::into_raw(entries) as *mut Entry;
- for (idx, group) in groups {
- // We know that this index refers to one of the temporary
- // `End(null)` entries, and we know that the last entry is
- // `End(up)`, so the next index is also valid.
- let group_up = unsafe { entries.add(idx + 1) };
-
- // The end entry stored at the end of this Entry::Group should
- // point to the Entry which follows the Group in the list.
- let inner = Self::inner_new(group.stream(), group_up);
- unsafe { *entries.add(idx) = Entry::Group(group, inner) };
- }
-
- TokenBuffer { ptr: entries, len }
}
/// Creates a `TokenBuffer` containing all the tokens from the input
@@ -125,13 +75,19 @@ impl TokenBuffer {
/// Creates a `TokenBuffer` containing all the tokens from the input
/// `proc_macro2::TokenStream`.
pub fn new2(stream: TokenStream) -> Self {
- Self::inner_new(stream, ptr::null())
+ let mut entries = Vec::new();
+ Self::recursive_new(&mut entries, stream);
+ entries.push(Entry::End);
+ Self {
+ entries: entries.into_boxed_slice(),
+ }
}
/// Creates a cursor referencing the first token in the buffer and able to
/// traverse until the end of the buffer.
pub fn begin(&self) -> Cursor {
- unsafe { Cursor::create(self.ptr, self.ptr.add(self.len - 1)) }
+ let ptr = self.entries.as_ptr();
+ unsafe { Cursor::create(ptr, ptr.add(self.entries.len() - 1)) }
}
}
@@ -151,7 +107,7 @@ impl TokenBuffer {
pub struct Cursor<'a> {
// The current entry which the `Cursor` is pointing at.
ptr: *const Entry,
- // This is the only `Entry::End(..)` object which this cursor is allowed to
+ // This is the only `Entry::End` object which this cursor is allowed to
// point at. All other `End` objects are skipped over in `Cursor::create`.
scope: *const Entry,
// Cursor is covariant in 'a. This field ensures that our pointers are still
@@ -171,7 +127,7 @@ impl<'a> Cursor<'a> {
// object in global storage.
struct UnsafeSyncEntry(Entry);
unsafe impl Sync for UnsafeSyncEntry {}
- static EMPTY_ENTRY: UnsafeSyncEntry = UnsafeSyncEntry(Entry::End(0 as *const Entry));
+ static EMPTY_ENTRY: UnsafeSyncEntry = UnsafeSyncEntry(Entry::End);
Cursor {
ptr: &EMPTY_ENTRY.0,
@@ -184,15 +140,15 @@ impl<'a> Cursor<'a> {
/// `None`-delimited scopes when the cursor reaches the end of them,
/// allowing for them to be treated transparently.
unsafe fn create(mut ptr: *const Entry, scope: *const Entry) -> Self {
- // NOTE: If we're looking at a `End(..)`, we want to advance the cursor
+ // NOTE: If we're looking at a `End`, we want to advance the cursor
// past it, unless `ptr == scope`, which means that we're at the edge of
// our cursor's scope. We should only have `ptr != scope` at the exit
// from None-delimited groups entered with `ignore_none`.
- while let Entry::End(exit) = *ptr {
+ while let Entry::End = *ptr {
if ptr == scope {
break;
}
- ptr = exit;
+ ptr = ptr.add(1);
}
Cursor {
@@ -210,7 +166,10 @@ impl<'a> Cursor<'a> {
/// Bump the cursor to point at the next token after the current one. This
/// is undefined behavior if the cursor is currently looking at an
/// `Entry::End`.
- unsafe fn bump(self) -> Cursor<'a> {
+ ///
+ /// If the cursor is looking at an `Entry::Group`, the bumped cursor will
+ /// point at the first token in the group (with the same scope end).
+ unsafe fn bump_ignore_group(self) -> Cursor<'a> {
Cursor::create(self.ptr.offset(1), self.scope)
}
@@ -220,14 +179,9 @@ impl<'a> Cursor<'a> {
///
/// WARNING: This mutates its argument.
fn ignore_none(&mut self) {
- while let Entry::Group(group, buf) = self.entry() {
+ while let Entry::Group(group, _) = self.entry() {
if group.delimiter() == Delimiter::None {
- // NOTE: We call `Cursor::create` here to make sure that
- // situations where we should immediately exit the span after
- // entering it are handled correctly.
- unsafe {
- *self = Cursor::create(buf.ptr, self.scope);
- }
+ unsafe { *self = self.bump_ignore_group() };
} else {
break;
}
@@ -251,9 +205,12 @@ impl<'a> Cursor<'a> {
self.ignore_none();
}
- if let Entry::Group(group, buf) = self.entry() {
+ if let Entry::Group(group, end_offset) = self.entry() {
if group.delimiter() == delim {
- return Some((buf.begin(), group.span(), unsafe { self.bump() }));
+ let end_of_group = unsafe { self.ptr.add(*end_offset) };
+ let inside_of_group = unsafe { Cursor::create(self.ptr.add(1), end_of_group) };
+ let after_group = unsafe { Cursor::create(end_of_group, self.scope) };
+ return Some((inside_of_group, group.span(), after_group));
}
}
@@ -265,7 +222,7 @@ impl<'a> Cursor<'a> {
pub fn ident(mut self) -> Option<(Ident, Cursor<'a>)> {
self.ignore_none();
match self.entry() {
- Entry::Ident(ident) => Some((ident.clone(), unsafe { self.bump() })),
+ Entry::Ident(ident) => Some((ident.clone(), unsafe { self.bump_ignore_group() })),
_ => None,
}
}
@@ -275,7 +232,9 @@ impl<'a> Cursor<'a> {
pub fn punct(mut self) -> Option<(Punct, Cursor<'a>)> {
self.ignore_none();
match self.entry() {
- Entry::Punct(op) if op.as_char() != '\'' => Some((op.clone(), unsafe { self.bump() })),
+ Entry::Punct(punct) if punct.as_char() != '\'' => {
+ Some((punct.clone(), unsafe { self.bump_ignore_group() }))
+ }
_ => None,
}
}
@@ -285,7 +244,7 @@ impl<'a> Cursor<'a> {
pub fn literal(mut self) -> Option<(Literal, Cursor<'a>)> {
self.ignore_none();
match self.entry() {
- Entry::Literal(lit) => Some((lit.clone(), unsafe { self.bump() })),
+ Entry::Literal(literal) => Some((literal.clone(), unsafe { self.bump_ignore_group() })),
_ => None,
}
}
@@ -295,18 +254,14 @@ impl<'a> Cursor<'a> {
pub fn lifetime(mut self) -> Option<(Lifetime, Cursor<'a>)> {
self.ignore_none();
match self.entry() {
- Entry::Punct(op) if op.as_char() == '\'' && op.spacing() == Spacing::Joint => {
- let next = unsafe { self.bump() };
- match next.ident() {
- Some((ident, rest)) => {
- let lifetime = Lifetime {
- apostrophe: op.span(),
- ident,
- };
- Some((lifetime, rest))
- }
- None => None,
- }
+ Entry::Punct(punct) if punct.as_char() == '\'' && punct.spacing() == Spacing::Joint => {
+ let next = unsafe { self.bump_ignore_group() };
+ let (ident, rest) = next.ident()?;
+ let lifetime = Lifetime {
+ apostrophe: punct.span(),
+ ident,
+ };
+ Some((lifetime, rest))
}
_ => None,
}
@@ -332,15 +287,16 @@ impl<'a> Cursor<'a> {
/// This method does not treat `None`-delimited groups as transparent, and
/// will return a `Group(None, ..)` if the cursor is looking at one.
pub fn token_tree(self) -> Option<(TokenTree, Cursor<'a>)> {
- let tree = match self.entry() {
- Entry::Group(group, _) => group.clone().into(),
- Entry::Literal(lit) => lit.clone().into(),
- Entry::Ident(ident) => ident.clone().into(),
- Entry::Punct(op) => op.clone().into(),
- Entry::End(..) => return None,
+ let (tree, len) = match self.entry() {
+ Entry::Group(group, end_offset) => (group.clone().into(), *end_offset),
+ Entry::Literal(literal) => (literal.clone().into(), 1),
+ Entry::Ident(ident) => (ident.clone().into(), 1),
+ Entry::Punct(punct) => (punct.clone().into(), 1),
+ Entry::End => return None,
};
- Some((tree, unsafe { self.bump() }))
+ let rest = unsafe { Cursor::create(self.ptr.add(len), self.scope) };
+ Some((tree, rest))
}
/// Returns the `Span` of the current token, or `Span::call_site()` if this
@@ -348,10 +304,10 @@ impl<'a> Cursor<'a> {
pub fn span(self) -> Span {
match self.entry() {
Entry::Group(group, _) => group.span(),
- Entry::Literal(l) => l.span(),
- Entry::Ident(t) => t.span(),
- Entry::Punct(o) => o.span(),
- Entry::End(..) => Span::call_site(),
+ Entry::Literal(literal) => literal.span(),
+ Entry::Ident(ident) => ident.span(),
+ Entry::Punct(punct) => punct.span(),
+ Entry::End => Span::call_site(),
}
}
@@ -360,19 +316,22 @@ impl<'a> Cursor<'a> {
///
/// This method treats `'lifetimes` as a single token.
pub(crate) fn skip(self) -> Option<Cursor<'a>> {
- match self.entry() {
- Entry::End(..) => None,
+ let len = match self.entry() {
+ Entry::End => return None,
// Treat lifetimes as a single tt for the purposes of 'skip'.
- Entry::Punct(op) if op.as_char() == '\'' && op.spacing() == Spacing::Joint => {
- let next = unsafe { self.bump() };
- match next.entry() {
- Entry::Ident(_) => Some(unsafe { next.bump() }),
- _ => Some(next),
+ Entry::Punct(punct) if punct.as_char() == '\'' && punct.spacing() == Spacing::Joint => {
+ match unsafe { &*self.ptr.add(1) } {
+ Entry::Ident(_) => 2,
+ _ => 1,
}
}
- _ => Some(unsafe { self.bump() }),
- }
+
+ Entry::Group(_, end_offset) => *end_offset,
+ _ => 1,
+ };
+
+ Some(unsafe { Cursor::create(self.ptr.add(len), self.scope) })
}
}
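The rewrite above replaces the raw-pointer-based `Entry::End(*const Entry)` scheme with a single flat `Box<[Entry]>` in which each `Entry::Group` stores the offset of its matching `Entry::End`, so a cursor can step over or into a group with plain index arithmetic and no unsafe back-pointers are stored. A minimal sketch of that flattening with simplified stand-in types (`Tok` and this `Entry` are illustrative, not syn's real definitions):

    // Simplified stand-ins for proc-macro2 token trees and syn's Entry.
    #[derive(Debug)]
    enum Tok {
        Ident(String),
        Group(char, Vec<Tok>),
    }

    #[derive(Debug)]
    enum Entry {
        Ident(String),
        Group { delim: char, end_offset: usize },
        End,
    }

    // Mirrors TokenBuffer::recursive_new: push a placeholder for the group,
    // flatten its contents, push the group's End, then patch the placeholder
    // with the distance to that End.
    fn flatten(tree: &[Tok], out: &mut Vec<Entry>) {
        for tok in tree {
            match tok {
                Tok::Ident(s) => out.push(Entry::Ident(s.clone())),
                Tok::Group(delim, inner) => {
                    let start = out.len();
                    out.push(Entry::End); // placeholder, replaced below
                    flatten(inner, out);
                    let end = out.len();
                    out.push(Entry::End);
                    out[start] = Entry::Group {
                        delim: *delim,
                        end_offset: end - start,
                    };
                }
            }
        }
    }

    fn main() {
        let input = vec![
            Tok::Ident("foo".into()),
            Tok::Group('(', vec![Tok::Ident("bar".into())]),
        ];
        let mut entries = Vec::new();
        flatten(&input, &mut entries);
        entries.push(Entry::End); // outermost terminator, as in new2()
        println!("{entries:#?}");
    }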
diff --git a/vendor/syn/src/expr.rs b/vendor/syn/src/expr.rs
index cf0fa0af9..93a59b0e2 100644
--- a/vendor/syn/src/expr.rs
+++ b/vendor/syn/src/expr.rs
@@ -1371,7 +1371,9 @@ pub(crate) mod parsing {
});
} else if Precedence::Cast >= base && input.peek(Token![as]) {
let as_token: Token![as] = input.parse()?;
- let ty = input.call(Type::without_plus)?;
+ let allow_plus = false;
+ let allow_group_generic = false;
+ let ty = ty::parsing::ambig_ty(input, allow_plus, allow_group_generic)?;
check_cast(input)?;
lhs = Expr::Cast(ExprCast {
attrs: Vec::new(),
@@ -1381,7 +1383,9 @@ pub(crate) mod parsing {
});
} else if Precedence::Cast >= base && input.peek(Token![:]) && !input.peek(Token![::]) {
let colon_token: Token![:] = input.parse()?;
- let ty = input.call(Type::without_plus)?;
+ let allow_plus = false;
+ let allow_group_generic = false;
+ let ty = ty::parsing::ambig_ty(input, allow_plus, allow_group_generic)?;
check_cast(input)?;
lhs = Expr::Type(ExprType {
attrs: Vec::new(),
@@ -1429,7 +1433,9 @@ pub(crate) mod parsing {
});
} else if Precedence::Cast >= base && input.peek(Token![as]) {
let as_token: Token![as] = input.parse()?;
- let ty = input.call(Type::without_plus)?;
+ let allow_plus = false;
+ let allow_group_generic = false;
+ let ty = ty::parsing::ambig_ty(input, allow_plus, allow_group_generic)?;
check_cast(input)?;
lhs = Expr::Cast(ExprCast {
attrs: Vec::new(),
@@ -1727,7 +1733,10 @@ pub(crate) mod parsing {
|| input.peek(Token![move])
{
expr_closure(input, allow_struct).map(Expr::Closure)
- } else if input.peek(Token![for]) && input.peek2(Token![<]) && input.peek3(Lifetime) {
+ } else if input.peek(Token![for])
+ && input.peek2(Token![<])
+ && (input.peek3(Lifetime) || input.peek3(Token![>]))
+ {
let begin = input.fork();
input.parse::<BoundLifetimes>()?;
expr_closure(input, allow_struct)?;
@@ -2010,7 +2019,9 @@ pub(crate) mod parsing {
Expr::If(input.parse()?)
} else if input.peek(Token![while]) {
Expr::While(input.parse()?)
- } else if input.peek(Token![for]) {
+ } else if input.peek(Token![for])
+ && !(input.peek2(Token![<]) && (input.peek3(Lifetime) || input.peek3(Token![>])))
+ {
Expr::ForLoop(input.parse()?)
} else if input.peek(Token![loop]) {
Expr::Loop(input.parse()?)
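The expr.rs changes route cast and ascription types through `ambig_ty` with plus signs and group generics disallowed, and treat `for<...>` (including an empty binder) followed by a closure as a closure expression rather than the start of a `for` loop. A small check of both paths, assuming syn 1.0.102 with the `full` feature (the identifiers in the strings are arbitrary):

    fn main() {
        // Ordinary `as` casts still come back as Expr::Cast.
        let cast: syn::Expr = syn::parse_str("x as Vec<u8>").unwrap();
        assert!(matches!(cast, syn::Expr::Cast(_)));

        // A closure introduced by a lifetime binder parses as an expression;
        // syn 1.x keeps such closures as opaque verbatim tokens rather than a
        // dedicated variant, so only success is asserted here.
        let closure = syn::parse_str::<syn::Expr>("for<'a> |x: &'a str| x.len()");
        assert!(closure.is_ok());
    }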
diff --git a/vendor/syn/src/gen/clone.rs b/vendor/syn/src/gen/clone.rs
index 8de1cd8c9..a413e3ec7 100644
--- a/vendor/syn/src/gen/clone.rs
+++ b/vendor/syn/src/gen/clone.rs
@@ -910,9 +910,9 @@ impl Clone for GenericArgument {
match self {
GenericArgument::Lifetime(v0) => GenericArgument::Lifetime(v0.clone()),
GenericArgument::Type(v0) => GenericArgument::Type(v0.clone()),
+ GenericArgument::Const(v0) => GenericArgument::Const(v0.clone()),
GenericArgument::Binding(v0) => GenericArgument::Binding(v0.clone()),
GenericArgument::Constraint(v0) => GenericArgument::Constraint(v0.clone()),
- GenericArgument::Const(v0) => GenericArgument::Const(v0.clone()),
}
}
}
diff --git a/vendor/syn/src/gen/debug.rs b/vendor/syn/src/gen/debug.rs
index 4adf8c593..a1f0afa79 100644
--- a/vendor/syn/src/gen/debug.rs
+++ b/vendor/syn/src/gen/debug.rs
@@ -1268,6 +1268,11 @@ impl Debug for GenericArgument {
formatter.field(v0);
formatter.finish()
}
+ GenericArgument::Const(v0) => {
+ let mut formatter = formatter.debug_tuple("Const");
+ formatter.field(v0);
+ formatter.finish()
+ }
GenericArgument::Binding(v0) => {
let mut formatter = formatter.debug_tuple("Binding");
formatter.field(v0);
@@ -1278,11 +1283,6 @@ impl Debug for GenericArgument {
formatter.field(v0);
formatter.finish()
}
- GenericArgument::Const(v0) => {
- let mut formatter = formatter.debug_tuple("Const");
- formatter.field(v0);
- formatter.finish()
- }
}
}
}
diff --git a/vendor/syn/src/gen/eq.rs b/vendor/syn/src/gen/eq.rs
index 40fed0b89..20acb809d 100644
--- a/vendor/syn/src/gen/eq.rs
+++ b/vendor/syn/src/gen/eq.rs
@@ -878,13 +878,13 @@ impl PartialEq for GenericArgument {
(GenericArgument::Type(self0), GenericArgument::Type(other0)) => {
self0 == other0
}
- (GenericArgument::Binding(self0), GenericArgument::Binding(other0)) => {
+ (GenericArgument::Const(self0), GenericArgument::Const(other0)) => {
self0 == other0
}
- (GenericArgument::Constraint(self0), GenericArgument::Constraint(other0)) => {
+ (GenericArgument::Binding(self0), GenericArgument::Binding(other0)) => {
self0 == other0
}
- (GenericArgument::Const(self0), GenericArgument::Const(other0)) => {
+ (GenericArgument::Constraint(self0), GenericArgument::Constraint(other0)) => {
self0 == other0
}
_ => false,
diff --git a/vendor/syn/src/gen/fold.rs b/vendor/syn/src/gen/fold.rs
index 6e19e6f3a..98bb5794a 100644
--- a/vendor/syn/src/gen/fold.rs
+++ b/vendor/syn/src/gen/fold.rs
@@ -1787,15 +1787,15 @@ where
GenericArgument::Type(_binding_0) => {
GenericArgument::Type(f.fold_type(_binding_0))
}
+ GenericArgument::Const(_binding_0) => {
+ GenericArgument::Const(f.fold_expr(_binding_0))
+ }
GenericArgument::Binding(_binding_0) => {
GenericArgument::Binding(f.fold_binding(_binding_0))
}
GenericArgument::Constraint(_binding_0) => {
GenericArgument::Constraint(f.fold_constraint(_binding_0))
}
- GenericArgument::Const(_binding_0) => {
- GenericArgument::Const(f.fold_expr(_binding_0))
- }
}
}
#[cfg(feature = "full")]
diff --git a/vendor/syn/src/gen/hash.rs b/vendor/syn/src/gen/hash.rs
index f68a7630e..d0400e19d 100644
--- a/vendor/syn/src/gen/hash.rs
+++ b/vendor/syn/src/gen/hash.rs
@@ -1184,15 +1184,15 @@ impl Hash for GenericArgument {
state.write_u8(1u8);
v0.hash(state);
}
- GenericArgument::Binding(v0) => {
+ GenericArgument::Const(v0) => {
state.write_u8(2u8);
v0.hash(state);
}
- GenericArgument::Constraint(v0) => {
+ GenericArgument::Binding(v0) => {
state.write_u8(3u8);
v0.hash(state);
}
- GenericArgument::Const(v0) => {
+ GenericArgument::Constraint(v0) => {
state.write_u8(4u8);
v0.hash(state);
}
diff --git a/vendor/syn/src/gen/visit.rs b/vendor/syn/src/gen/visit.rs
index 051b65936..19ddd2e72 100644
--- a/vendor/syn/src/gen/visit.rs
+++ b/vendor/syn/src/gen/visit.rs
@@ -1974,15 +1974,15 @@ where
GenericArgument::Type(_binding_0) => {
v.visit_type(_binding_0);
}
+ GenericArgument::Const(_binding_0) => {
+ v.visit_expr(_binding_0);
+ }
GenericArgument::Binding(_binding_0) => {
v.visit_binding(_binding_0);
}
GenericArgument::Constraint(_binding_0) => {
v.visit_constraint(_binding_0);
}
- GenericArgument::Const(_binding_0) => {
- v.visit_expr(_binding_0);
- }
}
}
#[cfg(feature = "full")]
diff --git a/vendor/syn/src/gen/visit_mut.rs b/vendor/syn/src/gen/visit_mut.rs
index 3ddbe9c06..239709d19 100644
--- a/vendor/syn/src/gen/visit_mut.rs
+++ b/vendor/syn/src/gen/visit_mut.rs
@@ -1975,15 +1975,15 @@ where
GenericArgument::Type(_binding_0) => {
v.visit_type_mut(_binding_0);
}
+ GenericArgument::Const(_binding_0) => {
+ v.visit_expr_mut(_binding_0);
+ }
GenericArgument::Binding(_binding_0) => {
v.visit_binding_mut(_binding_0);
}
GenericArgument::Constraint(_binding_0) => {
v.visit_constraint_mut(_binding_0);
}
- GenericArgument::Const(_binding_0) => {
- v.visit_expr_mut(_binding_0);
- }
}
}
#[cfg(feature = "full")]
diff --git a/vendor/syn/src/item.rs b/vendor/syn/src/item.rs
index 1ce970ee2..a1ef7ab43 100644
--- a/vendor/syn/src/item.rs
+++ b/vendor/syn/src/item.rs
@@ -2765,7 +2765,6 @@ mod printing {
use super::*;
use crate::attr::FilterAttrs;
use crate::print::TokensOrDefault;
- use crate::punctuated::Pair;
use proc_macro2::TokenStream;
use quote::{ToTokens, TokenStreamExt};
@@ -3283,16 +3282,9 @@ mod printing {
self.generics.to_tokens(tokens);
self.paren_token.surround(tokens, |tokens| {
let mut last_is_variadic = false;
- for input in self.inputs.pairs() {
- match input {
- Pair::Punctuated(input, comma) => {
- maybe_variadic_to_tokens(input, tokens);
- comma.to_tokens(tokens);
- }
- Pair::End(input) => {
- last_is_variadic = maybe_variadic_to_tokens(input, tokens);
- }
- }
+ for pair in self.inputs.pairs() {
+ last_is_variadic = maybe_variadic_to_tokens(pair.value(), tokens);
+ pair.punct().to_tokens(tokens);
}
if self.variadic.is_some() && !last_is_variadic {
if !self.inputs.empty_or_trailing() {
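The printing change above replaces the explicit `Pair::Punctuated`/`Pair::End` match with `pair.value()` and `pair.punct()`, which yield the element and its optional trailing separator directly. A small illustration of those accessors on a `Punctuated` list (the identifiers are arbitrary):

    use syn::punctuated::Punctuated;
    use syn::Token;

    fn main() {
        let mut args: Punctuated<syn::Ident, Token![,]> = Punctuated::new();
        args.push(syn::parse_str("a").unwrap());
        args.push(syn::parse_str("b").unwrap());

        for pair in args.pairs() {
            // value() is the element; punct() is Some(_) for every element
            // followed by a separator and None for the last one here.
            println!("{} followed by comma: {}", pair.value(), pair.punct().is_some());
        }
    }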
diff --git a/vendor/syn/src/lib.rs b/vendor/syn/src/lib.rs
index 608c39876..81f03e1b5 100644
--- a/vendor/syn/src/lib.rs
+++ b/vendor/syn/src/lib.rs
@@ -250,15 +250,18 @@
//! dynamic library libproc_macro from rustc toolchain.
// Syn types in rustdoc of other crates get linked to here.
-#![doc(html_root_url = "https://docs.rs/syn/1.0.98")]
+#![doc(html_root_url = "https://docs.rs/syn/1.0.102")]
#![cfg_attr(doc_cfg, feature(doc_cfg))]
#![allow(non_camel_case_types)]
#![allow(
+ clippy::bool_to_int_with_if,
clippy::cast_lossless,
clippy::cast_possible_truncation,
+ clippy::cast_ptr_alignment,
clippy::default_trait_access,
clippy::doc_markdown,
clippy::expl_impl_clone_on_copy,
+ clippy::explicit_auto_deref,
clippy::if_not_else,
clippy::inherent_to_string,
clippy::large_enum_variant,
@@ -810,14 +813,6 @@ mod print;
////////////////////////////////////////////////////////////////////////////////
-// https://github.com/rust-lang/rust/issues/62830
-#[cfg(feature = "parsing")]
-mod rustdoc_workaround {
- pub use crate::parse::{self as parse_module};
-}
-
-////////////////////////////////////////////////////////////////////////////////
-
mod error;
pub use crate::error::{Error, Result};
diff --git a/vendor/syn/src/lit.rs b/vendor/syn/src/lit.rs
index 2600dc801..130b40ed1 100644
--- a/vendor/syn/src/lit.rs
+++ b/vendor/syn/src/lit.rs
@@ -224,7 +224,7 @@ impl LitStr {
// Parse string literal into a token stream with every span equal to the
// original literal's span.
- let mut tokens = crate::parse_str(&self.value())?;
+ let mut tokens = TokenStream::from_str(&self.value())?;
tokens = respan_token_stream(tokens, self.span());
parser.parse2(tokens)
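The lit.rs change only swaps the intermediate step in `LitStr`'s parse helper: the string contents are now tokenized with `TokenStream::from_str` before being respanned, instead of going through `crate::parse_str`. The observable behaviour of re-parsing a string literal's contents is unchanged; a minimal usage sketch, assuming syn 1.x with default features (the path in the literal is arbitrary):

    fn main() {
        let lit: syn::LitStr = syn::parse_str(r#""std::collections::HashMap""#).unwrap();
        // LitStr::parse re-parses the literal's contents as Rust syntax, with
        // every resulting token respanned to the literal's span.
        let path: syn::Path = lit.parse().unwrap();
        assert_eq!(path.segments.len(), 3);
    }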
diff --git a/vendor/syn/src/parse_macro_input.rs b/vendor/syn/src/parse_macro_input.rs
index 8e1a5ec6b..6163cd70a 100644
--- a/vendor/syn/src/parse_macro_input.rs
+++ b/vendor/syn/src/parse_macro_input.rs
@@ -4,7 +4,7 @@
/// Refer to the [`parse` module] documentation for more details about parsing
/// in Syn.
///
-/// [`parse` module]: crate::rustdoc_workaround::parse_module
+/// [`parse` module]: mod@crate::parse
///
/// <br>
///
@@ -51,7 +51,7 @@
/// This macro can also be used with the [`Parser` trait] for types that have
/// multiple ways that they can be parsed.
///
-/// [`Parser` trait]: crate::rustdoc_workaround::parse_module::Parser
+/// [`Parser` trait]: crate::parse::Parser
///
/// ```
/// # extern crate proc_macro;
diff --git a/vendor/syn/src/pat.rs b/vendor/syn/src/pat.rs
index fa0818c16..b279186aa 100644
--- a/vendor/syn/src/pat.rs
+++ b/vendor/syn/src/pat.rs
@@ -400,11 +400,11 @@ pub mod parsing {
}
if input.peek(token::Brace) {
- let pat = pat_struct(input, path)?;
+ let pat = pat_struct(begin.fork(), input, path)?;
if qself.is_some() {
Ok(Pat::Verbatim(verbatim::between(begin, input)))
} else {
- Ok(Pat::Struct(pat))
+ Ok(pat)
}
} else if input.peek(token::Paren) {
let pat = pat_tuple_struct(input, path)?;
@@ -465,13 +465,23 @@ pub mod parsing {
})
}
- fn pat_struct(input: ParseStream, path: Path) -> Result<PatStruct> {
+ fn pat_struct(begin: ParseBuffer, input: ParseStream, path: Path) -> Result<Pat> {
let content;
let brace_token = braced!(content in input);
let mut fields = Punctuated::new();
- while !content.is_empty() && !content.peek(Token![..]) {
- let value = content.call(field_pat)?;
+ let mut dot2_token = None;
+ while !content.is_empty() {
+ let attrs = content.call(Attribute::parse_outer)?;
+ if content.peek(Token![..]) {
+ dot2_token = Some(content.parse()?);
+ if !attrs.is_empty() {
+ return Ok(Pat::Verbatim(verbatim::between(begin, input)));
+ }
+ break;
+ }
+ let mut value = content.call(field_pat)?;
+ value.attrs = attrs;
fields.push_value(value);
if content.is_empty() {
break;
@@ -480,19 +490,13 @@ pub mod parsing {
fields.push_punct(punct);
}
- let dot2_token = if fields.empty_or_trailing() && content.peek(Token![..]) {
- Some(content.parse()?)
- } else {
- None
- };
-
- Ok(PatStruct {
+ Ok(Pat::Struct(PatStruct {
attrs: Vec::new(),
path,
brace_token,
fields,
dot2_token,
- })
+ }))
}
impl Member {
@@ -505,7 +509,6 @@ pub mod parsing {
}
fn field_pat(input: ParseStream) -> Result<FieldPat> {
- let attrs = input.call(Attribute::parse_outer)?;
let boxed: Option<Token![box]> = input.parse()?;
let by_ref: Option<Token![ref]> = input.parse()?;
let mutability: Option<Token![mut]> = input.parse()?;
@@ -515,7 +518,7 @@ pub mod parsing {
|| member.is_unnamed()
{
return Ok(FieldPat {
- attrs,
+ attrs: Vec::new(),
member,
colon_token: input.parse()?,
pat: Box::new(multi_pat_with_leading_vert(input)?),
@@ -544,7 +547,7 @@ pub mod parsing {
}
Ok(FieldPat {
- attrs,
+ attrs: Vec::new(),
member: Member::Named(ident),
colon_token: None,
pat: Box::new(pat),
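The pat.rs rework moves outer-attribute handling from `field_pat` into `pat_struct` and records the `..` token while walking the fields, so a rest pattern preceded by attributes is accepted and surfaces as `Pat::Verbatim` instead of being rejected. Ordinary struct patterns are unaffected; a quick check, assuming syn 1.0.102 with the `full` feature (the `Point` pattern is arbitrary):

    fn main() {
        // Ordinary struct patterns with a rest still parse as Pat::Struct.
        let pat: syn::Pat = syn::parse_str("Point { x, .. }").unwrap();
        assert!(matches!(pat, syn::Pat::Struct(_)));

        // Per this patch, attributes on the `..` are accepted (and expected to
        // come back as Pat::Verbatim); only successful parsing is asserted.
        let rest_attr = syn::parse_str::<syn::Pat>("Point { #[cfg(test)] .. }");
        assert!(rest_attr.is_ok());
    }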
diff --git a/vendor/syn/src/path.rs b/vendor/syn/src/path.rs
index 00be352b1..742273afd 100644
--- a/vendor/syn/src/path.rs
+++ b/vendor/syn/src/path.rs
@@ -109,16 +109,16 @@ ast_enum! {
Lifetime(Lifetime),
/// A type argument.
Type(Type),
- /// A binding (equality constraint) on an associated type: the `Item =
- /// u8` in `Iterator<Item = u8>`.
- Binding(Binding),
- /// An associated type bound: `Iterator<Item: Display>`.
- Constraint(Constraint),
/// A const expression. Must be inside of a block.
///
/// NOTE: Identity expressions are represented as Type arguments, as
/// they are indistinguishable syntactically.
Const(Expr),
+ /// A binding (equality constraint) on an associated type: the `Item =
+ /// u8` in `Iterator<Item = u8>`.
+ Binding(Binding),
+ /// An associated type bound: `Iterator<Item: Display>`.
+ Constraint(Constraint),
}
}
@@ -729,8 +729,6 @@ pub(crate) mod printing {
match self {
GenericArgument::Lifetime(lt) => lt.to_tokens(tokens),
GenericArgument::Type(ty) => ty.to_tokens(tokens),
- GenericArgument::Binding(tb) => tb.to_tokens(tokens),
- GenericArgument::Constraint(tc) => tc.to_tokens(tokens),
GenericArgument::Const(e) => match *e {
Expr::Lit(_) => e.to_tokens(tokens),
@@ -746,6 +744,8 @@ pub(crate) mod printing {
e.to_tokens(tokens);
}),
},
+ GenericArgument::Binding(tb) => tb.to_tokens(tokens),
+ GenericArgument::Constraint(tc) => tc.to_tokens(tokens),
}
}
}
@@ -756,11 +756,8 @@ pub(crate) mod printing {
self.colon2_token.to_tokens(tokens);
self.lt_token.to_tokens(tokens);
- // Print lifetimes before types and consts, all before bindings,
- // regardless of their order in self.args.
- //
- // TODO: ordering rules for const arguments vs type arguments have
- // not been settled yet. https://github.com/rust-lang/rust/issues/44580
+ // Print lifetimes before types/consts/bindings, regardless of their
+ // order in self.args.
let mut trailing_or_empty = true;
for param in self.args.pairs() {
match **param.value() {
@@ -769,37 +766,24 @@ pub(crate) mod printing {
trailing_or_empty = param.punct().is_some();
}
GenericArgument::Type(_)
- | GenericArgument::Binding(_)
- | GenericArgument::Constraint(_)
- | GenericArgument::Const(_) => {}
- }
- }
- for param in self.args.pairs() {
- match **param.value() {
- GenericArgument::Type(_) | GenericArgument::Const(_) => {
- if !trailing_or_empty {
- <Token![,]>::default().to_tokens(tokens);
- }
- param.to_tokens(tokens);
- trailing_or_empty = param.punct().is_some();
- }
- GenericArgument::Lifetime(_)
+ | GenericArgument::Const(_)
| GenericArgument::Binding(_)
| GenericArgument::Constraint(_) => {}
}
}
for param in self.args.pairs() {
match **param.value() {
- GenericArgument::Binding(_) | GenericArgument::Constraint(_) => {
+ GenericArgument::Type(_)
+ | GenericArgument::Const(_)
+ | GenericArgument::Binding(_)
+ | GenericArgument::Constraint(_) => {
if !trailing_or_empty {
<Token![,]>::default().to_tokens(tokens);
}
param.to_tokens(tokens);
trailing_or_empty = param.punct().is_some();
}
- GenericArgument::Lifetime(_)
- | GenericArgument::Type(_)
- | GenericArgument::Const(_) => {}
+ GenericArgument::Lifetime(_) => {}
}
}
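With the printing change above, only lifetime arguments are still hoisted to the front of an angle-bracketed argument list; type, const, binding and constraint arguments now keep the order in which they were written instead of bindings being pushed to the end. A small demonstration, assuming syn 1.x plus the quote crate for `ToTokens` (the `Foo` path is arbitrary):

    use quote::ToTokens;

    fn main() {
        // The binding is written before the type argument on purpose; per
        // this patch it is also printed before it, rather than being moved
        // to the end of the list.
        let path: syn::Path = syn::parse_str("Foo<'a, Item = u8, T>").unwrap();
        println!("{}", path.to_token_stream());
    }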
diff --git a/vendor/syn/src/stmt.rs b/vendor/syn/src/stmt.rs
index 3e2c71bdd..58bd013ec 100644
--- a/vendor/syn/src/stmt.rs
+++ b/vendor/syn/src/stmt.rs
@@ -175,7 +175,11 @@ pub mod parsing {
|| input.peek(Token![crate]) && !input.peek2(Token![::])
|| input.peek(Token![extern])
|| input.peek(Token![use])
- || input.peek(Token![static]) && (input.peek2(Token![mut]) || input.peek2(Ident))
+ || input.peek(Token![static])
+ && (input.peek2(Token![mut])
+ || input.peek2(Ident)
+ && !(input.peek2(Token![async])
+ && (input.peek3(Token![move]) || input.peek3(Token![|]))))
|| input.peek(Token![const]) && !input.peek2(token::Brace)
|| input.peek(Token![unsafe]) && !input.peek2(token::Brace)
|| input.peek(Token![async])
diff --git a/vendor/syn/src/ty.rs b/vendor/syn/src/ty.rs
index 0f1341fdd..4068be3c7 100644
--- a/vendor/syn/src/ty.rs
+++ b/vendor/syn/src/ty.rs
@@ -343,7 +343,8 @@ pub mod parsing {
impl Parse for Type {
fn parse(input: ParseStream) -> Result<Self> {
let allow_plus = true;
- ambig_ty(input, allow_plus)
+ let allow_group_generic = true;
+ ambig_ty(input, allow_plus, allow_group_generic)
}
}
@@ -356,11 +357,16 @@ pub mod parsing {
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub fn without_plus(input: ParseStream) -> Result<Self> {
let allow_plus = false;
- ambig_ty(input, allow_plus)
+ let allow_group_generic = true;
+ ambig_ty(input, allow_plus, allow_group_generic)
}
}
- fn ambig_ty(input: ParseStream, allow_plus: bool) -> Result<Type> {
+ pub(crate) fn ambig_ty(
+ input: ParseStream,
+ allow_plus: bool,
+ allow_group_generic: bool,
+ ) -> Result<Type> {
let begin = input.fork();
if input.peek(token::Group) {
@@ -381,7 +387,9 @@ pub mod parsing {
path: Path::parse_helper(input, false)?,
}));
}
- } else if input.peek(Token![<]) || input.peek(Token![::]) && input.peek3(Token![<]) {
+ } else if input.peek(Token![<]) && allow_group_generic
+ || input.peek(Token![::]) && input.peek3(Token![<])
+ {
if let Type::Path(mut ty) = *group.elem {
let arguments = &mut ty.path.segments.last_mut().unwrap().arguments;
if let PathArguments::None = arguments {
@@ -537,9 +545,15 @@ pub mod parsing {
|| lookahead.peek(Token![::])
|| lookahead.peek(Token![<])
{
- if input.peek(Token![dyn]) {
- let trait_object = TypeTraitObject::parse(input, allow_plus)?;
- return Ok(Type::TraitObject(trait_object));
+ let dyn_token: Option<Token![dyn]> = input.parse()?;
+ if dyn_token.is_some() {
+ let star_token: Option<Token![*]> = input.parse()?;
+ let bounds = TypeTraitObject::parse_bounds(input, allow_plus)?;
+ return Ok(if star_token.is_some() {
+ Type::Verbatim(verbatim::between(begin, input))
+ } else {
+ Type::TraitObject(TypeTraitObject { dyn_token, bounds })
+ });
}
let ty: TypePath = input.parse()?;
@@ -819,15 +833,28 @@ pub mod parsing {
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for TypePath {
fn parse(input: ParseStream) -> Result<Self> {
- let (qself, mut path) = path::parsing::qpath(input, false)?;
+ let expr_style = false;
+ let (qself, mut path) = path::parsing::qpath(input, expr_style)?;
- if path.segments.last().unwrap().arguments.is_empty()
+ while path.segments.last().unwrap().arguments.is_empty()
&& (input.peek(token::Paren) || input.peek(Token![::]) && input.peek3(token::Paren))
{
input.parse::<Option<Token![::]>>()?;
let args: ParenthesizedGenericArguments = input.parse()?;
+ let allow_associated_type = cfg!(feature = "full")
+ && match &args.output {
+ ReturnType::Default => true,
+ ReturnType::Type(_, ty) => match **ty {
+ // TODO: probably some of the other kinds allow this too.
+ Type::Paren(_) => true,
+ _ => false,
+ },
+ };
let parenthesized = PathArguments::Parenthesized(args);
path.segments.last_mut().unwrap().arguments = parenthesized;
+ if allow_associated_type {
+ Path::parse_rest(input, &mut path, expr_style)?;
+ }
}
Ok(TypePath { qself, path })
@@ -844,7 +871,8 @@ pub mod parsing {
pub(crate) fn parse(input: ParseStream, allow_plus: bool) -> Result<Self> {
if input.peek(Token![->]) {
let arrow = input.parse()?;
- let ty = ambig_ty(input, allow_plus)?;
+ let allow_group_generic = true;
+ let ty = ambig_ty(input, allow_plus, allow_group_generic)?;
Ok(ReturnType::Type(arrow, Box::new(ty)))
} else {
Ok(ReturnType::Default)
@@ -967,7 +995,10 @@ pub mod parsing {
let content;
Ok(TypeParen {
paren_token: parenthesized!(content in input),
- elem: Box::new(ambig_ty(&content, allow_plus)?),
+ elem: Box::new({
+ let allow_group_generic = true;
+ ambig_ty(&content, allow_plus, allow_group_generic)?
+ }),
})
}
}
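The ty.rs changes thread a new `allow_group_generic` flag through `ambig_ty` and accept the experimental `dyn* Trait` syntax, which is preserved as opaque `Type::Verbatim` tokens rather than a `TypeTraitObject`. A quick check of both forms, assuming syn 1.0.102 with default features (the bounds are arbitrary):

    fn main() {
        // Plain trait objects are unchanged.
        let obj: syn::Type = syn::parse_str("dyn Send + Sync").unwrap();
        assert!(matches!(obj, syn::Type::TraitObject(_)));

        // `dyn*` is accepted per this patch but kept as verbatim tokens.
        let dyn_star: syn::Type = syn::parse_str("dyn* Send").unwrap();
        assert!(matches!(dyn_star, syn::Type::Verbatim(_)));
    }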
diff --git a/vendor/syn/tests/.gitignore b/vendor/syn/tests/.gitignore
deleted file mode 100644
index 291ed43a2..000000000
--- a/vendor/syn/tests/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-/*.pending-snap
diff --git a/vendor/syn/tests/common/eq.rs b/vendor/syn/tests/common/eq.rs
index a6c379165..a53146241 100644
--- a/vendor/syn/tests/common/eq.rs
+++ b/vendor/syn/tests/common/eq.rs
@@ -3,6 +3,7 @@
extern crate rustc_ast;
extern crate rustc_data_structures;
extern crate rustc_span;
+extern crate thin_vec;
use rustc_ast::ast::AngleBracketedArg;
use rustc_ast::ast::AngleBracketedArgs;
@@ -19,11 +20,13 @@ use rustc_ast::ast::AttrStyle;
use rustc_ast::ast::Attribute;
use rustc_ast::ast::BareFnTy;
use rustc_ast::ast::BinOpKind;
-use rustc_ast::ast::BindingMode;
+use rustc_ast::ast::BindingAnnotation;
use rustc_ast::ast::Block;
use rustc_ast::ast::BlockCheckMode;
use rustc_ast::ast::BorrowKind;
+use rustc_ast::ast::ByRef;
use rustc_ast::ast::CaptureBy;
+use rustc_ast::ast::ClosureBinder;
use rustc_ast::ast::Const;
use rustc_ast::ast::Crate;
use rustc_ast::ast::Defaultness;
@@ -81,6 +84,7 @@ use rustc_ast::ast::Movability;
use rustc_ast::ast::MutTy;
use rustc_ast::ast::Mutability;
use rustc_ast::ast::NodeId;
+use rustc_ast::ast::NormalAttr;
use rustc_ast::ast::Param;
use rustc_ast::ast::ParenthesizedArgs;
use rustc_ast::ast::Pat;
@@ -126,14 +130,14 @@ use rustc_ast::ast::WhereRegionPredicate;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, CommentKind, Delimiter, Nonterminal, Token, TokenKind};
use rustc_ast::tokenstream::{
- AttrAnnotatedTokenStream, AttrAnnotatedTokenTree, AttributesData, DelimSpan, LazyTokenStream,
- Spacing, TokenStream, TokenTree,
+ AttrTokenStream, AttrTokenTree, AttributesData, DelimSpan, LazyAttrTokenStream, Spacing,
+ TokenStream, TokenTree,
};
use rustc_data_structures::sync::Lrc;
-use rustc_data_structures::thin_vec::ThinVec;
use rustc_span::source_map::Spanned;
use rustc_span::symbol::{sym, Ident};
use rustc_span::{Span, Symbol, SyntaxContext, DUMMY_SP};
+use thin_vec::ThinVec;
pub trait SpanlessEq {
fn eq(&self, other: &Self) -> bool;
@@ -145,7 +149,7 @@ impl<T: ?Sized + SpanlessEq> SpanlessEq for Box<T> {
}
}
-impl<T: SpanlessEq> SpanlessEq for P<T> {
+impl<T: ?Sized + SpanlessEq> SpanlessEq for P<T> {
fn eq(&self, other: &Self) -> bool {
SpanlessEq::eq(&**self, &**other)
}
@@ -400,11 +404,12 @@ spanless_eq_struct!(AngleBracketedArgs; span args);
spanless_eq_struct!(AnonConst; id value);
spanless_eq_struct!(Arm; attrs pat guard body span id is_placeholder);
spanless_eq_struct!(AssocConstraint; id ident gen_args kind span);
-spanless_eq_struct!(AttrAnnotatedTokenStream; 0);
spanless_eq_struct!(AttrItem; path args tokens);
+spanless_eq_struct!(AttrTokenStream; 0);
spanless_eq_struct!(Attribute; kind id style span);
spanless_eq_struct!(AttributesData; attrs tokens);
spanless_eq_struct!(BareFnTy; unsafety ext generic_params decl decl_span);
+spanless_eq_struct!(BindingAnnotation; 0 1);
spanless_eq_struct!(Block; stmts id rules span tokens could_be_bare_literal);
spanless_eq_struct!(Crate; attrs items spans id is_placeholder);
spanless_eq_struct!(EnumDef; variants);
@@ -424,13 +429,14 @@ spanless_eq_struct!(InlineAsmSym; id qself path);
spanless_eq_struct!(Item<K>; attrs id span vis ident kind !tokens);
spanless_eq_struct!(Label; ident);
spanless_eq_struct!(Lifetime; id ident);
-spanless_eq_struct!(Lit; token kind span);
+spanless_eq_struct!(Lit; token_lit kind span);
spanless_eq_struct!(Local; pat ty kind id span attrs !tokens);
spanless_eq_struct!(MacCall; path args prior_type_ascription);
spanless_eq_struct!(MacCallStmt; mac style attrs tokens);
spanless_eq_struct!(MacroDef; body macro_rules);
spanless_eq_struct!(ModSpans; !inner_span !inject_use_span);
spanless_eq_struct!(MutTy; ty mutbl);
+spanless_eq_struct!(NormalAttr; item tokens);
spanless_eq_struct!(ParenthesizedArgs; span inputs inputs_span output);
spanless_eq_struct!(Pat; id kind span tokens);
spanless_eq_struct!(PatField; ident pat is_shorthand attrs id span is_placeholder);
@@ -452,23 +458,24 @@ spanless_eq_struct!(Variant; attrs id span !vis ident data disr_expr is_placehol
spanless_eq_struct!(Visibility; kind span tokens);
spanless_eq_struct!(WhereBoundPredicate; span bound_generic_params bounded_ty bounds);
spanless_eq_struct!(WhereClause; has_where_token predicates span);
-spanless_eq_struct!(WhereEqPredicate; id span lhs_ty rhs_ty);
+spanless_eq_struct!(WhereEqPredicate; span lhs_ty rhs_ty);
spanless_eq_struct!(WhereRegionPredicate; span lifetime bounds);
spanless_eq_struct!(token::Lit; kind symbol suffix);
spanless_eq_enum!(AngleBracketedArg; Arg(0) Constraint(0));
spanless_eq_enum!(AssocItemKind; Const(0 1 2) Fn(0) TyAlias(0) MacCall(0));
spanless_eq_enum!(AssocConstraintKind; Equality(term) Bound(bounds));
spanless_eq_enum!(Async; Yes(span closure_id return_impl_trait_id) No);
-spanless_eq_enum!(AttrAnnotatedTokenTree; Token(0) Delimited(0 1 2) Attributes(0));
spanless_eq_enum!(AttrStyle; Outer Inner);
+spanless_eq_enum!(AttrTokenTree; Token(0 1) Delimited(0 1 2) Attributes(0));
spanless_eq_enum!(BinOpKind; Add Sub Mul Div Rem And Or BitXor BitAnd BitOr Shl Shr Eq Lt Le Ne Ge Gt);
-spanless_eq_enum!(BindingMode; ByRef(0) ByValue(0));
spanless_eq_enum!(BlockCheckMode; Default Unsafe(0));
spanless_eq_enum!(BorrowKind; Ref Raw);
+spanless_eq_enum!(ByRef; Yes No);
spanless_eq_enum!(CaptureBy; Value Ref);
+spanless_eq_enum!(ClosureBinder; NotPresent For(span generic_params));
spanless_eq_enum!(Const; Yes(0) No);
spanless_eq_enum!(Defaultness; Default(0) Final);
-spanless_eq_enum!(Extern; None Implicit Explicit(0));
+spanless_eq_enum!(Extern; None Implicit(0) Explicit(0 1));
spanless_eq_enum!(FloatTy; F32 F64);
spanless_eq_enum!(FnRetTy; Default(0) Ty(0));
spanless_eq_enum!(ForeignItemKind; Static(0 1 2) Fn(0) TyAlias(0) MacCall(0));
@@ -498,24 +505,25 @@ spanless_eq_enum!(StmtKind; Local(0) Item(0) Expr(0) Semi(0) Empty MacCall(0));
spanless_eq_enum!(StrStyle; Cooked Raw(0));
spanless_eq_enum!(StructRest; Base(0) Rest(0) None);
spanless_eq_enum!(Term; Ty(0) Const(0));
-spanless_eq_enum!(TokenTree; Token(0) Delimited(0 1 2));
+spanless_eq_enum!(TokenTree; Token(0 1) Delimited(0 1 2));
spanless_eq_enum!(TraitBoundModifier; None Maybe MaybeConst MaybeConstMaybe);
-spanless_eq_enum!(TraitObjectSyntax; Dyn None);
+spanless_eq_enum!(TraitObjectSyntax; Dyn DynStar None);
spanless_eq_enum!(UintTy; Usize U8 U16 U32 U64 U128);
spanless_eq_enum!(UnOp; Deref Not Neg);
spanless_eq_enum!(Unsafe; Yes(0) No);
spanless_eq_enum!(UnsafeSource; CompilerGenerated UserProvided);
spanless_eq_enum!(UseTreeKind; Simple(0 1 2) Nested(0) Glob);
spanless_eq_enum!(VariantData; Struct(0 1) Tuple(0 1) Unit(0));
-spanless_eq_enum!(VisibilityKind; Public Restricted(path id) Inherited);
+spanless_eq_enum!(VisibilityKind; Public Restricted(path id shorthand) Inherited);
spanless_eq_enum!(WherePredicate; BoundPredicate(0) RegionPredicate(0) EqPredicate(0));
spanless_eq_enum!(ExprKind; Box(0) Array(0) ConstBlock(0) Call(0 1)
- MethodCall(0 1 2) Tup(0) Binary(0 1 2) Unary(0 1) Lit(0) Cast(0 1) Type(0 1)
- Let(0 1 2) If(0 1 2) While(0 1 2) ForLoop(0 1 2 3) Loop(0 1) Match(0 1)
- Closure(0 1 2 3 4 5) Block(0 1) Async(0 1 2) Await(0) TryBlock(0)
- Assign(0 1 2) AssignOp(0 1 2) Field(0 1) Index(0 1) Underscore Range(0 1 2)
- Path(0 1) AddrOf(0 1 2) Break(0 1) Continue(0) Ret(0) InlineAsm(0)
- MacCall(0) Struct(0) Repeat(0 1) Paren(0) Try(0) Yield(0) Yeet(0) Err);
+ MethodCall(0 1 2 3) Tup(0) Binary(0 1 2) Unary(0 1) Lit(0) Cast(0 1)
+ Type(0 1) Let(0 1 2) If(0 1 2) While(0 1 2) ForLoop(0 1 2 3) Loop(0 1)
+ Match(0 1) Closure(0 1 2 3 4 5 6) Block(0 1) Async(0 1 2) Await(0)
+ TryBlock(0) Assign(0 1 2) AssignOp(0 1 2) Field(0 1) Index(0 1) Underscore
+ Range(0 1 2) Path(0 1) AddrOf(0 1 2) Break(0 1) Continue(0) Ret(0)
+ InlineAsm(0) MacCall(0) Struct(0) Repeat(0 1) Paren(0) Try(0) Yield(0)
+ Yeet(0) Err);
spanless_eq_enum!(InlineAsmOperand; In(reg expr) Out(reg late expr)
InOut(reg late expr) SplitInOut(reg late in_expr out_expr) Const(anon_const)
Sym(sym));
@@ -523,7 +531,7 @@ spanless_eq_enum!(ItemKind; ExternCrate(0) Use(0) Static(0 1 2) Const(0 1 2)
Fn(0) Mod(0 1) ForeignMod(0) GlobalAsm(0) TyAlias(0) Enum(0 1) Struct(0 1)
Union(0 1) Trait(0) TraitAlias(0 1) Impl(0) MacCall(0) MacroDef(0));
spanless_eq_enum!(LitKind; Str(0 1) ByteStr(0) Byte(0) Char(0) Int(0 1)
- Float(0 1) Bool(0) Err(0));
+ Float(0 1) Bool(0) Err);
spanless_eq_enum!(PatKind; Wild Ident(0 1 2) Struct(0 1 2 3) TupleStruct(0 1 2)
Or(0) Path(0 1) Tuple(0) Box(0) Ref(0 1) Lit(0) Range(0 1 2) Slice(0) Rest
Paren(0) MacCall(0));
@@ -610,7 +618,7 @@ impl SpanlessEq for TokenStream {
if SpanlessEq::eq(this, other) {
continue;
}
- if let (TokenTree::Token(this), TokenTree::Token(other)) = (this, other) {
+ if let (TokenTree::Token(this, _), TokenTree::Token(other, _)) = (this, other) {
if match (&this.kind, &other.kind) {
(TokenKind::Literal(this), TokenKind::Literal(other)) => {
SpanlessEq::eq(this, other)
@@ -641,10 +649,13 @@ fn doc_comment<'a>(
AttrStyle::Inner => true,
} {
match trees.next() {
- Some(TokenTree::Token(Token {
- kind: TokenKind::Not,
- span: _,
- })) => {}
+ Some(TokenTree::Token(
+ Token {
+ kind: TokenKind::Not,
+ span: _,
+ },
+ _spacing,
+ )) => {}
_ => return false,
}
}
@@ -654,21 +665,27 @@ fn doc_comment<'a>(
};
let mut trees = stream.trees();
match trees.next() {
- Some(TokenTree::Token(Token {
- kind: TokenKind::Ident(symbol, false),
- span: _,
- })) if *symbol == sym::doc => {}
+ Some(TokenTree::Token(
+ Token {
+ kind: TokenKind::Ident(symbol, false),
+ span: _,
+ },
+ _spacing,
+ )) if *symbol == sym::doc => {}
_ => return false,
}
match trees.next() {
- Some(TokenTree::Token(Token {
- kind: TokenKind::Eq,
- span: _,
- })) => {}
+ Some(TokenTree::Token(
+ Token {
+ kind: TokenKind::Eq,
+ span: _,
+ },
+ _spacing,
+ )) => {}
_ => return false,
}
match trees.next() {
- Some(TokenTree::Token(token)) => {
+ Some(TokenTree::Token(token, _spacing)) => {
is_escaped_literal_token(token, unescaped) && trees.next().is_none()
}
_ => false,
@@ -680,7 +697,7 @@ fn is_escaped_literal_token(token: &Token, unescaped: Symbol) -> bool {
Token {
kind: TokenKind::Literal(lit),
span: _,
- } => match Lit::from_lit_token(*lit, DUMMY_SP) {
+ } => match Lit::from_token_lit(*lit, DUMMY_SP) {
Ok(lit) => is_escaped_literal(&lit, unescaped),
Err(_) => false,
},
@@ -711,7 +728,7 @@ fn is_escaped_literal_macro_arg(arg: &MacArgsEq, unescaped: Symbol) -> bool {
fn is_escaped_literal(lit: &Lit, unescaped: Symbol) -> bool {
match lit {
Lit {
- token:
+ token_lit:
token::Lit {
kind: token::LitKind::Str,
symbol: _,
@@ -724,10 +741,10 @@ fn is_escaped_literal(lit: &Lit, unescaped: Symbol) -> bool {
}
}
-impl SpanlessEq for LazyTokenStream {
+impl SpanlessEq for LazyAttrTokenStream {
fn eq(&self, other: &Self) -> bool {
- let this = self.create_token_stream();
- let other = other.create_token_stream();
+ let this = self.to_attr_token_stream();
+ let other = other.to_attr_token_stream();
SpanlessEq::eq(&this, &other)
}
}
@@ -735,26 +752,26 @@ impl SpanlessEq for LazyTokenStream {
impl SpanlessEq for AttrKind {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
- (AttrKind::Normal(item, tokens), AttrKind::Normal(item2, tokens2)) => {
- SpanlessEq::eq(item, item2) && SpanlessEq::eq(tokens, tokens2)
+ (AttrKind::Normal(normal), AttrKind::Normal(normal2)) => {
+ SpanlessEq::eq(normal, normal2)
}
(AttrKind::DocComment(kind, symbol), AttrKind::DocComment(kind2, symbol2)) => {
SpanlessEq::eq(kind, kind2) && SpanlessEq::eq(symbol, symbol2)
}
- (AttrKind::DocComment(kind, unescaped), AttrKind::Normal(item2, _tokens)) => {
+ (AttrKind::DocComment(kind, unescaped), AttrKind::Normal(normal2)) => {
match kind {
CommentKind::Line | CommentKind::Block => {}
}
let path = Path::from_ident(Ident::with_dummy_span(sym::doc));
- SpanlessEq::eq(&path, &item2.path)
- && match &item2.args {
+ SpanlessEq::eq(&path, &normal2.item.path)
+ && match &normal2.item.args {
MacArgs::Empty | MacArgs::Delimited(..) => false,
MacArgs::Eq(_span, token) => {
is_escaped_literal_macro_arg(token, *unescaped)
}
}
}
- (AttrKind::Normal(..), AttrKind::DocComment(..)) => SpanlessEq::eq(other, self),
+ (AttrKind::Normal(_), AttrKind::DocComment(..)) => SpanlessEq::eq(other, self),
}
}
}
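
The eq.rs hunks track renames in rustc's nightly AST (`BindingMode` to `BindingAnnotation`, `LazyTokenStream` to `LazyAttrTokenStream`, `ThinVec` now coming from the external `thin_vec` crate, a `Spacing` field added to `TokenTree::Token`) without changing what the comparison computes. For orientation, a stripped-down sketch of the span-insensitive equality pattern this file implements, written against plain std types rather than rustc's (the `Span` and `Ident` types below are stand-ins, not the real AST):

    #[derive(Debug)]
    struct Span(u32, u32);

    struct Ident {
        name: String,
        #[allow(dead_code)]
        span: Span,
    }

    trait SpanlessEq {
        fn eq(&self, other: &Self) -> bool;
    }

    impl SpanlessEq for Ident {
        // Compare everything except source positions.
        fn eq(&self, other: &Self) -> bool {
            self.name == other.name
        }
    }

    impl<T: SpanlessEq> SpanlessEq for Vec<T> {
        fn eq(&self, other: &Self) -> bool {
            self.len() == other.len()
                && self.iter().zip(other).all(|(a, b)| SpanlessEq::eq(a, b))
        }
    }

    fn main() {
        let a = Ident { name: "x".to_owned(), span: Span(0, 1) };
        let b = Ident { name: "x".to_owned(), span: Span(7, 8) };
        assert!(SpanlessEq::eq(&a, &b)); // spans differ, names agree
    }
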
diff --git a/vendor/syn/tests/debug/gen.rs b/vendor/syn/tests/debug/gen.rs
index a49ee6c92..cfd63d117 100644
--- a/vendor/syn/tests/debug/gen.rs
+++ b/vendor/syn/tests/debug/gen.rs
@@ -2215,22 +2215,22 @@ impl Debug for Lite<syn::GenericArgument> {
formatter.write_str(")")?;
Ok(())
}
- syn::GenericArgument::Binding(_val) => {
- formatter.write_str("Binding")?;
+ syn::GenericArgument::Const(_val) => {
+ formatter.write_str("Const")?;
formatter.write_str("(")?;
Debug::fmt(Lite(_val), formatter)?;
formatter.write_str(")")?;
Ok(())
}
- syn::GenericArgument::Constraint(_val) => {
- formatter.write_str("Constraint")?;
+ syn::GenericArgument::Binding(_val) => {
+ formatter.write_str("Binding")?;
formatter.write_str("(")?;
Debug::fmt(Lite(_val), formatter)?;
formatter.write_str(")")?;
Ok(())
}
- syn::GenericArgument::Const(_val) => {
- formatter.write_str("Const")?;
+ syn::GenericArgument::Constraint(_val) => {
+ formatter.write_str("Constraint")?;
formatter.write_str("(")?;
Debug::fmt(Lite(_val), formatter)?;
formatter.write_str(")")?;
diff --git a/vendor/syn/tests/regression/issue1108.rs b/vendor/syn/tests/regression/issue1108.rs
index 11a82adaa..4fd30c0c7 100644
--- a/vendor/syn/tests/regression/issue1108.rs
+++ b/vendor/syn/tests/regression/issue1108.rs
@@ -1,5 +1,5 @@
#[test]
fn issue1108() {
let data = "impl<x<>>::x for";
- let _ = syn::parse_file(data);
+ _ = syn::parse_file(data);
}
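
`_ = expr` assigns to the wildcard pattern and was stabilized as part of destructuring assignment in Rust 1.59; like `let _ = expr`, it deliberately discards the value without binding it, which is why the test can drop the `Result` from `syn::parse_file` this way. A two-line illustration, assuming a 1.59+ toolchain:

    fn fallible() -> Result<(), std::io::Error> {
        Ok(())
    }

    fn main() {
        let _ = fallible(); // older spelling
        _ = fallible(); // spelling used in the updated tests
    }
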
diff --git a/vendor/syn/tests/repo/mod.rs b/vendor/syn/tests/repo/mod.rs
index 0bafe6714..4c7be853b 100644
--- a/vendor/syn/tests/repo/mod.rs
+++ b/vendor/syn/tests/repo/mod.rs
@@ -10,10 +10,10 @@ use std::path::Path;
use tar::Archive;
use walkdir::DirEntry;
-const REVISION: &str = "ee160f2f5e73b6f5954bc33f059c316d9e8582c4";
+const REVISION: &str = "98ad6a5519651af36e246c0335c964dd52c554ba";
#[rustfmt::skip]
-static EXCLUDE: &[&str] = &[
+static EXCLUDE_FILES: &[&str] = &[
// TODO: impl ~const T {}
// https://github.com/dtolnay/syn/issues/1051
"src/test/ui/rfc-2632-const-trait-impl/syntax.rs",
@@ -33,6 +33,21 @@ static EXCLUDE: &[&str] = &[
"src/tools/rustfmt/tests/source/trait.rs",
"src/tools/rustfmt/tests/target/trait.rs",
+ // Various extensions to Rust syntax made up by rust-analyzer
+ "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0012_type_item_where_clause.rs",
+ "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0040_crate_keyword_vis.rs",
+ "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0131_existential_type.rs",
+ "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0179_use_tree_abs_star.rs",
+ "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0188_const_param_default_path.rs",
+ "src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0015_use_tree.rs",
+ "src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0029_range_forms.rs",
+ "src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0051_parameter_attrs.rs",
+ "src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0055_dot_dot_dot.rs",
+ "src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0068_item_modifiers.rs",
+ "src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0031_block_inner_attrs.rs",
+ "src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0045_ambiguous_trait_object.rs",
+ "src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0046_mutable_const_item.rs",
+
// Placeholder syntax for "throw expressions"
"src/test/pretty/yeet-expr.rs",
"src/test/ui/try-trait/yeet-for-option.rs",
@@ -41,7 +56,9 @@ static EXCLUDE: &[&str] = &[
// Excessive nesting
"src/test/ui/issues/issue-74564-if-expr-stack-overflow.rs",
- // Testing rustfmt on invalid syntax
+ // Testing tools on invalid syntax
+ "src/test/run-make/translation/test.rs",
+ "src/test/ui/generics/issue-94432-garbage-ice.rs",
"src/tools/rustfmt/tests/coverage/target/comments.rs",
"src/tools/rustfmt/tests/parser/issue-4126/invalid.rs",
"src/tools/rustfmt/tests/parser/issue_4418.rs",
@@ -54,8 +71,8 @@ static EXCLUDE: &[&str] = &[
"src/tools/rustfmt/tests/target/configs/spaces_around_ranges/true.rs",
"src/tools/rustfmt/tests/target/type.rs",
- // Testing compiler diagnostic localization on invalid syntax
- "src/test/run-make/translation/basic-translation.rs",
+ // Generated file containing a top-level expression, used with `include!`
+ "compiler/rustc_codegen_gcc/src/intrinsic/archs.rs",
// Clippy lint lists represented as expressions
"src/tools/clippy/clippy_lints/src/lib.deprecated.rs",
@@ -73,9 +90,6 @@ static EXCLUDE: &[&str] = &[
"src/tools/clippy/clippy_lints/src/lib.register_suspicious.rs",
// Not actually test cases
- "src/test/rustdoc-ui/test-compile-fail2.rs",
- "src/test/rustdoc-ui/test-compile-fail3.rs",
- "src/test/ui/json-bom-plus-crlf-multifile-aux.rs",
"src/test/ui/lint/expansion-time-include.rs",
"src/test/ui/macros/auxiliary/macro-comma-support.rs",
"src/test/ui/macros/auxiliary/macro-include-items-expr.rs",
@@ -84,38 +98,52 @@ static EXCLUDE: &[&str] = &[
"src/test/ui/parser/issues/auxiliary/issue-21146-inc.rs",
];
+#[rustfmt::skip]
+static EXCLUDE_DIRS: &[&str] = &[
+ // Inputs that intentionally do not parse
+ "src/tools/rust-analyzer/crates/parser/test_data/parser/err",
+ "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err",
+
+ // Inputs that lex but do not necessarily parse
+ "src/tools/rust-analyzer/crates/parser/test_data/lexer",
+
+ // Inputs that used to crash rust-analyzer, but aren't necessarily supposed to parse
+ "src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures",
+ "src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures",
+];
+
pub fn base_dir_filter(entry: &DirEntry) -> bool {
let path = entry.path();
- if path.is_dir() {
- return true; // otherwise walkdir does not visit the files
- }
- if path.extension().map_or(true, |e| e != "rs") {
- return false;
- }
let mut path_string = path.to_string_lossy();
if cfg!(windows) {
path_string = path_string.replace('\\', "/").into();
}
- let path = if let Some(path) = path_string.strip_prefix("tests/rust/") {
+ let path_string = if path_string == "tests/rust" {
+ return true;
+ } else if let Some(path) = path_string.strip_prefix("tests/rust/") {
path
} else {
panic!("unexpected path in Rust dist: {}", path_string);
};
- if path.starts_with("src/test/compile-fail") || path.starts_with("src/test/rustfix") {
+ if path.is_dir() {
+ return !EXCLUDE_DIRS.contains(&path_string);
+ }
+
+ if path.extension().map_or(true, |e| e != "rs") {
return false;
}
- if path.starts_with("src/test/ui") {
- let stderr_path = entry.path().with_extension("stderr");
+ if path_string.starts_with("src/test/ui") || path_string.starts_with("src/test/rustdoc-ui") {
+ let stderr_path = path.with_extension("stderr");
if stderr_path.exists() {
// Expected to fail in some way
return false;
}
}
- !EXCLUDE.contains(&path)
+ !EXCLUDE_FILES.contains(&path_string)
}
#[allow(dead_code)]
@@ -137,10 +165,17 @@ pub fn clone_rust() {
}
let mut missing = String::new();
let test_src = Path::new("tests/rust");
- for exclude in EXCLUDE {
- if !test_src.join(exclude).exists() {
+ for exclude in EXCLUDE_FILES {
+ if !test_src.join(exclude).is_file() {
+ missing += "\ntests/rust/";
+ missing += exclude;
+ }
+ }
+ for exclude in EXCLUDE_DIRS {
+ if !test_src.join(exclude).is_dir() {
missing += "\ntests/rust/";
missing += exclude;
+ missing += "/";
}
}
if !missing.is_empty() {
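
repo/mod.rs now distinguishes files from directories: `base_dir_filter` also receives directory entries, so entire rust-analyzer test-data trees can be pruned through EXCLUDE_DIRS, and clone_rust verifies that every excluded path still exists as the expected kind. A sketch of how such a predicate is typically driven through walkdir's `filter_entry`, which stops descending into a directory once the predicate returns false (assumes the walkdir 2.x crate; the driver loop is illustrative, not the harness's actual code):

    use walkdir::{DirEntry, WalkDir};

    fn keep(entry: &DirEntry) -> bool {
        // Keep directories so walkdir still descends into them,
        // and keep only .rs files among regular files.
        entry.path().is_dir() || entry.path().extension().map_or(false, |e| e == "rs")
    }

    fn main() {
        for entry in WalkDir::new("tests/rust")
            .into_iter()
            .filter_entry(keep) // prunes whole subtrees when keep() returns false
            .filter_map(Result::ok)
            .filter(|e| e.file_type().is_file())
        {
            println!("{}", entry.path().display());
        }
    }
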
diff --git a/vendor/syn/tests/test_derive_input.rs b/vendor/syn/tests/test_derive_input.rs
index 93634e577..1eff01186 100644
--- a/vendor/syn/tests/test_derive_input.rs
+++ b/vendor/syn/tests/test_derive_input.rs
@@ -1,4 +1,4 @@
-#![allow(clippy::too_many_lines)]
+#![allow(clippy::assertions_on_result_states, clippy::too_many_lines)]
#[macro_use]
mod macros;
diff --git a/vendor/syn/tests/test_parse_stream.rs b/vendor/syn/tests/test_parse_stream.rs
index 76bd06577..cc14fa032 100644
--- a/vendor/syn/tests/test_parse_stream.rs
+++ b/vendor/syn/tests/test_parse_stream.rs
@@ -4,9 +4,9 @@ use syn::{Ident, Token};
#[test]
fn test_peek() {
- let _ = |input: ParseStream| {
- let _ = input.peek(Ident);
- let _ = input.peek(Ident::peek_any);
- let _ = input.peek(Token![::]);
+ _ = |input: ParseStream| {
+ _ = input.peek(Ident);
+ _ = input.peek(Ident::peek_any);
+ _ = input.peek(Token![::]);
};
}
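
The closure in test_peek only type-checks the calls; `peek` inspects upcoming tokens without consuming them. A standalone sketch of how `ParseStream::peek` is used in a real `Parse` impl (the `KeyValue` type, its fields, and the input string are made up for illustration; assumes syn 1.x with default features):

    use syn::parse::{Parse, ParseStream};
    use syn::{Ident, LitInt, Token};

    struct KeyValue {
        key: Ident,
        value: Option<LitInt>,
    }

    impl Parse for KeyValue {
        fn parse(input: ParseStream) -> syn::Result<Self> {
            let key: Ident = input.parse()?;
            // peek looks ahead without consuming, so the `= value` part stays optional.
            let value = if input.peek(Token![=]) {
                input.parse::<Token![=]>()?;
                Some(input.parse()?)
            } else {
                None
            };
            Ok(KeyValue { key, value })
        }
    }

    fn main() -> syn::Result<()> {
        let kv: KeyValue = syn::parse_str("retries = 3")?;
        assert_eq!(kv.key, "retries");
        assert!(kv.value.is_some());
        Ok(())
    }
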
diff --git a/vendor/syn/tests/test_precedence.rs b/vendor/syn/tests/test_precedence.rs
index bd273a565..dbcd74f16 100644
--- a/vendor/syn/tests/test_precedence.rs
+++ b/vendor/syn/tests/test_precedence.rs
@@ -24,6 +24,7 @@
extern crate rustc_ast;
extern crate rustc_data_structures;
extern crate rustc_span;
+extern crate thin_vec;
use crate::common::eq::SpanlessEq;
use crate::common::parse;
@@ -207,10 +208,10 @@ fn librustc_brackets(mut librustc_expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
};
use rustc_ast::mut_visit::{noop_visit_generic_arg, noop_visit_local, MutVisitor};
use rustc_data_structures::map_in_place::MapInPlace;
- use rustc_data_structures::thin_vec::ThinVec;
use rustc_span::DUMMY_SP;
use std::mem;
use std::ops::DerefMut;
+ use thin_vec::ThinVec;
struct BracketsVisitor {
failed: bool,
@@ -243,7 +244,7 @@ fn librustc_brackets(mut librustc_expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
}
fn noop_visit_expr<T: MutVisitor>(e: &mut Expr, vis: &mut T) {
- use rustc_ast::mut_visit::{noop_visit_expr, visit_thin_attrs};
+ use rustc_ast::mut_visit::{noop_visit_expr, visit_attrs};
match &mut e.kind {
ExprKind::AddrOf(BorrowKind::Raw, ..) => {}
ExprKind::Struct(expr) => {
@@ -261,7 +262,7 @@ fn librustc_brackets(mut librustc_expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
}
vis.visit_id(&mut e.id);
vis.visit_span(&mut e.span);
- visit_thin_attrs(&mut e.attrs, vis);
+ visit_attrs(&mut e.attrs, vis);
}
_ => noop_visit_expr(e, vis),
}
@@ -322,15 +323,15 @@ fn librustc_brackets(mut librustc_expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
// types yet. We'll look into comparing those in the future. For now
// focus on expressions appearing in other places.
fn visit_pat(&mut self, pat: &mut P<Pat>) {
- let _ = pat;
+ _ = pat;
}
fn visit_ty(&mut self, ty: &mut P<Ty>) {
- let _ = ty;
+ _ = ty;
}
fn visit_attribute(&mut self, attr: &mut Attribute) {
- let _ = attr;
+ _ = attr;
}
}
@@ -426,7 +427,7 @@ fn syn_brackets(syn_expr: syn::Expr) -> syn::Expr {
fn collect_exprs(file: syn::File) -> Vec<syn::Expr> {
use syn::fold::Fold;
use syn::punctuated::Punctuated;
- use syn::{token, Expr, ExprTuple, Path};
+ use syn::{token, ConstParam, Expr, ExprTuple, Path};
struct CollectExprs(Vec<Expr>);
impl Fold for CollectExprs {
@@ -447,6 +448,10 @@ fn collect_exprs(file: syn::File) -> Vec<syn::Expr> {
// Skip traversing into const generic path arguments
path
}
+
+ fn fold_const_param(&mut self, const_param: ConstParam) -> ConstParam {
+ const_param
+ }
}
let mut folder = CollectExprs(vec![]);
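
The folder now also overrides `fold_const_param` to return its input unchanged, which is how syn's `Fold` trait expresses "do not recurse into this node". A self-contained toy showing the same pattern (requires syn with the "full", "fold", and "parsing" features; `CountExprs` is an illustrative stand-in, not the test's `CollectExprs`):

    use syn::fold::Fold;
    use syn::{ConstParam, Expr};

    struct CountExprs(usize);

    impl Fold for CountExprs {
        fn fold_expr(&mut self, expr: Expr) -> Expr {
            self.0 += 1;
            // Delegate so nested expressions are still visited.
            syn::fold::fold_expr(self, expr)
        }

        fn fold_const_param(&mut self, const_param: ConstParam) -> ConstParam {
            // No call into syn::fold::fold_const_param: const generic
            // parameters (and their default expressions) are skipped.
            const_param
        }
    }

    fn main() {
        let file: syn::File =
            syn::parse_str("fn f<const N: usize>() -> usize { N + 1 }").unwrap();
        let mut counter = CountExprs(0);
        let _ = counter.fold_file(file);
        println!("expressions seen: {}", counter.0);
    }
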
diff --git a/vendor/syn/tests/test_size.rs b/vendor/syn/tests/test_size.rs
index e172df2bc..02b0700f0 100644
--- a/vendor/syn/tests/test_size.rs
+++ b/vendor/syn/tests/test_size.rs
@@ -5,17 +5,17 @@ use syn::{Expr, Item, Lit, Pat, Type};
#[test]
fn test_expr_size() {
- assert_eq!(mem::size_of::<Expr>(), 280);
+ assert_eq!(mem::size_of::<Expr>(), 264);
}
#[test]
fn test_item_size() {
- assert_eq!(mem::size_of::<Item>(), 344);
+ assert_eq!(mem::size_of::<Item>(), 320);
}
#[test]
fn test_type_size() {
- assert_eq!(mem::size_of::<Type>(), 304);
+ assert_eq!(mem::size_of::<Type>(), 280);
}
#[test]
@@ -25,5 +25,5 @@ fn test_pat_size() {
#[test]
fn test_lit_size() {
- assert_eq!(mem::size_of::<Lit>(), 40);
+ assert_eq!(mem::size_of::<Lit>(), 32);
}
diff --git a/vendor/syn/tests/test_stmt.rs b/vendor/syn/tests/test_stmt.rs
index 0bca62b04..f444e5b49 100644
--- a/vendor/syn/tests/test_stmt.rs
+++ b/vendor/syn/tests/test_stmt.rs
@@ -1,4 +1,4 @@
-#![allow(clippy::non_ascii_literal)]
+#![allow(clippy::assertions_on_result_states, clippy::non_ascii_literal)]
#[macro_use]
mod macros;