author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 12:06:31 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 12:06:31 +0000
commit     2ff14448863ac1a1dd9533461708e29aae170c2d (patch)
tree       85b9fea2bbfe3f06473cfa381eed11f273b57c5c /vendor/proc-macro2
parent     Adding debian version 1.64.0+dfsg1-1. (diff)
download   rustc-2ff14448863ac1a1dd9533461708e29aae170c2d.tar.xz
           rustc-2ff14448863ac1a1dd9533461708e29aae170c2d.zip
Adding debian version 1.65.0+dfsg1-2. (debian/1.65.0+dfsg1-2)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'vendor/proc-macro2')
-rw-r--r--  vendor/proc-macro2/.cargo-checksum.json    2
-rw-r--r--  vendor/proc-macro2/Cargo.toml             12
-rw-r--r--  vendor/proc-macro2/src/detection.rs        2
-rw-r--r--  vendor/proc-macro2/src/fallback.rs       169
-rw-r--r--  vendor/proc-macro2/src/lib.rs             40
-rw-r--r--  vendor/proc-macro2/src/marker.rs           4
-rw-r--r--  vendor/proc-macro2/src/parse.rs           64
-rw-r--r--  vendor/proc-macro2/src/rcvec.rs          142
-rw-r--r--  vendor/proc-macro2/src/wrapper.rs         14
-rw-r--r--  vendor/proc-macro2/tests/comments.rs       2
-rw-r--r--  vendor/proc-macro2/tests/test.rs           9

11 files changed, 324 insertions, 136 deletions
diff --git a/vendor/proc-macro2/.cargo-checksum.json b/vendor/proc-macro2/.cargo-checksum.json
index 00ef04da4..59f2a8483 100644
--- a/vendor/proc-macro2/.cargo-checksum.json
+++ b/vendor/proc-macro2/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"d6809d3069ce4c5e92828c6e49100a684c37f55e4d2d42bbe02362e0cd0297d7","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"0c17148c1957c3f721d99fc99aedaefee5f2f1ba7e2336a289b02f91609099fb","build.rs":"2c6ddbeeb1700b8dba3689185a535c672dc43e9d61b54f835cf4f3cc163b4afc","src/detection.rs":"12821bd715999c994fc1c8e1b68e52e1d64677290d803c53ad096ab17c9959fb","src/fallback.rs":"21f3f0f552db6fbec0cde8c056a2c722a3c88302b1519d08a3bfe341f2a62055","src/lib.rs":"ed7b36e5b3a35852936a4d1121704ee249011379b582cd0409e1ce08fae25c24","src/marker.rs":"87fce2d0357f5b7998b6d9dfb064f4a0cbc9dabb19e33d4b514a446243ebe2e8","src/parse.rs":"44a6fdec0332de7b8c083530294d3dc0aadb626a0739fd07a16a1ec48473a99d","src/wrapper.rs":"a0ba0eadd7f30842049bd9f4dca9182f2e4f7e2a09d394959bcd48c1d752a795","tests/comments.rs":"065132797580744767b7a854d5467757bd3433a990957f8dbccdfa779bfb275f","tests/features.rs":"a86deb8644992a4eb64d9fd493eff16f9cf9c5cb6ade3a634ce0c990cf87d559","tests/marker.rs":"cb6d776eba6a238d726b0f531883adf41957e06f2717ee8a069821c81e7081d6","tests/test.rs":"15783299f85ecccb51acd6cc95087077eccfecddce1208e56ad5dabf74d3f50a","tests/test_fmt.rs":"9357769945784354909259084ec8b34d2aa52081dd3967cac6dae3a5e3df3bc0"},"package":"dd96a1e8ed2596c337f8eae5f24924ec83f5ad5ab21ea8e455d3566c69fbcaf7"}
\ No newline at end of file
+{"files":{"Cargo.toml":"774906eb037620dba1ef4cf6adc14ec35e7f9f81d1e2d967dfedfab8e7b603d9","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"0c17148c1957c3f721d99fc99aedaefee5f2f1ba7e2336a289b02f91609099fb","build.rs":"2c6ddbeeb1700b8dba3689185a535c672dc43e9d61b54f835cf4f3cc163b4afc","src/detection.rs":"ed9a5f9a979ab01247d7a68eeb1afa3c13209334c5bfff0f9289cb07e5bb4e8b","src/fallback.rs":"5aab7b73e381dc192256a0c67c99706ae0850159fd9f805fb0c9418fc14b1e00","src/lib.rs":"8bd3da5c9f048acda3799d53eefb7fb2f8b4046612d48f34ec30102b10f4ab08","src/marker.rs":"344a8394f06a1d43355b514920e7e3c0c6dce507be767e3a590bbe3552edd110","src/parse.rs":"34fa3c2e03fa9ab3e63ea57595d1dea4edf0fcfa313a04057e4dd7dc7d7269e0","src/rcvec.rs":"49b6784c6ca5f32573cd8a83758b485d8acbfa126e5fb516ae439e429ef4c144","src/wrapper.rs":"a88d0dff9bb8317071ec205b0c2a79717cd8313041e03ef58e0645be65ab05e8","tests/comments.rs":"31115b3a56c83d93eef2fb4c9566bf4543e302560732986161b98aef504785ed","tests/features.rs":"a86deb8644992a4eb64d9fd493eff16f9cf9c5cb6ade3a634ce0c990cf87d559","tests/marker.rs":"cb6d776eba6a238d726b0f531883adf41957e06f2717ee8a069821c81e7081d6","tests/test.rs":"2fe87d2eab135fb2417cac2876998e9314616fb68981918d7d76333c22b199a0","tests/test_fmt.rs":"9357769945784354909259084ec8b34d2aa52081dd3967cac6dae3a5e3df3bc0"},"package":"0a2ca2c61bc9f3d74d2886294ab7b9853abd9c1ad903a3ac7815c58989bb7bab"}
\ No newline at end of file
diff --git a/vendor/proc-macro2/Cargo.toml b/vendor/proc-macro2/Cargo.toml
index 2dabf8994..b8b46430d 100644
--- a/vendor/proc-macro2/Cargo.toml
+++ b/vendor/proc-macro2/Cargo.toml
@@ -13,19 +13,19 @@ edition = "2018"
 rust-version = "1.31"
 name = "proc-macro2"
-version = "1.0.40"
+version = "1.0.43"
 authors = [
     "David Tolnay <dtolnay@gmail.com>",
     "Alex Crichton <alex@alexcrichton.com>",
 ]
 autobenches = false
-description = """
-A substitute implementation of the compiler's `proc_macro` API to decouple
-token-based libraries from the procedural macro use case.
-"""
+description = "A substitute implementation of the compiler's `proc_macro` API to decouple token-based libraries from the procedural macro use case."
 documentation = "https://docs.rs/proc-macro2"
 readme = "README.md"
-keywords = ["macros"]
+keywords = [
+    "macros",
+    "syn",
+]
 categories = ["development-tools::procedural-macro-helpers"]
 license = "MIT OR Apache-2.0"
 repository = "https://github.com/dtolnay/proc-macro2"
diff --git a/vendor/proc-macro2/src/detection.rs b/vendor/proc-macro2/src/detection.rs
index d139b7365..beba7b237 100644
--- a/vendor/proc-macro2/src/detection.rs
+++ b/vendor/proc-macro2/src/detection.rs
@@ -1,4 +1,4 @@
-use std::sync::atomic::{AtomicUsize, Ordering};
+use core::sync::atomic::{AtomicUsize, Ordering};
 use std::sync::Once;
 
 static WORKS: AtomicUsize = AtomicUsize::new(0);
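The detection.rs change only swaps `std` for `core` in the atomic import, but the `WORKS` static visible in the context lines hints at what the module does: probe once, at runtime, whether the compiler's real `proc_macro` API is usable, and cache the answer. A minimal sketch of that probe-and-cache pattern; the probe body and function names here are illustrative stand-ins, not the crate's actual internals:

    use core::sync::atomic::{AtomicUsize, Ordering};
    use std::panic;
    use std::sync::Once;

    // 0 = not yet probed, 1 = probe panicked, 2 = probe succeeded.
    static WORKS: AtomicUsize = AtomicUsize::new(0);
    static INIT: Once = Once::new();

    fn proc_macro_works() -> bool {
        INIT.call_once(|| {
            // Hypothetical probe: calling into proc_macro outside a macro
            // invocation panics, so catch_unwind doubles as the test.
            let ok = panic::catch_unwind(probe).is_ok();
            WORKS.store(ok as usize + 1, Ordering::Relaxed);
        });
        WORKS.load(Ordering::Relaxed) == 2
    }

    fn probe() {
        // e.g. proc_macro::Span::call_site(), which panics outside a macro.
    }

    fn main() {
        println!("proc_macro usable: {}", proc_macro_works());
    }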
diff --git a/vendor/proc-macro2/src/fallback.rs b/vendor/proc-macro2/src/fallback.rs
index 6afa790f8..378ef7e94 100644
--- a/vendor/proc-macro2/src/fallback.rs
+++ b/vendor/proc-macro2/src/fallback.rs
@@ -1,18 +1,19 @@
 use crate::parse::{self, Cursor};
+use crate::rcvec::{RcVec, RcVecBuilder, RcVecIntoIter, RcVecMut};
 use crate::{Delimiter, Spacing, TokenTree};
 #[cfg(span_locations)]
-use std::cell::RefCell;
+use core::cell::RefCell;
 #[cfg(span_locations)]
-use std::cmp;
-use std::fmt::{self, Debug, Display, Write};
-use std::iter::FromIterator;
-use std::mem;
-use std::ops::RangeBounds;
+use core::cmp;
+use core::fmt::{self, Debug, Display, Write};
+use core::iter::FromIterator;
+use core::mem::ManuallyDrop;
+use core::ops::RangeBounds;
+use core::ptr;
+use core::str::FromStr;
 #[cfg(procmacro2_semver_exempt)]
 use std::path::Path;
 use std::path::PathBuf;
-use std::str::FromStr;
-use std::vec;
 
 /// Force use of proc-macro2's fallback implementation of the API for now, even
 /// if the compiler's implementation is available.
@@ -30,7 +31,7 @@ pub fn unforce() {
 
 #[derive(Clone)]
 pub(crate) struct TokenStream {
-    inner: Vec<TokenTree>,
+    inner: RcVec<TokenTree>,
 }
 
 #[derive(Debug)]
@@ -52,71 +53,69 @@ impl LexError {
 
 impl TokenStream {
     pub fn new() -> Self {
-        TokenStream { inner: Vec::new() }
+        TokenStream {
+            inner: RcVecBuilder::new().build(),
+        }
     }
 
     pub fn is_empty(&self) -> bool {
         self.inner.len() == 0
     }
 
-    fn take_inner(&mut self) -> Vec<TokenTree> {
-        mem::replace(&mut self.inner, Vec::new())
+    fn take_inner(self) -> RcVecBuilder<TokenTree> {
+        let nodrop = ManuallyDrop::new(self);
+        unsafe { ptr::read(&nodrop.inner) }.make_owned()
     }
+}
 
-    fn push_token(&mut self, token: TokenTree) {
-        // https://github.com/dtolnay/proc-macro2/issues/235
-        match token {
-            #[cfg(not(no_bind_by_move_pattern_guard))]
-            TokenTree::Literal(crate::Literal {
-                #[cfg(wrap_proc_macro)]
-                inner: crate::imp::Literal::Fallback(literal),
-                #[cfg(not(wrap_proc_macro))]
-                inner: literal,
-                ..
-            }) if literal.repr.starts_with('-') => {
-                push_negative_literal(self, literal);
-            }
-            #[cfg(no_bind_by_move_pattern_guard)]
-            TokenTree::Literal(crate::Literal {
-                #[cfg(wrap_proc_macro)]
-                inner: crate::imp::Literal::Fallback(literal),
-                #[cfg(not(wrap_proc_macro))]
-                inner: literal,
-                ..
-            }) => {
-                if literal.repr.starts_with('-') {
-                    push_negative_literal(self, literal);
-                } else {
-                    self.inner
-                        .push(TokenTree::Literal(crate::Literal::_new_stable(literal)));
-                }
-            }
-            _ => self.inner.push(token),
+fn push_token_from_proc_macro(mut vec: RcVecMut<TokenTree>, token: TokenTree) {
+    // https://github.com/dtolnay/proc-macro2/issues/235
+    match token {
+        #[cfg(not(no_bind_by_move_pattern_guard))]
+        TokenTree::Literal(crate::Literal {
+            #[cfg(wrap_proc_macro)]
+            inner: crate::imp::Literal::Fallback(literal),
+            #[cfg(not(wrap_proc_macro))]
+            inner: literal,
+            ..
+        }) if literal.repr.starts_with('-') => {
+            push_negative_literal(vec, literal);
         }
-
-        #[cold]
-        fn push_negative_literal(stream: &mut TokenStream, mut literal: Literal) {
-            literal.repr.remove(0);
-            let mut punct = crate::Punct::new('-', Spacing::Alone);
-            punct.set_span(crate::Span::_new_stable(literal.span));
-            stream.inner.push(TokenTree::Punct(punct));
-            stream
-                .inner
-                .push(TokenTree::Literal(crate::Literal::_new_stable(literal)));
+        #[cfg(no_bind_by_move_pattern_guard)]
+        TokenTree::Literal(crate::Literal {
+            #[cfg(wrap_proc_macro)]
+            inner: crate::imp::Literal::Fallback(literal),
+            #[cfg(not(wrap_proc_macro))]
+            inner: literal,
+            ..
+        }) => {
+            if literal.repr.starts_with('-') {
+                push_negative_literal(vec, literal);
+            } else {
+                vec.push(TokenTree::Literal(crate::Literal::_new_stable(literal)));
+            }
         }
+        _ => vec.push(token),
     }
-}
 
-impl From<Vec<TokenTree>> for TokenStream {
-    fn from(inner: Vec<TokenTree>) -> Self {
-        TokenStream { inner }
+    #[cold]
+    fn push_negative_literal(mut vec: RcVecMut<TokenTree>, mut literal: Literal) {
+        literal.repr.remove(0);
+        let mut punct = crate::Punct::new('-', Spacing::Alone);
+        punct.set_span(crate::Span::_new_stable(literal.span));
+        vec.push(TokenTree::Punct(punct));
+        vec.push(TokenTree::Literal(crate::Literal::_new_stable(literal)));
     }
 }
 
 // Nonrecursive to prevent stack overflow.
 impl Drop for TokenStream {
     fn drop(&mut self) {
-        while let Some(token) = self.inner.pop() {
+        let mut inner = match self.inner.get_mut() {
+            Some(inner) => inner,
+            None => return,
+        };
+        while let Some(token) = inner.pop() {
             let group = match token {
                 TokenTree::Group(group) => group.inner,
                 _ => continue,
@@ -126,8 +125,35 @@ impl Drop for TokenStream {
                 crate::imp::Group::Fallback(group) => group,
                 crate::imp::Group::Compiler(_) => continue,
             };
-            let mut group = group;
-            self.inner.extend(group.stream.take_inner());
+            inner.extend(group.stream.take_inner());
         }
     }
}
+
+pub(crate) struct TokenStreamBuilder {
+    inner: RcVecBuilder<TokenTree>,
+}
+
+impl TokenStreamBuilder {
+    pub fn new() -> Self {
+        TokenStreamBuilder {
+            inner: RcVecBuilder::new(),
+        }
+    }
+
+    pub fn with_capacity(cap: usize) -> Self {
+        TokenStreamBuilder {
+            inner: RcVecBuilder::with_capacity(cap),
+        }
+    }
+
+    pub fn push_token_from_parser(&mut self, tt: TokenTree) {
+        self.inner.push(tt);
+    }
+
+    pub fn build(self) -> TokenStream {
+        TokenStream {
+            inner: self.inner.build(),
+        }
+    }
+}
@@ -220,9 +246,11 @@ impl From<TokenStream> for proc_macro::TokenStream {
 
 impl From<TokenTree> for TokenStream {
     fn from(tree: TokenTree) -> TokenStream {
-        let mut stream = TokenStream::new();
-        stream.push_token(tree);
-        stream
+        let mut stream = RcVecBuilder::new();
+        push_token_from_proc_macro(stream.as_mut(), tree);
+        TokenStream {
+            inner: stream.build(),
+        }
     }
 }
 
@@ -236,35 +264,38 @@ impl FromIterator<TokenTree> for TokenStream {
 
 impl FromIterator<TokenStream> for TokenStream {
     fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
-        let mut v = Vec::new();
+        let mut v = RcVecBuilder::new();
 
-        for mut stream in streams {
+        for stream in streams {
             v.extend(stream.take_inner());
         }
 
-        TokenStream { inner: v }
+        TokenStream { inner: v.build() }
     }
 }
 
 impl Extend<TokenTree> for TokenStream {
     fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, tokens: I) {
-        tokens.into_iter().for_each(|token| self.push_token(token));
+        let mut vec = self.inner.make_mut();
+        tokens
+            .into_iter()
+            .for_each(|token| push_token_from_proc_macro(vec.as_mut(), token));
    }
 }
 
 impl Extend<TokenStream> for TokenStream {
     fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
-        self.inner.extend(streams.into_iter().flatten());
+        self.inner.make_mut().extend(streams.into_iter().flatten());
     }
 }
 
-pub(crate) type TokenTreeIter = vec::IntoIter<TokenTree>;
+pub(crate) type TokenTreeIter = RcVecIntoIter<TokenTree>;
 
 impl IntoIterator for TokenStream {
     type Item = TokenTree;
     type IntoIter = TokenTreeIter;
 
-    fn into_iter(mut self) -> TokenTreeIter {
+    fn into_iter(self) -> TokenTreeIter {
         self.take_inner().into_iter()
     }
 }
@@ -383,7 +414,7 @@ impl SourceMap {
     fn add_file(&mut self, name: &str, src: &str) -> Span {
         let (len, lines) = lines_offsets(src);
         let lo = self.next_start_pos();
-        // XXX(nika): Shouild we bother doing a checked cast or checked add here?
+        // XXX(nika): Should we bother doing a checked cast or checked add here?
         let span = Span {
             lo,
             hi: lo + (len as u32),
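Two details in the fallback.rs diff deserve a note. `take_inner` moves a field out of a type that implements `Drop` by wrapping `self` in `ManuallyDrop` and `ptr::read`-ing the field, the standard way to bypass a destructor without double-dropping. And the `Drop` impl is deliberately iterative, because recursively dropping deeply nested groups would recurse once per nesting level and overflow the stack. A self-contained sketch of the iterative-drop idea, on toy types rather than the crate's:

    enum Tree {
        Leaf(u64),
        Node(Vec<Tree>),
    }

    impl Drop for Tree {
        fn drop(&mut self) {
            // Flatten children into a local worklist so destruction is
            // iterative; a naive drop of a deeply nested Node recurses
            // once per nesting level and can blow the stack.
            let mut worklist = match self {
                Tree::Leaf(_) => return,
                Tree::Node(children) => std::mem::take(children),
            };
            while let Some(mut tree) = worklist.pop() {
                if let Tree::Node(children) = &mut tree {
                    worklist.append(children);
                }
                // `tree` drops here with no children left, so its own
                // Drop bottoms out immediately.
            }
        }
    }

    fn main() {
        // A million levels of nesting drops fine with the iterative Drop.
        let mut t = Tree::Leaf(0);
        for _ in 0..1_000_000 {
            t = Tree::Node(vec![t]);
        }
        drop(t);
    }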
diff --git a/vendor/proc-macro2/src/lib.rs b/vendor/proc-macro2/src/lib.rs
index 6adf71a19..be5aa5114 100644
--- a/vendor/proc-macro2/src/lib.rs
+++ b/vendor/proc-macro2/src/lib.rs
@@ -86,7 +86,7 @@
 //! a different thread.
 
 // Proc-macro2 types in rustdoc of other crates get linked to here.
-#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.40")]
+#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.43")]
 #![cfg_attr(any(proc_macro_span, super_unstable), feature(proc_macro_span))]
 #![cfg_attr(super_unstable, feature(proc_macro_def_site))]
 #![cfg_attr(doc_cfg, feature(doc_cfg))]
@@ -120,6 +120,7 @@ extern crate proc_macro;
 
 mod marker;
 mod parse;
+mod rcvec;
 
 #[cfg(wrap_proc_macro)]
 mod detection;
@@ -136,15 +137,15 @@ use crate::fallback as imp;
 mod imp;
 
 use crate::marker::Marker;
-use std::cmp::Ordering;
+use core::cmp::Ordering;
+use core::fmt::{self, Debug, Display};
+use core::hash::{Hash, Hasher};
+use core::iter::FromIterator;
+use core::ops::RangeBounds;
+use core::str::FromStr;
 use std::error::Error;
-use std::fmt::{self, Debug, Display};
-use std::hash::{Hash, Hasher};
-use std::iter::FromIterator;
-use std::ops::RangeBounds;
 #[cfg(procmacro2_semver_exempt)]
 use std::path::PathBuf;
-use std::str::FromStr;
 
 /// An abstract stream of tokens, or more concretely a sequence of token trees.
 ///
@@ -1128,9 +1129,9 @@ impl Literal {
     /// This constructor is similar to those like `Literal::i8_unsuffixed` where
     /// the float's value is emitted directly into the token but no suffix is
     /// used, so it may be inferred to be a `f64` later in the compiler.
-    /// Literals created from negative numbers may not survive rountrips through
-    /// `TokenStream` or strings and may be broken into two tokens (`-` and
-    /// positive literal).
+    /// Literals created from negative numbers may not survive round-trips
+    /// through `TokenStream` or strings and may be broken into two tokens (`-`
+    /// and positive literal).
     ///
     /// # Panics
     ///
@@ -1147,7 +1148,7 @@ impl Literal {
     /// specified is the preceding part of the token and `f64` is the suffix of
     /// the token. This token will always be inferred to be an `f64` in the
     /// compiler. Literals created from negative numbers may not survive
-    /// rountrips through `TokenStream` or strings and may be broken into two
+    /// round-trips through `TokenStream` or strings and may be broken into two
     /// tokens (`-` and positive literal).
     ///
     /// # Panics
     ///
@@ -1164,9 +1165,9 @@ impl Literal {
     /// This constructor is similar to those like `Literal::i8_unsuffixed` where
     /// the float's value is emitted directly into the token but no suffix is
     /// used, so it may be inferred to be a `f64` later in the compiler.
-    /// Literals created from negative numbers may not survive rountrips through
-    /// `TokenStream` or strings and may be broken into two tokens (`-` and
-    /// positive literal).
+    /// Literals created from negative numbers may not survive round-trips
+    /// through `TokenStream` or strings and may be broken into two tokens (`-`
+    /// and positive literal).
     ///
     /// # Panics
     ///
@@ -1183,7 +1184,7 @@ impl Literal {
     /// specified is the preceding part of the token and `f32` is the suffix of
     /// the token. This token will always be inferred to be an `f32` in the
     /// compiler. Literals created from negative numbers may not survive
-    /// rountrips through `TokenStream` or strings and may be broken into two
+    /// round-trips through `TokenStream` or strings and may be broken into two
     /// tokens (`-` and positive literal).
     ///
     /// # Panics
@@ -1270,7 +1271,7 @@ impl Display for Literal {
 pub mod token_stream {
     use crate::marker::Marker;
     use crate::{imp, TokenTree};
-    use std::fmt::{self, Debug};
+    use core::fmt::{self, Debug};
 
     pub use crate::TokenStream;
 
@@ -1290,11 +1291,16 @@ pub mod token_stream {
         fn next(&mut self) -> Option<TokenTree> {
             self.inner.next()
         }
+
+        fn size_hint(&self) -> (usize, Option<usize>) {
+            self.inner.size_hint()
+        }
     }
 
     impl Debug for IntoIter {
         fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-            Debug::fmt(&self.inner, f)
+            f.write_str("TokenStream ")?;
+            f.debug_list().entries(self.clone()).finish()
         }
     }
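The `size_hint` forwarding added to `token_stream::IntoIter` is small but useful: a wrapper iterator that keeps the default `(0, None)` hint defeats preallocation in `collect` and friends. A generic illustration of why forwarding matters, using a hypothetical wrapper type rather than the crate's:

    struct Wrapper<I> {
        inner: I,
    }

    impl<I: Iterator> Iterator for Wrapper<I> {
        type Item = I::Item;

        fn next(&mut self) -> Option<I::Item> {
            self.inner.next()
        }

        // Without this override, the default (0, None) hint forces
        // collectors to start from an empty allocation and regrow.
        fn size_hint(&self) -> (usize, Option<usize>) {
            self.inner.size_hint()
        }
    }

    fn main() {
        let w = Wrapper { inner: 0..1024 };
        assert_eq!(w.size_hint(), (1024, Some(1024)));
        let v: Vec<i32> = w.collect(); // can reserve 1024 slots up front
        assert_eq!(v.len(), 1024);
    }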
diff --git a/vendor/proc-macro2/src/marker.rs b/vendor/proc-macro2/src/marker.rs
index 58729baf4..59fd09630 100644
--- a/vendor/proc-macro2/src/marker.rs
+++ b/vendor/proc-macro2/src/marker.rs
@@ -1,4 +1,4 @@
-use std::marker::PhantomData;
+use core::marker::PhantomData;
 use std::panic::{RefUnwindSafe, UnwindSafe};
 use std::rc::Rc;
 
@@ -9,7 +9,7 @@ pub(crate) type Marker = PhantomData<ProcMacroAutoTraits>;
 pub(crate) use self::value::*;
 
 mod value {
-    pub(crate) use std::marker::PhantomData as Marker;
+    pub(crate) use core::marker::PhantomData as Marker;
 }
 
 pub(crate) struct ProcMacroAutoTraits(Rc<()>);
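marker.rs exists to strip `Send` and `Sync` from proc-macro2's types: `Marker` is a `PhantomData` of a struct holding `Rc<()>`, and since `Rc` is neither `Send` nor `Sync`, any type embedding the marker loses both auto traits at zero runtime cost. The trick in isolation, with illustrative names:

    use std::marker::PhantomData;
    use std::rc::Rc;

    // Rc<()> is neither Send nor Sync, so a PhantomData of it strips
    // both auto traits from any type that embeds the marker.
    type NotSendSyncMarker = PhantomData<Rc<()>>;

    struct Handle {
        id: u32,
        _marker: NotSendSyncMarker,
    }

    fn main() {
        let h = Handle { id: 7, _marker: PhantomData };
        println!("handle {}", h.id);
        // This would fail to compile, which is the point:
        // std::thread::spawn(move || drop(h));
    }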
diff --git a/vendor/proc-macro2/src/parse.rs b/vendor/proc-macro2/src/parse.rs
index f77213aa1..1c9974cfa 100644
--- a/vendor/proc-macro2/src/parse.rs
+++ b/vendor/proc-macro2/src/parse.rs
@@ -1,9 +1,10 @@
 use crate::fallback::{
     is_ident_continue, is_ident_start, Group, LexError, Literal, Span, TokenStream,
+    TokenStreamBuilder,
 };
 use crate::{Delimiter, Punct, Spacing, TokenTree};
-use std::char;
-use std::str::{Bytes, CharIndices, Chars};
+use core::char;
+use core::str::{Bytes, CharIndices, Chars};
 
 #[derive(Copy, Clone, Eq, PartialEq)]
 pub(crate) struct Cursor<'a> {
@@ -150,14 +151,13 @@ fn word_break(input: Cursor) -> Result<Cursor, Reject> {
 }
 
 pub(crate) fn token_stream(mut input: Cursor) -> Result<TokenStream, LexError> {
-    let mut trees = Vec::new();
+    let mut trees = TokenStreamBuilder::new();
     let mut stack = Vec::new();
 
     loop {
         input = skip_whitespace(input);
 
-        if let Ok((rest, tt)) = doc_comment(input) {
-            trees.extend(tt);
+        if let Ok((rest, ())) = doc_comment(input, &mut trees) {
             input = rest;
             continue;
         }
@@ -168,7 +168,7 @@ pub(crate) fn token_stream(mut input: Cursor) -> Result<TokenStream, LexError> {
         let first = match input.bytes().next() {
             Some(first) => first,
             None => match stack.last() {
-                None => return Ok(TokenStream::from(trees)),
+                None => return Ok(trees.build()),
                 #[cfg(span_locations)]
                 Some((lo, _frame)) => {
                     return Err(LexError {
@@ -191,7 +191,7 @@ pub(crate) fn token_stream(mut input: Cursor) -> Result<TokenStream, LexError> {
             #[cfg(span_locations)]
             let frame = (lo, frame);
             stack.push(frame);
-            trees = Vec::new();
+            trees = TokenStreamBuilder::new();
         } else if let Some(close_delimiter) = match first {
             b')' => Some(Delimiter::Parenthesis),
             b']' => Some(Delimiter::Bracket),
@@ -209,7 +209,7 @@ pub(crate) fn token_stream(mut input: Cursor) -> Result<TokenStream, LexError> {
                 return Err(lex_error(input));
             }
             input = input.advance(1);
-            let mut g = Group::new(open_delimiter, TokenStream::from(trees));
+            let mut g = Group::new(open_delimiter, trees.build());
             g.set_span(Span {
                 #[cfg(span_locations)]
                 lo,
@@ -217,7 +217,7 @@ pub(crate) fn token_stream(mut input: Cursor) -> Result<TokenStream, LexError> {
                 hi: input.off,
             });
             trees = outer;
-            trees.push(TokenTree::Group(crate::Group::_new_stable(g)));
+            trees.push_token_from_parser(TokenTree::Group(crate::Group::_new_stable(g)));
         } else {
             let (rest, mut tt) = match leaf_token(input) {
                 Ok((rest, tt)) => (rest, tt),
@@ -229,7 +229,7 @@ pub(crate) fn token_stream(mut input: Cursor) -> Result<TokenStream, LexError> {
                 #[cfg(span_locations)]
                 hi: rest.off,
             }));
-            trees.push(tt);
+            trees.push_token_from_parser(tt);
             input = rest;
         }
     }
@@ -786,7 +786,7 @@ fn punct_char(input: Cursor) -> PResult<char> {
     }
 }
 
-fn doc_comment(input: Cursor) -> PResult<Vec<TokenTree>> {
+fn doc_comment<'a>(input: Cursor<'a>, trees: &mut TokenStreamBuilder) -> PResult<'a, ()> {
     #[cfg(span_locations)]
     let lo = input.off;
     let (rest, (comment, inner)) = doc_comment_contents(input)?;
@@ -806,25 +806,31 @@ fn doc_comment(input: Cursor) -> PResult<Vec<TokenTree>> {
         scan_for_bare_cr = rest;
     }
 
-    let mut trees = Vec::new();
-    trees.push(TokenTree::Punct(Punct::new('#', Spacing::Alone)));
+    let mut pound = Punct::new('#', Spacing::Alone);
+    pound.set_span(span);
+    trees.push_token_from_parser(TokenTree::Punct(pound));
+
     if inner {
-        trees.push(Punct::new('!', Spacing::Alone).into());
-    }
-    let mut stream = vec![
-        TokenTree::Ident(crate::Ident::new("doc", span)),
-        TokenTree::Punct(Punct::new('=', Spacing::Alone)),
-        TokenTree::Literal(crate::Literal::string(comment)),
-    ];
-    for tt in &mut stream {
-        tt.set_span(span);
-    }
-    let group = Group::new(Delimiter::Bracket, TokenStream::from(stream));
-    trees.push(crate::Group::_new_stable(group).into());
-    for tt in &mut trees {
-        tt.set_span(span);
-    }
-    Ok((rest, trees))
+        let mut bang = Punct::new('!', Spacing::Alone);
+        bang.set_span(span);
+        trees.push_token_from_parser(TokenTree::Punct(bang));
+    }
+
+    let doc_ident = crate::Ident::new("doc", span);
+    let mut equal = Punct::new('=', Spacing::Alone);
+    equal.set_span(span);
+    let mut literal = crate::Literal::string(comment);
+    literal.set_span(span);
+    let mut bracketed = TokenStreamBuilder::with_capacity(3);
+    bracketed.push_token_from_parser(TokenTree::Ident(doc_ident));
+    bracketed.push_token_from_parser(TokenTree::Punct(equal));
+    bracketed.push_token_from_parser(TokenTree::Literal(literal));
+    let group = Group::new(Delimiter::Bracket, bracketed.build());
+    let mut group = crate::Group::_new_stable(group);
+    group.set_span(span);
+    trees.push_token_from_parser(TokenTree::Group(group));
+
+    Ok((rest, ()))
 }
 
 fn doc_comment_contents(input: Cursor) -> PResult<(&str, bool)> {
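The rewritten `doc_comment` makes the lowering explicit: `/// text` is parsed into a `#` punct followed by a bracketed `[doc = " text"]` group, with the comment's span applied to each token as it is built, rather than patched up in a second loop afterwards. The result is observable through the public API; a small sketch using proc-macro2's documented parsing behavior:

    use proc_macro2::TokenStream;

    fn main() {
        let tokens: TokenStream = "/// hello".parse().unwrap();
        // The doc comment lowers to a `#` punct plus a bracketed group,
        // printed roughly as: # [doc = " hello"]
        println!("{}", tokens);
        assert_eq!(tokens.into_iter().count(), 2);
    }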
diff --git a/vendor/proc-macro2/src/rcvec.rs b/vendor/proc-macro2/src/rcvec.rs
new file mode 100644
index 000000000..86ca7d808
--- /dev/null
+++ b/vendor/proc-macro2/src/rcvec.rs
@@ -0,0 +1,142 @@
+use core::mem;
+use core::slice;
+use std::rc::Rc;
+use std::vec;
+
+pub(crate) struct RcVec<T> {
+    inner: Rc<Vec<T>>,
+}
+
+pub(crate) struct RcVecBuilder<T> {
+    inner: Vec<T>,
+}
+
+pub(crate) struct RcVecMut<'a, T> {
+    inner: &'a mut Vec<T>,
+}
+
+#[derive(Clone)]
+pub(crate) struct RcVecIntoIter<T> {
+    inner: vec::IntoIter<T>,
+}
+
+impl<T> RcVec<T> {
+    pub fn is_empty(&self) -> bool {
+        self.inner.is_empty()
+    }
+
+    pub fn len(&self) -> usize {
+        self.inner.len()
+    }
+
+    pub fn iter(&self) -> slice::Iter<T> {
+        self.inner.iter()
+    }
+
+    pub fn make_mut(&mut self) -> RcVecMut<T>
+    where
+        T: Clone,
+    {
+        RcVecMut {
+            inner: Rc::make_mut(&mut self.inner),
+        }
+    }
+
+    pub fn get_mut(&mut self) -> Option<RcVecMut<T>> {
+        let inner = Rc::get_mut(&mut self.inner)?;
+        Some(RcVecMut { inner })
+    }
+
+    pub fn make_owned(mut self) -> RcVecBuilder<T>
+    where
+        T: Clone,
+    {
+        let vec = if let Some(owned) = Rc::get_mut(&mut self.inner) {
+            mem::replace(owned, Vec::new())
+        } else {
+            Vec::clone(&self.inner)
+        };
+        RcVecBuilder { inner: vec }
+    }
+}
+
+impl<T> RcVecBuilder<T> {
+    pub fn new() -> Self {
+        RcVecBuilder { inner: Vec::new() }
+    }
+
+    pub fn with_capacity(cap: usize) -> Self {
+        RcVecBuilder {
+            inner: Vec::with_capacity(cap),
+        }
+    }
+
+    pub fn push(&mut self, element: T) {
+        self.inner.push(element);
+    }
+
+    pub fn extend(&mut self, iter: impl IntoIterator<Item = T>) {
+        self.inner.extend(iter);
+    }
+
+    pub fn as_mut(&mut self) -> RcVecMut<T> {
+        RcVecMut {
+            inner: &mut self.inner,
+        }
+    }
+
+    pub fn build(self) -> RcVec<T> {
+        RcVec {
+            inner: Rc::new(self.inner),
+        }
+    }
+}
+
+impl<'a, T> RcVecMut<'a, T> {
+    pub fn push(&mut self, element: T) {
+        self.inner.push(element);
+    }
+
+    pub fn extend(&mut self, iter: impl IntoIterator<Item = T>) {
+        self.inner.extend(iter);
+    }
+
+    pub fn pop(&mut self) -> Option<T> {
+        self.inner.pop()
+    }
+
+    pub fn as_mut(&mut self) -> RcVecMut<T> {
+        RcVecMut { inner: self.inner }
+    }
+}
+
+impl<T> Clone for RcVec<T> {
+    fn clone(&self) -> Self {
+        RcVec {
+            inner: Rc::clone(&self.inner),
+        }
+    }
+}
+
+impl<T> IntoIterator for RcVecBuilder<T> {
+    type Item = T;
+    type IntoIter = RcVecIntoIter<T>;
+
+    fn into_iter(self) -> Self::IntoIter {
+        RcVecIntoIter {
+            inner: self.inner.into_iter(),
+        }
+    }
+}
+
+impl<T> Iterator for RcVecIntoIter<T> {
+    type Item = T;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        self.inner.next()
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.inner.size_hint()
+    }
+}
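The new rcvec.rs is the heart of this release: the fallback `TokenStream` now wraps an `Rc<Vec<TokenTree>>`, so cloning a stream is a reference-count bump, and `Rc::make_mut` (used by `make_mut` above) clones the backing vector only when a shared stream is actually mutated. The standard-library behavior it leans on, in isolation:

    use std::rc::Rc;

    fn main() {
        let mut a: Rc<Vec<i32>> = Rc::new(vec![1, 2, 3]);
        let b = Rc::clone(&a); // O(1): no element is copied

        // make_mut sees the second owner and clones the Vec exactly once,
        // leaving `b` untouched; with a unique owner it mutates in place.
        Rc::make_mut(&mut a).push(4);

        assert_eq!(*a, vec![1, 2, 3, 4]);
        assert_eq!(*b, vec![1, 2, 3]);
    }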
diff --git a/vendor/proc-macro2/src/wrapper.rs b/vendor/proc-macro2/src/wrapper.rs
index 750e156ff..934440139 100644
--- a/vendor/proc-macro2/src/wrapper.rs
+++ b/vendor/proc-macro2/src/wrapper.rs
@@ -1,12 +1,12 @@
 use crate::detection::inside_proc_macro;
 use crate::{fallback, Delimiter, Punct, Spacing, TokenTree};
-use std::fmt::{self, Debug, Display};
-use std::iter::FromIterator;
-use std::ops::RangeBounds;
+use core::fmt::{self, Debug, Display};
+use core::iter::FromIterator;
+use core::ops::RangeBounds;
+use core::str::FromStr;
 use std::panic;
 #[cfg(super_unstable)]
 use std::path::PathBuf;
-use std::str::FromStr;
 
 #[derive(Clone)]
 pub(crate) enum TokenStream {
@@ -350,12 +350,6 @@ impl Iterator for TokenTreeIter {
     }
 }
 
-impl Debug for TokenTreeIter {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        f.debug_struct("TokenTreeIter").finish()
-    }
-}
-
 #[derive(Clone, PartialEq, Eq)]
 #[cfg(super_unstable)]
 pub(crate) enum SourceFile {
diff --git a/vendor/proc-macro2/tests/comments.rs b/vendor/proc-macro2/tests/comments.rs
index 71741080c..4f7236dea 100644
--- a/vendor/proc-macro2/tests/comments.rs
+++ b/vendor/proc-macro2/tests/comments.rs
@@ -1,3 +1,5 @@
+#![allow(clippy::assertions_on_result_states)]
+
 use proc_macro2::{Delimiter, Literal, Spacing, TokenStream, TokenTree};
 
 // #[doc = "..."] -> "..."
diff --git a/vendor/proc-macro2/tests/test.rs b/vendor/proc-macro2/tests/test.rs
index 86031db6a..16f775ed0 100644
--- a/vendor/proc-macro2/tests/test.rs
+++ b/vendor/proc-macro2/tests/test.rs
@@ -1,4 +1,4 @@
-#![allow(clippy::non_ascii_literal)]
+#![allow(clippy::assertions_on_result_states, clippy::non_ascii_literal)]
 
 use proc_macro2::{Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
 use std::panic;
@@ -548,6 +548,13 @@ fn default_tokenstream_is_empty() {
 }
 
 #[test]
+fn tokenstream_size_hint() {
+    let tokens = "a b (c d) e".parse::<TokenStream>().unwrap();
+
+    assert_eq!(tokens.into_iter().size_hint(), (4, Some(4)));
+}
+
+#[test]
 fn tuple_indexing() {
     // This behavior may change depending on https://github.com/rust-lang/rust/pull/71322
     let mut tokens = "tuple.0.0".parse::<TokenStream>().unwrap().into_iter();