author     Daniel Baumann <daniel.baumann@progress-linux.org>   2024-04-17 12:11:28 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>   2024-04-17 12:11:28 +0000
commit     94a0819fe3a0d679c3042a77bfe6a2afc505daea (patch)
tree       2b827afe6a05f3538db3f7803a88c4587fe85648 /vendor/syn/src
parent     Adding upstream version 1.64.0+dfsg1. (diff)
download   rustc-94a0819fe3a0d679c3042a77bfe6a2afc505daea.tar.xz
           rustc-94a0819fe3a0d679c3042a77bfe6a2afc505daea.zip

Adding upstream version 1.66.0+dfsg1. (tag: upstream/1.66.0+dfsg1)

Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'vendor/syn/src')

-rw-r--r--  vendor/syn/src/buffer.rs             199
-rw-r--r--  vendor/syn/src/expr.rs                21
-rw-r--r--  vendor/syn/src/gen/clone.rs            2
-rw-r--r--  vendor/syn/src/gen/debug.rs           10
-rw-r--r--  vendor/syn/src/gen/eq.rs               6
-rw-r--r--  vendor/syn/src/gen/fold.rs             6
-rw-r--r--  vendor/syn/src/gen/hash.rs             6
-rw-r--r--  vendor/syn/src/gen/visit.rs            6
-rw-r--r--  vendor/syn/src/gen/visit_mut.rs        6
-rw-r--r--  vendor/syn/src/item.rs                14
-rw-r--r--  vendor/syn/src/lib.rs                 13
-rw-r--r--  vendor/syn/src/lit.rs                  2
-rw-r--r--  vendor/syn/src/parse_macro_input.rs    4
-rw-r--r--  vendor/syn/src/pat.rs                 35
-rw-r--r--  vendor/syn/src/path.rs                46
-rw-r--r--  vendor/syn/src/ty.rs                  53

17 files changed, 207 insertions, 228 deletions
diff --git a/vendor/syn/src/buffer.rs b/vendor/syn/src/buffer.rs index 2cb6690f0..161b614c8 100644 --- a/vendor/syn/src/buffer.rs +++ b/vendor/syn/src/buffer.rs @@ -15,20 +15,17 @@ use crate::proc_macro as pm; use crate::Lifetime; use proc_macro2::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree}; use std::marker::PhantomData; -use std::ptr; -use std::slice; /// Internal type which is used instead of `TokenTree` to represent a token tree /// within a `TokenBuffer`. enum Entry { // Mimicking types from proc-macro. - Group(Group, TokenBuffer), + // Group entries contain the offset to the matching End entry. + Group(Group, usize), Ident(Ident), Punct(Punct), Literal(Literal), - // End entries contain a raw pointer to the entry from the containing - // token tree, or null if this is the outermost level. - End(*const Entry), + End, } /// A buffer that can be efficiently traversed multiple times, unlike @@ -37,76 +34,29 @@ enum Entry { /// /// *This type is available only if Syn is built with the `"parsing"` feature.* pub struct TokenBuffer { - // NOTE: Do not implement clone on this - there are raw pointers inside - // these entries which will be messed up. Moving the `TokenBuffer` itself is - // safe as the data pointed to won't be moved. - ptr: *const Entry, - len: usize, -} - -impl Drop for TokenBuffer { - fn drop(&mut self) { - unsafe { - let slice = slice::from_raw_parts_mut(self.ptr as *mut Entry, self.len); - let _ = Box::from_raw(slice); - } - } + // NOTE: Do not implement clone on this - while the current design could be + // cloned, other designs which could be desirable may not be cloneable. + entries: Box<[Entry]>, } impl TokenBuffer { - // NOTE: Do not mutate the Vec returned from this function once it returns; - // the address of its backing memory must remain stable. - fn inner_new(stream: TokenStream, up: *const Entry) -> TokenBuffer { - // Build up the entries list, recording the locations of any Groups - // in the list to be processed later. - let mut entries = Vec::new(); - let mut groups = Vec::new(); + fn recursive_new(entries: &mut Vec<Entry>, stream: TokenStream) { for tt in stream { match tt { - TokenTree::Ident(sym) => { - entries.push(Entry::Ident(sym)); - } - TokenTree::Punct(op) => { - entries.push(Entry::Punct(op)); - } - TokenTree::Literal(l) => { - entries.push(Entry::Literal(l)); - } - TokenTree::Group(g) => { - // Record the index of the interesting entry, and store an - // `End(null)` there temporarily. - groups.push((entries.len(), g)); - entries.push(Entry::End(ptr::null())); + TokenTree::Ident(ident) => entries.push(Entry::Ident(ident)), + TokenTree::Punct(punct) => entries.push(Entry::Punct(punct)), + TokenTree::Literal(literal) => entries.push(Entry::Literal(literal)), + TokenTree::Group(group) => { + let group_start_index = entries.len(); + entries.push(Entry::End); // we replace this below + Self::recursive_new(entries, group.stream()); + let group_end_index = entries.len(); + entries.push(Entry::End); + let group_end_offset = group_end_index - group_start_index; + entries[group_start_index] = Entry::Group(group, group_end_offset); } } } - // Add an `End` entry to the end with a reference to the enclosing token - // stream which was passed in. - entries.push(Entry::End(up)); - - // NOTE: This is done to ensure that we don't accidentally modify the - // length of the backing buffer. The backing buffer must remain at a - // constant address after this point, as we are going to store a raw - // pointer into it. 
- let entries = entries.into_boxed_slice(); - let len = entries.len(); - // Convert boxed slice into a pointer to the first element early, to - // avoid invalidating pointers into this slice when we move the Box. - // See https://github.com/rust-lang/unsafe-code-guidelines/issues/326 - let entries = Box::into_raw(entries) as *mut Entry; - for (idx, group) in groups { - // We know that this index refers to one of the temporary - // `End(null)` entries, and we know that the last entry is - // `End(up)`, so the next index is also valid. - let group_up = unsafe { entries.add(idx + 1) }; - - // The end entry stored at the end of this Entry::Group should - // point to the Entry which follows the Group in the list. - let inner = Self::inner_new(group.stream(), group_up); - unsafe { *entries.add(idx) = Entry::Group(group, inner) }; - } - - TokenBuffer { ptr: entries, len } } /// Creates a `TokenBuffer` containing all the tokens from the input @@ -125,13 +75,19 @@ impl TokenBuffer { /// Creates a `TokenBuffer` containing all the tokens from the input /// `proc_macro2::TokenStream`. pub fn new2(stream: TokenStream) -> Self { - Self::inner_new(stream, ptr::null()) + let mut entries = Vec::new(); + Self::recursive_new(&mut entries, stream); + entries.push(Entry::End); + Self { + entries: entries.into_boxed_slice(), + } } /// Creates a cursor referencing the first token in the buffer and able to /// traverse until the end of the buffer. pub fn begin(&self) -> Cursor { - unsafe { Cursor::create(self.ptr, self.ptr.add(self.len - 1)) } + let ptr = self.entries.as_ptr(); + unsafe { Cursor::create(ptr, ptr.add(self.entries.len() - 1)) } } } @@ -151,7 +107,7 @@ impl TokenBuffer { pub struct Cursor<'a> { // The current entry which the `Cursor` is pointing at. ptr: *const Entry, - // This is the only `Entry::End(..)` object which this cursor is allowed to + // This is the only `Entry::End` object which this cursor is allowed to // point at. All other `End` objects are skipped over in `Cursor::create`. scope: *const Entry, // Cursor is covariant in 'a. This field ensures that our pointers are still @@ -171,7 +127,7 @@ impl<'a> Cursor<'a> { // object in global storage. struct UnsafeSyncEntry(Entry); unsafe impl Sync for UnsafeSyncEntry {} - static EMPTY_ENTRY: UnsafeSyncEntry = UnsafeSyncEntry(Entry::End(0 as *const Entry)); + static EMPTY_ENTRY: UnsafeSyncEntry = UnsafeSyncEntry(Entry::End); Cursor { ptr: &EMPTY_ENTRY.0, @@ -184,15 +140,15 @@ impl<'a> Cursor<'a> { /// `None`-delimited scopes when the cursor reaches the end of them, /// allowing for them to be treated transparently. unsafe fn create(mut ptr: *const Entry, scope: *const Entry) -> Self { - // NOTE: If we're looking at a `End(..)`, we want to advance the cursor + // NOTE: If we're looking at a `End`, we want to advance the cursor // past it, unless `ptr == scope`, which means that we're at the edge of // our cursor's scope. We should only have `ptr != scope` at the exit // from None-delimited groups entered with `ignore_none`. - while let Entry::End(exit) = *ptr { + while let Entry::End = *ptr { if ptr == scope { break; } - ptr = exit; + ptr = ptr.add(1); } Cursor { @@ -210,7 +166,10 @@ impl<'a> Cursor<'a> { /// Bump the cursor to point at the next token after the current one. This /// is undefined behavior if the cursor is currently looking at an /// `Entry::End`. 
- unsafe fn bump(self) -> Cursor<'a> { + /// + /// If the cursor is looking at an `Entry::Group`, the bumped cursor will + /// point at the first token in the group (with the same scope end). + unsafe fn bump_ignore_group(self) -> Cursor<'a> { Cursor::create(self.ptr.offset(1), self.scope) } @@ -220,14 +179,9 @@ impl<'a> Cursor<'a> { /// /// WARNING: This mutates its argument. fn ignore_none(&mut self) { - while let Entry::Group(group, buf) = self.entry() { + while let Entry::Group(group, _) = self.entry() { if group.delimiter() == Delimiter::None { - // NOTE: We call `Cursor::create` here to make sure that - // situations where we should immediately exit the span after - // entering it are handled correctly. - unsafe { - *self = Cursor::create(buf.ptr, self.scope); - } + unsafe { *self = self.bump_ignore_group() }; } else { break; } @@ -251,9 +205,12 @@ impl<'a> Cursor<'a> { self.ignore_none(); } - if let Entry::Group(group, buf) = self.entry() { + if let Entry::Group(group, end_offset) = self.entry() { if group.delimiter() == delim { - return Some((buf.begin(), group.span(), unsafe { self.bump() })); + let end_of_group = unsafe { self.ptr.add(*end_offset) }; + let inside_of_group = unsafe { Cursor::create(self.ptr.add(1), end_of_group) }; + let after_group = unsafe { Cursor::create(end_of_group, self.scope) }; + return Some((inside_of_group, group.span(), after_group)); } } @@ -265,7 +222,7 @@ impl<'a> Cursor<'a> { pub fn ident(mut self) -> Option<(Ident, Cursor<'a>)> { self.ignore_none(); match self.entry() { - Entry::Ident(ident) => Some((ident.clone(), unsafe { self.bump() })), + Entry::Ident(ident) => Some((ident.clone(), unsafe { self.bump_ignore_group() })), _ => None, } } @@ -275,7 +232,9 @@ impl<'a> Cursor<'a> { pub fn punct(mut self) -> Option<(Punct, Cursor<'a>)> { self.ignore_none(); match self.entry() { - Entry::Punct(op) if op.as_char() != '\'' => Some((op.clone(), unsafe { self.bump() })), + Entry::Punct(punct) if punct.as_char() != '\'' => { + Some((punct.clone(), unsafe { self.bump_ignore_group() })) + } _ => None, } } @@ -285,7 +244,7 @@ impl<'a> Cursor<'a> { pub fn literal(mut self) -> Option<(Literal, Cursor<'a>)> { self.ignore_none(); match self.entry() { - Entry::Literal(lit) => Some((lit.clone(), unsafe { self.bump() })), + Entry::Literal(literal) => Some((literal.clone(), unsafe { self.bump_ignore_group() })), _ => None, } } @@ -295,18 +254,14 @@ impl<'a> Cursor<'a> { pub fn lifetime(mut self) -> Option<(Lifetime, Cursor<'a>)> { self.ignore_none(); match self.entry() { - Entry::Punct(op) if op.as_char() == '\'' && op.spacing() == Spacing::Joint => { - let next = unsafe { self.bump() }; - match next.ident() { - Some((ident, rest)) => { - let lifetime = Lifetime { - apostrophe: op.span(), - ident, - }; - Some((lifetime, rest)) - } - None => None, - } + Entry::Punct(punct) if punct.as_char() == '\'' && punct.spacing() == Spacing::Joint => { + let next = unsafe { self.bump_ignore_group() }; + let (ident, rest) = next.ident()?; + let lifetime = Lifetime { + apostrophe: punct.span(), + ident, + }; + Some((lifetime, rest)) } _ => None, } @@ -332,15 +287,16 @@ impl<'a> Cursor<'a> { /// This method does not treat `None`-delimited groups as transparent, and /// will return a `Group(None, ..)` if the cursor is looking at one. 
pub fn token_tree(self) -> Option<(TokenTree, Cursor<'a>)> { - let tree = match self.entry() { - Entry::Group(group, _) => group.clone().into(), - Entry::Literal(lit) => lit.clone().into(), - Entry::Ident(ident) => ident.clone().into(), - Entry::Punct(op) => op.clone().into(), - Entry::End(..) => return None, + let (tree, len) = match self.entry() { + Entry::Group(group, end_offset) => (group.clone().into(), *end_offset), + Entry::Literal(literal) => (literal.clone().into(), 1), + Entry::Ident(ident) => (ident.clone().into(), 1), + Entry::Punct(punct) => (punct.clone().into(), 1), + Entry::End => return None, }; - Some((tree, unsafe { self.bump() })) + let rest = unsafe { Cursor::create(self.ptr.add(len), self.scope) }; + Some((tree, rest)) } /// Returns the `Span` of the current token, or `Span::call_site()` if this @@ -348,10 +304,10 @@ impl<'a> Cursor<'a> { pub fn span(self) -> Span { match self.entry() { Entry::Group(group, _) => group.span(), - Entry::Literal(l) => l.span(), - Entry::Ident(t) => t.span(), - Entry::Punct(o) => o.span(), - Entry::End(..) => Span::call_site(), + Entry::Literal(literal) => literal.span(), + Entry::Ident(ident) => ident.span(), + Entry::Punct(punct) => punct.span(), + Entry::End => Span::call_site(), } } @@ -360,19 +316,22 @@ impl<'a> Cursor<'a> { /// /// This method treats `'lifetimes` as a single token. pub(crate) fn skip(self) -> Option<Cursor<'a>> { - match self.entry() { - Entry::End(..) => None, + let len = match self.entry() { + Entry::End => return None, // Treat lifetimes as a single tt for the purposes of 'skip'. - Entry::Punct(op) if op.as_char() == '\'' && op.spacing() == Spacing::Joint => { - let next = unsafe { self.bump() }; - match next.entry() { - Entry::Ident(_) => Some(unsafe { next.bump() }), - _ => Some(next), + Entry::Punct(punct) if punct.as_char() == '\'' && punct.spacing() == Spacing::Joint => { + match unsafe { &*self.ptr.add(1) } { + Entry::Ident(_) => 2, + _ => 1, } } - _ => Some(unsafe { self.bump() }), - } + + Entry::Group(_, end_offset) => *end_offset, + _ => 1, + }; + + Some(unsafe { Cursor::create(self.ptr.add(len), self.scope) }) } } diff --git a/vendor/syn/src/expr.rs b/vendor/syn/src/expr.rs index cf0fa0af9..93a59b0e2 100644 --- a/vendor/syn/src/expr.rs +++ b/vendor/syn/src/expr.rs @@ -1371,7 +1371,9 @@ pub(crate) mod parsing { }); } else if Precedence::Cast >= base && input.peek(Token![as]) { let as_token: Token![as] = input.parse()?; - let ty = input.call(Type::without_plus)?; + let allow_plus = false; + let allow_group_generic = false; + let ty = ty::parsing::ambig_ty(input, allow_plus, allow_group_generic)?; check_cast(input)?; lhs = Expr::Cast(ExprCast { attrs: Vec::new(), @@ -1381,7 +1383,9 @@ pub(crate) mod parsing { }); } else if Precedence::Cast >= base && input.peek(Token![:]) && !input.peek(Token![::]) { let colon_token: Token![:] = input.parse()?; - let ty = input.call(Type::without_plus)?; + let allow_plus = false; + let allow_group_generic = false; + let ty = ty::parsing::ambig_ty(input, allow_plus, allow_group_generic)?; check_cast(input)?; lhs = Expr::Type(ExprType { attrs: Vec::new(), @@ -1429,7 +1433,9 @@ pub(crate) mod parsing { }); } else if Precedence::Cast >= base && input.peek(Token![as]) { let as_token: Token![as] = input.parse()?; - let ty = input.call(Type::without_plus)?; + let allow_plus = false; + let allow_group_generic = false; + let ty = ty::parsing::ambig_ty(input, allow_plus, allow_group_generic)?; check_cast(input)?; lhs = Expr::Cast(ExprCast { attrs: Vec::new(), @@ -1727,7 +1733,10 @@ 
pub(crate) mod parsing { || input.peek(Token![move]) { expr_closure(input, allow_struct).map(Expr::Closure) - } else if input.peek(Token![for]) && input.peek2(Token![<]) && input.peek3(Lifetime) { + } else if input.peek(Token![for]) + && input.peek2(Token![<]) + && (input.peek3(Lifetime) || input.peek3(Token![>])) + { let begin = input.fork(); input.parse::<BoundLifetimes>()?; expr_closure(input, allow_struct)?; @@ -2010,7 +2019,9 @@ pub(crate) mod parsing { Expr::If(input.parse()?) } else if input.peek(Token![while]) { Expr::While(input.parse()?) - } else if input.peek(Token![for]) { + } else if input.peek(Token![for]) + && !(input.peek2(Token![<]) && (input.peek3(Lifetime) || input.peek3(Token![>]))) + { Expr::ForLoop(input.parse()?) } else if input.peek(Token![loop]) { Expr::Loop(input.parse()?) diff --git a/vendor/syn/src/gen/clone.rs b/vendor/syn/src/gen/clone.rs index 8de1cd8c9..a413e3ec7 100644 --- a/vendor/syn/src/gen/clone.rs +++ b/vendor/syn/src/gen/clone.rs @@ -910,9 +910,9 @@ impl Clone for GenericArgument { match self { GenericArgument::Lifetime(v0) => GenericArgument::Lifetime(v0.clone()), GenericArgument::Type(v0) => GenericArgument::Type(v0.clone()), + GenericArgument::Const(v0) => GenericArgument::Const(v0.clone()), GenericArgument::Binding(v0) => GenericArgument::Binding(v0.clone()), GenericArgument::Constraint(v0) => GenericArgument::Constraint(v0.clone()), - GenericArgument::Const(v0) => GenericArgument::Const(v0.clone()), } } } diff --git a/vendor/syn/src/gen/debug.rs b/vendor/syn/src/gen/debug.rs index 4adf8c593..a1f0afa79 100644 --- a/vendor/syn/src/gen/debug.rs +++ b/vendor/syn/src/gen/debug.rs @@ -1268,6 +1268,11 @@ impl Debug for GenericArgument { formatter.field(v0); formatter.finish() } + GenericArgument::Const(v0) => { + let mut formatter = formatter.debug_tuple("Const"); + formatter.field(v0); + formatter.finish() + } GenericArgument::Binding(v0) => { let mut formatter = formatter.debug_tuple("Binding"); formatter.field(v0); @@ -1278,11 +1283,6 @@ impl Debug for GenericArgument { formatter.field(v0); formatter.finish() } - GenericArgument::Const(v0) => { - let mut formatter = formatter.debug_tuple("Const"); - formatter.field(v0); - formatter.finish() - } } } } diff --git a/vendor/syn/src/gen/eq.rs b/vendor/syn/src/gen/eq.rs index 40fed0b89..20acb809d 100644 --- a/vendor/syn/src/gen/eq.rs +++ b/vendor/syn/src/gen/eq.rs @@ -878,13 +878,13 @@ impl PartialEq for GenericArgument { (GenericArgument::Type(self0), GenericArgument::Type(other0)) => { self0 == other0 } - (GenericArgument::Binding(self0), GenericArgument::Binding(other0)) => { + (GenericArgument::Const(self0), GenericArgument::Const(other0)) => { self0 == other0 } - (GenericArgument::Constraint(self0), GenericArgument::Constraint(other0)) => { + (GenericArgument::Binding(self0), GenericArgument::Binding(other0)) => { self0 == other0 } - (GenericArgument::Const(self0), GenericArgument::Const(other0)) => { + (GenericArgument::Constraint(self0), GenericArgument::Constraint(other0)) => { self0 == other0 } _ => false, diff --git a/vendor/syn/src/gen/fold.rs b/vendor/syn/src/gen/fold.rs index 6e19e6f3a..98bb5794a 100644 --- a/vendor/syn/src/gen/fold.rs +++ b/vendor/syn/src/gen/fold.rs @@ -1787,15 +1787,15 @@ where GenericArgument::Type(_binding_0) => { GenericArgument::Type(f.fold_type(_binding_0)) } + GenericArgument::Const(_binding_0) => { + GenericArgument::Const(f.fold_expr(_binding_0)) + } GenericArgument::Binding(_binding_0) => { GenericArgument::Binding(f.fold_binding(_binding_0)) } 
GenericArgument::Constraint(_binding_0) => { GenericArgument::Constraint(f.fold_constraint(_binding_0)) } - GenericArgument::Const(_binding_0) => { - GenericArgument::Const(f.fold_expr(_binding_0)) - } } } #[cfg(feature = "full")] diff --git a/vendor/syn/src/gen/hash.rs b/vendor/syn/src/gen/hash.rs index f68a7630e..d0400e19d 100644 --- a/vendor/syn/src/gen/hash.rs +++ b/vendor/syn/src/gen/hash.rs @@ -1184,15 +1184,15 @@ impl Hash for GenericArgument { state.write_u8(1u8); v0.hash(state); } - GenericArgument::Binding(v0) => { + GenericArgument::Const(v0) => { state.write_u8(2u8); v0.hash(state); } - GenericArgument::Constraint(v0) => { + GenericArgument::Binding(v0) => { state.write_u8(3u8); v0.hash(state); } - GenericArgument::Const(v0) => { + GenericArgument::Constraint(v0) => { state.write_u8(4u8); v0.hash(state); } diff --git a/vendor/syn/src/gen/visit.rs b/vendor/syn/src/gen/visit.rs index 051b65936..19ddd2e72 100644 --- a/vendor/syn/src/gen/visit.rs +++ b/vendor/syn/src/gen/visit.rs @@ -1974,15 +1974,15 @@ where GenericArgument::Type(_binding_0) => { v.visit_type(_binding_0); } + GenericArgument::Const(_binding_0) => { + v.visit_expr(_binding_0); + } GenericArgument::Binding(_binding_0) => { v.visit_binding(_binding_0); } GenericArgument::Constraint(_binding_0) => { v.visit_constraint(_binding_0); } - GenericArgument::Const(_binding_0) => { - v.visit_expr(_binding_0); - } } } #[cfg(feature = "full")] diff --git a/vendor/syn/src/gen/visit_mut.rs b/vendor/syn/src/gen/visit_mut.rs index 3ddbe9c06..239709d19 100644 --- a/vendor/syn/src/gen/visit_mut.rs +++ b/vendor/syn/src/gen/visit_mut.rs @@ -1975,15 +1975,15 @@ where GenericArgument::Type(_binding_0) => { v.visit_type_mut(_binding_0); } + GenericArgument::Const(_binding_0) => { + v.visit_expr_mut(_binding_0); + } GenericArgument::Binding(_binding_0) => { v.visit_binding_mut(_binding_0); } GenericArgument::Constraint(_binding_0) => { v.visit_constraint_mut(_binding_0); } - GenericArgument::Const(_binding_0) => { - v.visit_expr_mut(_binding_0); - } } } #[cfg(feature = "full")] diff --git a/vendor/syn/src/item.rs b/vendor/syn/src/item.rs index 1ce970ee2..a1ef7ab43 100644 --- a/vendor/syn/src/item.rs +++ b/vendor/syn/src/item.rs @@ -2765,7 +2765,6 @@ mod printing { use super::*; use crate::attr::FilterAttrs; use crate::print::TokensOrDefault; - use crate::punctuated::Pair; use proc_macro2::TokenStream; use quote::{ToTokens, TokenStreamExt}; @@ -3283,16 +3282,9 @@ mod printing { self.generics.to_tokens(tokens); self.paren_token.surround(tokens, |tokens| { let mut last_is_variadic = false; - for input in self.inputs.pairs() { - match input { - Pair::Punctuated(input, comma) => { - maybe_variadic_to_tokens(input, tokens); - comma.to_tokens(tokens); - } - Pair::End(input) => { - last_is_variadic = maybe_variadic_to_tokens(input, tokens); - } - } + for pair in self.inputs.pairs() { + last_is_variadic = maybe_variadic_to_tokens(pair.value(), tokens); + pair.punct().to_tokens(tokens); } if self.variadic.is_some() && !last_is_variadic { if !self.inputs.empty_or_trailing() { diff --git a/vendor/syn/src/lib.rs b/vendor/syn/src/lib.rs index 608c39876..81f03e1b5 100644 --- a/vendor/syn/src/lib.rs +++ b/vendor/syn/src/lib.rs @@ -250,15 +250,18 @@ //! dynamic library libproc_macro from rustc toolchain. // Syn types in rustdoc of other crates get linked to here. 
-#![doc(html_root_url = "https://docs.rs/syn/1.0.98")] +#![doc(html_root_url = "https://docs.rs/syn/1.0.102")] #![cfg_attr(doc_cfg, feature(doc_cfg))] #![allow(non_camel_case_types)] #![allow( + clippy::bool_to_int_with_if, clippy::cast_lossless, clippy::cast_possible_truncation, + clippy::cast_ptr_alignment, clippy::default_trait_access, clippy::doc_markdown, clippy::expl_impl_clone_on_copy, + clippy::explicit_auto_deref, clippy::if_not_else, clippy::inherent_to_string, clippy::large_enum_variant, @@ -810,14 +813,6 @@ mod print; //////////////////////////////////////////////////////////////////////////////// -// https://github.com/rust-lang/rust/issues/62830 -#[cfg(feature = "parsing")] -mod rustdoc_workaround { - pub use crate::parse::{self as parse_module}; -} - -//////////////////////////////////////////////////////////////////////////////// - mod error; pub use crate::error::{Error, Result}; diff --git a/vendor/syn/src/lit.rs b/vendor/syn/src/lit.rs index 2600dc801..130b40ed1 100644 --- a/vendor/syn/src/lit.rs +++ b/vendor/syn/src/lit.rs @@ -224,7 +224,7 @@ impl LitStr { // Parse string literal into a token stream with every span equal to the // original literal's span. - let mut tokens = crate::parse_str(&self.value())?; + let mut tokens = TokenStream::from_str(&self.value())?; tokens = respan_token_stream(tokens, self.span()); parser.parse2(tokens) diff --git a/vendor/syn/src/parse_macro_input.rs b/vendor/syn/src/parse_macro_input.rs index 8e1a5ec6b..6163cd70a 100644 --- a/vendor/syn/src/parse_macro_input.rs +++ b/vendor/syn/src/parse_macro_input.rs @@ -4,7 +4,7 @@ /// Refer to the [`parse` module] documentation for more details about parsing /// in Syn. /// -/// [`parse` module]: crate::rustdoc_workaround::parse_module +/// [`parse` module]: mod@crate::parse /// /// <br> /// @@ -51,7 +51,7 @@ /// This macro can also be used with the [`Parser` trait] for types that have /// multiple ways that they can be parsed. 
/// -/// [`Parser` trait]: crate::rustdoc_workaround::parse_module::Parser +/// [`Parser` trait]: crate::parse::Parser /// /// ``` /// # extern crate proc_macro; diff --git a/vendor/syn/src/pat.rs b/vendor/syn/src/pat.rs index fa0818c16..b279186aa 100644 --- a/vendor/syn/src/pat.rs +++ b/vendor/syn/src/pat.rs @@ -400,11 +400,11 @@ pub mod parsing { } if input.peek(token::Brace) { - let pat = pat_struct(input, path)?; + let pat = pat_struct(begin.fork(), input, path)?; if qself.is_some() { Ok(Pat::Verbatim(verbatim::between(begin, input))) } else { - Ok(Pat::Struct(pat)) + Ok(pat) } } else if input.peek(token::Paren) { let pat = pat_tuple_struct(input, path)?; @@ -465,13 +465,23 @@ pub mod parsing { }) } - fn pat_struct(input: ParseStream, path: Path) -> Result<PatStruct> { + fn pat_struct(begin: ParseBuffer, input: ParseStream, path: Path) -> Result<Pat> { let content; let brace_token = braced!(content in input); let mut fields = Punctuated::new(); - while !content.is_empty() && !content.peek(Token![..]) { - let value = content.call(field_pat)?; + let mut dot2_token = None; + while !content.is_empty() { + let attrs = content.call(Attribute::parse_outer)?; + if content.peek(Token![..]) { + dot2_token = Some(content.parse()?); + if !attrs.is_empty() { + return Ok(Pat::Verbatim(verbatim::between(begin, input))); + } + break; + } + let mut value = content.call(field_pat)?; + value.attrs = attrs; fields.push_value(value); if content.is_empty() { break; @@ -480,19 +490,13 @@ pub mod parsing { fields.push_punct(punct); } - let dot2_token = if fields.empty_or_trailing() && content.peek(Token![..]) { - Some(content.parse()?) - } else { - None - }; - - Ok(PatStruct { + Ok(Pat::Struct(PatStruct { attrs: Vec::new(), path, brace_token, fields, dot2_token, - }) + })) } impl Member { @@ -505,7 +509,6 @@ pub mod parsing { } fn field_pat(input: ParseStream) -> Result<FieldPat> { - let attrs = input.call(Attribute::parse_outer)?; let boxed: Option<Token![box]> = input.parse()?; let by_ref: Option<Token![ref]> = input.parse()?; let mutability: Option<Token![mut]> = input.parse()?; @@ -515,7 +518,7 @@ pub mod parsing { || member.is_unnamed() { return Ok(FieldPat { - attrs, + attrs: Vec::new(), member, colon_token: input.parse()?, pat: Box::new(multi_pat_with_leading_vert(input)?), @@ -544,7 +547,7 @@ pub mod parsing { } Ok(FieldPat { - attrs, + attrs: Vec::new(), member: Member::Named(ident), colon_token: None, pat: Box::new(pat), diff --git a/vendor/syn/src/path.rs b/vendor/syn/src/path.rs index 00be352b1..742273afd 100644 --- a/vendor/syn/src/path.rs +++ b/vendor/syn/src/path.rs @@ -109,16 +109,16 @@ ast_enum! { Lifetime(Lifetime), /// A type argument. Type(Type), - /// A binding (equality constraint) on an associated type: the `Item = - /// u8` in `Iterator<Item = u8>`. - Binding(Binding), - /// An associated type bound: `Iterator<Item: Display>`. - Constraint(Constraint), /// A const expression. Must be inside of a block. /// /// NOTE: Identity expressions are represented as Type arguments, as /// they are indistinguishable syntactically. Const(Expr), + /// A binding (equality constraint) on an associated type: the `Item = + /// u8` in `Iterator<Item = u8>`. + Binding(Binding), + /// An associated type bound: `Iterator<Item: Display>`. 
+ Constraint(Constraint), } } @@ -729,8 +729,6 @@ pub(crate) mod printing { match self { GenericArgument::Lifetime(lt) => lt.to_tokens(tokens), GenericArgument::Type(ty) => ty.to_tokens(tokens), - GenericArgument::Binding(tb) => tb.to_tokens(tokens), - GenericArgument::Constraint(tc) => tc.to_tokens(tokens), GenericArgument::Const(e) => match *e { Expr::Lit(_) => e.to_tokens(tokens), @@ -746,6 +744,8 @@ pub(crate) mod printing { e.to_tokens(tokens); }), }, + GenericArgument::Binding(tb) => tb.to_tokens(tokens), + GenericArgument::Constraint(tc) => tc.to_tokens(tokens), } } } @@ -756,11 +756,8 @@ pub(crate) mod printing { self.colon2_token.to_tokens(tokens); self.lt_token.to_tokens(tokens); - // Print lifetimes before types and consts, all before bindings, - // regardless of their order in self.args. - // - // TODO: ordering rules for const arguments vs type arguments have - // not been settled yet. https://github.com/rust-lang/rust/issues/44580 + // Print lifetimes before types/consts/bindings, regardless of their + // order in self.args. let mut trailing_or_empty = true; for param in self.args.pairs() { match **param.value() { @@ -769,37 +766,24 @@ pub(crate) mod printing { trailing_or_empty = param.punct().is_some(); } GenericArgument::Type(_) - | GenericArgument::Binding(_) - | GenericArgument::Constraint(_) - | GenericArgument::Const(_) => {} - } - } - for param in self.args.pairs() { - match **param.value() { - GenericArgument::Type(_) | GenericArgument::Const(_) => { - if !trailing_or_empty { - <Token![,]>::default().to_tokens(tokens); - } - param.to_tokens(tokens); - trailing_or_empty = param.punct().is_some(); - } - GenericArgument::Lifetime(_) + | GenericArgument::Const(_) | GenericArgument::Binding(_) | GenericArgument::Constraint(_) => {} } } for param in self.args.pairs() { match **param.value() { - GenericArgument::Binding(_) | GenericArgument::Constraint(_) => { + GenericArgument::Type(_) + | GenericArgument::Const(_) + | GenericArgument::Binding(_) + | GenericArgument::Constraint(_) => { if !trailing_or_empty { <Token![,]>::default().to_tokens(tokens); } param.to_tokens(tokens); trailing_or_empty = param.punct().is_some(); } - GenericArgument::Lifetime(_) - | GenericArgument::Type(_) - | GenericArgument::Const(_) => {} + GenericArgument::Lifetime(_) => {} } } diff --git a/vendor/syn/src/stmt.rs b/vendor/syn/src/stmt.rs index 3e2c71bdd..58bd013ec 100644 --- a/vendor/syn/src/stmt.rs +++ b/vendor/syn/src/stmt.rs @@ -175,7 +175,11 @@ pub mod parsing { || input.peek(Token![crate]) && !input.peek2(Token![::]) || input.peek(Token![extern]) || input.peek(Token![use]) - || input.peek(Token![static]) && (input.peek2(Token![mut]) || input.peek2(Ident)) + || input.peek(Token![static]) + && (input.peek2(Token![mut]) + || input.peek2(Ident) + && !(input.peek2(Token![async]) + && (input.peek3(Token![move]) || input.peek3(Token![|])))) || input.peek(Token![const]) && !input.peek2(token::Brace) || input.peek(Token![unsafe]) && !input.peek2(token::Brace) || input.peek(Token![async]) diff --git a/vendor/syn/src/ty.rs b/vendor/syn/src/ty.rs index 0f1341fdd..4068be3c7 100644 --- a/vendor/syn/src/ty.rs +++ b/vendor/syn/src/ty.rs @@ -343,7 +343,8 @@ pub mod parsing { impl Parse for Type { fn parse(input: ParseStream) -> Result<Self> { let allow_plus = true; - ambig_ty(input, allow_plus) + let allow_group_generic = true; + ambig_ty(input, allow_plus, allow_group_generic) } } @@ -356,11 +357,16 @@ pub mod parsing { #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] pub fn without_plus(input: 
ParseStream) -> Result<Self> { let allow_plus = false; - ambig_ty(input, allow_plus) + let allow_group_generic = true; + ambig_ty(input, allow_plus, allow_group_generic) } } - fn ambig_ty(input: ParseStream, allow_plus: bool) -> Result<Type> { + pub(crate) fn ambig_ty( + input: ParseStream, + allow_plus: bool, + allow_group_generic: bool, + ) -> Result<Type> { let begin = input.fork(); if input.peek(token::Group) { @@ -381,7 +387,9 @@ pub mod parsing { path: Path::parse_helper(input, false)?, })); } - } else if input.peek(Token![<]) || input.peek(Token![::]) && input.peek3(Token![<]) { + } else if input.peek(Token![<]) && allow_group_generic + || input.peek(Token![::]) && input.peek3(Token![<]) + { if let Type::Path(mut ty) = *group.elem { let arguments = &mut ty.path.segments.last_mut().unwrap().arguments; if let PathArguments::None = arguments { @@ -537,9 +545,15 @@ pub mod parsing { || lookahead.peek(Token![::]) || lookahead.peek(Token![<]) { - if input.peek(Token![dyn]) { - let trait_object = TypeTraitObject::parse(input, allow_plus)?; - return Ok(Type::TraitObject(trait_object)); + let dyn_token: Option<Token![dyn]> = input.parse()?; + if dyn_token.is_some() { + let star_token: Option<Token![*]> = input.parse()?; + let bounds = TypeTraitObject::parse_bounds(input, allow_plus)?; + return Ok(if star_token.is_some() { + Type::Verbatim(verbatim::between(begin, input)) + } else { + Type::TraitObject(TypeTraitObject { dyn_token, bounds }) + }); } let ty: TypePath = input.parse()?; @@ -819,15 +833,28 @@ pub mod parsing { #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] impl Parse for TypePath { fn parse(input: ParseStream) -> Result<Self> { - let (qself, mut path) = path::parsing::qpath(input, false)?; + let expr_style = false; + let (qself, mut path) = path::parsing::qpath(input, expr_style)?; - if path.segments.last().unwrap().arguments.is_empty() + while path.segments.last().unwrap().arguments.is_empty() && (input.peek(token::Paren) || input.peek(Token![::]) && input.peek3(token::Paren)) { input.parse::<Option<Token![::]>>()?; let args: ParenthesizedGenericArguments = input.parse()?; + let allow_associated_type = cfg!(feature = "full") + && match &args.output { + ReturnType::Default => true, + ReturnType::Type(_, ty) => match **ty { + // TODO: probably some of the other kinds allow this too. + Type::Paren(_) => true, + _ => false, + }, + }; let parenthesized = PathArguments::Parenthesized(args); path.segments.last_mut().unwrap().arguments = parenthesized; + if allow_associated_type { + Path::parse_rest(input, &mut path, expr_style)?; + } } Ok(TypePath { qself, path }) @@ -844,7 +871,8 @@ pub mod parsing { pub(crate) fn parse(input: ParseStream, allow_plus: bool) -> Result<Self> { if input.peek(Token![->]) { let arrow = input.parse()?; - let ty = ambig_ty(input, allow_plus)?; + let allow_group_generic = true; + let ty = ambig_ty(input, allow_plus, allow_group_generic)?; Ok(ReturnType::Type(arrow, Box::new(ty))) } else { Ok(ReturnType::Default) @@ -967,7 +995,10 @@ pub mod parsing { let content; Ok(TypeParen { paren_token: parenthesized!(content in input), - elem: Box::new(ambig_ty(&content, allow_plus)?), + elem: Box::new({ + let allow_group_generic = true; + ambig_ty(&content, allow_plus, allow_group_generic)? + }), }) } } |
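
Notes on the larger changes follow. In buffer.rs, the old `TokenBuffer` stored each `Group` as a separately allocated child buffer and terminated every level with an `Entry::End(*const Entry)` back-pointer, which required a manual `Drop` impl and raw-pointer bookkeeping. The new layout is a single flat `Box<[Entry]>` in which a `Group` entry records the offset to its matching `End`, so a cursor can step over an entire group in O(1) and the old `bump` becomes `bump_ignore_group`. The sketch below is a minimal standalone illustration of that flattening idea; `Tok`, `Entry`, `flatten`, and `skip` are illustrative stand-ins, not syn's actual types.

```rust
// Minimal sketch of the "flat buffer with end offsets" design used by the new
// TokenBuffer. Plain strings stand in for proc-macro2 tokens.
#[derive(Debug)]
enum Tok {
    Ident(String),
    Group(Vec<Tok>),
}

#[derive(Debug)]
enum Entry {
    Ident(String),
    // Offset from this entry to its matching `End` entry.
    Group(usize),
    End,
}

fn flatten(tokens: &[Tok], entries: &mut Vec<Entry>) {
    for tok in tokens {
        match tok {
            Tok::Ident(name) => entries.push(Entry::Ident(name.clone())),
            Tok::Group(inner) => {
                let start = entries.len();
                entries.push(Entry::End); // placeholder, patched below
                flatten(inner, entries);
                let end = entries.len(); // index of the matching End entry
                entries.push(Entry::End);
                entries[start] = Entry::Group(end - start);
            }
        }
    }
}

/// Skip one token tree starting at `idx`; groups are skipped in O(1).
fn skip(entries: &[Entry], idx: usize) -> Option<usize> {
    match entries.get(idx)? {
        Entry::Ident(_) => Some(idx + 1),
        Entry::Group(end_offset) => Some(idx + end_offset + 1),
        Entry::End => None,
    }
}

fn main() {
    let stream = vec![
        Tok::Ident("a".to_owned()),
        Tok::Group(vec![Tok::Ident("b".to_owned()), Tok::Ident("c".to_owned())]),
        Tok::Ident("d".to_owned()),
    ];
    let mut entries = Vec::new();
    flatten(&stream, &mut entries);
    entries.push(Entry::End); // outermost terminator, as in TokenBuffer::new2
    println!("{:?}", entries);
    assert_eq!(skip(&entries, 1), Some(5)); // jumping over the group lands on "d"
}
```

Because no entry holds a pointer into a sibling allocation, the buffer can be moved freely and no longer needs a custom `Drop`.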
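In expr.rs, the cast and type-ascription branches now call `ty::parsing::ambig_ty` directly with the new `allow_group_generic` flag instead of `Type::without_plus`, and the closure lookahead also fires for an empty higher-ranked binder (`for<>`), with the `for`-loop branch explicitly excluding that shape. A quick check of the closure change, assuming syn 1.0.102 with the `full` feature; the assertions are my own, not part of the patch.

```rust
fn main() {
    // Accepted since this release; older versions tried to parse `for<> || ()`
    // as a `for` loop and returned an error. syn 1.x has no field on
    // ExprClosure for the binder, so such closures surface as Expr::Verbatim.
    assert!(syn::parse_str::<syn::Expr>("for<> || ()").is_ok());

    // Ordinary `for` loops are unaffected.
    assert!(matches!(
        syn::parse_str::<syn::Expr>("for x in 0..3 {}").unwrap(),
        syn::Expr::ForLoop(_)
    ));
}
```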
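The generated impls (clone, debug, eq, fold, hash, visit, visit_mut) and path.rs move `GenericArgument::Const` so it sits between `Type` and `Binding`, and the angle-bracket printing code collapses the old three-pass ordering (lifetimes, then types/consts, then bindings/constraints) into two passes: lifetimes first, everything else in source order. A small round-trip check of that printing change, assuming syn 1.0.102 plus the quote crate for `ToTokens`; the input string and expected output are my own example.

```rust
use quote::ToTokens;

fn main() {
    // Lifetimes are still hoisted to the front; the binding `Item = u8` and the
    // type `T` now keep their source order instead of the binding being pushed
    // to the end.
    let ty: syn::Type = syn::parse_str("Foo<Item = u8, 'a, T>").unwrap();
    println!("{}", ty.to_token_stream());
    // Prints (modulo token spacing): Foo < 'a , Item = u8 , T >
    // The removed code would have printed: Foo < 'a , T , Item = u8 >
}
```

Note that the generated `Hash` impl writes positional discriminant bytes, so `Const`, `Binding`, and `Constraint` values hash differently than under 1.0.98.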
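lit.rs swaps `crate::parse_str` for `TokenStream::from_str` when `LitStr::parse` re-tokenizes the literal's contents before respanning; the public behavior is unchanged. A usage reminder, with an arbitrary example path of my choosing.

```rust
fn main() {
    // Parse the *contents* of a string literal as a syntax tree node, with
    // every resulting span equal to the literal's own span.
    let lit: syn::LitStr = syn::parse_str(r#""std::collections::HashMap""#).unwrap();
    let path: syn::Path = lit.parse().unwrap();
    assert_eq!(path.segments.len(), 3);
}
```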
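pat.rs moves outer-attribute collection from `field_pat` into `pat_struct`, so the parser can spot an attribute attached directly to the rest pattern (`#[attr] ..`) and fall back to `Pat::Verbatim` for it, while ordinary field attributes keep being attached to their `FieldPat`. A check that the common case still parses, assuming the `full` feature; the match expression is my own example.

```rust
fn main() {
    let expr: syn::Expr = syn::parse_str(
        "match p { Point { #[cfg(test)] x, .. } => x, _ => 0 }",
    )
    .unwrap();
    assert!(matches!(expr, syn::Expr::Match(_)));
}
```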
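ty.rs threads the new `allow_group_generic` flag through `ambig_ty` (used by the expr.rs cast changes above), keeps parsing the rest of a path after parenthesized arguments when the return type allows it, and starts tolerating the nightly `dyn*` syntax by emitting `Type::Verbatim`. A quick check of the `dyn*` behavior, assuming syn 1.0.102 with default features; the assertions are mine.

```rust
fn main() {
    // "dyn star" is accepted and surfaced as an opaque Type::Verbatim.
    let ty: syn::Type = syn::parse_str("dyn* Send + Sync").unwrap();
    assert!(matches!(ty, syn::Type::Verbatim(_)));

    // Ordinary trait objects still come back as Type::TraitObject.
    let ty: syn::Type = syn::parse_str("dyn Send + Sync").unwrap();
    assert!(matches!(ty, syn::Type::TraitObject(_)));
}
```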