From 4f9fe856a25ab29345b90e7725509e9ee38a37be Mon Sep 17 00:00:00 2001 From: Daniel Baumann Date: Wed, 17 Apr 2024 14:19:41 +0200 Subject: Adding upstream version 1.69.0+dfsg1. Signed-off-by: Daniel Baumann --- vendor/syn/src/buffer.rs | 50 ++++++++++++++++++------ vendor/syn/src/drops.rs | 58 ++++++++++++++++++++++++++++ vendor/syn/src/error.rs | 54 ++++++++++++++++---------- vendor/syn/src/generics.rs | 25 ++++++++++++ vendor/syn/src/lib.rs | 6 ++- vendor/syn/src/path.rs | 5 +-- vendor/syn/src/punctuated.rs | 43 +++++++++++++++------ vendor/syn/src/ty.rs | 92 ++++++++++++++++++++++++++------------------ vendor/syn/src/verbatim.rs | 20 +++++++++- 9 files changed, 265 insertions(+), 88 deletions(-) create mode 100644 vendor/syn/src/drops.rs (limited to 'vendor/syn/src') diff --git a/vendor/syn/src/buffer.rs b/vendor/syn/src/buffer.rs index 161b614c8..0d5cf30d5 100644 --- a/vendor/syn/src/buffer.rs +++ b/vendor/syn/src/buffer.rs @@ -14,6 +14,7 @@ use crate::proc_macro as pm; use crate::Lifetime; use proc_macro2::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree}; +use std::cmp::Ordering; use std::marker::PhantomData; /// Internal type which is used instead of `TokenTree` to represent a token tree @@ -25,7 +26,8 @@ enum Entry { Ident(Ident), Punct(Punct), Literal(Literal), - End, + // End entries contain the offset (negative) to the start of the buffer. + End(isize), } /// A buffer that can be efficiently traversed multiple times, unlike @@ -48,10 +50,10 @@ impl TokenBuffer { TokenTree::Literal(literal) => entries.push(Entry::Literal(literal)), TokenTree::Group(group) => { let group_start_index = entries.len(); - entries.push(Entry::End); // we replace this below + entries.push(Entry::End(0)); // we replace this below Self::recursive_new(entries, group.stream()); let group_end_index = entries.len(); - entries.push(Entry::End); + entries.push(Entry::End(-(group_end_index as isize))); let group_end_offset = group_end_index - group_start_index; entries[group_start_index] = Entry::Group(group, group_end_offset); } @@ -77,7 +79,7 @@ impl TokenBuffer { pub fn new2(stream: TokenStream) -> Self { let mut entries = Vec::new(); Self::recursive_new(&mut entries, stream); - entries.push(Entry::End); + entries.push(Entry::End(-(entries.len() as isize))); Self { entries: entries.into_boxed_slice(), } @@ -127,7 +129,7 @@ impl<'a> Cursor<'a> { // object in global storage. struct UnsafeSyncEntry(Entry); unsafe impl Sync for UnsafeSyncEntry {} - static EMPTY_ENTRY: UnsafeSyncEntry = UnsafeSyncEntry(Entry::End); + static EMPTY_ENTRY: UnsafeSyncEntry = UnsafeSyncEntry(Entry::End(0)); Cursor { ptr: &EMPTY_ENTRY.0, @@ -144,7 +146,7 @@ impl<'a> Cursor<'a> { // past it, unless `ptr == scope`, which means that we're at the edge of // our cursor's scope. We should only have `ptr != scope` at the exit // from None-delimited groups entered with `ignore_none`. 
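
// Aside — an illustrative, self-contained sketch (hypothetical `Slot` type, not part of
// the upstream sources) of the bookkeeping introduced above: every buffer now ends with
// an `End` entry holding the negative distance back to index 0, so any position that can
// see an `End` entry can recover the start of its own buffer.
enum Slot {
    Token(char),
    End(isize), // negative offset from this slot back to the start of the buffer
}

fn build(tokens: &[char]) -> Vec<Slot> {
    let mut slots: Vec<Slot> = tokens.iter().copied().map(Slot::Token).collect();
    // Mirrors `entries.push(Entry::End(-(entries.len() as isize)))` in `new2` above.
    slots.push(Slot::End(-(slots.len() as isize)));
    slots
}

fn buffer_start(slots: &[Slot], end_index: usize) -> usize {
    match slots[end_index] {
        Slot::End(offset) => (end_index as isize + offset) as usize,
        _ => panic!("expected an End slot"),
    }
}

#[test]
fn end_entry_points_back_to_the_start() {
    let slots = build(&['a', 'b', 'c']);
    assert_eq!(buffer_start(&slots, slots.len() - 1), 0);
}
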
- while let Entry::End = *ptr { + while let Entry::End(_) = *ptr { if ptr == scope { break; } @@ -292,7 +294,7 @@ impl<'a> Cursor<'a> { Entry::Literal(literal) => (literal.clone().into(), 1), Entry::Ident(ident) => (ident.clone().into(), 1), Entry::Punct(punct) => (punct.clone().into(), 1), - Entry::End => return None, + Entry::End(_) => return None, }; let rest = unsafe { Cursor::create(self.ptr.add(len), self.scope) }; @@ -307,7 +309,7 @@ impl<'a> Cursor<'a> { Entry::Literal(literal) => literal.span(), Entry::Ident(ident) => ident.span(), Entry::Punct(punct) => punct.span(), - Entry::End => Span::call_site(), + Entry::End(_) => Span::call_site(), } } @@ -317,7 +319,7 @@ impl<'a> Cursor<'a> { /// This method treats `'lifetimes` as a single token. pub(crate) fn skip(self) -> Option> { let len = match self.entry() { - Entry::End => return None, + Entry::End(_) => return None, // Treat lifetimes as a single tt for the purposes of 'skip'. Entry::Punct(punct) if punct.as_char() == '\'' && punct.spacing() == Spacing::Joint => { @@ -347,9 +349,17 @@ impl<'a> Eq for Cursor<'a> {} impl<'a> PartialEq for Cursor<'a> { fn eq(&self, other: &Self) -> bool { - let Cursor { ptr, scope, marker } = self; - let _ = marker; - *ptr == other.ptr && *scope == other.scope + self.ptr == other.ptr + } +} + +impl<'a> PartialOrd for Cursor<'a> { + fn partial_cmp(&self, other: &Self) -> Option { + if same_buffer(*self, *other) { + Some(self.ptr.cmp(&other.ptr)) + } else { + None + } } } @@ -357,6 +367,22 @@ pub(crate) fn same_scope(a: Cursor, b: Cursor) -> bool { a.scope == b.scope } +pub(crate) fn same_buffer(a: Cursor, b: Cursor) -> bool { + unsafe { + match (&*a.scope, &*b.scope) { + (Entry::End(a_offset), Entry::End(b_offset)) => { + a.scope.offset(*a_offset) == b.scope.offset(*b_offset) + } + _ => unreachable!(), + } + } +} + +#[cfg(any(feature = "full", feature = "derive"))] +pub(crate) fn cmp_assuming_same_buffer(a: Cursor, b: Cursor) -> Ordering { + a.ptr.cmp(&b.ptr) +} + pub(crate) fn open_span_of_group(cursor: Cursor) -> Span { match cursor.entry() { Entry::Group(group, _) => group.span_open(), diff --git a/vendor/syn/src/drops.rs b/vendor/syn/src/drops.rs new file mode 100644 index 000000000..89b42d82e --- /dev/null +++ b/vendor/syn/src/drops.rs @@ -0,0 +1,58 @@ +use std::iter; +use std::mem::ManuallyDrop; +use std::ops::{Deref, DerefMut}; +use std::option; +use std::slice; + +#[repr(transparent)] +pub(crate) struct NoDrop(ManuallyDrop); + +impl NoDrop { + pub(crate) fn new(value: T) -> Self + where + T: TrivialDrop, + { + NoDrop(ManuallyDrop::new(value)) + } +} + +impl Deref for NoDrop { + type Target = T; + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl DerefMut for NoDrop { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} + +pub(crate) trait TrivialDrop {} + +impl TrivialDrop for iter::Empty {} +impl<'a, T> TrivialDrop for slice::Iter<'a, T> {} +impl<'a, T> TrivialDrop for slice::IterMut<'a, T> {} +impl<'a, T> TrivialDrop for option::IntoIter<&'a T> {} +impl<'a, T> TrivialDrop for option::IntoIter<&'a mut T> {} + +#[test] +fn test_needs_drop() { + use std::mem::needs_drop; + + struct NeedsDrop; + + impl Drop for NeedsDrop { + fn drop(&mut self) {} + } + + assert!(needs_drop::()); + + // Test each of the types with a handwritten TrivialDrop impl above. 
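
    // Aside (illustrative only, not part of the upstream test): the guarantee NoDrop
    // builds on is that ManuallyDrop<T> carries no drop glue even when T does, so a
    // NoDrop wrapper around a TrivialDrop iterator stays free of drop glue as well.
    assert!(!needs_drop::<std::mem::ManuallyDrop<NeedsDrop>>());
    assert!(!needs_drop::<NoDrop<iter::Empty<NeedsDrop>>>());
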
+ assert!(!needs_drop::>()); + assert!(!needs_drop::>()); + assert!(!needs_drop::>()); + assert!(!needs_drop::>()); + assert!(!needs_drop::>()); +} diff --git a/vendor/syn/src/error.rs b/vendor/syn/src/error.rs index 609cc086f..e301367d5 100644 --- a/vendor/syn/src/error.rs +++ b/vendor/syn/src/error.rs @@ -134,12 +134,16 @@ impl Error { /// } /// ``` pub fn new(span: Span, message: T) -> Self { - Error { - messages: vec![ErrorMessage { - start_span: ThreadBound::new(span), - end_span: ThreadBound::new(span), - message: message.to_string(), - }], + return new(span, message.to_string()); + + fn new(span: Span, message: String) -> Error { + Error { + messages: vec![ErrorMessage { + start_span: ThreadBound::new(span), + end_span: ThreadBound::new(span), + message, + }], + } } } @@ -158,15 +162,19 @@ impl Error { /// `ParseStream::error`)! #[cfg(feature = "printing")] pub fn new_spanned(tokens: T, message: U) -> Self { - let mut iter = tokens.into_token_stream().into_iter(); - let start = iter.next().map_or_else(Span::call_site, |t| t.span()); - let end = iter.last().map_or(start, |t| t.span()); - Error { - messages: vec![ErrorMessage { - start_span: ThreadBound::new(start), - end_span: ThreadBound::new(end), - message: message.to_string(), - }], + return new_spanned(tokens.into_token_stream(), message.to_string()); + + fn new_spanned(tokens: TokenStream, message: String) -> Error { + let mut iter = tokens.into_iter(); + let start = iter.next().map_or_else(Span::call_site, |t| t.span()); + let end = iter.last().map_or(start, |t| t.span()); + Error { + messages: vec![ErrorMessage { + start_span: ThreadBound::new(start), + end_span: ThreadBound::new(end), + message, + }], + } } } @@ -288,12 +296,16 @@ pub fn new_at(scope: Span, cursor: Cursor, message: T) -> Error { #[cfg(all(feature = "parsing", any(feature = "full", feature = "derive")))] pub fn new2(start: Span, end: Span, message: T) -> Error { - Error { - messages: vec![ErrorMessage { - start_span: ThreadBound::new(start), - end_span: ThreadBound::new(end), - message: message.to_string(), - }], + return new2(start, end, message.to_string()); + + fn new2(start: Span, end: Span, message: String) -> Error { + Error { + messages: vec![ErrorMessage { + start_span: ThreadBound::new(start), + end_span: ThreadBound::new(end), + message, + }], + } } } diff --git a/vendor/syn/src/generics.rs b/vendor/syn/src/generics.rs index 9c2802f87..6d4fe847e 100644 --- a/vendor/syn/src/generics.rs +++ b/vendor/syn/src/generics.rs @@ -828,6 +828,31 @@ pub mod parsing { } } + impl TypeParamBound { + pub(crate) fn parse_multiple( + input: ParseStream, + allow_plus: bool, + ) -> Result> { + let mut bounds = Punctuated::new(); + loop { + bounds.push_value(input.parse()?); + if !(allow_plus && input.peek(Token![+])) { + break; + } + bounds.push_punct(input.parse()?); + if !(input.peek(Ident::peek_any) + || input.peek(Token![::]) + || input.peek(Token![?]) + || input.peek(Lifetime) + || input.peek(token::Paren)) + { + break; + } + } + Ok(bounds) + } + } + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] impl Parse for TraitBound { fn parse(input: ParseStream) -> Result { diff --git a/vendor/syn/src/lib.rs b/vendor/syn/src/lib.rs index 81f03e1b5..e47ba28c6 100644 --- a/vendor/syn/src/lib.rs +++ b/vendor/syn/src/lib.rs @@ -250,13 +250,14 @@ //! dynamic library libproc_macro from rustc toolchain. // Syn types in rustdoc of other crates get linked to here. 
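
// Aside — a minimal, self-contained sketch (hypothetical `describe`/`inner` names, not
// part of the upstream sources) of the pattern used in the error.rs hunks above: the
// generic public function is reduced to a thin shim that immediately delegates to a
// private non-generic function, so the message-building body is compiled once rather
// than once per `T: Display` instantiation. This is presumably also why
// `clippy::items_after_statements` is newly allowed below.
use std::fmt::Display;

fn describe<T: Display>(value: T) -> String {
    // Only this shim is monomorphized per `T`; the shared body lives in `inner`.
    return inner(value.to_string());

    fn inner(text: String) -> String {
        format!("value: {}", text)
    }
}

#[test]
fn shim_delegates_to_the_shared_body() {
    assert_eq!(describe(42), "value: 42");
}
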
-#![doc(html_root_url = "https://docs.rs/syn/1.0.102")] +#![doc(html_root_url = "https://docs.rs/syn/1.0.107")] #![cfg_attr(doc_cfg, feature(doc_cfg))] #![allow(non_camel_case_types)] #![allow( clippy::bool_to_int_with_if, clippy::cast_lossless, clippy::cast_possible_truncation, + clippy::cast_possible_wrap, clippy::cast_ptr_alignment, clippy::default_trait_access, clippy::doc_markdown, @@ -264,8 +265,8 @@ clippy::explicit_auto_deref, clippy::if_not_else, clippy::inherent_to_string, + clippy::items_after_statements, clippy::large_enum_variant, - clippy::let_underscore_drop, clippy::manual_assert, clippy::match_on_vec_items, clippy::match_same_arms, @@ -428,6 +429,7 @@ pub use crate::path::{ #[cfg(feature = "parsing")] #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] pub mod buffer; +mod drops; #[cfg(feature = "parsing")] #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] pub mod ext; diff --git a/vendor/syn/src/path.rs b/vendor/syn/src/path.rs index 742273afd..6cdb43ac5 100644 --- a/vendor/syn/src/path.rs +++ b/vendor/syn/src/path.rs @@ -89,9 +89,8 @@ impl PathArguments { } } - #[cfg(feature = "parsing")] - fn is_none(&self) -> bool { - match *self { + pub fn is_none(&self) -> bool { + match self { PathArguments::None => true, PathArguments::AngleBracketed(_) | PathArguments::Parenthesized(_) => false, } diff --git a/vendor/syn/src/punctuated.rs b/vendor/syn/src/punctuated.rs index 0fe1078cf..b7d0185e8 100644 --- a/vendor/syn/src/punctuated.rs +++ b/vendor/syn/src/punctuated.rs @@ -32,6 +32,7 @@ use std::option; use std::slice; use std::vec; +use crate::drops::{NoDrop, TrivialDrop}; #[cfg(feature = "parsing")] use crate::parse::{Parse, ParseStream, Result}; #[cfg(feature = "parsing")] @@ -104,10 +105,10 @@ impl Punctuated { /// Returns an iterator over borrowed syntax tree nodes of type `&T`. pub fn iter(&self) -> Iter { Iter { - inner: Box::new(PrivateIter { + inner: Box::new(NoDrop::new(PrivateIter { inner: self.inner.iter(), last: self.last.as_ref().map(Box::as_ref).into_iter(), - }), + })), } } @@ -115,10 +116,10 @@ impl Punctuated { /// `&mut T`. pub fn iter_mut(&mut self) -> IterMut { IterMut { - inner: Box::new(PrivateIterMut { + inner: Box::new(NoDrop::new(PrivateIterMut { inner: self.inner.iter_mut(), last: self.last.as_mut().map(Box::as_mut).into_iter(), - }), + })), } } @@ -721,13 +722,13 @@ pub struct Iter<'a, T: 'a> { // The `Item = &'a T` needs to be specified to support rustc 1.31 and older. // On modern compilers we would be able to write just IterTrait<'a, T> where // Item can be inferred unambiguously from the supertrait. 
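
// Aside — an illustrative, standalone sketch (hypothetical `CloneBoxIter` name, not part
// of the upstream sources) of the type-erasure pattern used by `Iter` below: `Clone`
// itself is not object safe, so the erased trait exposes a `clone_box` method that every
// concrete iterator implements by boxing a clone of itself.
trait CloneBoxIter<'a, T: 'a>: Iterator<Item = &'a T> {
    fn clone_box(&self) -> Box<dyn CloneBoxIter<'a, T> + 'a>;
}

impl<'a, T: 'a, I> CloneBoxIter<'a, T> for I
where
    I: Iterator<Item = &'a T> + Clone + 'a,
{
    fn clone_box(&self) -> Box<dyn CloneBoxIter<'a, T> + 'a> {
        Box::new(self.clone())
    }
}

#[test]
fn clone_box_produces_an_independent_iterator() {
    let values = [1u32, 2, 3];
    let erased: Box<dyn CloneBoxIter<'_, u32> + '_> = Box::new(values.iter());
    let mut copy = erased.clone_box();
    assert_eq!(copy.next(), Some(&1));
}
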
- inner: Box + 'a>, + inner: Box + 'a>>, } trait IterTrait<'a, T: 'a>: DoubleEndedIterator + ExactSizeIterator { - fn clone_box(&self) -> Box + 'a>; + fn clone_box(&self) -> Box + 'a>>; } struct PrivateIter<'a, T: 'a, P: 'a> { @@ -735,10 +736,17 @@ struct PrivateIter<'a, T: 'a, P: 'a> { last: option::IntoIter<&'a T>, } +impl<'a, T, P> TrivialDrop for PrivateIter<'a, T, P> +where + slice::Iter<'a, (T, P)>: TrivialDrop, + option::IntoIter<&'a T>: TrivialDrop, +{ +} + #[cfg(any(feature = "full", feature = "derive"))] pub(crate) fn empty_punctuated_iter<'a, T>() -> Iter<'a, T> { Iter { - inner: Box::new(iter::empty()), + inner: Box::new(NoDrop::new(iter::empty())), } } @@ -813,10 +821,14 @@ impl<'a, T, P> Clone for PrivateIter<'a, T, P> { impl<'a, T, I> IterTrait<'a, T> for I where T: 'a, - I: DoubleEndedIterator + ExactSizeIterator + Clone + 'a, + I: DoubleEndedIterator + + ExactSizeIterator + + Clone + + TrivialDrop + + 'a, { - fn clone_box(&self) -> Box + 'a> { - Box::new(self.clone()) + fn clone_box(&self) -> Box + 'a>> { + Box::new(NoDrop::new(self.clone())) } } @@ -826,7 +838,7 @@ where /// /// [module documentation]: self pub struct IterMut<'a, T: 'a> { - inner: Box + 'a>, + inner: Box + 'a>>, } trait IterMutTrait<'a, T: 'a>: @@ -839,10 +851,17 @@ struct PrivateIterMut<'a, T: 'a, P: 'a> { last: option::IntoIter<&'a mut T>, } +impl<'a, T, P> TrivialDrop for PrivateIterMut<'a, T, P> +where + slice::IterMut<'a, (T, P)>: TrivialDrop, + option::IntoIter<&'a mut T>: TrivialDrop, +{ +} + #[cfg(any(feature = "full", feature = "derive"))] pub(crate) fn empty_punctuated_iter_mut<'a, T>() -> IterMut<'a, T> { IterMut { - inner: Box::new(iter::empty()), + inner: Box::new(NoDrop::new(iter::empty())), } } diff --git a/vendor/syn/src/ty.rs b/vendor/syn/src/ty.rs index 4068be3c7..8c841e2f7 100644 --- a/vendor/syn/src/ty.rs +++ b/vendor/syn/src/ty.rs @@ -337,7 +337,7 @@ pub mod parsing { use crate::ext::IdentExt; use crate::parse::{Parse, ParseStream, Result}; use crate::path; - use proc_macro2::{Punct, Spacing, TokenTree}; + use proc_macro2::{Punct, Spacing, Span, TokenTree}; #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] impl Parse for Type { @@ -546,13 +546,17 @@ pub mod parsing { || lookahead.peek(Token![<]) { let dyn_token: Option = input.parse()?; - if dyn_token.is_some() { + if let Some(dyn_token) = dyn_token { + let dyn_span = dyn_token.span; let star_token: Option = input.parse()?; - let bounds = TypeTraitObject::parse_bounds(input, allow_plus)?; + let bounds = TypeTraitObject::parse_bounds(dyn_span, input, allow_plus)?; return Ok(if star_token.is_some() { Type::Verbatim(verbatim::between(begin, input)) } else { - Type::TraitObject(TypeTraitObject { dyn_token, bounds }) + Type::TraitObject(TypeTraitObject { + dyn_token: Some(dyn_token), + bounds, + }) }); } @@ -896,15 +900,6 @@ pub mod parsing { } } - fn at_least_one_type(bounds: &Punctuated) -> bool { - for bound in bounds { - if let TypeParamBound::Trait(_) = *bound { - return true; - } - } - false - } - impl TypeTraitObject { #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] pub fn without_plus(input: ParseStream) -> Result { @@ -914,35 +909,38 @@ pub mod parsing { // Only allow multiple trait references if allow_plus is true. 
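
// Aside (an illustrative usage sketch, not part of the upstream sources): the point of
// threading the `dyn` span into the trait-object bound parsing in these hunks is that a
// lifetime-only object type now fails with a targeted message spanning the `dyn` keyword
// and the last lifetime, rather than a generic parse error. Assuming the vendored syn is
// used as a normal dependency, the behavior can be observed like this:
#[test]
fn lifetime_only_trait_object_is_rejected() {
    let err = syn::parse_str::<syn::Type>("dyn 'a + 'b").unwrap_err();
    assert!(err
        .to_string()
        .contains("at least one trait is required for an object type"));
}
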
pub(crate) fn parse(input: ParseStream, allow_plus: bool) -> Result { - Ok(TypeTraitObject { - dyn_token: input.parse()?, - bounds: Self::parse_bounds(input, allow_plus)?, - }) + let dyn_token: Option = input.parse()?; + let dyn_span = match &dyn_token { + Some(token) => token.span, + None => input.span(), + }; + let bounds = Self::parse_bounds(dyn_span, input, allow_plus)?; + Ok(TypeTraitObject { dyn_token, bounds }) } fn parse_bounds( + dyn_span: Span, input: ParseStream, allow_plus: bool, ) -> Result> { - let mut bounds = Punctuated::new(); - loop { - bounds.push_value(input.parse()?); - if !(allow_plus && input.peek(Token![+])) { - break; - } - bounds.push_punct(input.parse()?); - if !(input.peek(Ident::peek_any) - || input.peek(Token![::]) - || input.peek(Token![?]) - || input.peek(Lifetime) - || input.peek(token::Paren)) - { - break; + let bounds = TypeParamBound::parse_multiple(input, allow_plus)?; + let mut last_lifetime_span = None; + let mut at_least_one_trait = false; + for bound in &bounds { + match bound { + TypeParamBound::Trait(_) => { + at_least_one_trait = true; + break; + } + TypeParamBound::Lifetime(lifetime) => { + last_lifetime_span = Some(lifetime.ident.span()); + } } } // Just lifetimes like `'a + 'b` is not a TraitObject. - if !at_least_one_type(&bounds) { - return Err(input.error("expected at least one type")); + if !at_least_one_trait { + let msg = "at least one trait is required for an object type"; + return Err(error::new2(dyn_span, last_lifetime_span.unwrap(), msg)); } Ok(bounds) } @@ -964,10 +962,30 @@ pub mod parsing { } pub(crate) fn parse(input: ParseStream, allow_plus: bool) -> Result { - Ok(TypeImplTrait { - impl_token: input.parse()?, - bounds: TypeTraitObject::parse_bounds(input, allow_plus)?, - }) + let impl_token: Token![impl] = input.parse()?; + let bounds = TypeParamBound::parse_multiple(input, allow_plus)?; + let mut last_lifetime_span = None; + let mut at_least_one_trait = false; + for bound in &bounds { + match bound { + TypeParamBound::Trait(_) => { + at_least_one_trait = true; + break; + } + TypeParamBound::Lifetime(lifetime) => { + last_lifetime_span = Some(lifetime.ident.span()); + } + } + } + if !at_least_one_trait { + let msg = "at least one trait must be specified"; + return Err(error::new2( + impl_token.span, + last_lifetime_span.unwrap(), + msg, + )); + } + Ok(TypeImplTrait { impl_token, bounds }) } } diff --git a/vendor/syn/src/verbatim.rs b/vendor/syn/src/verbatim.rs index 0686352f7..58cf68d17 100644 --- a/vendor/syn/src/verbatim.rs +++ b/vendor/syn/src/verbatim.rs @@ -1,13 +1,31 @@ use crate::parse::{ParseBuffer, ParseStream}; -use proc_macro2::TokenStream; +use proc_macro2::{Delimiter, TokenStream}; +use std::cmp::Ordering; use std::iter; pub fn between<'a>(begin: ParseBuffer<'a>, end: ParseStream<'a>) -> TokenStream { let end = end.cursor(); let mut cursor = begin.cursor(); + assert!(crate::buffer::same_buffer(end, cursor)); + let mut tokens = TokenStream::new(); while cursor != end { let (tt, next) = cursor.token_tree().unwrap(); + + if crate::buffer::cmp_assuming_same_buffer(end, next) == Ordering::Less { + // A syntax node can cross the boundary of a None-delimited group + // due to such groups being transparent to the parser in most cases. + // Any time this occurs the group is known to be semantically + // irrelevant. 
https://github.com/dtolnay/syn/issues/1235
+            if let Some((inside, _span, after)) = cursor.group(Delimiter::None) {
+                assert!(next == after);
+                cursor = inside;
+                continue;
+            } else {
+                panic!("verbatim end must not be inside a delimited group");
+            }
+        }
+
         tokens.extend(iter::once(tt));
         cursor = next;
     }
--
cgit v1.2.3
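
The Cursor ordering added in buffer.rs, which verbatim::between relies on above, can be summarized with a small standalone model (hypothetical `Pos` type, simplified from the pointer-based original, not part of the upstream sources): positions are only comparable when they belong to the same buffer, and within one buffer they order by index, which is all `cmp_assuming_same_buffer` needs.

use std::cmp::Ordering;

#[derive(Clone, Copy)]
struct Pos {
    buffer_id: usize, // stands in for the recovered buffer-start pointer
    index: usize,     // stands in for the entry pointer within that buffer
}

fn same_buffer(a: Pos, b: Pos) -> bool {
    a.buffer_id == b.buffer_id
}

fn partial_cmp(a: Pos, b: Pos) -> Option<Ordering> {
    if same_buffer(a, b) {
        Some(a.index.cmp(&b.index)) // total order within one buffer
    } else {
        None // cursors from unrelated buffers are incomparable
    }
}

fn main() {
    let start = Pos { buffer_id: 1, index: 0 };
    let end = Pos { buffer_id: 1, index: 7 };
    let foreign = Pos { buffer_id: 2, index: 3 };
    assert_eq!(partial_cmp(start, end), Some(Ordering::Less));
    assert_eq!(partial_cmp(start, foreign), None);
}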