Diffstat (limited to 'compiler/rustc_parse')
-rw-r--r--  compiler/rustc_parse/messages.ftl                            |   3
-rw-r--r--  compiler/rustc_parse/src/errors.rs                           |  11
-rw-r--r--  compiler/rustc_parse/src/lexer/unescape_error_reporting.rs   |   2
-rw-r--r--  compiler/rustc_parse/src/parser/attr.rs                      |  17
-rw-r--r--  compiler/rustc_parse/src/parser/attr_wrapper.rs              |  10
-rw-r--r--  compiler/rustc_parse/src/parser/diagnostics.rs               |  41
-rw-r--r--  compiler/rustc_parse/src/parser/expr.rs                      | 151
-rw-r--r--  compiler/rustc_parse/src/parser/generics.rs                  |  44
-rw-r--r--  compiler/rustc_parse/src/parser/item.rs                      |   6
-rw-r--r--  compiler/rustc_parse/src/parser/pat.rs                       |  57
-rw-r--r--  compiler/rustc_parse/src/parser/stmt.rs                      |   7
11 files changed, 277 insertions, 72 deletions
diff --git a/compiler/rustc_parse/messages.ftl b/compiler/rustc_parse/messages.ftl
index 926339450..9787d98c1 100644
--- a/compiler/rustc_parse/messages.ftl
+++ b/compiler/rustc_parse/messages.ftl
@@ -353,6 +353,7 @@ parse_int_literal_too_large = integer literal is too large
parse_invalid_block_macro_segment = cannot use a `block` macro fragment here
.label = the `block` fragment is within this context
+ .suggestion = wrap this in another block
parse_invalid_char_in_escape = {parse_invalid_char_in_escape_msg}: `{$ch}`
.label = {parse_invalid_char_in_escape_msg}
@@ -695,7 +696,7 @@ parse_struct_literal_body_without_path =
parse_struct_literal_needing_parens =
invalid struct literal
- .suggestion = you might need to surround the struct literal in parentheses
+ .suggestion = you might need to surround the struct literal with parentheses
parse_struct_literal_not_allowed_here = struct literals are not allowed here
.suggestion = surround the struct literal with parentheses
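Note: the new `.suggestion` for `parse_invalid_block_macro_segment` pairs with the `WrapInExplicitBlock` subdiagnostic added in `errors.rs` below. As a hedged illustration (hypothetical macro, not a test from this patch), the kind of input that hits this message is a `block` fragment used where the parser needs a literal `{ ... }` block, and the suggested fix wraps the fragment in another block:

```rust
// Hypothetical trigger (assumption, not from this patch): a `block` fragment
// placed right after a label, where the parser wants a literal brace-delimited
// block:
//
//     macro_rules! labeled {
//         ($b:block) => {
//             'outer: $b      // error: cannot use a `block` macro fragment here
//         };
//     }
//
// With the new suggestion the fragment is wrapped in another block, which parses:
macro_rules! labeled {
    ($b:block) => {
        'outer: { $b }
    };
}

fn main() {
    labeled!({ println!("hello"); });
}
```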
diff --git a/compiler/rustc_parse/src/errors.rs b/compiler/rustc_parse/src/errors.rs
index 84494eab8..96e1c0e3c 100644
--- a/compiler/rustc_parse/src/errors.rs
+++ b/compiler/rustc_parse/src/errors.rs
@@ -333,6 +333,17 @@ pub(crate) struct InvalidBlockMacroSegment {
pub span: Span,
#[label]
pub context: Span,
+ #[subdiagnostic]
+ pub wrap: WrapInExplicitBlock,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
+pub(crate) struct WrapInExplicitBlock {
+ #[suggestion_part(code = "{{ ")]
+ pub lo: Span,
+ #[suggestion_part(code = " }}")]
+ pub hi: Span,
}
#[derive(Diagnostic)]
diff --git a/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs b/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs
index eb9625f92..461a34b67 100644
--- a/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs
+++ b/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs
@@ -158,7 +158,7 @@ pub(crate) fn emit_unescape_error(
diag.help(
"for more information, visit \
- <https://static.rust-lang.org/doc/master/reference.html#literals>",
+ <https://doc.rust-lang.org/reference/tokens.html#literals>",
);
}
diag.emit();
diff --git a/compiler/rustc_parse/src/parser/attr.rs b/compiler/rustc_parse/src/parser/attr.rs
index e1db19557..ee0abba1c 100644
--- a/compiler/rustc_parse/src/parser/attr.rs
+++ b/compiler/rustc_parse/src/parser/attr.rs
@@ -422,15 +422,12 @@ impl<'a> Parser<'a> {
}
}
-pub fn maybe_needs_tokens(attrs: &[ast::Attribute]) -> bool {
- // One of the attributes may either itself be a macro,
- // or expand to macro attributes (`cfg_attr`).
- attrs.iter().any(|attr| {
- if attr.is_doc_comment() {
- return false;
- }
- attr.ident().map_or(true, |ident| {
- ident.name == sym::cfg_attr || !rustc_feature::is_builtin_attr_name(ident.name)
- })
+/// The attributes are complete if all attributes are either a doc comment or a builtin attribute other than `cfg_attr`
+pub fn is_complete(attrs: &[ast::Attribute]) -> bool {
+ attrs.iter().all(|attr| {
+ attr.is_doc_comment()
+ || attr.ident().is_some_and(|ident| {
+ ident.name != sym::cfg_attr && rustc_feature::is_builtin_attr_name(ident.name)
+ })
})
}
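Note: a rough illustration of the renamed predicate (hypothetical attribute lists, not from this patch):

```rust
// Illustration of `is_complete` (hypothetical examples): an attribute list is
// "complete" when every attribute is a doc comment or a builtin attribute
// other than `cfg_attr`, so nothing can expand into further attributes or a
// proc macro that would need the captured tokens.

/// Only a doc comment and builtin attributes: `is_complete` would be true, so
/// token collection can be skipped early.
#[inline]
#[allow(dead_code)]
fn complete() {}

// `cfg_attr` may expand to more attributes, so `is_complete` would be false.
#[cfg_attr(test, derive(Debug))]
struct NotComplete;

fn main() {}
```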
diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs
index 1e6ac5496..b579da098 100644
--- a/compiler/rustc_parse/src/parser/attr_wrapper.rs
+++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs
@@ -61,8 +61,8 @@ impl AttrWrapper {
self.attrs.is_empty()
}
- pub fn maybe_needs_tokens(&self) -> bool {
- crate::parser::attr::maybe_needs_tokens(&self.attrs)
+ pub fn is_complete(&self) -> bool {
+ crate::parser::attr::is_complete(&self.attrs)
}
}
@@ -201,7 +201,7 @@ impl<'a> Parser<'a> {
// by definition
if matches!(force_collect, ForceCollect::No)
// None of our outer attributes can require tokens (e.g. a proc-macro)
- && !attrs.maybe_needs_tokens()
+ && attrs.is_complete()
// If our target supports custom inner attributes, then we cannot bail
// out early, since we may need to capture tokens for a custom inner attribute
// invocation.
@@ -244,9 +244,9 @@ impl<'a> Parser<'a> {
// Now that we've parsed an AST node, we have more information available.
if matches!(force_collect, ForceCollect::No)
// We now have inner attributes available, so this check is more precise
- // than `attrs.maybe_needs_tokens()` at the start of the function.
+ // than `attrs.is_complete()` at the start of the function.
// As a result, we don't need to check `R::SUPPORTS_CUSTOM_INNER_ATTRS`
- && !crate::parser::attr::maybe_needs_tokens(ret.attrs())
+ && crate::parser::attr::is_complete(ret.attrs())
// Subtle: We call `has_cfg_or_cfg_attr` with the attrs from `ret`.
// This ensures that we consider inner attributes (e.g. `#![cfg]`),
// which require us to have tokens available
diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs
index c14540396..0ce6a570d 100644
--- a/compiler/rustc_parse/src/parser/diagnostics.rs
+++ b/compiler/rustc_parse/src/parser/diagnostics.rs
@@ -605,6 +605,22 @@ impl<'a> Parser<'a> {
}
}
+ if let TokenKind::Ident(prev, _) = &self.prev_token.kind
+ && let TokenKind::Ident(cur, _) = &self.token.kind
+ {
+ let concat = Symbol::intern(&format!("{}{}", prev, cur));
+ let ident = Ident::new(concat, DUMMY_SP);
+ if ident.is_used_keyword() || ident.is_reserved() || ident.is_raw_guess() {
+ let span = self.prev_token.span.to(self.token.span);
+ err.span_suggestion_verbose(
+ span,
+ format!("consider removing the space to spell keyword `{}`", concat),
+ concat,
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+
// `pub` may be used for an item or `pub(crate)`
if self.prev_token.is_ident_named(sym::public)
&& (self.token.can_begin_item()
@@ -751,13 +767,24 @@ impl<'a> Parser<'a> {
tail.could_be_bare_literal = true;
if maybe_struct_name.is_ident() && can_be_struct_literal {
// Account for `if Example { a: one(), }.is_pos() {}`.
- Err(self.sess.create_err(StructLiteralNeedingParens {
- span: maybe_struct_name.span.to(expr.span),
- sugg: StructLiteralNeedingParensSugg {
- before: maybe_struct_name.span.shrink_to_lo(),
- after: expr.span.shrink_to_hi(),
- },
- }))
+ // expand `before` so that we also handle module paths such as:
+ // `foo::Bar { ... }`
+ // we want to suggest `(foo::Bar { ... })` instead of `foo::(Bar { ... })`
+ let sm = self.sess.source_map();
+ let before = maybe_struct_name.span.shrink_to_lo();
+ if let Ok(extend_before) = sm.span_extend_prev_while(before, |t| {
+ t.is_alphanumeric() || t == ':' || t == '_'
+ }) {
+ Err(self.sess.create_err(StructLiteralNeedingParens {
+ span: maybe_struct_name.span.to(expr.span),
+ sugg: StructLiteralNeedingParensSugg {
+ before: extend_before.shrink_to_lo(),
+ after: expr.span.shrink_to_hi(),
+ },
+ }))
+ } else {
+ return None;
+ }
} else {
self.sess.emit_err(StructLiteralBodyWithoutPath {
span: expr.span,
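Note: two hedged illustrations of the diagnostics touched above (hypothetical inputs; identifiers and the `foo::Bar` module are made up for the example):

```rust
// 1) Keyword split by a stray space: the previous and current identifiers are
//    concatenated, and if the result spells a keyword the parser now suggests
//    removing the space (hypothetical input):
//
//        fn main() { le t x = 1; }
//                    ^^^^ help: consider removing the space to spell keyword `let`
//
// the machine-applicable fix collapses the two identifiers:
fn fixed_keyword() {
    let x = 1;
    let _ = x;
}

// 2) Struct literal behind a module path in an `if` condition: per the comment
//    above, the parenthesization suggestion now covers the whole path, giving
//    `(foo::Bar { x: 1 }).is_pos()` rather than `foo::(Bar { x: 1 })`:
mod foo {
    pub struct Bar {
        pub x: i32,
    }
    impl Bar {
        pub fn is_pos(&self) -> bool {
            self.x > 0
        }
    }
}

fn fixed_parens() {
    if (foo::Bar { x: 1 }).is_pos() { /* ... */ }
}

fn main() {
    fixed_keyword();
    fixed_parens();
}
```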
diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs
index 1b28f3c97..7ede4fbc3 100644
--- a/compiler/rustc_parse/src/parser/expr.rs
+++ b/compiler/rustc_parse/src/parser/expr.rs
@@ -91,6 +91,18 @@ impl From<P<Expr>> for LhsExpr {
}
}
+#[derive(Debug)]
+enum DestructuredFloat {
+ /// 1e2
+ Single(Symbol, Span),
+ /// 1.
+ TrailingDot(Symbol, Span, Span),
+ /// 1.2 | 1.2e3
+ MiddleDot(Symbol, Span, Span, Symbol, Span),
+ /// Invalid
+ Error,
+}
+
impl<'a> Parser<'a> {
/// Parses an expression.
#[inline]
@@ -1001,9 +1013,15 @@ impl<'a> Parser<'a> {
}
fn error_unexpected_after_dot(&self) {
- // FIXME Could factor this out into non_fatal_unexpected or something.
let actual = pprust::token_to_string(&self.token);
- self.sess.emit_err(errors::UnexpectedTokenAfterDot { span: self.token.span, actual });
+ let span = self.token.span;
+ let sm = self.sess.source_map();
+ let (span, actual) = match (&self.token.kind, self.subparser_name) {
+ (token::Eof, Some(_)) if let Ok(actual) = sm.span_to_snippet(sm.next_point(span)) =>
+ (span.shrink_to_hi(), actual.into()),
+ _ => (span, actual),
+ };
+ self.sess.emit_err(errors::UnexpectedTokenAfterDot { span, actual });
}
// We need an identifier or integer, but the next token is a float.
@@ -1013,13 +1031,8 @@ impl<'a> Parser<'a> {
// support pushing "future tokens" (would be also helpful to `break_and_eat`), or
// we should break everything including floats into more basic proc-macro style
// tokens in the lexer (probably preferable).
- fn parse_expr_tuple_field_access_float(
- &mut self,
- lo: Span,
- base: P<Expr>,
- float: Symbol,
- suffix: Option<Symbol>,
- ) -> P<Expr> {
+ // See also `TokenKind::break_two_token_op` which does similar splitting of `>>` into `>`.
+ fn break_up_float(&mut self, float: Symbol) -> DestructuredFloat {
#[derive(Debug)]
enum FloatComponent {
IdentLike(String),
@@ -1056,7 +1069,7 @@ impl<'a> Parser<'a> {
match &*components {
// 1e2
[IdentLike(i)] => {
- self.parse_expr_tuple_field_access(lo, base, Symbol::intern(&i), suffix, None)
+ DestructuredFloat::Single(Symbol::intern(&i), span)
}
// 1.
[IdentLike(i), Punct('.')] => {
@@ -1068,11 +1081,8 @@ impl<'a> Parser<'a> {
} else {
(span, span)
};
- assert!(suffix.is_none());
let symbol = Symbol::intern(&i);
- self.token = Token::new(token::Ident(symbol, false), ident_span);
- let next_token = (Token::new(token::Dot, dot_span), self.token_spacing);
- self.parse_expr_tuple_field_access(lo, base, symbol, None, Some(next_token))
+ DestructuredFloat::TrailingDot(symbol, ident_span, dot_span)
}
// 1.2 | 1.2e3
[IdentLike(i1), Punct('.'), IdentLike(i2)] => {
@@ -1088,16 +1098,8 @@ impl<'a> Parser<'a> {
(span, span, span)
};
let symbol1 = Symbol::intern(&i1);
- self.token = Token::new(token::Ident(symbol1, false), ident1_span);
- // This needs to be `Spacing::Alone` to prevent regressions.
- // See issue #76399 and PR #76285 for more details
- let next_token1 = (Token::new(token::Dot, dot_span), Spacing::Alone);
- let base1 =
- self.parse_expr_tuple_field_access(lo, base, symbol1, None, Some(next_token1));
let symbol2 = Symbol::intern(&i2);
- let next_token2 = Token::new(token::Ident(symbol2, false), ident2_span);
- self.bump_with((next_token2, self.token_spacing)); // `.`
- self.parse_expr_tuple_field_access(lo, base1, symbol2, suffix, None)
+ DestructuredFloat::MiddleDot(symbol1, ident1_span, dot_span, symbol2, ident2_span)
}
// 1e+ | 1e- (recovered)
[IdentLike(_), Punct('+' | '-')] |
@@ -1109,12 +1111,83 @@ impl<'a> Parser<'a> {
[IdentLike(_), Punct('.'), IdentLike(_), Punct('+' | '-'), IdentLike(_)] => {
// See the FIXME about `TokenCursor` above.
self.error_unexpected_after_dot();
- base
+ DestructuredFloat::Error
}
_ => panic!("unexpected components in a float token: {:?}", components),
}
}
+ fn parse_expr_tuple_field_access_float(
+ &mut self,
+ lo: Span,
+ base: P<Expr>,
+ float: Symbol,
+ suffix: Option<Symbol>,
+ ) -> P<Expr> {
+ match self.break_up_float(float) {
+ // 1e2
+ DestructuredFloat::Single(sym, _sp) => {
+ self.parse_expr_tuple_field_access(lo, base, sym, suffix, None)
+ }
+ // 1.
+ DestructuredFloat::TrailingDot(sym, ident_span, dot_span) => {
+ assert!(suffix.is_none());
+ self.token = Token::new(token::Ident(sym, false), ident_span);
+ let next_token = (Token::new(token::Dot, dot_span), self.token_spacing);
+ self.parse_expr_tuple_field_access(lo, base, sym, None, Some(next_token))
+ }
+ // 1.2 | 1.2e3
+ DestructuredFloat::MiddleDot(symbol1, ident1_span, dot_span, symbol2, ident2_span) => {
+ self.token = Token::new(token::Ident(symbol1, false), ident1_span);
+ // This needs to be `Spacing::Alone` to prevent regressions.
+ // See issue #76399 and PR #76285 for more details
+ let next_token1 = (Token::new(token::Dot, dot_span), Spacing::Alone);
+ let base1 =
+ self.parse_expr_tuple_field_access(lo, base, symbol1, None, Some(next_token1));
+ let next_token2 = Token::new(token::Ident(symbol2, false), ident2_span);
+ self.bump_with((next_token2, self.token_spacing)); // `.`
+ self.parse_expr_tuple_field_access(lo, base1, symbol2, suffix, None)
+ }
+ DestructuredFloat::Error => base,
+ }
+ }
+
+ fn parse_field_name_maybe_tuple(&mut self) -> PResult<'a, ThinVec<Ident>> {
+ let token::Literal(token::Lit { kind: token::Float, symbol, suffix }) = self.token.kind
+ else {
+ return Ok(thin_vec![self.parse_field_name()?]);
+ };
+ Ok(match self.break_up_float(symbol) {
+ // 1e2
+ DestructuredFloat::Single(sym, sp) => {
+ self.bump();
+ thin_vec![Ident::new(sym, sp)]
+ }
+ // 1.
+ DestructuredFloat::TrailingDot(sym, sym_span, dot_span) => {
+ assert!(suffix.is_none());
+ // Analogous to `Self::break_and_eat`
+ self.token_cursor.break_last_token = true;
+ // This might work, in cases like `1. 2`, and might not,
+ // in cases like `offset_of!(Ty, 1.)`. It depends on what comes
+ // after the float-like token, and therefore we have to make
+ // the other parts of the parser think that there is a dot literal.
+ self.token = Token::new(token::Ident(sym, false), sym_span);
+ self.bump_with((Token::new(token::Dot, dot_span), self.token_spacing));
+ thin_vec![Ident::new(sym, sym_span)]
+ }
+ // 1.2 | 1.2e3
+ DestructuredFloat::MiddleDot(symbol1, ident1_span, _dot_span, symbol2, ident2_span) => {
+ self.bump();
+ thin_vec![Ident::new(symbol1, ident1_span), Ident::new(symbol2, ident2_span)]
+ }
+ DestructuredFloat::Error => {
+ self.bump();
+ thin_vec![Ident::new(symbol, self.prev_token.span)]
+ }
+ })
+ }
+
fn parse_expr_tuple_field_access(
&mut self,
lo: Span,
@@ -1363,6 +1436,8 @@ impl<'a> Parser<'a> {
self.parse_expr_yield()
} else if self.is_do_yeet() {
self.parse_expr_yeet()
+ } else if self.eat_keyword(kw::Become) {
+ self.parse_expr_become()
} else if self.check_keyword(kw::Let) {
self.parse_expr_let()
} else if self.eat_keyword(kw::Underscore) {
@@ -1679,6 +1754,16 @@ impl<'a> Parser<'a> {
self.maybe_recover_from_bad_qpath(expr)
}
+ /// Parse `"become" expr`, with `"become"` token already eaten.
+ fn parse_expr_become(&mut self) -> PResult<'a, P<Expr>> {
+ let lo = self.prev_token.span;
+ let kind = ExprKind::Become(self.parse_expr()?);
+ let span = lo.to(self.prev_token.span);
+ self.sess.gated_spans.gate(sym::explicit_tail_calls, span);
+ let expr = self.mk_expr(span, kind);
+ self.maybe_recover_from_bad_qpath(expr)
+ }
+
/// Parse `"break" (('label (:? expr)?) | expr?)` with `"break"` token already eaten.
/// If the label is followed immediately by a `:` token, the label and `:` are
/// parsed as part of the expression (i.e. a labeled loop). The language team has
@@ -1821,10 +1906,11 @@ impl<'a> Parser<'a> {
let (fields, _trailing, _recovered) = self.parse_seq_to_before_end(
&TokenKind::CloseDelim(Delimiter::Parenthesis),
seq_sep,
- Parser::parse_field_name,
+ Parser::parse_field_name_maybe_tuple,
)?;
+ let fields = fields.into_iter().flatten().collect::<Vec<_>>();
let span = lo.to(self.token.span);
- Ok(self.mk_expr(span, ExprKind::OffsetOf(container, fields.to_vec().into())))
+ Ok(self.mk_expr(span, ExprKind::OffsetOf(container, fields.into())))
}
/// Returns a string literal if the next token is a string literal.
@@ -1955,17 +2041,14 @@ impl<'a> Parser<'a> {
let recovered = self.recover_after_dot();
let token = recovered.as_ref().unwrap_or(&self.token);
match token::Lit::from_token(token) {
- Some(token_lit) => {
- match MetaItemLit::from_token_lit(token_lit, token.span) {
+ Some(lit) => {
+ match MetaItemLit::from_token_lit(lit, token.span) {
Ok(lit) => {
self.bump();
Some(lit)
}
Err(err) => {
- let span = token.span;
- let token::Literal(lit) = token.kind else {
- unreachable!();
- };
+ let span = token.uninterpolated_span();
self.bump();
report_lit_error(&self.sess, err, lit, span);
// Pack possible quotes and prefixes from the original literal into
@@ -2109,6 +2192,10 @@ impl<'a> Parser<'a> {
self.sess.emit_err(errors::InvalidBlockMacroSegment {
span: self.token.span,
context: lo.to(self.token.span),
+ wrap: errors::WrapInExplicitBlock {
+ lo: self.token.span.shrink_to_lo(),
+ hi: self.token.span.shrink_to_hi(),
+ },
});
}
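Note: two sketches of what the `expr.rs` changes enable. Both assume a nightly toolchain and are illustrations, not test cases from this patch.

First, the float-splitting path: in `offset_of!(S, tup.1.0)` the lexer produces `1.0` as a single float token, and `break_up_float` / `parse_field_name_maybe_tuple` split it back into the two tuple indices so the nested access parses (sketch, assuming `feature(offset_of)` is available):

```rust
#![feature(offset_of)]

use std::mem::offset_of;

struct S {
    tup: (u8, (u16, u32)),
}

fn main() {
    // `1.0` is one float token; the parser now breaks it into the indices 1 and 0.
    let off = offset_of!(S, tup.1.0);
    println!("offset of the inner u16: {off}");
}
```

Second, the surface syntax for `become`, gated on `explicit_tail_calls`. This is only about what the parser now accepts; how much of the feature works beyond parsing depends on the rest of the (incomplete) implementation:

```rust
#![allow(incomplete_features)]
#![feature(explicit_tail_calls)]

fn countdown(n: u64) -> u64 {
    // Parsed as `ExprKind::Become`, feature-gated on `explicit_tail_calls`.
    if n == 0 { 0 } else { become countdown(n - 1) }
}

fn main() {
    println!("{}", countdown(10));
}
```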
diff --git a/compiler/rustc_parse/src/parser/generics.rs b/compiler/rustc_parse/src/parser/generics.rs
index cd779b0b4..8ab38c4fb 100644
--- a/compiler/rustc_parse/src/parser/generics.rs
+++ b/compiler/rustc_parse/src/parser/generics.rs
@@ -43,6 +43,15 @@ impl<'a> Parser<'a> {
fn parse_ty_param(&mut self, preceding_attrs: AttrVec) -> PResult<'a, GenericParam> {
let ident = self.parse_ident()?;
+ // We might have a typo'd `Const` that was parsed as a type parameter.
+ if self.may_recover()
+ && ident.name.as_str().to_ascii_lowercase() == kw::Const.as_str()
+ && self.check_ident()
+ // `Const` followed by IDENT
+ {
+ return Ok(self.recover_const_param_with_mistyped_const(preceding_attrs, ident)?);
+ }
+
// Parse optional colon and param bounds.
let mut colon_span = None;
let bounds = if self.eat(&token::Colon) {
@@ -120,6 +129,41 @@ impl<'a> Parser<'a> {
})
}
+ pub(crate) fn recover_const_param_with_mistyped_const(
+ &mut self,
+ preceding_attrs: AttrVec,
+ mistyped_const_ident: Ident,
+ ) -> PResult<'a, GenericParam> {
+ let ident = self.parse_ident()?;
+ self.expect(&token::Colon)?;
+ let ty = self.parse_ty()?;
+
+ // Parse optional const generics default value.
+ let default = if self.eat(&token::Eq) { Some(self.parse_const_arg()?) } else { None };
+
+ let mut err = self.struct_span_err(
+ mistyped_const_ident.span,
+ format!("`const` keyword was mistyped as `{}`", mistyped_const_ident.as_str()),
+ );
+ err.span_suggestion_verbose(
+ mistyped_const_ident.span,
+ "use the `const` keyword",
+ kw::Const.as_str(),
+ Applicability::MachineApplicable,
+ );
+ err.emit();
+
+ Ok(GenericParam {
+ ident,
+ id: ast::DUMMY_NODE_ID,
+ attrs: preceding_attrs,
+ bounds: Vec::new(),
+ kind: GenericParamKind::Const { ty, kw_span: mistyped_const_ident.span, default },
+ is_placeholder: false,
+ colon_span: None,
+ })
+ }
+
/// Parses a (possibly empty) list of lifetime and type parameters, possibly including
/// a trailing comma and erroneous trailing attributes.
pub(super) fn parse_generic_params(&mut self) -> PResult<'a, ThinVec<ast::GenericParam>> {
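Note: a hedged illustration of the recovery added above (hypothetical input, not a test from this patch):

```rust
// Hypothetical input: `const` mistyped with different casing in a generic
// parameter list. The case-insensitive check above catches it, emits
// "`const` keyword was mistyped as `Const`", and recovers by parsing the
// parameter as a const generic so later phases can continue.
//
//     fn get<Const N: usize>() -> usize { N }
//            ^^^^^ help: use the `const` keyword
//
// The machine-applicable suggestion yields the valid form:
fn get<const N: usize>() -> usize {
    N
}

fn main() {
    println!("{}", get::<3>());
}
```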
diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs
index 3783ec41b..1470180de 100644
--- a/compiler/rustc_parse/src/parser/item.rs
+++ b/compiler/rustc_parse/src/parser/item.rs
@@ -2182,7 +2182,11 @@ impl<'a> Parser<'a> {
// `extern ABI fn`
|| self.check_keyword_case(kw::Extern, case)
&& self.look_ahead(1, |t| t.can_begin_literal_maybe_minus())
- && self.look_ahead(2, |t| t.is_keyword_case(kw::Fn, case))
+ && (self.look_ahead(2, |t| t.is_keyword_case(kw::Fn, case)) ||
+ // this branch only exists so we can give a better diagnostic later; `pub` is not allowed here
+ (self.may_recover()
+ && self.look_ahead(2, |t| t.is_keyword(kw::Pub))
+ && self.look_ahead(3, |t| t.is_keyword_case(kw::Fn, case))))
}
/// Parses all the "front matter" (or "qualifiers") for a `fn` declaration,
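Note: a hedged illustration of the extra look-ahead above (hypothetical input; the function name is made up):

```rust
// Hypothetical input: a visibility qualifier written after the ABI string.
// The extra look-ahead lets the parser still recognize this as the start of a
// function, so it can point at the misplaced `pub` later instead of failing
// with a generic parse error.
//
//     extern "C" pub fn callback() {}
//                ^^^ `pub` is not allowed here
//
// The valid form puts the visibility first:
pub extern "C" fn callback() {}

fn main() {
    callback();
}
```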
diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs
index c317d9636..fdf365178 100644
--- a/compiler/rustc_parse/src/parser/pat.rs
+++ b/compiler/rustc_parse/src/parser/pat.rs
@@ -938,7 +938,8 @@ impl<'a> Parser<'a> {
let mut etc = false;
let mut ate_comma = true;
let mut delayed_err: Option<DiagnosticBuilder<'a, ErrorGuaranteed>> = None;
- let mut etc_span = None;
+ let mut first_etc_and_maybe_comma_span = None;
+ let mut last_non_comma_dotdot_span = None;
while self.token != token::CloseDelim(Delimiter::Brace) {
let attrs = match self.parse_outer_attributes() {
@@ -969,12 +970,27 @@ impl<'a> Parser<'a> {
{
etc = true;
let mut etc_sp = self.token.span;
+ if first_etc_and_maybe_comma_span.is_none() {
+ if let Some(comma_tok) = self
+ .look_ahead(1, |t| if *t == token::Comma { Some(t.clone()) } else { None })
+ {
+ let nw_span = self
+ .sess
+ .source_map()
+ .span_extend_to_line(comma_tok.span)
+ .trim_start(comma_tok.span.shrink_to_lo())
+ .map(|s| self.sess.source_map().span_until_non_whitespace(s));
+ first_etc_and_maybe_comma_span = nw_span.map(|s| etc_sp.to(s));
+ } else {
+ first_etc_and_maybe_comma_span =
+ Some(self.sess.source_map().span_until_non_whitespace(etc_sp));
+ }
+ }
self.recover_bad_dot_dot();
self.bump(); // `..` || `...` || `_`
if self.token == token::CloseDelim(Delimiter::Brace) {
- etc_span = Some(etc_sp);
break;
}
let token_str = super::token_descr(&self.token);
@@ -996,7 +1012,6 @@ impl<'a> Parser<'a> {
ate_comma = true;
}
- etc_span = Some(etc_sp.until(self.token.span));
if self.token == token::CloseDelim(Delimiter::Brace) {
// If the struct looks otherwise well formed, recover and continue.
if let Some(sp) = comma_sp {
@@ -1040,6 +1055,9 @@ impl<'a> Parser<'a> {
}
}?;
ate_comma = this.eat(&token::Comma);
+
+ last_non_comma_dotdot_span = Some(this.prev_token.span);
+
// We just ate a comma, so there's no need to use
// `TrailingToken::Comma`
Ok((field, TrailingToken::None))
@@ -1049,15 +1067,30 @@ impl<'a> Parser<'a> {
}
if let Some(mut err) = delayed_err {
- if let Some(etc_span) = etc_span {
- err.multipart_suggestion(
- "move the `..` to the end of the field list",
- vec![
- (etc_span, String::new()),
- (self.token.span, format!("{}.. }}", if ate_comma { "" } else { ", " })),
- ],
- Applicability::MachineApplicable,
- );
+ if let Some(first_etc_span) = first_etc_and_maybe_comma_span {
+ if self.prev_token == token::DotDot {
+ // We have `.., x, ..`.
+ err.multipart_suggestion(
+ "remove the starting `..`",
+ vec![(first_etc_span, String::new())],
+ Applicability::MachineApplicable,
+ );
+ } else {
+ if let Some(last_non_comma_dotdot_span) = last_non_comma_dotdot_span {
+ // We have `.., x`.
+ err.multipart_suggestion(
+ "move the `..` to the end of the field list",
+ vec![
+ (first_etc_span, String::new()),
+ (
+ self.token.span.to(last_non_comma_dotdot_span.shrink_to_hi()),
+ format!("{} .. }}", if ate_comma { "" } else { "," }),
+ ),
+ ],
+ Applicability::MachineApplicable,
+ );
+ }
+ }
}
err.emit();
}
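Note: hedged illustrations of the two suggestions above (hypothetical patterns, not tests from this patch):

```rust
// In a struct pattern, `..` must come last. The reworked spans drive two
// different suggestions (hypothetical inputs):
//
//     let Point { .., x } = p;
//                 ^^ help: move the `..` to the end of the field list
//
//     let Point { .., x, .. } = p;
//                 ^^ help: remove the starting `..`
//
// Both machine-applicable fixes arrive at the valid form below.
struct Point {
    x: i32,
    y: i32,
}

fn main() {
    let p = Point { x: 1, y: 2 };
    let Point { x, .. } = p;
    let _ = (x, p.y);
}
```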
diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs
index 54f9fc5d2..9fcf51a04 100644
--- a/compiler/rustc_parse/src/parser/stmt.rs
+++ b/compiler/rustc_parse/src/parser/stmt.rs
@@ -23,6 +23,7 @@ use rustc_errors::{Applicability, DiagnosticBuilder, ErrorGuaranteed, PResult};
use rustc_span::source_map::{BytePos, Span};
use rustc_span::symbol::{kw, sym, Ident};
+use std::borrow::Cow;
use std::mem;
use thin_vec::{thin_vec, ThinVec};
@@ -364,7 +365,7 @@ impl<'a> Parser<'a> {
// `let...else if`. Emit the same error that `parse_block()` would,
// but explicitly point out that this pattern is not allowed.
let msg = "conditional `else if` is not supported for `let...else`";
- return Err(self.error_block_no_opening_brace_msg(msg));
+ return Err(self.error_block_no_opening_brace_msg(Cow::from(msg)));
}
let els = self.parse_block()?;
self.check_let_else_init_bool_expr(&init);
@@ -438,7 +439,7 @@ impl<'a> Parser<'a> {
fn error_block_no_opening_brace_msg(
&mut self,
- msg: &str,
+ msg: Cow<'static, str>,
) -> DiagnosticBuilder<'a, ErrorGuaranteed> {
let sp = self.token.span;
let mut e = self.struct_span_err(sp, msg);
@@ -502,7 +503,7 @@ impl<'a> Parser<'a> {
fn error_block_no_opening_brace<T>(&mut self) -> PResult<'a, T> {
let tok = super::token_descr(&self.token);
let msg = format!("expected `{{`, found {}", tok);
- Err(self.error_block_no_opening_brace_msg(&msg))
+ Err(self.error_block_no_opening_brace_msg(Cow::from(msg)))
}
/// Parses a block. Inner attributes are allowed.
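Note: a minimal sketch of the `Cow<'static, str>` pattern adopted above (the function name here is illustrative, not a rustc API):

```rust
use std::borrow::Cow;

// A `Cow<'static, str>` message parameter accepts both a borrowed `'static`
// literal and an owned, runtime-formatted `String`, which is why the two call
// sites above can pass `Cow::from(msg)` and `Cow::from(format!(..))` without
// forcing an allocation for the static case.
fn error_with_message(msg: Cow<'static, str>) {
    eprintln!("error: {msg}");
}

fn main() {
    error_with_message(Cow::from(
        "conditional `else if` is not supported for `let...else`",
    ));
    error_with_message(Cow::from(format!("expected `{{`, found {}", "`)`")));
}
```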