author     Daniel Baumann <daniel.baumann@progress-linux.org>   2024-04-17 12:11:38 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>   2024-04-17 12:12:43 +0000
commit     cf94bdc0742c13e2a0cac864c478b8626b266e1b (patch)
tree       044670aa50cc5e2b4229aa0b6b3df6676730c0a6 /compiler/rustc_parse
parent     Adding debian version 1.65.0+dfsg1-2. (diff)
download   rustc-cf94bdc0742c13e2a0cac864c478b8626b266e1b.tar.xz
           rustc-cf94bdc0742c13e2a0cac864c478b8626b266e1b.zip
Merging upstream version 1.66.0+dfsg1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'compiler/rustc_parse')
-rw-r--r--  compiler/rustc_parse/Cargo.toml                             |    1
-rw-r--r--  compiler/rustc_parse/src/errors.rs                          | 1237
-rw-r--r--  compiler/rustc_parse/src/lexer/mod.rs                       |  397
-rw-r--r--  compiler/rustc_parse/src/lexer/tokentrees.rs                |  428
-rw-r--r--  compiler/rustc_parse/src/lexer/unescape_error_reporting.rs  |   19
-rw-r--r--  compiler/rustc_parse/src/lib.rs                             |    6
-rw-r--r--  compiler/rustc_parse/src/parser/attr.rs                     |  159
-rw-r--r--  compiler/rustc_parse/src/parser/attr_wrapper.rs             |   33
-rw-r--r--  compiler/rustc_parse/src/parser/diagnostics.rs              |  946
-rw-r--r--  compiler/rustc_parse/src/parser/expr.rs                     |  509
-rw-r--r--  compiler/rustc_parse/src/parser/generics.rs                 |   36
-rw-r--r--  compiler/rustc_parse/src/parser/item.rs                     |  163
-rw-r--r--  compiler/rustc_parse/src/parser/mod.rs                      |  151
-rw-r--r--  compiler/rustc_parse/src/parser/pat.rs                      |   26
-rw-r--r--  compiler/rustc_parse/src/parser/stmt.rs                     |  169
-rw-r--r--  compiler/rustc_parse/src/parser/ty.rs                       |   11
16 files changed, 2523 insertions(+), 1768 deletions(-)
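
Illustrative sketch (not part of the patch): the new compiler/rustc_parse/src/errors.rs below defines parser diagnostics with rustc's derive macros. A minimal example of the pattern, copying one struct from the diff; the emit call site is a hypothetical sketch assuming the ParseSess::emit_err entry point, not something taken from this diff:

    use rustc_macros::Diagnostic;
    use rustc_span::Span;

    // Each field becomes an argument of the Fluent message named in #[diag];
    // #[primary_span] marks the span the error points at, and #[suggestion]
    // attaches a fix-it together with its applicability.
    #[derive(Diagnostic)]
    #[diag(parser_maybe_recover_from_bad_qpath_stage_2)]
    pub(crate) struct BadQPathStage2 {
        #[primary_span]
        #[suggestion(code = "", applicability = "maybe-incorrect")]
        pub span: Span,
        pub ty: String,
    }

    // Hypothetical use from parser recovery code (assumed, for illustration):
    //     self.sess.emit_err(BadQPathStage2 { span, ty });
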
diff --git a/compiler/rustc_parse/Cargo.toml b/compiler/rustc_parse/Cargo.toml
index c6ca260e9..a5c94e164 100644
--- a/compiler/rustc_parse/Cargo.toml
+++ b/compiler/rustc_parse/Cargo.toml
@@ -4,7 +4,6 @@ version = "0.0.0"
edition = "2021"
[lib]
-doctest = false
[dependencies]
bitflags = "1.0"
diff --git a/compiler/rustc_parse/src/errors.rs b/compiler/rustc_parse/src/errors.rs
new file mode 100644
index 000000000..9b177c518
--- /dev/null
+++ b/compiler/rustc_parse/src/errors.rs
@@ -0,0 +1,1237 @@
+use rustc_ast::token::Token;
+use rustc_ast::Path;
+use rustc_errors::{fluent, AddToDiagnostic, Applicability, EmissionGuarantee, IntoDiagnostic};
+use rustc_macros::{Diagnostic, Subdiagnostic};
+use rustc_session::errors::ExprParenthesesNeeded;
+use rustc_span::symbol::Ident;
+use rustc_span::{Span, Symbol};
+
+use crate::parser::TokenDescription;
+
+#[derive(Diagnostic)]
+#[diag(parser_maybe_report_ambiguous_plus)]
+pub(crate) struct AmbiguousPlus {
+ pub sum_ty: String,
+ #[primary_span]
+ #[suggestion(code = "({sum_ty})")]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_maybe_recover_from_bad_type_plus, code = "E0178")]
+pub(crate) struct BadTypePlus {
+ pub ty: String,
+ #[primary_span]
+ pub span: Span,
+ #[subdiagnostic]
+ pub sub: BadTypePlusSub,
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum BadTypePlusSub {
+ #[suggestion(
+ parser_add_paren,
+ code = "{sum_with_parens}",
+ applicability = "machine-applicable"
+ )]
+ AddParen {
+ sum_with_parens: String,
+ #[primary_span]
+ span: Span,
+ },
+ #[label(parser_forgot_paren)]
+ ForgotParen {
+ #[primary_span]
+ span: Span,
+ },
+ #[label(parser_expect_path)]
+ ExpectPath {
+ #[primary_span]
+ span: Span,
+ },
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_maybe_recover_from_bad_qpath_stage_2)]
+pub(crate) struct BadQPathStage2 {
+ #[primary_span]
+ #[suggestion(code = "", applicability = "maybe-incorrect")]
+ pub span: Span,
+ pub ty: String,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_incorrect_semicolon)]
+pub(crate) struct IncorrectSemicolon<'a> {
+ #[primary_span]
+ #[suggestion_short(code = "", applicability = "machine-applicable")]
+ pub span: Span,
+ #[help]
+ pub opt_help: Option<()>,
+ pub name: &'a str,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_incorrect_use_of_await)]
+pub(crate) struct IncorrectUseOfAwait {
+ #[primary_span]
+ #[suggestion(parentheses_suggestion, code = "", applicability = "machine-applicable")]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_incorrect_use_of_await)]
+pub(crate) struct IncorrectAwait {
+ #[primary_span]
+ pub span: Span,
+ #[suggestion(postfix_suggestion, code = "{expr}.await{question_mark}")]
+ pub sugg_span: (Span, Applicability),
+ pub expr: String,
+ pub question_mark: &'static str,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_in_in_typo)]
+pub(crate) struct InInTypo {
+ #[primary_span]
+ pub span: Span,
+ #[suggestion(code = "", applicability = "machine-applicable")]
+ pub sugg_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_invalid_variable_declaration)]
+pub(crate) struct InvalidVariableDeclaration {
+ #[primary_span]
+ pub span: Span,
+ #[subdiagnostic]
+ pub sub: InvalidVariableDeclarationSub,
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum InvalidVariableDeclarationSub {
+ #[suggestion(parser_switch_mut_let_order, applicability = "maybe-incorrect", code = "let mut")]
+ SwitchMutLetOrder(#[primary_span] Span),
+ #[suggestion(
+ parser_missing_let_before_mut,
+ applicability = "machine-applicable",
+ code = "let mut"
+ )]
+ MissingLet(#[primary_span] Span),
+ #[suggestion(parser_use_let_not_auto, applicability = "machine-applicable", code = "let")]
+ UseLetNotAuto(#[primary_span] Span),
+ #[suggestion(parser_use_let_not_var, applicability = "machine-applicable", code = "let")]
+ UseLetNotVar(#[primary_span] Span),
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_invalid_comparison_operator)]
+pub(crate) struct InvalidComparisonOperator {
+ #[primary_span]
+ pub span: Span,
+ pub invalid: String,
+ #[subdiagnostic]
+ pub sub: InvalidComparisonOperatorSub,
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum InvalidComparisonOperatorSub {
+ #[suggestion_short(use_instead, applicability = "machine-applicable", code = "{correct}")]
+ Correctable {
+ #[primary_span]
+ span: Span,
+ invalid: String,
+ correct: String,
+ },
+ #[label(spaceship_operator_invalid)]
+ Spaceship(#[primary_span] Span),
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_invalid_logical_operator)]
+#[note]
+pub(crate) struct InvalidLogicalOperator {
+ #[primary_span]
+ pub span: Span,
+ pub incorrect: String,
+ #[subdiagnostic]
+ pub sub: InvalidLogicalOperatorSub,
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum InvalidLogicalOperatorSub {
+ #[suggestion_short(
+ use_amp_amp_for_conjunction,
+ applicability = "machine-applicable",
+ code = "&&"
+ )]
+ Conjunction(#[primary_span] Span),
+ #[suggestion_short(
+ use_pipe_pipe_for_disjunction,
+ applicability = "machine-applicable",
+ code = "||"
+ )]
+ Disjunction(#[primary_span] Span),
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_tilde_is_not_unary_operator)]
+pub(crate) struct TildeAsUnaryOperator(
+ #[primary_span]
+ #[suggestion_short(applicability = "machine-applicable", code = "!")]
+ pub Span,
+);
+
+#[derive(Diagnostic)]
+#[diag(parser_unexpected_token_after_not)]
+pub(crate) struct NotAsNegationOperator {
+ #[primary_span]
+ pub negated: Span,
+ pub negated_desc: String,
+ #[subdiagnostic]
+ pub sub: NotAsNegationOperatorSub,
+}
+
+#[derive(Subdiagnostic)]
+pub enum NotAsNegationOperatorSub {
+ #[suggestion_short(
+ parser_unexpected_token_after_not_default,
+ applicability = "machine-applicable",
+ code = "!"
+ )]
+ SuggestNotDefault(#[primary_span] Span),
+
+ #[suggestion_short(
+ parser_unexpected_token_after_not_bitwise,
+ applicability = "machine-applicable",
+ code = "!"
+ )]
+ SuggestNotBitwise(#[primary_span] Span),
+
+ #[suggestion_short(
+ parser_unexpected_token_after_not_logical,
+ applicability = "machine-applicable",
+ code = "!"
+ )]
+ SuggestNotLogical(#[primary_span] Span),
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_malformed_loop_label)]
+pub(crate) struct MalformedLoopLabel {
+ #[primary_span]
+ #[suggestion(applicability = "machine-applicable", code = "{correct_label}")]
+ pub span: Span,
+ pub correct_label: Ident,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_lifetime_in_borrow_expression)]
+pub(crate) struct LifetimeInBorrowExpression {
+ #[primary_span]
+ pub span: Span,
+ #[suggestion(applicability = "machine-applicable", code = "")]
+ #[label]
+ pub lifetime_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_field_expression_with_generic)]
+pub(crate) struct FieldExpressionWithGeneric(#[primary_span] pub Span);
+
+#[derive(Diagnostic)]
+#[diag(parser_macro_invocation_with_qualified_path)]
+pub(crate) struct MacroInvocationWithQualifiedPath(#[primary_span] pub Span);
+
+#[derive(Diagnostic)]
+#[diag(parser_unexpected_token_after_label)]
+pub(crate) struct UnexpectedTokenAfterLabel {
+ #[primary_span]
+ #[label(parser_unexpected_token_after_label)]
+ pub span: Span,
+ #[suggestion_verbose(suggestion_remove_label, code = "")]
+ pub remove_label: Option<Span>,
+ #[subdiagnostic]
+ pub enclose_in_block: Option<UnexpectedTokenAfterLabelSugg>,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(suggestion_enclose_in_block, applicability = "machine-applicable")]
+pub(crate) struct UnexpectedTokenAfterLabelSugg {
+ #[suggestion_part(code = "{{ ")]
+ pub left: Span,
+ #[suggestion_part(code = " }}")]
+ pub right: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_require_colon_after_labeled_expression)]
+#[note]
+pub(crate) struct RequireColonAfterLabeledExpression {
+ #[primary_span]
+ pub span: Span,
+ #[label]
+ pub label: Span,
+ #[suggestion_short(applicability = "machine-applicable", code = ": ")]
+ pub label_end: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_do_catch_syntax_removed)]
+#[note]
+pub(crate) struct DoCatchSyntaxRemoved {
+ #[primary_span]
+ #[suggestion(applicability = "machine-applicable", code = "try")]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_float_literal_requires_integer_part)]
+pub(crate) struct FloatLiteralRequiresIntegerPart {
+ #[primary_span]
+ #[suggestion(applicability = "machine-applicable", code = "{correct}")]
+ pub span: Span,
+ pub correct: String,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_invalid_int_literal_width)]
+#[help]
+pub(crate) struct InvalidIntLiteralWidth {
+ #[primary_span]
+ pub span: Span,
+ pub width: String,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_invalid_num_literal_base_prefix)]
+#[note]
+pub(crate) struct InvalidNumLiteralBasePrefix {
+ #[primary_span]
+ #[suggestion(applicability = "maybe-incorrect", code = "{fixed}")]
+ pub span: Span,
+ pub fixed: String,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_invalid_num_literal_suffix)]
+#[help]
+pub(crate) struct InvalidNumLiteralSuffix {
+ #[primary_span]
+ #[label]
+ pub span: Span,
+ pub suffix: String,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_invalid_float_literal_width)]
+#[help]
+pub(crate) struct InvalidFloatLiteralWidth {
+ #[primary_span]
+ pub span: Span,
+ pub width: String,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_invalid_float_literal_suffix)]
+#[help]
+pub(crate) struct InvalidFloatLiteralSuffix {
+ #[primary_span]
+ #[label]
+ pub span: Span,
+ pub suffix: String,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_int_literal_too_large)]
+pub(crate) struct IntLiteralTooLarge {
+ #[primary_span]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_missing_semicolon_before_array)]
+pub(crate) struct MissingSemicolonBeforeArray {
+ #[primary_span]
+ pub open_delim: Span,
+ #[suggestion_verbose(applicability = "maybe-incorrect", code = ";")]
+ pub semicolon: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_invalid_block_macro_segment)]
+pub(crate) struct InvalidBlockMacroSegment {
+ #[primary_span]
+ pub span: Span,
+ #[label]
+ pub context: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_if_expression_missing_then_block)]
+pub(crate) struct IfExpressionMissingThenBlock {
+ #[primary_span]
+ pub if_span: Span,
+ #[subdiagnostic]
+ pub sub: IfExpressionMissingThenBlockSub,
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum IfExpressionMissingThenBlockSub {
+ #[help(condition_possibly_unfinished)]
+ UnfinishedCondition(#[primary_span] Span),
+ #[help(add_then_block)]
+ AddThenBlock(#[primary_span] Span),
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_if_expression_missing_condition)]
+pub(crate) struct IfExpressionMissingCondition {
+ #[primary_span]
+ #[label(condition_label)]
+ pub if_span: Span,
+ #[label(block_label)]
+ pub block_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_expected_expression_found_let)]
+pub(crate) struct ExpectedExpressionFoundLet {
+ #[primary_span]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_expected_else_block)]
+pub(crate) struct ExpectedElseBlock {
+ #[primary_span]
+ pub first_tok_span: Span,
+ pub first_tok: String,
+ #[label]
+ pub else_span: Span,
+ #[suggestion(applicability = "maybe-incorrect", code = "if ")]
+ pub condition_start: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_outer_attribute_not_allowed_on_if_else)]
+pub(crate) struct OuterAttributeNotAllowedOnIfElse {
+ #[primary_span]
+ pub last: Span,
+
+ #[label(branch_label)]
+ pub branch_span: Span,
+
+ #[label(ctx_label)]
+ pub ctx_span: Span,
+ pub ctx: String,
+
+ #[suggestion(applicability = "machine-applicable", code = "")]
+ pub attributes: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_missing_in_in_for_loop)]
+pub(crate) struct MissingInInForLoop {
+ #[primary_span]
+ pub span: Span,
+ #[subdiagnostic]
+ pub sub: MissingInInForLoopSub,
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum MissingInInForLoopSub {
+ // Has been misleading, at least in the past (closed Issue #48492), thus maybe-incorrect
+ #[suggestion_short(use_in_not_of, applicability = "maybe-incorrect", code = "in")]
+ InNotOf(#[primary_span] Span),
+ #[suggestion_short(add_in, applicability = "maybe-incorrect", code = " in ")]
+ AddIn(#[primary_span] Span),
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_missing_comma_after_match_arm)]
+pub(crate) struct MissingCommaAfterMatchArm {
+ #[primary_span]
+ #[suggestion(applicability = "machine-applicable", code = ",")]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_catch_after_try)]
+#[help]
+pub(crate) struct CatchAfterTry {
+ #[primary_span]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_comma_after_base_struct)]
+#[note]
+pub(crate) struct CommaAfterBaseStruct {
+ #[primary_span]
+ pub span: Span,
+ #[suggestion_short(applicability = "machine-applicable", code = "")]
+ pub comma: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_eq_field_init)]
+pub(crate) struct EqFieldInit {
+ #[primary_span]
+ pub span: Span,
+ #[suggestion(applicability = "machine-applicable", code = ":")]
+ pub eq: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_dotdotdot)]
+pub(crate) struct DotDotDot {
+ #[primary_span]
+ #[suggestion(suggest_exclusive_range, applicability = "maybe-incorrect", code = "..")]
+ #[suggestion(suggest_inclusive_range, applicability = "maybe-incorrect", code = "..=")]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_left_arrow_operator)]
+pub(crate) struct LeftArrowOperator {
+ #[primary_span]
+ #[suggestion(applicability = "maybe-incorrect", code = "< -")]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_remove_let)]
+pub(crate) struct RemoveLet {
+ #[primary_span]
+ #[suggestion(applicability = "machine-applicable", code = "")]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_use_eq_instead)]
+pub(crate) struct UseEqInstead {
+ #[primary_span]
+ #[suggestion_short(applicability = "machine-applicable", code = "=")]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_use_empty_block_not_semi)]
+pub(crate) struct UseEmptyBlockNotSemi {
+ #[primary_span]
+ #[suggestion_hidden(applicability = "machine-applicable", code = "{{}}")]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_comparison_interpreted_as_generic)]
+pub(crate) struct ComparisonInterpretedAsGeneric {
+ #[primary_span]
+ #[label(label_comparison)]
+ pub comparison: Span,
+ pub r#type: Path,
+ #[label(label_args)]
+ pub args: Span,
+ #[subdiagnostic]
+ pub suggestion: ComparisonOrShiftInterpretedAsGenericSugg,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_shift_interpreted_as_generic)]
+pub(crate) struct ShiftInterpretedAsGeneric {
+ #[primary_span]
+ #[label(label_comparison)]
+ pub shift: Span,
+ pub r#type: Path,
+ #[label(label_args)]
+ pub args: Span,
+ #[subdiagnostic]
+ pub suggestion: ComparisonOrShiftInterpretedAsGenericSugg,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(suggestion, applicability = "machine-applicable")]
+pub(crate) struct ComparisonOrShiftInterpretedAsGenericSugg {
+ #[suggestion_part(code = "(")]
+ pub left: Span,
+ #[suggestion_part(code = ")")]
+ pub right: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_found_expr_would_be_stmt)]
+pub(crate) struct FoundExprWouldBeStmt {
+ #[primary_span]
+ #[label]
+ pub span: Span,
+ pub token: Token,
+ #[subdiagnostic]
+ pub suggestion: ExprParenthesesNeeded,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_leading_plus_not_supported)]
+pub(crate) struct LeadingPlusNotSupported {
+ #[primary_span]
+ #[label]
+ pub span: Span,
+ #[suggestion_verbose(suggestion_remove_plus, code = "", applicability = "machine-applicable")]
+ pub remove_plus: Option<Span>,
+ #[subdiagnostic]
+ pub add_parentheses: Option<ExprParenthesesNeeded>,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_parentheses_with_struct_fields)]
+pub(crate) struct ParenthesesWithStructFields {
+ #[primary_span]
+ pub span: Span,
+ pub r#type: Path,
+ #[subdiagnostic]
+ pub braces_for_struct: BracesForStructLiteral,
+ #[subdiagnostic]
+ pub no_fields_for_fn: NoFieldsForFnCall,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(suggestion_braces_for_struct, applicability = "maybe-incorrect")]
+pub(crate) struct BracesForStructLiteral {
+ #[suggestion_part(code = " {{ ")]
+ pub first: Span,
+ #[suggestion_part(code = " }}")]
+ pub second: Span,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(suggestion_no_fields_for_fn, applicability = "maybe-incorrect")]
+pub(crate) struct NoFieldsForFnCall {
+ #[suggestion_part(code = "")]
+ pub fields: Vec<Span>,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_labeled_loop_in_break)]
+pub(crate) struct LabeledLoopInBreak {
+ #[primary_span]
+ pub span: Span,
+ #[subdiagnostic]
+ pub sub: WrapExpressionInParentheses,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(
+ parser_sugg_wrap_expression_in_parentheses,
+ applicability = "machine-applicable"
+)]
+pub(crate) struct WrapExpressionInParentheses {
+ #[suggestion_part(code = "(")]
+ pub left: Span,
+ #[suggestion_part(code = ")")]
+ pub right: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_array_brackets_instead_of_braces)]
+pub(crate) struct ArrayBracketsInsteadOfSpaces {
+ #[primary_span]
+ pub span: Span,
+ #[subdiagnostic]
+ pub sub: ArrayBracketsInsteadOfSpacesSugg,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(suggestion, applicability = "maybe-incorrect")]
+pub(crate) struct ArrayBracketsInsteadOfSpacesSugg {
+ #[suggestion_part(code = "[")]
+ pub left: Span,
+ #[suggestion_part(code = "]")]
+ pub right: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_match_arm_body_without_braces)]
+pub(crate) struct MatchArmBodyWithoutBraces {
+ #[primary_span]
+ #[label(label_statements)]
+ pub statements: Span,
+ #[label(label_arrow)]
+ pub arrow: Span,
+ pub num_statements: usize,
+ #[subdiagnostic]
+ pub sub: MatchArmBodyWithoutBracesSugg,
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum MatchArmBodyWithoutBracesSugg {
+ #[multipart_suggestion(suggestion_add_braces, applicability = "machine-applicable")]
+ AddBraces {
+ #[suggestion_part(code = "{{ ")]
+ left: Span,
+ #[suggestion_part(code = " }}")]
+ right: Span,
+ },
+ #[suggestion(
+ suggestion_use_comma_not_semicolon,
+ code = ",",
+ applicability = "machine-applicable"
+ )]
+ UseComma {
+ #[primary_span]
+ semicolon: Span,
+ },
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_struct_literal_not_allowed_here)]
+pub(crate) struct StructLiteralNotAllowedHere {
+ #[primary_span]
+ pub span: Span,
+ #[subdiagnostic]
+ pub sub: StructLiteralNotAllowedHereSugg,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(suggestion, applicability = "machine-applicable")]
+pub(crate) struct StructLiteralNotAllowedHereSugg {
+ #[suggestion_part(code = "(")]
+ pub left: Span,
+ #[suggestion_part(code = ")")]
+ pub right: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_invalid_interpolated_expression)]
+pub(crate) struct InvalidInterpolatedExpression {
+ #[primary_span]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_hexadecimal_float_literal_not_supported)]
+pub(crate) struct HexadecimalFloatLiteralNotSupported {
+ #[primary_span]
+ #[label(parser_not_supported)]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_octal_float_literal_not_supported)]
+pub(crate) struct OctalFloatLiteralNotSupported {
+ #[primary_span]
+ #[label(parser_not_supported)]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_binary_float_literal_not_supported)]
+pub(crate) struct BinaryFloatLiteralNotSupported {
+ #[primary_span]
+ #[label(parser_not_supported)]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_invalid_literal_suffix)]
+pub(crate) struct InvalidLiteralSuffix {
+ #[primary_span]
+ #[label]
+ pub span: Span,
+ // FIXME(#100717)
+ pub kind: String,
+ pub suffix: Symbol,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_invalid_literal_suffix_on_tuple_index)]
+pub(crate) struct InvalidLiteralSuffixOnTupleIndex {
+ #[primary_span]
+ #[label]
+ pub span: Span,
+ pub suffix: Symbol,
+ #[help(tuple_exception_line_1)]
+ #[help(tuple_exception_line_2)]
+ #[help(tuple_exception_line_3)]
+ pub exception: Option<()>,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_non_string_abi_literal)]
+pub(crate) struct NonStringAbiLiteral {
+ #[primary_span]
+ #[suggestion(code = "\"C\"", applicability = "maybe-incorrect")]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_mismatched_closing_delimiter)]
+pub(crate) struct MismatchedClosingDelimiter {
+ #[primary_span]
+ pub spans: Vec<Span>,
+ pub delimiter: String,
+ #[label(label_unmatched)]
+ pub unmatched: Span,
+ #[label(label_opening_candidate)]
+ pub opening_candidate: Option<Span>,
+ #[label(label_unclosed)]
+ pub unclosed: Option<Span>,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_incorrect_visibility_restriction, code = "E0704")]
+#[help]
+pub(crate) struct IncorrectVisibilityRestriction {
+ #[primary_span]
+ #[suggestion(code = "in {inner_str}", applicability = "machine-applicable")]
+ pub span: Span,
+ pub inner_str: String,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_assignment_else_not_allowed)]
+pub(crate) struct AssignmentElseNotAllowed {
+ #[primary_span]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_expected_statement_after_outer_attr)]
+pub(crate) struct ExpectedStatementAfterOuterAttr {
+ #[primary_span]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_doc_comment_does_not_document_anything, code = "E0585")]
+#[help]
+pub(crate) struct DocCommentDoesNotDocumentAnything {
+ #[primary_span]
+ pub span: Span,
+ #[suggestion(code = ",", applicability = "machine-applicable")]
+ pub missing_comma: Option<Span>,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_const_let_mutually_exclusive)]
+pub(crate) struct ConstLetMutuallyExclusive {
+ #[primary_span]
+ #[suggestion(code = "const", applicability = "maybe-incorrect")]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_invalid_expression_in_let_else)]
+pub(crate) struct InvalidExpressionInLetElse {
+ #[primary_span]
+ pub span: Span,
+ pub operator: &'static str,
+ #[subdiagnostic]
+ pub sugg: WrapExpressionInParentheses,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_invalid_curly_in_let_else)]
+pub(crate) struct InvalidCurlyInLetElse {
+ #[primary_span]
+ pub span: Span,
+ #[subdiagnostic]
+ pub sugg: WrapExpressionInParentheses,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_compound_assignment_expression_in_let)]
+#[help]
+pub(crate) struct CompoundAssignmentExpressionInLet {
+ #[primary_span]
+ #[suggestion_short(code = "=", applicability = "maybe-incorrect")]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_suffixed_literal_in_attribute)]
+#[help]
+pub(crate) struct SuffixedLiteralInAttribute {
+ #[primary_span]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_invalid_meta_item)]
+pub(crate) struct InvalidMetaItem {
+ #[primary_span]
+ pub span: Span,
+ pub token: Token,
+}
+
+#[derive(Subdiagnostic)]
+#[suggestion_verbose(
+ parser_sugg_escape_to_use_as_identifier,
+ applicability = "maybe-incorrect",
+ code = "r#"
+)]
+pub(crate) struct SuggEscapeToUseAsIdentifier {
+ #[primary_span]
+ pub span: Span,
+ pub ident_name: String,
+}
+
+#[derive(Subdiagnostic)]
+#[suggestion(parser_sugg_remove_comma, applicability = "machine-applicable", code = "")]
+pub(crate) struct SuggRemoveComma {
+ #[primary_span]
+ pub span: Span,
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum ExpectedIdentifierFound {
+ #[label(parser_expected_identifier_found_reserved_identifier)]
+ ReservedIdentifier(#[primary_span] Span),
+ #[label(parser_expected_identifier_found_keyword)]
+ Keyword(#[primary_span] Span),
+ #[label(parser_expected_identifier_found_reserved_keyword)]
+ ReservedKeyword(#[primary_span] Span),
+ #[label(parser_expected_identifier_found_doc_comment)]
+ DocComment(#[primary_span] Span),
+ #[label(parser_expected_identifier)]
+ Other(#[primary_span] Span),
+}
+
+impl ExpectedIdentifierFound {
+ pub fn new(token_descr: Option<TokenDescription>, span: Span) -> Self {
+ (match token_descr {
+ Some(TokenDescription::ReservedIdentifier) => {
+ ExpectedIdentifierFound::ReservedIdentifier
+ }
+ Some(TokenDescription::Keyword) => ExpectedIdentifierFound::Keyword,
+ Some(TokenDescription::ReservedKeyword) => ExpectedIdentifierFound::ReservedKeyword,
+ Some(TokenDescription::DocComment) => ExpectedIdentifierFound::DocComment,
+ None => ExpectedIdentifierFound::Other,
+ })(span)
+ }
+}
+
+pub(crate) struct ExpectedIdentifier {
+ pub span: Span,
+ pub token: Token,
+ pub suggest_raw: Option<SuggEscapeToUseAsIdentifier>,
+ pub suggest_remove_comma: Option<SuggRemoveComma>,
+}
+
+impl<'a, G: EmissionGuarantee> IntoDiagnostic<'a, G> for ExpectedIdentifier {
+ fn into_diagnostic(
+ self,
+ handler: &'a rustc_errors::Handler,
+ ) -> rustc_errors::DiagnosticBuilder<'a, G> {
+ let token_descr = super::parser::TokenDescription::from_token(&self.token);
+
+ let mut diag = handler.struct_diagnostic(match token_descr {
+ Some(TokenDescription::ReservedIdentifier) => {
+ fluent::parser_expected_identifier_found_reserved_identifier_str
+ }
+ Some(TokenDescription::Keyword) => fluent::parser_expected_identifier_found_keyword_str,
+ Some(TokenDescription::ReservedKeyword) => {
+ fluent::parser_expected_identifier_found_reserved_keyword_str
+ }
+ Some(TokenDescription::DocComment) => {
+ fluent::parser_expected_identifier_found_doc_comment_str
+ }
+ None => fluent::parser_expected_identifier_found_str,
+ });
+ diag.set_span(self.span);
+ diag.set_arg("token", self.token);
+
+ if let Some(sugg) = self.suggest_raw {
+ sugg.add_to_diagnostic(&mut diag);
+ }
+
+ ExpectedIdentifierFound::new(token_descr, self.span).add_to_diagnostic(&mut diag);
+
+ if let Some(sugg) = self.suggest_remove_comma {
+ sugg.add_to_diagnostic(&mut diag);
+ }
+
+ diag
+ }
+}
+
+pub(crate) struct ExpectedSemi {
+ pub span: Span,
+ pub token: Token,
+
+ pub unexpected_token_label: Option<Span>,
+ pub sugg: ExpectedSemiSugg,
+}
+
+impl<'a, G: EmissionGuarantee> IntoDiagnostic<'a, G> for ExpectedSemi {
+ fn into_diagnostic(
+ self,
+ handler: &'a rustc_errors::Handler,
+ ) -> rustc_errors::DiagnosticBuilder<'a, G> {
+ let token_descr = super::parser::TokenDescription::from_token(&self.token);
+
+ let mut diag = handler.struct_diagnostic(match token_descr {
+ Some(TokenDescription::ReservedIdentifier) => {
+ fluent::parser_expected_semi_found_reserved_identifier_str
+ }
+ Some(TokenDescription::Keyword) => fluent::parser_expected_semi_found_keyword_str,
+ Some(TokenDescription::ReservedKeyword) => {
+ fluent::parser_expected_semi_found_reserved_keyword_str
+ }
+ Some(TokenDescription::DocComment) => {
+ fluent::parser_expected_semi_found_doc_comment_str
+ }
+ None => fluent::parser_expected_semi_found_str,
+ });
+ diag.set_span(self.span);
+ diag.set_arg("token", self.token);
+
+ if let Some(unexpected_token_label) = self.unexpected_token_label {
+ diag.span_label(unexpected_token_label, fluent::parser_label_unexpected_token);
+ }
+
+ self.sugg.add_to_diagnostic(&mut diag);
+
+ diag
+ }
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum ExpectedSemiSugg {
+ #[suggestion(
+ parser_sugg_change_this_to_semi,
+ code = ";",
+ applicability = "machine-applicable"
+ )]
+ ChangeToSemi(#[primary_span] Span),
+ #[suggestion_short(parser_sugg_add_semi, code = ";", applicability = "machine-applicable")]
+ AddSemi(#[primary_span] Span),
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_struct_literal_body_without_path)]
+pub(crate) struct StructLiteralBodyWithoutPath {
+ #[primary_span]
+ pub span: Span,
+ #[subdiagnostic]
+ pub sugg: StructLiteralBodyWithoutPathSugg,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(suggestion, applicability = "has-placeholders")]
+pub(crate) struct StructLiteralBodyWithoutPathSugg {
+ #[suggestion_part(code = "{{ SomeStruct ")]
+ pub before: Span,
+ #[suggestion_part(code = " }}")]
+ pub after: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_unmatched_angle_brackets)]
+pub(crate) struct UnmatchedAngleBrackets {
+ #[primary_span]
+ #[suggestion(code = "", applicability = "machine-applicable")]
+ pub span: Span,
+ pub num_extra_brackets: usize,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_generic_parameters_without_angle_brackets)]
+pub(crate) struct GenericParamsWithoutAngleBrackets {
+ #[primary_span]
+ pub span: Span,
+ #[subdiagnostic]
+ pub sugg: GenericParamsWithoutAngleBracketsSugg,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(suggestion, applicability = "machine-applicable")]
+pub(crate) struct GenericParamsWithoutAngleBracketsSugg {
+ #[suggestion_part(code = "<")]
+ pub left: Span,
+ #[suggestion_part(code = ">")]
+ pub right: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_comparison_operators_cannot_be_chained)]
+pub(crate) struct ComparisonOperatorsCannotBeChained {
+ #[primary_span]
+ pub span: Vec<Span>,
+ #[suggestion_verbose(
+ parser_sugg_turbofish_syntax,
+ code = "::",
+ applicability = "maybe-incorrect"
+ )]
+ pub suggest_turbofish: Option<Span>,
+ #[help(parser_sugg_turbofish_syntax)]
+ #[help(sugg_parentheses_for_function_args)]
+ pub help_turbofish: Option<()>,
+ #[subdiagnostic]
+ pub chaining_sugg: Option<ComparisonOperatorsCannotBeChainedSugg>,
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum ComparisonOperatorsCannotBeChainedSugg {
+ #[suggestion_verbose(
+ sugg_split_comparison,
+ code = " && {middle_term}",
+ applicability = "maybe-incorrect"
+ )]
+ SplitComparison {
+ #[primary_span]
+ span: Span,
+ middle_term: String,
+ },
+ #[multipart_suggestion(sugg_parenthesize, applicability = "maybe-incorrect")]
+ Parenthesize {
+ #[suggestion_part(code = "(")]
+ left: Span,
+ #[suggestion_part(code = ")")]
+ right: Span,
+ },
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_question_mark_in_type)]
+pub(crate) struct QuestionMarkInType {
+ #[primary_span]
+ #[label]
+ pub span: Span,
+ #[subdiagnostic]
+ pub sugg: QuestionMarkInTypeSugg,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(suggestion, applicability = "machine-applicable")]
+pub(crate) struct QuestionMarkInTypeSugg {
+ #[suggestion_part(code = "Option<")]
+ pub left: Span,
+ #[suggestion_part(code = ">")]
+ pub right: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_unexpected_parentheses_in_for_head)]
+pub(crate) struct ParenthesesInForHead {
+ #[primary_span]
+ pub span: Vec<Span>,
+ #[subdiagnostic]
+ pub sugg: ParenthesesInForHeadSugg,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(suggestion, applicability = "machine-applicable")]
+pub(crate) struct ParenthesesInForHeadSugg {
+ #[suggestion_part(code = "")]
+ pub left: Span,
+ #[suggestion_part(code = "")]
+ pub right: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_doc_comment_on_param_type)]
+pub(crate) struct DocCommentOnParamType {
+ #[primary_span]
+ #[label]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_attribute_on_param_type)]
+pub(crate) struct AttributeOnParamType {
+ #[primary_span]
+ #[label]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_pattern_method_param_without_body, code = "E0642")]
+pub(crate) struct PatternMethodParamWithoutBody {
+ #[primary_span]
+ #[suggestion(code = "_", applicability = "machine-applicable")]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_self_param_not_first)]
+pub(crate) struct SelfParamNotFirst {
+ #[primary_span]
+ #[label]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_const_generic_without_braces)]
+pub(crate) struct ConstGenericWithoutBraces {
+ #[primary_span]
+ pub span: Span,
+ #[subdiagnostic]
+ pub sugg: ConstGenericWithoutBracesSugg,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(suggestion, applicability = "machine-applicable")]
+pub(crate) struct ConstGenericWithoutBracesSugg {
+ #[suggestion_part(code = "{{ ")]
+ pub left: Span,
+ #[suggestion_part(code = " }}")]
+ pub right: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_unexpected_const_param_declaration)]
+pub(crate) struct UnexpectedConstParamDeclaration {
+ #[primary_span]
+ #[label]
+ pub span: Span,
+ #[subdiagnostic]
+ pub sugg: Option<UnexpectedConstParamDeclarationSugg>,
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum UnexpectedConstParamDeclarationSugg {
+ #[multipart_suggestion(suggestion, applicability = "machine-applicable")]
+ AddParam {
+ #[suggestion_part(code = "<{snippet}>")]
+ impl_generics: Span,
+ #[suggestion_part(code = "{ident}")]
+ incorrect_decl: Span,
+ snippet: String,
+ ident: String,
+ },
+ #[multipart_suggestion(suggestion, applicability = "machine-applicable")]
+ AppendParam {
+ #[suggestion_part(code = ", {snippet}")]
+ impl_generics_end: Span,
+ #[suggestion_part(code = "{ident}")]
+ incorrect_decl: Span,
+ snippet: String,
+ ident: String,
+ },
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_unexpected_const_in_generic_param)]
+pub(crate) struct UnexpectedConstInGenericParam {
+ #[primary_span]
+ pub span: Span,
+ #[suggestion_verbose(code = "", applicability = "maybe-incorrect")]
+ pub to_remove: Option<Span>,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_async_move_order_incorrect)]
+pub(crate) struct AsyncMoveOrderIncorrect {
+ #[primary_span]
+ #[suggestion_verbose(code = "async move", applicability = "maybe-incorrect")]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parser_double_colon_in_bound)]
+pub(crate) struct DoubleColonInBound {
+ #[primary_span]
+ pub span: Span,
+ #[suggestion(code = ": ", applicability = "machine-applicable")]
+ pub between: Span,
+}
diff --git a/compiler/rustc_parse/src/lexer/mod.rs b/compiler/rustc_parse/src/lexer/mod.rs
index 63819a2f9..462bce16a 100644
--- a/compiler/rustc_parse/src/lexer/mod.rs
+++ b/compiler/rustc_parse/src/lexer/mod.rs
@@ -1,10 +1,13 @@
use crate::lexer::unicode_chars::UNICODE_ARRAY;
use rustc_ast::ast::{self, AttrStyle};
use rustc_ast::token::{self, CommentKind, Delimiter, Token, TokenKind};
-use rustc_ast::tokenstream::{Spacing, TokenStream};
+use rustc_ast::tokenstream::TokenStream;
use rustc_ast::util::unicode::contains_text_flow_control_chars;
-use rustc_errors::{error_code, Applicability, DiagnosticBuilder, ErrorGuaranteed, PResult};
+use rustc_errors::{
+ error_code, Applicability, DiagnosticBuilder, ErrorGuaranteed, PResult, StashKey,
+};
use rustc_lexer::unescape::{self, Mode};
+use rustc_lexer::Cursor;
use rustc_lexer::{Base, DocStyle, RawStrError};
use rustc_session::lint::builtin::{
RUST_2021_PREFIXES_INCOMPATIBLE_SYNTAX, TEXT_DIRECTION_CODEPOINT_IN_COMMENT,
@@ -38,11 +41,20 @@ pub struct UnmatchedBrace {
pub(crate) fn parse_token_trees<'a>(
sess: &'a ParseSess,
- src: &'a str,
- start_pos: BytePos,
+ mut src: &'a str,
+ mut start_pos: BytePos,
override_span: Option<Span>,
) -> (PResult<'a, TokenStream>, Vec<UnmatchedBrace>) {
- StringReader { sess, start_pos, pos: start_pos, src, override_span }.into_token_trees()
+ // Skip `#!`, if present.
+ if let Some(shebang_len) = rustc_lexer::strip_shebang(src) {
+ src = &src[shebang_len..];
+ start_pos = start_pos + BytePos::from_usize(shebang_len);
+ }
+
+ let cursor = Cursor::new(src);
+ let string_reader =
+ StringReader { sess, start_pos, pos: start_pos, src, cursor, override_span };
+ tokentrees::TokenTreesReader::parse_all_token_trees(string_reader)
}
struct StringReader<'a> {
@@ -53,6 +65,8 @@ struct StringReader<'a> {
pos: BytePos,
/// Source text to tokenize.
src: &'a str,
+ /// Cursor for getting lexer tokens.
+ cursor: Cursor<'a>,
override_span: Option<Span>,
}
@@ -61,42 +75,198 @@ impl<'a> StringReader<'a> {
self.override_span.unwrap_or_else(|| Span::with_root_ctxt(lo, hi))
}
- /// Returns the next token, and info about preceding whitespace, if any.
- fn next_token(&mut self) -> (Spacing, Token) {
- let mut spacing = Spacing::Joint;
-
- // Skip `#!` at the start of the file
- if self.pos == self.start_pos
- && let Some(shebang_len) = rustc_lexer::strip_shebang(self.src)
- {
- self.pos = self.pos + BytePos::from_usize(shebang_len);
- spacing = Spacing::Alone;
- }
+ /// Returns the next token, paired with a bool indicating if the token was
+ /// preceded by whitespace.
+ fn next_token(&mut self) -> (Token, bool) {
+ let mut preceded_by_whitespace = false;
// Skip trivial (whitespace & comments) tokens
loop {
- let start_src_index = self.src_index(self.pos);
- let text: &str = &self.src[start_src_index..];
-
- if text.is_empty() {
- let span = self.mk_sp(self.pos, self.pos);
- return (spacing, Token::new(token::Eof, span));
- }
-
- let token = rustc_lexer::first_token(text);
-
+ let token = self.cursor.advance_token();
let start = self.pos;
self.pos = self.pos + BytePos(token.len);
debug!("next_token: {:?}({:?})", token.kind, self.str_from(start));
- match self.cook_lexer_token(token.kind, start) {
- Some(kind) => {
+ // Now "cook" the token, converting the simple `rustc_lexer::TokenKind` enum into a
+ // rich `rustc_ast::TokenKind`. This turns strings into interned symbols and runs
+ // additional validation.
+ let kind = match token.kind {
+ rustc_lexer::TokenKind::LineComment { doc_style } => {
+ // Skip non-doc comments
+ let Some(doc_style) = doc_style else {
+ self.lint_unicode_text_flow(start);
+ preceded_by_whitespace = true;
+ continue;
+ };
+
+ // Opening delimiter of the length 3 is not included into the symbol.
+ let content_start = start + BytePos(3);
+ let content = self.str_from(content_start);
+ self.cook_doc_comment(content_start, content, CommentKind::Line, doc_style)
+ }
+ rustc_lexer::TokenKind::BlockComment { doc_style, terminated } => {
+ if !terminated {
+ self.report_unterminated_block_comment(start, doc_style);
+ }
+
+ // Skip non-doc comments
+ let Some(doc_style) = doc_style else {
+ self.lint_unicode_text_flow(start);
+ preceded_by_whitespace = true;
+ continue;
+ };
+
+ // Opening delimiter of the length 3 and closing delimiter of the length 2
+ // are not included into the symbol.
+ let content_start = start + BytePos(3);
+ let content_end = self.pos - BytePos(if terminated { 2 } else { 0 });
+ let content = self.str_from_to(content_start, content_end);
+ self.cook_doc_comment(content_start, content, CommentKind::Block, doc_style)
+ }
+ rustc_lexer::TokenKind::Whitespace => {
+ preceded_by_whitespace = true;
+ continue;
+ }
+ rustc_lexer::TokenKind::Ident => {
+ let sym = nfc_normalize(self.str_from(start));
let span = self.mk_sp(start, self.pos);
- return (spacing, Token::new(kind, span));
+ self.sess.symbol_gallery.insert(sym, span);
+ token::Ident(sym, false)
}
- None => spacing = Spacing::Alone,
- }
+ rustc_lexer::TokenKind::RawIdent => {
+ let sym = nfc_normalize(self.str_from(start + BytePos(2)));
+ let span = self.mk_sp(start, self.pos);
+ self.sess.symbol_gallery.insert(sym, span);
+ if !sym.can_be_raw() {
+ self.err_span(span, &format!("`{}` cannot be a raw identifier", sym));
+ }
+ self.sess.raw_identifier_spans.borrow_mut().push(span);
+ token::Ident(sym, true)
+ }
+ rustc_lexer::TokenKind::UnknownPrefix => {
+ self.report_unknown_prefix(start);
+ let sym = nfc_normalize(self.str_from(start));
+ let span = self.mk_sp(start, self.pos);
+ self.sess.symbol_gallery.insert(sym, span);
+ token::Ident(sym, false)
+ }
+ rustc_lexer::TokenKind::InvalidIdent
+ // Do not recover an identifier with emoji if the codepoint is a confusable
+ // with a recoverable substitution token, like `âž–`.
+ if !UNICODE_ARRAY
+ .iter()
+ .any(|&(c, _, _)| {
+ let sym = self.str_from(start);
+ sym.chars().count() == 1 && c == sym.chars().next().unwrap()
+ }) =>
+ {
+ let sym = nfc_normalize(self.str_from(start));
+ let span = self.mk_sp(start, self.pos);
+ self.sess.bad_unicode_identifiers.borrow_mut().entry(sym).or_default()
+ .push(span);
+ token::Ident(sym, false)
+ }
+ rustc_lexer::TokenKind::Literal { kind, suffix_start } => {
+ let suffix_start = start + BytePos(suffix_start);
+ let (kind, symbol) = self.cook_lexer_literal(start, suffix_start, kind);
+ let suffix = if suffix_start < self.pos {
+ let string = self.str_from(suffix_start);
+ if string == "_" {
+ self.sess
+ .span_diagnostic
+ .struct_span_warn(
+ self.mk_sp(suffix_start, self.pos),
+ "underscore literal suffix is not allowed",
+ )
+ .warn(
+ "this was previously accepted by the compiler but is \
+ being phased out; it will become a hard error in \
+ a future release!",
+ )
+ .note(
+ "see issue #42326 \
+ <https://github.com/rust-lang/rust/issues/42326> \
+ for more information",
+ )
+ .emit();
+ None
+ } else {
+ Some(Symbol::intern(string))
+ }
+ } else {
+ None
+ };
+ token::Literal(token::Lit { kind, symbol, suffix })
+ }
+ rustc_lexer::TokenKind::Lifetime { starts_with_number } => {
+ // Include the leading `'` in the real identifier, for macro
+ // expansion purposes. See #12512 for the gory details of why
+ // this is necessary.
+ let lifetime_name = self.str_from(start);
+ if starts_with_number {
+ let span = self.mk_sp(start, self.pos);
+ let mut diag = self.sess.struct_err("lifetimes cannot start with a number");
+ diag.set_span(span);
+ diag.stash(span, StashKey::LifetimeIsChar);
+ }
+ let ident = Symbol::intern(lifetime_name);
+ token::Lifetime(ident)
+ }
+ rustc_lexer::TokenKind::Semi => token::Semi,
+ rustc_lexer::TokenKind::Comma => token::Comma,
+ rustc_lexer::TokenKind::Dot => token::Dot,
+ rustc_lexer::TokenKind::OpenParen => token::OpenDelim(Delimiter::Parenthesis),
+ rustc_lexer::TokenKind::CloseParen => token::CloseDelim(Delimiter::Parenthesis),
+ rustc_lexer::TokenKind::OpenBrace => token::OpenDelim(Delimiter::Brace),
+ rustc_lexer::TokenKind::CloseBrace => token::CloseDelim(Delimiter::Brace),
+ rustc_lexer::TokenKind::OpenBracket => token::OpenDelim(Delimiter::Bracket),
+ rustc_lexer::TokenKind::CloseBracket => token::CloseDelim(Delimiter::Bracket),
+ rustc_lexer::TokenKind::At => token::At,
+ rustc_lexer::TokenKind::Pound => token::Pound,
+ rustc_lexer::TokenKind::Tilde => token::Tilde,
+ rustc_lexer::TokenKind::Question => token::Question,
+ rustc_lexer::TokenKind::Colon => token::Colon,
+ rustc_lexer::TokenKind::Dollar => token::Dollar,
+ rustc_lexer::TokenKind::Eq => token::Eq,
+ rustc_lexer::TokenKind::Bang => token::Not,
+ rustc_lexer::TokenKind::Lt => token::Lt,
+ rustc_lexer::TokenKind::Gt => token::Gt,
+ rustc_lexer::TokenKind::Minus => token::BinOp(token::Minus),
+ rustc_lexer::TokenKind::And => token::BinOp(token::And),
+ rustc_lexer::TokenKind::Or => token::BinOp(token::Or),
+ rustc_lexer::TokenKind::Plus => token::BinOp(token::Plus),
+ rustc_lexer::TokenKind::Star => token::BinOp(token::Star),
+ rustc_lexer::TokenKind::Slash => token::BinOp(token::Slash),
+ rustc_lexer::TokenKind::Caret => token::BinOp(token::Caret),
+ rustc_lexer::TokenKind::Percent => token::BinOp(token::Percent),
+
+ rustc_lexer::TokenKind::Unknown | rustc_lexer::TokenKind::InvalidIdent => {
+ let c = self.str_from(start).chars().next().unwrap();
+ let mut err =
+ self.struct_err_span_char(start, self.pos, "unknown start of token", c);
+ // FIXME: the lexer could be used to turn the ASCII version of unicode
+ // homoglyphs, instead of keeping a table in `check_for_substitution`into the
+ // token. Ideally, this should be inside `rustc_lexer`. However, we should
+ // first remove compound tokens like `<<` from `rustc_lexer`, and then add
+ // fancier error recovery to it, as there will be less overall work to do this
+ // way.
+ let token = unicode_chars::check_for_substitution(self, start, c, &mut err);
+ if c == '\x00' {
+ err.help("source files must contain UTF-8 encoded text, unexpected null bytes might occur when a different encoding is used");
+ }
+ err.emit();
+ if let Some(token) = token {
+ token
+ } else {
+ preceded_by_whitespace = true;
+ continue;
+ }
+ }
+ rustc_lexer::TokenKind::Eof => token::Eof,
+ };
+ let span = self.mk_sp(start, self.pos);
+ return (Token::new(kind, span), preceded_by_whitespace);
}
}
@@ -162,171 +332,6 @@ impl<'a> StringReader<'a> {
}
}
- /// Turns simple `rustc_lexer::TokenKind` enum into a rich
- /// `rustc_ast::TokenKind`. This turns strings into interned
- /// symbols and runs additional validation.
- fn cook_lexer_token(&self, token: rustc_lexer::TokenKind, start: BytePos) -> Option<TokenKind> {
- Some(match token {
- rustc_lexer::TokenKind::LineComment { doc_style } => {
- // Skip non-doc comments
- let Some(doc_style) = doc_style else {
- self.lint_unicode_text_flow(start);
- return None;
- };
-
- // Opening delimiter of the length 3 is not included into the symbol.
- let content_start = start + BytePos(3);
- let content = self.str_from(content_start);
- self.cook_doc_comment(content_start, content, CommentKind::Line, doc_style)
- }
- rustc_lexer::TokenKind::BlockComment { doc_style, terminated } => {
- if !terminated {
- self.report_unterminated_block_comment(start, doc_style);
- }
-
- // Skip non-doc comments
- let Some(doc_style) = doc_style else {
- self.lint_unicode_text_flow(start);
- return None;
- };
-
- // Opening delimiter of the length 3 and closing delimiter of the length 2
- // are not included into the symbol.
- let content_start = start + BytePos(3);
- let content_end = self.pos - BytePos(if terminated { 2 } else { 0 });
- let content = self.str_from_to(content_start, content_end);
- self.cook_doc_comment(content_start, content, CommentKind::Block, doc_style)
- }
- rustc_lexer::TokenKind::Whitespace => return None,
- rustc_lexer::TokenKind::Ident
- | rustc_lexer::TokenKind::RawIdent
- | rustc_lexer::TokenKind::UnknownPrefix => {
- let is_raw_ident = token == rustc_lexer::TokenKind::RawIdent;
- let is_unknown_prefix = token == rustc_lexer::TokenKind::UnknownPrefix;
- let mut ident_start = start;
- if is_raw_ident {
- ident_start = ident_start + BytePos(2);
- }
- if is_unknown_prefix {
- self.report_unknown_prefix(start);
- }
- let sym = nfc_normalize(self.str_from(ident_start));
- let span = self.mk_sp(start, self.pos);
- self.sess.symbol_gallery.insert(sym, span);
- if is_raw_ident {
- if !sym.can_be_raw() {
- self.err_span(span, &format!("`{}` cannot be a raw identifier", sym));
- }
- self.sess.raw_identifier_spans.borrow_mut().push(span);
- }
- token::Ident(sym, is_raw_ident)
- }
- rustc_lexer::TokenKind::InvalidIdent
- // Do not recover an identifier with emoji if the codepoint is a confusable
- // with a recoverable substitution token, like `âž–`.
- if !UNICODE_ARRAY
- .iter()
- .any(|&(c, _, _)| {
- let sym = self.str_from(start);
- sym.chars().count() == 1 && c == sym.chars().next().unwrap()
- })
- =>
- {
- let sym = nfc_normalize(self.str_from(start));
- let span = self.mk_sp(start, self.pos);
- self.sess.bad_unicode_identifiers.borrow_mut().entry(sym).or_default().push(span);
- token::Ident(sym, false)
- }
- rustc_lexer::TokenKind::Literal { kind, suffix_start } => {
- let suffix_start = start + BytePos(suffix_start);
- let (kind, symbol) = self.cook_lexer_literal(start, suffix_start, kind);
- let suffix = if suffix_start < self.pos {
- let string = self.str_from(suffix_start);
- if string == "_" {
- self.sess
- .span_diagnostic
- .struct_span_warn(
- self.mk_sp(suffix_start, self.pos),
- "underscore literal suffix is not allowed",
- )
- .warn(
- "this was previously accepted by the compiler but is \
- being phased out; it will become a hard error in \
- a future release!",
- )
- .note(
- "see issue #42326 \
- <https://github.com/rust-lang/rust/issues/42326> \
- for more information",
- )
- .emit();
- None
- } else {
- Some(Symbol::intern(string))
- }
- } else {
- None
- };
- token::Literal(token::Lit { kind, symbol, suffix })
- }
- rustc_lexer::TokenKind::Lifetime { starts_with_number } => {
- // Include the leading `'` in the real identifier, for macro
- // expansion purposes. See #12512 for the gory details of why
- // this is necessary.
- let lifetime_name = self.str_from(start);
- if starts_with_number {
- self.err_span_(start, self.pos, "lifetimes cannot start with a number");
- }
- let ident = Symbol::intern(lifetime_name);
- token::Lifetime(ident)
- }
- rustc_lexer::TokenKind::Semi => token::Semi,
- rustc_lexer::TokenKind::Comma => token::Comma,
- rustc_lexer::TokenKind::Dot => token::Dot,
- rustc_lexer::TokenKind::OpenParen => token::OpenDelim(Delimiter::Parenthesis),
- rustc_lexer::TokenKind::CloseParen => token::CloseDelim(Delimiter::Parenthesis),
- rustc_lexer::TokenKind::OpenBrace => token::OpenDelim(Delimiter::Brace),
- rustc_lexer::TokenKind::CloseBrace => token::CloseDelim(Delimiter::Brace),
- rustc_lexer::TokenKind::OpenBracket => token::OpenDelim(Delimiter::Bracket),
- rustc_lexer::TokenKind::CloseBracket => token::CloseDelim(Delimiter::Bracket),
- rustc_lexer::TokenKind::At => token::At,
- rustc_lexer::TokenKind::Pound => token::Pound,
- rustc_lexer::TokenKind::Tilde => token::Tilde,
- rustc_lexer::TokenKind::Question => token::Question,
- rustc_lexer::TokenKind::Colon => token::Colon,
- rustc_lexer::TokenKind::Dollar => token::Dollar,
- rustc_lexer::TokenKind::Eq => token::Eq,
- rustc_lexer::TokenKind::Bang => token::Not,
- rustc_lexer::TokenKind::Lt => token::Lt,
- rustc_lexer::TokenKind::Gt => token::Gt,
- rustc_lexer::TokenKind::Minus => token::BinOp(token::Minus),
- rustc_lexer::TokenKind::And => token::BinOp(token::And),
- rustc_lexer::TokenKind::Or => token::BinOp(token::Or),
- rustc_lexer::TokenKind::Plus => token::BinOp(token::Plus),
- rustc_lexer::TokenKind::Star => token::BinOp(token::Star),
- rustc_lexer::TokenKind::Slash => token::BinOp(token::Slash),
- rustc_lexer::TokenKind::Caret => token::BinOp(token::Caret),
- rustc_lexer::TokenKind::Percent => token::BinOp(token::Percent),
-
- rustc_lexer::TokenKind::Unknown | rustc_lexer::TokenKind::InvalidIdent => {
- let c = self.str_from(start).chars().next().unwrap();
- let mut err =
- self.struct_err_span_char(start, self.pos, "unknown start of token", c);
- // FIXME: the lexer could be used to turn the ASCII version of unicode homoglyphs,
- // instead of keeping a table in `check_for_substitution`into the token. Ideally,
- // this should be inside `rustc_lexer`. However, we should first remove compound
- // tokens like `<<` from `rustc_lexer`, and then add fancier error recovery to it,
- // as there will be less overall work to do this way.
- let token = unicode_chars::check_for_substitution(self, start, c, &mut err);
- if c == '\x00' {
- err.help("source files must contain UTF-8 encoded text, unexpected null bytes might occur when a different encoding is used");
- }
- err.emit();
- token?
- }
- })
- }
-
fn cook_doc_comment(
&self,
content_start: BytePos,
diff --git a/compiler/rustc_parse/src/lexer/tokentrees.rs b/compiler/rustc_parse/src/lexer/tokentrees.rs
index aa70912dc..b2701817d 100644
--- a/compiler/rustc_parse/src/lexer/tokentrees.rs
+++ b/compiler/rustc_parse/src/lexer/tokentrees.rs
@@ -1,31 +1,15 @@
use super::{StringReader, UnmatchedBrace};
-
use rustc_ast::token::{self, Delimiter, Token};
use rustc_ast::tokenstream::{DelimSpan, Spacing, TokenStream, TokenTree};
use rustc_ast_pretty::pprust::token_to_string;
use rustc_data_structures::fx::FxHashMap;
-use rustc_errors::PResult;
+use rustc_errors::{PErr, PResult};
use rustc_span::Span;
-impl<'a> StringReader<'a> {
- pub(super) fn into_token_trees(self) -> (PResult<'a, TokenStream>, Vec<UnmatchedBrace>) {
- let mut tt_reader = TokenTreesReader {
- string_reader: self,
- token: Token::dummy(),
- open_braces: Vec::new(),
- unmatched_braces: Vec::new(),
- matching_delim_spans: Vec::new(),
- last_unclosed_found_span: None,
- last_delim_empty_block_spans: FxHashMap::default(),
- matching_block_spans: Vec::new(),
- };
- let res = tt_reader.parse_all_token_trees();
- (res, tt_reader.unmatched_braces)
- }
-}
-
-struct TokenTreesReader<'a> {
+pub(super) struct TokenTreesReader<'a> {
string_reader: StringReader<'a>,
+ /// The "next" token, which has been obtained from the `StringReader` but
+ /// not yet handled by the `TokenTreesReader`.
token: Token,
/// Stack of open delimiters and their spans. Used for error message.
open_braces: Vec<(Delimiter, Span)>,
@@ -43,254 +27,232 @@ struct TokenTreesReader<'a> {
}
impl<'a> TokenTreesReader<'a> {
- // Parse a stream of tokens into a list of `TokenTree`s, up to an `Eof`.
- fn parse_all_token_trees(&mut self) -> PResult<'a, TokenStream> {
- let mut buf = TokenStreamBuilder::default();
-
- self.bump();
- while self.token != token::Eof {
- buf.push(self.parse_token_tree()?);
- }
-
- Ok(buf.into_token_stream())
+ pub(super) fn parse_all_token_trees(
+ string_reader: StringReader<'a>,
+ ) -> (PResult<'a, TokenStream>, Vec<UnmatchedBrace>) {
+ let mut tt_reader = TokenTreesReader {
+ string_reader,
+ token: Token::dummy(),
+ open_braces: Vec::new(),
+ unmatched_braces: Vec::new(),
+ matching_delim_spans: Vec::new(),
+ last_unclosed_found_span: None,
+ last_delim_empty_block_spans: FxHashMap::default(),
+ matching_block_spans: Vec::new(),
+ };
+ let res = tt_reader.parse_token_trees(/* is_delimited */ false);
+ (res, tt_reader.unmatched_braces)
}
- // Parse a stream of tokens into a list of `TokenTree`s, up to a `CloseDelim`.
- fn parse_token_trees_until_close_delim(&mut self) -> TokenStream {
- let mut buf = TokenStreamBuilder::default();
+ // Parse a stream of tokens into a list of `TokenTree`s.
+ fn parse_token_trees(&mut self, is_delimited: bool) -> PResult<'a, TokenStream> {
+ self.token = self.string_reader.next_token().0;
+ let mut buf = Vec::new();
loop {
- if let token::CloseDelim(..) = self.token.kind {
- return buf.into_token_stream();
- }
-
- match self.parse_token_tree() {
- Ok(tree) => buf.push(tree),
- Err(mut e) => {
- e.emit();
- return buf.into_token_stream();
+ match self.token.kind {
+ token::OpenDelim(delim) => buf.push(self.parse_token_tree_open_delim(delim)),
+ token::CloseDelim(delim) => {
+ return if is_delimited {
+ Ok(TokenStream::new(buf))
+ } else {
+ Err(self.close_delim_err(delim))
+ };
+ }
+ token::Eof => {
+ if is_delimited {
+ self.eof_err().emit();
+ }
+ return Ok(TokenStream::new(buf));
+ }
+ _ => {
+ // Get the next normal token. This might require getting multiple adjacent
+ // single-char tokens and joining them together.
+ let (this_spacing, next_tok) = loop {
+ let (next_tok, is_next_tok_preceded_by_whitespace) =
+ self.string_reader.next_token();
+ if !is_next_tok_preceded_by_whitespace {
+ if let Some(glued) = self.token.glue(&next_tok) {
+ self.token = glued;
+ } else {
+ let this_spacing =
+ if next_tok.is_op() { Spacing::Joint } else { Spacing::Alone };
+ break (this_spacing, next_tok);
+ }
+ } else {
+ break (Spacing::Alone, next_tok);
+ }
+ };
+ let this_tok = std::mem::replace(&mut self.token, next_tok);
+ buf.push(TokenTree::Token(this_tok, this_spacing));
}
}
}
}
- fn parse_token_tree(&mut self) -> PResult<'a, TokenTree> {
- let sm = self.string_reader.sess.source_map();
-
- match self.token.kind {
- token::Eof => {
- let msg = "this file contains an unclosed delimiter";
- let mut err =
- self.string_reader.sess.span_diagnostic.struct_span_err(self.token.span, msg);
- for &(_, sp) in &self.open_braces {
- err.span_label(sp, "unclosed delimiter");
- self.unmatched_braces.push(UnmatchedBrace {
- expected_delim: Delimiter::Brace,
- found_delim: None,
- found_span: self.token.span,
- unclosed_span: Some(sp),
- candidate_span: None,
- });
- }
+ fn eof_err(&mut self) -> PErr<'a> {
+ let msg = "this file contains an unclosed delimiter";
+ let mut err = self.string_reader.sess.span_diagnostic.struct_span_err(self.token.span, msg);
+ for &(_, sp) in &self.open_braces {
+ err.span_label(sp, "unclosed delimiter");
+ self.unmatched_braces.push(UnmatchedBrace {
+ expected_delim: Delimiter::Brace,
+ found_delim: None,
+ found_span: self.token.span,
+ unclosed_span: Some(sp),
+ candidate_span: None,
+ });
+ }
- if let Some((delim, _)) = self.open_braces.last() {
- if let Some((_, open_sp, close_sp)) =
- self.matching_delim_spans.iter().find(|(d, open_sp, close_sp)| {
- if let Some(close_padding) = sm.span_to_margin(*close_sp) {
- if let Some(open_padding) = sm.span_to_margin(*open_sp) {
- return delim == d && close_padding != open_padding;
- }
- }
- false
- })
- // these are in reverse order as they get inserted on close, but
- {
- // we want the last open/first close
- err.span_label(*open_sp, "this delimiter might not be properly closed...");
- err.span_label(
- *close_sp,
- "...as it matches this but it has different indentation",
- );
+ if let Some((delim, _)) = self.open_braces.last() {
+ if let Some((_, open_sp, close_sp)) =
+ self.matching_delim_spans.iter().find(|(d, open_sp, close_sp)| {
+ let sm = self.string_reader.sess.source_map();
+ if let Some(close_padding) = sm.span_to_margin(*close_sp) {
+ if let Some(open_padding) = sm.span_to_margin(*open_sp) {
+ return delim == d && close_padding != open_padding;
+ }
}
- }
- Err(err)
+ false
+ })
+ // these are in reverse order as they get inserted on close, but
+ {
+ // we want the last open/first close
+ err.span_label(*open_sp, "this delimiter might not be properly closed...");
+ err.span_label(*close_sp, "...as it matches this but it has different indentation");
}
- token::OpenDelim(delim) => {
- // The span for beginning of the delimited section
- let pre_span = self.token.span;
-
- // Parse the open delimiter.
- self.open_braces.push((delim, self.token.span));
- self.bump();
+ }
+ err
+ }
- // Parse the token trees within the delimiters.
- // We stop at any delimiter so we can try to recover if the user
- // uses an incorrect delimiter.
- let tts = self.parse_token_trees_until_close_delim();
+ fn parse_token_tree_open_delim(&mut self, open_delim: Delimiter) -> TokenTree {
+ // The span for beginning of the delimited section
+ let pre_span = self.token.span;
- // Expand to cover the entire delimited token tree
- let delim_span = DelimSpan::from_pair(pre_span, self.token.span);
+ self.open_braces.push((open_delim, self.token.span));
- match self.token.kind {
- // Correct delimiter.
- token::CloseDelim(d) if d == delim => {
- let (open_brace, open_brace_span) = self.open_braces.pop().unwrap();
- let close_brace_span = self.token.span;
+ // Parse the token trees within the delimiters.
+ // We stop at any delimiter so we can try to recover if the user
+ // uses an incorrect delimiter.
+ let tts = self.parse_token_trees(/* is_delimited */ true).unwrap();
- if tts.is_empty() {
- let empty_block_span = open_brace_span.to(close_brace_span);
- if !sm.is_multiline(empty_block_span) {
- // Only track if the block is in the form of `{}`, otherwise it is
- // likely that it was written on purpose.
- self.last_delim_empty_block_spans.insert(delim, empty_block_span);
- }
- }
+ // Expand to cover the entire delimited token tree
+ let delim_span = DelimSpan::from_pair(pre_span, self.token.span);
- //only add braces
- if let (Delimiter::Brace, Delimiter::Brace) = (open_brace, delim) {
- self.matching_block_spans.push((open_brace_span, close_brace_span));
- }
+ match self.token.kind {
+ // Correct delimiter.
+ token::CloseDelim(close_delim) if close_delim == open_delim => {
+ let (open_brace, open_brace_span) = self.open_braces.pop().unwrap();
+ let close_brace_span = self.token.span;
- if self.open_braces.is_empty() {
- // Clear up these spans to avoid suggesting them as we've found
- // properly matched delimiters so far for an entire block.
- self.matching_delim_spans.clear();
- } else {
- self.matching_delim_spans.push((
- open_brace,
- open_brace_span,
- close_brace_span,
- ));
- }
- // Parse the closing delimiter.
- self.bump();
+ if tts.is_empty() {
+ let empty_block_span = open_brace_span.to(close_brace_span);
+ let sm = self.string_reader.sess.source_map();
+ if !sm.is_multiline(empty_block_span) {
+ // Only track if the block is in the form of `{}`, otherwise it is
+ // likely that it was written on purpose.
+ self.last_delim_empty_block_spans.insert(open_delim, empty_block_span);
}
- // Incorrect delimiter.
- token::CloseDelim(other) => {
- let mut unclosed_delimiter = None;
- let mut candidate = None;
-
- if self.last_unclosed_found_span != Some(self.token.span) {
- // do not complain about the same unclosed delimiter multiple times
- self.last_unclosed_found_span = Some(self.token.span);
- // This is a conservative error: only report the last unclosed
- // delimiter. The previous unclosed delimiters could actually be
- // closed! The parser just hasn't gotten to them yet.
- if let Some(&(_, sp)) = self.open_braces.last() {
- unclosed_delimiter = Some(sp);
- };
- if let Some(current_padding) = sm.span_to_margin(self.token.span) {
- for (brace, brace_span) in &self.open_braces {
- if let Some(padding) = sm.span_to_margin(*brace_span) {
- // high likelihood of these two corresponding
- if current_padding == padding && brace == &other {
- candidate = Some(*brace_span);
- }
- }
- }
- }
- let (tok, _) = self.open_braces.pop().unwrap();
- self.unmatched_braces.push(UnmatchedBrace {
- expected_delim: tok,
- found_delim: Some(other),
- found_span: self.token.span,
- unclosed_span: unclosed_delimiter,
- candidate_span: candidate,
- });
- } else {
- self.open_braces.pop();
- }
+ }
- // If the incorrect delimiter matches an earlier opening
- // delimiter, then don't consume it (it can be used to
- // close the earlier one). Otherwise, consume it.
- // E.g., we try to recover from:
- // fn foo() {
- // bar(baz(
- // } // Incorrect delimiter but matches the earlier `{`
- if !self.open_braces.iter().any(|&(b, _)| b == other) {
- self.bump();
- }
- }
- token::Eof => {
- // Silently recover, the EOF token will be seen again
- // and an error emitted then. Thus we don't pop from
- // self.open_braces here.
- }
- _ => {}
+ //only add braces
+ if let (Delimiter::Brace, Delimiter::Brace) = (open_brace, open_delim) {
+ self.matching_block_spans.push((open_brace_span, close_brace_span));
}
- Ok(TokenTree::Delimited(delim_span, delim, tts))
+ if self.open_braces.is_empty() {
+ // Clear up these spans to avoid suggesting them as we've found
+ // properly matched delimiters so far for an entire block.
+ self.matching_delim_spans.clear();
+ } else {
+ self.matching_delim_spans.push((open_brace, open_brace_span, close_brace_span));
+ }
+ // Move past the closing delimiter.
+ self.token = self.string_reader.next_token().0;
}
- token::CloseDelim(delim) => {
- // An unexpected closing delimiter (i.e., there is no
- // matching opening delimiter).
- let token_str = token_to_string(&self.token);
- let msg = format!("unexpected closing delimiter: `{}`", token_str);
- let mut err =
- self.string_reader.sess.span_diagnostic.struct_span_err(self.token.span, &msg);
+ // Incorrect delimiter.
+ token::CloseDelim(close_delim) => {
+ let mut unclosed_delimiter = None;
+ let mut candidate = None;
- // Braces are added at the end, so the last element is the biggest block
- if let Some(parent) = self.matching_block_spans.last() {
- if let Some(span) = self.last_delim_empty_block_spans.remove(&delim) {
- // Check if the (empty block) is in the last properly closed block
- if (parent.0.to(parent.1)).contains(span) {
- err.span_label(
- span,
- "block is empty, you might have not meant to close it",
- );
- } else {
- err.span_label(parent.0, "this opening brace...");
-
- err.span_label(parent.1, "...matches this closing brace");
+ if self.last_unclosed_found_span != Some(self.token.span) {
+ // do not complain about the same unclosed delimiter multiple times
+ self.last_unclosed_found_span = Some(self.token.span);
+ // This is a conservative error: only report the last unclosed
+ // delimiter. The previous unclosed delimiters could actually be
+ // closed! The parser just hasn't gotten to them yet.
+ if let Some(&(_, sp)) = self.open_braces.last() {
+ unclosed_delimiter = Some(sp);
+ };
+ let sm = self.string_reader.sess.source_map();
+ if let Some(current_padding) = sm.span_to_margin(self.token.span) {
+ for (brace, brace_span) in &self.open_braces {
+ if let Some(padding) = sm.span_to_margin(*brace_span) {
+ // high likelihood of these two corresponding
+ if current_padding == padding && brace == &close_delim {
+ candidate = Some(*brace_span);
+ }
+ }
}
- } else {
- err.span_label(parent.0, "this opening brace...");
-
- err.span_label(parent.1, "...matches this closing brace");
}
+ let (tok, _) = self.open_braces.pop().unwrap();
+ self.unmatched_braces.push(UnmatchedBrace {
+ expected_delim: tok,
+ found_delim: Some(close_delim),
+ found_span: self.token.span,
+ unclosed_span: unclosed_delimiter,
+ candidate_span: candidate,
+ });
+ } else {
+ self.open_braces.pop();
}
- err.span_label(self.token.span, "unexpected closing delimiter");
- Err(err)
- }
- _ => {
- let tok = self.token.take();
- let mut spacing = self.bump();
- if !self.token.is_op() {
- spacing = Spacing::Alone;
+ // If the incorrect delimiter matches an earlier opening
+ // delimiter, then don't consume it (it can be used to
+ // close the earlier one). Otherwise, consume it.
+ // E.g., we try to recover from:
+ // fn foo() {
+ // bar(baz(
+ // } // Incorrect delimiter but matches the earlier `{`
+ if !self.open_braces.iter().any(|&(b, _)| b == close_delim) {
+ self.token = self.string_reader.next_token().0;
}
- Ok(TokenTree::Token(tok, spacing))
}
+ token::Eof => {
+ // Silently recover, the EOF token will be seen again
+ // and an error emitted then. Thus we don't pop from
+ // self.open_braces here.
+ }
+ _ => unreachable!(),
}
- }
- fn bump(&mut self) -> Spacing {
- let (spacing, token) = self.string_reader.next_token();
- self.token = token;
- spacing
+ TokenTree::Delimited(delim_span, open_delim, tts)
}
-}
-#[derive(Default)]
-struct TokenStreamBuilder {
- buf: Vec<TokenTree>,
-}
+ fn close_delim_err(&mut self, delim: Delimiter) -> PErr<'a> {
+ // An unexpected closing delimiter (i.e., there is no
+ // matching opening delimiter).
+ let token_str = token_to_string(&self.token);
+ let msg = format!("unexpected closing delimiter: `{}`", token_str);
+ let mut err =
+ self.string_reader.sess.span_diagnostic.struct_span_err(self.token.span, &msg);
-impl TokenStreamBuilder {
- #[inline(always)]
- fn push(&mut self, tree: TokenTree) {
- if let Some(TokenTree::Token(prev_token, Spacing::Joint)) = self.buf.last()
- && let TokenTree::Token(token, joint) = &tree
- && let Some(glued) = prev_token.glue(token)
- {
- self.buf.pop();
- self.buf.push(TokenTree::Token(glued, *joint));
- } else {
- self.buf.push(tree)
+ // Braces are added at the end, so the last element is the biggest block
+ if let Some(parent) = self.matching_block_spans.last() {
+ if let Some(span) = self.last_delim_empty_block_spans.remove(&delim) {
+ // Check if the (empty block) is in the last properly closed block
+ if (parent.0.to(parent.1)).contains(span) {
+ err.span_label(span, "block is empty, you might have not meant to close it");
+ } else {
+ err.span_label(parent.0, "this opening brace...");
+ err.span_label(parent.1, "...matches this closing brace");
+ }
+ } else {
+ err.span_label(parent.0, "this opening brace...");
+ err.span_label(parent.1, "...matches this closing brace");
+ }
}
- }
- fn into_token_stream(self) -> TokenStream {
- TokenStream::new(self.buf)
+ err.span_label(self.token.span, "unexpected closing delimiter");
+ err
}
}
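
Note on the rewritten loop above: it now folds adjacent single-character operator tokens into one token whenever no whitespace separates them (via Token::glue), work that previously happened in TokenStreamBuilder::push. The following is a minimal, self-contained sketch of that gluing idea; the Tok enum and the glue/glue_all functions are simplified stand-ins invented for illustration, not rustc's actual Token API.

// Hypothetical simplified tokens; rustc's real Token::glue covers many more cases.
#[derive(Clone, Copy, Debug, PartialEq)]
enum Tok {
    Lt,   // `<`
    Eq,   // `=`
    Shl,  // `<<`
    Le,   // `<=`
    EqEq, // `==`
}

// Try to combine two tokens that were lexed separately.
fn glue(a: Tok, b: Tok) -> Option<Tok> {
    match (a, b) {
        (Tok::Lt, Tok::Lt) => Some(Tok::Shl),
        (Tok::Lt, Tok::Eq) => Some(Tok::Le),
        (Tok::Eq, Tok::Eq) => Some(Tok::EqEq),
        _ => None,
    }
}

// Input mirrors the (token, preceded_by_whitespace) shape returned by
// StringReader::next_token in the diff above.
fn glue_all(input: &[(Tok, bool)]) -> Vec<Tok> {
    let mut out: Vec<Tok> = Vec::new();
    for &(tok, preceded_by_whitespace) in input {
        if !preceded_by_whitespace {
            if let Some(&prev) = out.last() {
                if let Some(glued) = glue(prev, tok) {
                    // Same effect as `self.token = glued` in the loop above.
                    *out.last_mut().unwrap() = glued;
                    continue;
                }
            }
        }
        out.push(tok);
    }
    out
}

fn main() {
    // `<` directly followed by `<` glues to `<<`; with whitespace between, it stays split.
    assert_eq!(glue_all(&[(Tok::Lt, false), (Tok::Lt, false)]), vec![Tok::Shl]);
    assert_eq!(glue_all(&[(Tok::Lt, false), (Tok::Lt, true)]), vec![Tok::Lt, Tok::Lt]);
}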
diff --git a/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs b/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs
index 77c4fadab..f075de714 100644
--- a/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs
+++ b/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs
@@ -113,11 +113,26 @@ pub(crate) fn emit_unescape_error(
} else {
("", "if you meant to write a `str` literal, use double quotes")
};
-
+ let mut escaped = String::with_capacity(lit.len());
+ let mut chrs = lit.chars().peekable();
+ while let Some(first) = chrs.next() {
+ match (first, chrs.peek()) {
+ ('\\', Some('"')) => {
+ escaped.push('\\');
+ escaped.push('"');
+ chrs.next();
+ }
+ ('"', _) => {
+ escaped.push('\\');
+ escaped.push('"')
+ }
+ (c, _) => escaped.push(c),
+ };
+ }
handler.span_suggestion(
span_with_quotes,
msg,
- format!("{}\"{}\"", prefix, lit),
+ format!("{prefix}\"{escaped}\""),
Applicability::MachineApplicable,
);
}
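
Note on the hunk above: it escapes any bare double quote in the literal (while leaving an already-escaped \" alone), presumably so the "use double quotes" suggestion stays machine-applicable even when the literal itself contains quotes. Here is the same loop lifted into a free function over std types so it can be compiled and run on its own; the function name is mine.

fn escape_for_str_suggestion(lit: &str) -> String {
    let mut escaped = String::with_capacity(lit.len());
    let mut chrs = lit.chars().peekable();
    while let Some(first) = chrs.next() {
        match (first, chrs.peek()) {
            // Keep an existing `\"` escape as-is.
            ('\\', Some('"')) => {
                escaped.push('\\');
                escaped.push('"');
                chrs.next();
            }
            // A bare `"` must be escaped inside a double-quoted literal.
            ('"', _) => {
                escaped.push('\\');
                escaped.push('"');
            }
            (c, _) => escaped.push(c),
        }
    }
    escaped
}

fn main() {
    assert_eq!(escape_for_str_suggestion(r#"abc"def"#), r#"abc\"def"#);
    assert_eq!(escape_for_str_suggestion(r#"keep \" as is"#), r#"keep \" as is"#);
    // The suggestion then wraps the escaped body in double quotes.
    println!("\"{}\"", escape_for_str_suggestion(r#"say "hi""#));
}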
diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs
index a37327f42..3dcadb4c9 100644
--- a/compiler/rustc_parse/src/lib.rs
+++ b/compiler/rustc_parse/src/lib.rs
@@ -4,7 +4,6 @@
#![feature(box_patterns)]
#![feature(if_let_guard)]
#![feature(let_chains)]
-#![cfg_attr(bootstrap, feature(let_else))]
#![feature(never_type)]
#![feature(rustc_attrs)]
#![recursion_limit = "256"]
@@ -33,12 +32,15 @@ use parser::{emit_unclosed_delims, make_unclosed_delims_error, Parser};
pub mod lexer;
pub mod validate_attr;
+mod errors;
+
// A bunch of utility functions of the form `parse_<thing>_from_<source>`
// where <thing> includes crate, expr, item, stmt, tts, and one that
// uses a HOF to parse anything, and <source> includes file and
// `source_str`.
-/// A variant of 'panictry!' that works on a Vec<Diagnostic> instead of a single DiagnosticBuilder.
+/// A variant of 'panictry!' that works on a `Vec<Diagnostic>` instead of a single
+/// `DiagnosticBuilder`.
macro_rules! panictry_buffer {
($handler:expr, $e:expr) => {{
use rustc_errors::FatalError;
diff --git a/compiler/rustc_parse/src/parser/attr.rs b/compiler/rustc_parse/src/parser/attr.rs
index 5fd69b15e..9e4565694 100644
--- a/compiler/rustc_parse/src/parser/attr.rs
+++ b/compiler/rustc_parse/src/parser/attr.rs
@@ -1,27 +1,26 @@
+use crate::errors::{InvalidMetaItem, SuffixedLiteralInAttribute};
+
use super::{AttrWrapper, Capturing, FnParseMode, ForceCollect, Parser, PathStyle};
use rustc_ast as ast;
use rustc_ast::attr;
use rustc_ast::token::{self, Delimiter, Nonterminal};
-use rustc_ast_pretty::pprust;
-use rustc_errors::{error_code, Diagnostic, PResult};
+use rustc_errors::{error_code, fluent, Diagnostic, IntoDiagnostic, PResult};
use rustc_span::{sym, BytePos, Span};
use std::convert::TryInto;
// Public for rustfmt usage
#[derive(Debug)]
-pub enum InnerAttrPolicy<'a> {
+pub enum InnerAttrPolicy {
Permitted,
- Forbidden { reason: &'a str, saw_doc_comment: bool, prev_outer_attr_sp: Option<Span> },
+ Forbidden(Option<InnerAttrForbiddenReason>),
}
-const DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG: &str = "an inner attribute is not \
- permitted in this context";
-
-pub(super) const DEFAULT_INNER_ATTR_FORBIDDEN: InnerAttrPolicy<'_> = InnerAttrPolicy::Forbidden {
- reason: DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG,
- saw_doc_comment: false,
- prev_outer_attr_sp: None,
-};
+#[derive(Clone, Copy, Debug)]
+pub enum InnerAttrForbiddenReason {
+ InCodeBlock,
+ AfterOuterDocComment { prev_doc_comment_span: Span },
+ AfterOuterAttribute { prev_outer_attr_sp: Span },
+}
enum OuterAttributeType {
DocComment,
@@ -40,17 +39,15 @@ impl<'a> Parser<'a> {
let prev_outer_attr_sp = outer_attrs.last().map(|attr| attr.span);
let inner_error_reason = if just_parsed_doc_comment {
- "an inner attribute is not permitted following an outer doc comment"
- } else if prev_outer_attr_sp.is_some() {
- "an inner attribute is not permitted following an outer attribute"
+ Some(InnerAttrForbiddenReason::AfterOuterDocComment {
+ prev_doc_comment_span: prev_outer_attr_sp.unwrap(),
+ })
+ } else if let Some(prev_outer_attr_sp) = prev_outer_attr_sp {
+ Some(InnerAttrForbiddenReason::AfterOuterAttribute { prev_outer_attr_sp })
} else {
- DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG
- };
- let inner_parse_policy = InnerAttrPolicy::Forbidden {
- reason: inner_error_reason,
- saw_doc_comment: just_parsed_doc_comment,
- prev_outer_attr_sp,
+ None
};
+ let inner_parse_policy = InnerAttrPolicy::Forbidden(inner_error_reason);
just_parsed_doc_comment = false;
Some(self.parse_attribute(inner_parse_policy)?)
} else if let token::DocComment(comment_kind, attr_style, data) = self.token.kind {
@@ -58,7 +55,7 @@ impl<'a> Parser<'a> {
let span = self.token.span;
let mut err = self.sess.span_diagnostic.struct_span_err_with_code(
span,
- "expected outer doc comment",
+ fluent::parser_inner_doc_comment_not_permitted,
error_code!(E0753),
);
if let Some(replacement_span) = self.annotate_following_item_if_applicable(
@@ -69,13 +66,10 @@ impl<'a> Parser<'a> {
token::CommentKind::Block => OuterAttributeType::DocBlockComment,
},
) {
- err.note(
- "inner doc comments like this (starting with `//!` or `/*!`) can \
- only appear before items",
- );
+ err.note(fluent::note);
err.span_suggestion_verbose(
replacement_span,
- "you might have meant to write a regular comment",
+ fluent::suggestion,
"",
rustc_errors::Applicability::MachineApplicable,
);
@@ -113,7 +107,7 @@ impl<'a> Parser<'a> {
// Public for rustfmt usage.
pub fn parse_attribute(
&mut self,
- inner_parse_policy: InnerAttrPolicy<'_>,
+ inner_parse_policy: InnerAttrPolicy,
) -> PResult<'a, ast::Attribute> {
debug!(
"parse_attribute: inner_parse_policy={:?} self.token={:?}",
@@ -122,35 +116,22 @@ impl<'a> Parser<'a> {
let lo = self.token.span;
// Attributes can't have attributes of their own [Editor's note: not with that attitude]
self.collect_tokens_no_attrs(|this| {
- if this.eat(&token::Pound) {
- let style = if this.eat(&token::Not) {
- ast::AttrStyle::Inner
- } else {
- ast::AttrStyle::Outer
- };
+ assert!(this.eat(&token::Pound), "parse_attribute called in non-attribute position");
- this.expect(&token::OpenDelim(Delimiter::Bracket))?;
- let item = this.parse_attr_item(false)?;
- this.expect(&token::CloseDelim(Delimiter::Bracket))?;
- let attr_sp = lo.to(this.prev_token.span);
+ let style =
+ if this.eat(&token::Not) { ast::AttrStyle::Inner } else { ast::AttrStyle::Outer };
- // Emit error if inner attribute is encountered and forbidden.
- if style == ast::AttrStyle::Inner {
- this.error_on_forbidden_inner_attr(attr_sp, inner_parse_policy);
- }
+ this.expect(&token::OpenDelim(Delimiter::Bracket))?;
+ let item = this.parse_attr_item(false)?;
+ this.expect(&token::CloseDelim(Delimiter::Bracket))?;
+ let attr_sp = lo.to(this.prev_token.span);
- Ok(attr::mk_attr_from_item(
- &self.sess.attr_id_generator,
- item,
- None,
- style,
- attr_sp,
- ))
- } else {
- let token_str = pprust::token_to_string(&this.token);
- let msg = &format!("expected `#`, found `{token_str}`");
- Err(this.struct_span_err(this.token.span, msg))
+ // Emit error if inner attribute is encountered and forbidden.
+ if style == ast::AttrStyle::Inner {
+ this.error_on_forbidden_inner_attr(attr_sp, inner_parse_policy);
}
+
+ Ok(attr::mk_attr_from_item(&self.sess.attr_id_generator, item, None, style, attr_sp))
})
}
@@ -190,21 +171,12 @@ impl<'a> Parser<'a> {
ForceCollect::No,
) {
Ok(Some(item)) => {
- let attr_name = match attr_type {
- OuterAttributeType::Attribute => "attribute",
- _ => "doc comment",
- };
- err.span_label(
- item.span,
- &format!("the inner {} doesn't annotate this {}", attr_name, item.kind.descr()),
- );
+ // FIXME(#100717)
+ err.set_arg("item", item.kind.descr());
+ err.span_label(item.span, fluent::label_does_not_annotate_this);
err.span_suggestion_verbose(
replacement_span,
- &format!(
- "to annotate the {}, change the {} from inner to outer style",
- item.kind.descr(),
- attr_name
- ),
+ fluent::sugg_change_inner_to_outer,
match attr_type {
OuterAttributeType::Attribute => "",
OuterAttributeType::DocBlockComment => "*",
@@ -222,22 +194,33 @@ impl<'a> Parser<'a> {
Some(replacement_span)
}
- pub(super) fn error_on_forbidden_inner_attr(&self, attr_sp: Span, policy: InnerAttrPolicy<'_>) {
- if let InnerAttrPolicy::Forbidden { reason, saw_doc_comment, prev_outer_attr_sp } = policy {
- let prev_outer_attr_note =
- if saw_doc_comment { "previous doc comment" } else { "previous outer attribute" };
-
- let mut diag = self.struct_span_err(attr_sp, reason);
-
- if let Some(prev_outer_attr_sp) = prev_outer_attr_sp {
- diag.span_label(attr_sp, "not permitted following an outer attribute")
- .span_label(prev_outer_attr_sp, prev_outer_attr_note);
- }
+ pub(super) fn error_on_forbidden_inner_attr(&self, attr_sp: Span, policy: InnerAttrPolicy) {
+ if let InnerAttrPolicy::Forbidden(reason) = policy {
+ let mut diag = match reason.as_ref().copied() {
+ Some(InnerAttrForbiddenReason::AfterOuterDocComment { prev_doc_comment_span }) => {
+ let mut diag = self.struct_span_err(
+ attr_sp,
+ fluent::parser_inner_attr_not_permitted_after_outer_doc_comment,
+ );
+ diag.span_label(attr_sp, fluent::label_attr)
+ .span_label(prev_doc_comment_span, fluent::label_prev_doc_comment);
+ diag
+ }
+ Some(InnerAttrForbiddenReason::AfterOuterAttribute { prev_outer_attr_sp }) => {
+ let mut diag = self.struct_span_err(
+ attr_sp,
+ fluent::parser_inner_attr_not_permitted_after_outer_attr,
+ );
+ diag.span_label(attr_sp, fluent::label_attr)
+ .span_label(prev_outer_attr_sp, fluent::label_prev_attr);
+ diag
+ }
+ Some(InnerAttrForbiddenReason::InCodeBlock) | None => {
+ self.struct_span_err(attr_sp, fluent::parser_inner_attr_not_permitted)
+ }
+ };
- diag.note(
- "inner attributes, like `#![no_std]`, annotate the item enclosing them, and \
- are usually found at the beginning of source files",
- );
+ diag.note(fluent::parser_inner_attr_explanation);
if self
.annotate_following_item_if_applicable(
&mut diag,
@@ -246,7 +229,7 @@ impl<'a> Parser<'a> {
)
.is_some()
{
- diag.note("outer attributes, like `#[test]`, annotate the item following them");
+ diag.note(fluent::parser_outer_attr_explanation);
};
diag.emit();
}
@@ -337,12 +320,7 @@ impl<'a> Parser<'a> {
debug!("checking if {:?} is unusuffixed", lit);
if !lit.kind.is_unsuffixed() {
- self.struct_span_err(lit.span, "suffixed literals are not allowed in attributes")
- .help(
- "instead of using a suffixed literal (`1u8`, `1.0f32`, etc.), \
- use an unsuffixed version (`1`, `1.0`, etc.)",
- )
- .emit();
+ self.sess.emit_err(SuffixedLiteralInAttribute { span: lit.span });
}
Ok(lit)
@@ -435,9 +413,8 @@ impl<'a> Parser<'a> {
Err(err) => err.cancel(),
}
- let found = pprust::token_to_string(&self.token);
- let msg = format!("expected unsuffixed literal or identifier, found `{found}`");
- Err(self.struct_span_err(self.token.span, &msg))
+ Err(InvalidMetaItem { span: self.token.span, token: self.token.clone() }
+ .into_diagnostic(&self.sess.span_diagnostic))
}
}
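
Note on the attr.rs changes above: the stringly-typed reason for rejecting an inner attribute is replaced by the InnerAttrForbiddenReason enum, so each case carries the span it needs and maps onto a dedicated translatable diagnostic. Below is a minimal sketch of that pattern using only std types; the Span alias is a stand-in, describe is a name I made up, and the message strings come from the removed lines, with none of rustc's diagnostic machinery involved.

// Simplified stand-in for rustc_span::Span, for illustration only.
type Span = std::ops::Range<usize>;

#[derive(Clone, Debug)]
enum InnerAttrForbiddenReason {
    InCodeBlock,
    AfterOuterDocComment { prev_doc_comment_span: Span },
    AfterOuterAttribute { prev_outer_attr_sp: Span },
}

// Each variant knows which message applies and which earlier span to label,
// so callers no longer thread `saw_doc_comment` / `prev_outer_attr_sp` flags around.
fn describe(reason: Option<&InnerAttrForbiddenReason>) -> (&'static str, Option<Span>) {
    match reason {
        Some(InnerAttrForbiddenReason::AfterOuterDocComment { prev_doc_comment_span }) => (
            "an inner attribute is not permitted following an outer doc comment",
            Some(prev_doc_comment_span.clone()),
        ),
        Some(InnerAttrForbiddenReason::AfterOuterAttribute { prev_outer_attr_sp }) => (
            "an inner attribute is not permitted following an outer attribute",
            Some(prev_outer_attr_sp.clone()),
        ),
        Some(InnerAttrForbiddenReason::InCodeBlock) | None => {
            ("an inner attribute is not permitted in this context", None)
        }
    }
}

fn main() {
    let reason = InnerAttrForbiddenReason::AfterOuterAttribute { prev_outer_attr_sp: 10..20 };
    let (msg, prev) = describe(Some(&reason));
    println!("error: {msg}; previous outer attribute at {prev:?}");
    let (msg, _) = describe(Some(&InnerAttrForbiddenReason::InCodeBlock));
    println!("error: {msg}");
}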
diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs
index 5fdafd187..1b16ecb5e 100644
--- a/compiler/rustc_parse/src/parser/attr_wrapper.rs
+++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs
@@ -32,11 +32,6 @@ pub struct AttrWrapper {
start_pos: usize,
}
-// This struct is passed around very frequently,
-// so make sure it doesn't accidentally get larger
-#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
-rustc_data_structures::static_assert_size!(AttrWrapper, 16);
-
impl AttrWrapper {
pub(super) fn new(attrs: AttrVec, start_pos: usize) -> AttrWrapper {
AttrWrapper { attrs, start_pos }
@@ -96,9 +91,6 @@ struct LazyAttrTokenStreamImpl {
replace_ranges: Box<[ReplaceRange]>,
}
-#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
-rustc_data_structures::static_assert_size!(LazyAttrTokenStreamImpl, 144);
-
impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
fn to_attr_token_stream(&self) -> AttrTokenStream {
// The token produced by the final call to `{,inlined_}next` was not
@@ -281,16 +273,23 @@ impl<'a> Parser<'a> {
let cursor_snapshot_next_calls = cursor_snapshot.num_next_calls;
let mut end_pos = self.token_cursor.num_next_calls;
+ let mut captured_trailing = false;
+
// Capture a trailing token if requested by the callback 'f'
match trailing {
TrailingToken::None => {}
+ TrailingToken::Gt => {
+ assert_eq!(self.token.kind, token::Gt);
+ }
TrailingToken::Semi => {
assert_eq!(self.token.kind, token::Semi);
end_pos += 1;
+ captured_trailing = true;
}
TrailingToken::MaybeComma => {
if self.token.kind == token::Comma {
end_pos += 1;
+ captured_trailing = true;
}
}
}
@@ -300,11 +299,7 @@ impl<'a> Parser<'a> {
// was not actually bumped past it. When the `LazyAttrTokenStream` gets converted
// into an `AttrTokenStream`, we will create the proper token.
if self.token_cursor.break_last_token {
- assert_eq!(
- trailing,
- TrailingToken::None,
- "Cannot set `break_last_token` and have trailing token"
- );
+ assert!(!captured_trailing, "Cannot set break_last_token and have trailing token");
end_pos += 1;
}
@@ -459,6 +454,16 @@ fn make_token_stream(
panic!("Unexpected last token {:?}", last_token)
}
}
- assert!(stack.is_empty(), "Stack should be empty: final_buf={:?} stack={:?}", final_buf, stack);
AttrTokenStream::new(final_buf.inner)
}
+
+// Some types are used a lot. Make sure they don't unintentionally get bigger.
+#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
+mod size_asserts {
+ use super::*;
+ use rustc_data_structures::static_assert_size;
+ // tidy-alphabetical-start
+ static_assert_size!(AttrWrapper, 16);
+ static_assert_size!(LazyAttrTokenStreamImpl, 144);
+ // tidy-alphabetical-end
+}
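
Note on the attr_wrapper.rs hunk above: it only relocates the two static_assert_size! checks into a tidy-sorted size_asserts module. For readers unfamiliar with the macro, this is roughly what such a compile-time size guard boils down to, sketched here with plain const evaluation rather than rustc_data_structures' macro; Wrapper is a made-up example type.

// If Wrapper's size ever changes from 16 bytes, this constant fails to
// evaluate and the crate stops compiling, which is the point of the check.
struct Wrapper {
    data: [u64; 2],
}

const _: () = assert!(std::mem::size_of::<Wrapper>() == 16);

fn main() {
    let w = Wrapper { data: [1, 2] };
    println!("Wrapper is {} bytes, data[0] = {}", std::mem::size_of::<Wrapper>(), w.data[0]);
}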
diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs
index be524db78..309717350 100644
--- a/compiler/rustc_parse/src/parser/diagnostics.rs
+++ b/compiler/rustc_parse/src/parser/diagnostics.rs
@@ -3,6 +3,19 @@ use super::{
BlockMode, CommaRecoveryMode, Parser, PathStyle, Restrictions, SemiColonMode, SeqSep,
TokenExpectType, TokenType,
};
+use crate::errors::{
+ AmbiguousPlus, AttributeOnParamType, BadQPathStage2, BadTypePlus, BadTypePlusSub,
+ ComparisonOperatorsCannotBeChained, ComparisonOperatorsCannotBeChainedSugg,
+ ConstGenericWithoutBraces, ConstGenericWithoutBracesSugg, DocCommentOnParamType,
+ DoubleColonInBound, ExpectedIdentifier, ExpectedSemi, ExpectedSemiSugg,
+ GenericParamsWithoutAngleBrackets, GenericParamsWithoutAngleBracketsSugg, InInTypo,
+ IncorrectAwait, IncorrectSemicolon, IncorrectUseOfAwait, ParenthesesInForHead,
+ ParenthesesInForHeadSugg, PatternMethodParamWithoutBody, QuestionMarkInType,
+ QuestionMarkInTypeSugg, SelfParamNotFirst, StructLiteralBodyWithoutPath,
+ StructLiteralBodyWithoutPathSugg, SuggEscapeToUseAsIdentifier, SuggRemoveComma,
+ UnexpectedConstInGenericParam, UnexpectedConstParamDeclaration,
+ UnexpectedConstParamDeclarationSugg, UnmatchedAngleBrackets, UseEqInstead,
+};
use crate::lexer::UnmatchedBrace;
use rustc_ast as ast;
@@ -19,8 +32,8 @@ use rustc_data_structures::fx::FxHashSet;
use rustc_errors::{
fluent, Applicability, DiagnosticBuilder, DiagnosticMessage, Handler, MultiSpan, PResult,
};
-use rustc_errors::{pluralize, struct_span_err, Diagnostic, ErrorGuaranteed};
-use rustc_macros::{SessionDiagnostic, SessionSubdiagnostic};
+use rustc_errors::{pluralize, Diagnostic, ErrorGuaranteed, IntoDiagnostic};
+use rustc_session::errors::ExprParenthesesNeeded;
use rustc_span::source_map::Spanned;
use rustc_span::symbol::{kw, sym, Ident};
use rustc_span::{Span, SpanSnippetError, DUMMY_SP};
@@ -30,9 +43,6 @@ use std::mem::take;
use crate::parser;
-const TURBOFISH_SUGGESTION_STR: &str =
- "use `::<...>` instead of `<...>` to specify lifetime, type, or const arguments";
-
/// Creates a placeholder argument.
pub(super) fn dummy_arg(ident: Ident) -> Param {
let pat = P(Pat {
@@ -52,34 +62,6 @@ pub(super) fn dummy_arg(ident: Ident) -> Param {
}
}
-pub enum Error {
- UselessDocComment,
-}
-
-impl Error {
- fn span_err(
- self,
- sp: impl Into<MultiSpan>,
- handler: &Handler,
- ) -> DiagnosticBuilder<'_, ErrorGuaranteed> {
- match self {
- Error::UselessDocComment => {
- let mut err = struct_span_err!(
- handler,
- sp,
- E0585,
- "found a documentation comment that doesn't document anything",
- );
- err.help(
- "doc comments must come before what they document, maybe a comment was \
- intended with `//`?",
- );
- err
- }
- }
- }
-}
-
pub(super) trait RecoverQPath: Sized + 'static {
const PATH_STYLE: PathStyle = PathStyle::Expr;
fn to_ty(&self) -> Option<P<Ty>>;
@@ -242,485 +224,6 @@ impl MultiSugg {
}
}
-#[derive(SessionDiagnostic)]
-#[diag(parser::maybe_report_ambiguous_plus)]
-struct AmbiguousPlus {
- pub sum_ty: String,
- #[primary_span]
- #[suggestion(code = "({sum_ty})")]
- pub span: Span,
-}
-
-#[derive(SessionDiagnostic)]
-#[diag(parser::maybe_recover_from_bad_type_plus, code = "E0178")]
-struct BadTypePlus {
- pub ty: String,
- #[primary_span]
- pub span: Span,
- #[subdiagnostic]
- pub sub: BadTypePlusSub,
-}
-
-#[derive(SessionSubdiagnostic)]
-pub enum BadTypePlusSub {
- #[suggestion(
- parser::add_paren,
- code = "{sum_with_parens}",
- applicability = "machine-applicable"
- )]
- AddParen {
- sum_with_parens: String,
- #[primary_span]
- span: Span,
- },
- #[label(parser::forgot_paren)]
- ForgotParen {
- #[primary_span]
- span: Span,
- },
- #[label(parser::expect_path)]
- ExpectPath {
- #[primary_span]
- span: Span,
- },
-}
-
-#[derive(SessionDiagnostic)]
-#[diag(parser::maybe_recover_from_bad_qpath_stage_2)]
-struct BadQPathStage2 {
- #[primary_span]
- #[suggestion(applicability = "maybe-incorrect")]
- span: Span,
- ty: String,
-}
-
-#[derive(SessionDiagnostic)]
-#[diag(parser::incorrect_semicolon)]
-struct IncorrectSemicolon<'a> {
- #[primary_span]
- #[suggestion_short(applicability = "machine-applicable")]
- span: Span,
- #[help]
- opt_help: Option<()>,
- name: &'a str,
-}
-
-#[derive(SessionDiagnostic)]
-#[diag(parser::incorrect_use_of_await)]
-struct IncorrectUseOfAwait {
- #[primary_span]
- #[suggestion(parser::parentheses_suggestion, applicability = "machine-applicable")]
- span: Span,
-}
-
-#[derive(SessionDiagnostic)]
-#[diag(parser::incorrect_use_of_await)]
-struct IncorrectAwait {
- #[primary_span]
- span: Span,
- #[suggestion(parser::postfix_suggestion, code = "{expr}.await{question_mark}")]
- sugg_span: (Span, Applicability),
- expr: String,
- question_mark: &'static str,
-}
-
-#[derive(SessionDiagnostic)]
-#[diag(parser::in_in_typo)]
-struct InInTypo {
- #[primary_span]
- span: Span,
- #[suggestion(applicability = "machine-applicable")]
- sugg_span: Span,
-}
-
-#[derive(SessionDiagnostic)]
-#[diag(parser::invalid_variable_declaration)]
-pub struct InvalidVariableDeclaration {
- #[primary_span]
- pub span: Span,
- #[subdiagnostic]
- pub sub: InvalidVariableDeclarationSub,
-}
-
-#[derive(SessionSubdiagnostic)]
-pub enum InvalidVariableDeclarationSub {
- #[suggestion(
- parser::switch_mut_let_order,
- applicability = "maybe-incorrect",
- code = "let mut"
- )]
- SwitchMutLetOrder(#[primary_span] Span),
- #[suggestion(
- parser::missing_let_before_mut,
- applicability = "machine-applicable",
- code = "let mut"
- )]
- MissingLet(#[primary_span] Span),
- #[suggestion(parser::use_let_not_auto, applicability = "machine-applicable", code = "let")]
- UseLetNotAuto(#[primary_span] Span),
- #[suggestion(parser::use_let_not_var, applicability = "machine-applicable", code = "let")]
- UseLetNotVar(#[primary_span] Span),
-}
-
-#[derive(SessionDiagnostic)]
-#[diag(parser::invalid_comparison_operator)]
-pub(crate) struct InvalidComparisonOperator {
- #[primary_span]
- pub span: Span,
- pub invalid: String,
- #[subdiagnostic]
- pub sub: InvalidComparisonOperatorSub,
-}
-
-#[derive(SessionSubdiagnostic)]
-pub(crate) enum InvalidComparisonOperatorSub {
- #[suggestion_short(
- parser::use_instead,
- applicability = "machine-applicable",
- code = "{correct}"
- )]
- Correctable {
- #[primary_span]
- span: Span,
- invalid: String,
- correct: String,
- },
- #[label(parser::spaceship_operator_invalid)]
- Spaceship(#[primary_span] Span),
-}
-
-#[derive(SessionDiagnostic)]
-#[diag(parser::invalid_logical_operator)]
-#[note]
-pub(crate) struct InvalidLogicalOperator {
- #[primary_span]
- pub span: Span,
- pub incorrect: String,
- #[subdiagnostic]
- pub sub: InvalidLogicalOperatorSub,
-}
-
-#[derive(SessionSubdiagnostic)]
-pub(crate) enum InvalidLogicalOperatorSub {
- #[suggestion_short(
- parser::use_amp_amp_for_conjunction,
- applicability = "machine-applicable",
- code = "&&"
- )]
- Conjunction(#[primary_span] Span),
- #[suggestion_short(
- parser::use_pipe_pipe_for_disjunction,
- applicability = "machine-applicable",
- code = "||"
- )]
- Disjunction(#[primary_span] Span),
-}
-
-#[derive(SessionDiagnostic)]
-#[diag(parser::tilde_is_not_unary_operator)]
-pub(crate) struct TildeAsUnaryOperator(
- #[primary_span]
- #[suggestion_short(applicability = "machine-applicable", code = "!")]
- pub Span,
-);
-
-#[derive(SessionDiagnostic)]
-#[diag(parser::unexpected_token_after_not)]
-pub(crate) struct NotAsNegationOperator {
- #[primary_span]
- pub negated: Span,
- pub negated_desc: String,
- #[suggestion_short(applicability = "machine-applicable", code = "!")]
- pub not: Span,
-}
-
-#[derive(SessionDiagnostic)]
-#[diag(parser::malformed_loop_label)]
-pub(crate) struct MalformedLoopLabel {
- #[primary_span]
- #[suggestion(applicability = "machine-applicable", code = "{correct_label}")]
- pub span: Span,
- pub correct_label: Ident,
-}
-
-#[derive(SessionDiagnostic)]
-#[diag(parser::lifetime_in_borrow_expression)]
-pub(crate) struct LifetimeInBorrowExpression {
- #[primary_span]
- pub span: Span,
- #[suggestion(applicability = "machine-applicable", code = "")]
- #[label]
- pub lifetime_span: Span,
-}
-
-#[derive(SessionDiagnostic)]
-#[diag(parser::field_expression_with_generic)]
-pub(crate) struct FieldExpressionWithGeneric(#[primary_span] pub Span);
-
-#[derive(SessionDiagnostic)]
-#[diag(parser::macro_invocation_with_qualified_path)]
-pub(crate) struct MacroInvocationWithQualifiedPath(#[primary_span] pub Span);
-
-#[derive(SessionDiagnostic)]
-#[diag(parser::unexpected_token_after_label)]
-pub(crate) struct UnexpectedTokenAfterLabel(
- #[primary_span]
- #[label(parser::unexpected_token_after_label)]
- pub Span,
-);
-
-#[derive(SessionDiagnostic)]
-#[diag(parser::require_colon_after_labeled_expression)]
-#[note]
-pub(crate) struct RequireColonAfterLabeledExpression {
- #[primary_span]
- pub span: Span,
- #[label]
- pub label: Span,
- #[suggestion_short(applicability = "machine-applicable", code = ": ")]
- pub label_end: Span,
-}
-
-#[derive(SessionDiagnostic)]
-#[diag(parser::do_catch_syntax_removed)]
-#[note]
-pub(crate) struct DoCatchSyntaxRemoved {
- #[primary_span]
- #[suggestion(applicability = "machine-applicable", code = "try")]
- pub span: Span,
-}
-
-#[derive(SessionDiagnostic)]
-#[diag(parser::float_literal_requires_integer_part)]
-pub(crate) struct FloatLiteralRequiresIntegerPart {
- #[primary_span]
- #[suggestion(applicability = "machine-applicable", code = "{correct}")]
- pub span: Span,
- pub correct: String,
-}
-
-#[derive(SessionDiagnostic)]
-#[diag(parser::invalid_int_literal_width)]
-#[help]
-pub(crate) struct InvalidIntLiteralWidth {
- #[primary_span]
- pub span: Span,
- pub width: String,
-}
-
-#[derive(SessionDiagnostic)]
-#[diag(parser::invalid_num_literal_base_prefix)]
-#[note]
-pub(crate) struct InvalidNumLiteralBasePrefix {
- #[primary_span]
- #[suggestion(applicability = "maybe-incorrect", code = "{fixed}")]
- pub span: Span,
- pub fixed: String,
-}
-
-#[derive(SessionDiagnostic)]
-#[diag(parser::invalid_num_literal_suffix)]
-#[help]
-pub(crate) struct InvalidNumLiteralSuffix {
- #[primary_span]
- #[label]
- pub span: Span,
- pub suffix: String,
-}
-
-#[derive(SessionDiagnostic)]
-#[diag(parser::invalid_float_literal_width)]
-#[help]
-pub(crate) struct InvalidFloatLiteralWidth {
- #[primary_span]
- pub span: Span,
- pub width: String,
-}
-
-#[derive(SessionDiagnostic)]
-#[diag(parser::invalid_float_literal_suffix)]
-#[help]
-pub(crate) struct InvalidFloatLiteralSuffix {
- #[primary_span]
- #[label]
- pub span: Span,
- pub suffix: String,
-}
-
-#[derive(SessionDiagnostic)]
-#[diag(parser::int_literal_too_large)]
-pub(crate) struct IntLiteralTooLarge {
- #[primary_span]
- pub span: Span,
-}
-
-#[derive(SessionDiagnostic)]
-#[diag(parser::missing_semicolon_before_array)]
-pub(crate) struct MissingSemicolonBeforeArray {
- #[primary_span]
- pub open_delim: Span,
- #[suggestion_verbose(applicability = "maybe-incorrect", code = ";")]
- pub semicolon: Span,
-}
-
-#[derive(SessionDiagnostic)]
-#[diag(parser::invalid_block_macro_segment)]
-pub(crate) struct InvalidBlockMacroSegment {
- #[primary_span]
- pub span: Span,
- #[label]
- pub context: Span,
-}
-
-#[derive(SessionDiagnostic)]
-#[diag(parser::if_expression_missing_then_block)]
-pub(crate) struct IfExpressionMissingThenBlock {
- #[primary_span]
- pub if_span: Span,
- #[subdiagnostic]
- pub sub: IfExpressionMissingThenBlockSub,
-}
-
-#[derive(SessionSubdiagnostic)]
-pub(crate) enum IfExpressionMissingThenBlockSub {
- #[help(parser::condition_possibly_unfinished)]
- UnfinishedCondition(#[primary_span] Span),
- #[help(parser::add_then_block)]
- AddThenBlock(#[primary_span] Span),
-}
-
-#[derive(SessionDiagnostic)]
-#[diag(parser::if_expression_missing_condition)]
-pub(crate) struct IfExpressionMissingCondition {
- #[primary_span]
- #[label(parser::condition_label)]
- pub if_span: Span,
- #[label(parser::block_label)]
- pub block_span: Span,
-}
-
-#[derive(SessionDiagnostic)]
-#[diag(parser::expected_expression_found_let)]
-pub(crate) struct ExpectedExpressionFoundLet {
- #[primary_span]
- pub span: Span,
-}
-
-#[derive(SessionDiagnostic)]
-#[diag(parser::expected_else_block)]
-pub(crate) struct ExpectedElseBlock {
- #[primary_span]
- pub first_tok_span: Span,
- pub first_tok: String,
- #[label]
- pub else_span: Span,
- #[suggestion(applicability = "maybe-incorrect", code = "if ")]
- pub condition_start: Span,
-}
-
-#[derive(SessionDiagnostic)]
-#[diag(parser::outer_attribute_not_allowed_on_if_else)]
-pub(crate) struct OuterAttributeNotAllowedOnIfElse {
- #[primary_span]
- pub last: Span,
-
- #[label(parser::branch_label)]
- pub branch_span: Span,
-
- #[label(parser::ctx_label)]
- pub ctx_span: Span,
- pub ctx: String,
-
- #[suggestion(applicability = "machine-applicable", code = "")]
- pub attributes: Span,
-}
-
-#[derive(SessionDiagnostic)]
-#[diag(parser::missing_in_in_for_loop)]
-pub(crate) struct MissingInInForLoop {
- #[primary_span]
- pub span: Span,
- #[subdiagnostic]
- pub sub: MissingInInForLoopSub,
-}
-
-#[derive(SessionSubdiagnostic)]
-pub(crate) enum MissingInInForLoopSub {
- // Has been misleading, at least in the past (closed Issue #48492), thus maybe-incorrect
- #[suggestion_short(parser::use_in_not_of, applicability = "maybe-incorrect", code = "in")]
- InNotOf(#[primary_span] Span),
- #[suggestion_short(parser::add_in, applicability = "maybe-incorrect", code = " in ")]
- AddIn(#[primary_span] Span),
-}
-
-#[derive(SessionDiagnostic)]
-#[diag(parser::missing_comma_after_match_arm)]
-pub(crate) struct MissingCommaAfterMatchArm {
- #[primary_span]
- #[suggestion(applicability = "machine-applicable", code = ",")]
- pub span: Span,
-}
-
-#[derive(SessionDiagnostic)]
-#[diag(parser::catch_after_try)]
-#[help]
-pub(crate) struct CatchAfterTry {
- #[primary_span]
- pub span: Span,
-}
-
-#[derive(SessionDiagnostic)]
-#[diag(parser::comma_after_base_struct)]
-#[note]
-pub(crate) struct CommaAfterBaseStruct {
- #[primary_span]
- pub span: Span,
- #[suggestion_short(applicability = "machine-applicable", code = "")]
- pub comma: Span,
-}
-
-#[derive(SessionDiagnostic)]
-#[diag(parser::eq_field_init)]
-pub(crate) struct EqFieldInit {
- #[primary_span]
- pub span: Span,
- #[suggestion(applicability = "machine-applicable", code = ":")]
- pub eq: Span,
-}
-
-#[derive(SessionDiagnostic)]
-#[diag(parser::dotdotdot)]
-pub(crate) struct DotDotDot {
- #[primary_span]
- #[suggestion(parser::suggest_exclusive_range, applicability = "maybe-incorrect", code = "..")]
- #[suggestion(parser::suggest_inclusive_range, applicability = "maybe-incorrect", code = "..=")]
- pub span: Span,
-}
-
-#[derive(SessionDiagnostic)]
-#[diag(parser::left_arrow_operator)]
-pub(crate) struct LeftArrowOperator {
- #[primary_span]
- #[suggestion(applicability = "maybe-incorrect", code = "< -")]
- pub span: Span,
-}
-
-#[derive(SessionDiagnostic)]
-#[diag(parser::remove_let)]
-pub(crate) struct RemoveLet {
- #[primary_span]
- #[suggestion(applicability = "machine-applicable", code = "")]
- pub span: Span,
-}
-
-#[derive(SessionDiagnostic)]
-#[diag(parser::use_eq_instead)]
-pub(crate) struct UseEqInstead {
- #[primary_span]
- #[suggestion_short(applicability = "machine-applicable", code = "=")]
- pub span: Span,
-}
-
// SnapshotParser is used to create a snapshot of the parser
// without causing duplicate errors being emitted when the `Parser`
// is dropped.
@@ -745,15 +248,6 @@ impl<'a> DerefMut for SnapshotParser<'a> {
impl<'a> Parser<'a> {
#[rustc_lint_diagnostics]
- pub(super) fn span_err<S: Into<MultiSpan>>(
- &self,
- sp: S,
- err: Error,
- ) -> DiagnosticBuilder<'a, ErrorGuaranteed> {
- err.span_err(sp, self.diagnostic())
- }
-
- #[rustc_lint_diagnostics]
pub fn struct_span_err<S: Into<MultiSpan>>(
&self,
sp: S,
@@ -798,10 +292,6 @@ impl<'a> Parser<'a> {
}
pub(super) fn expected_ident_found(&self) -> DiagnosticBuilder<'a, ErrorGuaranteed> {
- let mut err = self.struct_span_err(
- self.token.span,
- &format!("expected identifier, found {}", super::token_descr(&self.token)),
- );
let valid_follow = &[
TokenKind::Eq,
TokenKind::Colon,
@@ -813,34 +303,35 @@ impl<'a> Parser<'a> {
TokenKind::CloseDelim(Delimiter::Brace),
TokenKind::CloseDelim(Delimiter::Parenthesis),
];
- match self.token.ident() {
+ let suggest_raw = match self.token.ident() {
Some((ident, false))
if ident.is_raw_guess()
&& self.look_ahead(1, |t| valid_follow.contains(&t.kind)) =>
{
- err.span_suggestion_verbose(
- ident.span.shrink_to_lo(),
- &format!("escape `{}` to use it as an identifier", ident.name),
- "r#",
- Applicability::MaybeIncorrect,
- );
+ Some(SuggEscapeToUseAsIdentifier {
+ span: ident.span.shrink_to_lo(),
+ // `Symbol::to_string()` is different from `Symbol::into_diagnostic_arg()`,
+ // which uses `Symbol::to_ident_string()` and "helpfully" adds an implicit `r#`
+ ident_name: ident.name.to_string(),
+ })
}
- _ => {}
- }
- if let Some(token_descr) = super::token_descr_opt(&self.token) {
- err.span_label(self.token.span, format!("expected identifier, found {}", token_descr));
- } else {
- err.span_label(self.token.span, "expected identifier");
+ _ => None,
+ };
+
+ let suggest_remove_comma =
if self.token == token::Comma && self.look_ahead(1, |t| t.is_ident()) {
- err.span_suggestion(
- self.token.span,
- "remove this comma",
- "",
- Applicability::MachineApplicable,
- );
- }
- }
- err
+ Some(SuggRemoveComma { span: self.token.span })
+ } else {
+ None
+ };
+
+ let err = ExpectedIdentifier {
+ span: self.token.span,
+ token: self.token.clone(),
+ suggest_raw,
+ suggest_remove_comma,
+ };
+ err.into_diagnostic(&self.sess.span_diagnostic)
}
pub(super) fn expected_one_of_not_found(
@@ -905,8 +396,8 @@ impl<'a> Parser<'a> {
expected.dedup();
let sm = self.sess.source_map();
- let msg = format!("expected `;`, found {}", super::token_descr(&self.token));
- let appl = Applicability::MachineApplicable;
+
+ // Special-case "expected `;`" errors
if expected.contains(&TokenType::Token(token::Semi)) {
if self.token.span == DUMMY_SP || self.prev_token.span == DUMMY_SP {
// Likely inside a macro, can't provide meaningful suggestions.
@@ -934,11 +425,13 @@ impl<'a> Parser<'a> {
//
// let x = 32:
// let y = 42;
+ self.sess.emit_err(ExpectedSemi {
+ span: self.token.span,
+ token: self.token.clone(),
+ unexpected_token_label: None,
+ sugg: ExpectedSemiSugg::ChangeToSemi(self.token.span),
+ });
self.bump();
- let sp = self.prev_token.span;
- self.struct_span_err(sp, &msg)
- .span_suggestion_short(sp, "change this to `;`", ";", appl)
- .emit();
return Ok(true);
} else if self.look_ahead(0, |t| {
t == &token::CloseDelim(Delimiter::Brace)
@@ -956,11 +449,13 @@ impl<'a> Parser<'a> {
//
// let x = 32
// let y = 42;
- let sp = self.prev_token.span.shrink_to_hi();
- self.struct_span_err(sp, &msg)
- .span_label(self.token.span, "unexpected token")
- .span_suggestion_short(sp, "add `;` here", ";", appl)
- .emit();
+ let span = self.prev_token.span.shrink_to_hi();
+ self.sess.emit_err(ExpectedSemi {
+ span,
+ token: self.token.clone(),
+ unexpected_token_label: Some(self.token.span),
+ sugg: ExpectedSemiSugg::AddSemi(span),
+ });
return Ok(true);
}
}
@@ -997,6 +492,7 @@ impl<'a> Parser<'a> {
)
};
self.last_unexpected_token_span = Some(self.token.span);
+ // FIXME: translation requires list formatting (for `expect`)
let mut err = self.struct_span_err(self.token.span, &msg_exp);
if let TokenKind::Ident(symbol, _) = &self.prev_token.kind {
@@ -1005,7 +501,7 @@ impl<'a> Parser<'a> {
self.prev_token.span,
&format!("write `fn` instead of `{symbol}` to declare a function"),
"fn",
- appl,
+ Applicability::MachineApplicable,
);
}
}
@@ -1019,7 +515,7 @@ impl<'a> Parser<'a> {
self.prev_token.span,
"write `pub` instead of `public` to make the item public",
"pub",
- appl,
+ Applicability::MachineApplicable,
);
}
@@ -1157,19 +653,13 @@ impl<'a> Parser<'a> {
// field: value,
// } }
err.delay_as_bug();
- self.struct_span_err(
- expr.span,
- fluent::parser::struct_literal_body_without_path,
- )
- .multipart_suggestion(
- fluent::parser::suggestion,
- vec![
- (expr.span.shrink_to_lo(), "{ SomeStruct ".to_string()),
- (expr.span.shrink_to_hi(), " }".to_string()),
- ],
- Applicability::MaybeIncorrect,
- )
- .emit();
+ self.sess.emit_err(StructLiteralBodyWithoutPath {
+ span: expr.span,
+ sugg: StructLiteralBodyWithoutPathSugg {
+ before: expr.span.shrink_to_lo(),
+ after: expr.span.shrink_to_hi(),
+ },
+ });
self.restore_snapshot(snapshot);
let mut tail = self.mk_block(
vec![self.mk_stmt_err(expr.span)],
@@ -1363,18 +853,8 @@ impl<'a> Parser<'a> {
self.eat_to_tokens(end);
let span = lo.until(self.token.span);
- let total_num_of_gt = number_of_gt + number_of_shr * 2;
- self.struct_span_err(
- span,
- &format!("unmatched angle bracket{}", pluralize!(total_num_of_gt)),
- )
- .span_suggestion(
- span,
- &format!("remove extra angle bracket{}", pluralize!(total_num_of_gt)),
- "",
- Applicability::MachineApplicable,
- )
- .emit();
+ let num_extra_brackets = number_of_gt + number_of_shr * 2;
+ self.sess.emit_err(UnmatchedAngleBrackets { span, num_extra_brackets });
return true;
}
false
@@ -1403,19 +883,13 @@ impl<'a> Parser<'a> {
let args = AngleBracketedArgs { args, span }.into();
segment.args = args;
- self.struct_span_err(
+ self.sess.emit_err(GenericParamsWithoutAngleBrackets {
span,
- "generic parameters without surrounding angle brackets",
- )
- .multipart_suggestion(
- "surround the type parameters with angle brackets",
- vec![
- (span.shrink_to_lo(), "<".to_string()),
- (trailing_span, ">".to_string()),
- ],
- Applicability::MachineApplicable,
- )
- .emit();
+ sugg: GenericParamsWithoutAngleBracketsSugg {
+ left: span.shrink_to_lo(),
+ right: trailing_span,
+ },
+ });
} else {
// This doesn't look like an invalid turbofish, can't recover parse state.
self.restore_snapshot(snapshot);
@@ -1452,7 +926,7 @@ impl<'a> Parser<'a> {
if self.eat(&token::Gt) {
e.span_suggestion_verbose(
binop.span.shrink_to_lo(),
- TURBOFISH_SUGGESTION_STR,
+ fluent::parser_sugg_turbofish_syntax,
"::",
Applicability::MaybeIncorrect,
)
@@ -1484,7 +958,7 @@ impl<'a> Parser<'a> {
/// parenthesising the leftmost comparison.
fn attempt_chained_comparison_suggestion(
&mut self,
- err: &mut Diagnostic,
+ err: &mut ComparisonOperatorsCannotBeChained,
inner_op: &Expr,
outer_op: &Spanned<AssocOp>,
) -> bool /* advanced the cursor */ {
@@ -1497,16 +971,6 @@ impl<'a> Parser<'a> {
// suggestion being the only one to apply is high.
return false;
}
- let mut enclose = |left: Span, right: Span| {
- err.multipart_suggestion(
- "parenthesize the comparison",
- vec![
- (left.shrink_to_lo(), "(".to_string()),
- (right.shrink_to_hi(), ")".to_string()),
- ],
- Applicability::MaybeIncorrect,
- );
- };
return match (op.node, &outer_op.node) {
// `x == y == z`
(BinOpKind::Eq, AssocOp::Equal) |
@@ -1520,12 +984,10 @@ impl<'a> Parser<'a> {
self.span_to_snippet(e.span)
.unwrap_or_else(|_| pprust::expr_to_string(&e))
};
- err.span_suggestion_verbose(
- inner_op.span.shrink_to_hi(),
- "split the comparison into two",
- format!(" && {}", expr_to_str(&r1)),
- Applicability::MaybeIncorrect,
- );
+ err.chaining_sugg = Some(ComparisonOperatorsCannotBeChainedSugg::SplitComparison {
+ span: inner_op.span.shrink_to_hi(),
+ middle_term: expr_to_str(&r1),
+ });
false // Keep the current parse behavior, where the AST is `(x < y) < z`.
}
// `x == y < z`
@@ -1536,7 +998,10 @@ impl<'a> Parser<'a> {
Ok(r2) => {
// We are sure that outer-op-rhs could be consumed, the suggestion is
// likely correct.
- enclose(r1.span, r2.span);
+ err.chaining_sugg = Some(ComparisonOperatorsCannotBeChainedSugg::Parenthesize {
+ left: r1.span.shrink_to_lo(),
+ right: r2.span.shrink_to_hi(),
+ });
true
}
Err(expr_err) => {
@@ -1553,7 +1018,10 @@ impl<'a> Parser<'a> {
// further checks are necessary.
match self.parse_expr() {
Ok(_) => {
- enclose(l1.span, r1.span);
+ err.chaining_sugg = Some(ComparisonOperatorsCannotBeChainedSugg::Parenthesize {
+ left: l1.span.shrink_to_lo(),
+ right: r1.span.shrink_to_hi(),
+ });
true
}
Err(expr_err) => {
@@ -1602,18 +1070,11 @@ impl<'a> Parser<'a> {
match inner_op.kind {
ExprKind::Binary(op, ref l1, ref r1) if op.node.is_comparison() => {
- let mut err = self.struct_span_err(
- vec![op.span, self.prev_token.span],
- "comparison operators cannot be chained",
- );
-
- let suggest = |err: &mut Diagnostic| {
- err.span_suggestion_verbose(
- op.span.shrink_to_lo(),
- TURBOFISH_SUGGESTION_STR,
- "::",
- Applicability::MaybeIncorrect,
- );
+ let mut err = ComparisonOperatorsCannotBeChained {
+ span: vec![op.span, self.prev_token.span],
+ suggest_turbofish: None,
+ help_turbofish: None,
+ chaining_sugg: None,
};
// Include `<` to provide this recommendation even in a case like
@@ -1640,7 +1101,7 @@ impl<'a> Parser<'a> {
return if token::ModSep == self.token.kind {
// We have some certainty that this was a bad turbofish at this point.
// `foo< bar >::`
- suggest(&mut err);
+ err.suggest_turbofish = Some(op.span.shrink_to_lo());
let snapshot = self.create_snapshot_for_diagnostic();
self.bump(); // `::`
@@ -1649,7 +1110,7 @@ impl<'a> Parser<'a> {
match self.parse_expr() {
Ok(_) => {
// 99% certain that the suggestion is correct, continue parsing.
- err.emit();
+ self.sess.emit_err(err);
// FIXME: actually check that the two expressions in the binop are
// paths and resynthesize new fn call expression instead of using
// `ExprKind::Err` placeholder.
@@ -1660,18 +1121,18 @@ impl<'a> Parser<'a> {
// Not entirely sure now, but we bubble the error up with the
// suggestion.
self.restore_snapshot(snapshot);
- Err(err)
+ Err(err.into_diagnostic(&self.sess.span_diagnostic))
}
}
} else if token::OpenDelim(Delimiter::Parenthesis) == self.token.kind {
// We have high certainty that this was a bad turbofish at this point.
// `foo< bar >(`
- suggest(&mut err);
+ err.suggest_turbofish = Some(op.span.shrink_to_lo());
// Consume the fn call arguments.
match self.consume_fn_args() {
- Err(()) => Err(err),
+ Err(()) => Err(err.into_diagnostic(&self.sess.span_diagnostic)),
Ok(()) => {
- err.emit();
+ self.sess.emit_err(err);
// FIXME: actually check that the two expressions in the binop are
// paths and resynthesize new fn call expression instead of using
// `ExprKind::Err` placeholder.
@@ -1684,25 +1145,24 @@ impl<'a> Parser<'a> {
{
// All we know is that this is `foo < bar >` and *nothing* else. Try to
// be helpful, but don't attempt to recover.
- err.help(TURBOFISH_SUGGESTION_STR);
- err.help("or use `(...)` if you meant to specify fn arguments");
+ err.help_turbofish = Some(());
}
// If it looks like a genuine attempt to chain operators (as opposed to a
// misformatted turbofish, for instance), suggest a correct form.
if self.attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op)
{
- err.emit();
+ self.sess.emit_err(err);
mk_err_expr(self, inner_op.span.to(self.prev_token.span))
} else {
// These cases cause too many knock-down errors, bail out (#61329).
- Err(err)
+ Err(err.into_diagnostic(&self.sess.span_diagnostic))
}
};
}
let recover =
self.attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op);
- err.emit();
+ self.sess.emit_err(err);
if recover {
return mk_err_expr(self, inner_op.span.to(self.prev_token.span));
}
@@ -1743,17 +1203,13 @@ impl<'a> Parser<'a> {
pub(super) fn maybe_recover_from_question_mark(&mut self, ty: P<Ty>) -> P<Ty> {
if self.token == token::Question {
self.bump();
- self.struct_span_err(self.prev_token.span, "invalid `?` in type")
- .span_label(self.prev_token.span, "`?` is only allowed on expressions, not types")
- .multipart_suggestion(
- "if you meant to express that the type might not contain a value, use the `Option` wrapper type",
- vec![
- (ty.span.shrink_to_lo(), "Option<".to_string()),
- (self.prev_token.span, ">".to_string()),
- ],
- Applicability::MachineApplicable,
- )
- .emit();
+ self.sess.emit_err(QuestionMarkInType {
+ span: self.prev_token.span,
+ sugg: QuestionMarkInTypeSugg {
+ left: ty.span.shrink_to_lo(),
+ right: self.prev_token.span,
+ },
+ });
self.mk_ty(ty.span.to(self.prev_token.span), TyKind::Err)
} else {
ty
@@ -1918,9 +1374,17 @@ impl<'a> Parser<'a> {
kind: IncDecRecovery,
(pre_span, post_span): (Span, Span),
) -> MultiSugg {
+ let mut patches = Vec::new();
+
+ if !pre_span.is_empty() {
+ patches.push((pre_span, String::new()));
+ }
+
+ patches.push((post_span, format!(" {}= 1", kind.op.chr())));
+
MultiSugg {
msg: format!("use `{}= 1` instead", kind.op.chr()),
- patches: vec![(pre_span, String::new()), (post_span, format!(" {}= 1", kind.op.chr()))],
+ patches,
applicability: Applicability::MachineApplicable,
}
}
@@ -2005,7 +1469,7 @@ impl<'a> Parser<'a> {
let (prev_sp, sp) = match (&self.token.kind, self.subparser_name) {
// Point at the end of the macro call when reaching end of macro arguments.
(token::Eof, Some(_)) => {
- let sp = self.sess.source_map().next_point(self.prev_token.span);
+ let sp = self.prev_token.span.shrink_to_hi();
(sp, sp)
}
// We don't want to point at the following span after DUMMY_SP.
@@ -2177,19 +1641,16 @@ impl<'a> Parser<'a> {
(token::CloseDelim(Delimiter::Parenthesis), Some(begin_par_sp)) => {
self.bump();
- self.struct_span_err(
- MultiSpan::from_spans(vec![begin_par_sp, self.prev_token.span]),
- "unexpected parentheses surrounding `for` loop head",
- )
- .multipart_suggestion(
- "remove parentheses in `for` loop",
- vec![(begin_par_sp, String::new()), (self.prev_token.span, String::new())],
+ self.sess.emit_err(ParenthesesInForHead {
+ span: vec![begin_par_sp, self.prev_token.span],
// With e.g. `for (x) in y)` this would replace `(x) in y)`
// with `x) in y)` which is syntactically invalid.
// However, this is prevented before we get here.
- Applicability::MachineApplicable,
- )
- .emit();
+ sugg: ParenthesesInForHeadSugg {
+ left: begin_par_sp,
+ right: self.prev_token.span,
+ },
+ });
// Unwrap `(pat)` into `pat` to avoid the `unused_parens` lint.
pat.and_then(|pat| match pat.kind {
@@ -2408,12 +1869,7 @@ impl<'a> Parser<'a> {
pub(super) fn eat_incorrect_doc_comment_for_param_type(&mut self) {
if let token::DocComment(..) = self.token.kind {
- self.struct_span_err(
- self.token.span,
- "documentation comments cannot be applied to a function parameter's type",
- )
- .span_label(self.token.span, "doc comments are not allowed here")
- .emit();
+ self.sess.emit_err(DocCommentOnParamType { span: self.token.span });
self.bump();
} else if self.token == token::Pound
&& self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Bracket))
@@ -2425,9 +1881,7 @@ impl<'a> Parser<'a> {
}
let sp = lo.to(self.token.span);
self.bump();
- self.struct_span_err(sp, "attributes cannot be applied to a function parameter's type")
- .span_label(sp, "attributes are not allowed here")
- .emit();
+ self.sess.emit_err(AttributeOnParamType { span: sp });
}
}
@@ -2548,19 +2002,7 @@ impl<'a> Parser<'a> {
self.expect(&token::Colon)?;
let ty = self.parse_ty()?;
- struct_span_err!(
- self.diagnostic(),
- pat.span,
- E0642,
- "patterns aren't allowed in methods without bodies",
- )
- .span_suggestion_short(
- pat.span,
- "give this argument a name or use an underscore to ignore it",
- "_",
- Applicability::MachineApplicable,
- )
- .emit();
+ self.sess.emit_err(PatternMethodParamWithoutBody { span: pat.span });
// Pretend the pattern is `_`, to avoid duplicate errors from AST validation.
let pat =
@@ -2569,11 +2011,9 @@ impl<'a> Parser<'a> {
}
pub(super) fn recover_bad_self_param(&mut self, mut param: Param) -> PResult<'a, Param> {
- let sp = param.pat.span;
+ let span = param.pat.span;
param.ty.kind = TyKind::Err;
- self.struct_span_err(sp, "unexpected `self` parameter in function")
- .span_label(sp, "must be the first parameter of an associated function")
- .emit();
+ self.sess.emit_err(SelfParamNotFirst { span });
Ok(param)
}
@@ -2607,7 +2047,7 @@ impl<'a> Parser<'a> {
pub(super) fn expected_expression_found(&self) -> DiagnosticBuilder<'a, ErrorGuaranteed> {
let (span, msg) = match (&self.token.kind, self.subparser_name) {
(&token::Eof, Some(origin)) => {
- let sp = self.sess.source_map().next_point(self.prev_token.span);
+ let sp = self.prev_token.span.shrink_to_hi();
(sp, format!("expected expression, found end of {origin}"))
}
_ => (
@@ -2618,7 +2058,7 @@ impl<'a> Parser<'a> {
let mut err = self.struct_span_err(span, &msg);
let sp = self.sess.source_map().start_point(self.token.span);
if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow().get(&sp) {
- self.sess.expr_parentheses_needed(&mut err, *sp);
+ err.subdiagnostic(ExprParenthesesNeeded::surrounding(*sp));
}
err.span_label(span, "expected expression");
err
@@ -2722,20 +2162,13 @@ impl<'a> Parser<'a> {
err
})?;
if !self.expr_is_valid_const_arg(&expr) {
- self.struct_span_err(
- expr.span,
- "expressions must be enclosed in braces to be used as const generic \
- arguments",
- )
- .multipart_suggestion(
- "enclose the `const` expression in braces",
- vec![
- (expr.span.shrink_to_lo(), "{ ".to_string()),
- (expr.span.shrink_to_hi(), " }".to_string()),
- ],
- Applicability::MachineApplicable,
- )
- .emit();
+ self.sess.emit_err(ConstGenericWithoutBraces {
+ span: expr.span,
+ sugg: ConstGenericWithoutBracesSugg {
+ left: expr.span.shrink_to_lo(),
+ right: expr.span.shrink_to_hi(),
+ },
+ });
}
Ok(expr)
}
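
A minimal sketch of the const-generic case handled here (the function name is illustrative): an unbraced expression in const-argument position is rejected with `ConstGenericWithoutBraces`, and the suggestion wraps it in braces:

    fn take<const N: usize>() {}

    fn main() {
        // take::<2 + 3>();     // error: expressions must be enclosed in braces
        take::<{ 2 + 3 }>();    // the braced form the suggestion produces
    }
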
@@ -2750,24 +2183,30 @@ impl<'a> Parser<'a> {
return None;
}
};
- let mut err =
- self.struct_span_err(param.span(), "unexpected `const` parameter declaration");
- err.span_label(param.span(), "expected a `const` expression, not a parameter declaration");
- if let (Some(generics), Ok(snippet)) =
- (ty_generics, self.sess.source_map().span_to_snippet(param.span()))
- {
- let (span, sugg) = match &generics.params[..] {
- [] => (generics.span, format!("<{snippet}>")),
- [.., generic] => (generic.span().shrink_to_hi(), format!(", {snippet}")),
- };
- err.multipart_suggestion(
- "`const` parameters must be declared for the `impl`",
- vec![(span, sugg), (param.span(), param.ident.to_string())],
- Applicability::MachineApplicable,
- );
- }
+
+ let ident = param.ident.to_string();
+ let sugg = match (ty_generics, self.sess.source_map().span_to_snippet(param.span())) {
+ (Some(Generics { params, span: impl_generics, .. }), Ok(snippet)) => {
+ Some(match &params[..] {
+ [] => UnexpectedConstParamDeclarationSugg::AddParam {
+ impl_generics: *impl_generics,
+ incorrect_decl: param.span(),
+ snippet,
+ ident,
+ },
+ [.., generic] => UnexpectedConstParamDeclarationSugg::AppendParam {
+ impl_generics_end: generic.span().shrink_to_hi(),
+ incorrect_decl: param.span(),
+ snippet,
+ ident,
+ },
+ })
+ }
+ _ => None,
+ };
+ self.sess.emit_err(UnexpectedConstParamDeclaration { span: param.span(), sugg });
+
let value = self.mk_expr_err(param.span());
- err.emit();
Some(GenericArg::Const(AnonConst { id: ast::DUMMY_NODE_ID, value }))
}
@@ -2785,20 +2224,15 @@ impl<'a> Parser<'a> {
self.bump(); // `const`
// Detect and recover from the old, pre-RFC2000 syntax for const generics.
- let mut err = self
- .struct_span_err(start, "expected lifetime, type, or constant, found keyword `const`");
+ let mut err = UnexpectedConstInGenericParam { span: start, to_remove: None };
if self.check_const_arg() {
- err.span_suggestion_verbose(
- start.until(self.token.span),
- "the `const` keyword is only needed in the definition of the type",
- "",
- Applicability::MaybeIncorrect,
- );
- err.emit();
+ err.to_remove = Some(start.until(self.token.span));
+ self.sess.emit_err(err);
Ok(Some(GenericArg::Const(self.parse_const_arg()?)))
} else {
let after_kw_const = self.token.span;
- self.recover_const_arg(after_kw_const, err).map(Some)
+ self.recover_const_arg(after_kw_const, err.into_diagnostic(&self.sess.span_diagnostic))
+ .map(Some)
}
}
@@ -2806,7 +2240,7 @@ impl<'a> Parser<'a> {
///
/// When encountering code like `foo::< bar + 3 >` or `foo::< bar - baz >` we suggest
/// `foo::<{ bar + 3 }>` and `foo::<{ bar - baz }>`, respectively. We only provide a suggestion
- /// if we think that that the resulting expression would be well formed.
+ /// if we think that the resulting expression would be well formed.
pub fn recover_const_arg(
&mut self,
start: Span,
@@ -2904,24 +2338,6 @@ impl<'a> Parser<'a> {
GenericArg::Const(AnonConst { id: ast::DUMMY_NODE_ID, value })
}
- /// Get the diagnostics for the cases where `move async` is found.
- ///
- /// `move_async_span` starts at the 'm' of the move keyword and ends with the 'c' of the async keyword
- pub(super) fn incorrect_move_async_order_found(
- &self,
- move_async_span: Span,
- ) -> DiagnosticBuilder<'a, ErrorGuaranteed> {
- let mut err =
- self.struct_span_err(move_async_span, "the order of `move` and `async` is incorrect");
- err.span_suggestion_verbose(
- move_async_span,
- "try switching the order",
- "async move",
- Applicability::MaybeIncorrect,
- );
- err
- }
-
/// Some special error handling for the "top-level" patterns in a match arm,
/// `for` loop, `let`, &c. (in contrast to subpatterns within such).
pub(crate) fn maybe_recover_colon_colon_in_pat_typo(
@@ -3040,11 +2456,15 @@ impl<'a> Parser<'a> {
}
pub(crate) fn maybe_recover_unexpected_block_label(&mut self) -> bool {
- let Some(label) = self.eat_label().filter(|_| {
- self.eat(&token::Colon) && self.token.kind == token::OpenDelim(Delimiter::Brace)
- }) else {
+ // Check for `'a : {`
+ if !(self.check_lifetime()
+ && self.look_ahead(1, |tok| tok.kind == token::Colon)
+ && self.look_ahead(2, |tok| tok.kind == token::OpenDelim(Delimiter::Brace)))
+ {
return false;
- };
+ }
+ let label = self.eat_label().expect("just checked if a label exists");
+ self.bump(); // eat `:`
let span = label.ident.span.to(self.prev_token.span);
let mut err = self.struct_span_err(span, "block label not supported here");
err.span_label(span, "not supported here");
@@ -3117,17 +2537,11 @@ impl<'a> Parser<'a> {
let (a_span, b_span) = (a.span(), b.span());
let between_span = a_span.shrink_to_hi().to(b_span.shrink_to_lo());
if self.span_to_snippet(between_span).as_ref().map(|a| &a[..]) == Ok(":: ") {
- let mut err = self.struct_span_err(
- path.span.shrink_to_hi(),
- "expected `:` followed by trait or lifetime",
- );
- err.span_suggestion(
- between_span,
- "use single colon",
- ": ",
- Applicability::MachineApplicable,
- );
- return Err(err);
+ return Err(DoubleColonInBound {
+ span: path.span.shrink_to_hi(),
+ between: between_span,
+ }
+ .into_diagnostic(&self.sess.span_diagnostic));
}
}
}
diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs
index 725768c1f..a781748ef 100644
--- a/compiler/rustc_parse/src/parser/expr.rs
+++ b/compiler/rustc_parse/src/parser/expr.rs
@@ -1,26 +1,33 @@
-use super::diagnostics::{
- CatchAfterTry, CommaAfterBaseStruct, DoCatchSyntaxRemoved, DotDotDot, EqFieldInit,
- ExpectedElseBlock, ExpectedExpressionFoundLet, FieldExpressionWithGeneric,
- FloatLiteralRequiresIntegerPart, IfExpressionMissingCondition, IfExpressionMissingThenBlock,
- IfExpressionMissingThenBlockSub, InvalidBlockMacroSegment, InvalidComparisonOperator,
- InvalidComparisonOperatorSub, InvalidLogicalOperator, InvalidLogicalOperatorSub,
- LeftArrowOperator, LifetimeInBorrowExpression, MacroInvocationWithQualifiedPath,
- MalformedLoopLabel, MissingInInForLoop, MissingInInForLoopSub, MissingSemicolonBeforeArray,
- NotAsNegationOperator, OuterAttributeNotAllowedOnIfElse, RequireColonAfterLabeledExpression,
- SnapshotParser, TildeAsUnaryOperator, UnexpectedTokenAfterLabel,
-};
+use super::diagnostics::SnapshotParser;
use super::pat::{CommaRecoveryMode, RecoverColon, RecoverComma, PARAM_EXPECTED};
use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
use super::{
AttrWrapper, BlockMode, ClosureSpans, ForceCollect, Parser, PathStyle, Restrictions,
SemiColonMode, SeqSep, TokenExpectType, TokenType, TrailingToken,
};
-use crate::maybe_recover_from_interpolated_ty_qpath;
-use crate::parser::diagnostics::{
- IntLiteralTooLarge, InvalidFloatLiteralSuffix, InvalidFloatLiteralWidth,
- InvalidIntLiteralWidth, InvalidNumLiteralBasePrefix, InvalidNumLiteralSuffix,
- MissingCommaAfterMatchArm,
+use crate::errors::{
+ ArrayBracketsInsteadOfSpaces, ArrayBracketsInsteadOfSpacesSugg, AsyncMoveOrderIncorrect,
+ BinaryFloatLiteralNotSupported, BracesForStructLiteral, CatchAfterTry, CommaAfterBaseStruct,
+ ComparisonInterpretedAsGeneric, ComparisonOrShiftInterpretedAsGenericSugg,
+ DoCatchSyntaxRemoved, DotDotDot, EqFieldInit, ExpectedElseBlock, ExpectedExpressionFoundLet,
+ FieldExpressionWithGeneric, FloatLiteralRequiresIntegerPart, FoundExprWouldBeStmt,
+ HexadecimalFloatLiteralNotSupported, IfExpressionMissingCondition,
+ IfExpressionMissingThenBlock, IfExpressionMissingThenBlockSub, IntLiteralTooLarge,
+ InvalidBlockMacroSegment, InvalidComparisonOperator, InvalidComparisonOperatorSub,
+ InvalidFloatLiteralSuffix, InvalidFloatLiteralWidth, InvalidIntLiteralWidth,
+ InvalidInterpolatedExpression, InvalidLiteralSuffix, InvalidLiteralSuffixOnTupleIndex,
+ InvalidLogicalOperator, InvalidLogicalOperatorSub, InvalidNumLiteralBasePrefix,
+ InvalidNumLiteralSuffix, LabeledLoopInBreak, LeadingPlusNotSupported, LeftArrowOperator,
+ LifetimeInBorrowExpression, MacroInvocationWithQualifiedPath, MalformedLoopLabel,
+ MatchArmBodyWithoutBraces, MatchArmBodyWithoutBracesSugg, MissingCommaAfterMatchArm,
+ MissingInInForLoop, MissingInInForLoopSub, MissingSemicolonBeforeArray, NoFieldsForFnCall,
+ NotAsNegationOperator, NotAsNegationOperatorSub, OctalFloatLiteralNotSupported,
+ OuterAttributeNotAllowedOnIfElse, ParenthesesWithStructFields,
+ RequireColonAfterLabeledExpression, ShiftInterpretedAsGeneric, StructLiteralNotAllowedHere,
+ StructLiteralNotAllowedHereSugg, TildeAsUnaryOperator, UnexpectedTokenAfterLabel,
+ UnexpectedTokenAfterLabelSugg, WrapExpressionInParentheses,
};
+use crate::maybe_recover_from_interpolated_ty_qpath;
use core::mem;
use rustc_ast::ptr::P;
@@ -35,10 +42,13 @@ use rustc_ast::{AnonConst, BinOp, BinOpKind, FnDecl, FnRetTy, MacCall, Param, Ty
use rustc_ast::{Arm, Async, BlockCheckMode, Expr, ExprKind, Label, Movability, RangeLimits};
use rustc_ast::{ClosureBinder, StmtKind};
use rustc_ast_pretty::pprust;
-use rustc_errors::{Applicability, Diagnostic, PResult};
+use rustc_errors::{
+ Applicability, Diagnostic, DiagnosticBuilder, ErrorGuaranteed, IntoDiagnostic, PResult,
+ StashKey,
+};
+use rustc_session::errors::ExprParenthesesNeeded;
use rustc_session::lint::builtin::BREAK_WITH_LABEL_AND_LOOP;
use rustc_session::lint::BuiltinLintDiagnostics;
-use rustc_session::SessionDiagnostic;
use rustc_span::source_map::{self, Span, Spanned};
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use rustc_span::{BytePos, Pos};
@@ -420,13 +430,11 @@ impl<'a> Parser<'a> {
/// but the next token implies this should be parsed as an expression.
/// For example: `if let Some(x) = x { x } else { 0 } / 2`.
fn error_found_expr_would_be_stmt(&self, lhs: &Expr) {
- let mut err = self.struct_span_err(
- self.token.span,
- &format!("expected expression, found `{}`", pprust::token_to_string(&self.token),),
- );
- err.span_label(self.token.span, "expected expression");
- self.sess.expr_parentheses_needed(&mut err, lhs.span);
- err.emit();
+ self.sess.emit_err(FoundExprWouldBeStmt {
+ span: self.token.span,
+ token: self.token.clone(),
+ suggestion: ExprParenthesesNeeded::surrounding(lhs.span),
+ });
}
/// Possibly translate the current token to an associative operator.
@@ -577,21 +585,16 @@ impl<'a> Parser<'a> {
make_it!(this, attrs, |this, _| this.parse_borrow_expr(lo))
}
token::BinOp(token::Plus) if this.look_ahead(1, |tok| tok.is_numeric_lit()) => {
- let mut err = this.struct_span_err(lo, "leading `+` is not supported");
- err.span_label(lo, "unexpected `+`");
+ let mut err =
+ LeadingPlusNotSupported { span: lo, remove_plus: None, add_parentheses: None };
// a block on the LHS might have been intended to be an expression instead
if let Some(sp) = this.sess.ambiguous_block_expr_parse.borrow().get(&lo) {
- this.sess.expr_parentheses_needed(&mut err, *sp);
+ err.add_parentheses = Some(ExprParenthesesNeeded::surrounding(*sp));
} else {
- err.span_suggestion_verbose(
- lo,
- "try removing the `+`",
- "",
- Applicability::MachineApplicable,
- );
+ err.remove_plus = Some(lo);
}
- err.emit();
+ this.sess.emit_err(err);
this.bump();
this.parse_prefix_expr(None)
@@ -660,12 +663,23 @@ impl<'a> Parser<'a> {
fn recover_not_expr(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> {
// Emit the error...
let negated_token = self.look_ahead(1, |t| t.clone());
+
+ let sub_diag = if negated_token.is_numeric_lit() {
+ NotAsNegationOperatorSub::SuggestNotBitwise
+ } else if negated_token.is_bool_lit() {
+ NotAsNegationOperatorSub::SuggestNotLogical
+ } else {
+ NotAsNegationOperatorSub::SuggestNotDefault
+ };
+
self.sess.emit_err(NotAsNegationOperator {
negated: negated_token.span,
negated_desc: super::token_descr(&negated_token),
// Span the `not` plus trailing whitespace to avoid
// trailing whitespace after the `!` in our suggestion
- not: self.sess.source_map().span_until_non_whitespace(lo.to(negated_token.span)),
+ sub: sub_diag(
+ self.sess.source_map().span_until_non_whitespace(lo.to(negated_token.span)),
+ ),
});
// ...and recover!
@@ -743,9 +757,34 @@ impl<'a> Parser<'a> {
match self.parse_path(PathStyle::Expr) {
Ok(path) => {
- let (op_noun, op_verb) = match self.token.kind {
- token::Lt => ("comparison", "comparing"),
- token::BinOp(token::Shl) => ("shift", "shifting"),
+ let span_after_type = parser_snapshot_after_type.token.span;
+ let expr = mk_expr(
+ self,
+ lhs,
+ self.mk_ty(path.span, TyKind::Path(None, path.clone())),
+ );
+
+ let args_span = self.look_ahead(1, |t| t.span).to(span_after_type);
+ let suggestion = ComparisonOrShiftInterpretedAsGenericSugg {
+ left: expr.span.shrink_to_lo(),
+ right: expr.span.shrink_to_hi(),
+ };
+
+ match self.token.kind {
+ token::Lt => self.sess.emit_err(ComparisonInterpretedAsGeneric {
+ comparison: self.token.span,
+ r#type: path,
+ args: args_span,
+ suggestion,
+ }),
+ token::BinOp(token::Shl) => {
+ self.sess.emit_err(ShiftInterpretedAsGeneric {
+ shift: self.token.span,
+ r#type: path,
+ args: args_span,
+ suggestion,
+ })
+ }
_ => {
// We can end up here even without `<` being the next token, for
// example because `parse_ty_no_plus` returns `Err` on keywords,
@@ -759,33 +798,7 @@ impl<'a> Parser<'a> {
// Successfully parsed the type path leaving a `<` yet to parse.
type_err.cancel();
- // Report non-fatal diagnostics, keep `x as usize` as an expression
- // in AST and continue parsing.
- let msg = format!(
- "`<` is interpreted as a start of generic arguments for `{}`, not a {}",
- pprust::path_to_string(&path),
- op_noun,
- );
- let span_after_type = parser_snapshot_after_type.token.span;
- let expr =
- mk_expr(self, lhs, self.mk_ty(path.span, TyKind::Path(None, path)));
-
- self.struct_span_err(self.token.span, &msg)
- .span_label(
- self.look_ahead(1, |t| t.span).to(span_after_type),
- "interpreted as generic arguments",
- )
- .span_label(self.token.span, format!("not interpreted as {op_noun}"))
- .multipart_suggestion(
- &format!("try {op_verb} the cast value"),
- vec![
- (expr.span.shrink_to_lo(), "(".to_string()),
- (expr.span.shrink_to_hi(), ")".to_string()),
- ],
- Applicability::MachineApplicable,
- )
- .emit();
-
+ // Keep `x as usize` as an expression in AST and continue parsing.
expr
}
Err(path_err) => {
@@ -832,7 +845,7 @@ impl<'a> Parser<'a> {
ExprKind::Index(_, _) => "indexing",
ExprKind::Try(_) => "`?`",
ExprKind::Field(_, _) => "a field access",
- ExprKind::MethodCall(_, _, _) => "a method call",
+ ExprKind::MethodCall(_, _, _, _) => "a method call",
ExprKind::Call(_, _) => "a function call",
ExprKind::Await(_) => "`.await`",
ExprKind::Err => return Ok(with_postfix),
@@ -1146,7 +1159,9 @@ impl<'a> Parser<'a> {
}
let span = self.prev_token.span;
let field = ExprKind::Field(base, Ident::new(field, span));
- self.expect_no_suffix(span, "a tuple index", suffix);
+ if let Some(suffix) = suffix {
+ self.expect_no_tuple_index_suffix(span, suffix);
+ }
self.mk_expr(lo.to(span), field)
}
@@ -1184,9 +1199,8 @@ impl<'a> Parser<'a> {
) -> Option<P<Expr>> {
match (seq.as_mut(), snapshot) {
(Err(err), Some((mut snapshot, ExprKind::Path(None, path)))) => {
- let name = pprust::path_to_string(&path);
snapshot.bump(); // `(`
- match snapshot.parse_struct_fields(path, false, Delimiter::Parenthesis) {
+ match snapshot.parse_struct_fields(path.clone(), false, Delimiter::Parenthesis) {
Ok((fields, ..))
if snapshot.eat(&token::CloseDelim(Delimiter::Parenthesis)) =>
{
@@ -1196,29 +1210,25 @@ impl<'a> Parser<'a> {
let close_paren = self.prev_token.span;
let span = lo.to(self.prev_token.span);
if !fields.is_empty() {
- let replacement_err = self.struct_span_err(
+ let mut replacement_err = ParenthesesWithStructFields {
span,
- "invalid `struct` delimiters or `fn` call arguments",
- );
- mem::replace(err, replacement_err).cancel();
-
- err.multipart_suggestion(
- &format!("if `{name}` is a struct, use braces as delimiters"),
- vec![
- (open_paren, " { ".to_string()),
- (close_paren, " }".to_string()),
- ],
- Applicability::MaybeIncorrect,
- );
- err.multipart_suggestion(
- &format!("if `{name}` is a function, use the arguments directly"),
- fields
- .into_iter()
- .map(|field| (field.span.until(field.expr.span), String::new()))
- .collect(),
- Applicability::MaybeIncorrect,
- );
- err.emit();
+ r#type: path,
+ braces_for_struct: BracesForStructLiteral {
+ first: open_paren,
+ second: close_paren,
+ },
+ no_fields_for_fn: NoFieldsForFnCall {
+ fields: fields
+ .into_iter()
+ .map(|field| field.span.until(field.expr.span))
+ .collect(),
+ },
+ }
+ .into_diagnostic(&self.sess.span_diagnostic);
+ replacement_err.emit();
+
+ let old_err = mem::replace(err, replacement_err);
+ old_err.cancel();
} else {
err.emit();
}
@@ -1259,12 +1269,10 @@ impl<'a> Parser<'a> {
if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
// Method call `expr.f()`
- let mut args = self.parse_paren_expr_seq()?;
- args.insert(0, self_arg);
-
+ let args = self.parse_paren_expr_seq()?;
let fn_span = fn_span_lo.to(self.prev_token.span);
let span = lo.to(self.prev_token.span);
- Ok(self.mk_expr(span, ExprKind::MethodCall(segment, args, fn_span)))
+ Ok(self.mk_expr(span, ExprKind::MethodCall(segment, self_arg, args, fn_span)))
} else {
// Field access `expr.f`
if let Some(args) = segment.args {
@@ -1304,7 +1312,7 @@ impl<'a> Parser<'a> {
// If the input is something like `if a { 1 } else { 2 } | if a { 3 } else { 4 }`
// then suggest parens around the lhs.
if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow().get(&lo) {
- self.sess.expr_parentheses_needed(&mut err, *sp);
+ err.subdiagnostic(ExprParenthesesNeeded::surrounding(*sp));
}
err
})
@@ -1507,11 +1515,11 @@ impl<'a> Parser<'a> {
/// Parse `'label: $expr`. The label is already parsed.
fn parse_labeled_expr(
&mut self,
- label: Label,
+ label_: Label,
mut consume_colon: bool,
) -> PResult<'a, P<Expr>> {
- let lo = label.ident.span;
- let label = Some(label);
+ let lo = label_.ident.span;
+ let label = Some(label_);
let ate_colon = self.eat(&token::Colon);
let expr = if self.eat_keyword(kw::While) {
self.parse_while_expr(label, lo)
@@ -1524,18 +1532,35 @@ impl<'a> Parser<'a> {
{
self.parse_block_expr(label, lo, BlockCheckMode::Default)
} else if !ate_colon
+ && (matches!(self.token.kind, token::CloseDelim(_) | token::Comma)
+ || self.token.is_op())
+ {
+ let lit = self.recover_unclosed_char(label_.ident, |self_| {
+ self_.sess.create_err(UnexpectedTokenAfterLabel {
+ span: self_.token.span,
+ remove_label: None,
+ enclose_in_block: None,
+ })
+ });
+ consume_colon = false;
+ Ok(self.mk_expr(lo, ExprKind::Lit(lit)))
+ } else if !ate_colon
&& (self.check_noexpect(&TokenKind::Comma) || self.check_noexpect(&TokenKind::Gt))
{
// We're probably inside of a `Path<'a>` that needs a turbofish
- self.sess.emit_err(UnexpectedTokenAfterLabel(self.token.span));
+ self.sess.emit_err(UnexpectedTokenAfterLabel {
+ span: self.token.span,
+ remove_label: None,
+ enclose_in_block: None,
+ });
consume_colon = false;
Ok(self.mk_expr_err(lo))
} else {
- // FIXME: use UnexpectedTokenAfterLabel, needs multipart suggestions
- let msg = "expected `while`, `for`, `loop` or `{` after a label";
-
- let mut err = self.struct_span_err(self.token.span, msg);
- err.span_label(self.token.span, msg);
+ let mut err = UnexpectedTokenAfterLabel {
+ span: self.token.span,
+ remove_label: None,
+ enclose_in_block: None,
+ };
// Continue as an expression in an effort to recover on `'label: non_block_expr`.
let expr = self.parse_expr().map(|expr| {
@@ -1562,28 +1587,15 @@ impl<'a> Parser<'a> {
// If there are no breaks that may use this label, suggest removing the label and
// recover to the unmodified expression.
if !found_labeled_breaks {
- let msg = "consider removing the label";
- err.span_suggestion_verbose(
- lo.until(span),
- msg,
- "",
- Applicability::MachineApplicable,
- );
+ err.remove_label = Some(lo.until(span));
return expr;
}
- let sugg_msg = "consider enclosing expression in a block";
- let suggestions = vec![
- (span.shrink_to_lo(), "{ ".to_owned()),
- (span.shrink_to_hi(), " }".to_owned()),
- ];
-
- err.multipart_suggestion_verbose(
- sugg_msg,
- suggestions,
- Applicability::MachineApplicable,
- );
+ err.enclose_in_block = Some(UnexpectedTokenAfterLabelSugg {
+ left: span.shrink_to_lo(),
+ right: span.shrink_to_hi(),
+ });
// Replace `'label: non_block_expr` with `'label: {non_block_expr}` in order to suppress future errors about `break 'label`.
let stmt = self.mk_stmt(span, StmtKind::Expr(expr));
@@ -1591,7 +1603,7 @@ impl<'a> Parser<'a> {
self.mk_expr(span, ExprKind::Block(blk, label))
});
- err.emit();
+ self.sess.emit_err(err);
expr
}?;
@@ -1606,6 +1618,39 @@ impl<'a> Parser<'a> {
Ok(expr)
}
+ /// Emit an error when a char is parsed as a lifetime because of a missing quote
+ pub(super) fn recover_unclosed_char(
+ &mut self,
+ lifetime: Ident,
+ err: impl FnOnce(&mut Self) -> DiagnosticBuilder<'a, ErrorGuaranteed>,
+ ) -> ast::Lit {
+ if let Some(mut diag) =
+ self.sess.span_diagnostic.steal_diagnostic(lifetime.span, StashKey::LifetimeIsChar)
+ {
+ diag.span_suggestion_verbose(
+ lifetime.span.shrink_to_hi(),
+ "add `'` to close the char literal",
+ "'",
+ Applicability::MaybeIncorrect,
+ )
+ .emit();
+ } else {
+ err(self)
+ .span_suggestion_verbose(
+ lifetime.span.shrink_to_hi(),
+ "add `'` to close the char literal",
+ "'",
+ Applicability::MaybeIncorrect,
+ )
+ .emit();
+ }
+ ast::Lit {
+ token_lit: token::Lit::new(token::LitKind::Char, lifetime.name, None),
+ kind: ast::LitKind::Char(lifetime.name.as_str().chars().next().unwrap_or('_')),
+ span: lifetime.span,
+ }
+ }
+
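
Roughly the situation `recover_unclosed_char` handles (the variable name is illustrative): a char literal missing its closing quote lexes as a lifetime, and the parser now recovers it as a char and suggests adding the `'`:

    fn main() {
        // let initial = 'k;    // `'k` lexes as a lifetime; recovered as the char 'k'
        let initial = 'k';      // the form the suggestion produces
        let _ = initial;
    }
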
/// Recover on the syntax `do catch { ... }` suggesting `try { ... }` instead.
fn recover_do_catch(&mut self) -> PResult<'a, P<Expr>> {
let lo = self.token.span;
@@ -1662,19 +1707,13 @@ impl<'a> Parser<'a> {
// The value expression can be a labeled loop, see issue #86948, e.g.:
// `loop { break 'label: loop { break 'label 42; }; }`
let lexpr = self.parse_labeled_expr(label.take().unwrap(), true)?;
- self.struct_span_err(
- lexpr.span,
- "parentheses are required around this expression to avoid confusion with a labeled break expression",
- )
- .multipart_suggestion(
- "wrap the expression in parentheses",
- vec![
- (lexpr.span.shrink_to_lo(), "(".to_string()),
- (lexpr.span.shrink_to_hi(), ")".to_string()),
- ],
- Applicability::MachineApplicable,
- )
- .emit();
+ self.sess.emit_err(LabeledLoopInBreak {
+ span: lexpr.span,
+ sub: WrapExpressionInParentheses {
+ left: lexpr.span.shrink_to_lo(),
+ right: lexpr.span.shrink_to_hi(),
+ },
+ });
Some(lexpr)
} else if self.token != token::OpenDelim(Delimiter::Brace)
|| !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
@@ -1737,7 +1776,7 @@ impl<'a> Parser<'a> {
}
pub(super) fn parse_lit(&mut self) -> PResult<'a, Lit> {
- self.parse_opt_lit().ok_or_else(|| {
+ self.parse_opt_lit().ok_or(()).or_else(|()| {
if let token::Interpolated(inner) = &self.token.kind {
let expr = match inner.as_ref() {
token::NtExpr(expr) => Some(expr),
@@ -1746,16 +1785,25 @@ impl<'a> Parser<'a> {
};
if let Some(expr) = expr {
if matches!(expr.kind, ExprKind::Err) {
- let mut err = self
- .diagnostic()
- .struct_span_err(self.token.span, "invalid interpolated expression");
+ let mut err = InvalidInterpolatedExpression { span: self.token.span }
+ .into_diagnostic(&self.sess.span_diagnostic);
err.downgrade_to_delayed_bug();
- return err;
+ return Err(err);
}
}
}
- let msg = format!("unexpected token: {}", super::token_descr(&self.token));
- self.struct_span_err(self.token.span, &msg)
+ let token = self.token.clone();
+ let err = |self_: &mut Self| {
+ let msg = format!("unexpected token: {}", super::token_descr(&token));
+ self_.struct_span_err(token.span, &msg)
+ };
+ // On an error path, eagerly consider a lifetime to be an unclosed character lit
+ if self.token.is_lifetime() {
+ let lt = self.expect_lifetime();
+ Ok(self.recover_unclosed_char(lt.ident, err))
+ } else {
+ Err(err(self))
+ }
})
}
@@ -1780,7 +1828,10 @@ impl<'a> Parser<'a> {
});
if let Some(token) = &recovered {
self.bump();
- self.error_float_lits_must_have_int_part(&token);
+ self.sess.emit_err(FloatLiteralRequiresIntegerPart {
+ span: token.span,
+ correct: pprust::token_to_string(token).into_owned(),
+ });
}
}
@@ -1808,13 +1859,6 @@ impl<'a> Parser<'a> {
}
}
- fn error_float_lits_must_have_int_part(&self, token: &Token) {
- self.sess.emit_err(FloatLiteralRequiresIntegerPart {
- span: token.span,
- correct: pprust::token_to_string(token).into_owned(),
- });
- }
-
fn report_lit_error(&self, err: LitError, lit: token::Lit, span: Span) {
// Checks if `s` looks like i32 or u1234 etc.
fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool {
@@ -1843,11 +1887,13 @@ impl<'a> Parser<'a> {
// by lexer, so here we don't report it the second time.
LitError::LexerError => {}
LitError::InvalidSuffix => {
- self.expect_no_suffix(
- span,
- &format!("{} {} literal", kind.article(), kind.descr()),
- suffix,
- );
+ if let Some(suffix) = suffix {
+ self.sess.emit_err(InvalidLiteralSuffix {
+ span,
+ kind: format!("{}", kind.descr()),
+ suffix,
+ });
+ }
}
LitError::InvalidIntSuffix => {
let suf = suffix.expect("suffix error with no suffix");
@@ -1873,15 +1919,12 @@ impl<'a> Parser<'a> {
}
}
LitError::NonDecimalFloat(base) => {
- let descr = match base {
- 16 => "hexadecimal",
- 8 => "octal",
- 2 => "binary",
+ match base {
+ 16 => self.sess.emit_err(HexadecimalFloatLiteralNotSupported { span }),
+ 8 => self.sess.emit_err(OctalFloatLiteralNotSupported { span }),
+ 2 => self.sess.emit_err(BinaryFloatLiteralNotSupported { span }),
_ => unreachable!(),
};
- self.struct_span_err(span, &format!("{descr} float literal is not supported"))
- .span_label(span, "not supported")
- .emit();
}
LitError::IntTooLarge => {
self.sess.emit_err(IntLiteralTooLarge { span });
@@ -1889,38 +1932,17 @@ impl<'a> Parser<'a> {
}
}
- pub(super) fn expect_no_suffix(&self, sp: Span, kind: &str, suffix: Option<Symbol>) {
- if let Some(suf) = suffix {
- let mut err = if kind == "a tuple index"
- && [sym::i32, sym::u32, sym::isize, sym::usize].contains(&suf)
- {
- // #59553: warn instead of reject out of hand to allow the fix to percolate
- // through the ecosystem when people fix their macros
- let mut err = self
- .sess
- .span_diagnostic
- .struct_span_warn(sp, &format!("suffixes on {kind} are invalid"));
- err.note(&format!(
- "`{}` is *temporarily* accepted on tuple index fields as it was \
- incorrectly accepted on stable for a few releases",
- suf,
- ));
- err.help(
- "on proc macros, you'll want to use `syn::Index::from` or \
- `proc_macro::Literal::*_unsuffixed` for code that will desugar \
- to tuple field access",
- );
- err.note(
- "see issue #60210 <https://github.com/rust-lang/rust/issues/60210> \
- for more information",
- );
- err
- } else {
- self.struct_span_err(sp, &format!("suffixes on {kind} are invalid"))
- .forget_guarantee()
- };
- err.span_label(sp, format!("invalid suffix `{suf}`"));
- err.emit();
+ pub(super) fn expect_no_tuple_index_suffix(&self, span: Span, suffix: Symbol) {
+ if [sym::i32, sym::u32, sym::isize, sym::usize].contains(&suffix) {
+ // #59553: warn instead of reject out of hand to allow the fix to percolate
+ // through the ecosystem when people fix their macros
+ self.sess.emit_warning(InvalidLiteralSuffixOnTupleIndex {
+ span,
+ suffix,
+ exception: Some(()),
+ });
+ } else {
+ self.sess.emit_err(InvalidLiteralSuffixOnTupleIndex { span, suffix, exception: None });
}
}
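
A sketch of the tuple-index suffixes this method checks (the tuple itself is made up): `u32`, `i32`, `usize`, and `isize` suffixes only produce a warning (see issue #60210), while any other suffix is an error:

    fn main() {
        let pair = (1, 2);
        // let a = pair.0u32;   // suffixed tuple index: warning for u32/i32/usize/isize,
        //                      // hard error for any other suffix
        let a = pair.0;
        let _ = a + pair.1;
    }
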
@@ -1954,14 +1976,13 @@ impl<'a> Parser<'a> {
let mut snapshot = self.create_snapshot_for_diagnostic();
match snapshot.parse_array_or_repeat_expr(Delimiter::Brace) {
Ok(arr) => {
- let hi = snapshot.prev_token.span;
- self.struct_span_err(arr.span, "this is a block expression, not an array")
- .multipart_suggestion(
- "to make an array, use square brackets instead of curly braces",
- vec![(lo, "[".to_owned()), (hi, "]".to_owned())],
- Applicability::MaybeIncorrect,
- )
- .emit();
+ self.sess.emit_err(ArrayBracketsInsteadOfSpaces {
+ span: arr.span,
+ sub: ArrayBracketsInsteadOfSpacesSugg {
+ left: lo,
+ right: snapshot.prev_token.span,
+ },
+ });
self.restore_snapshot(snapshot);
Some(self.mk_expr_err(arr.span))
@@ -2088,6 +2109,12 @@ impl<'a> Parser<'a> {
if self.token.kind == TokenKind::Semi
&& matches!(self.token_cursor.frame.delim_sp, Some((Delimiter::Parenthesis, _)))
+ // HACK: This is needed so we can detect whether we're inside a macro,
+ // where regular assumptions about what tokens can follow other tokens
+ // don't necessarily apply.
+ && self.may_recover()
+ // FIXME(Nilstrieb): Remove this check once `may_recover` actually stops recovery
+ && self.subparser_name.is_none()
{
// It is likely that the closure body is a block but where the
// braces have been removed. We will recover and eat the next
@@ -2124,7 +2151,8 @@ impl<'a> Parser<'a> {
// Check for `move async` and recover
if self.check_keyword(kw::Async) {
let move_async_span = self.token.span.with_lo(self.prev_token.span.data().lo);
- Err(self.incorrect_move_async_order_found(move_async_span))
+ Err(AsyncMoveOrderIncorrect { span: move_async_span }
+ .into_diagnostic(&self.sess.span_diagnostic))
} else {
Ok(CaptureBy::Value)
}
@@ -2208,7 +2236,7 @@ impl<'a> Parser<'a> {
},
ExprKind::Block(_, None) => {
self.sess.emit_err(IfExpressionMissingCondition {
- if_span: self.sess.source_map().next_point(lo),
+ if_span: lo.shrink_to_hi(),
block_span: self.sess.source_map().start_point(cond_span),
});
std::mem::replace(&mut cond, this.mk_expr_err(cond_span.shrink_to_hi()))
@@ -2505,39 +2533,22 @@ impl<'a> Parser<'a> {
self.bump(); // `;`
let mut stmts =
vec![self.mk_stmt(first_expr.span, ast::StmtKind::Expr(first_expr.clone()))];
- let err = |this: &mut Parser<'_>, stmts: Vec<ast::Stmt>| {
+ let err = |this: &Parser<'_>, stmts: Vec<ast::Stmt>| {
let span = stmts[0].span.to(stmts[stmts.len() - 1].span);
- let mut err = this.struct_span_err(span, "`match` arm body without braces");
- let (these, s, are) =
- if stmts.len() > 1 { ("these", "s", "are") } else { ("this", "", "is") };
- err.span_label(
- span,
- &format!(
- "{these} statement{s} {are} not surrounded by a body",
- these = these,
- s = s,
- are = are
- ),
- );
- err.span_label(arrow_span, "while parsing the `match` arm starting here");
- if stmts.len() > 1 {
- err.multipart_suggestion(
- &format!("surround the statement{s} with a body"),
- vec![
- (span.shrink_to_lo(), "{ ".to_string()),
- (span.shrink_to_hi(), " }".to_string()),
- ],
- Applicability::MachineApplicable,
- );
- } else {
- err.span_suggestion(
- semi_sp,
- "use a comma to end a `match` arm expression",
- ",",
- Applicability::MachineApplicable,
- );
- }
- err.emit();
+
+ this.sess.emit_err(MatchArmBodyWithoutBraces {
+ statements: span,
+ arrow: arrow_span,
+ num_statements: stmts.len(),
+ sub: if stmts.len() > 1 {
+ MatchArmBodyWithoutBracesSugg::AddBraces {
+ left: span.shrink_to_lo(),
+ right: span.shrink_to_hi(),
+ }
+ } else {
+ MatchArmBodyWithoutBracesSugg::UseComma { semicolon: semi_sp }
+ },
+ });
this.mk_expr_err(span)
};
// We might have either a `,` -> `;` typo, or a block without braces. We need
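
A rough example of the arm shape this recovery is about (names are illustrative): several statements after `=>` without braces now produce `MatchArmBodyWithoutBraces`, suggesting braces for the multi-statement case and a trailing comma for the single-statement one:

    fn classify(n: u32) -> &'static str {
        match n {
            // 0 => println!("zero"); "zero"    // two statements with no body
            0 => {
                println!("zero");               // the braced form the suggestion produces
                "zero"
            }
            _ => "other",
        }
    }
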
@@ -2826,23 +2837,19 @@ impl<'a> Parser<'a> {
let expr = self.parse_struct_expr(qself.cloned(), path.clone(), true);
if let (Ok(expr), false) = (&expr, struct_allowed) {
// This is a struct literal, but we can't accept them here.
- self.error_struct_lit_not_allowed_here(path.span, expr.span);
+ self.sess.emit_err(StructLiteralNotAllowedHere {
+ span: expr.span,
+ sub: StructLiteralNotAllowedHereSugg {
+ left: path.span.shrink_to_lo(),
+ right: expr.span.shrink_to_hi(),
+ },
+ });
}
return Some(expr);
}
None
}
- fn error_struct_lit_not_allowed_here(&self, lo: Span, sp: Span) {
- self.struct_span_err(sp, "struct literals are not allowed here")
- .multipart_suggestion(
- "surround the struct literal with parentheses",
- vec![(lo.shrink_to_lo(), "(".to_string()), (sp.shrink_to_hi(), ")".to_string())],
- Applicability::MachineApplicable,
- )
- .emit();
- }
-
pub(super) fn parse_struct_fields(
&mut self,
pth: ast::Path,
@@ -3137,6 +3144,8 @@ impl<'a> Parser<'a> {
&& this.token.kind == token::Semi
{
TrailingToken::Semi
+ } else if this.token.kind == token::Gt {
+ TrailingToken::Gt
} else {
// FIXME - pass this through from the place where we know
// we need a comma, rather than assuming that `#[attr] expr,`
diff --git a/compiler/rustc_parse/src/parser/generics.rs b/compiler/rustc_parse/src/parser/generics.rs
index 4d0a8b05e..fa75670b2 100644
--- a/compiler/rustc_parse/src/parser/generics.rs
+++ b/compiler/rustc_parse/src/parser/generics.rs
@@ -1,7 +1,9 @@
use super::{ForceCollect, Parser, TrailingToken};
use rustc_ast::token;
-use rustc_ast::{self as ast, AttrVec, GenericBounds, GenericParam, GenericParamKind, WhereClause};
+use rustc_ast::{
+ self as ast, AttrVec, GenericBounds, GenericParam, GenericParamKind, TyKind, WhereClause,
+};
use rustc_errors::{Applicability, PResult};
use rustc_span::symbol::kw;
@@ -31,13 +33,43 @@ impl<'a> Parser<'a> {
let mut colon_span = None;
let bounds = if self.eat(&token::Colon) {
colon_span = Some(self.prev_token.span);
+ // recover from `impl Trait` in type param bound
+ if self.token.is_keyword(kw::Impl) {
+ let impl_span = self.token.span;
+ let snapshot = self.create_snapshot_for_diagnostic();
+ match self.parse_ty() {
+ Ok(p) => {
+ if let TyKind::ImplTrait(_, bounds) = &(*p).kind {
+ let span = impl_span.to(self.token.span.shrink_to_lo());
+ let mut err = self.struct_span_err(
+ span,
+ "expected trait bound, found `impl Trait` type",
+ );
+ err.span_label(span, "not a trait");
+ if let [bound, ..] = &bounds[..] {
+ err.span_suggestion_verbose(
+ impl_span.until(bound.span()),
+ "use the trait bounds directly",
+ String::new(),
+ Applicability::MachineApplicable,
+ );
+ }
+ err.emit();
+ return Err(err);
+ }
+ }
+ Err(err) => {
+ err.cancel();
+ }
+ }
+ self.restore_snapshot(snapshot);
+ }
self.parse_generic_bounds(colon_span)?
} else {
Vec::new()
};
let default = if self.eat(&token::Eq) { Some(self.parse_ty()?) } else { None };
-
Ok(GenericParam {
ident,
id: ast::DUMMY_NODE_ID,
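
A small sketch of the new `impl Trait`-in-bound recovery above (function and parameter names are illustrative):

    use std::fmt::Debug;

    // fn log<T: impl Debug>(value: T) {}   // `impl Debug` is not a trait bound;
    //                                      // the suggestion uses the bound directly:
    fn log<T: Debug>(value: T) {
        println!("{value:?}");
    }
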
diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs
index e55b5ce71..bda301c52 100644
--- a/compiler/rustc_parse/src/parser/item.rs
+++ b/compiler/rustc_parse/src/parser/item.rs
@@ -1,4 +1,6 @@
-use super::diagnostics::{dummy_arg, ConsumeClosingDelim, Error};
+use crate::errors::{DocCommentDoesNotDocumentAnything, UseEmptyBlockNotSemi};
+
+use super::diagnostics::{dummy_arg, ConsumeClosingDelim};
use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
use super::{AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, TrailingToken};
@@ -13,7 +15,7 @@ use rustc_ast::{EnumDef, FieldDef, Generics, TraitRef, Ty, TyKind, Variant, Vari
use rustc_ast::{FnHeader, ForeignItem, Path, PathSegment, Visibility, VisibilityKind};
use rustc_ast::{MacArgs, MacCall, MacDelimiter};
use rustc_ast_pretty::pprust;
-use rustc_errors::{struct_span_err, Applicability, PResult, StashKey};
+use rustc_errors::{struct_span_err, Applicability, IntoDiagnostic, PResult, StashKey};
use rustc_span::edition::Edition;
use rustc_span::lev_distance::lev_distance;
use rustc_span::source_map::{self, Span};
@@ -664,6 +666,14 @@ impl<'a> Parser<'a> {
mut parse_item: impl FnMut(&mut Parser<'a>) -> PResult<'a, Option<Option<T>>>,
) -> PResult<'a, Vec<T>> {
let open_brace_span = self.token.span;
+
+ // Recover `impl Ty;` instead of `impl Ty {}`
+ if self.token == TokenKind::Semi {
+ self.sess.emit_err(UseEmptyBlockNotSemi { span: self.token.span });
+ self.bump();
+ return Ok(vec![]);
+ }
+
self.expect(&token::OpenDelim(Delimiter::Brace))?;
attrs.extend(self.parse_inner_attributes()?);
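
Roughly what the `UseEmptyBlockNotSemi` recovery accepts (the type name is illustrative); the same recovery is applied further down for `enum Foo;`:

    struct Foo;

    // impl Foo;    // previously a parse error; now recovered with a suggestion
    impl Foo {}     // to use an empty block instead
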
@@ -750,8 +760,8 @@ impl<'a> Parser<'a> {
)
.span_label(self.token.span, "this doc comment doesn't document anything")
.help(
- "doc comments must come before what they document, maybe a \
- comment was intended with `//`?",
+ "doc comments must come before what they document, if a comment was \
+ intended use `//`",
)
.emit();
self.bump();
@@ -1283,12 +1293,10 @@ impl<'a> Parser<'a> {
/// Parses an enum declaration.
fn parse_item_enum(&mut self) -> PResult<'a, ItemInfo> {
if self.token.is_keyword(kw::Struct) {
- let mut err = self.struct_span_err(
- self.prev_token.span.to(self.token.span),
- "`enum` and `struct` are mutually exclusive",
- );
+ let span = self.prev_token.span.to(self.token.span);
+ let mut err = self.struct_span_err(span, "`enum` and `struct` are mutually exclusive");
err.span_suggestion(
- self.prev_token.span.to(self.token.span),
+ span,
"replace `enum struct` with",
"enum",
Applicability::MachineApplicable,
@@ -1305,12 +1313,20 @@ impl<'a> Parser<'a> {
let mut generics = self.parse_generics()?;
generics.where_clause = self.parse_where_clause()?;
- let (variants, _) = self
- .parse_delim_comma_seq(Delimiter::Brace, |p| p.parse_enum_variant())
- .map_err(|e| {
- self.recover_stmt();
- e
- })?;
+ // Possibly recover `enum Foo;` instead of `enum Foo {}`
+ let (variants, _) = if self.token == TokenKind::Semi {
+ self.sess.emit_err(UseEmptyBlockNotSemi { span: self.token.span });
+ self.bump();
+ (vec![], false)
+ } else {
+ self.parse_delim_comma_seq(Delimiter::Brace, |p| p.parse_enum_variant()).map_err(
+ |mut e| {
+ e.span_label(id.span, "while parsing this enum");
+ self.recover_stmt();
+ e
+ },
+ )?
+ };
let enum_definition = EnumDef { variants: variants.into_iter().flatten().collect() };
Ok((id, ItemKind::Enum(enum_definition, generics)))
@@ -1332,7 +1348,8 @@ impl<'a> Parser<'a> {
let struct_def = if this.check(&token::OpenDelim(Delimiter::Brace)) {
// Parse a struct variant.
- let (fields, recovered) = this.parse_record_struct_body("struct", false)?;
+ let (fields, recovered) =
+ this.parse_record_struct_body("struct", ident.span, false)?;
VariantData::Struct(fields, recovered)
} else if this.check(&token::OpenDelim(Delimiter::Parenthesis)) {
VariantData::Tuple(this.parse_tuple_struct_body()?, DUMMY_NODE_ID)
@@ -1386,8 +1403,11 @@ impl<'a> Parser<'a> {
VariantData::Unit(DUMMY_NODE_ID)
} else {
// If we see: `struct Foo<T> where T: Copy { ... }`
- let (fields, recovered) =
- self.parse_record_struct_body("struct", generics.where_clause.has_where_token)?;
+ let (fields, recovered) = self.parse_record_struct_body(
+ "struct",
+ class_name.span,
+ generics.where_clause.has_where_token,
+ )?;
VariantData::Struct(fields, recovered)
}
// No `where` so: `struct Foo<T>;`
@@ -1395,8 +1415,11 @@ impl<'a> Parser<'a> {
VariantData::Unit(DUMMY_NODE_ID)
// Record-style struct definition
} else if self.token == token::OpenDelim(Delimiter::Brace) {
- let (fields, recovered) =
- self.parse_record_struct_body("struct", generics.where_clause.has_where_token)?;
+ let (fields, recovered) = self.parse_record_struct_body(
+ "struct",
+ class_name.span,
+ generics.where_clause.has_where_token,
+ )?;
VariantData::Struct(fields, recovered)
// Tuple-style struct definition with optional where-clause.
} else if self.token == token::OpenDelim(Delimiter::Parenthesis) {
@@ -1425,12 +1448,18 @@ impl<'a> Parser<'a> {
let vdata = if self.token.is_keyword(kw::Where) {
generics.where_clause = self.parse_where_clause()?;
- let (fields, recovered) =
- self.parse_record_struct_body("union", generics.where_clause.has_where_token)?;
+ let (fields, recovered) = self.parse_record_struct_body(
+ "union",
+ class_name.span,
+ generics.where_clause.has_where_token,
+ )?;
VariantData::Struct(fields, recovered)
} else if self.token == token::OpenDelim(Delimiter::Brace) {
- let (fields, recovered) =
- self.parse_record_struct_body("union", generics.where_clause.has_where_token)?;
+ let (fields, recovered) = self.parse_record_struct_body(
+ "union",
+ class_name.span,
+ generics.where_clause.has_where_token,
+ )?;
VariantData::Struct(fields, recovered)
} else {
let token_str = super::token_descr(&self.token);
@@ -1446,6 +1475,7 @@ impl<'a> Parser<'a> {
fn parse_record_struct_body(
&mut self,
adt_ty: &str,
+ ident_span: Span,
parsed_where: bool,
) -> PResult<'a, (Vec<FieldDef>, /* recovered */ bool)> {
let mut fields = Vec::new();
@@ -1460,6 +1490,7 @@ impl<'a> Parser<'a> {
match field {
Ok(field) => fields.push(field),
Err(mut err) => {
+ err.span_label(ident_span, format!("while parsing this {adt_ty}"));
err.emit();
break;
}
@@ -1555,7 +1586,10 @@ impl<'a> Parser<'a> {
token::CloseDelim(Delimiter::Brace) => {}
token::DocComment(..) => {
let previous_span = self.prev_token.span;
- let mut err = self.span_err(self.token.span, Error::UselessDocComment);
+ let mut err = DocCommentDoesNotDocumentAnything {
+ span: self.token.span,
+ missing_comma: None,
+ };
self.bump(); // consume the doc comment
let comma_after_doc_seen = self.eat(&token::Comma);
// `seen_comma` is always false, because we are inside doc block
@@ -1564,18 +1598,13 @@ impl<'a> Parser<'a> {
seen_comma = true;
}
if comma_after_doc_seen || self.token == token::CloseDelim(Delimiter::Brace) {
- err.emit();
+ self.sess.emit_err(err);
} else {
if !seen_comma {
- let sp = self.sess.source_map().next_point(previous_span);
- err.span_suggestion(
- sp,
- "missing comma here",
- ",",
- Applicability::MachineApplicable,
- );
+ let sp = previous_span.shrink_to_hi();
+ err.missing_comma = Some(sp);
}
- return Err(err);
+ return Err(err.into_diagnostic(&self.sess.span_diagnostic));
}
}
_ => {
@@ -1715,6 +1744,7 @@ impl<'a> Parser<'a> {
fn parse_field_ident(&mut self, adt_ty: &str, lo: Span) -> PResult<'a, Ident> {
let (ident, is_raw) = self.ident_or_err()?;
if !is_raw && ident.is_reserved() {
+ let snapshot = self.create_snapshot_for_diagnostic();
let err = if self.check_fn_front_matter(false) {
let inherited_vis = Visibility {
span: rustc_span::DUMMY_SP,
@@ -1723,20 +1753,63 @@ impl<'a> Parser<'a> {
};
// We use `parse_fn` to get a span for the function
let fn_parse_mode = FnParseMode { req_name: |_| true, req_body: true };
- if let Err(mut db) =
- self.parse_fn(&mut AttrVec::new(), fn_parse_mode, lo, &inherited_vis)
+ match self.parse_fn(&mut AttrVec::new(), fn_parse_mode, lo, &inherited_vis) {
+ Ok(_) => {
+ let mut err = self.struct_span_err(
+ lo.to(self.prev_token.span),
+ &format!("functions are not allowed in {adt_ty} definitions"),
+ );
+ err.help(
+ "unlike in C++, Java, and C#, functions are declared in `impl` blocks",
+ );
+ err.help("see https://doc.rust-lang.org/book/ch05-03-method-syntax.html for more information");
+ err
+ }
+ Err(err) => {
+ err.cancel();
+ self.restore_snapshot(snapshot);
+ self.expected_ident_found()
+ }
+ }
+ } else if self.eat_keyword(kw::Struct) {
+ match self.parse_item_struct() {
+ Ok((ident, _)) => {
+ let mut err = self.struct_span_err(
+ lo.with_hi(ident.span.hi()),
+ &format!("structs are not allowed in {adt_ty} definitions"),
+ );
+ err.help("consider creating a new `struct` definition instead of nesting");
+ err
+ }
+ Err(err) => {
+ err.cancel();
+ self.restore_snapshot(snapshot);
+ self.expected_ident_found()
+ }
+ }
+ } else {
+ let mut err = self.expected_ident_found();
+ if self.eat_keyword_noexpect(kw::Let)
+ && let removal_span = self.prev_token.span.until(self.token.span)
+ && let Ok(ident) = self.parse_ident_common(false)
+ // Cancel this error, we don't need it.
+ .map_err(|err| err.cancel())
+ && self.token.kind == TokenKind::Colon
{
- db.delay_as_bug();
+ err.span_suggestion(
+ removal_span,
+ "remove this `let` keyword",
+ String::new(),
+ Applicability::MachineApplicable,
+ );
+ err.note("the `let` keyword is not allowed in `struct` fields");
+ err.note("see <https://doc.rust-lang.org/book/ch05-01-defining-structs.html> for more information");
+ err.emit();
+ return Ok(ident);
+ } else {
+ self.restore_snapshot(snapshot);
}
- let mut err = self.struct_span_err(
- lo.to(self.prev_token.span),
- &format!("functions are not allowed in {adt_ty} definitions"),
- );
- err.help("unlike in C++, Java, and C#, functions are declared in `impl` blocks");
- err.help("see https://doc.rust-lang.org/book/ch05-03-method-syntax.html for more information");
err
- } else {
- self.expected_ident_found()
};
return Err(err);
}
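
A sketch of the new `let`-in-field recovery (struct and field names are illustrative): the stray keyword is removed by the suggestion and parsing continues with the field identifier:

    // struct Config { let retries: u32 }   // `let` is not allowed in struct fields
    struct Config {
        retries: u32,                       // the form the suggestion produces
    }
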
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index 4cb198561..5fe29062b 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -13,7 +13,6 @@ mod ty;
use crate::lexer::UnmatchedBrace;
pub use attr_wrapper::AttrWrapper;
pub use diagnostics::AttemptLocalParseRecovery;
-use diagnostics::Error;
pub(crate) use item::FnParseMode;
pub use pat::{CommaRecoveryMode, RecoverColon, RecoverComma};
pub use path::PathStyle;
@@ -32,7 +31,7 @@ use rustc_ast_pretty::pprust;
use rustc_data_structures::fx::FxHashMap;
use rustc_errors::PResult;
use rustc_errors::{
- struct_span_err, Applicability, DiagnosticBuilder, ErrorGuaranteed, FatalError, MultiSpan,
+ Applicability, DiagnosticBuilder, ErrorGuaranteed, FatalError, IntoDiagnostic, MultiSpan,
};
use rustc_session::parse::ParseSess;
use rustc_span::source_map::{Span, DUMMY_SP};
@@ -41,6 +40,11 @@ use rustc_span::symbol::{kw, sym, Ident, Symbol};
use std::ops::Range;
use std::{cmp, mem, slice};
+use crate::errors::{
+ DocCommentDoesNotDocumentAnything, IncorrectVisibilityRestriction, MismatchedClosingDelimiter,
+ NonStringAbiLiteral,
+};
+
bitflags::bitflags! {
struct Restrictions: u8 {
const STMT_EXPR = 1 << 0;
@@ -75,6 +79,7 @@ pub enum ForceCollect {
pub enum TrailingToken {
None,
Semi,
+ Gt,
/// If the trailing token is a comma, then capture it
/// Otherwise, ignore the trailing token
MaybeComma,
@@ -110,6 +115,12 @@ macro_rules! maybe_recover_from_interpolated_ty_qpath {
};
}
+#[derive(Clone, Copy)]
+pub enum Recovery {
+ Allowed,
+ Forbidden,
+}
+
#[derive(Clone)]
pub struct Parser<'a> {
pub sess: &'a ParseSess,
@@ -147,12 +158,15 @@ pub struct Parser<'a> {
/// This allows us to recover when the user forgets to add braces around
/// multiple statements in the closure body.
pub current_closure: Option<ClosureSpans>,
+ /// Whether the parser is allowed to do recovery.
+ /// This is disabled when parsing macro arguments, see #103534
+ pub recovery: Recovery,
}
-// This type is used a lot, e.g. it's cloned when matching many declarative macro rules. Make sure
+// This type is used a lot, e.g. it's cloned when matching many declarative macro rules with nonterminals. Make sure
// it doesn't unintentionally get bigger.
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
-rustc_data_structures::static_assert_size!(Parser<'_>, 328);
+rustc_data_structures::static_assert_size!(Parser<'_>, 336);
/// Stores span information about a closure.
#[derive(Clone)]
@@ -298,7 +312,10 @@ impl TokenCursor {
fn desugar(&mut self, attr_style: AttrStyle, data: Symbol, span: Span) -> (Token, Spacing) {
// Searches for the occurrences of `"#*` and returns the minimum number of `#`s
- // required to wrap the text.
+ // required to wrap the text. E.g.
+ // - `abc d` is wrapped as `r"abc d"` (num_of_hashes = 0)
+ // - `abc "d"` is wrapped as `r#"abc "d""#` (num_of_hashes = 1)
+ // - `abc "##d##"` is wrapped as `r###"abc "##d##""###` (num_of_hashes = 3)
let mut num_of_hashes = 0;
let mut count = 0;
for ch in data.as_str().chars() {
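
Roughly what the hash counting above is for (a sketch; the exact attribute spelling is an assumption based on the comment): doc-comment text is wrapped in a raw string with just enough `#`s to survive any quotes it contains:

    // `/// plain text`    becomes roughly  #[doc = r"plain text"]
    // `/// has "quotes"`  becomes roughly  #[doc = r#"has "quotes""#]
    #[doc = r#"has "quotes""#]
    struct Documented;
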
@@ -310,6 +327,7 @@ impl TokenCursor {
num_of_hashes = cmp::max(num_of_hashes, count);
}
+ // `/// foo` becomes `doc = r"foo"`.
let delim_span = DelimSpan::from_single(span);
let body = TokenTree::Delimited(
delim_span,
@@ -406,24 +424,39 @@ pub enum FollowedByType {
No,
}
-fn token_descr_opt(token: &Token) -> Option<&'static str> {
- Some(match token.kind {
- _ if token.is_special_ident() => "reserved identifier",
- _ if token.is_used_keyword() => "keyword",
- _ if token.is_unused_keyword() => "reserved keyword",
- token::DocComment(..) => "doc comment",
- _ => return None,
- })
+#[derive(Clone, Copy, PartialEq, Eq)]
+pub enum TokenDescription {
+ ReservedIdentifier,
+ Keyword,
+ ReservedKeyword,
+ DocComment,
}
-pub(super) fn token_descr(token: &Token) -> String {
- let token_str = pprust::token_to_string(token);
- match token_descr_opt(token) {
- Some(prefix) => format!("{} `{}`", prefix, token_str),
- _ => format!("`{}`", token_str),
+impl TokenDescription {
+ pub fn from_token(token: &Token) -> Option<Self> {
+ match token.kind {
+ _ if token.is_special_ident() => Some(TokenDescription::ReservedIdentifier),
+ _ if token.is_used_keyword() => Some(TokenDescription::Keyword),
+ _ if token.is_unused_keyword() => Some(TokenDescription::ReservedKeyword),
+ token::DocComment(..) => Some(TokenDescription::DocComment),
+ _ => None,
+ }
}
}
+pub(super) fn token_descr(token: &Token) -> String {
+ let name = pprust::token_to_string(token).to_string();
+
+ let kind = TokenDescription::from_token(token).map(|kind| match kind {
+ TokenDescription::ReservedIdentifier => "reserved identifier",
+ TokenDescription::Keyword => "keyword",
+ TokenDescription::ReservedKeyword => "reserved keyword",
+ TokenDescription::DocComment => "doc comment",
+ });
+
+ if let Some(kind) = kind { format!("{} `{}`", kind, name) } else { format!("`{}`", name) }
+}
+
impl<'a> Parser<'a> {
pub fn new(
sess: &'a ParseSess,
@@ -459,6 +492,7 @@ impl<'a> Parser<'a> {
inner_attr_ranges: Default::default(),
},
current_closure: None,
+ recovery: Recovery::Allowed,
};
// Make parser point to the first token.
@@ -467,6 +501,22 @@ impl<'a> Parser<'a> {
parser
}
+ pub fn forbid_recovery(mut self) -> Self {
+ self.recovery = Recovery::Forbidden;
+ self
+ }
+
+ /// Whether the parser is allowed to recover from broken code.
+ ///
+ /// If this returns false, recovering broken code into valid code (especially if this recovery does lookahead)
+ /// is not allowed. All recovery done by the parser must be gated behind this check.
+ ///
+ /// Technically, this only needs to restrict eager recovery by doing lookahead at more tokens.
+ /// But making the distinction is very subtle, and simply forbidding all recovery is a lot simpler to uphold.
+ fn may_recover(&self) -> bool {
+ matches!(self.recovery, Recovery::Allowed)
+ }
+
pub fn unexpected<T>(&mut self) -> PResult<'a, T> {
match self.expect_one_of(&[], &[]) {
Err(e) => Err(e),
@@ -518,9 +568,11 @@ impl<'a> Parser<'a> {
fn ident_or_err(&mut self) -> PResult<'a, (Ident, /* is_raw */ bool)> {
self.token.ident().ok_or_else(|| match self.prev_token.kind {
- TokenKind::DocComment(..) => {
- self.span_err(self.prev_token.span, Error::UselessDocComment)
+ TokenKind::DocComment(..) => DocCommentDoesNotDocumentAnything {
+ span: self.prev_token.span,
+ missing_comma: None,
}
+ .into_diagnostic(&self.sess.span_diagnostic),
_ => self.expected_ident_found(),
})
}
@@ -1144,7 +1196,9 @@ impl<'a> Parser<'a> {
fn parse_field_name(&mut self) -> PResult<'a, Ident> {
if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = self.token.kind
{
- self.expect_no_suffix(self.token.span, "a tuple index", suffix);
+ if let Some(suffix) = suffix {
+ self.expect_no_tuple_index_suffix(self.token.span, suffix);
+ }
self.bump();
Ok(Ident::new(symbol, self.prev_token.span))
} else {
@@ -1342,23 +1396,8 @@ impl<'a> Parser<'a> {
let path = self.parse_path(PathStyle::Mod)?;
self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; // `)`
- let msg = "incorrect visibility restriction";
- let suggestion = r##"some possible visibility restrictions are:
-`pub(crate)`: visible only on the current crate
-`pub(super)`: visible only in the current module's parent
-`pub(in path::to::module)`: visible only on the specified path"##;
-
let path_str = pprust::path_to_string(&path);
-
- struct_span_err!(self.sess.span_diagnostic, path.span, E0704, "{}", msg)
- .help(suggestion)
- .span_suggestion(
- path.span,
- &format!("make this visible only to module `{}` with `in`", path_str),
- format!("in {}", path_str),
- Applicability::MachineApplicable,
- )
- .emit();
+ self.sess.emit_err(IncorrectVisibilityRestriction { span: path.span, inner_str: path_str });
Ok(())
}
@@ -1384,14 +1423,7 @@ impl<'a> Parser<'a> {
Err(Some(lit)) => match lit.kind {
ast::LitKind::Err => None,
_ => {
- self.struct_span_err(lit.span, "non-string ABI literal")
- .span_suggestion(
- lit.span,
- "specify the ABI with a string literal",
- "\"C\"",
- Applicability::MaybeIncorrect,
- )
- .emit();
+ self.sess.emit_err(NonStringAbiLiteral { span: lit.span });
None
}
},
@@ -1432,25 +1464,18 @@ pub(crate) fn make_unclosed_delims_error(
// `None` here means an `Eof` was found. We already emit those errors elsewhere, we add them to
// `unmatched_braces` only for error recovery in the `Parser`.
let found_delim = unmatched.found_delim?;
- let span: MultiSpan = if let Some(sp) = unmatched.unclosed_span {
- vec![unmatched.found_span, sp].into()
- } else {
- unmatched.found_span.into()
- };
- let mut err = sess.span_diagnostic.struct_span_err(
- span,
- &format!(
- "mismatched closing delimiter: `{}`",
- pprust::token_kind_to_string(&token::CloseDelim(found_delim)),
- ),
- );
- err.span_label(unmatched.found_span, "mismatched closing delimiter");
- if let Some(sp) = unmatched.candidate_span {
- err.span_label(sp, "closing delimiter possibly meant for this");
- }
+ let mut spans = vec![unmatched.found_span];
if let Some(sp) = unmatched.unclosed_span {
- err.span_label(sp, "unclosed delimiter");
- }
+ spans.push(sp);
+ };
+ let err = MismatchedClosingDelimiter {
+ spans,
+ delimiter: pprust::token_kind_to_string(&token::CloseDelim(found_delim)).to_string(),
+ unmatched: unmatched.found_span,
+ opening_candidate: unmatched.candidate_span,
+ unclosed: unmatched.unclosed_span,
+ }
+ .into_diagnostic(&sess.span_diagnostic);
Some(err)
}
diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs
index 120a3c267..52c11b4e3 100644
--- a/compiler/rustc_parse/src/parser/pat.rs
+++ b/compiler/rustc_parse/src/parser/pat.rs
@@ -1,5 +1,5 @@
use super::{ForceCollect, Parser, PathStyle, TrailingToken};
-use crate::parser::diagnostics::RemoveLet;
+use crate::errors::RemoveLet;
use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole};
use rustc_ast::mut_visit::{noop_visit_pat, MutVisitor};
use rustc_ast::ptr::P;
@@ -10,6 +10,7 @@ use rustc_ast::{
};
use rustc_ast_pretty::pprust;
use rustc_errors::{struct_span_err, Applicability, DiagnosticBuilder, ErrorGuaranteed, PResult};
+use rustc_session::errors::ExprParenthesesNeeded;
use rustc_span::source_map::{respan, Span, Spanned};
use rustc_span::symbol::{kw, sym, Ident};
@@ -401,6 +402,25 @@ impl<'a> Parser<'a> {
} else {
PatKind::Path(qself, path)
}
+ } else if matches!(self.token.kind, token::Lifetime(_))
+ // In pattern position, we're totally fine with using "next token isn't colon"
+ // as a heuristic. We could probably just always try to recover if it's a lifetime,
+ // because we never have `'a: label {}` in a pattern position anyways, but it does
+ // keep us from suggesting something like `let 'a: Ty = ..` => `let 'a': Ty = ..`
+ && !self.look_ahead(1, |token| matches!(token.kind, token::Colon))
+ {
+ // Recover a `'a` as a `'a'` literal
+ let lt = self.expect_lifetime();
+ let lit = self.recover_unclosed_char(lt.ident, |self_| {
+ let expected = expected.unwrap_or("pattern");
+ let msg =
+ format!("expected {}, found {}", expected, super::token_descr(&self_.token));
+
+ let mut err = self_.struct_span_err(self_.token.span, &msg);
+ err.span_label(self_.token.span, format!("expected {}", expected));
+ err
+ });
+ PatKind::Lit(self.mk_expr(lo, ExprKind::Lit(lit)))
} else {
// Try to parse everything else as literal with optional minus
match self.parse_literal_maybe_minus() {
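
A rough example of the pattern-position variant of the unclosed-char recovery (the function name is illustrative):

    fn is_vowel(c: char) -> bool {
        match c {
            // 'a | 'e => true,    // missing closing quotes: each `'a` lexes as a
            //                     // lifetime and is now recovered as a char literal
            'a' | 'e' | 'i' | 'o' | 'u' => true,
            _ => false,
        }
    }
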
@@ -693,7 +713,7 @@ impl<'a> Parser<'a> {
let sp = self.sess.source_map().start_point(self.token.span);
if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow().get(&sp) {
- self.sess.expr_parentheses_needed(&mut err, *sp);
+ err.subdiagnostic(ExprParenthesesNeeded::surrounding(*sp));
}
Err(err)
@@ -776,7 +796,6 @@ impl<'a> Parser<'a> {
/// expression syntax `...expr` for splatting in expressions.
fn parse_pat_range_to(&mut self, mut re: Spanned<RangeEnd>) -> PResult<'a, PatKind> {
let end = self.parse_pat_range_end()?;
- self.sess.gated_spans.gate(sym::half_open_range_patterns, re.span.to(self.prev_token.span));
if let RangeEnd::Included(ref mut syn @ RangeSyntax::DotDotDot) = &mut re.node {
*syn = RangeSyntax::DotDotEq;
self.struct_span_err(re.span, "range-to patterns with `...` are not allowed")
@@ -799,6 +818,7 @@ impl<'a> Parser<'a> {
|| t.kind == token::Dot // e.g. `.5` for recovery;
|| t.can_begin_literal_maybe_minus() // e.g. `42`.
|| t.is_whole_expr()
+ || t.is_lifetime() // recover `'a` instead of `'a'`
})
}
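Note: adding `t.is_lifetime()` to this look-ahead lets the unclosed-char recovery above also fire for range endpoints. Illustrative input, assuming the endpoint is then routed through the same recovery:

    match c {
        'a'..='z => true,   // `'z` lexes as a lifetime; recovered as 'a'..='z'
        _ => false,
    }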
diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs
index 3d957406b..12753c678 100644
--- a/compiler/rustc_parse/src/parser/stmt.rs
+++ b/compiler/rustc_parse/src/parser/stmt.rs
@@ -1,7 +1,5 @@
-use super::attr::DEFAULT_INNER_ATTR_FORBIDDEN;
-use super::diagnostics::{
- AttemptLocalParseRecovery, Error, InvalidVariableDeclaration, InvalidVariableDeclarationSub,
-};
+use super::attr::InnerAttrForbiddenReason;
+use super::diagnostics::AttemptLocalParseRecovery;
use super::expr::LhsExpr;
use super::pat::RecoverComma;
use super::path::PathStyle;
@@ -9,6 +7,12 @@ use super::TrailingToken;
use super::{
AttrWrapper, BlockMode, FnParseMode, ForceCollect, Parser, Restrictions, SemiColonMode,
};
+use crate::errors::{
+ AssignmentElseNotAllowed, CompoundAssignmentExpressionInLet, ConstLetMutuallyExclusive,
+ DocCommentDoesNotDocumentAnything, ExpectedStatementAfterOuterAttr, InvalidCurlyInLetElse,
+ InvalidExpressionInLetElse, InvalidVariableDeclaration, InvalidVariableDeclarationSub,
+ WrapExpressionInParentheses,
+};
use crate::maybe_whole;
use rustc_ast as ast;
@@ -112,11 +116,7 @@ impl<'a> Parser<'a> {
let bl = self.parse_block()?;
// Destructuring assignment ... else.
// This is not allowed, but point it out in a nice way.
- let mut err = self.struct_span_err(
- e.span.to(bl.span),
- "<assignment> ... else { ... } is not allowed",
- );
- err.emit();
+ self.sess.emit_err(AssignmentElseNotAllowed { span: e.span.to(bl.span) });
}
self.mk_stmt(lo.to(e.span), StmtKind::Expr(e))
} else {
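Note: this branch fires on a destructuring assignment, rather than a `let`, followed by `else`. A hedged example of the rejected shape (`some_pair` is a hypothetical function):

    let (mut x, mut y) = (0, 0);
    (x, y) = some_pair() else { return };   // error: <assignment> ... else { ... } is not allowed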
@@ -202,9 +202,12 @@ impl<'a> Parser<'a> {
fn error_outer_attrs(&self, attrs: &[Attribute]) {
if let [.., last] = attrs {
if last.is_doc_comment() {
- self.span_err(last.span, Error::UselessDocComment).emit();
+ self.sess.emit_err(DocCommentDoesNotDocumentAnything {
+ span: last.span,
+ missing_comma: None,
+ });
} else if attrs.iter().any(|a| a.style == AttrStyle::Outer) {
- self.struct_span_err(last.span, "expected statement after outer attribute").emit();
+ self.sess.emit_err(ExpectedStatementAfterOuterAttr { span: last.span });
}
}
}
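Note: both messages fire when trailing attributes or doc comments have no statement left to attach to, for example (illustrative):

    fn f() {
        let _x = 1;
        #[allow(unused)]   // error: expected statement after outer attribute
    }

    fn g() {
        let _y = 2;
        /// reported: this doc comment does not document anything
    }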
@@ -255,17 +258,7 @@ impl<'a> Parser<'a> {
let lo = self.prev_token.span;
if self.token.is_keyword(kw::Const) && self.look_ahead(1, |t| t.is_ident()) {
- self.struct_span_err(
- lo.to(self.token.span),
- "`const` and `let` are mutually exclusive",
- )
- .span_suggestion(
- lo.to(self.token.span),
- "remove `let`",
- "const",
- Applicability::MaybeIncorrect,
- )
- .emit();
+ self.sess.emit_err(ConstLetMutuallyExclusive { span: lo.to(self.token.span) });
self.bump();
}
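Note: the recovery targets `let const NAME ...` and suggests dropping the `let`, e.g. (illustrative):

    let const MAX: u32 = 10;   // error: `const` and `let` are mutually exclusive
    // suggested fix: const MAX: u32 = 10;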
@@ -363,44 +356,27 @@ impl<'a> Parser<'a> {
fn check_let_else_init_bool_expr(&self, init: &ast::Expr) {
if let ast::ExprKind::Binary(op, ..) = init.kind {
if op.node.lazy() {
- let suggs = vec![
- (init.span.shrink_to_lo(), "(".to_string()),
- (init.span.shrink_to_hi(), ")".to_string()),
- ];
- self.struct_span_err(
- init.span,
- &format!(
- "a `{}` expression cannot be directly assigned in `let...else`",
- op.node.to_string()
- ),
- )
- .multipart_suggestion(
- "wrap the expression in parentheses",
- suggs,
- Applicability::MachineApplicable,
- )
- .emit();
+ self.sess.emit_err(InvalidExpressionInLetElse {
+ span: init.span,
+ operator: op.node.to_string(),
+ sugg: WrapExpressionInParentheses {
+ left: init.span.shrink_to_lo(),
+ right: init.span.shrink_to_hi(),
+ },
+ });
}
}
}
fn check_let_else_init_trailing_brace(&self, init: &ast::Expr) {
if let Some(trailing) = classify::expr_trailing_brace(init) {
- let err_span = trailing.span.with_lo(trailing.span.hi() - BytePos(1));
- let suggs = vec![
- (trailing.span.shrink_to_lo(), "(".to_string()),
- (trailing.span.shrink_to_hi(), ")".to_string()),
- ];
- self.struct_span_err(
- err_span,
- "right curly brace `}` before `else` in a `let...else` statement not allowed",
- )
- .multipart_suggestion(
- "try wrapping the expression in parentheses",
- suggs,
- Applicability::MachineApplicable,
- )
- .emit();
+ self.sess.emit_err(InvalidCurlyInLetElse {
+ span: trailing.span.with_lo(trailing.span.hi() - BytePos(1)),
+ sugg: WrapExpressionInParentheses {
+ left: trailing.span.shrink_to_lo(),
+ right: trailing.span.shrink_to_hi(),
+ },
+ });
}
}
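Note: both let-else recoveries above now share a `WrapExpressionInParentheses` subdiagnostic built from a left and a right span. One plausible definition, with an assumed Fluent slug (the real one lives in the new errors.rs, outside this hunk):

    #[derive(Subdiagnostic)]
    #[multipart_suggestion(parser_sugg_wrap_expression_in_parentheses, applicability = "machine-applicable")]
    pub(crate) struct WrapExpressionInParentheses {
        #[suggestion_part(code = "(")]
        pub left: Span,
        #[suggestion_part(code = ")")]
        pub right: Span,
    }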
@@ -409,18 +385,7 @@ impl<'a> Parser<'a> {
let eq_consumed = match self.token.kind {
token::BinOpEq(..) => {
// Recover `let x <op>= 1` as `let x = 1`
- self.struct_span_err(
- self.token.span,
- "can't reassign to an uninitialized variable",
- )
- .span_suggestion_short(
- self.token.span,
- "initialize the variable",
- "=",
- Applicability::MaybeIncorrect,
- )
- .help("if you meant to overwrite, remove the `let` binding")
- .emit();
+ self.sess.emit_err(CompoundAssignmentExpressionInLet { span: self.token.span });
self.bump();
true
}
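Note: illustrative input for this recovery, a compound-assignment operator on a binding that has no value yet:

    let x += 1;   // error: can't reassign to an uninitialized variable
    // suggested: let x = 1;  (or drop the `let` if overwriting an existing binding was intended)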
@@ -434,7 +399,12 @@ impl<'a> Parser<'a> {
pub(super) fn parse_block(&mut self) -> PResult<'a, P<Block>> {
let (attrs, block) = self.parse_inner_attrs_and_block()?;
if let [.., last] = &*attrs {
- self.error_on_forbidden_inner_attr(last.span, DEFAULT_INNER_ATTR_FORBIDDEN);
+ self.error_on_forbidden_inner_attr(
+ last.span,
+ super::attr::InnerAttrPolicy::Forbidden(Some(
+ InnerAttrForbiddenReason::InCodeBlock,
+ )),
+ );
}
Ok(block)
}
@@ -583,39 +553,46 @@ impl<'a> Parser<'a> {
match stmt.kind {
// Expression without semicolon.
StmtKind::Expr(ref mut expr)
- if self.token != token::Eof && classify::expr_requires_semi_to_be_stmt(expr) =>
- {
+ if self.token != token::Eof && classify::expr_requires_semi_to_be_stmt(expr) => {
// Just check for errors and recover; do not eat semicolon yet.
- if let Err(mut e) =
- self.expect_one_of(&[], &[token::Semi, token::CloseDelim(Delimiter::Brace)])
- {
- if let TokenKind::DocComment(..) = self.token.kind {
- if let Ok(snippet) = self.span_to_snippet(self.token.span) {
- let sp = self.token.span;
- let marker = &snippet[..3];
- let (comment_marker, doc_comment_marker) = marker.split_at(2);
-
- e.span_suggestion(
- sp.with_hi(sp.lo() + BytePos(marker.len() as u32)),
- &format!(
- "add a space before `{}` to use a regular comment",
- doc_comment_marker,
- ),
- format!("{} {}", comment_marker, doc_comment_marker),
- Applicability::MaybeIncorrect,
- );
+ // `expect_one_of` returns PResult<'a, bool /* recovered */>
+ let replace_with_err =
+ match self.expect_one_of(&[], &[token::Semi, token::CloseDelim(Delimiter::Brace)]) {
+ // Recover from parser, skip type error to avoid extra errors.
+ Ok(true) => true,
+ Err(mut e) => {
+ if let TokenKind::DocComment(..) = self.token.kind &&
+ let Ok(snippet) = self.span_to_snippet(self.token.span) {
+ let sp = self.token.span;
+ let marker = &snippet[..3];
+ let (comment_marker, doc_comment_marker) = marker.split_at(2);
+
+ e.span_suggestion(
+ sp.with_hi(sp.lo() + BytePos(marker.len() as u32)),
+ &format!(
+ "add a space before `{}` to use a regular comment",
+ doc_comment_marker,
+ ),
+ format!("{} {}", comment_marker, doc_comment_marker),
+ Applicability::MaybeIncorrect,
+ );
}
- }
- if let Err(mut e) =
- self.check_mistyped_turbofish_with_multiple_type_params(e, expr)
- {
- if recover.no() {
- return Err(e);
+
+ if let Err(mut e) =
+ self.check_mistyped_turbofish_with_multiple_type_params(e, expr)
+ {
+ if recover.no() {
+ return Err(e);
+ }
+ e.emit();
+ self.recover_stmt();
}
- e.emit();
- self.recover_stmt();
+ true
}
- // Don't complain about type errors in body tail after parse error (#57383).
+ _ => false
+ };
+ if replace_with_err {
+ // We already emitted an error, so don't emit another type error
let sp = expr.span.to(self.prev_token.span);
*expr = self.mk_expr_err(sp);
}
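Note: the doc-comment arm of the rewritten recovery covers inputs like the following, where a doc comment sits where `;` or `}` was expected; the suggestion asks for a space after `//` so it reads as a regular comment (`g` and `h` are hypothetical):

    fn f() {
        g()
        /// this was meant to be an ordinary comment
        h()
    }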
diff --git a/compiler/rustc_parse/src/parser/ty.rs b/compiler/rustc_parse/src/parser/ty.rs
index b47f0c097..2a8512acf 100644
--- a/compiler/rustc_parse/src/parser/ty.rs
+++ b/compiler/rustc_parse/src/parser/ty.rs
@@ -397,10 +397,13 @@ impl<'a> Parser<'a> {
fn parse_ty_ptr(&mut self) -> PResult<'a, TyKind> {
let mutbl = self.parse_const_or_mut().unwrap_or_else(|| {
let span = self.prev_token.span;
- let msg = "expected mut or const in raw pointer type";
- self.struct_span_err(span, msg)
- .span_label(span, msg)
- .help("use `*mut T` or `*const T` as appropriate")
+ self.struct_span_err(span, "expected `mut` or `const` keyword in raw pointer type")
+ .span_suggestions(
+ span.shrink_to_hi(),
+ "add `mut` or `const` here",
+ ["mut ".to_string(), "const ".to_string()].into_iter(),
+ Applicability::HasPlaceholders,
+ )
.emit();
Mutability::Not
});
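Note: the reworked diagnostic drops the free-form help line in favor of two concrete insertions right after the `*` (offered with `Applicability::HasPlaceholders`, since the parser cannot know which one is intended). Illustrative input:

    type Bytes = *u8;   // error: expected `mut` or `const` keyword in raw pointer type
    // suggestions: *const u8  or  *mut u8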