Diffstat (limited to 'compiler/rustc_parse')
-rw-r--r--  compiler/rustc_parse/Cargo.toml                              |    2
-rw-r--r--  compiler/rustc_parse/locales/en-US.ftl                       |  734
-rw-r--r--  compiler/rustc_parse/src/errors.rs                           | 1136
-rw-r--r--  compiler/rustc_parse/src/lexer/diagnostics.rs                |  119
-rw-r--r--  compiler/rustc_parse/src/lexer/mod.rs                        |  189
-rw-r--r--  compiler/rustc_parse/src/lexer/tokentrees.rs                 |  150
-rw-r--r--  compiler/rustc_parse/src/lexer/unescape_error_reporting.rs   |  248
-rw-r--r--  compiler/rustc_parse/src/lexer/unicode_chars.rs              |   60
-rw-r--r--  compiler/rustc_parse/src/lib.rs                              |   36
-rw-r--r--  compiler/rustc_parse/src/parser/attr.rs                      |   26
-rw-r--r--  compiler/rustc_parse/src/parser/attr_wrapper.rs              |    6
-rw-r--r--  compiler/rustc_parse/src/parser/diagnostics.rs               |  173
-rw-r--r--  compiler/rustc_parse/src/parser/expr.rs                      |  543
-rw-r--r--  compiler/rustc_parse/src/parser/generics.rs                  |   61
-rw-r--r--  compiler/rustc_parse/src/parser/item.rs                      |  429
-rw-r--r--  compiler/rustc_parse/src/parser/mod.rs                       |  203
-rw-r--r--  compiler/rustc_parse/src/parser/nonterminal.rs               |   22
-rw-r--r--  compiler/rustc_parse/src/parser/pat.rs                       |  382
-rw-r--r--  compiler/rustc_parse/src/parser/path.rs                      |   58
-rw-r--r--  compiler/rustc_parse/src/parser/stmt.rs                      |   96
-rw-r--r--  compiler/rustc_parse/src/parser/ty.rs                        |  182
21 files changed, 3288 insertions, 1567 deletions
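
Most of the additions below come in two matched pieces: a new Fluent message catalogue at compiler/rustc_parse/locales/en-US.ftl, and #[derive(Diagnostic)] / #[derive(Subdiagnostic)] structs in compiler/rustc_parse/src/errors.rs whose #[diag(...)], #[label(...)], and #[suggestion(...)] attributes now name the `parse_`-prefixed Fluent slugs instead of bare sub-slugs. As a rough, self-contained sketch of how a Fluent message with a `{$token}` placeable resolves to a rendered string, the snippet below uses the standalone fluent-bundle and unic-langid crates purely for illustration (an assumption; the compiler itself goes through rustc_errors and the generated `crate::fluent_generated` constants, not these crates):

use fluent_bundle::{FluentArgs, FluentBundle, FluentResource};
use unic_langid::LanguageIdentifier;

fn main() {
    // One of the messages added in locales/en-US.ftl, with a `{$token}` placeable.
    let ftl = r#"parse_expected_semi_found_str = expected `;`, found `{$token}`"#;
    let resource = FluentResource::try_new(ftl.to_string()).expect("valid FTL");

    let locale: LanguageIdentifier = "en-US".parse().expect("valid locale");
    let mut bundle = FluentBundle::new(vec![locale]);
    // Skip the Unicode isolation marks Fluent inserts around arguments by default,
    // so the assertion below can compare plain text.
    bundle.set_use_isolating(false);
    bundle.add_resource(resource).expect("no duplicate message ids");

    let message = bundle
        .get_message("parse_expected_semi_found_str")
        .expect("message exists");
    let pattern = message.value().expect("message has a value");

    let mut args = FluentArgs::new();
    args.set("token", "]");

    let mut errors = Vec::new();
    let rendered = bundle.format_pattern(pattern, Some(&args), &mut errors);
    assert!(errors.is_empty());
    assert_eq!(rendered, "expected `;`, found `]`");
}

In the compiler, the same lookup is driven by the slugs on the structs in src/errors.rs, which is why the attribute arguments in the diff below are renamed to carry the catalogue's `parse_` prefix (e.g. `suggestion` becomes `parse_suggestion`, `use_instead` becomes `parse_use_instead`).
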
diff --git a/compiler/rustc_parse/Cargo.toml b/compiler/rustc_parse/Cargo.toml
index dbcfb3903..3eb158c81 100644
--- a/compiler/rustc_parse/Cargo.toml
+++ b/compiler/rustc_parse/Cargo.toml
@@ -16,7 +16,7 @@ rustc_lexer = { path = "../rustc_lexer" }
rustc_macros = { path = "../rustc_macros" }
rustc_session = { path = "../rustc_session" }
rustc_span = { path = "../rustc_span" }
-thin-vec = "0.2.8"
+thin-vec = "0.2.12"
tracing = "0.1"
unicode-normalization = "0.1.11"
unicode-width = "0.1.4"
diff --git a/compiler/rustc_parse/locales/en-US.ftl b/compiler/rustc_parse/locales/en-US.ftl
new file mode 100644
index 000000000..e76e91fc1
--- /dev/null
+++ b/compiler/rustc_parse/locales/en-US.ftl
@@ -0,0 +1,734 @@
+parse_struct_literal_body_without_path =
+ struct literal body without path
+ .suggestion = you might have forgotten to add the struct literal inside the block
+
+parse_struct_literal_needing_parens =
+ invalid struct literal
+ .suggestion = you might need to surround the struct literal in parentheses
+
+parse_maybe_report_ambiguous_plus =
+ ambiguous `+` in a type
+ .suggestion = use parentheses to disambiguate
+
+parse_maybe_recover_from_bad_type_plus =
+ expected a path on the left-hand side of `+`, not `{$ty}`
+
+parse_add_paren = try adding parentheses
+
+parse_forgot_paren = perhaps you forgot parentheses?
+
+parse_expect_path = expected a path
+
+parse_maybe_recover_from_bad_qpath_stage_2 =
+ missing angle brackets in associated item path
+ .suggestion = try: `{$ty}`
+
+parse_incorrect_semicolon =
+ expected item, found `;`
+ .suggestion = remove this semicolon
+ .help = {$name} declarations are not followed by a semicolon
+
+parse_incorrect_use_of_await =
+ incorrect use of `await`
+ .parentheses_suggestion = `await` is not a method call, remove the parentheses
+ .postfix_suggestion = `await` is a postfix operation
+
+parse_in_in_typo =
+ expected iterable, found keyword `in`
+ .suggestion = remove the duplicated `in`
+
+parse_invalid_variable_declaration =
+ invalid variable declaration
+
+parse_switch_mut_let_order =
+ switch the order of `mut` and `let`
+parse_missing_let_before_mut = missing keyword
+parse_use_let_not_auto = write `let` instead of `auto` to introduce a new variable
+parse_use_let_not_var = write `let` instead of `var` to introduce a new variable
+
+parse_invalid_comparison_operator = invalid comparison operator `{$invalid}`
+ .use_instead = `{$invalid}` is not a valid comparison operator, use `{$correct}`
+ .spaceship_operator_invalid = `<=>` is not a valid comparison operator, use `std::cmp::Ordering`
+
+parse_invalid_logical_operator = `{$incorrect}` is not a logical operator
+ .note = unlike in e.g., Python and PHP, `&&` and `||` are used for logical operators
+ .use_amp_amp_for_conjunction = use `&&` to perform logical conjunction
+ .use_pipe_pipe_for_disjunction = use `||` to perform logical disjunction
+
+parse_tilde_is_not_unary_operator = `~` cannot be used as a unary operator
+ .suggestion = use `!` to perform bitwise not
+
+parse_unexpected_if_with_if = unexpected `if` in the condition expression
+ .suggestion = remove the `if`
+
+parse_unexpected_token_after_not = unexpected {$negated_desc} after identifier
+parse_unexpected_token_after_not_bitwise = use `!` to perform bitwise not
+parse_unexpected_token_after_not_logical = use `!` to perform logical negation
+parse_unexpected_token_after_not_default = use `!` to perform logical negation or bitwise not
+
+parse_malformed_loop_label = malformed loop label
+ .suggestion = use the correct loop label format
+
+parse_lifetime_in_borrow_expression = borrow expressions cannot be annotated with lifetimes
+ .suggestion = remove the lifetime annotation
+ .label = annotated with lifetime here
+
+parse_field_expression_with_generic = field expressions cannot have generic arguments
+
+parse_macro_invocation_with_qualified_path = macros cannot use qualified paths
+
+parse_unexpected_token_after_label = expected `while`, `for`, `loop` or `{"{"}` after a label
+ .suggestion_remove_label = consider removing the label
+ .suggestion_enclose_in_block = consider enclosing expression in a block
+
+parse_require_colon_after_labeled_expression = labeled expression must be followed by `:`
+ .note = labels are used before loops and blocks, allowing e.g., `break 'label` to them
+ .label = the label
+ .suggestion = add `:` after the label
+
+parse_do_catch_syntax_removed = found removed `do catch` syntax
+ .note = following RFC #2388, the new non-placeholder syntax is `try`
+ .suggestion = replace with the new syntax
+
+parse_float_literal_requires_integer_part = float literals must have an integer part
+ .suggestion = must have an integer part
+
+parse_invalid_int_literal_width = invalid width `{$width}` for integer literal
+ .help = valid widths are 8, 16, 32, 64 and 128
+
+parse_invalid_num_literal_base_prefix = invalid base prefix for number literal
+ .note = base prefixes (`0xff`, `0b1010`, `0o755`) are lowercase
+ .suggestion = try making the prefix lowercase
+
+parse_invalid_num_literal_suffix = invalid suffix `{$suffix}` for number literal
+ .label = invalid suffix `{$suffix}`
+ .help = the suffix must be one of the numeric types (`u32`, `isize`, `f32`, etc.)
+
+parse_invalid_float_literal_width = invalid width `{$width}` for float literal
+ .help = valid widths are 32 and 64
+
+parse_invalid_float_literal_suffix = invalid suffix `{$suffix}` for float literal
+ .label = invalid suffix `{$suffix}`
+ .help = valid suffixes are `f32` and `f64`
+
+parse_int_literal_too_large = integer literal is too large
+
+parse_missing_semicolon_before_array = expected `;`, found `[`
+ .suggestion = consider adding `;` here
+
+parse_invalid_block_macro_segment = cannot use a `block` macro fragment here
+ .label = the `block` fragment is within this context
+
+parse_expect_dotdot_not_dotdotdot = expected `..`, found `...`
+ .suggestion = use `..` to fill in the rest of the fields
+
+parse_if_expression_missing_then_block = this `if` expression is missing a block after the condition
+ .add_then_block = add a block here
+ .condition_possibly_unfinished = this binary operation is possibly unfinished
+
+parse_if_expression_missing_condition = missing condition for `if` expression
+ .condition_label = expected condition here
+ .block_label = if this block is the condition of the `if` expression, then it must be followed by another block
+
+parse_expected_expression_found_let = expected expression, found `let` statement
+
+parse_expect_eq_instead_of_eqeq = expected `=`, found `==`
+ .suggestion = consider using `=` here
+
+parse_expected_else_block = expected `{"{"}`, found {$first_tok}
+ .label = expected an `if` or a block after this `else`
+ .suggestion = add an `if` if this is the condition of a chained `else if` statement
+
+parse_outer_attribute_not_allowed_on_if_else = outer attributes are not allowed on `if` and `else` branches
+ .branch_label = the attributes are attached to this branch
+ .ctx_label = the branch belongs to this `{$ctx}`
+ .suggestion = remove the attributes
+
+parse_missing_in_in_for_loop = missing `in` in `for` loop
+ .use_in_not_of = try using `in` here instead
+ .add_in = try adding `in` here
+
+parse_missing_expression_in_for_loop = missing expression to iterate on in `for` loop
+ .suggestion = try adding an expression to the `for` loop
+
+parse_loop_else = `{$loop_kind}...else` loops are not supported
+ .note = consider moving this `else` clause to a separate `if` statement and use a `bool` variable to control if it should run
+ .loop_keyword = `else` is attached to this loop
+
+parse_missing_comma_after_match_arm = expected `,` following `match` arm
+ .suggestion = missing a comma here to end this `match` arm
+
+parse_catch_after_try = keyword `catch` cannot follow a `try` block
+ .help = try using `match` on the result of the `try` block instead
+
+parse_comma_after_base_struct = cannot use a comma after the base struct
+ .note = the base struct must always be the last field
+ .suggestion = remove this comma
+
+parse_eq_field_init = expected `:`, found `=`
+ .suggestion = replace equals symbol with a colon
+
+parse_dotdotdot = unexpected token: `...`
+ .suggest_exclusive_range = use `..` for an exclusive range
+ .suggest_inclusive_range = or `..=` for an inclusive range
+
+parse_left_arrow_operator = unexpected token: `<-`
+ .suggestion = if you meant to write a comparison against a negative value, add a space in between `<` and `-`
+
+parse_remove_let = expected pattern, found `let`
+ .suggestion = remove the unnecessary `let` keyword
+
+parse_use_eq_instead = unexpected `==`
+ .suggestion = try using `=` instead
+
+parse_use_empty_block_not_semi = expected { "`{}`" }, found `;`
+ .suggestion = try using { "`{}`" } instead
+
+parse_comparison_interpreted_as_generic =
+ `<` is interpreted as a start of generic arguments for `{$type}`, not a comparison
+ .label_args = interpreted as generic arguments
+ .label_comparison = not interpreted as comparison
+ .suggestion = try comparing the cast value
+
+parse_shift_interpreted_as_generic =
+ `<<` is interpreted as a start of generic arguments for `{$type}`, not a shift
+ .label_args = interpreted as generic arguments
+ .label_comparison = not interpreted as shift
+ .suggestion = try shifting the cast value
+
+parse_found_expr_would_be_stmt = expected expression, found `{$token}`
+ .label = expected expression
+
+parse_leading_plus_not_supported = leading `+` is not supported
+ .label = unexpected `+`
+ .suggestion_remove_plus = try removing the `+`
+
+parse_parentheses_with_struct_fields = invalid `struct` delimiters or `fn` call arguments
+ .suggestion_braces_for_struct = if `{$type}` is a struct, use braces as delimiters
+ .suggestion_no_fields_for_fn = if `{$type}` is a function, use the arguments directly
+
+parse_labeled_loop_in_break = parentheses are required around this expression to avoid confusion with a labeled break expression
+
+parse_sugg_wrap_expression_in_parentheses = wrap the expression in parentheses
+
+parse_array_brackets_instead_of_braces = this is a block expression, not an array
+ .suggestion = to make an array, use square brackets instead of curly braces
+
+parse_match_arm_body_without_braces = `match` arm body without braces
+ .label_statements = {$num_statements ->
+ [one] this statement is not surrounded by a body
+ *[other] these statements are not surrounded by a body
+ }
+ .label_arrow = while parsing the `match` arm starting here
+ .suggestion_add_braces = surround the {$num_statements ->
+ [one] statement
+ *[other] statements
+ } with a body
+ .suggestion_use_comma_not_semicolon = replace `;` with `,` to end a `match` arm expression
+
+parse_inclusive_range_extra_equals = unexpected `=` after inclusive range
+ .suggestion_remove_eq = use `..=` instead
+ .note = inclusive ranges end with a single equals sign (`..=`)
+
+parse_inclusive_range_match_arrow = unexpected `>` after inclusive range
+ .label = this is parsed as an inclusive range `..=`
+ .suggestion = add a space between the pattern and `=>`
+
+parse_inclusive_range_no_end = inclusive range with no end
+ .suggestion_open_range = use `..` instead
+ .note = inclusive ranges must be bounded at the end (`..=b` or `a..=b`)
+
+parse_struct_literal_not_allowed_here = struct literals are not allowed here
+ .suggestion = surround the struct literal with parentheses
+
+parse_invalid_interpolated_expression = invalid interpolated expression
+
+parse_hexadecimal_float_literal_not_supported = hexadecimal float literal is not supported
+parse_octal_float_literal_not_supported = octal float literal is not supported
+parse_binary_float_literal_not_supported = binary float literal is not supported
+parse_not_supported = not supported
+
+parse_invalid_literal_suffix = suffixes on {$kind} literals are invalid
+ .label = invalid suffix `{$suffix}`
+
+parse_invalid_literal_suffix_on_tuple_index = suffixes on a tuple index are invalid
+ .label = invalid suffix `{$suffix}`
+ .tuple_exception_line_1 = `{$suffix}` is *temporarily* accepted on tuple index fields as it was incorrectly accepted on stable for a few releases
+ .tuple_exception_line_2 = on proc macros, you'll want to use `syn::Index::from` or `proc_macro::Literal::*_unsuffixed` for code that will desugar to tuple field access
+ .tuple_exception_line_3 = see issue #60210 <https://github.com/rust-lang/rust/issues/60210> for more information
+
+parse_non_string_abi_literal = non-string ABI literal
+ .suggestion = specify the ABI with a string literal
+
+parse_mismatched_closing_delimiter = mismatched closing delimiter: `{$delimiter}`
+ .label_unmatched = mismatched closing delimiter
+ .label_opening_candidate = closing delimiter possibly meant for this
+ .label_unclosed = unclosed delimiter
+
+parse_incorrect_visibility_restriction = incorrect visibility restriction
+ .help = some possible visibility restrictions are:
+ `pub(crate)`: visible only on the current crate
+ `pub(super)`: visible only in the current module's parent
+ `pub(in path::to::module)`: visible only on the specified path
+ .suggestion = make this visible only to module `{$inner_str}` with `in`
+
+parse_assignment_else_not_allowed = <assignment> ... else {"{"} ... {"}"} is not allowed
+
+parse_expected_statement_after_outer_attr = expected statement after outer attribute
+
+parse_doc_comment_does_not_document_anything = found a documentation comment that doesn't document anything
+ .help = doc comments must come before what they document, if a comment was intended use `//`
+ .suggestion = missing comma here
+
+parse_const_let_mutually_exclusive = `const` and `let` are mutually exclusive
+ .suggestion = remove `let`
+
+parse_invalid_expression_in_let_else = a `{$operator}` expression cannot be directly assigned in `let...else`
+parse_invalid_curly_in_let_else = right curly brace `{"}"}` before `else` in a `let...else` statement not allowed
+parse_extra_if_in_let_else = remove the `if` if you meant to write a `let...else` statement
+
+parse_compound_assignment_expression_in_let = can't reassign to an uninitialized variable
+ .suggestion = initialize the variable
+ .help = if you meant to overwrite, remove the `let` binding
+
+parse_suffixed_literal_in_attribute = suffixed literals are not allowed in attributes
+ .help = instead of using a suffixed literal (`1u8`, `1.0f32`, etc.), use an unsuffixed version (`1`, `1.0`, etc.)
+
+parse_invalid_meta_item = expected unsuffixed literal or identifier, found `{$token}`
+
+parse_label_inner_attr_does_not_annotate_this = the inner attribute doesn't annotate this {$item}
+parse_sugg_change_inner_attr_to_outer = to annotate the {$item}, change the attribute from inner to outer style
+
+parse_inner_attr_not_permitted_after_outer_doc_comment = an inner attribute is not permitted following an outer doc comment
+ .label_attr = not permitted following an outer doc comment
+ .label_prev_doc_comment = previous doc comment
+ .label_does_not_annotate_this = {parse_label_inner_attr_does_not_annotate_this}
+ .sugg_change_inner_to_outer = {parse_sugg_change_inner_attr_to_outer}
+
+parse_inner_attr_not_permitted_after_outer_attr = an inner attribute is not permitted following an outer attribute
+ .label_attr = not permitted following an outer attribute
+ .label_prev_attr = previous outer attribute
+ .label_does_not_annotate_this = {parse_label_inner_attr_does_not_annotate_this}
+ .sugg_change_inner_to_outer = {parse_sugg_change_inner_attr_to_outer}
+
+parse_inner_attr_not_permitted = an inner attribute is not permitted in this context
+ .label_does_not_annotate_this = {parse_label_inner_attr_does_not_annotate_this}
+ .sugg_change_inner_to_outer = {parse_sugg_change_inner_attr_to_outer}
+
+parse_inner_attr_explanation = inner attributes, like `#![no_std]`, annotate the item enclosing them, and are usually found at the beginning of source files
+parse_outer_attr_explanation = outer attributes, like `#[test]`, annotate the item following them
+
+parse_inner_doc_comment_not_permitted = expected outer doc comment
+ .note = inner doc comments like this (starting with `//!` or `/*!`) can only appear before items
+ .suggestion = you might have meant to write a regular comment
+ .label_does_not_annotate_this = the inner doc comment doesn't annotate this {$item}
+ .sugg_change_inner_to_outer = to annotate the {$item}, change the doc comment from inner to outer style
+
+parse_expected_identifier_found_reserved_identifier_str = expected identifier, found reserved identifier `{$token}`
+parse_expected_identifier_found_keyword_str = expected identifier, found keyword `{$token}`
+parse_expected_identifier_found_reserved_keyword_str = expected identifier, found reserved keyword `{$token}`
+parse_expected_identifier_found_doc_comment_str = expected identifier, found doc comment `{$token}`
+parse_expected_identifier_found_str = expected identifier, found `{$token}`
+
+parse_expected_identifier_found_reserved_identifier = expected identifier, found reserved identifier
+parse_expected_identifier_found_keyword = expected identifier, found keyword
+parse_expected_identifier_found_reserved_keyword = expected identifier, found reserved keyword
+parse_expected_identifier_found_doc_comment = expected identifier, found doc comment
+parse_expected_identifier = expected identifier
+
+parse_sugg_escape_to_use_as_identifier = escape `{$ident_name}` to use it as an identifier
+
+parse_sugg_remove_comma = remove this comma
+
+parse_expected_semi_found_reserved_identifier_str = expected `;`, found reserved identifier `{$token}`
+parse_expected_semi_found_keyword_str = expected `;`, found keyword `{$token}`
+parse_expected_semi_found_reserved_keyword_str = expected `;`, found reserved keyword `{$token}`
+parse_expected_semi_found_doc_comment_str = expected `;`, found doc comment `{$token}`
+parse_expected_semi_found_str = expected `;`, found `{$token}`
+
+parse_sugg_change_this_to_semi = change this to `;`
+parse_sugg_add_semi = add `;` here
+parse_label_unexpected_token = unexpected token
+
+parse_unmatched_angle_brackets = {$num_extra_brackets ->
+ [one] unmatched angle bracket
+ *[other] unmatched angle brackets
+ }
+ .suggestion = {$num_extra_brackets ->
+ [one] remove extra angle bracket
+ *[other] remove extra angle brackets
+ }
+
+parse_generic_parameters_without_angle_brackets = generic parameters without surrounding angle brackets
+ .suggestion = surround the type parameters with angle brackets
+
+parse_comparison_operators_cannot_be_chained = comparison operators cannot be chained
+ .sugg_parentheses_for_function_args = or use `(...)` if you meant to specify fn arguments
+ .sugg_split_comparison = split the comparison into two
+ .sugg_parenthesize = parenthesize the comparison
+parse_sugg_turbofish_syntax = use `::<...>` instead of `<...>` to specify lifetime, type, or const arguments
+
+parse_question_mark_in_type = invalid `?` in type
+ .label = `?` is only allowed on expressions, not types
+ .suggestion = if you meant to express that the type might not contain a value, use the `Option` wrapper type
+
+parse_unexpected_parentheses_in_for_head = unexpected parentheses surrounding `for` loop head
+ .suggestion = remove parentheses in `for` loop
+
+parse_doc_comment_on_param_type = documentation comments cannot be applied to a function parameter's type
+ .label = doc comments are not allowed here
+
+parse_attribute_on_param_type = attributes cannot be applied to a function parameter's type
+ .label = attributes are not allowed here
+
+parse_pattern_method_param_without_body = patterns aren't allowed in methods without bodies
+ .suggestion = give this argument a name or use an underscore to ignore it
+
+parse_self_param_not_first = unexpected `self` parameter in function
+ .label = must be the first parameter of an associated function
+
+parse_const_generic_without_braces = expressions must be enclosed in braces to be used as const generic arguments
+ .suggestion = enclose the `const` expression in braces
+
+parse_unexpected_const_param_declaration = unexpected `const` parameter declaration
+ .label = expected a `const` expression, not a parameter declaration
+ .suggestion = `const` parameters must be declared for the `impl`
+
+parse_unexpected_const_in_generic_param = expected lifetime, type, or constant, found keyword `const`
+ .suggestion = the `const` keyword is only needed in the definition of the type
+
+parse_async_move_order_incorrect = the order of `move` and `async` is incorrect
+ .suggestion = try switching the order
+
+parse_double_colon_in_bound = expected `:` followed by trait or lifetime
+ .suggestion = use single colon
+
+parse_fn_ptr_with_generics = function pointer types may not have generic parameters
+ .suggestion = consider moving the lifetime {$arity ->
+ [one] parameter
+ *[other] parameters
+ } to {$for_param_list_exists ->
+ [true] the
+ *[false] a
+ } `for` parameter list
+
+parse_invalid_identifier_with_leading_number = expected identifier, found number literal
+ .label = identifiers cannot start with a number
+
+parse_maybe_fn_typo_with_impl = you might have meant to write `impl` instead of `fn`
+ .suggestion = replace `fn` with `impl` here
+
+parse_expected_fn_path_found_fn_keyword = expected identifier, found keyword `fn`
+ .suggestion = use `Fn` to refer to the trait
+
+parse_where_clause_before_tuple_struct_body = where clauses are not allowed before tuple struct bodies
+ .label = unexpected where clause
+ .name_label = while parsing this tuple struct
+ .body_label = the struct body
+ .suggestion = move the body before the where clause
+
+parse_async_fn_in_2015 = `async fn` is not permitted in Rust 2015
+ .label = to use `async fn`, switch to Rust 2018 or later
+
+parse_async_block_in_2015 = `async` blocks are only allowed in Rust 2018 or later
+
+parse_self_argument_pointer = cannot pass `self` by raw pointer
+ .label = cannot pass `self` by raw pointer
+
+parse_visibility_not_followed_by_item = visibility `{$vis}` is not followed by an item
+ .label = the visibility
+ .help = you likely meant to define an item, e.g., `{$vis} fn foo() {"{}"}`
+
+parse_default_not_followed_by_item = `default` is not followed by an item
+ .label = the `default` qualifier
+ .note = only `fn`, `const`, `type`, or `impl` items may be prefixed by `default`
+
+parse_missing_struct_for_struct_definition = missing `struct` for struct definition
+ .suggestion = add `struct` here to parse `{$ident}` as a public struct
+
+parse_missing_fn_for_function_definition = missing `fn` for function definition
+ .suggestion = add `fn` here to parse `{$ident}` as a public function
+
+parse_missing_fn_for_method_definition = missing `fn` for method definition
+ .suggestion = add `fn` here to parse `{$ident}` as a public method
+
+parse_ambiguous_missing_keyword_for_item_definition = missing `fn` or `struct` for function or struct definition
+ .suggestion = if you meant to call a macro, try
+ .help = if you meant to call a macro, remove the `pub` and add a trailing `!` after the identifier
+
+parse_missing_trait_in_trait_impl = missing trait in a trait impl
+ .suggestion_add_trait = add a trait here
+ .suggestion_remove_for = for an inherent impl, drop this `for`
+
+parse_missing_for_in_trait_impl = missing `for` in a trait impl
+ .suggestion = add `for` here
+
+parse_expected_trait_in_trait_impl_found_type = expected a trait, found type
+
+parse_non_item_in_item_list = non-item in item list
+ .suggestion_use_const_not_let = consider using `const` instead of `let` for associated const
+ .label_list_start = item list starts here
+ .label_non_item = non-item starts here
+ .label_list_end = item list ends here
+ .suggestion_remove_semicolon = consider removing this semicolon
+
+parse_bounds_not_allowed_on_trait_aliases = bounds are not allowed on trait aliases
+
+parse_trait_alias_cannot_be_auto = trait aliases cannot be `auto`
+parse_trait_alias_cannot_be_unsafe = trait aliases cannot be `unsafe`
+
+parse_associated_static_item_not_allowed = associated `static` items are not allowed
+
+parse_extern_crate_name_with_dashes = crate name using dashes are not valid in `extern crate` statements
+ .label = dash-separated idents are not valid
+ .suggestion = if the original crate name uses dashes you need to use underscores in the code
+
+parse_extern_item_cannot_be_const = extern items cannot be `const`
+ .suggestion = try using a static value
+ .note = for more information, visit https://doc.rust-lang.org/std/keyword.extern.html
+
+parse_const_global_cannot_be_mutable = const globals cannot be mutable
+ .label = cannot be mutable
+ .suggestion = you might want to declare a static instead
+
+parse_missing_const_type = missing type for `{$kind}` item
+ .suggestion = provide a type for the item
+
+parse_enum_struct_mutually_exclusive = `enum` and `struct` are mutually exclusive
+ .suggestion = replace `enum struct` with
+
+parse_unexpected_token_after_struct_name = expected `where`, `{"{"}`, `(`, or `;` after struct name
+parse_unexpected_token_after_struct_name_found_reserved_identifier = expected `where`, `{"{"}`, `(`, or `;` after struct name, found reserved identifier `{$token}`
+parse_unexpected_token_after_struct_name_found_keyword = expected `where`, `{"{"}`, `(`, or `;` after struct name, found keyword `{$token}`
+parse_unexpected_token_after_struct_name_found_reserved_keyword = expected `where`, `{"{"}`, `(`, or `;` after struct name, found reserved keyword `{$token}`
+parse_unexpected_token_after_struct_name_found_doc_comment = expected `where`, `{"{"}`, `(`, or `;` after struct name, found doc comment `{$token}`
+parse_unexpected_token_after_struct_name_found_other = expected `where`, `{"{"}`, `(`, or `;` after struct name, found `{$token}`
+
+parse_unexpected_self_in_generic_parameters = unexpected keyword `Self` in generic parameters
+ .note = you cannot use `Self` as a generic parameter because it is reserved for associated items
+
+parse_unexpected_default_value_for_lifetime_in_generic_parameters = unexpected default lifetime parameter
+ .label = lifetime parameters cannot have default values
+
+parse_multiple_where_clauses = cannot define duplicate `where` clauses on an item
+ .label = previous `where` clause starts here
+ .suggestion = consider joining the two `where` clauses into one
+
+parse_nonterminal_expected_item_keyword = expected an item keyword
+parse_nonterminal_expected_statement = expected a statement
+parse_nonterminal_expected_ident = expected ident, found `{$token}`
+parse_nonterminal_expected_lifetime = expected a lifetime, found `{$token}`
+
+parse_or_pattern_not_allowed_in_let_binding = top-level or-patterns are not allowed in `let` bindings
+parse_or_pattern_not_allowed_in_fn_parameters = top-level or-patterns are not allowed in function parameters
+parse_sugg_remove_leading_vert_in_pattern = remove the `|`
+parse_sugg_wrap_pattern_in_parens = wrap the pattern in parentheses
+
+parse_note_pattern_alternatives_use_single_vert = alternatives in or-patterns are separated with `|`, not `||`
+
+parse_unexpected_vert_vert_before_function_parameter = unexpected `||` before function parameter
+ .suggestion = remove the `||`
+
+parse_label_while_parsing_or_pattern_here = while parsing this or-pattern starting here
+
+parse_unexpected_vert_vert_in_pattern = unexpected token `||` in pattern
+ .suggestion = use a single `|` to separate multiple alternative patterns
+
+parse_trailing_vert_not_allowed = a trailing `|` is not allowed in an or-pattern
+ .suggestion = remove the `{$token}`
+
+parse_dotdotdot_rest_pattern = unexpected `...`
+ .label = not a valid pattern
+ .suggestion = for a rest pattern, use `..` instead of `...`
+
+parse_pattern_on_wrong_side_of_at = pattern on wrong side of `@`
+ .label_pattern = pattern on the left, should be on the right
+ .label_binding = binding on the right, should be on the left
+ .suggestion = switch the order
+
+parse_expected_binding_left_of_at = left-hand side of `@` must be a binding
+ .label_lhs = interpreted as a pattern, not a binding
+ .label_rhs = also a pattern
+ .note = bindings are `x`, `mut x`, `ref x`, and `ref mut x`
+
+parse_ambiguous_range_pattern = the range pattern here has ambiguous interpretation
+ .suggestion = add parentheses to clarify the precedence
+
+parse_unexpected_lifetime_in_pattern = unexpected lifetime `{$symbol}` in pattern
+ .suggestion = remove the lifetime
+
+parse_ref_mut_order_incorrect = the order of `mut` and `ref` is incorrect
+ .suggestion = try switching the order
+
+parse_mut_on_nested_ident_pattern = `mut` must be attached to each individual binding
+ .suggestion = add `mut` to each binding
+parse_mut_on_non_ident_pattern = `mut` must be followed by a named binding
+ .suggestion = remove the `mut` prefix
+parse_note_mut_pattern_usage = `mut` may be followed by `variable` and `variable @ pattern`
+
+parse_repeated_mut_in_pattern = `mut` on a binding may not be repeated
+ .suggestion = remove the additional `mut`s
+
+parse_dot_dot_dot_range_to_pattern_not_allowed = range-to patterns with `...` are not allowed
+ .suggestion = use `..=` instead
+
+parse_enum_pattern_instead_of_identifier = expected identifier, found enum pattern
+
+parse_dot_dot_dot_for_remaining_fields = expected field pattern, found `{$token_str}`
+ .suggestion = to omit remaining fields, use `..`
+
+parse_expected_comma_after_pattern_field = expected `,`
+
+parse_return_types_use_thin_arrow = return types are denoted using `->`
+ .suggestion = use `->` instead
+
+parse_need_plus_after_trait_object_lifetime = lifetime in trait object type must be followed by `+`
+
+parse_expected_mut_or_const_in_raw_pointer_type = expected `mut` or `const` keyword in raw pointer type
+ .suggestion = add `mut` or `const` here
+
+parse_lifetime_after_mut = lifetime must precede `mut`
+ .suggestion = place the lifetime before `mut`
+
+parse_dyn_after_mut = `mut` must precede `dyn`
+ .suggestion = place `mut` before `dyn`
+
+parse_fn_pointer_cannot_be_const = an `fn` pointer type cannot be `const`
+ .label = `const` because of this
+ .suggestion = remove the `const` qualifier
+
+parse_fn_pointer_cannot_be_async = an `fn` pointer type cannot be `async`
+ .label = `async` because of this
+ .suggestion = remove the `async` qualifier
+
+parse_nested_c_variadic_type = C-variadic type `...` may not be nested inside another type
+
+parse_invalid_dyn_keyword = invalid `dyn` keyword
+ .help = `dyn` is only needed at the start of a trait `+`-separated list
+ .suggestion = remove this keyword
+
+parse_negative_bounds_not_supported = negative bounds are not supported
+ .label = negative bounds are not supported
+ .suggestion = {$num_bounds ->
+ [one] remove the bound
+ *[other] remove the bounds
+ }
+
+parse_help_set_edition_cargo = set `edition = "{$edition}"` in `Cargo.toml`
+parse_help_set_edition_standalone = pass `--edition {$edition}` to `rustc`
+parse_note_edition_guide = for more on editions, read https://doc.rust-lang.org/edition-guide
+
+parse_unexpected_token_after_dot = unexpected token: `{$actual}`
+
+parse_cannot_be_raw_ident = `{$ident}` cannot be a raw identifier
+
+parse_cr_doc_comment = bare CR not allowed in {$block ->
+ [true] block doc-comment
+ *[false] doc-comment
+}
+
+parse_no_digits_literal = no valid digits found for number
+
+parse_invalid_digit_literal = invalid digit for a base {$base} literal
+
+parse_empty_exponent_float = expected at least one digit in exponent
+
+parse_float_literal_unsupported_base = {$base} float literal is not supported
+
+parse_more_than_one_char = character literal may only contain one codepoint
+ .followed_by = this `{$chr}` is followed by the combining {$len ->
+ [one] mark
+ *[other] marks
+ } `{$escaped_marks}`
+ .non_printing = there are non-printing characters, the full sequence is `{$escaped}`
+ .consider_normalized = consider using the normalized form `{$ch}` of this character
+ .remove_non = consider removing the non-printing characters
+ .use_double_quotes = if you meant to write a {$is_byte ->
+ [true] byte string
+ *[false] `str`
+ } literal, use double quotes
+
+parse_no_brace_unicode_escape = incorrect unicode escape sequence
+ .label = {parse_no_brace_unicode_escape}
+ .use_braces = format of unicode escape sequences uses braces
+ .format_of_unicode = format of unicode escape sequences is `\u{"{...}"}`
+
+parse_invalid_unicode_escape = invalid unicode character escape
+ .label = invalid escape
+ .help = unicode escape must {$surrogate ->
+ [true] not be a surrogate
+ *[false] be at most 10FFFF
+ }
+
+parse_escape_only_char = {$byte ->
+ [true] byte
+ *[false] character
+ } constant must be escaped: `{$escaped_msg}`
+ .escape = escape the character
+
+parse_bare_cr = {$double_quotes ->
+ [true] bare CR not allowed in string, use `\r` instead
+ *[false] character constant must be escaped: `\r`
+ }
+ .escape = escape the character
+
+parse_bare_cr_in_raw_string = bare CR not allowed in raw string
+
+parse_too_short_hex_escape = numeric character escape is too short
+
+parse_invalid_char_in_escape = {parse_invalid_char_in_escape_msg}: `{$ch}`
+ .label = {parse_invalid_char_in_escape_msg}
+
+parse_invalid_char_in_escape_msg = invalid character in {$is_hex ->
+ [true] numeric character
+ *[false] unicode
+ } escape
+
+parse_out_of_range_hex_escape = out of range hex escape
+ .label = must be a character in the range [\x00-\x7f]
+
+parse_leading_underscore_unicode_escape = {parse_leading_underscore_unicode_escape_label}: `_`
+parse_leading_underscore_unicode_escape_label = invalid start of unicode escape
+
+parse_overlong_unicode_escape = overlong unicode escape
+ .label = must have at most 6 hex digits
+
+parse_unclosed_unicode_escape = unterminated unicode escape
+ .label = missing a closing `{"}"}`
+ .terminate = terminate the unicode escape
+
+parse_unicode_escape_in_byte = unicode escape in byte string
+ .label = {parse_unicode_escape_in_byte}
+ .help = unicode escape sequences cannot be used as a byte or in a byte string
+
+parse_empty_unicode_escape = empty unicode escape
+ .label = this escape must have at least 1 hex digit
+
+parse_zero_chars = empty character literal
+ .label = {parse_zero_chars}
+
+parse_lone_slash = invalid trailing slash in literal
+ .label = {parse_lone_slash}
+
+parse_unskipped_whitespace = non-ASCII whitespace symbol '{$ch}' is not skipped
+ .label = {parse_unskipped_whitespace}
+
+parse_multiple_skipped_lines = multiple lines skipped by escaped newline
+ .label = skipping everything up to and including this point
+
+parse_unknown_prefix = prefix `{$prefix}` is unknown
+ .label = unknown prefix
+ .note = prefixed identifiers and literals are reserved since Rust 2021
+ .suggestion_br = use `br` for a raw byte string
+ .suggestion_whitespace = consider inserting whitespace here
+
+parse_too_many_hashes = too many `#` symbols: raw strings may be delimited by up to 255 `#` symbols, but found {$num}
+
+parse_unknown_start_of_token = unknown start of token: {$escaped}
+ .sugg_quotes = Unicode characters '“' (Left Double Quotation Mark) and '”' (Right Double Quotation Mark) look like '{$ascii_str}' ({$ascii_name}), but are not
+ .sugg_other = Unicode character '{$ch}' ({$u_name}) looks like '{$ascii_str}' ({$ascii_name}), but it is not
+ .help_null = source files must contain UTF-8 encoded text, unexpected null bytes might occur when a different encoding is used
+ .note_repeats = character appears {$repeats ->
+ [one] once more
+ *[other] {$repeats} more times
+ }
diff --git a/compiler/rustc_parse/src/errors.rs b/compiler/rustc_parse/src/errors.rs
index 06b970ad9..1662db36d 100644
--- a/compiler/rustc_parse/src/errors.rs
+++ b/compiler/rustc_parse/src/errors.rs
@@ -1,11 +1,15 @@
+use std::borrow::Cow;
+
use rustc_ast::token::Token;
-use rustc_ast::Path;
-use rustc_errors::{fluent, AddToDiagnostic, Applicability, EmissionGuarantee, IntoDiagnostic};
+use rustc_ast::{Path, Visibility};
+use rustc_errors::{AddToDiagnostic, Applicability, EmissionGuarantee, IntoDiagnostic};
use rustc_macros::{Diagnostic, Subdiagnostic};
use rustc_session::errors::ExprParenthesesNeeded;
+use rustc_span::edition::{Edition, LATEST_STABLE_EDITION};
use rustc_span::symbol::Ident;
use rustc_span::{Span, Symbol};
+use crate::fluent_generated as fluent;
use crate::parser::TokenDescription;
#[derive(Diagnostic)]
@@ -75,7 +79,7 @@ pub(crate) struct IncorrectSemicolon<'a> {
#[diag(parse_incorrect_use_of_await)]
pub(crate) struct IncorrectUseOfAwait {
#[primary_span]
- #[suggestion(parentheses_suggestion, code = "", applicability = "machine-applicable")]
+ #[suggestion(parse_parentheses_suggestion, code = "", applicability = "machine-applicable")]
pub span: Span,
}
@@ -84,7 +88,7 @@ pub(crate) struct IncorrectUseOfAwait {
pub(crate) struct IncorrectAwait {
#[primary_span]
pub span: Span,
- #[suggestion(postfix_suggestion, code = "{expr}.await{question_mark}")]
+ #[suggestion(parse_postfix_suggestion, code = "{expr}.await{question_mark}")]
pub sugg_span: (Span, Applicability),
pub expr: String,
pub question_mark: &'static str,
@@ -137,7 +141,7 @@ pub(crate) struct InvalidComparisonOperator {
#[derive(Subdiagnostic)]
pub(crate) enum InvalidComparisonOperatorSub {
#[suggestion(
- use_instead,
+ parse_use_instead,
style = "short",
applicability = "machine-applicable",
code = "{correct}"
@@ -148,7 +152,7 @@ pub(crate) enum InvalidComparisonOperatorSub {
invalid: String,
correct: String,
},
- #[label(spaceship_operator_invalid)]
+ #[label(parse_spaceship_operator_invalid)]
Spaceship(#[primary_span] Span),
}
@@ -166,14 +170,14 @@ pub(crate) struct InvalidLogicalOperator {
#[derive(Subdiagnostic)]
pub(crate) enum InvalidLogicalOperatorSub {
#[suggestion(
- use_amp_amp_for_conjunction,
+ parse_use_amp_amp_for_conjunction,
style = "short",
applicability = "machine-applicable",
code = "&&"
)]
Conjunction(#[primary_span] Span),
#[suggestion(
- use_pipe_pipe_for_disjunction,
+ parse_use_pipe_pipe_for_disjunction,
style = "short",
applicability = "machine-applicable",
code = "||"
@@ -259,14 +263,14 @@ pub(crate) struct UnexpectedTokenAfterLabel {
#[primary_span]
#[label(parse_unexpected_token_after_label)]
pub span: Span,
- #[suggestion(suggestion_remove_label, style = "verbose", code = "")]
+ #[suggestion(parse_suggestion_remove_label, style = "verbose", code = "")]
pub remove_label: Option<Span>,
#[subdiagnostic]
pub enclose_in_block: Option<UnexpectedTokenAfterLabelSugg>,
}
#[derive(Subdiagnostic)]
-#[multipart_suggestion(suggestion_enclose_in_block, applicability = "machine-applicable")]
+#[multipart_suggestion(parse_suggestion_enclose_in_block, applicability = "machine-applicable")]
pub(crate) struct UnexpectedTokenAfterLabelSugg {
#[suggestion_part(code = "{{ ")]
pub left: Span,
@@ -337,24 +341,33 @@ pub(crate) struct IfExpressionMissingThenBlock {
#[primary_span]
pub if_span: Span,
#[subdiagnostic]
- pub sub: IfExpressionMissingThenBlockSub,
+ pub missing_then_block_sub: IfExpressionMissingThenBlockSub,
+ #[subdiagnostic]
+ pub let_else_sub: Option<IfExpressionLetSomeSub>,
}
#[derive(Subdiagnostic)]
pub(crate) enum IfExpressionMissingThenBlockSub {
- #[help(condition_possibly_unfinished)]
+ #[help(parse_condition_possibly_unfinished)]
UnfinishedCondition(#[primary_span] Span),
- #[help(add_then_block)]
+ #[help(parse_add_then_block)]
AddThenBlock(#[primary_span] Span),
}
+#[derive(Subdiagnostic)]
+#[suggestion(parse_extra_if_in_let_else, applicability = "maybe-incorrect", code = "")]
+pub(crate) struct IfExpressionLetSomeSub {
+ #[primary_span]
+ pub if_span: Span,
+}
+
#[derive(Diagnostic)]
#[diag(parse_if_expression_missing_condition)]
pub(crate) struct IfExpressionMissingCondition {
#[primary_span]
- #[label(condition_label)]
+ #[label(parse_condition_label)]
pub if_span: Span,
- #[label(block_label)]
+ #[label(parse_block_label)]
pub block_span: Span,
}
@@ -392,10 +405,10 @@ pub(crate) struct OuterAttributeNotAllowedOnIfElse {
#[primary_span]
pub last: Span,
- #[label(branch_label)]
+ #[label(parse_branch_label)]
pub branch_span: Span,
- #[label(ctx_label)]
+ #[label(parse_ctx_label)]
pub ctx_span: Span,
pub ctx: String,
@@ -415,13 +428,41 @@ pub(crate) struct MissingInInForLoop {
#[derive(Subdiagnostic)]
pub(crate) enum MissingInInForLoopSub {
// Has been misleading, at least in the past (closed Issue #48492), thus maybe-incorrect
- #[suggestion(use_in_not_of, style = "short", applicability = "maybe-incorrect", code = "in")]
+ #[suggestion(
+ parse_use_in_not_of,
+ style = "short",
+ applicability = "maybe-incorrect",
+ code = "in"
+ )]
InNotOf(#[primary_span] Span),
- #[suggestion(add_in, style = "short", applicability = "maybe-incorrect", code = " in ")]
+ #[suggestion(parse_add_in, style = "short", applicability = "maybe-incorrect", code = " in ")]
AddIn(#[primary_span] Span),
}
#[derive(Diagnostic)]
+#[diag(parse_missing_expression_in_for_loop)]
+pub(crate) struct MissingExpressionInForLoop {
+ #[primary_span]
+ #[suggestion(
+ code = "/* expression */ ",
+ applicability = "has-placeholders",
+ style = "verbose"
+ )]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_loop_else)]
+#[note]
+pub(crate) struct LoopElseNotSupported {
+ #[primary_span]
+ pub span: Span,
+ pub loop_kind: &'static str,
+ #[label(parse_loop_keyword)]
+ pub loop_kw: Span,
+}
+
+#[derive(Diagnostic)]
#[diag(parse_missing_comma_after_match_arm)]
pub(crate) struct MissingCommaAfterMatchArm {
#[primary_span]
@@ -460,8 +501,8 @@ pub(crate) struct EqFieldInit {
#[diag(parse_dotdotdot)]
pub(crate) struct DotDotDot {
#[primary_span]
- #[suggestion(suggest_exclusive_range, applicability = "maybe-incorrect", code = "..")]
- #[suggestion(suggest_inclusive_range, applicability = "maybe-incorrect", code = "..=")]
+ #[suggestion(parse_suggest_exclusive_range, applicability = "maybe-incorrect", code = "..")]
+ #[suggestion(parse_suggest_inclusive_range, applicability = "maybe-incorrect", code = "..=")]
pub span: Span,
}
@@ -501,10 +542,10 @@ pub(crate) struct UseEmptyBlockNotSemi {
#[diag(parse_comparison_interpreted_as_generic)]
pub(crate) struct ComparisonInterpretedAsGeneric {
#[primary_span]
- #[label(label_comparison)]
+ #[label(parse_label_comparison)]
pub comparison: Span,
pub r#type: Path,
- #[label(label_args)]
+ #[label(parse_label_args)]
pub args: Span,
#[subdiagnostic]
pub suggestion: ComparisonOrShiftInterpretedAsGenericSugg,
@@ -514,17 +555,17 @@ pub(crate) struct ComparisonInterpretedAsGeneric {
#[diag(parse_shift_interpreted_as_generic)]
pub(crate) struct ShiftInterpretedAsGeneric {
#[primary_span]
- #[label(label_comparison)]
+ #[label(parse_label_comparison)]
pub shift: Span,
pub r#type: Path,
- #[label(label_args)]
+ #[label(parse_label_args)]
pub args: Span,
#[subdiagnostic]
pub suggestion: ComparisonOrShiftInterpretedAsGenericSugg,
}
#[derive(Subdiagnostic)]
-#[multipart_suggestion(suggestion, applicability = "machine-applicable")]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
pub(crate) struct ComparisonOrShiftInterpretedAsGenericSugg {
#[suggestion_part(code = "(")]
pub left: Span,
@@ -550,7 +591,7 @@ pub(crate) struct LeadingPlusNotSupported {
#[label]
pub span: Span,
#[suggestion(
- suggestion_remove_plus,
+ parse_suggestion_remove_plus,
style = "verbose",
code = "",
applicability = "machine-applicable"
@@ -573,7 +614,7 @@ pub(crate) struct ParenthesesWithStructFields {
}
#[derive(Subdiagnostic)]
-#[multipart_suggestion(suggestion_braces_for_struct, applicability = "maybe-incorrect")]
+#[multipart_suggestion(parse_suggestion_braces_for_struct, applicability = "maybe-incorrect")]
pub(crate) struct BracesForStructLiteral {
#[suggestion_part(code = " {{ ")]
pub first: Span,
@@ -582,7 +623,7 @@ pub(crate) struct BracesForStructLiteral {
}
#[derive(Subdiagnostic)]
-#[multipart_suggestion(suggestion_no_fields_for_fn, applicability = "maybe-incorrect")]
+#[multipart_suggestion(parse_suggestion_no_fields_for_fn, applicability = "maybe-incorrect")]
pub(crate) struct NoFieldsForFnCall {
#[suggestion_part(code = "")]
pub fields: Vec<Span>,
@@ -619,7 +660,7 @@ pub(crate) struct ArrayBracketsInsteadOfSpaces {
}
#[derive(Subdiagnostic)]
-#[multipart_suggestion(suggestion, applicability = "maybe-incorrect")]
+#[multipart_suggestion(parse_suggestion, applicability = "maybe-incorrect")]
pub(crate) struct ArrayBracketsInsteadOfSpacesSugg {
#[suggestion_part(code = "[")]
pub left: Span,
@@ -631,18 +672,57 @@ pub(crate) struct ArrayBracketsInsteadOfSpacesSugg {
#[diag(parse_match_arm_body_without_braces)]
pub(crate) struct MatchArmBodyWithoutBraces {
#[primary_span]
- #[label(label_statements)]
+ #[label(parse_label_statements)]
pub statements: Span,
- #[label(label_arrow)]
+ #[label(parse_label_arrow)]
pub arrow: Span,
pub num_statements: usize,
#[subdiagnostic]
pub sub: MatchArmBodyWithoutBracesSugg,
}
+#[derive(Diagnostic)]
+#[diag(parse_inclusive_range_extra_equals)]
+#[note]
+pub(crate) struct InclusiveRangeExtraEquals {
+ #[primary_span]
+ #[suggestion(
+ parse_suggestion_remove_eq,
+ style = "short",
+ code = "..=",
+ applicability = "maybe-incorrect"
+ )]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_inclusive_range_match_arrow)]
+pub(crate) struct InclusiveRangeMatchArrow {
+ #[primary_span]
+ pub arrow: Span,
+ #[label]
+ pub span: Span,
+ #[suggestion(style = "verbose", code = " ", applicability = "machine-applicable")]
+ pub after_pat: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_inclusive_range_no_end, code = "E0586")]
+#[note]
+pub(crate) struct InclusiveRangeNoEnd {
+ #[primary_span]
+ #[suggestion(
+ parse_suggestion_open_range,
+ code = "..",
+ applicability = "machine-applicable",
+ style = "short"
+ )]
+ pub span: Span,
+}
+
#[derive(Subdiagnostic)]
pub(crate) enum MatchArmBodyWithoutBracesSugg {
- #[multipart_suggestion(suggestion_add_braces, applicability = "machine-applicable")]
+ #[multipart_suggestion(parse_suggestion_add_braces, applicability = "machine-applicable")]
AddBraces {
#[suggestion_part(code = "{{ ")]
left: Span,
@@ -650,7 +730,7 @@ pub(crate) enum MatchArmBodyWithoutBracesSugg {
right: Span,
},
#[suggestion(
- suggestion_use_comma_not_semicolon,
+ parse_suggestion_use_comma_not_semicolon,
code = ",",
applicability = "machine-applicable"
)]
@@ -670,7 +750,7 @@ pub(crate) struct StructLiteralNotAllowedHere {
}
#[derive(Subdiagnostic)]
-#[multipart_suggestion(suggestion, applicability = "machine-applicable")]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
pub(crate) struct StructLiteralNotAllowedHereSugg {
#[suggestion_part(code = "(")]
pub left: Span,
@@ -692,9 +772,9 @@ pub(crate) struct InvalidLiteralSuffixOnTupleIndex {
#[label]
pub span: Span,
pub suffix: Symbol,
- #[help(tuple_exception_line_1)]
- #[help(tuple_exception_line_2)]
- #[help(tuple_exception_line_3)]
+ #[help(parse_tuple_exception_line_1)]
+ #[help(parse_tuple_exception_line_2)]
+ #[help(parse_tuple_exception_line_3)]
pub exception: Option<()>,
}
@@ -712,11 +792,11 @@ pub(crate) struct MismatchedClosingDelimiter {
#[primary_span]
pub spans: Vec<Span>,
pub delimiter: String,
- #[label(label_unmatched)]
+ #[label(parse_label_unmatched)]
pub unmatched: Span,
- #[label(label_opening_candidate)]
+ #[label(parse_label_opening_candidate)]
pub opening_candidate: Option<Span>,
- #[label(label_unclosed)]
+ #[label(parse_label_unclosed)]
pub unclosed: Option<Span>,
}
@@ -867,7 +947,7 @@ impl<'a, G: EmissionGuarantee> IntoDiagnostic<'a, G> for ExpectedIdentifier {
self,
handler: &'a rustc_errors::Handler,
) -> rustc_errors::DiagnosticBuilder<'a, G> {
- let token_descr = super::parser::TokenDescription::from_token(&self.token);
+ let token_descr = TokenDescription::from_token(&self.token);
let mut diag = handler.struct_diagnostic(match token_descr {
Some(TokenDescription::ReservedIdentifier) => {
@@ -913,7 +993,7 @@ impl<'a, G: EmissionGuarantee> IntoDiagnostic<'a, G> for ExpectedSemi {
self,
handler: &'a rustc_errors::Handler,
) -> rustc_errors::DiagnosticBuilder<'a, G> {
- let token_descr = super::parser::TokenDescription::from_token(&self.token);
+ let token_descr = TokenDescription::from_token(&self.token);
let mut diag = handler.struct_diagnostic(match token_descr {
Some(TokenDescription::ReservedIdentifier) => {
@@ -962,7 +1042,7 @@ pub(crate) struct StructLiteralBodyWithoutPath {
}
#[derive(Subdiagnostic)]
-#[multipart_suggestion(suggestion, applicability = "has-placeholders")]
+#[multipart_suggestion(parse_suggestion, applicability = "has-placeholders")]
pub(crate) struct StructLiteralBodyWithoutPathSugg {
#[suggestion_part(code = "{{ SomeStruct ")]
pub before: Span,
@@ -980,7 +1060,7 @@ pub(crate) struct StructLiteralNeedingParens {
}
#[derive(Subdiagnostic)]
-#[multipart_suggestion(suggestion, applicability = "machine-applicable")]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
pub(crate) struct StructLiteralNeedingParensSugg {
#[suggestion_part(code = "(")]
pub before: Span,
@@ -1007,7 +1087,7 @@ pub(crate) struct GenericParamsWithoutAngleBrackets {
}
#[derive(Subdiagnostic)]
-#[multipart_suggestion(suggestion, applicability = "machine-applicable")]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
pub(crate) struct GenericParamsWithoutAngleBracketsSugg {
#[suggestion_part(code = "<")]
pub left: Span,
@@ -1028,7 +1108,7 @@ pub(crate) struct ComparisonOperatorsCannotBeChained {
)]
pub suggest_turbofish: Option<Span>,
#[help(parse_sugg_turbofish_syntax)]
- #[help(sugg_parentheses_for_function_args)]
+ #[help(parse_sugg_parentheses_for_function_args)]
pub help_turbofish: Option<()>,
#[subdiagnostic]
pub chaining_sugg: Option<ComparisonOperatorsCannotBeChainedSugg>,
@@ -1037,7 +1117,7 @@ pub(crate) struct ComparisonOperatorsCannotBeChained {
#[derive(Subdiagnostic)]
pub(crate) enum ComparisonOperatorsCannotBeChainedSugg {
#[suggestion(
- sugg_split_comparison,
+ parse_sugg_split_comparison,
style = "verbose",
code = " && {middle_term}",
applicability = "maybe-incorrect"
@@ -1047,7 +1127,7 @@ pub(crate) enum ComparisonOperatorsCannotBeChainedSugg {
span: Span,
middle_term: String,
},
- #[multipart_suggestion(sugg_parenthesize, applicability = "maybe-incorrect")]
+ #[multipart_suggestion(parse_sugg_parenthesize, applicability = "maybe-incorrect")]
Parenthesize {
#[suggestion_part(code = "(")]
left: Span,
@@ -1067,7 +1147,7 @@ pub(crate) struct QuestionMarkInType {
}
#[derive(Subdiagnostic)]
-#[multipart_suggestion(suggestion, applicability = "machine-applicable")]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
pub(crate) struct QuestionMarkInTypeSugg {
#[suggestion_part(code = "Option<")]
pub left: Span,
@@ -1085,7 +1165,7 @@ pub(crate) struct ParenthesesInForHead {
}
#[derive(Subdiagnostic)]
-#[multipart_suggestion(suggestion, applicability = "machine-applicable")]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
pub(crate) struct ParenthesesInForHeadSugg {
#[suggestion_part(code = "{left_snippet}")]
pub left: Span,
@@ -1145,7 +1225,7 @@ pub(crate) struct ConstGenericWithoutBraces {
}
#[derive(Subdiagnostic)]
-#[multipart_suggestion(suggestion, applicability = "machine-applicable")]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
pub(crate) struct ConstGenericWithoutBracesSugg {
#[suggestion_part(code = "{{ ")]
pub left: Span,
@@ -1165,7 +1245,7 @@ pub(crate) struct UnexpectedConstParamDeclaration {
#[derive(Subdiagnostic)]
pub(crate) enum UnexpectedConstParamDeclarationSugg {
- #[multipart_suggestion(suggestion, applicability = "machine-applicable")]
+ #[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
AddParam {
#[suggestion_part(code = "<{snippet}>")]
impl_generics: Span,
@@ -1174,7 +1254,7 @@ pub(crate) enum UnexpectedConstParamDeclarationSugg {
snippet: String,
ident: String,
},
- #[multipart_suggestion(suggestion, applicability = "machine-applicable")]
+ #[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
AppendParam {
#[suggestion_part(code = ", {snippet}")]
impl_generics_end: Span,
@@ -1221,7 +1301,7 @@ pub(crate) struct FnPtrWithGenerics {
}
#[derive(Subdiagnostic)]
-#[multipart_suggestion(suggestion, applicability = "maybe-incorrect")]
+#[multipart_suggestion(parse_suggestion, applicability = "maybe-incorrect")]
pub(crate) struct FnPtrWithGenericsSugg {
#[suggestion_part(code = "{snippet}")]
pub left: Span,
@@ -1262,16 +1342,16 @@ pub(crate) struct WhereClauseBeforeTupleStructBody {
#[primary_span]
#[label]
pub span: Span,
- #[label(name_label)]
+ #[label(parse_name_label)]
pub name: Span,
- #[label(body_label)]
+ #[label(parse_body_label)]
pub body: Span,
#[subdiagnostic]
pub sugg: Option<WhereClauseBeforeTupleStructBodySugg>,
}
#[derive(Subdiagnostic)]
-#[multipart_suggestion(suggestion, applicability = "machine-applicable")]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
pub(crate) struct WhereClauseBeforeTupleStructBodySugg {
#[suggestion_part(code = "{snippet}")]
pub left: Span,
@@ -1279,3 +1359,943 @@ pub(crate) struct WhereClauseBeforeTupleStructBodySugg {
#[suggestion_part(code = "")]
pub right: Span,
}
+
+#[derive(Diagnostic)]
+#[diag(parse_async_fn_in_2015, code = "E0670")]
+pub(crate) struct AsyncFnIn2015 {
+ #[primary_span]
+ #[label]
+ pub span: Span,
+ #[subdiagnostic]
+ pub help: HelpUseLatestEdition,
+}
+
+#[derive(Subdiagnostic)]
+#[label(parse_async_block_in_2015)]
+pub(crate) struct AsyncBlockIn2015 {
+ #[primary_span]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_self_argument_pointer)]
+pub(crate) struct SelfArgumentPointer {
+ #[primary_span]
+ #[label]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_token_after_dot)]
+pub struct UnexpectedTokenAfterDot<'a> {
+ #[primary_span]
+ pub span: Span,
+ pub actual: Cow<'a, str>,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_visibility_not_followed_by_item)]
+#[help]
+pub(crate) struct VisibilityNotFollowedByItem {
+ #[primary_span]
+ #[label]
+ pub span: Span,
+ pub vis: Visibility,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_default_not_followed_by_item)]
+#[note]
+pub(crate) struct DefaultNotFollowedByItem {
+ #[primary_span]
+ #[label]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+pub(crate) enum MissingKeywordForItemDefinition {
+ #[diag(parse_missing_struct_for_struct_definition)]
+ Struct {
+ #[primary_span]
+ #[suggestion(style = "short", applicability = "maybe-incorrect", code = " struct ")]
+ span: Span,
+ ident: Ident,
+ },
+ #[diag(parse_missing_fn_for_function_definition)]
+ Function {
+ #[primary_span]
+ #[suggestion(style = "short", applicability = "maybe-incorrect", code = " fn ")]
+ span: Span,
+ ident: Ident,
+ },
+ #[diag(parse_missing_fn_for_method_definition)]
+ Method {
+ #[primary_span]
+ #[suggestion(style = "short", applicability = "maybe-incorrect", code = " fn ")]
+ span: Span,
+ ident: Ident,
+ },
+ #[diag(parse_ambiguous_missing_keyword_for_item_definition)]
+ Ambiguous {
+ #[primary_span]
+ span: Span,
+ #[subdiagnostic]
+ subdiag: Option<AmbiguousMissingKwForItemSub>,
+ },
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum AmbiguousMissingKwForItemSub {
+ #[suggestion(parse_suggestion, applicability = "maybe-incorrect", code = "{snippet}!")]
+ SuggestMacro {
+ #[primary_span]
+ span: Span,
+ snippet: String,
+ },
+ #[help(parse_help)]
+ HelpMacro,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_missing_trait_in_trait_impl)]
+pub(crate) struct MissingTraitInTraitImpl {
+ #[primary_span]
+ #[suggestion(parse_suggestion_add_trait, code = " Trait ", applicability = "has-placeholders")]
+ pub span: Span,
+ #[suggestion(parse_suggestion_remove_for, code = "", applicability = "maybe-incorrect")]
+ pub for_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_missing_for_in_trait_impl)]
+pub(crate) struct MissingForInTraitImpl {
+ #[primary_span]
+ #[suggestion(style = "short", code = " for ", applicability = "machine-applicable")]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_expected_trait_in_trait_impl_found_type)]
+pub(crate) struct ExpectedTraitInTraitImplFoundType {
+ #[primary_span]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_bounds_not_allowed_on_trait_aliases)]
+pub(crate) struct BoundsNotAllowedOnTraitAliases {
+ #[primary_span]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_trait_alias_cannot_be_auto)]
+pub(crate) struct TraitAliasCannotBeAuto {
+ #[primary_span]
+ #[label(parse_trait_alias_cannot_be_auto)]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_trait_alias_cannot_be_unsafe)]
+pub(crate) struct TraitAliasCannotBeUnsafe {
+ #[primary_span]
+ #[label(parse_trait_alias_cannot_be_unsafe)]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_associated_static_item_not_allowed)]
+pub(crate) struct AssociatedStaticItemNotAllowed {
+ #[primary_span]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_extern_crate_name_with_dashes)]
+pub(crate) struct ExternCrateNameWithDashes {
+ #[primary_span]
+ #[label]
+ pub span: Span,
+ #[subdiagnostic]
+ pub sugg: ExternCrateNameWithDashesSugg,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
+pub(crate) struct ExternCrateNameWithDashesSugg {
+ #[suggestion_part(code = "_")]
+ pub dashes: Vec<Span>,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_extern_item_cannot_be_const)]
+#[note]
+pub(crate) struct ExternItemCannotBeConst {
+ #[primary_span]
+ pub ident_span: Span,
+ #[suggestion(code = "static ", applicability = "machine-applicable")]
+ pub const_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_const_global_cannot_be_mutable)]
+pub(crate) struct ConstGlobalCannotBeMutable {
+ #[primary_span]
+ #[label]
+ pub ident_span: Span,
+ #[suggestion(code = "static", applicability = "maybe-incorrect")]
+ pub const_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_missing_const_type)]
+pub(crate) struct MissingConstType {
+ #[primary_span]
+ #[suggestion(code = "{colon} <type>", applicability = "has-placeholders")]
+ pub span: Span,
+
+ pub kind: &'static str,
+ pub colon: &'static str,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_enum_struct_mutually_exclusive)]
+pub(crate) struct EnumStructMutuallyExclusive {
+ #[primary_span]
+ #[suggestion(code = "enum", applicability = "machine-applicable")]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+pub(crate) enum UnexpectedTokenAfterStructName {
+ #[diag(parse_unexpected_token_after_struct_name_found_reserved_identifier)]
+ ReservedIdentifier {
+ #[primary_span]
+ #[label(parse_unexpected_token_after_struct_name)]
+ span: Span,
+ token: Token,
+ },
+ #[diag(parse_unexpected_token_after_struct_name_found_keyword)]
+ Keyword {
+ #[primary_span]
+ #[label(parse_unexpected_token_after_struct_name)]
+ span: Span,
+ token: Token,
+ },
+ #[diag(parse_unexpected_token_after_struct_name_found_reserved_keyword)]
+ ReservedKeyword {
+ #[primary_span]
+ #[label(parse_unexpected_token_after_struct_name)]
+ span: Span,
+ token: Token,
+ },
+ #[diag(parse_unexpected_token_after_struct_name_found_doc_comment)]
+ DocComment {
+ #[primary_span]
+ #[label(parse_unexpected_token_after_struct_name)]
+ span: Span,
+ token: Token,
+ },
+ #[diag(parse_unexpected_token_after_struct_name_found_other)]
+ Other {
+ #[primary_span]
+ #[label(parse_unexpected_token_after_struct_name)]
+ span: Span,
+ token: Token,
+ },
+}
+
+impl UnexpectedTokenAfterStructName {
+ pub fn new(span: Span, token: Token) -> Self {
+ match TokenDescription::from_token(&token) {
+ Some(TokenDescription::ReservedIdentifier) => Self::ReservedIdentifier { span, token },
+ Some(TokenDescription::Keyword) => Self::Keyword { span, token },
+ Some(TokenDescription::ReservedKeyword) => Self::ReservedKeyword { span, token },
+ Some(TokenDescription::DocComment) => Self::DocComment { span, token },
+ None => Self::Other { span, token },
+ }
+ }
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_self_in_generic_parameters)]
+#[note]
+pub(crate) struct UnexpectedSelfInGenericParameters {
+ #[primary_span]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_default_value_for_lifetime_in_generic_parameters)]
+pub(crate) struct UnexpectedDefaultValueForLifetimeInGenericParameters {
+ #[primary_span]
+ #[label]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_multiple_where_clauses)]
+pub(crate) struct MultipleWhereClauses {
+ #[primary_span]
+ pub span: Span,
+ #[label]
+ pub previous: Span,
+ #[suggestion(style = "verbose", code = ",", applicability = "maybe-incorrect")]
+ pub between: Span,
+}
+
+#[derive(Diagnostic)]
+pub(crate) enum UnexpectedNonterminal {
+ #[diag(parse_nonterminal_expected_item_keyword)]
+ Item(#[primary_span] Span),
+ #[diag(parse_nonterminal_expected_statement)]
+ Statement(#[primary_span] Span),
+ #[diag(parse_nonterminal_expected_ident)]
+ Ident {
+ #[primary_span]
+ span: Span,
+ token: Token,
+ },
+ #[diag(parse_nonterminal_expected_lifetime)]
+ Lifetime {
+ #[primary_span]
+ span: Span,
+ token: Token,
+ },
+}
+
+#[derive(Diagnostic)]
+pub(crate) enum TopLevelOrPatternNotAllowed {
+ #[diag(parse_or_pattern_not_allowed_in_let_binding)]
+ LetBinding {
+ #[primary_span]
+ span: Span,
+ #[subdiagnostic]
+ sub: Option<TopLevelOrPatternNotAllowedSugg>,
+ },
+ #[diag(parse_or_pattern_not_allowed_in_fn_parameters)]
+ FunctionParameter {
+ #[primary_span]
+ span: Span,
+ #[subdiagnostic]
+ sub: Option<TopLevelOrPatternNotAllowedSugg>,
+ },
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_cannot_be_raw_ident)]
+pub struct CannotBeRawIdent {
+ #[primary_span]
+ pub span: Span,
+ pub ident: Symbol,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_cr_doc_comment)]
+pub struct CrDocComment {
+ #[primary_span]
+ pub span: Span,
+ pub block: bool,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_no_digits_literal, code = "E0768")]
+pub struct NoDigitsLiteral {
+ #[primary_span]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_invalid_digit_literal)]
+pub struct InvalidDigitLiteral {
+ #[primary_span]
+ pub span: Span,
+ pub base: u32,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_empty_exponent_float)]
+pub struct EmptyExponentFloat {
+ #[primary_span]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_float_literal_unsupported_base)]
+pub struct FloatLiteralUnsupportedBase {
+ #[primary_span]
+ pub span: Span,
+ pub base: &'static str,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unknown_prefix)]
+#[note]
+pub struct UnknownPrefix<'a> {
+ #[primary_span]
+ #[label]
+ pub span: Span,
+ pub prefix: &'a str,
+ #[subdiagnostic]
+ pub sugg: Option<UnknownPrefixSugg>,
+}
+
+#[derive(Subdiagnostic)]
+pub enum UnknownPrefixSugg {
+ #[suggestion(
+ parse_suggestion_br,
+ code = "br",
+ applicability = "maybe-incorrect",
+ style = "verbose"
+ )]
+ UseBr(#[primary_span] Span),
+ #[suggestion(
+ parse_suggestion_whitespace,
+ code = " ",
+ applicability = "maybe-incorrect",
+ style = "verbose"
+ )]
+ Whitespace(#[primary_span] Span),
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_too_many_hashes)]
+pub struct TooManyHashes {
+ #[primary_span]
+ pub span: Span,
+ pub num: u32,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unknown_start_of_token)]
+pub struct UnknownTokenStart {
+ #[primary_span]
+ pub span: Span,
+ pub escaped: String,
+ #[subdiagnostic]
+ pub sugg: Option<TokenSubstitution>,
+ #[subdiagnostic]
+ pub null: Option<UnknownTokenNull>,
+ #[subdiagnostic]
+ pub repeat: Option<UnknownTokenRepeat>,
+}
+
+#[derive(Subdiagnostic)]
+pub enum TokenSubstitution {
+ #[suggestion(parse_sugg_quotes, code = "{suggestion}", applicability = "maybe-incorrect")]
+ DirectedQuotes {
+ #[primary_span]
+ span: Span,
+ suggestion: String,
+ ascii_str: &'static str,
+ ascii_name: &'static str,
+ },
+ #[suggestion(parse_sugg_other, code = "{suggestion}", applicability = "maybe-incorrect")]
+ Other {
+ #[primary_span]
+ span: Span,
+ suggestion: String,
+ ch: String,
+ u_name: &'static str,
+ ascii_str: &'static str,
+ ascii_name: &'static str,
+ },
+}
+
+#[derive(Subdiagnostic)]
+#[note(parse_note_repeats)]
+pub struct UnknownTokenRepeat {
+ pub repeats: usize,
+}
+
+#[derive(Subdiagnostic)]
+#[help(parse_help_null)]
+pub struct UnknownTokenNull;
+
+#[derive(Diagnostic)]
+pub enum UnescapeError {
+ #[diag(parse_invalid_unicode_escape)]
+ #[help]
+ InvalidUnicodeEscape {
+ #[primary_span]
+ #[label]
+ span: Span,
+ surrogate: bool,
+ },
+ #[diag(parse_escape_only_char)]
+ EscapeOnlyChar {
+ #[primary_span]
+ span: Span,
+ #[suggestion(parse_escape, applicability = "machine-applicable", code = "{escaped_sugg}")]
+ char_span: Span,
+ escaped_sugg: String,
+ escaped_msg: String,
+ byte: bool,
+ },
+ #[diag(parse_bare_cr)]
+ BareCr {
+ #[primary_span]
+ #[suggestion(parse_escape, applicability = "machine-applicable", code = "\\r")]
+ span: Span,
+ double_quotes: bool,
+ },
+ #[diag(parse_bare_cr_in_raw_string)]
+ BareCrRawString(#[primary_span] Span),
+ #[diag(parse_too_short_hex_escape)]
+ TooShortHexEscape(#[primary_span] Span),
+ #[diag(parse_invalid_char_in_escape)]
+ InvalidCharInEscape {
+ #[primary_span]
+ #[label]
+ span: Span,
+ is_hex: bool,
+ ch: String,
+ },
+ #[diag(parse_out_of_range_hex_escape)]
+ OutOfRangeHexEscape(
+ #[primary_span]
+ #[label]
+ Span,
+ ),
+ #[diag(parse_leading_underscore_unicode_escape)]
+ LeadingUnderscoreUnicodeEscape {
+ #[primary_span]
+ #[label(parse_leading_underscore_unicode_escape_label)]
+ span: Span,
+ ch: String,
+ },
+ #[diag(parse_overlong_unicode_escape)]
+ OverlongUnicodeEscape(
+ #[primary_span]
+ #[label]
+ Span,
+ ),
+ #[diag(parse_unclosed_unicode_escape)]
+ UnclosedUnicodeEscape(
+ #[primary_span]
+ #[label]
+ Span,
+ #[suggestion(
+ parse_terminate,
+ code = "}}",
+ applicability = "maybe-incorrect",
+ style = "verbose"
+ )]
+ Span,
+ ),
+ #[diag(parse_no_brace_unicode_escape)]
+ NoBraceInUnicodeEscape {
+ #[primary_span]
+ span: Span,
+ #[label]
+ label: Option<Span>,
+ #[subdiagnostic]
+ sub: NoBraceUnicodeSub,
+ },
+ #[diag(parse_unicode_escape_in_byte)]
+ #[help]
+ UnicodeEscapeInByte(
+ #[primary_span]
+ #[label]
+ Span,
+ ),
+ #[diag(parse_empty_unicode_escape)]
+ EmptyUnicodeEscape(
+ #[primary_span]
+ #[label]
+ Span,
+ ),
+ #[diag(parse_zero_chars)]
+ ZeroChars(
+ #[primary_span]
+ #[label]
+ Span,
+ ),
+ #[diag(parse_lone_slash)]
+ LoneSlash(
+ #[primary_span]
+ #[label]
+ Span,
+ ),
+ #[diag(parse_unskipped_whitespace)]
+ UnskippedWhitespace {
+ #[primary_span]
+ span: Span,
+ #[label]
+ char_span: Span,
+ ch: String,
+ },
+ #[diag(parse_multiple_skipped_lines)]
+ MultipleSkippedLinesWarning(
+ #[primary_span]
+ #[label]
+ Span,
+ ),
+ #[diag(parse_more_than_one_char)]
+ MoreThanOneChar {
+ #[primary_span]
+ span: Span,
+ #[subdiagnostic]
+ note: Option<MoreThanOneCharNote>,
+ #[subdiagnostic]
+ suggestion: MoreThanOneCharSugg,
+ },
+}
+
+#[derive(Subdiagnostic)]
+pub enum MoreThanOneCharSugg {
+ #[suggestion(
+ parse_consider_normalized,
+ code = "{normalized}",
+ applicability = "machine-applicable"
+ )]
+ NormalizedForm {
+ #[primary_span]
+ span: Span,
+ ch: String,
+ normalized: String,
+ },
+ #[suggestion(parse_remove_non, code = "{ch}", applicability = "maybe-incorrect")]
+ RemoveNonPrinting {
+ #[primary_span]
+ span: Span,
+ ch: String,
+ },
+ #[suggestion(parse_use_double_quotes, code = "{sugg}", applicability = "machine-applicable")]
+ Quotes {
+ #[primary_span]
+ span: Span,
+ is_byte: bool,
+ sugg: String,
+ },
+}
+
+#[derive(Subdiagnostic)]
+pub enum MoreThanOneCharNote {
+ #[note(parse_followed_by)]
+ AllCombining {
+ #[primary_span]
+ span: Span,
+ chr: String,
+ len: usize,
+ escaped_marks: String,
+ },
+ #[note(parse_non_printing)]
+ NonPrinting {
+ #[primary_span]
+ span: Span,
+ escaped: String,
+ },
+}
+
+#[derive(Subdiagnostic)]
+pub enum NoBraceUnicodeSub {
+ #[suggestion(parse_use_braces, code = "{suggestion}", applicability = "maybe-incorrect")]
+ Suggestion {
+ #[primary_span]
+ span: Span,
+ suggestion: String,
+ },
+ #[help(parse_format_of_unicode)]
+ Help,
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum TopLevelOrPatternNotAllowedSugg {
+ #[suggestion(
+ parse_sugg_remove_leading_vert_in_pattern,
+ code = "{pat}",
+ applicability = "machine-applicable"
+ )]
+ RemoveLeadingVert {
+ #[primary_span]
+ span: Span,
+ pat: String,
+ },
+ #[suggestion(
+ parse_sugg_wrap_pattern_in_parens,
+ code = "({pat})",
+ applicability = "machine-applicable"
+ )]
+ WrapInParens {
+ #[primary_span]
+ span: Span,
+ pat: String,
+ },
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_vert_vert_before_function_parameter)]
+#[note(parse_note_pattern_alternatives_use_single_vert)]
+pub(crate) struct UnexpectedVertVertBeforeFunctionParam {
+ #[primary_span]
+ #[suggestion(code = "", applicability = "machine-applicable")]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_vert_vert_in_pattern)]
+pub(crate) struct UnexpectedVertVertInPattern {
+ #[primary_span]
+ #[suggestion(code = "|", applicability = "machine-applicable")]
+ pub span: Span,
+ #[label(parse_label_while_parsing_or_pattern_here)]
+ pub start: Option<Span>,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_trailing_vert_not_allowed)]
+pub(crate) struct TrailingVertNotAllowed {
+ #[primary_span]
+ #[suggestion(code = "", applicability = "machine-applicable")]
+ pub span: Span,
+ #[label(parse_label_while_parsing_or_pattern_here)]
+ pub start: Option<Span>,
+ pub token: Token,
+ #[note(parse_note_pattern_alternatives_use_single_vert)]
+ pub note_double_vert: Option<()>,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_dotdotdot_rest_pattern)]
+pub(crate) struct DotDotDotRestPattern {
+ #[primary_span]
+ #[suggestion(style = "short", code = "..", applicability = "machine-applicable")]
+ #[label]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_pattern_on_wrong_side_of_at)]
+pub(crate) struct PatternOnWrongSideOfAt {
+ #[primary_span]
+ #[suggestion(code = "{whole_pat}", applicability = "machine-applicable")]
+ pub whole_span: Span,
+ pub whole_pat: String,
+ #[label(parse_label_pattern)]
+ pub pattern: Span,
+ #[label(parse_label_binding)]
+ pub binding: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_expected_binding_left_of_at)]
+#[note]
+pub(crate) struct ExpectedBindingLeftOfAt {
+ #[primary_span]
+ pub whole_span: Span,
+ #[label(parse_label_lhs)]
+ pub lhs: Span,
+ #[label(parse_label_rhs)]
+ pub rhs: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_ambiguous_range_pattern)]
+pub(crate) struct AmbiguousRangePattern {
+ #[primary_span]
+ #[suggestion(code = "({pat})", applicability = "maybe-incorrect")]
+ pub span: Span,
+ pub pat: String,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_lifetime_in_pattern)]
+pub(crate) struct UnexpectedLifetimeInPattern {
+ #[primary_span]
+ #[suggestion(code = "", applicability = "machine-applicable")]
+ pub span: Span,
+ pub symbol: Symbol,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_ref_mut_order_incorrect)]
+pub(crate) struct RefMutOrderIncorrect {
+ #[primary_span]
+ #[suggestion(code = "ref mut", applicability = "machine-applicable")]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+pub(crate) enum InvalidMutInPattern {
+ #[diag(parse_mut_on_nested_ident_pattern)]
+ #[note(parse_note_mut_pattern_usage)]
+ NestedIdent {
+ #[primary_span]
+ #[suggestion(code = "{pat}", applicability = "machine-applicable")]
+ span: Span,
+ pat: String,
+ },
+ #[diag(parse_mut_on_non_ident_pattern)]
+ #[note(parse_note_mut_pattern_usage)]
+ NonIdent {
+ #[primary_span]
+ #[suggestion(code = "{pat}", applicability = "machine-applicable")]
+ span: Span,
+ pat: String,
+ },
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_repeated_mut_in_pattern)]
+pub(crate) struct RepeatedMutInPattern {
+ #[primary_span]
+ #[suggestion(code = "", applicability = "machine-applicable")]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_dot_dot_dot_range_to_pattern_not_allowed)]
+pub(crate) struct DotDotDotRangeToPatternNotAllowed {
+ #[primary_span]
+ #[suggestion(style = "short", code = "..=", applicability = "machine-applicable")]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_enum_pattern_instead_of_identifier)]
+pub(crate) struct EnumPatternInsteadOfIdentifier {
+ #[primary_span]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_dot_dot_dot_for_remaining_fields)]
+pub(crate) struct DotDotDotForRemainingFields {
+ #[primary_span]
+ #[suggestion(code = "..", style = "verbose", applicability = "machine-applicable")]
+ pub span: Span,
+ pub token_str: Cow<'static, str>,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_expected_comma_after_pattern_field)]
+pub(crate) struct ExpectedCommaAfterPatternField {
+ #[primary_span]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_return_types_use_thin_arrow)]
+pub(crate) struct ReturnTypesUseThinArrow {
+ #[primary_span]
+ #[suggestion(style = "short", code = "->", applicability = "machine-applicable")]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_need_plus_after_trait_object_lifetime)]
+pub(crate) struct NeedPlusAfterTraitObjectLifetime {
+ #[primary_span]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_expected_mut_or_const_in_raw_pointer_type)]
+pub(crate) struct ExpectedMutOrConstInRawPointerType {
+ #[primary_span]
+ pub span: Span,
+ #[suggestion(code("mut ", "const "), applicability = "has-placeholders")]
+ pub after_asterisk: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_lifetime_after_mut)]
+pub(crate) struct LifetimeAfterMut {
+ #[primary_span]
+ pub span: Span,
+ #[suggestion(code = "&{snippet} mut", applicability = "maybe-incorrect")]
+ pub suggest_lifetime: Option<Span>,
+ pub snippet: String,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_dyn_after_mut)]
+pub(crate) struct DynAfterMut {
+ #[primary_span]
+ #[suggestion(code = "&mut dyn", applicability = "machine-applicable")]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_fn_pointer_cannot_be_const)]
+pub(crate) struct FnPointerCannotBeConst {
+ #[primary_span]
+ pub span: Span,
+ #[suggestion(code = "", applicability = "maybe-incorrect")]
+ #[label]
+ pub qualifier: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_fn_pointer_cannot_be_async)]
+pub(crate) struct FnPointerCannotBeAsync {
+ #[primary_span]
+ pub span: Span,
+ #[suggestion(code = "", applicability = "maybe-incorrect")]
+ #[label]
+ pub qualifier: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_nested_c_variadic_type, code = "E0743")]
+pub(crate) struct NestedCVariadicType {
+ #[primary_span]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_invalid_dyn_keyword)]
+#[help]
+pub(crate) struct InvalidDynKeyword {
+ #[primary_span]
+ #[suggestion(code = "", applicability = "machine-applicable")]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_negative_bounds_not_supported)]
+pub(crate) struct NegativeBoundsNotSupported {
+ #[primary_span]
+ pub negative_bounds: Vec<Span>,
+ #[label]
+ pub last_span: Span,
+ #[subdiagnostic]
+ pub sub: Option<NegativeBoundsNotSupportedSugg>,
+}
+
+#[derive(Subdiagnostic)]
+#[suggestion(
+ parse_suggestion,
+ style = "tool-only",
+ code = "{fixed}",
+ applicability = "machine-applicable"
+)]
+pub(crate) struct NegativeBoundsNotSupportedSugg {
+ #[primary_span]
+ pub bound_list: Span,
+ pub num_bounds: usize,
+ pub fixed: String,
+}
+
+#[derive(Subdiagnostic)]
+pub enum HelpUseLatestEdition {
+ #[help(parse_help_set_edition_cargo)]
+ #[note(parse_note_edition_guide)]
+ Cargo { edition: Edition },
+ #[help(parse_help_set_edition_standalone)]
+ #[note(parse_note_edition_guide)]
+ Standalone { edition: Edition },
+}
+
+impl HelpUseLatestEdition {
+ pub fn new() -> Self {
+ let edition = LATEST_STABLE_EDITION;
+ if std::env::var_os("CARGO").is_some() {
+ Self::Cargo { edition }
+ } else {
+ Self::Standalone { edition }
+ }
+ }
+}
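
The diagnostic structs added above all follow the same pattern: each error is a plain struct whose fields (spans, strings, optional subdiagnostics) fill in the Fluent message named by its #[diag(...)] slug, and call sites construct the struct rather than formatting message strings by hand. A rough, self-contained sketch of that shape (hypothetical stand-in types, not rustc's real Span or derive machinery):

// Hypothetical stand-ins for illustration; rustc's real Span and derive machinery differ.
#[derive(Clone, Copy)]
struct Span { lo: u32, hi: u32 }

trait IntoDiagnostic {
    fn message(&self) -> String;
    fn primary_span(&self) -> Span;
}

// Mirrors the shape of `errors::CannotBeRawIdent { span, ident }` used by the lexer below.
struct CannotBeRawIdent { span: Span, ident: String }

impl IntoDiagnostic for CannotBeRawIdent {
    // In the real crate this text lives in locales/en-US.ftl under `parse_cannot_be_raw_ident`.
    fn message(&self) -> String { format!("`{}` cannot be a raw identifier", self.ident) }
    fn primary_span(&self) -> Span { self.span }
}

fn emit_err(diag: &dyn IntoDiagnostic) {
    let sp = diag.primary_span();
    println!("error[{}..{}]: {}", sp.lo, sp.hi, diag.message());
}

fn main() {
    emit_err(&CannotBeRawIdent { span: Span { lo: 0, hi: 7 }, ident: "super".into() });
}
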
diff --git a/compiler/rustc_parse/src/lexer/diagnostics.rs b/compiler/rustc_parse/src/lexer/diagnostics.rs
new file mode 100644
index 000000000..27f4428d3
--- /dev/null
+++ b/compiler/rustc_parse/src/lexer/diagnostics.rs
@@ -0,0 +1,119 @@
+use super::UnmatchedDelim;
+use rustc_ast::token::Delimiter;
+use rustc_errors::Diagnostic;
+use rustc_span::source_map::SourceMap;
+use rustc_span::Span;
+
+#[derive(Default)]
+pub struct TokenTreeDiagInfo {
+ /// Stack of open delimiters and their spans. Used for error messages.
+ pub open_braces: Vec<(Delimiter, Span)>,
+ pub unmatched_delims: Vec<UnmatchedDelim>,
+
+ /// Used only for error recovery when arriving to EOF with mismatched braces.
+ pub last_unclosed_found_span: Option<Span>,
+
+ /// Collect empty block spans that might have been auto-inserted by editors.
+ pub empty_block_spans: Vec<Span>,
+
+ /// Collect the spans of braces (Open, Close). Used only
+ /// for detecting whether blocks are empty and contain only braces.
+ pub matching_block_spans: Vec<(Span, Span)>,
+}
+
+pub fn same_identation_level(sm: &SourceMap, open_sp: Span, close_sp: Span) -> bool {
+ match (sm.span_to_margin(open_sp), sm.span_to_margin(close_sp)) {
+ (Some(open_padding), Some(close_padding)) => open_padding == close_padding,
+ _ => false,
+ }
+}
+
+// When we get a `)` or `]` where a `{` was opened, emit the help message here;
+// it is friendlier than reporting an `unmatched` error in a later phase.
+pub fn report_missing_open_delim(
+ err: &mut Diagnostic,
+ unmatched_delims: &[UnmatchedDelim],
+) -> bool {
+ let mut reported_missing_open = false;
+ for unmatch_brace in unmatched_delims.iter() {
+ if let Some(delim) = unmatch_brace.found_delim
+ && matches!(delim, Delimiter::Parenthesis | Delimiter::Bracket)
+ {
+ let missed_open = match delim {
+ Delimiter::Parenthesis => "(",
+ Delimiter::Bracket => "[",
+ _ => unreachable!(),
+ };
+ err.span_label(
+ unmatch_brace.found_span.shrink_to_lo(),
+ format!("missing open `{}` for this delimiter", missed_open),
+ );
+ reported_missing_open = true;
+ }
+ }
+ reported_missing_open
+}
+
+pub fn report_suspicious_mismatch_block(
+ err: &mut Diagnostic,
+ diag_info: &TokenTreeDiagInfo,
+ sm: &SourceMap,
+ delim: Delimiter,
+) {
+ if report_missing_open_delim(err, &diag_info.unmatched_delims) {
+ return;
+ }
+
+ let mut matched_spans: Vec<(Span, bool)> = diag_info
+ .matching_block_spans
+ .iter()
+ .map(|&(open, close)| (open.with_hi(close.lo()), same_identation_level(sm, open, close)))
+ .collect();
+
+ // sort by `lo`, so the larger block spans come first
+ matched_spans.sort_by(|a, b| a.0.lo().cmp(&b.0.lo()));
+
+ // Use well-indented larger blocks to cover the mismatched inner blocks.
+ // O(N^2) here, but we are on the error reporting path, so it is fine.
+ for i in 0..matched_spans.len() {
+ let (block_span, same_ident) = matched_spans[i];
+ if same_ident {
+ for j in i + 1..matched_spans.len() {
+ let (inner_block, inner_same_ident) = matched_spans[j];
+ if block_span.contains(inner_block) && !inner_same_ident {
+ matched_spans[j] = (inner_block, true);
+ }
+ }
+ }
+ }
+
+ // Find the inner-most span candidate for the final report
+ let candidate_span =
+ matched_spans.into_iter().rev().find(|&(_, same_ident)| !same_ident).map(|(span, _)| span);
+
+ if let Some(block_span) = candidate_span {
+ err.span_label(block_span.shrink_to_lo(), "this delimiter might not be properly closed...");
+ err.span_label(
+ block_span.shrink_to_hi(),
+ "...as it matches this but it has different indentation",
+ );
+
+ // If there is an empty block in the mismatched span, note it
+ if delim == Delimiter::Brace {
+ for span in diag_info.empty_block_spans.iter() {
+ if block_span.contains(*span) {
+ err.span_label(*span, "block is empty, you might have not meant to close it");
+ break;
+ }
+ }
+ }
+ } else {
+ // If there is no suspicious span, pointing at the last properly closed block may help
+ if let Some(parent) = diag_info.matching_block_spans.last()
+ && diag_info.open_braces.last().is_none()
+ && diag_info.empty_block_spans.iter().all(|&sp| sp != parent.0.to(parent.1)) {
+ err.span_label(parent.0, "this opening brace...");
+ err.span_label(parent.1, "...matches this closing brace");
+ }
+ }
+}
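
The core of report_suspicious_mismatch_block above is the covering pass over matching_block_spans: well-indented outer blocks absorb badly indented blocks nested inside them, and the last block left with mismatched indentation is reported. A standalone sketch of the same heuristic on plain (lo, hi, same_indentation) triples instead of real Spans (illustrative only, not the rustc types):

// Each entry is (lo, hi, same_indentation) for one matched brace pair.
fn find_suspicious_block(mut blocks: Vec<(u32, u32, bool)>) -> Option<(u32, u32)> {
    // Sort by start position so larger (outer) blocks come first.
    blocks.sort_by_key(|b| b.0);
    // A well-indented outer block covers badly indented blocks nested inside it,
    // so they are no longer candidates. O(N^2), acceptable on the error path.
    for i in 0..blocks.len() {
        if blocks[i].2 {
            let (lo, hi, _) = blocks[i];
            for j in i + 1..blocks.len() {
                if lo <= blocks[j].0 && blocks[j].1 <= hi {
                    blocks[j].2 = true;
                }
            }
        }
    }
    // Report the innermost remaining block whose open/close indentation disagrees.
    blocks.into_iter().rev().find(|&(_, _, same)| !same).map(|(lo, hi, _)| (lo, hi))
}

fn main() {
    // The badly indented inner block is covered by the well-indented outer one: nothing flagged.
    assert_eq!(find_suspicious_block(vec![(0, 100, true), (10, 20, false)]), None);
    // A lone block with mismatched indentation is flagged.
    assert_eq!(find_suspicious_block(vec![(0, 100, false)]), Some((0, 100)));
}
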
diff --git a/compiler/rustc_parse/src/lexer/mod.rs b/compiler/rustc_parse/src/lexer/mod.rs
index 9fe8d9836..59958a309 100644
--- a/compiler/rustc_parse/src/lexer/mod.rs
+++ b/compiler/rustc_parse/src/lexer/mod.rs
@@ -1,11 +1,11 @@
+use crate::errors;
use crate::lexer::unicode_chars::UNICODE_ARRAY;
+use crate::make_unclosed_delims_error;
use rustc_ast::ast::{self, AttrStyle};
use rustc_ast::token::{self, CommentKind, Delimiter, Token, TokenKind};
use rustc_ast::tokenstream::TokenStream;
use rustc_ast::util::unicode::contains_text_flow_control_chars;
-use rustc_errors::{
- error_code, Applicability, DiagnosticBuilder, ErrorGuaranteed, PResult, StashKey,
-};
+use rustc_errors::{error_code, Applicability, Diagnostic, DiagnosticBuilder, StashKey};
use rustc_lexer::unescape::{self, Mode};
use rustc_lexer::Cursor;
use rustc_lexer::{Base, DocStyle, RawStrError};
@@ -17,6 +17,7 @@ use rustc_session::parse::ParseSess;
use rustc_span::symbol::{sym, Symbol};
use rustc_span::{edition::Edition, BytePos, Pos, Span};
+mod diagnostics;
mod tokentrees;
mod unescape_error_reporting;
mod unicode_chars;
@@ -31,7 +32,7 @@ use unescape_error_reporting::{emit_unescape_error, escaped_char};
rustc_data_structures::static_assert_size!(rustc_lexer::Token, 12);
#[derive(Clone, Debug)]
-pub struct UnmatchedBrace {
+pub struct UnmatchedDelim {
pub expected_delim: Delimiter,
pub found_delim: Option<Delimiter>,
pub found_span: Span,
@@ -44,7 +45,7 @@ pub(crate) fn parse_token_trees<'a>(
mut src: &'a str,
mut start_pos: BytePos,
override_span: Option<Span>,
-) -> (PResult<'a, TokenStream>, Vec<UnmatchedBrace>) {
+) -> Result<TokenStream, Vec<Diagnostic>> {
// Skip `#!`, if present.
if let Some(shebang_len) = rustc_lexer::strip_shebang(src) {
src = &src[shebang_len..];
@@ -61,7 +62,29 @@ pub(crate) fn parse_token_trees<'a>(
override_span,
nbsp_is_whitespace: false,
};
- tokentrees::TokenTreesReader::parse_all_token_trees(string_reader)
+ let (token_trees, unmatched_delims) =
+ tokentrees::TokenTreesReader::parse_all_token_trees(string_reader);
+ match token_trees {
+ Ok(stream) if unmatched_delims.is_empty() => Ok(stream),
+ _ => {
+ // Return an error if there are unmatched or unclosed delimiters.
+ // We emit the delimiter mismatch errors first, then the unclosed delimiter error,
+ // because a delimiter mismatch is more likely to be the root cause of the error.
+
+ let mut buffer = Vec::with_capacity(1);
+ // Not using `emit_unclosed_delims` so that the errors can be buffered instead of emitted
+ for unmatched in unmatched_delims {
+ if let Some(err) = make_unclosed_delims_error(unmatched, &sess) {
+ err.buffer(&mut buffer);
+ }
+ }
+ if let Err(err) = token_trees {
+ // Add the unclosed delimiter error
+ err.buffer(&mut buffer);
+ }
+ Err(buffer)
+ }
+ }
}
struct StringReader<'a> {
@@ -150,7 +173,7 @@ impl<'a> StringReader<'a> {
let span = self.mk_sp(start, self.pos);
self.sess.symbol_gallery.insert(sym, span);
if !sym.can_be_raw() {
- self.err_span(span, &format!("`{}` cannot be a raw identifier", sym));
+ self.sess.emit_err(errors::CannotBeRawIdent { span, ident: sym });
}
self.sess.raw_identifier_spans.borrow_mut().push(span);
token::Ident(sym, true)
@@ -261,27 +284,24 @@ impl<'a> StringReader<'a> {
self.nbsp_is_whitespace = true;
}
let repeats = it.take_while(|c1| *c1 == c).count();
- let mut err =
- self.struct_err_span_char(start, self.pos + Pos::from_usize(repeats * c.len_utf8()), "unknown start of token", c);
// FIXME: the lexer could be used to turn the ASCII version of unicode
// homoglyphs, instead of keeping a table in `check_for_substitution` into the
// token. Ideally, this should be inside `rustc_lexer`. However, we should
// first remove compound tokens like `<<` from `rustc_lexer`, and then add
// fancier error recovery to it, as there will be less overall work to do this
// way.
- let token = unicode_chars::check_for_substitution(self, start, c, &mut err, repeats+1);
- if c == '\x00' {
- err.help("source files must contain UTF-8 encoded text, unexpected null bytes might occur when a different encoding is used");
- }
- if repeats > 0 {
- if repeats == 1 {
- err.note(format!("character appears once more"));
- } else {
- err.note(format!("character appears {repeats} more times"));
- }
- swallow_next_invalid = repeats;
- }
- err.emit();
+ let (token, sugg) = unicode_chars::check_for_substitution(self, start, c, repeats + 1);
+ self.sess.emit_err(errors::UnknownTokenStart {
+ span: self.mk_sp(start, self.pos + Pos::from_usize(repeats * c.len_utf8())),
+ escaped: escaped_char(c),
+ sugg,
+ null: if c == '\x00' { Some(errors::UnknownTokenNull) } else { None },
+ repeat: if repeats > 0 {
+ swallow_next_invalid = repeats;
+ Some(errors::UnknownTokenRepeat { repeats })
+ } else { None }
+ });
+
if let Some(token) = token {
token
} else {
@@ -296,26 +316,6 @@ impl<'a> StringReader<'a> {
}
}
- /// Report a fatal lexical error with a given span.
- fn fatal_span(&self, sp: Span, m: &str) -> ! {
- self.sess.span_diagnostic.span_fatal(sp, m)
- }
-
- /// Report a lexical error with a given span.
- fn err_span(&self, sp: Span, m: &str) {
- self.sess.span_diagnostic.struct_span_err(sp, m).emit();
- }
-
- /// Report a fatal error spanning [`from_pos`, `to_pos`).
- fn fatal_span_(&self, from_pos: BytePos, to_pos: BytePos, m: &str) -> ! {
- self.fatal_span(self.mk_sp(from_pos, to_pos), m)
- }
-
- /// Report a lexical error spanning [`from_pos`, `to_pos`).
- fn err_span_(&self, from_pos: BytePos, to_pos: BytePos, m: &str) {
- self.err_span(self.mk_sp(from_pos, to_pos), m)
- }
-
fn struct_fatal_span_char(
&self,
from_pos: BytePos,
@@ -328,18 +328,6 @@ impl<'a> StringReader<'a> {
.struct_span_fatal(self.mk_sp(from_pos, to_pos), &format!("{}: {}", m, escaped_char(c)))
}
- fn struct_err_span_char(
- &self,
- from_pos: BytePos,
- to_pos: BytePos,
- m: &str,
- c: char,
- ) -> DiagnosticBuilder<'a, ErrorGuaranteed> {
- self.sess
- .span_diagnostic
- .struct_span_err(self.mk_sp(from_pos, to_pos), &format!("{}: {}", m, escaped_char(c)))
- }
-
/// Detect usages of Unicode codepoints changing the direction of the text on screen and loudly
/// complain about it.
fn lint_unicode_text_flow(&self, start: BytePos) {
@@ -367,14 +355,12 @@ impl<'a> StringReader<'a> {
) -> TokenKind {
if content.contains('\r') {
for (idx, _) in content.char_indices().filter(|&(_, c)| c == '\r') {
- self.err_span_(
+ let span = self.mk_sp(
content_start + BytePos(idx as u32),
content_start + BytePos(idx as u32 + 1),
- match comment_kind {
- CommentKind::Line => "bare CR not allowed in doc-comment",
- CommentKind::Block => "bare CR not allowed in block doc-comment",
- },
);
+ let block = matches!(comment_kind, CommentKind::Block);
+ self.sess.emit_err(errors::CrDocComment { span, block });
}
}
@@ -453,26 +439,20 @@ impl<'a> StringReader<'a> {
}
rustc_lexer::LiteralKind::Int { base, empty_int } => {
if empty_int {
- self.sess
- .span_diagnostic
- .struct_span_err_with_code(
- self.mk_sp(start, end),
- "no valid digits found for number",
- error_code!(E0768),
- )
- .emit();
+ let span = self.mk_sp(start, end);
+ self.sess.emit_err(errors::NoDigitsLiteral { span });
(token::Integer, sym::integer(0))
} else {
if matches!(base, Base::Binary | Base::Octal) {
let base = base as u32;
let s = self.str_from_to(start + BytePos(2), end);
for (idx, c) in s.char_indices() {
+ let span = self.mk_sp(
+ start + BytePos::from_usize(2 + idx),
+ start + BytePos::from_usize(2 + idx + c.len_utf8()),
+ );
if c != '_' && c.to_digit(base).is_none() {
- self.err_span_(
- start + BytePos::from_usize(2 + idx),
- start + BytePos::from_usize(2 + idx + c.len_utf8()),
- &format!("invalid digit for a base {} literal", base),
- );
+ self.sess.emit_err(errors::InvalidDigitLiteral { span, base });
}
}
}
@@ -481,19 +461,18 @@ impl<'a> StringReader<'a> {
}
rustc_lexer::LiteralKind::Float { base, empty_exponent } => {
if empty_exponent {
- self.err_span_(start, self.pos, "expected at least one digit in exponent");
+ let span = self.mk_sp(start, self.pos);
+ self.sess.emit_err(errors::EmptyExponentFloat { span });
}
- match base {
- Base::Hexadecimal => {
- self.err_span_(start, end, "hexadecimal float literal is not supported")
- }
- Base::Octal => {
- self.err_span_(start, end, "octal float literal is not supported")
- }
- Base::Binary => {
- self.err_span_(start, end, "binary float literal is not supported")
- }
- _ => {}
+ let base = match base {
+ Base::Hexadecimal => Some("hexadecimal"),
+ Base::Octal => Some("octal"),
+ Base::Binary => Some("binary"),
+ _ => None,
+ };
+ if let Some(base) = base {
+ let span = self.mk_sp(start, end);
+ self.sess.emit_err(errors::FloatLiteralUnsupportedBase { span, base });
}
(token::Float, self.symbol_from_to(start, end))
}
@@ -643,54 +622,34 @@ impl<'a> StringReader<'a> {
// identifier tokens.
fn report_unknown_prefix(&self, start: BytePos) {
let prefix_span = self.mk_sp(start, self.pos);
- let prefix_str = self.str_from_to(start, self.pos);
- let msg = format!("prefix `{}` is unknown", prefix_str);
+ let prefix = self.str_from_to(start, self.pos);
let expn_data = prefix_span.ctxt().outer_expn_data();
if expn_data.edition >= Edition::Edition2021 {
// In Rust 2021, this is a hard error.
- let mut err = self.sess.span_diagnostic.struct_span_err(prefix_span, &msg);
- err.span_label(prefix_span, "unknown prefix");
- if prefix_str == "rb" {
- err.span_suggestion_verbose(
- prefix_span,
- "use `br` for a raw byte string",
- "br",
- Applicability::MaybeIncorrect,
- );
+ let sugg = if prefix == "rb" {
+ Some(errors::UnknownPrefixSugg::UseBr(prefix_span))
} else if expn_data.is_root() {
- err.span_suggestion_verbose(
- prefix_span.shrink_to_hi(),
- "consider inserting whitespace here",
- " ",
- Applicability::MaybeIncorrect,
- );
- }
- err.note("prefixed identifiers and literals are reserved since Rust 2021");
- err.emit();
+ Some(errors::UnknownPrefixSugg::Whitespace(prefix_span.shrink_to_hi()))
+ } else {
+ None
+ };
+ self.sess.emit_err(errors::UnknownPrefix { span: prefix_span, prefix, sugg });
} else {
// Before Rust 2021, only emit a lint for migration.
self.sess.buffer_lint_with_diagnostic(
&RUST_2021_PREFIXES_INCOMPATIBLE_SYNTAX,
prefix_span,
ast::CRATE_NODE_ID,
- &msg,
+ &format!("prefix `{prefix}` is unknown"),
BuiltinLintDiagnostics::ReservedPrefix(prefix_span),
);
}
}
- fn report_too_many_hashes(&self, start: BytePos, found: u32) -> ! {
- self.fatal_span_(
- start,
- self.pos,
- &format!(
- "too many `#` symbols: raw strings may be delimited \
- by up to 255 `#` symbols, but found {}",
- found
- ),
- )
+ fn report_too_many_hashes(&self, start: BytePos, num: u32) -> ! {
+ self.sess.emit_fatal(errors::TooManyHashes { span: self.mk_sp(start, self.pos), num });
}
fn cook_quoted(
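
With this change the lexer stops emitting eagerly: parse_token_trees buffers every delimiter-mismatch diagnostic first and the unclosed-delimiter error last, then hands the whole batch back as Err(Vec<Diagnostic>) for the caller to emit. A minimal sketch of that ordering with plain strings standing in for rustc's Diagnostic type (hypothetical, for illustration only):

// Mismatch errors are the likelier root cause, so they go into the buffer first.
fn collect_errors(
    unmatched: Vec<String>,
    unclosed: Option<String>,
) -> Result<(), Vec<String>> {
    if unmatched.is_empty() && unclosed.is_none() {
        return Ok(());
    }
    let mut buffer = Vec::new();
    buffer.extend(unmatched); // delimiter mismatches first...
    buffer.extend(unclosed);  // ...then the trailing unclosed-delimiter error, if any
    Err(buffer)
}

fn main() {
    let errs = collect_errors(
        vec!["mismatched closing delimiter: `)`".to_string()],
        Some("this file contains an unclosed delimiter".to_string()),
    )
    .unwrap_err();
    assert_eq!(errs.len(), 2);
    assert!(errs[0].starts_with("mismatched"));
}
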
diff --git a/compiler/rustc_parse/src/lexer/tokentrees.rs b/compiler/rustc_parse/src/lexer/tokentrees.rs
index b2701817d..36fd1e37d 100644
--- a/compiler/rustc_parse/src/lexer/tokentrees.rs
+++ b/compiler/rustc_parse/src/lexer/tokentrees.rs
@@ -1,47 +1,31 @@
-use super::{StringReader, UnmatchedBrace};
+use super::diagnostics::report_suspicious_mismatch_block;
+use super::diagnostics::same_identation_level;
+use super::diagnostics::TokenTreeDiagInfo;
+use super::{StringReader, UnmatchedDelim};
use rustc_ast::token::{self, Delimiter, Token};
use rustc_ast::tokenstream::{DelimSpan, Spacing, TokenStream, TokenTree};
use rustc_ast_pretty::pprust::token_to_string;
-use rustc_data_structures::fx::FxHashMap;
use rustc_errors::{PErr, PResult};
-use rustc_span::Span;
pub(super) struct TokenTreesReader<'a> {
string_reader: StringReader<'a>,
/// The "next" token, which has been obtained from the `StringReader` but
/// not yet handled by the `TokenTreesReader`.
token: Token,
- /// Stack of open delimiters and their spans. Used for error message.
- open_braces: Vec<(Delimiter, Span)>,
- unmatched_braces: Vec<UnmatchedBrace>,
- /// The type and spans for all braces
- ///
- /// Used only for error recovery when arriving to EOF with mismatched braces.
- matching_delim_spans: Vec<(Delimiter, Span, Span)>,
- last_unclosed_found_span: Option<Span>,
- /// Collect empty block spans that might have been auto-inserted by editors.
- last_delim_empty_block_spans: FxHashMap<Delimiter, Span>,
- /// Collect the spans of braces (Open, Close). Used only
- /// for detecting if blocks are empty and only braces.
- matching_block_spans: Vec<(Span, Span)>,
+ diag_info: TokenTreeDiagInfo,
}
impl<'a> TokenTreesReader<'a> {
pub(super) fn parse_all_token_trees(
string_reader: StringReader<'a>,
- ) -> (PResult<'a, TokenStream>, Vec<UnmatchedBrace>) {
+ ) -> (PResult<'a, TokenStream>, Vec<UnmatchedDelim>) {
let mut tt_reader = TokenTreesReader {
string_reader,
token: Token::dummy(),
- open_braces: Vec::new(),
- unmatched_braces: Vec::new(),
- matching_delim_spans: Vec::new(),
- last_unclosed_found_span: None,
- last_delim_empty_block_spans: FxHashMap::default(),
- matching_block_spans: Vec::new(),
+ diag_info: TokenTreeDiagInfo::default(),
};
let res = tt_reader.parse_token_trees(/* is_delimited */ false);
- (res, tt_reader.unmatched_braces)
+ (res, tt_reader.diag_info.unmatched_delims)
}
// Parse a stream of tokens into a list of `TokenTree`s.
@@ -50,7 +34,7 @@ impl<'a> TokenTreesReader<'a> {
let mut buf = Vec::new();
loop {
match self.token.kind {
- token::OpenDelim(delim) => buf.push(self.parse_token_tree_open_delim(delim)),
+ token::OpenDelim(delim) => buf.push(self.parse_token_tree_open_delim(delim)?),
token::CloseDelim(delim) => {
return if is_delimited {
Ok(TokenStream::new(buf))
@@ -59,10 +43,11 @@ impl<'a> TokenTreesReader<'a> {
};
}
token::Eof => {
- if is_delimited {
- self.eof_err().emit();
- }
- return Ok(TokenStream::new(buf));
+ return if is_delimited {
+ Err(self.eof_err())
+ } else {
+ Ok(TokenStream::new(buf))
+ };
}
_ => {
// Get the next normal token. This might require getting multiple adjacent
@@ -92,9 +77,9 @@ impl<'a> TokenTreesReader<'a> {
fn eof_err(&mut self) -> PErr<'a> {
let msg = "this file contains an unclosed delimiter";
let mut err = self.string_reader.sess.span_diagnostic.struct_span_err(self.token.span, msg);
- for &(_, sp) in &self.open_braces {
+ for &(_, sp) in &self.diag_info.open_braces {
err.span_label(sp, "unclosed delimiter");
- self.unmatched_braces.push(UnmatchedBrace {
+ self.diag_info.unmatched_delims.push(UnmatchedDelim {
expected_delim: Delimiter::Brace,
found_delim: None,
found_span: self.token.span,
@@ -103,69 +88,53 @@ impl<'a> TokenTreesReader<'a> {
});
}
- if let Some((delim, _)) = self.open_braces.last() {
- if let Some((_, open_sp, close_sp)) =
- self.matching_delim_spans.iter().find(|(d, open_sp, close_sp)| {
- let sm = self.string_reader.sess.source_map();
- if let Some(close_padding) = sm.span_to_margin(*close_sp) {
- if let Some(open_padding) = sm.span_to_margin(*open_sp) {
- return delim == d && close_padding != open_padding;
- }
- }
- false
- })
- // these are in reverse order as they get inserted on close, but
- {
- // we want the last open/first close
- err.span_label(*open_sp, "this delimiter might not be properly closed...");
- err.span_label(*close_sp, "...as it matches this but it has different indentation");
- }
+ if let Some((delim, _)) = self.diag_info.open_braces.last() {
+ report_suspicious_mismatch_block(
+ &mut err,
+ &self.diag_info,
+ &self.string_reader.sess.source_map(),
+ *delim,
+ )
}
err
}
- fn parse_token_tree_open_delim(&mut self, open_delim: Delimiter) -> TokenTree {
+ fn parse_token_tree_open_delim(&mut self, open_delim: Delimiter) -> PResult<'a, TokenTree> {
// The span for beginning of the delimited section
let pre_span = self.token.span;
- self.open_braces.push((open_delim, self.token.span));
+ self.diag_info.open_braces.push((open_delim, self.token.span));
// Parse the token trees within the delimiters.
// We stop at any delimiter so we can try to recover if the user
// uses an incorrect delimiter.
- let tts = self.parse_token_trees(/* is_delimited */ true).unwrap();
+ let tts = self.parse_token_trees(/* is_delimited */ true)?;
// Expand to cover the entire delimited token tree
let delim_span = DelimSpan::from_pair(pre_span, self.token.span);
+ let sm = self.string_reader.sess.source_map();
match self.token.kind {
// Correct delimiter.
token::CloseDelim(close_delim) if close_delim == open_delim => {
- let (open_brace, open_brace_span) = self.open_braces.pop().unwrap();
+ let (open_brace, open_brace_span) = self.diag_info.open_braces.pop().unwrap();
let close_brace_span = self.token.span;
- if tts.is_empty() {
+ if tts.is_empty() && close_delim == Delimiter::Brace {
let empty_block_span = open_brace_span.to(close_brace_span);
- let sm = self.string_reader.sess.source_map();
if !sm.is_multiline(empty_block_span) {
// Only track if the block is in the form of `{}`, otherwise it is
// likely that it was written on purpose.
- self.last_delim_empty_block_spans.insert(open_delim, empty_block_span);
+ self.diag_info.empty_block_spans.push(empty_block_span);
}
}
- //only add braces
+ // only add braces
if let (Delimiter::Brace, Delimiter::Brace) = (open_brace, open_delim) {
- self.matching_block_spans.push((open_brace_span, close_brace_span));
+ // Add all the matching spans; we will sort them by span later
+ self.diag_info.matching_block_spans.push((open_brace_span, close_brace_span));
}
- if self.open_braces.is_empty() {
- // Clear up these spans to avoid suggesting them as we've found
- // properly matched delimiters so far for an entire block.
- self.matching_delim_spans.clear();
- } else {
- self.matching_delim_spans.push((open_brace, open_brace_span, close_brace_span));
- }
// Move past the closing delimiter.
self.token = self.string_reader.next_token().0;
}
@@ -174,28 +143,25 @@ impl<'a> TokenTreesReader<'a> {
let mut unclosed_delimiter = None;
let mut candidate = None;
- if self.last_unclosed_found_span != Some(self.token.span) {
+ if self.diag_info.last_unclosed_found_span != Some(self.token.span) {
// do not complain about the same unclosed delimiter multiple times
- self.last_unclosed_found_span = Some(self.token.span);
+ self.diag_info.last_unclosed_found_span = Some(self.token.span);
// This is a conservative error: only report the last unclosed
// delimiter. The previous unclosed delimiters could actually be
// closed! The parser just hasn't gotten to them yet.
- if let Some(&(_, sp)) = self.open_braces.last() {
+ if let Some(&(_, sp)) = self.diag_info.open_braces.last() {
unclosed_delimiter = Some(sp);
};
- let sm = self.string_reader.sess.source_map();
- if let Some(current_padding) = sm.span_to_margin(self.token.span) {
- for (brace, brace_span) in &self.open_braces {
- if let Some(padding) = sm.span_to_margin(*brace_span) {
- // high likelihood of these two corresponding
- if current_padding == padding && brace == &close_delim {
- candidate = Some(*brace_span);
- }
- }
+ for (brace, brace_span) in &self.diag_info.open_braces {
+ if same_identation_level(&sm, self.token.span, *brace_span)
+ && brace == &close_delim
+ {
+ // high likelihood of these two corresponding
+ candidate = Some(*brace_span);
}
}
- let (tok, _) = self.open_braces.pop().unwrap();
- self.unmatched_braces.push(UnmatchedBrace {
+ let (tok, _) = self.diag_info.open_braces.pop().unwrap();
+ self.diag_info.unmatched_delims.push(UnmatchedDelim {
expected_delim: tok,
found_delim: Some(close_delim),
found_span: self.token.span,
@@ -203,7 +169,7 @@ impl<'a> TokenTreesReader<'a> {
candidate_span: candidate,
});
} else {
- self.open_braces.pop();
+ self.diag_info.open_braces.pop();
}
// If the incorrect delimiter matches an earlier opening
@@ -213,7 +179,7 @@ impl<'a> TokenTreesReader<'a> {
// fn foo() {
// bar(baz(
// } // Incorrect delimiter but matches the earlier `{`
- if !self.open_braces.iter().any(|&(b, _)| b == close_delim) {
+ if !self.diag_info.open_braces.iter().any(|&(b, _)| b == close_delim) {
self.token = self.string_reader.next_token().0;
}
}
@@ -225,7 +191,7 @@ impl<'a> TokenTreesReader<'a> {
_ => unreachable!(),
}
- TokenTree::Delimited(delim_span, open_delim, tts)
+ Ok(TokenTree::Delimited(delim_span, open_delim, tts))
}
fn close_delim_err(&mut self, delim: Delimiter) -> PErr<'a> {
@@ -236,22 +202,12 @@ impl<'a> TokenTreesReader<'a> {
let mut err =
self.string_reader.sess.span_diagnostic.struct_span_err(self.token.span, &msg);
- // Braces are added at the end, so the last element is the biggest block
- if let Some(parent) = self.matching_block_spans.last() {
- if let Some(span) = self.last_delim_empty_block_spans.remove(&delim) {
- // Check if the (empty block) is in the last properly closed block
- if (parent.0.to(parent.1)).contains(span) {
- err.span_label(span, "block is empty, you might have not meant to close it");
- } else {
- err.span_label(parent.0, "this opening brace...");
- err.span_label(parent.1, "...matches this closing brace");
- }
- } else {
- err.span_label(parent.0, "this opening brace...");
- err.span_label(parent.1, "...matches this closing brace");
- }
- }
-
+ report_suspicious_mismatch_block(
+ &mut err,
+ &self.diag_info,
+ &self.string_reader.sess.source_map(),
+ delim,
+ );
err.span_label(self.token.span, "unexpected closing delimiter");
err
}
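
The UnmatchedBrace to UnmatchedDelim rename reflects that the record describes any delimiter pair, not just braces. As a rough mock (hypothetical types and wording, not the exact rustc messages), the data it carries is enough to render either an unclosed-delimiter or a mismatched-closing-delimiter report once the caller gets the records back:

#[derive(Clone, Copy)]
enum Delim { Paren, Bracket, Brace }

// Cut-down analogue of UnmatchedDelim: what was expected and what, if anything, was found.
struct Unmatched {
    expected: Delim,
    found: Option<Delim>,
}

fn closing(d: Delim) -> char {
    match d {
        Delim::Paren => ')',
        Delim::Bracket => ']',
        Delim::Brace => '}',
    }
}

fn render(u: &Unmatched) -> String {
    match u.found {
        Some(found) => format!(
            "mismatched closing delimiter: expected `{}`, found `{}`",
            closing(u.expected),
            closing(found)
        ),
        None => format!("unclosed delimiter: missing `{}`", closing(u.expected)),
    }
}

fn main() {
    let report = render(&Unmatched { expected: Delim::Brace, found: Some(Delim::Paren) });
    println!("{report}");
}
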
diff --git a/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs b/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs
index 6373f5b4f..0d12ec608 100644
--- a/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs
+++ b/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs
@@ -3,10 +3,12 @@
use std::iter::once;
use std::ops::Range;
-use rustc_errors::{pluralize, Applicability, Handler};
+use rustc_errors::{Applicability, Handler};
use rustc_lexer::unescape::{EscapeError, Mode};
use rustc_span::{BytePos, Span};
+use crate::errors::{MoreThanOneCharNote, MoreThanOneCharSugg, NoBraceUnicodeSub, UnescapeError};
+
pub(crate) fn emit_unescape_error(
handler: &Handler,
// interior part of the literal, without quotes
@@ -31,53 +33,32 @@ pub(crate) fn emit_unescape_error(
};
match error {
EscapeError::LoneSurrogateUnicodeEscape => {
- handler
- .struct_span_err(span, "invalid unicode character escape")
- .span_label(span, "invalid escape")
- .help("unicode escape must not be a surrogate")
- .emit();
+ handler.emit_err(UnescapeError::InvalidUnicodeEscape { span, surrogate: true });
}
EscapeError::OutOfRangeUnicodeEscape => {
- handler
- .struct_span_err(span, "invalid unicode character escape")
- .span_label(span, "invalid escape")
- .help("unicode escape must be at most 10FFFF")
- .emit();
+ handler.emit_err(UnescapeError::InvalidUnicodeEscape { span, surrogate: false });
}
EscapeError::MoreThanOneChar => {
use unicode_normalization::{char::is_combining_mark, UnicodeNormalization};
+ let mut sugg = None;
+ let mut note = None;
- let mut has_help = false;
- let mut handler = handler.struct_span_err(
- span_with_quotes,
- "character literal may only contain one codepoint",
- );
-
- if lit.chars().skip(1).all(|c| is_combining_mark(c)) {
- let escaped_marks =
- lit.chars().skip(1).map(|c| c.escape_default().to_string()).collect::<Vec<_>>();
- handler.span_note(
- span,
- &format!(
- "this `{}` is followed by the combining mark{} `{}`",
- lit.chars().next().unwrap(),
- pluralize!(escaped_marks.len()),
- escaped_marks.join(""),
- ),
- );
+ let lit_chars = lit.chars().collect::<Vec<_>>();
+ let (first, rest) = lit_chars.split_first().unwrap();
+ if rest.iter().copied().all(is_combining_mark) {
let normalized = lit.nfc().to_string();
if normalized.chars().count() == 1 {
- has_help = true;
- handler.span_suggestion(
- span,
- &format!(
- "consider using the normalized form `{}` of this character",
- normalized.chars().next().unwrap().escape_default()
- ),
- normalized,
- Applicability::MachineApplicable,
- );
+ let ch = normalized.chars().next().unwrap().escape_default().to_string();
+ sugg = Some(MoreThanOneCharSugg::NormalizedForm { span, ch, normalized });
}
+ let escaped_marks =
+ rest.iter().map(|c| c.escape_default().to_string()).collect::<Vec<_>>();
+ note = Some(MoreThanOneCharNote::AllCombining {
+ span,
+ chr: format!("{first}"),
+ len: escaped_marks.len(),
+ escaped_marks: escaped_marks.join(""),
+ });
} else {
let printable: Vec<char> = lit
.chars()
@@ -87,32 +68,18 @@ pub(crate) fn emit_unescape_error(
})
.collect();
- if let [ch] = printable.as_slice() {
- has_help = true;
-
- handler.span_note(
+ if let &[ch] = printable.as_slice() {
+ sugg =
+ Some(MoreThanOneCharSugg::RemoveNonPrinting { span, ch: ch.to_string() });
+ note = Some(MoreThanOneCharNote::NonPrinting {
span,
- &format!(
- "there are non-printing characters, the full sequence is `{}`",
- lit.escape_default(),
- ),
- );
-
- handler.span_suggestion(
- span,
- "consider removing the non-printing characters",
- ch,
- Applicability::MaybeIncorrect,
- );
+ escaped: lit.escape_default().to_string(),
+ });
}
- }
-
- if !has_help {
- let (prefix, msg) = if mode.is_byte() {
- ("b", "if you meant to write a byte string literal, use double quotes")
- } else {
- ("", "if you meant to write a `str` literal, use double quotes")
- };
+ };
+ let sugg = sugg.unwrap_or_else(|| {
+ let is_byte = mode.is_byte();
+ let prefix = if is_byte { "b" } else { "" };
let mut escaped = String::with_capacity(lit.len());
let mut chrs = lit.chars().peekable();
while let Some(first) = chrs.next() {
@@ -129,54 +96,32 @@ pub(crate) fn emit_unescape_error(
(c, _) => escaped.push(c),
};
}
- handler.span_suggestion(
- span_with_quotes,
- msg,
- format!("{prefix}\"{escaped}\""),
- Applicability::MachineApplicable,
- );
- }
-
- handler.emit();
+ let sugg = format!("{prefix}\"{escaped}\"");
+ MoreThanOneCharSugg::Quotes { span: span_with_quotes, is_byte, sugg }
+ });
+ handler.emit_err(UnescapeError::MoreThanOneChar {
+ span: span_with_quotes,
+ note,
+ suggestion: sugg,
+ });
}
EscapeError::EscapeOnlyChar => {
let (c, char_span) = last_char();
-
- let msg = if mode.is_byte() {
- "byte constant must be escaped"
- } else {
- "character constant must be escaped"
- };
- handler
- .struct_span_err(span, &format!("{}: `{}`", msg, escaped_char(c)))
- .span_suggestion(
- char_span,
- "escape the character",
- c.escape_default(),
- Applicability::MachineApplicable,
- )
- .emit();
+ handler.emit_err(UnescapeError::EscapeOnlyChar {
+ span,
+ char_span,
+ escaped_sugg: c.escape_default().to_string(),
+ escaped_msg: escaped_char(c),
+ byte: mode.is_byte(),
+ });
}
EscapeError::BareCarriageReturn => {
- let msg = if mode.in_double_quotes() {
- "bare CR not allowed in string, use `\\r` instead"
- } else {
- "character constant must be escaped: `\\r`"
- };
- handler
- .struct_span_err(span, msg)
- .span_suggestion(
- span,
- "escape the character",
- "\\r",
- Applicability::MachineApplicable,
- )
- .emit();
+ let double_quotes = mode.in_double_quotes();
+ handler.emit_err(UnescapeError::BareCr { span, double_quotes });
}
EscapeError::BareCarriageReturnInRawString => {
assert!(mode.in_double_quotes());
- let msg = "bare CR not allowed in raw string";
- handler.span_err(span, msg);
+ handler.emit_err(UnescapeError::BareCrRawString(span));
}
EscapeError::InvalidEscape => {
let (c, span) = last_char();
@@ -213,22 +158,13 @@ pub(crate) fn emit_unescape_error(
diag.emit();
}
EscapeError::TooShortHexEscape => {
- handler.span_err(span, "numeric character escape is too short");
+ handler.emit_err(UnescapeError::TooShortHexEscape(span));
}
EscapeError::InvalidCharInHexEscape | EscapeError::InvalidCharInUnicodeEscape => {
let (c, span) = last_char();
-
- let msg = if error == EscapeError::InvalidCharInHexEscape {
- "invalid character in numeric character escape"
- } else {
- "invalid character in unicode escape"
- };
- let c = escaped_char(c);
-
- handler
- .struct_span_err(span, &format!("{}: `{}`", msg, c))
- .span_label(span, msg)
- .emit();
+ let is_hex = error == EscapeError::InvalidCharInHexEscape;
+ let ch = escaped_char(c);
+ handler.emit_err(UnescapeError::InvalidCharInEscape { span, is_hex, ch });
}
EscapeError::NonAsciiCharInByte => {
let (c, span) = last_char();
@@ -278,41 +214,22 @@ pub(crate) fn emit_unescape_error(
err.emit();
}
EscapeError::OutOfRangeHexEscape => {
- handler
- .struct_span_err(span, "out of range hex escape")
- .span_label(span, "must be a character in the range [\\x00-\\x7f]")
- .emit();
+ handler.emit_err(UnescapeError::OutOfRangeHexEscape(span));
}
EscapeError::LeadingUnderscoreUnicodeEscape => {
let (c, span) = last_char();
- let msg = "invalid start of unicode escape";
- handler
- .struct_span_err(span, &format!("{}: `{}`", msg, c))
- .span_label(span, msg)
- .emit();
+ handler.emit_err(UnescapeError::LeadingUnderscoreUnicodeEscape {
+ span,
+ ch: escaped_char(c),
+ });
}
EscapeError::OverlongUnicodeEscape => {
- handler
- .struct_span_err(span, "overlong unicode escape")
- .span_label(span, "must have at most 6 hex digits")
- .emit();
+ handler.emit_err(UnescapeError::OverlongUnicodeEscape(span));
}
EscapeError::UnclosedUnicodeEscape => {
- handler
- .struct_span_err(span, "unterminated unicode escape")
- .span_label(span, "missing a closing `}`")
- .span_suggestion_verbose(
- span.shrink_to_hi(),
- "terminate the unicode escape",
- "}",
- Applicability::MaybeIncorrect,
- )
- .emit();
+ handler.emit_err(UnescapeError::UnclosedUnicodeEscape(span, span.shrink_to_hi()));
}
EscapeError::NoBraceInUnicodeEscape => {
- let msg = "incorrect unicode escape sequence";
- let mut diag = handler.struct_span_err(span, msg);
-
let mut suggestion = "\\u{".to_owned();
let mut suggestion_len = 0;
let (c, char_span) = last_char();
@@ -322,54 +239,37 @@ pub(crate) fn emit_unescape_error(
suggestion_len += c.len_utf8();
}
- if suggestion_len > 0 {
+ let (label, sub) = if suggestion_len > 0 {
suggestion.push('}');
let hi = char_span.lo() + BytePos(suggestion_len as u32);
- diag.span_suggestion(
- span.with_hi(hi),
- "format of unicode escape sequences uses braces",
- suggestion,
- Applicability::MaybeIncorrect,
- );
+ (None, NoBraceUnicodeSub::Suggestion { span: span.with_hi(hi), suggestion })
} else {
- diag.span_label(span, msg);
- diag.help("format of unicode escape sequences is `\\u{...}`");
- }
-
- diag.emit();
+ (Some(span), NoBraceUnicodeSub::Help)
+ };
+ handler.emit_err(UnescapeError::NoBraceInUnicodeEscape { span, label, sub });
}
EscapeError::UnicodeEscapeInByte => {
- let msg = "unicode escape in byte string";
- handler
- .struct_span_err(span, msg)
- .span_label(span, msg)
- .help("unicode escape sequences cannot be used as a byte or in a byte string")
- .emit();
+ handler.emit_err(UnescapeError::UnicodeEscapeInByte(span));
}
EscapeError::EmptyUnicodeEscape => {
- handler
- .struct_span_err(span, "empty unicode escape")
- .span_label(span, "this escape must have at least 1 hex digit")
- .emit();
+ handler.emit_err(UnescapeError::EmptyUnicodeEscape(span));
}
EscapeError::ZeroChars => {
- let msg = "empty character literal";
- handler.struct_span_err(span, msg).span_label(span, msg).emit();
+ handler.emit_err(UnescapeError::ZeroChars(span));
}
EscapeError::LoneSlash => {
- let msg = "invalid trailing slash in literal";
- handler.struct_span_err(span, msg).span_label(span, msg).emit();
+ handler.emit_err(UnescapeError::LoneSlash(span));
}
EscapeError::UnskippedWhitespaceWarning => {
let (c, char_span) = last_char();
- let msg =
- format!("non-ASCII whitespace symbol '{}' is not skipped", c.escape_unicode());
- handler.struct_span_warn(span, &msg).span_label(char_span, &msg).emit();
+ handler.emit_warning(UnescapeError::UnskippedWhitespace {
+ span,
+ ch: escaped_char(c),
+ char_span,
+ });
}
EscapeError::MultipleSkippedLinesWarning => {
- let msg = "multiple lines skipped by escaped newline";
- let bottom_msg = "skipping everything up to and including this point";
- handler.struct_span_warn(span, msg).span_label(span, bottom_msg).emit();
+ handler.emit_warning(UnescapeError::MultipleSkippedLinesWarning(span));
}
}
}
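
The MoreThanOneChar arm above only proposes a normalized form when everything after the first character is a combining mark and NFC collapses the literal to a single codepoint. A standalone check of that condition, assuming only the unicode-normalization crate that rustc_parse already depends on (a sketch, not the real emit path):

use unicode_normalization::{char::is_combining_mark, UnicodeNormalization};

/// Returns the single NFC codepoint to suggest, if the literal is one base character
/// followed only by combining marks and normalizes down to one char.
fn normalized_suggestion(lit: &str) -> Option<char> {
    let chars: Vec<char> = lit.chars().collect();
    let (_first, rest) = chars.split_first()?;
    if rest.is_empty() || !rest.iter().copied().all(is_combining_mark) {
        return None;
    }
    let normalized: String = lit.nfc().collect();
    let mut n = normalized.chars();
    match (n.next(), n.next()) {
        (Some(c), None) => Some(c),
        _ => None,
    }
}

fn main() {
    // U+0065 'e' + U+0301 combining acute accent normalizes to the single char U+00E9.
    assert_eq!(normalized_suggestion("e\u{301}"), Some('\u{e9}'));
    // Two ordinary characters: no single-codepoint suggestion, fall back to the quotes hint.
    assert_eq!(normalized_suggestion("ab"), None);
}
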
diff --git a/compiler/rustc_parse/src/lexer/unicode_chars.rs b/compiler/rustc_parse/src/lexer/unicode_chars.rs
index 34d003ccf..d4f971d5b 100644
--- a/compiler/rustc_parse/src/lexer/unicode_chars.rs
+++ b/compiler/rustc_parse/src/lexer/unicode_chars.rs
@@ -2,8 +2,10 @@
//! <https://www.unicode.org/Public/security/10.0.0/confusables.txt>
use super::StringReader;
-use crate::token::{self, Delimiter};
-use rustc_errors::{Applicability, Diagnostic};
+use crate::{
+ errors::TokenSubstitution,
+ token::{self, Delimiter},
+};
use rustc_span::{symbol::kw, BytePos, Pos, Span};
#[rustfmt::skip] // for line breaks
@@ -338,48 +340,44 @@ pub(super) fn check_for_substitution<'a>(
reader: &StringReader<'a>,
pos: BytePos,
ch: char,
- err: &mut Diagnostic,
count: usize,
-) -> Option<token::TokenKind> {
- let &(_, u_name, ascii_str) = UNICODE_ARRAY.iter().find(|&&(c, _, _)| c == ch)?;
+) -> (Option<token::TokenKind>, Option<TokenSubstitution>) {
+ let Some(&(_, u_name, ascii_str)) = UNICODE_ARRAY.iter().find(|&&(c, _, _)| c == ch) else {
+ return (None, None);
+ };
let span = Span::with_root_ctxt(pos, pos + Pos::from_usize(ch.len_utf8() * count));
let Some((_, ascii_name, token)) = ASCII_ARRAY.iter().find(|&&(s, _, _)| s == ascii_str) else {
let msg = format!("substitution character not found for '{}'", ch);
reader.sess.span_diagnostic.span_bug_no_panic(span, &msg);
- return None;
+ return (None, None);
};
// special help suggestion for "directed" double quotes
- if let Some(s) = peek_delimited(&reader.src[reader.src_index(pos)..], '“', '”') {
- let msg = format!(
- "Unicode characters '“' (Left Double Quotation Mark) and \
- '”' (Right Double Quotation Mark) look like '{}' ({}), but are not",
- ascii_str, ascii_name
- );
- err.span_suggestion(
- Span::with_root_ctxt(
- pos,
- pos + Pos::from_usize('“'.len_utf8() + s.len() + '”'.len_utf8()),
- ),
- &msg,
- format!("\"{}\"", s),
- Applicability::MaybeIncorrect,
+ let sugg = if let Some(s) = peek_delimited(&reader.src[reader.src_index(pos)..], '“', '”') {
+ let span = Span::with_root_ctxt(
+ pos,
+ pos + Pos::from_usize('“'.len_utf8() + s.len() + '”'.len_utf8()),
);
+ Some(TokenSubstitution::DirectedQuotes {
+ span,
+ suggestion: format!("\"{s}\""),
+ ascii_str,
+ ascii_name,
+ })
} else {
- let msg = format!(
- "Unicode character '{}' ({}) looks like '{}' ({}), but it is not",
- ch, u_name, ascii_str, ascii_name
- );
- err.span_suggestion(
+ let suggestion = ascii_str.to_string().repeat(count);
+ Some(TokenSubstitution::Other {
span,
- &msg,
- ascii_str.to_string().repeat(count),
- Applicability::MaybeIncorrect,
- );
- }
- token.clone()
+ suggestion,
+ ch: ch.to_string(),
+ u_name,
+ ascii_str,
+ ascii_name,
+ })
+ };
+ (token.clone(), sugg)
}
/// Extract string if found at current position with given delimiters
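
(Illustrative only, not part of the patch: the substitution table above maps Unicode look-alikes, including the specially handled directed quotes, to ASCII suggestions.)

    fn main() {
        // Source written with the directed quotes “hi” gets a suggestion to use the
        // ASCII quotes below; other confusables are replaced via the same table.
        let s = "hi";
        println!("{s}");
    }
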
diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs
index b49a01d75..d1c3fd0cd 100644
--- a/compiler/rustc_parse/src/lib.rs
+++ b/compiler/rustc_parse/src/lib.rs
@@ -19,6 +19,8 @@ use rustc_ast::{AttrItem, Attribute, MetaItem};
use rustc_ast_pretty::pprust;
use rustc_data_structures::sync::Lrc;
use rustc_errors::{Applicability, Diagnostic, FatalError, Level, PResult};
+use rustc_errors::{DiagnosticMessage, SubdiagnosticMessage};
+use rustc_macros::fluent_messages;
use rustc_session::parse::ParseSess;
use rustc_span::{FileName, SourceFile, Span};
@@ -28,12 +30,14 @@ pub const MACRO_ARGUMENTS: Option<&str> = Some("macro arguments");
#[macro_use]
pub mod parser;
-use parser::{emit_unclosed_delims, make_unclosed_delims_error, Parser};
+use parser::{make_unclosed_delims_error, Parser};
pub mod lexer;
pub mod validate_attr;
mod errors;
+fluent_messages! { "../locales/en-US.ftl" }
+
// A bunch of utility functions of the form `parse_<thing>_from_<source>`
// where <thing> includes crate, expr, item, stmt, tts, and one that
// uses a HOF to parse anything, and <source> includes file and
@@ -92,10 +96,7 @@ pub fn parse_stream_from_source_str(
sess: &ParseSess,
override_span: Option<Span>,
) -> TokenStream {
- let (stream, mut errors) =
- source_file_to_stream(sess, sess.source_map().new_source_file(name, source), override_span);
- emit_unclosed_delims(&mut errors, &sess);
- stream
+ source_file_to_stream(sess, sess.source_map().new_source_file(name, source), override_span)
}
/// Creates a new parser from a source string.
@@ -131,9 +132,8 @@ fn maybe_source_file_to_parser(
source_file: Lrc<SourceFile>,
) -> Result<Parser<'_>, Vec<Diagnostic>> {
let end_pos = source_file.end_pos;
- let (stream, unclosed_delims) = maybe_file_to_stream(sess, source_file, None)?;
+ let stream = maybe_file_to_stream(sess, source_file, None)?;
let mut parser = stream_to_parser(sess, stream, None);
- parser.unclosed_delims = unclosed_delims;
if parser.token == token::Eof {
parser.token.span = Span::new(end_pos, end_pos, parser.token.span.ctxt(), None);
}
@@ -178,7 +178,7 @@ pub fn source_file_to_stream(
sess: &ParseSess,
source_file: Lrc<SourceFile>,
override_span: Option<Span>,
-) -> (TokenStream, Vec<lexer::UnmatchedBrace>) {
+) -> TokenStream {
panictry_buffer!(&sess.span_diagnostic, maybe_file_to_stream(sess, source_file, override_span))
}
@@ -188,7 +188,7 @@ pub fn maybe_file_to_stream(
sess: &ParseSess,
source_file: Lrc<SourceFile>,
override_span: Option<Span>,
-) -> Result<(TokenStream, Vec<lexer::UnmatchedBrace>), Vec<Diagnostic>> {
+) -> Result<TokenStream, Vec<Diagnostic>> {
let src = source_file.src.as_ref().unwrap_or_else(|| {
sess.span_diagnostic.bug(&format!(
"cannot lex `source_file` without source: {}",
@@ -196,23 +196,7 @@ pub fn maybe_file_to_stream(
));
});
- let (token_trees, unmatched_braces) =
- lexer::parse_token_trees(sess, src.as_str(), source_file.start_pos, override_span);
-
- match token_trees {
- Ok(stream) => Ok((stream, unmatched_braces)),
- Err(err) => {
- let mut buffer = Vec::with_capacity(1);
- err.buffer(&mut buffer);
- // Not using `emit_unclosed_delims` to use `db.buffer`
- for unmatched in unmatched_braces {
- if let Some(err) = make_unclosed_delims_error(unmatched, &sess) {
- err.buffer(&mut buffer);
- }
- }
- Err(buffer)
- }
- }
+ lexer::parse_token_trees(sess, src.as_str(), source_file.start_pos, override_span)
}
/// Given a stream and the `ParseSess`, produces a parser.
diff --git a/compiler/rustc_parse/src/parser/attr.rs b/compiler/rustc_parse/src/parser/attr.rs
index 686454a8f..e3e7c63e3 100644
--- a/compiler/rustc_parse/src/parser/attr.rs
+++ b/compiler/rustc_parse/src/parser/attr.rs
@@ -1,11 +1,15 @@
use crate::errors::{InvalidMetaItem, SuffixedLiteralInAttribute};
+use crate::fluent_generated as fluent;
use super::{AttrWrapper, Capturing, FnParseMode, ForceCollect, Parser, PathStyle};
use rustc_ast as ast;
use rustc_ast::attr;
use rustc_ast::token::{self, Delimiter, Nonterminal};
-use rustc_errors::{error_code, fluent, Diagnostic, IntoDiagnostic, PResult};
+use rustc_errors::{error_code, Diagnostic, IntoDiagnostic, PResult};
use rustc_span::{sym, BytePos, Span};
+use std::convert::TryInto;
+use thin_vec::ThinVec;
+use tracing::debug;
// Public for rustfmt usage
#[derive(Debug)]
@@ -65,10 +69,10 @@ impl<'a> Parser<'a> {
token::CommentKind::Block => OuterAttributeType::DocBlockComment,
},
) {
- err.note(fluent::note);
+ err.note(fluent::parse_note);
err.span_suggestion_verbose(
replacement_span,
- fluent::suggestion,
+ fluent::parse_suggestion,
"",
rustc_errors::Applicability::MachineApplicable,
);
@@ -172,10 +176,10 @@ impl<'a> Parser<'a> {
Ok(Some(item)) => {
// FIXME(#100717)
err.set_arg("item", item.kind.descr());
- err.span_label(item.span, fluent::label_does_not_annotate_this);
+ err.span_label(item.span, fluent::parse_label_does_not_annotate_this);
err.span_suggestion_verbose(
replacement_span,
- fluent::sugg_change_inner_to_outer,
+ fluent::parse_sugg_change_inner_to_outer,
match attr_type {
OuterAttributeType::Attribute => "",
OuterAttributeType::DocBlockComment => "*",
@@ -201,8 +205,8 @@ impl<'a> Parser<'a> {
attr_sp,
fluent::parse_inner_attr_not_permitted_after_outer_doc_comment,
);
- diag.span_label(attr_sp, fluent::label_attr)
- .span_label(prev_doc_comment_span, fluent::label_prev_doc_comment);
+ diag.span_label(attr_sp, fluent::parse_label_attr)
+ .span_label(prev_doc_comment_span, fluent::parse_label_prev_doc_comment);
diag
}
Some(InnerAttrForbiddenReason::AfterOuterAttribute { prev_outer_attr_sp }) => {
@@ -210,8 +214,8 @@ impl<'a> Parser<'a> {
attr_sp,
fluent::parse_inner_attr_not_permitted_after_outer_attr,
);
- diag.span_label(attr_sp, fluent::label_attr)
- .span_label(prev_outer_attr_sp, fluent::label_prev_attr);
+ diag.span_label(attr_sp, fluent::parse_label_attr)
+ .span_label(prev_outer_attr_sp, fluent::parse_label_prev_attr);
diag
}
Some(InnerAttrForbiddenReason::InCodeBlock) | None => {
@@ -346,9 +350,9 @@ impl<'a> Parser<'a> {
}
/// Matches `COMMASEP(meta_item_inner)`.
- pub(crate) fn parse_meta_seq_top(&mut self) -> PResult<'a, Vec<ast::NestedMetaItem>> {
+ pub(crate) fn parse_meta_seq_top(&mut self) -> PResult<'a, ThinVec<ast::NestedMetaItem>> {
// Presumably, the majority of the time there will only be one attr.
- let mut nmis = Vec::with_capacity(1);
+ let mut nmis = ThinVec::with_capacity(1);
while self.token.kind != token::Eof {
nmis.push(self.parse_meta_item_inner()?);
if !self.eat(&token::Comma) {
diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs
index b97f22417..b0ab0f106 100644
--- a/compiler/rustc_parse/src/parser/attr_wrapper.rs
+++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs
@@ -134,11 +134,11 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
// Process the replace ranges, starting from the highest start
// position and working our way back. If have tokens like:
//
- // `#[cfg(FALSE)]` struct Foo { #[cfg(FALSE)] field: bool }`
+ // `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }`
//
// Then we will generate replace ranges for both
// the `#[cfg(FALSE)] field: bool` and the entire
- // `#[cfg(FALSE)]` struct Foo { #[cfg(FALSE)] field: bool }`
+ // `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }`
//
// By starting processing from the replace range with the greatest
// start position, we ensure that any replace range which encloses
@@ -469,6 +469,6 @@ mod size_asserts {
use rustc_data_structures::static_assert_size;
// tidy-alphabetical-start
static_assert_size!(AttrWrapper, 16);
- static_assert_size!(LazyAttrTokenStreamImpl, 144);
+ static_assert_size!(LazyAttrTokenStreamImpl, 120);
// tidy-alphabetical-end
}
diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs
index 4c918c670..a051dbe9f 100644
--- a/compiler/rustc_parse/src/parser/diagnostics.rs
+++ b/compiler/rustc_parse/src/parser/diagnostics.rs
@@ -18,7 +18,8 @@ use crate::errors::{
UseEqInstead,
};
-use crate::lexer::UnmatchedBrace;
+use crate::fluent_generated as fluent;
+use crate::lexer::UnmatchedDelim;
use crate::parser;
use rustc_ast as ast;
use rustc_ast::ptr::P;
@@ -32,10 +33,9 @@ use rustc_ast::{
use rustc_ast_pretty::pprust;
use rustc_data_structures::fx::FxHashSet;
use rustc_errors::{
- fluent, Applicability, DiagnosticBuilder, DiagnosticMessage, FatalError, Handler, MultiSpan,
- PResult,
+ pluralize, Applicability, Diagnostic, DiagnosticBuilder, DiagnosticMessage, ErrorGuaranteed,
+ FatalError, Handler, IntoDiagnostic, MultiSpan, PResult,
};
-use rustc_errors::{pluralize, Diagnostic, ErrorGuaranteed, IntoDiagnostic};
use rustc_session::errors::ExprParenthesesNeeded;
use rustc_span::source_map::Spanned;
use rustc_span::symbol::{kw, sym, Ident};
@@ -165,8 +165,6 @@ enum IsStandalone {
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
enum IncOrDec {
Inc,
- // FIXME: `i--` recovery isn't implemented yet
- #[allow(dead_code)]
Dec,
}
@@ -222,7 +220,7 @@ impl MultiSugg {
/// is dropped.
pub struct SnapshotParser<'a> {
parser: Parser<'a>,
- unclosed_delims: Vec<UnmatchedBrace>,
+ unclosed_delims: Vec<UnmatchedDelim>,
}
impl<'a> Deref for SnapshotParser<'a> {
@@ -264,7 +262,7 @@ impl<'a> Parser<'a> {
self.unclosed_delims.extend(snapshot.unclosed_delims);
}
- pub fn unclosed_delims(&self) -> &[UnmatchedBrace] {
+ pub fn unclosed_delims(&self) -> &[UnmatchedDelim] {
&self.unclosed_delims
}
@@ -284,7 +282,7 @@ impl<'a> Parser<'a> {
self.sess.source_map().span_to_snippet(span)
}
- pub(super) fn expected_ident_found(&self) -> DiagnosticBuilder<'a, ErrorGuaranteed> {
+ pub(super) fn expected_ident_found(&mut self) -> DiagnosticBuilder<'a, ErrorGuaranteed> {
let valid_follow = &[
TokenKind::Eq,
TokenKind::Colon,
@@ -324,7 +322,61 @@ impl<'a> Parser<'a> {
suggest_raw,
suggest_remove_comma,
};
- err.into_diagnostic(&self.sess.span_diagnostic)
+ let mut err = err.into_diagnostic(&self.sess.span_diagnostic);
+
+ // if the token we have is a `<`
+ // it *might* be a misplaced generic
+ if self.token == token::Lt {
+ // all keywords that could have generic applied
+ let valid_prev_keywords =
+ [kw::Fn, kw::Type, kw::Struct, kw::Enum, kw::Union, kw::Trait];
+
+ // We expected an identifier but found a `<`.
+ // If the previous token is a keyword
+ // that can take generic parameters,
+ // suggest the correct placement of
+ // the generics (later on).
+ let maybe_keyword = self.prev_token.clone();
+ if valid_prev_keywords.into_iter().any(|x| maybe_keyword.is_keyword(x)) {
+ // if we have such a keyword, attempt to parse the generics
+ // and also obtain the keyword's symbol
+ match self.parse_generics() {
+ Ok(generic) => {
+ if let TokenKind::Ident(symbol, _) = maybe_keyword.kind {
+ let ident_name = symbol;
+ // at this point, we've found something like
+ // `fn <T>id`
+ // and the current token should be an Ident holding the item name (i.e. the function name);
+ // if there is another `<` after the fn name, don't show a suggestion, just show help
+
+ if !self.look_ahead(1, |t| *t == token::Lt) &&
+ let Ok(snippet) = self.sess.source_map().span_to_snippet(generic.span) {
+ err.multipart_suggestion_verbose(
+ format!("place the generic parameter name after the {ident_name} name"),
+ vec![
+ (self.token.span.shrink_to_hi(), snippet),
+ (generic.span, String::new())
+ ],
+ Applicability::MaybeIncorrect,
+ );
+ } else {
+ err.help(format!(
+ "place the generic parameter name after the {ident_name} name"
+ ));
+ }
+ }
+ }
+ Err(err) => {
+ // if parsing the generics failed, cancel that error,
+ // skip the misplaced-generics suggestion,
+ // and emit the expected-ident error instead
+ err.cancel();
+ }
+ }
+ }
+ }
+
+ err
}
pub(super) fn expected_one_of_not_found(
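
(A hypothetical input for the misplaced-generics recovery added above, shown for illustration; the quoted suggestion text is the one built in the diff.)

    // `fn <T>id(x: T) -> T { x }` now gets "place the generic parameter name
    // after the fn name", i.e. the accepted form:
    fn id<T>(x: T) -> T { x }

    fn main() { assert_eq!(id(1), 1); }
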
@@ -639,7 +691,7 @@ impl<'a> Parser<'a> {
span: self.prev_token.span.shrink_to_lo(),
tokens: None,
};
- let struct_expr = snapshot.parse_struct_expr(None, path, false);
+ let struct_expr = snapshot.parse_expr_struct(None, path, false);
let block_tail = self.parse_block_tail(lo, s, AttemptLocalParseRecovery::No);
return Some(match (struct_expr, block_tail) {
(Ok(expr), Err(mut err)) => {
@@ -654,7 +706,7 @@ impl<'a> Parser<'a> {
err.delay_as_bug();
self.restore_snapshot(snapshot);
let mut tail = self.mk_block(
- vec![self.mk_stmt_err(expr.span)],
+ thin_vec![self.mk_stmt_err(expr.span)],
s,
lo.to(self.prev_token.span),
);
@@ -1303,6 +1355,20 @@ impl<'a> Parser<'a> {
self.recover_from_inc_dec(operand_expr, kind, op_span)
}
+ pub(super) fn recover_from_postfix_decrement(
+ &mut self,
+ operand_expr: P<Expr>,
+ op_span: Span,
+ start_stmt: bool,
+ ) -> PResult<'a, P<Expr>> {
+ let kind = IncDecRecovery {
+ standalone: if start_stmt { IsStandalone::Standalone } else { IsStandalone::Subexpr },
+ op: IncOrDec::Dec,
+ fixity: UnaryFixity::Post,
+ };
+ self.recover_from_inc_dec(operand_expr, kind, op_span)
+ }
+
fn recover_from_inc_dec(
&mut self,
base: P<Expr>,
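
(Illustration, not part of the patch: the postfix `--` form that the new `recover_from_postfix_decrement` path reports, next to a form that compiles.)

    fn main() {
        let mut x = 3;
        // `x--;` is not a Rust operator; the parser now recovers from it instead of
        // producing an unrelated error. A working equivalent:
        x -= 1;
        assert_eq!(x, 2);
    }
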
@@ -1570,7 +1636,7 @@ impl<'a> Parser<'a> {
// Handle `await { <expr> }`.
// This needs to be handled separately from the next arm to avoid
// interpreting `await { <expr> }?` as `<expr>?.await`.
- self.parse_block_expr(None, self.token.span, BlockCheckMode::Default)
+ self.parse_expr_block(None, self.token.span, BlockCheckMode::Default)
} else {
self.parse_expr()
}
@@ -2030,7 +2096,7 @@ impl<'a> Parser<'a> {
}
pub(super) fn recover_arg_parse(&mut self) -> PResult<'a, (P<ast::Pat>, P<ast::Ty>)> {
- let pat = self.parse_pat_no_top_alt(Some("argument name"))?;
+ let pat = self.parse_pat_no_top_alt(Some(Expected::ArgumentName))?;
self.expect(&token::Colon)?;
let ty = self.parse_ty()?;
@@ -2121,7 +2187,7 @@ impl<'a> Parser<'a> {
/// the parameters are *names* (so we don't emit errors about not being able to find `b` in
/// the local scope), but if we find the same name multiple times, like in `fn foo(i8, i8)`,
/// we deduplicate them to not complain about duplicated parameter names.
- pub(super) fn deduplicate_recovered_params_names(&self, fn_inputs: &mut Vec<Param>) {
+ pub(super) fn deduplicate_recovered_params_names(&self, fn_inputs: &mut ThinVec<Param>) {
let mut seen_inputs = FxHashSet::default();
for input in fn_inputs.iter_mut() {
let opt_ident = if let (PatKind::Ident(_, ident, _), TyKind::Err) =
@@ -2145,7 +2211,7 @@ impl<'a> Parser<'a> {
/// like the user has forgotten them.
pub fn handle_ambiguous_unbraced_const_arg(
&mut self,
- args: &mut Vec<AngleBracketedArg>,
+ args: &mut ThinVec<AngleBracketedArg>,
) -> PResult<'a, bool> {
// If we haven't encountered a closing `>`, then the argument is malformed.
// It's likely that the user has written a const expression without enclosing it
@@ -2353,6 +2419,28 @@ impl<'a> Parser<'a> {
Err(err)
}
+ /// Try to recover from an unbraced const argument whose first token [could begin a type][ty].
+ ///
+ /// [ty]: token::Token::can_begin_type
+ pub(crate) fn recover_unbraced_const_arg_that_can_begin_ty(
+ &mut self,
+ mut snapshot: SnapshotParser<'a>,
+ ) -> Option<P<ast::Expr>> {
+ match snapshot.parse_expr_res(Restrictions::CONST_EXPR, None) {
+ // Since we don't know the exact reason why we failed to parse the type or the
+ // expression, employ a simple heuristic to weed out some pathological cases.
+ Ok(expr) if let token::Comma | token::Gt = snapshot.token.kind => {
+ self.restore_snapshot(snapshot);
+ Some(expr)
+ }
+ Ok(_) => None,
+ Err(err) => {
+ err.cancel();
+ None
+ }
+ }
+ }
+
/// Creates a dummy const argument, and reports that the expression must be enclosed in braces
pub fn dummy_const_arg_needs_braces(
&self,
@@ -2375,7 +2463,7 @@ impl<'a> Parser<'a> {
pub(crate) fn maybe_recover_colon_colon_in_pat_typo(
&mut self,
mut first_pat: P<Pat>,
- expected: Expected,
+ expected: Option<Expected>,
) -> P<Pat> {
if token::Colon != self.token.kind {
return first_pat;
@@ -2383,26 +2471,42 @@ impl<'a> Parser<'a> {
if !matches!(first_pat.kind, PatKind::Ident(_, _, None) | PatKind::Path(..))
|| !self.look_ahead(1, |token| token.is_ident() && !token.is_reserved_ident())
{
+ let mut snapshot_type = self.create_snapshot_for_diagnostic();
+ snapshot_type.bump(); // `:`
+ match snapshot_type.parse_ty() {
+ Err(inner_err) => {
+ inner_err.cancel();
+ }
+ Ok(ty) => {
+ let Err(mut err) = self.expected_one_of_not_found(&[], &[]) else {
+ return first_pat;
+ };
+ err.span_label(ty.span, "specifying the type of a pattern isn't supported");
+ self.restore_snapshot(snapshot_type);
+ let span = first_pat.span.to(ty.span);
+ first_pat = self.mk_pat(span, PatKind::Wild);
+ err.emit();
+ }
+ }
return first_pat;
}
// The pattern looks like it might be a path with a `::` -> `:` typo:
// `match foo { bar:baz => {} }`
- let span = self.token.span;
+ let colon_span = self.token.span;
// We only emit "unexpected `:`" error here if we can successfully parse the
// whole pattern correctly in that case.
- let snapshot = self.create_snapshot_for_diagnostic();
+ let mut snapshot_pat = self.create_snapshot_for_diagnostic();
+ let mut snapshot_type = self.create_snapshot_for_diagnostic();
// Create error for "unexpected `:`".
match self.expected_one_of_not_found(&[], &[]) {
Err(mut err) => {
- self.bump(); // Skip the `:`.
- match self.parse_pat_no_top_alt(expected) {
+ // Skip the `:`.
+ snapshot_pat.bump();
+ snapshot_type.bump();
+ match snapshot_pat.parse_pat_no_top_alt(expected) {
Err(inner_err) => {
- // Carry on as if we had not done anything, callers will emit a
- // reasonable error.
inner_err.cancel();
- err.cancel();
- self.restore_snapshot(snapshot);
}
Ok(mut pat) => {
// We've parsed the rest of the pattern.
@@ -2466,8 +2570,8 @@ impl<'a> Parser<'a> {
_ => {}
}
if show_sugg {
- err.span_suggestion(
- span,
+ err.span_suggestion_verbose(
+ colon_span.until(self.look_ahead(1, |t| t.span)),
"maybe write a path separator here",
"::",
Applicability::MaybeIncorrect,
@@ -2475,13 +2579,24 @@ impl<'a> Parser<'a> {
} else {
first_pat = self.mk_pat(new_span, PatKind::Wild);
}
- err.emit();
+ self.restore_snapshot(snapshot_pat);
+ }
+ }
+ match snapshot_type.parse_ty() {
+ Err(inner_err) => {
+ inner_err.cancel();
+ }
+ Ok(ty) => {
+ err.span_label(ty.span, "specifying the type of a pattern isn't supported");
+ self.restore_snapshot(snapshot_type);
+ let new_span = first_pat.span.to(ty.span);
+ first_pat = self.mk_pat(new_span, PatKind::Wild);
}
}
+ err.emit();
}
_ => {
// Carry on as if we had not done anything. This should be unreachable.
- self.restore_snapshot(snapshot);
}
};
first_pat
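
(A made-up example of the two cases the reworked `:` recovery above distinguishes; `Color` and `color_name` are assumed names, and the quoted messages are the ones used in the diff.)

    enum Color { Red, Blue }

    fn color_name(c: Color) -> &'static str {
        match c {
            // Writing `Color:Red` (single colon) now gets the "maybe write a path
            // separator here" suggestion, while following a pattern with a type gets
            // "specifying the type of a pattern isn't supported".
            Color::Red => "red",
            Color::Blue => "blue",
        }
    }

    fn main() { assert_eq!(color_name(Color::Red), "red"); }
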
diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs
index bf93a89f0..95a7ca80d 100644
--- a/compiler/rustc_parse/src/parser/expr.rs
+++ b/compiler/rustc_parse/src/parser/expr.rs
@@ -1,29 +1,12 @@
use super::diagnostics::SnapshotParser;
-use super::pat::{CommaRecoveryMode, RecoverColon, RecoverComma, PARAM_EXPECTED};
+use super::pat::{CommaRecoveryMode, Expected, RecoverColon, RecoverComma};
use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
use super::{
AttrWrapper, BlockMode, ClosureSpans, ForceCollect, Parser, PathStyle, Restrictions,
SemiColonMode, SeqSep, TokenExpectType, TokenType, TrailingToken,
};
-use crate::errors::{
- ArrayBracketsInsteadOfSpaces, ArrayBracketsInsteadOfSpacesSugg, AsyncMoveOrderIncorrect,
- BracesForStructLiteral, CatchAfterTry, CommaAfterBaseStruct, ComparisonInterpretedAsGeneric,
- ComparisonOrShiftInterpretedAsGenericSugg, DoCatchSyntaxRemoved, DotDotDot, EqFieldInit,
- ExpectedElseBlock, ExpectedEqForLetExpr, ExpectedExpressionFoundLet,
- FieldExpressionWithGeneric, FloatLiteralRequiresIntegerPart, FoundExprWouldBeStmt,
- IfExpressionMissingCondition, IfExpressionMissingThenBlock, IfExpressionMissingThenBlockSub,
- InvalidBlockMacroSegment, InvalidComparisonOperator, InvalidComparisonOperatorSub,
- InvalidInterpolatedExpression, InvalidLiteralSuffixOnTupleIndex, InvalidLogicalOperator,
- InvalidLogicalOperatorSub, LabeledLoopInBreak, LeadingPlusNotSupported, LeftArrowOperator,
- LifetimeInBorrowExpression, MacroInvocationWithQualifiedPath, MalformedLoopLabel,
- MatchArmBodyWithoutBraces, MatchArmBodyWithoutBracesSugg, MissingCommaAfterMatchArm,
- MissingDotDot, MissingInInForLoop, MissingInInForLoopSub, MissingSemicolonBeforeArray,
- NoFieldsForFnCall, NotAsNegationOperator, NotAsNegationOperatorSub,
- OuterAttributeNotAllowedOnIfElse, ParenthesesWithStructFields,
- RequireColonAfterLabeledExpression, ShiftInterpretedAsGeneric, StructLiteralNotAllowedHere,
- StructLiteralNotAllowedHereSugg, TildeAsUnaryOperator, UnexpectedIfWithIf,
- UnexpectedTokenAfterLabel, UnexpectedTokenAfterLabelSugg, WrapExpressionInParentheses,
-};
+
+use crate::errors;
use crate::maybe_recover_from_interpolated_ty_qpath;
use core::mem;
use rustc_ast::ptr::P;
@@ -39,8 +22,8 @@ use rustc_ast::{Arm, Async, BlockCheckMode, Expr, ExprKind, Label, Movability, R
use rustc_ast::{ClosureBinder, MetaItemLit, StmtKind};
use rustc_ast_pretty::pprust;
use rustc_errors::{
- Applicability, Diagnostic, DiagnosticBuilder, ErrorGuaranteed, IntoDiagnostic, PResult,
- StashKey,
+ AddToDiagnostic, Applicability, Diagnostic, DiagnosticBuilder, ErrorGuaranteed, IntoDiagnostic,
+ PResult, StashKey,
};
use rustc_session::errors::{report_lit_error, ExprParenthesesNeeded};
use rustc_session::lint::builtin::BREAK_WITH_LABEL_AND_LOOP;
@@ -48,6 +31,7 @@ use rustc_session::lint::BuiltinLintDiagnostics;
use rustc_span::source_map::{self, Span, Spanned};
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use rustc_span::{BytePos, Pos};
+use thin_vec::{thin_vec, ThinVec};
/// Possibly accepts an `token::Interpolated` expression (a pre-parsed expression
/// dropped into the token stream, which happens while parsing the result of
@@ -119,7 +103,7 @@ impl<'a> Parser<'a> {
self.collect_tokens_no_attrs(|this| this.parse_expr())
}
- pub fn parse_anon_const_expr(&mut self) -> PResult<'a, AnonConst> {
+ pub fn parse_expr_anon_const(&mut self) -> PResult<'a, AnonConst> {
self.parse_expr().map(|value| AnonConst { id: DUMMY_NODE_ID, value })
}
@@ -141,7 +125,7 @@ impl<'a> Parser<'a> {
}
/// Parses a sequence of expressions delimited by parentheses.
- fn parse_paren_expr_seq(&mut self) -> PResult<'a, Vec<P<Expr>>> {
+ fn parse_expr_paren_seq(&mut self) -> PResult<'a, ThinVec<P<Expr>>> {
self.parse_paren_comma_seq(|p| p.parse_expr_catch_underscore()).map(|(r, _)| r)
}
@@ -152,7 +136,7 @@ impl<'a> Parser<'a> {
r: Restrictions,
already_parsed_attrs: Option<AttrWrapper>,
) -> PResult<'a, P<Expr>> {
- self.with_res(r, |this| this.parse_assoc_expr(already_parsed_attrs))
+ self.with_res(r, |this| this.parse_expr_assoc(already_parsed_attrs))
}
/// Parses an associative expression.
@@ -160,15 +144,15 @@ impl<'a> Parser<'a> {
/// This parses an expression accounting for associativity and precedence of the operators in
/// the expression.
#[inline]
- fn parse_assoc_expr(
+ fn parse_expr_assoc(
&mut self,
already_parsed_attrs: Option<AttrWrapper>,
) -> PResult<'a, P<Expr>> {
- self.parse_assoc_expr_with(0, already_parsed_attrs.into())
+ self.parse_expr_assoc_with(0, already_parsed_attrs.into())
}
/// Parses an associative expression with operators of at least `min_prec` precedence.
- pub(super) fn parse_assoc_expr_with(
+ pub(super) fn parse_expr_assoc_with(
&mut self,
min_prec: usize,
lhs: LhsExpr,
@@ -183,9 +167,9 @@ impl<'a> Parser<'a> {
_ => None,
};
if self.token.is_range_separator() {
- return self.parse_prefix_range_expr(attrs);
+ return self.parse_expr_prefix_range(attrs);
} else {
- self.parse_prefix_expr(attrs)?
+ self.parse_expr_prefix(attrs)?
}
};
let last_type_ascription_set = self.last_type_ascription.is_some();
@@ -243,10 +227,10 @@ impl<'a> Parser<'a> {
}
.into();
let invalid = format!("{}=", &sugg);
- self.sess.emit_err(InvalidComparisonOperator {
+ self.sess.emit_err(errors::InvalidComparisonOperator {
span: sp,
invalid: invalid.clone(),
- sub: InvalidComparisonOperatorSub::Correctable {
+ sub: errors::InvalidComparisonOperatorSub::Correctable {
span: sp,
invalid,
correct: sugg,
@@ -261,10 +245,10 @@ impl<'a> Parser<'a> {
&& self.prev_token.span.hi() == self.token.span.lo()
{
let sp = op.span.to(self.token.span);
- self.sess.emit_err(InvalidComparisonOperator {
+ self.sess.emit_err(errors::InvalidComparisonOperator {
span: sp,
invalid: "<>".into(),
- sub: InvalidComparisonOperatorSub::Correctable {
+ sub: errors::InvalidComparisonOperatorSub::Correctable {
span: sp,
invalid: "<>".into(),
correct: "!=".into(),
@@ -279,10 +263,10 @@ impl<'a> Parser<'a> {
&& self.prev_token.span.hi() == self.token.span.lo()
{
let sp = op.span.to(self.token.span);
- self.sess.emit_err(InvalidComparisonOperator {
+ self.sess.emit_err(errors::InvalidComparisonOperator {
span: sp,
invalid: "<=>".into(),
- sub: InvalidComparisonOperatorSub::Spaceship(sp),
+ sub: errors::InvalidComparisonOperatorSub::Spaceship(sp),
});
self.bump();
}
@@ -298,6 +282,18 @@ impl<'a> Parser<'a> {
continue;
}
+ if self.prev_token == token::BinOp(token::Minus)
+ && self.token == token::BinOp(token::Minus)
+ && self.prev_token.span.between(self.token.span).is_empty()
+ && !self.look_ahead(1, |tok| tok.can_begin_expr())
+ {
+ let op_span = self.prev_token.span.to(self.token.span);
+ // Eat the second `-`
+ self.bump();
+ lhs = self.recover_from_postfix_decrement(lhs, op_span, starts_stmt)?;
+ continue;
+ }
+
let op = op.node;
// Special cases:
if op == AssocOp::As {
@@ -309,7 +305,7 @@ impl<'a> Parser<'a> {
} else if op == AssocOp::DotDot || op == AssocOp::DotDotEq {
// If we didn't have to handle `x..`/`x..=`, it would be pretty easy to
// generalise it to the Fixity::None code.
- lhs = self.parse_range_expr(prec, lhs, op, cur_op_span)?;
+ lhs = self.parse_expr_range(prec, lhs, op, cur_op_span)?;
break;
}
@@ -322,7 +318,7 @@ impl<'a> Parser<'a> {
Fixity::None => 1,
};
let rhs = self.with_res(restrictions - Restrictions::STMT_EXPR, |this| {
- this.parse_assoc_expr_with(prec + prec_adjustment, LhsExpr::NotYetParsed)
+ this.parse_expr_assoc_with(prec + prec_adjustment, LhsExpr::NotYetParsed)
})?;
let span = self.mk_expr_sp(&lhs, lhs_span, rhs.span);
@@ -419,7 +415,7 @@ impl<'a> Parser<'a> {
/// but the next token implies this should be parsed as an expression.
/// For example: `if let Some(x) = x { x } else { 0 } / 2`.
fn error_found_expr_would_be_stmt(&self, lhs: &Expr) {
- self.sess.emit_err(FoundExprWouldBeStmt {
+ self.sess.emit_err(errors::FoundExprWouldBeStmt {
span: self.token.span,
token: self.token.clone(),
suggestion: ExprParenthesesNeeded::surrounding(lhs.span),
@@ -446,18 +442,18 @@ impl<'a> Parser<'a> {
}
(Some(op), _) => (op, self.token.span),
(None, Some((Ident { name: sym::and, span }, false))) if self.may_recover() => {
- self.sess.emit_err(InvalidLogicalOperator {
+ self.sess.emit_err(errors::InvalidLogicalOperator {
span: self.token.span,
incorrect: "and".into(),
- sub: InvalidLogicalOperatorSub::Conjunction(self.token.span),
+ sub: errors::InvalidLogicalOperatorSub::Conjunction(self.token.span),
});
(AssocOp::LAnd, span)
}
(None, Some((Ident { name: sym::or, span }, false))) if self.may_recover() => {
- self.sess.emit_err(InvalidLogicalOperator {
+ self.sess.emit_err(errors::InvalidLogicalOperator {
span: self.token.span,
incorrect: "or".into(),
- sub: InvalidLogicalOperatorSub::Disjunction(self.token.span),
+ sub: errors::InvalidLogicalOperatorSub::Disjunction(self.token.span),
});
(AssocOp::LOr, span)
}
@@ -474,7 +470,7 @@ impl<'a> Parser<'a> {
/// Parses `x..y`, `x..=y`, and `x..`/`x..=`.
/// The other two variants are handled in `parse_prefix_range_expr` below.
- fn parse_range_expr(
+ fn parse_expr_range(
&mut self,
prec: usize,
lhs: P<Expr>,
@@ -482,7 +478,7 @@ impl<'a> Parser<'a> {
cur_op_span: Span,
) -> PResult<'a, P<Expr>> {
let rhs = if self.is_at_start_of_range_notation_rhs() {
- Some(self.parse_assoc_expr_with(prec + 1, LhsExpr::NotYetParsed)?)
+ Some(self.parse_expr_assoc_with(prec + 1, LhsExpr::NotYetParsed)?)
} else {
None
};
@@ -507,7 +503,7 @@ impl<'a> Parser<'a> {
}
/// Parses prefix-forms of range notation: `..expr`, `..`, `..=expr`.
- fn parse_prefix_range_expr(&mut self, attrs: Option<AttrWrapper>) -> PResult<'a, P<Expr>> {
+ fn parse_expr_prefix_range(&mut self, attrs: Option<AttrWrapper>) -> PResult<'a, P<Expr>> {
// Check for deprecated `...` syntax.
if self.token == token::DotDotDot {
self.err_dotdotdot_syntax(self.token.span);
@@ -534,7 +530,7 @@ impl<'a> Parser<'a> {
this.bump();
let (span, opt_end) = if this.is_at_start_of_range_notation_rhs() {
// RHS must be parsed with more associativity than the dots.
- this.parse_assoc_expr_with(op.unwrap().precedence() + 1, LhsExpr::NotYetParsed)
+ this.parse_expr_assoc_with(op.unwrap().precedence() + 1, LhsExpr::NotYetParsed)
.map(|x| (lo.to(x.span), Some(x)))?
} else {
(lo, None)
@@ -545,7 +541,7 @@ impl<'a> Parser<'a> {
}
/// Parses a prefix-unary-operator expr.
- fn parse_prefix_expr(&mut self, attrs: Option<AttrWrapper>) -> PResult<'a, P<Expr>> {
+ fn parse_expr_prefix(&mut self, attrs: Option<AttrWrapper>) -> PResult<'a, P<Expr>> {
let attrs = self.parse_or_use_outer_attributes(attrs)?;
let lo = self.token.span;
@@ -563,25 +559,28 @@ impl<'a> Parser<'a> {
// Note: when adding new unary operators, don't forget to adjust TokenKind::can_begin_expr()
match this.token.uninterpolate().kind {
// `!expr`
- token::Not => make_it!(this, attrs, |this, _| this.parse_unary_expr(lo, UnOp::Not)),
+ token::Not => make_it!(this, attrs, |this, _| this.parse_expr_unary(lo, UnOp::Not)),
// `~expr`
token::Tilde => make_it!(this, attrs, |this, _| this.recover_tilde_expr(lo)),
// `-expr`
token::BinOp(token::Minus) => {
- make_it!(this, attrs, |this, _| this.parse_unary_expr(lo, UnOp::Neg))
+ make_it!(this, attrs, |this, _| this.parse_expr_unary(lo, UnOp::Neg))
}
// `*expr`
token::BinOp(token::Star) => {
- make_it!(this, attrs, |this, _| this.parse_unary_expr(lo, UnOp::Deref))
+ make_it!(this, attrs, |this, _| this.parse_expr_unary(lo, UnOp::Deref))
}
// `&expr` and `&&expr`
token::BinOp(token::And) | token::AndAnd => {
- make_it!(this, attrs, |this, _| this.parse_borrow_expr(lo))
+ make_it!(this, attrs, |this, _| this.parse_expr_borrow(lo))
}
// `+lit`
token::BinOp(token::Plus) if this.look_ahead(1, |tok| tok.is_numeric_lit()) => {
- let mut err =
- LeadingPlusNotSupported { span: lo, remove_plus: None, add_parentheses: None };
+ let mut err = errors::LeadingPlusNotSupported {
+ span: lo,
+ remove_plus: None,
+ add_parentheses: None,
+ };
// a block on the LHS might have been intended to be an expression instead
if let Some(sp) = this.sess.ambiguous_block_expr_parse.borrow().get(&lo) {
@@ -592,7 +591,7 @@ impl<'a> Parser<'a> {
this.sess.emit_err(err);
this.bump();
- this.parse_prefix_expr(None)
+ this.parse_expr_prefix(None)
}
// Recover from `++x`:
token::BinOp(token::Plus)
@@ -605,41 +604,41 @@ impl<'a> Parser<'a> {
this.bump();
this.bump();
- let operand_expr = this.parse_dot_or_call_expr(Default::default())?;
+ let operand_expr = this.parse_expr_dot_or_call(Default::default())?;
this.recover_from_prefix_increment(operand_expr, pre_span, starts_stmt)
}
token::Ident(..) if this.token.is_keyword(kw::Box) => {
- make_it!(this, attrs, |this, _| this.parse_box_expr(lo))
+ make_it!(this, attrs, |this, _| this.parse_expr_box(lo))
}
token::Ident(..) if this.may_recover() && this.is_mistaken_not_ident_negation() => {
make_it!(this, attrs, |this, _| this.recover_not_expr(lo))
}
- _ => return this.parse_dot_or_call_expr(Some(attrs)),
+ _ => return this.parse_expr_dot_or_call(Some(attrs)),
}
}
- fn parse_prefix_expr_common(&mut self, lo: Span) -> PResult<'a, (Span, P<Expr>)> {
+ fn parse_expr_prefix_common(&mut self, lo: Span) -> PResult<'a, (Span, P<Expr>)> {
self.bump();
- let expr = self.parse_prefix_expr(None);
+ let expr = self.parse_expr_prefix(None);
let (span, expr) = self.interpolated_or_expr_span(expr)?;
Ok((lo.to(span), expr))
}
- fn parse_unary_expr(&mut self, lo: Span, op: UnOp) -> PResult<'a, (Span, ExprKind)> {
- let (span, expr) = self.parse_prefix_expr_common(lo)?;
+ fn parse_expr_unary(&mut self, lo: Span, op: UnOp) -> PResult<'a, (Span, ExprKind)> {
+ let (span, expr) = self.parse_expr_prefix_common(lo)?;
Ok((span, self.mk_unary(op, expr)))
}
/// Recover on `~expr` in favor of `!expr`.
fn recover_tilde_expr(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> {
- self.sess.emit_err(TildeAsUnaryOperator(lo));
+ self.sess.emit_err(errors::TildeAsUnaryOperator(lo));
- self.parse_unary_expr(lo, UnOp::Not)
+ self.parse_expr_unary(lo, UnOp::Not)
}
/// Parse `box expr`.
- fn parse_box_expr(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> {
- let (span, expr) = self.parse_prefix_expr_common(lo)?;
+ fn parse_expr_box(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> {
+ let (span, expr) = self.parse_expr_prefix_common(lo)?;
self.sess.gated_spans.gate(sym::box_syntax, span);
Ok((span, ExprKind::Box(expr)))
}
@@ -660,14 +659,14 @@ impl<'a> Parser<'a> {
let negated_token = self.look_ahead(1, |t| t.clone());
let sub_diag = if negated_token.is_numeric_lit() {
- NotAsNegationOperatorSub::SuggestNotBitwise
+ errors::NotAsNegationOperatorSub::SuggestNotBitwise
} else if negated_token.is_bool_lit() {
- NotAsNegationOperatorSub::SuggestNotLogical
+ errors::NotAsNegationOperatorSub::SuggestNotLogical
} else {
- NotAsNegationOperatorSub::SuggestNotDefault
+ errors::NotAsNegationOperatorSub::SuggestNotDefault
};
- self.sess.emit_err(NotAsNegationOperator {
+ self.sess.emit_err(errors::NotAsNegationOperator {
negated: negated_token.span,
negated_desc: super::token_descr(&negated_token),
// Span the `not` plus trailing whitespace to avoid
@@ -677,7 +676,7 @@ impl<'a> Parser<'a> {
),
});
- self.parse_unary_expr(lo, UnOp::Not)
+ self.parse_expr_unary(lo, UnOp::Not)
}
/// Returns the span of expr, if it was not interpolated or the span of the interpolated token.
@@ -735,10 +734,10 @@ impl<'a> Parser<'a> {
segments[0].ident.span,
),
};
- match self.parse_labeled_expr(label, false) {
+ match self.parse_expr_labeled(label, false) {
Ok(expr) => {
type_err.cancel();
- self.sess.emit_err(MalformedLoopLabel {
+ self.sess.emit_err(errors::MalformedLoopLabel {
span: label.ident.span,
correct_label: label.ident,
});
@@ -763,20 +762,22 @@ impl<'a> Parser<'a> {
);
let args_span = self.look_ahead(1, |t| t.span).to(span_after_type);
- let suggestion = ComparisonOrShiftInterpretedAsGenericSugg {
+ let suggestion = errors::ComparisonOrShiftInterpretedAsGenericSugg {
left: expr.span.shrink_to_lo(),
right: expr.span.shrink_to_hi(),
};
match self.token.kind {
- token::Lt => self.sess.emit_err(ComparisonInterpretedAsGeneric {
- comparison: self.token.span,
- r#type: path,
- args: args_span,
- suggestion,
- }),
+ token::Lt => {
+ self.sess.emit_err(errors::ComparisonInterpretedAsGeneric {
+ comparison: self.token.span,
+ r#type: path,
+ args: args_span,
+ suggestion,
+ })
+ }
token::BinOp(token::Shl) => {
- self.sess.emit_err(ShiftInterpretedAsGeneric {
+ self.sess.emit_err(errors::ShiftInterpretedAsGeneric {
shift: self.token.span,
r#type: path,
args: args_span,
@@ -827,7 +828,7 @@ impl<'a> Parser<'a> {
("cast", None)
};
- let with_postfix = self.parse_dot_or_call_expr_with_(cast_expr, span)?;
+ let with_postfix = self.parse_expr_dot_or_call_with_(cast_expr, span)?;
// Check if an illegal postfix operator has been added after the cast.
// If the resulting expression is not a cast, it is an illegal postfix operator.
@@ -898,15 +899,15 @@ impl<'a> Parser<'a> {
}
/// Parse `& mut? <expr>` or `& raw [ const | mut ] <expr>`.
- fn parse_borrow_expr(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> {
+ fn parse_expr_borrow(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> {
self.expect_and()?;
let has_lifetime = self.token.is_lifetime() && self.look_ahead(1, |t| t != &token::Colon);
let lifetime = has_lifetime.then(|| self.expect_lifetime()); // For recovery, see below.
let (borrow_kind, mutbl) = self.parse_borrow_modifiers(lo);
let expr = if self.token.is_range_separator() {
- self.parse_prefix_range_expr(None)
+ self.parse_expr_prefix_range(None)
} else {
- self.parse_prefix_expr(None)
+ self.parse_expr_prefix(None)
};
let (hi, expr) = self.interpolated_or_expr_span(expr)?;
let span = lo.to(hi);
@@ -917,7 +918,7 @@ impl<'a> Parser<'a> {
}
fn error_remove_borrow_lifetime(&self, span: Span, lt_span: Span) {
- self.sess.emit_err(LifetimeInBorrowExpression { span, lifetime_span: lt_span });
+ self.sess.emit_err(errors::LifetimeInBorrowExpression { span, lifetime_span: lt_span });
}
/// Parse `mut?` or `raw [ const | mut ]`.
@@ -936,16 +937,16 @@ impl<'a> Parser<'a> {
}
/// Parses `a.b` or `a(13)` or `a[4]` or just `a`.
- fn parse_dot_or_call_expr(&mut self, attrs: Option<AttrWrapper>) -> PResult<'a, P<Expr>> {
+ fn parse_expr_dot_or_call(&mut self, attrs: Option<AttrWrapper>) -> PResult<'a, P<Expr>> {
let attrs = self.parse_or_use_outer_attributes(attrs)?;
self.collect_tokens_for_expr(attrs, |this, attrs| {
- let base = this.parse_bottom_expr();
+ let base = this.parse_expr_bottom();
let (span, base) = this.interpolated_or_expr_span(base)?;
- this.parse_dot_or_call_expr_with(base, span, attrs)
+ this.parse_expr_dot_or_call_with(base, span, attrs)
})
}
- pub(super) fn parse_dot_or_call_expr_with(
+ pub(super) fn parse_expr_dot_or_call_with(
&mut self,
e0: P<Expr>,
lo: Span,
@@ -954,7 +955,7 @@ impl<'a> Parser<'a> {
// Stitch the list of outer attributes onto the return value.
// A little bit ugly, but the best way given the current code
// structure
- let res = self.parse_dot_or_call_expr_with_(e0, lo);
+ let res = self.parse_expr_dot_or_call_with_(e0, lo);
if attrs.is_empty() {
res
} else {
@@ -968,7 +969,7 @@ impl<'a> Parser<'a> {
}
}
- fn parse_dot_or_call_expr_with_(&mut self, mut e: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> {
+ fn parse_expr_dot_or_call_with_(&mut self, mut e: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> {
loop {
let has_question = if self.prev_token.kind == TokenKind::Ident(kw::Return, false) {
// we are using noexpect here because we don't expect a `?` directly after a `return`
@@ -991,15 +992,15 @@ impl<'a> Parser<'a> {
};
if has_dot {
// expr.f
- e = self.parse_dot_suffix_expr(lo, e)?;
+ e = self.parse_expr_dot_suffix(lo, e)?;
continue;
}
if self.expr_is_complete(&e) {
return Ok(e);
}
e = match self.token.kind {
- token::OpenDelim(Delimiter::Parenthesis) => self.parse_fn_call_expr(lo, e),
- token::OpenDelim(Delimiter::Bracket) => self.parse_index_expr(lo, e)?,
+ token::OpenDelim(Delimiter::Parenthesis) => self.parse_expr_fn_call(lo, e),
+ token::OpenDelim(Delimiter::Bracket) => self.parse_expr_index(lo, e)?,
_ => return Ok(e),
}
}
@@ -1011,14 +1012,14 @@ impl<'a> Parser<'a> {
&& self.look_ahead(3, |t| t.can_begin_expr())
}
- fn parse_dot_suffix_expr(&mut self, lo: Span, base: P<Expr>) -> PResult<'a, P<Expr>> {
+ fn parse_expr_dot_suffix(&mut self, lo: Span, base: P<Expr>) -> PResult<'a, P<Expr>> {
match self.token.uninterpolate().kind {
token::Ident(..) => self.parse_dot_suffix(base, lo),
token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) => {
- Ok(self.parse_tuple_field_access_expr(lo, base, symbol, suffix, None))
+ Ok(self.parse_expr_tuple_field_access(lo, base, symbol, suffix, None))
}
token::Literal(token::Lit { kind: token::Float, symbol, suffix }) => {
- Ok(self.parse_tuple_field_access_expr_float(lo, base, symbol, suffix))
+ Ok(self.parse_expr_tuple_field_access_float(lo, base, symbol, suffix))
}
_ => {
self.error_unexpected_after_dot();
@@ -1030,7 +1031,7 @@ impl<'a> Parser<'a> {
fn error_unexpected_after_dot(&self) {
// FIXME Could factor this out into non_fatal_unexpected or something.
let actual = pprust::token_to_string(&self.token);
- self.struct_span_err(self.token.span, &format!("unexpected token: `{actual}`")).emit();
+ self.sess.emit_err(errors::UnexpectedTokenAfterDot { span: self.token.span, actual });
}
// We need an identifier or integer, but the next token is a float.
@@ -1040,7 +1041,7 @@ impl<'a> Parser<'a> {
// support pushing "future tokens" (would be also helpful to `break_and_eat`), or
// we should break everything including floats into more basic proc-macro style
// tokens in the lexer (probably preferable).
- fn parse_tuple_field_access_expr_float(
+ fn parse_expr_tuple_field_access_float(
&mut self,
lo: Span,
base: P<Expr>,
@@ -1083,7 +1084,7 @@ impl<'a> Parser<'a> {
match &*components {
// 1e2
[IdentLike(i)] => {
- self.parse_tuple_field_access_expr(lo, base, Symbol::intern(&i), suffix, None)
+ self.parse_expr_tuple_field_access(lo, base, Symbol::intern(&i), suffix, None)
}
// 1.
[IdentLike(i), Punct('.')] => {
@@ -1099,7 +1100,7 @@ impl<'a> Parser<'a> {
let symbol = Symbol::intern(&i);
self.token = Token::new(token::Ident(symbol, false), ident_span);
let next_token = (Token::new(token::Dot, dot_span), self.token_spacing);
- self.parse_tuple_field_access_expr(lo, base, symbol, None, Some(next_token))
+ self.parse_expr_tuple_field_access(lo, base, symbol, None, Some(next_token))
}
// 1.2 | 1.2e3
[IdentLike(i1), Punct('.'), IdentLike(i2)] => {
@@ -1120,11 +1121,11 @@ impl<'a> Parser<'a> {
// See issue #76399 and PR #76285 for more details
let next_token1 = (Token::new(token::Dot, dot_span), Spacing::Alone);
let base1 =
- self.parse_tuple_field_access_expr(lo, base, symbol1, None, Some(next_token1));
+ self.parse_expr_tuple_field_access(lo, base, symbol1, None, Some(next_token1));
let symbol2 = Symbol::intern(&i2);
let next_token2 = Token::new(token::Ident(symbol2, false), ident2_span);
self.bump_with((next_token2, self.token_spacing)); // `.`
- self.parse_tuple_field_access_expr(lo, base1, symbol2, suffix, None)
+ self.parse_expr_tuple_field_access(lo, base1, symbol2, suffix, None)
}
// 1e+ | 1e- (recovered)
[IdentLike(_), Punct('+' | '-')] |
@@ -1142,7 +1143,7 @@ impl<'a> Parser<'a> {
}
}
- fn parse_tuple_field_access_expr(
+ fn parse_expr_tuple_field_access(
&mut self,
lo: Span,
base: P<Expr>,
@@ -1163,7 +1164,7 @@ impl<'a> Parser<'a> {
}
/// Parse a function call expression, `expr(...)`.
- fn parse_fn_call_expr(&mut self, lo: Span, fun: P<Expr>) -> P<Expr> {
+ fn parse_expr_fn_call(&mut self, lo: Span, fun: P<Expr>) -> P<Expr> {
let snapshot = if self.token.kind == token::OpenDelim(Delimiter::Parenthesis)
&& self.look_ahead_type_ascription_as_field()
{
@@ -1174,7 +1175,7 @@ impl<'a> Parser<'a> {
let open_paren = self.token.span;
let mut seq = self
- .parse_paren_expr_seq()
+ .parse_expr_paren_seq()
.map(|args| self.mk_expr(lo.to(self.prev_token.span), self.mk_call(fun, args)));
if let Some(expr) =
self.maybe_recover_struct_lit_bad_delims(lo, open_paren, &mut seq, snapshot)
@@ -1209,16 +1210,21 @@ impl<'a> Parser<'a> {
// `Enum::Foo { a: 3, b: 4 }` or `Enum::Foo(3, 4)`.
self.restore_snapshot(snapshot);
let close_paren = self.prev_token.span;
- let span = lo.to(self.prev_token.span);
- if !fields.is_empty() {
- let mut replacement_err = ParenthesesWithStructFields {
+ let span = lo.to(close_paren);
+ if !fields.is_empty() &&
+ // Don't compare `token.kind` here: `TokenTreesReader::parse_token_tree` treats
+ // `snapshot.token.kind` as if it matched the open delimiter's kind even when the
+ // delimiters actually differ, so check the source snippet for `)` instead.
+ self.span_to_snippet(close_paren).map_or(false, |snippet| snippet == ")")
+ {
+ let mut replacement_err = errors::ParenthesesWithStructFields {
span,
r#type: path,
- braces_for_struct: BracesForStructLiteral {
+ braces_for_struct: errors::BracesForStructLiteral {
first: open_paren,
second: close_paren,
},
- no_fields_for_fn: NoFieldsForFnCall {
+ no_fields_for_fn: errors::NoFieldsForFnCall {
fields: fields
.into_iter()
.map(|field| field.span.until(field.expr.span))
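
(Illustrative sketch, with `Point` assumed for the example: the call-like syntax with named fields that this refined recovery targets, next to the braced form one of its suggestions produces.)

    struct Point { x: i32, y: i32 }

    fn main() {
        // `Point(x: 1, y: 2)` gets both suggestions above: braces for a struct
        // literal, or dropping the field names for a call. The struct-literal form:
        let p = Point { x: 1, y: 2 };
        assert_eq!(p.x + p.y, 3);
    }
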
@@ -1247,7 +1253,7 @@ impl<'a> Parser<'a> {
}
/// Parse an indexing expression `expr[...]`.
- fn parse_index_expr(&mut self, lo: Span, base: P<Expr>) -> PResult<'a, P<Expr>> {
+ fn parse_expr_index(&mut self, lo: Span, base: P<Expr>) -> PResult<'a, P<Expr>> {
let prev_span = self.prev_token.span;
let open_delim_span = self.token.span;
self.bump(); // `[`
@@ -1270,7 +1276,7 @@ impl<'a> Parser<'a> {
if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
// Method call `expr.f()`
- let args = self.parse_paren_expr_seq()?;
+ let args = self.parse_expr_paren_seq()?;
let fn_span = fn_span_lo.to(self.prev_token.span);
let span = lo.to(self.prev_token.span);
Ok(self.mk_expr(
@@ -1285,7 +1291,7 @@ impl<'a> Parser<'a> {
} else {
// Field access `expr.f`
if let Some(args) = seg.args {
- self.sess.emit_err(FieldExpressionWithGeneric(args.span()));
+ self.sess.emit_err(errors::FieldExpressionWithGeneric(args.span()));
}
let span = lo.to(self.prev_token.span);
@@ -1298,7 +1304,7 @@ impl<'a> Parser<'a> {
///
/// N.B., this does not parse outer attributes, and is private because it only works
/// correctly if called from `parse_dot_or_call_expr()`.
- fn parse_bottom_expr(&mut self) -> PResult<'a, P<Expr>> {
+ fn parse_expr_bottom(&mut self) -> PResult<'a, P<Expr>> {
maybe_recover_from_interpolated_ty_qpath!(self, true);
maybe_whole_expr!(self);
@@ -1311,13 +1317,13 @@ impl<'a> Parser<'a> {
// This match arm is a special-case of the `_` match arm below and
// could be removed without changing functionality, but it's faster
// to have it here, especially for programs with large constants.
- self.parse_lit_expr()
+ self.parse_expr_lit()
} else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
- self.parse_tuple_parens_expr()
+ self.parse_expr_tuple_parens()
} else if self.check(&token::OpenDelim(Delimiter::Brace)) {
- self.parse_block_expr(None, lo, BlockCheckMode::Default)
+ self.parse_expr_block(None, lo, BlockCheckMode::Default)
} else if self.check(&token::BinOp(token::Or)) || self.check(&token::OrOr) {
- self.parse_closure_expr().map_err(|mut err| {
+ self.parse_expr_closure().map_err(|mut err| {
// If the input is something like `if a { 1 } else { 2 } | if a { 3 } else { 4 }`
// then suggest parens around the lhs.
if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow().get(&lo) {
@@ -1326,42 +1332,42 @@ impl<'a> Parser<'a> {
err
})
} else if self.check(&token::OpenDelim(Delimiter::Bracket)) {
- self.parse_array_or_repeat_expr(Delimiter::Bracket)
+ self.parse_expr_array_or_repeat(Delimiter::Bracket)
} else if self.check_path() {
- self.parse_path_start_expr()
+ self.parse_expr_path_start()
} else if self.check_keyword(kw::Move)
|| self.check_keyword(kw::Static)
|| self.check_const_closure()
{
- self.parse_closure_expr()
+ self.parse_expr_closure()
} else if self.eat_keyword(kw::If) {
- self.parse_if_expr()
+ self.parse_expr_if()
} else if self.check_keyword(kw::For) {
if self.choose_generics_over_qpath(1) {
- self.parse_closure_expr()
+ self.parse_expr_closure()
} else {
assert!(self.eat_keyword(kw::For));
- self.parse_for_expr(None, self.prev_token.span)
+ self.parse_expr_for(None, self.prev_token.span)
}
} else if self.eat_keyword(kw::While) {
- self.parse_while_expr(None, self.prev_token.span)
+ self.parse_expr_while(None, self.prev_token.span)
} else if let Some(label) = self.eat_label() {
- self.parse_labeled_expr(label, true)
+ self.parse_expr_labeled(label, true)
} else if self.eat_keyword(kw::Loop) {
let sp = self.prev_token.span;
- self.parse_loop_expr(None, self.prev_token.span).map_err(|mut err| {
+ self.parse_expr_loop(None, self.prev_token.span).map_err(|mut err| {
err.span_label(sp, "while parsing this `loop` expression");
err
})
} else if self.eat_keyword(kw::Match) {
let match_sp = self.prev_token.span;
- self.parse_match_expr().map_err(|mut err| {
+ self.parse_expr_match().map_err(|mut err| {
err.span_label(match_sp, "while parsing this `match` expression");
err
})
} else if self.eat_keyword(kw::Unsafe) {
let sp = self.prev_token.span;
- self.parse_block_expr(None, lo, BlockCheckMode::Unsafe(ast::UserProvided)).map_err(
+ self.parse_expr_block(None, lo, BlockCheckMode::Unsafe(ast::UserProvided)).map_err(
|mut err| {
err.span_label(sp, "while parsing this `unsafe` expression");
err
@@ -1375,17 +1381,17 @@ impl<'a> Parser<'a> {
self.expect_keyword(kw::Try)?;
self.parse_try_block(lo)
} else if self.eat_keyword(kw::Return) {
- self.parse_return_expr()
+ self.parse_expr_return()
} else if self.eat_keyword(kw::Continue) {
- self.parse_continue_expr(lo)
+ self.parse_expr_continue(lo)
} else if self.eat_keyword(kw::Break) {
- self.parse_break_expr()
+ self.parse_expr_break()
} else if self.eat_keyword(kw::Yield) {
- self.parse_yield_expr()
+ self.parse_expr_yield()
} else if self.is_do_yeet() {
- self.parse_yeet_expr()
+ self.parse_expr_yeet()
} else if self.check_keyword(kw::Let) {
- self.parse_let_expr()
+ self.parse_expr_let()
} else if self.eat_keyword(kw::Underscore) {
Ok(self.mk_expr(self.prev_token.span, ExprKind::Underscore))
} else if !self.unclosed_delims.is_empty() && self.check(&token::Semi) {
@@ -1408,19 +1414,19 @@ impl<'a> Parser<'a> {
// Check for `async {` and `async move {`.
self.parse_async_block()
} else {
- self.parse_closure_expr()
+ self.parse_expr_closure()
}
} else if self.eat_keyword(kw::Await) {
self.recover_incorrect_await_syntax(lo, self.prev_token.span)
} else {
- self.parse_lit_expr()
+ self.parse_expr_lit()
}
} else {
- self.parse_lit_expr()
+ self.parse_expr_lit()
}
}
- fn parse_lit_expr(&mut self) -> PResult<'a, P<Expr>> {
+ fn parse_expr_lit(&mut self) -> PResult<'a, P<Expr>> {
let lo = self.token.span;
match self.parse_opt_token_lit() {
Some((token_lit, _)) => {
@@ -1431,7 +1437,7 @@ impl<'a> Parser<'a> {
}
}
- fn parse_tuple_parens_expr(&mut self) -> PResult<'a, P<Expr>> {
+ fn parse_expr_tuple_parens(&mut self) -> PResult<'a, P<Expr>> {
let lo = self.token.span;
self.expect(&token::OpenDelim(Delimiter::Parenthesis))?;
let (es, trailing_comma) = match self.parse_seq_to_end(
@@ -1455,20 +1461,20 @@ impl<'a> Parser<'a> {
self.maybe_recover_from_bad_qpath(expr)
}
- fn parse_array_or_repeat_expr(&mut self, close_delim: Delimiter) -> PResult<'a, P<Expr>> {
+ fn parse_expr_array_or_repeat(&mut self, close_delim: Delimiter) -> PResult<'a, P<Expr>> {
let lo = self.token.span;
self.bump(); // `[` or other open delim
let close = &token::CloseDelim(close_delim);
let kind = if self.eat(close) {
// Empty vector
- ExprKind::Array(Vec::new())
+ ExprKind::Array(ThinVec::new())
} else {
// Non-empty vector
let first_expr = self.parse_expr()?;
if self.eat(&token::Semi) {
// Repeating array syntax: `[ 0; 512 ]`
- let count = self.parse_anon_const_expr()?;
+ let count = self.parse_expr_anon_const()?;
self.expect(close)?;
ExprKind::Repeat(first_expr, count)
} else if self.eat(&token::Comma) {
@@ -1480,14 +1486,14 @@ impl<'a> Parser<'a> {
} else {
// Vector with one element
self.expect(close)?;
- ExprKind::Array(vec![first_expr])
+ ExprKind::Array(thin_vec![first_expr])
}
};
let expr = self.mk_expr(lo.to(self.prev_token.span), kind);
self.maybe_recover_from_bad_qpath(expr)
}
- fn parse_path_start_expr(&mut self) -> PResult<'a, P<Expr>> {
+ fn parse_expr_path_start(&mut self) -> PResult<'a, P<Expr>> {
let (qself, path) = if self.eat_lt() {
let (qself, path) = self.parse_qpath(PathStyle::Expr)?;
(Some(qself), path)
@@ -1499,7 +1505,7 @@ impl<'a> Parser<'a> {
let (span, kind) = if self.eat(&token::Not) {
// MACRO INVOCATION expression
if qself.is_some() {
- self.sess.emit_err(MacroInvocationWithQualifiedPath(path.span));
+ self.sess.emit_err(errors::MacroInvocationWithQualifiedPath(path.span));
}
let lo = path.span;
let mac = P(MacCall {
@@ -1524,7 +1530,7 @@ impl<'a> Parser<'a> {
}
/// Parse `'label: $expr`. The label is already parsed.
- fn parse_labeled_expr(
+ fn parse_expr_labeled(
&mut self,
label_: Label,
mut consume_colon: bool,
@@ -1533,15 +1539,15 @@ impl<'a> Parser<'a> {
let label = Some(label_);
let ate_colon = self.eat(&token::Colon);
let expr = if self.eat_keyword(kw::While) {
- self.parse_while_expr(label, lo)
+ self.parse_expr_while(label, lo)
} else if self.eat_keyword(kw::For) {
- self.parse_for_expr(label, lo)
+ self.parse_expr_for(label, lo)
} else if self.eat_keyword(kw::Loop) {
- self.parse_loop_expr(label, lo)
+ self.parse_expr_loop(label, lo)
} else if self.check_noexpect(&token::OpenDelim(Delimiter::Brace))
|| self.token.is_whole_block()
{
- self.parse_block_expr(label, lo, BlockCheckMode::Default)
+ self.parse_expr_block(label, lo, BlockCheckMode::Default)
} else if !ate_colon
&& self.may_recover()
&& (matches!(self.token.kind, token::CloseDelim(_) | token::Comma)
@@ -1549,7 +1555,7 @@ impl<'a> Parser<'a> {
{
let (lit, _) =
self.recover_unclosed_char(label_.ident, Parser::mk_token_lit_char, |self_| {
- self_.sess.create_err(UnexpectedTokenAfterLabel {
+ self_.sess.create_err(errors::UnexpectedTokenAfterLabel {
span: self_.token.span,
remove_label: None,
enclose_in_block: None,
@@ -1561,7 +1567,7 @@ impl<'a> Parser<'a> {
&& (self.check_noexpect(&TokenKind::Comma) || self.check_noexpect(&TokenKind::Gt))
{
// We're probably inside of a `Path<'a>` that needs a turbofish
- self.sess.emit_err(UnexpectedTokenAfterLabel {
+ self.sess.emit_err(errors::UnexpectedTokenAfterLabel {
span: self.token.span,
remove_label: None,
enclose_in_block: None,
@@ -1569,7 +1575,7 @@ impl<'a> Parser<'a> {
consume_colon = false;
Ok(self.mk_expr_err(lo))
} else {
- let mut err = UnexpectedTokenAfterLabel {
+ let mut err = errors::UnexpectedTokenAfterLabel {
span: self.token.span,
remove_label: None,
enclose_in_block: None,
@@ -1605,14 +1611,14 @@ impl<'a> Parser<'a> {
return expr;
}
- err.enclose_in_block = Some(UnexpectedTokenAfterLabelSugg {
+ err.enclose_in_block = Some(errors::UnexpectedTokenAfterLabelSugg {
left: span.shrink_to_lo(),
right: span.shrink_to_hi(),
});
// Replace `'label: non_block_expr` with `'label: {non_block_expr}` in order to suppress future errors about `break 'label`.
let stmt = self.mk_stmt(span, StmtKind::Expr(expr));
- let blk = self.mk_block(vec![stmt], BlockCheckMode::Default, span);
+ let blk = self.mk_block(thin_vec![stmt], BlockCheckMode::Default, span);
self.mk_expr(span, ExprKind::Block(blk, label))
});
@@ -1621,7 +1627,7 @@ impl<'a> Parser<'a> {
}?;
if !ate_colon && consume_colon {
- self.sess.emit_err(RequireColonAfterLabeledExpression {
+ self.sess.emit_err(errors::RequireColonAfterLabeledExpression {
span: expr.span,
label: lo,
label_end: lo.shrink_to_hi(),
@@ -1670,7 +1676,7 @@ impl<'a> Parser<'a> {
self.bump(); // `catch`
let span = lo.to(self.prev_token.span);
- self.sess.emit_err(DoCatchSyntaxRemoved { span });
+ self.sess.emit_err(errors::DoCatchSyntaxRemoved { span });
self.parse_try_block(lo)
}
@@ -1681,7 +1687,7 @@ impl<'a> Parser<'a> {
}
/// Parse `"return" expr?`.
- fn parse_return_expr(&mut self) -> PResult<'a, P<Expr>> {
+ fn parse_expr_return(&mut self) -> PResult<'a, P<Expr>> {
let lo = self.prev_token.span;
let kind = ExprKind::Ret(self.parse_expr_opt()?);
let expr = self.mk_expr(lo.to(self.prev_token.span), kind);
@@ -1689,7 +1695,7 @@ impl<'a> Parser<'a> {
}
/// Parse `"do" "yeet" expr?`.
- fn parse_yeet_expr(&mut self) -> PResult<'a, P<Expr>> {
+ fn parse_expr_yeet(&mut self) -> PResult<'a, P<Expr>> {
let lo = self.token.span;
self.bump(); // `do`
@@ -1711,16 +1717,16 @@ impl<'a> Parser<'a> {
/// `break 'lbl: loop {}`); a labeled break with an unlabeled loop as its value
/// expression only gets a warning for compatibility reasons; and a labeled break
/// with a labeled loop does not even get a warning because there is no ambiguity.
- fn parse_break_expr(&mut self) -> PResult<'a, P<Expr>> {
+ fn parse_expr_break(&mut self) -> PResult<'a, P<Expr>> {
let lo = self.prev_token.span;
let mut label = self.eat_label();
let kind = if self.token == token::Colon && let Some(label) = label.take() {
// The value expression can be a labeled loop, see issue #86948, e.g.:
// `loop { break 'label: loop { break 'label 42; }; }`
- let lexpr = self.parse_labeled_expr(label, true)?;
- self.sess.emit_err(LabeledLoopInBreak {
+ let lexpr = self.parse_expr_labeled(label, true)?;
+ self.sess.emit_err(errors::LabeledLoopInBreak {
span: lexpr.span,
- sub: WrapExpressionInParentheses {
+ sub: errors::WrapExpressionInParentheses {
left: lexpr.span.shrink_to_lo(),
right: lexpr.span.shrink_to_hi(),
},
@@ -1770,7 +1776,7 @@ impl<'a> Parser<'a> {
}
/// Parse `"continue" label?`.
- fn parse_continue_expr(&mut self, lo: Span) -> PResult<'a, P<Expr>> {
+ fn parse_expr_continue(&mut self, lo: Span) -> PResult<'a, P<Expr>> {
let mut label = self.eat_label();
// Recover `continue label` -> `continue 'label`
@@ -1787,7 +1793,7 @@ impl<'a> Parser<'a> {
}
/// Parse `"yield" expr?`.
- fn parse_yield_expr(&mut self) -> PResult<'a, P<Expr>> {
+ fn parse_expr_yield(&mut self) -> PResult<'a, P<Expr>> {
let lo = self.prev_token.span;
let kind = ExprKind::Yield(self.parse_expr_opt()?);
let span = lo.to(self.prev_token.span);
@@ -1840,7 +1846,7 @@ impl<'a> Parser<'a> {
};
if let Some(expr) = expr {
if matches!(expr.kind, ExprKind::Err) {
- let mut err = InvalidInterpolatedExpression { span: self.token.span }
+ let mut err = errors::InvalidInterpolatedExpression { span: self.token.span }
.into_diagnostic(&self.sess.span_diagnostic);
err.downgrade_to_delayed_bug();
return Err(err);
@@ -1882,7 +1888,16 @@ impl<'a> Parser<'a> {
if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) =
next_token.kind
{
- if self.token.span.hi() == next_token.span.lo() {
+ // If this integer looks like a float, then recover as such.
+ //
+ // We will never encounter the exponent part of a floating
+ // point literal here, since there's no use of the exponent
+ // syntax that also constitutes a valid integer, so we need
+ // not check for that.
+ if suffix.map_or(true, |s| s == sym::f32 || s == sym::f64)
+ && symbol.as_str().chars().all(|c| c.is_numeric() || c == '_')
+ && self.token.span.hi() == next_token.span.lo()
+ {
let s = String::from("0.") + symbol.as_str();
let kind = TokenKind::lit(token::Float, Symbol::intern(&s), suffix);
return Some(Token::new(kind, self.token.span.to(next_token.span)));
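// Worked example (assumed input): for `.5`, the `.` and the immediately adjacent
// integer `5` are merged into the float token `0.5`, and the error emitted below
// suggests that corrected literal; a candidate with a non-float suffix (e.g. `.5u32`)
// or with non-decimal digits is now left untouched.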
@@ -1892,7 +1907,7 @@ impl<'a> Parser<'a> {
});
if let Some(token) = &recovered {
self.bump();
- self.sess.emit_err(FloatLiteralRequiresIntegerPart {
+ self.sess.emit_err(errors::FloatLiteralRequiresIntegerPart {
span: token.span,
correct: pprust::token_to_string(token).into_owned(),
});
@@ -1953,13 +1968,17 @@ impl<'a> Parser<'a> {
if [sym::i32, sym::u32, sym::isize, sym::usize].contains(&suffix) {
// #59553: warn instead of reject out of hand to allow the fix to percolate
// through the ecosystem when people fix their macros
- self.sess.emit_warning(InvalidLiteralSuffixOnTupleIndex {
+ self.sess.emit_warning(errors::InvalidLiteralSuffixOnTupleIndex {
span,
suffix,
exception: Some(()),
});
} else {
- self.sess.emit_err(InvalidLiteralSuffixOnTupleIndex { span, suffix, exception: None });
+ self.sess.emit_err(errors::InvalidLiteralSuffixOnTupleIndex {
+ span,
+ suffix,
+ exception: None,
+ });
}
}
@@ -1991,11 +2010,11 @@ impl<'a> Parser<'a> {
/// expression.
fn maybe_suggest_brackets_instead_of_braces(&mut self, lo: Span) -> Option<P<Expr>> {
let mut snapshot = self.create_snapshot_for_diagnostic();
- match snapshot.parse_array_or_repeat_expr(Delimiter::Brace) {
+ match snapshot.parse_expr_array_or_repeat(Delimiter::Brace) {
Ok(arr) => {
- self.sess.emit_err(ArrayBracketsInsteadOfSpaces {
+ self.sess.emit_err(errors::ArrayBracketsInsteadOfSpaces {
span: arr.span,
- sub: ArrayBracketsInsteadOfSpacesSugg {
+ sub: errors::ArrayBracketsInsteadOfSpacesSugg {
left: lo,
right: snapshot.prev_token.span,
},
@@ -2041,7 +2060,7 @@ impl<'a> Parser<'a> {
.span_to_snippet(snapshot.token.span)
.map_or(false, |snippet| snippet == "]") =>
{
- return Err(MissingSemicolonBeforeArray {
+ return Err(errors::MissingSemicolonBeforeArray {
open_delim: open_delim_span,
semicolon: prev_span.shrink_to_hi(),
}.into_diagnostic(&self.sess.span_diagnostic));
@@ -2054,7 +2073,7 @@ impl<'a> Parser<'a> {
}
/// Parses a block or unsafe block.
- pub(super) fn parse_block_expr(
+ pub(super) fn parse_expr_block(
&mut self,
opt_label: Option<Label>,
lo: Span,
@@ -2067,7 +2086,7 @@ impl<'a> Parser<'a> {
}
if self.token.is_whole_block() {
- self.sess.emit_err(InvalidBlockMacroSegment {
+ self.sess.emit_err(errors::InvalidBlockMacroSegment {
span: self.token.span,
context: lo.to(self.token.span),
});
@@ -2084,7 +2103,7 @@ impl<'a> Parser<'a> {
}
/// Parses a closure expression (e.g., `move |args| expr`).
- fn parse_closure_expr(&mut self) -> PResult<'a, P<Expr>> {
+ fn parse_expr_closure(&mut self) -> PResult<'a, P<Expr>> {
let lo = self.token.span;
let binder = if self.check_keyword(kw::For) {
@@ -2094,12 +2113,12 @@ impl<'a> Parser<'a> {
self.sess.gated_spans.gate(sym::closure_lifetime_binder, span);
- ClosureBinder::For { span, generic_params: P::from_vec(lifetime_defs) }
+ ClosureBinder::For { span, generic_params: lifetime_defs }
} else {
ClosureBinder::NotPresent
};
- let constness = self.parse_constness(Case::Sensitive);
+ let constness = self.parse_closure_constness(Case::Sensitive);
let movability =
if self.eat_keyword(kw::Static) { Movability::Static } else { Movability::Movable };
@@ -2121,7 +2140,7 @@ impl<'a> Parser<'a> {
_ => {
// If an explicit return type is given, require a block to appear (RFC 968).
let body_lo = self.token.span;
- self.parse_block_expr(None, body_lo, BlockCheckMode::Default)?
+ self.parse_expr_block(None, body_lo, BlockCheckMode::Default)?
}
};
@@ -2131,7 +2150,7 @@ impl<'a> Parser<'a> {
}
if self.token.kind == TokenKind::Semi
- && matches!(self.token_cursor.frame.delim_sp, Some((Delimiter::Parenthesis, _)))
+ && matches!(self.token_cursor.stack.last(), Some((_, Delimiter::Parenthesis, _)))
&& self.may_recover()
{
// It is likely that the closure body is a block but where the
@@ -2171,7 +2190,7 @@ impl<'a> Parser<'a> {
// Check for `move async` and recover
if self.check_keyword(kw::Async) {
let move_async_span = self.token.span.with_lo(self.prev_token.span.data().lo);
- Err(AsyncMoveOrderIncorrect { span: move_async_span }
+ Err(errors::AsyncMoveOrderIncorrect { span: move_async_span }
.into_diagnostic(&self.sess.span_diagnostic))
} else {
Ok(CaptureBy::Value)
@@ -2186,7 +2205,7 @@ impl<'a> Parser<'a> {
let arg_start = self.token.span.lo();
let inputs = if self.eat(&token::OrOr) {
- Vec::new()
+ ThinVec::new()
} else {
self.expect(&token::BinOp(token::Or))?;
let args = self
@@ -2212,7 +2231,7 @@ impl<'a> Parser<'a> {
let lo = self.token.span;
let attrs = self.parse_outer_attributes()?;
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
- let pat = this.parse_pat_no_top_alt(PARAM_EXPECTED)?;
+ let pat = this.parse_pat_no_top_alt(Some(Expected::ParameterName))?;
let ty = if this.eat(&token::Colon) {
this.parse_ty()?
} else {
@@ -2234,9 +2253,9 @@ impl<'a> Parser<'a> {
}
/// Parses an `if` expression (`if` token already eaten).
- fn parse_if_expr(&mut self) -> PResult<'a, P<Expr>> {
+ fn parse_expr_if(&mut self) -> PResult<'a, P<Expr>> {
let lo = self.prev_token.span;
- let cond = self.parse_cond_expr()?;
+ let cond = self.parse_expr_cond()?;
self.parse_if_after_cond(lo, cond)
}
@@ -2249,16 +2268,17 @@ impl<'a> Parser<'a> {
let block = match &mut cond.kind {
ExprKind::Binary(Spanned { span: binop_span, .. }, _, right)
if let ExprKind::Block(_, None) = right.kind => {
- self.sess.emit_err(IfExpressionMissingThenBlock {
+ self.sess.emit_err(errors::IfExpressionMissingThenBlock {
if_span: lo,
- sub: IfExpressionMissingThenBlockSub::UnfinishedCondition(
- cond_span.shrink_to_lo().to(*binop_span)
- ),
+ missing_then_block_sub:
+ errors::IfExpressionMissingThenBlockSub::UnfinishedCondition(cond_span.shrink_to_lo().to(*binop_span)),
+ let_else_sub: None,
+
});
std::mem::replace(right, this.mk_expr_err(binop_span.shrink_to_hi()))
},
ExprKind::Block(_, None) => {
- self.sess.emit_err(IfExpressionMissingCondition {
+ self.sess.emit_err(errors::IfExpressionMissingCondition {
if_span: lo.shrink_to_hi(),
block_span: self.sess.source_map().start_point(cond_span),
});
@@ -2279,9 +2299,15 @@ impl<'a> Parser<'a> {
if let Some(block) = recover_block_from_condition(self) {
block
} else {
- self.sess.emit_err(IfExpressionMissingThenBlock {
+ let let_else_sub = matches!(cond.kind, ExprKind::Let(..))
+ .then(|| errors::IfExpressionLetSomeSub { if_span: lo.until(cond_span) });
+
+ self.sess.emit_err(errors::IfExpressionMissingThenBlock {
if_span: lo,
- sub: IfExpressionMissingThenBlockSub::AddThenBlock(cond_span.shrink_to_hi()),
+ missing_then_block_sub: errors::IfExpressionMissingThenBlockSub::AddThenBlock(
+ cond_span.shrink_to_hi(),
+ ),
+ let_else_sub,
});
self.mk_block_err(cond_span.shrink_to_hi())
}
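// Assumed example: `if let Some(x) = opt else { return };` has no then-block; since
// the condition is a `let`, `let_else_sub` additionally points at the `if`, presumably
// to hint at the `let ... else` form.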
@@ -2307,12 +2333,12 @@ impl<'a> Parser<'a> {
self.error_on_if_block_attrs(lo, false, block.span, attrs);
block
};
- let els = if self.eat_keyword(kw::Else) { Some(self.parse_else_expr()?) } else { None };
+ let els = if self.eat_keyword(kw::Else) { Some(self.parse_expr_else()?) } else { None };
Ok(self.mk_expr(lo.to(self.prev_token.span), ExprKind::If(cond, thn, els)))
}
/// Parses the condition of a `if` or `while` expression.
- fn parse_cond_expr(&mut self) -> PResult<'a, P<Expr>> {
+ fn parse_expr_cond(&mut self) -> PResult<'a, P<Expr>> {
let cond =
self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL | Restrictions::ALLOW_LET, None)?;
@@ -2325,7 +2351,7 @@ impl<'a> Parser<'a> {
}
/// Parses a `let $pat = $expr` pseudo-expression.
- fn parse_let_expr(&mut self) -> PResult<'a, P<Expr>> {
+ fn parse_expr_let(&mut self) -> PResult<'a, P<Expr>> {
// This is an *approximate* heuristic that detects if `let` chains are
// being parsed in the right position. It's approximate because it
// doesn't deny all invalid `let` expressions, just completely wrong usages.
@@ -2334,7 +2360,7 @@ impl<'a> Parser<'a> {
TokenKind::AndAnd | TokenKind::Ident(kw::If, _) | TokenKind::Ident(kw::While, _)
);
if !self.restrictions.contains(Restrictions::ALLOW_LET) || not_in_chain {
- self.sess.emit_err(ExpectedExpressionFoundLet { span: self.token.span });
+ self.sess.emit_err(errors::ExpectedExpressionFoundLet { span: self.token.span });
}
self.bump(); // Eat `let` token
@@ -2346,7 +2372,7 @@ impl<'a> Parser<'a> {
CommaRecoveryMode::LikelyTuple,
)?;
if self.token == token::EqEq {
- self.sess.emit_err(ExpectedEqForLetExpr {
+ self.sess.emit_err(errors::ExpectedEqForLetExpr {
span: self.token.span,
sugg_span: self.token.span,
});
@@ -2355,7 +2381,7 @@ impl<'a> Parser<'a> {
self.expect(&token::Eq)?;
}
let expr = self.with_res(self.restrictions | Restrictions::NO_STRUCT_LITERAL, |this| {
- this.parse_assoc_expr_with(1 + prec_let_scrutinee_needs_par(), None.into())
+ this.parse_expr_assoc_with(1 + prec_let_scrutinee_needs_par(), None.into())
})?;
let span = lo.to(expr.span);
self.sess.gated_spans.gate(sym::let_chains, span);
@@ -2363,11 +2389,11 @@ impl<'a> Parser<'a> {
}
/// Parses an `else { ... }` expression (`else` token already eaten).
- fn parse_else_expr(&mut self) -> PResult<'a, P<Expr>> {
+ fn parse_expr_else(&mut self) -> PResult<'a, P<Expr>> {
let else_span = self.prev_token.span; // `else`
let attrs = self.parse_outer_attributes()?; // For recovery.
let expr = if self.eat_keyword(kw::If) {
- self.parse_if_expr()?
+ self.parse_expr_if()?
} else if self.check(&TokenKind::OpenDelim(Delimiter::Brace)) {
self.parse_simple_block()?
} else {
@@ -2381,7 +2407,7 @@ impl<'a> Parser<'a> {
if self.check(&TokenKind::OpenDelim(Delimiter::Brace))
&& classify::expr_requires_semi_to_be_stmt(&cond) =>
{
- self.sess.emit_err(ExpectedElseBlock {
+ self.sess.emit_err(errors::ExpectedElseBlock {
first_tok_span,
first_tok,
else_span,
@@ -2421,7 +2447,7 @@ impl<'a> Parser<'a> {
[x0 @ xn] | [x0, .., xn] => (x0.span.to(xn.span), xn.span),
};
let ctx = if is_ctx_else { "else" } else { "if" };
- self.sess.emit_err(OuterAttributeNotAllowedOnIfElse {
+ self.sess.emit_err(errors::OuterAttributeNotAllowedOnIfElse {
last,
branch_span,
ctx_span,
@@ -2434,14 +2460,14 @@ impl<'a> Parser<'a> {
if let ExprKind::Binary(Spanned { span: binop_span, node: binop}, _, right) = &cond.kind &&
let BinOpKind::And = binop &&
let ExprKind::If(cond, ..) = &right.kind {
- Err(self.sess.create_err(UnexpectedIfWithIf(binop_span.shrink_to_hi().to(cond.span.shrink_to_lo()))))
+ Err(self.sess.create_err(errors::UnexpectedIfWithIf(binop_span.shrink_to_hi().to(cond.span.shrink_to_lo()))))
} else {
Ok(())
}
}
/// Parses `for <src_pat> in <src_expr> <src_loop_block>` (`for` token already eaten).
- fn parse_for_expr(&mut self, opt_label: Option<Label>, lo: Span) -> PResult<'a, P<Expr>> {
+ fn parse_expr_for(&mut self, opt_label: Option<Label>, lo: Span) -> PResult<'a, P<Expr>> {
// Record whether we are about to parse `for (`.
// This is used below for recovery in case of `for ( $stuff ) $block`
// in which case we will suggest `for $stuff $block`.
@@ -2464,28 +2490,61 @@ impl<'a> Parser<'a> {
let pat = self.recover_parens_around_for_head(pat, begin_paren);
+ // Recover from missing expression in `for` loop
+ if matches!(expr.kind, ExprKind::Block(..))
+ && !matches!(self.token.kind, token::OpenDelim(token::Delimiter::Brace))
+ && self.may_recover()
+ {
+ self.sess
+ .emit_err(errors::MissingExpressionInForLoop { span: expr.span.shrink_to_lo() });
+ let err_expr = self.mk_expr(expr.span, ExprKind::Err);
+ let block = self.mk_block(thin_vec![], BlockCheckMode::Default, self.prev_token.span);
+ return Ok(self.mk_expr(
+ lo.to(self.prev_token.span),
+ ExprKind::ForLoop(pat, err_expr, block, opt_label),
+ ));
+ }
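+ // Assumed example: in `for x in {}` the `{}` is consumed as the iterator
+ // expression and no body follows, so we report the missing iterable, substitute
+ // an error expression for it, and use an empty block as the body.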
+
let (attrs, loop_block) = self.parse_inner_attrs_and_block()?;
let kind = ExprKind::ForLoop(pat, expr, loop_block, opt_label);
+
+ self.recover_loop_else("for", lo)?;
+
Ok(self.mk_expr_with_attrs(lo.to(self.prev_token.span), kind, attrs))
}
+ /// Recovers from an `else` clause after a loop (`for...else`, `while...else`)
+ fn recover_loop_else(&mut self, loop_kind: &'static str, loop_kw: Span) -> PResult<'a, ()> {
+ if self.token.is_keyword(kw::Else) && self.may_recover() {
+ let else_span = self.token.span;
+ self.bump();
+ let else_clause = self.parse_expr_else()?;
+ self.sess.emit_err(errors::LoopElseNotSupported {
+ span: else_span.to(else_clause.span),
+ loop_kind,
+ loop_kw,
+ });
+ }
+ Ok(())
+ }
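+ // Assumed example: `for x in 0..3 {} else { println!("done"); }` -- the `else`
+ // clause is parsed only so it can be reported, since Rust has no
+ // `for...else`/`while...else`/`loop...else` construct.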
+
fn error_missing_in_for_loop(&mut self) {
let (span, sub): (_, fn(_) -> _) = if self.token.is_ident_named(sym::of) {
// Possibly using JS syntax (#75311).
let span = self.token.span;
self.bump();
- (span, MissingInInForLoopSub::InNotOf)
+ (span, errors::MissingInInForLoopSub::InNotOf)
} else {
- (self.prev_token.span.between(self.token.span), MissingInInForLoopSub::AddIn)
+ (self.prev_token.span.between(self.token.span), errors::MissingInInForLoopSub::AddIn)
};
- self.sess.emit_err(MissingInInForLoop { span, sub: sub(span) });
+ self.sess.emit_err(errors::MissingInInForLoop { span, sub: sub(span) });
}
/// Parses a `while` or `while let` expression (`while` token already eaten).
- fn parse_while_expr(&mut self, opt_label: Option<Label>, lo: Span) -> PResult<'a, P<Expr>> {
- let cond = self.parse_cond_expr().map_err(|mut err| {
+ fn parse_expr_while(&mut self, opt_label: Option<Label>, lo: Span) -> PResult<'a, P<Expr>> {
+ let cond = self.parse_expr_cond().map_err(|mut err| {
err.span_label(lo, "while parsing the condition of this `while` expression");
err
})?;
@@ -2494,6 +2553,9 @@ impl<'a> Parser<'a> {
err.span_label(cond.span, "this `while` condition successfully parsed");
err
})?;
+
+ self.recover_loop_else("while", lo)?;
+
Ok(self.mk_expr_with_attrs(
lo.to(self.prev_token.span),
ExprKind::While(cond, body, opt_label),
@@ -2502,9 +2564,10 @@ impl<'a> Parser<'a> {
}
/// Parses `loop { ... }` (`loop` token already eaten).
- fn parse_loop_expr(&mut self, opt_label: Option<Label>, lo: Span) -> PResult<'a, P<Expr>> {
+ fn parse_expr_loop(&mut self, opt_label: Option<Label>, lo: Span) -> PResult<'a, P<Expr>> {
let loop_span = self.prev_token.span;
let (attrs, body) = self.parse_inner_attrs_and_block()?;
+ self.recover_loop_else("loop", lo)?;
Ok(self.mk_expr_with_attrs(
lo.to(self.prev_token.span),
ExprKind::Loop(body, opt_label, loop_span),
@@ -2520,7 +2583,7 @@ impl<'a> Parser<'a> {
}
/// Parses a `match ... { ... }` expression (`match` token already eaten).
- fn parse_match_expr(&mut self) -> PResult<'a, P<Expr>> {
+ fn parse_expr_match(&mut self) -> PResult<'a, P<Expr>> {
let match_span = self.prev_token.span;
let lo = self.prev_token.span;
let scrutinee = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
@@ -2542,7 +2605,7 @@ impl<'a> Parser<'a> {
}
let attrs = self.parse_inner_attributes()?;
- let mut arms: Vec<Arm> = Vec::new();
+ let mut arms = ThinVec::new();
while self.token != token::CloseDelim(Delimiter::Brace) {
match self.parse_arm() {
Ok(arm) => arms.push(arm),
@@ -2584,17 +2647,17 @@ impl<'a> Parser<'a> {
let err = |this: &Parser<'_>, stmts: Vec<ast::Stmt>| {
let span = stmts[0].span.to(stmts[stmts.len() - 1].span);
- this.sess.emit_err(MatchArmBodyWithoutBraces {
+ this.sess.emit_err(errors::MatchArmBodyWithoutBraces {
statements: span,
arrow: arrow_span,
num_statements: stmts.len(),
sub: if stmts.len() > 1 {
- MatchArmBodyWithoutBracesSugg::AddBraces {
+ errors::MatchArmBodyWithoutBracesSugg::AddBraces {
left: span.shrink_to_lo(),
right: span.shrink_to_hi(),
}
} else {
- MatchArmBodyWithoutBracesSugg::UseComma { semicolon: semi_sp }
+ errors::MatchArmBodyWithoutBracesSugg::UseComma { semicolon: semi_sp }
},
});
this.mk_expr_err(span)
@@ -2700,6 +2763,14 @@ impl<'a> Parser<'a> {
);
err.emit();
this.bump();
+ } else if matches!(
+ (&this.prev_token.kind, &this.token.kind),
+ (token::DotDotEq, token::Gt)
+ ) {
+ // `error_inclusive_range_match_arrow` handles cases like `0..=> {}`,
+ // so we suppress the error here
+ err.delay_as_bug();
+ this.bump();
} else {
return Err(err);
}
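// Assumed example: in `match n { 0..=> {} }` the tokenizer produces `..=` followed by
// a lone `>`, so the range pattern error already covers the problem and the parse
// error here is downgraded to a delayed bug rather than shown.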
@@ -2777,7 +2848,7 @@ impl<'a> Parser<'a> {
.is_ok();
if pattern_follows && snapshot.check(&TokenKind::FatArrow) {
err.cancel();
- this.sess.emit_err(MissingCommaAfterMatchArm {
+ this.sess.emit_err(errors::MissingCommaAfterMatchArm {
span: hi.shrink_to_hi(),
});
return Ok(true);
@@ -2809,7 +2880,7 @@ impl<'a> Parser<'a> {
fn parse_try_block(&mut self, span_lo: Span) -> PResult<'a, P<Expr>> {
let (attrs, body) = self.parse_inner_attrs_and_block()?;
if self.eat_keyword(kw::Catch) {
- Err(CatchAfterTry { span: self.prev_token.span }
+ Err(errors::CatchAfterTry { span: self.prev_token.span }
.into_diagnostic(&self.sess.span_diagnostic))
} else {
let span = span_lo.to(body.span);
@@ -2882,12 +2953,12 @@ impl<'a> Parser<'a> {
if let Err(err) = self.expect(&token::OpenDelim(Delimiter::Brace)) {
return Some(Err(err));
}
- let expr = self.parse_struct_expr(qself.clone(), path.clone(), true);
+ let expr = self.parse_expr_struct(qself.clone(), path.clone(), true);
if let (Ok(expr), false) = (&expr, struct_allowed) {
// This is a struct literal, but we can't accept one here.
- self.sess.emit_err(StructLiteralNotAllowedHere {
+ self.sess.emit_err(errors::StructLiteralNotAllowedHere {
span: expr.span,
- sub: StructLiteralNotAllowedHereSugg {
+ sub: errors::StructLiteralNotAllowedHereSugg {
left: path.span.shrink_to_lo(),
right: expr.span.shrink_to_hi(),
},
@@ -2903,15 +2974,15 @@ impl<'a> Parser<'a> {
pth: ast::Path,
recover: bool,
close_delim: Delimiter,
- ) -> PResult<'a, (Vec<ExprField>, ast::StructRest, bool)> {
- let mut fields = Vec::new();
+ ) -> PResult<'a, (ThinVec<ExprField>, ast::StructRest, bool)> {
+ let mut fields = ThinVec::new();
let mut base = ast::StructRest::None;
let mut recover_async = false;
let mut async_block_err = |e: &mut Diagnostic, span: Span| {
recover_async = true;
- e.span_label(span, "`async` blocks are only allowed in Rust 2018 or later");
- e.help_use_latest_edition();
+ errors::AsyncBlockIn2015 { span }.add_to_diagnostic(e);
+ errors::HelpUseLatestEdition::new().add_to_diagnostic(e);
};
while self.token != token::CloseDelim(close_delim) {
@@ -3011,7 +3082,7 @@ impl<'a> Parser<'a> {
}
/// Precondition: already parsed the '{'.
- pub(super) fn parse_struct_expr(
+ pub(super) fn parse_expr_struct(
&mut self,
qself: Option<P<ast::QSelf>>,
pth: ast::Path,
@@ -3055,7 +3126,7 @@ impl<'a> Parser<'a> {
if self.token != token::Comma {
return;
}
- self.sess.emit_err(CommaAfterBaseStruct {
+ self.sess.emit_err(errors::CommaAfterBaseStruct {
span: span.to(self.prev_token.span),
comma: self.token.span,
});
@@ -3068,7 +3139,7 @@ impl<'a> Parser<'a> {
{
// recover from typo of `...`, suggest `..`
let span = self.prev_token.span;
- self.sess.emit_err(MissingDotDot { token_span: span, sugg_span: span });
+ self.sess.emit_err(errors::MissingDotDot { token_span: span, sugg_span: span });
return true;
}
false
@@ -3136,18 +3207,18 @@ impl<'a> Parser<'a> {
return;
}
- self.sess.emit_err(EqFieldInit {
+ self.sess.emit_err(errors::EqFieldInit {
span: self.token.span,
eq: field_name.span.shrink_to_hi().to(self.token.span),
});
}
fn err_dotdotdot_syntax(&self, span: Span) {
- self.sess.emit_err(DotDotDot { span });
+ self.sess.emit_err(errors::DotDotDot { span });
}
fn err_larrow_operator(&self, span: Span) {
- self.sess.emit_err(LeftArrowOperator { span });
+ self.sess.emit_err(errors::LeftArrowOperator { span });
}
fn mk_assign_op(&self, binop: BinOp, lhs: P<Expr>, rhs: P<Expr>) -> ExprKind {
@@ -3161,7 +3232,7 @@ impl<'a> Parser<'a> {
limits: RangeLimits,
) -> ExprKind {
if end.is_none() && limits == RangeLimits::Closed {
- self.inclusive_range_with_incorrect_end(self.prev_token.span);
+ self.inclusive_range_with_incorrect_end();
ExprKind::Err
} else {
ExprKind::Range(start, end, limits)
@@ -3180,7 +3251,7 @@ impl<'a> Parser<'a> {
ExprKind::Index(expr, idx)
}
- fn mk_call(&self, f: P<Expr>, args: Vec<P<Expr>>) -> ExprKind {
+ fn mk_call(&self, f: P<Expr>, args: ThinVec<P<Expr>>) -> ExprKind {
ExprKind::Call(f, args)
}
diff --git a/compiler/rustc_parse/src/parser/generics.rs b/compiler/rustc_parse/src/parser/generics.rs
index 8ba811715..8d0f168e0 100644
--- a/compiler/rustc_parse/src/parser/generics.rs
+++ b/compiler/rustc_parse/src/parser/generics.rs
@@ -1,4 +1,8 @@
-use crate::errors::{WhereClauseBeforeTupleStructBody, WhereClauseBeforeTupleStructBodySugg};
+use crate::errors::{
+ MultipleWhereClauses, UnexpectedDefaultValueForLifetimeInGenericParameters,
+ UnexpectedSelfInGenericParameters, WhereClauseBeforeTupleStructBody,
+ WhereClauseBeforeTupleStructBodySugg,
+};
use super::{ForceCollect, Parser, TrailingToken};
@@ -10,10 +14,11 @@ use rustc_ast::{
use rustc_errors::{Applicability, PResult};
use rustc_span::symbol::{kw, Ident};
use rustc_span::Span;
+use thin_vec::ThinVec;
enum PredicateOrStructBody {
Predicate(ast::WherePredicate),
- StructBody(Vec<ast::FieldDef>),
+ StructBody(ThinVec<ast::FieldDef>),
}
impl<'a> Parser<'a> {
@@ -117,8 +122,8 @@ impl<'a> Parser<'a> {
/// Parses a (possibly empty) list of lifetime and type parameters, possibly including
/// a trailing comma and erroneous trailing attributes.
- pub(super) fn parse_generic_params(&mut self) -> PResult<'a, Vec<ast::GenericParam>> {
- let mut params = Vec::new();
+ pub(super) fn parse_generic_params(&mut self) -> PResult<'a, ThinVec<ast::GenericParam>> {
+ let mut params = ThinVec::new();
let mut done = false;
while !done {
let attrs = self.parse_outer_attributes()?;
@@ -127,12 +132,9 @@ impl<'a> Parser<'a> {
if this.eat_keyword_noexpect(kw::SelfUpper) {
// `Self` as a generic param is invalid. Here we emit the diagnostic and continue parsing
// as if `Self` never existed.
- this.struct_span_err(
- this.prev_token.span,
- "unexpected keyword `Self` in generic parameters",
- )
- .note("you cannot use `Self` as a generic parameter because it is reserved for associated items")
- .emit();
+ this.sess.emit_err(UnexpectedSelfInGenericParameters {
+ span: this.prev_token.span,
+ });
this.eat(&token::Comma);
}
@@ -145,6 +147,20 @@ impl<'a> Parser<'a> {
} else {
(None, Vec::new())
};
+
+ if this.check_noexpect(&token::Eq)
+ && this.look_ahead(1, |t| t.is_lifetime())
+ {
+ let lo = this.token.span;
+ // Parse `= 'lifetime`.
+ this.bump(); // `=`
+ this.bump(); // `'lifetime`
+ let span = lo.to(this.prev_token.span);
+ this.sess.emit_err(
+ UnexpectedDefaultValueForLifetimeInGenericParameters { span },
+ );
+ }
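+ // Assumed example: `struct S<'a = 'static>(&'a u8);` -- the `= 'static` is
+ // consumed here solely so a dedicated error can cover the whole `= 'lifetime`
+ // span instead of a generic parse failure.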
+
Some(ast::GenericParam {
ident: lifetime.ident,
id: lifetime.id,
@@ -236,13 +252,13 @@ impl<'a> Parser<'a> {
self.expect_gt()?;
(params, span_lo.to(self.prev_token.span))
} else {
- (vec![], self.prev_token.span.shrink_to_hi())
+ (ThinVec::new(), self.prev_token.span.shrink_to_hi())
};
Ok(ast::Generics {
params,
where_clause: WhereClause {
has_where_token: false,
- predicates: Vec::new(),
+ predicates: ThinVec::new(),
span: self.prev_token.span.shrink_to_hi(),
},
span,
@@ -262,17 +278,17 @@ impl<'a> Parser<'a> {
&mut self,
struct_name: Ident,
body_insertion_point: Span,
- ) -> PResult<'a, (WhereClause, Option<Vec<ast::FieldDef>>)> {
+ ) -> PResult<'a, (WhereClause, Option<ThinVec<ast::FieldDef>>)> {
self.parse_where_clause_common(Some((struct_name, body_insertion_point)))
}
fn parse_where_clause_common(
&mut self,
struct_: Option<(Ident, Span)>,
- ) -> PResult<'a, (WhereClause, Option<Vec<ast::FieldDef>>)> {
+ ) -> PResult<'a, (WhereClause, Option<ThinVec<ast::FieldDef>>)> {
let mut where_clause = WhereClause {
has_where_token: false,
- predicates: Vec::new(),
+ predicates: ThinVec::new(),
span: self.prev_token.span.shrink_to_hi(),
};
let mut tuple_struct_body = None;
@@ -329,16 +345,11 @@ impl<'a> Parser<'a> {
let ate_comma = self.eat(&token::Comma);
if self.eat_keyword_noexpect(kw::Where) {
- let msg = "cannot define duplicate `where` clauses on an item";
- let mut err = self.struct_span_err(self.token.span, msg);
- err.span_label(pred_lo, "previous `where` clause starts here");
- err.span_suggestion_verbose(
- prev_token.shrink_to_hi().to(self.prev_token.span),
- "consider joining the two `where` clauses into one",
- ",",
- Applicability::MaybeIncorrect,
- );
- err.emit();
+ self.sess.emit_err(MultipleWhereClauses {
+ span: self.token.span,
+ previous: pred_lo,
+ between: prev_token.shrink_to_hi().to(self.prev_token.span),
+ });
} else if !ate_comma {
break;
}
diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs
index 53680a82b..9d9ae154a 100644
--- a/compiler/rustc_parse/src/parser/item.rs
+++ b/compiler/rustc_parse/src/parser/item.rs
@@ -1,9 +1,8 @@
-use crate::errors::{DocCommentDoesNotDocumentAnything, UseEmptyBlockNotSemi};
+use crate::errors;
use super::diagnostics::{dummy_arg, ConsumeClosingDelim};
use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
use super::{AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, TrailingToken};
-use crate::errors::FnTypoWithImpl;
use rustc_ast::ast::*;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, TokenKind};
@@ -16,14 +15,18 @@ use rustc_ast::{EnumDef, FieldDef, Generics, TraitRef, Ty, TyKind, Variant, Vari
use rustc_ast::{FnHeader, ForeignItem, Path, PathSegment, Visibility, VisibilityKind};
use rustc_ast::{MacCall, MacDelimiter};
use rustc_ast_pretty::pprust;
-use rustc_errors::{struct_span_err, Applicability, IntoDiagnostic, PResult, StashKey};
+use rustc_errors::{
+ struct_span_err, Applicability, DiagnosticBuilder, ErrorGuaranteed, IntoDiagnostic, PResult,
+ StashKey,
+};
+use rustc_span::edit_distance::edit_distance;
use rustc_span::edition::Edition;
-use rustc_span::lev_distance::lev_distance;
use rustc_span::source_map::{self, Span};
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use rustc_span::DUMMY_SP;
+use std::fmt::Write;
use std::mem;
-use thin_vec::ThinVec;
+use thin_vec::{thin_vec, ThinVec};
impl<'a> Parser<'a> {
/// Parses a source module as a crate. This is the main entry point for the parser.
@@ -53,12 +56,12 @@ impl<'a> Parser<'a> {
pub fn parse_mod(
&mut self,
term: &TokenKind,
- ) -> PResult<'a, (AttrVec, Vec<P<Item>>, ModSpans)> {
+ ) -> PResult<'a, (AttrVec, ThinVec<P<Item>>, ModSpans)> {
let lo = self.token.span;
let attrs = self.parse_inner_attributes()?;
let post_attr_lo = self.token.span;
- let mut items = vec![];
+ let mut items = ThinVec::new();
while let Some(item) = self.parse_item(ForceCollect::No)? {
items.push(item);
self.maybe_consume_incorrect_semicolon(&items);
@@ -163,35 +166,18 @@ impl<'a> Parser<'a> {
}
// At this point, we have failed to parse an item.
- self.error_on_unmatched_vis(&vis);
- self.error_on_unmatched_defaultness(def);
- if !attrs_allowed {
- self.recover_attrs_no_item(&attrs)?;
+ if !matches!(vis.kind, VisibilityKind::Inherited) {
+ self.sess.emit_err(errors::VisibilityNotFollowedByItem { span: vis.span, vis });
}
- Ok(None)
- }
- /// Error in-case a non-inherited visibility was parsed but no item followed.
- fn error_on_unmatched_vis(&self, vis: &Visibility) {
- if let VisibilityKind::Inherited = vis.kind {
- return;
+ if let Defaultness::Default(span) = def {
+ self.sess.emit_err(errors::DefaultNotFollowedByItem { span });
}
- let vs = pprust::vis_to_string(&vis);
- let vs = vs.trim_end();
- self.struct_span_err(vis.span, &format!("visibility `{vs}` is not followed by an item"))
- .span_label(vis.span, "the visibility")
- .help(&format!("you likely meant to define an item, e.g., `{vs} fn foo() {{}}`"))
- .emit();
- }
- /// Error in-case a `default` was parsed but no item followed.
- fn error_on_unmatched_defaultness(&self, def: Defaultness) {
- if let Defaultness::Default(sp) = def {
- self.struct_span_err(sp, "`default` is not followed by an item")
- .span_label(sp, "the `default` qualifier")
- .note("only `fn`, `const`, `type`, or `impl` items may be prefixed by `default`")
- .emit();
+ if !attrs_allowed {
+ self.recover_attrs_no_item(&attrs)?;
}
+ Ok(None)
}
/// Error in case `default` was parsed in an inappropriate context.
@@ -384,86 +370,74 @@ impl<'a> Parser<'a> {
let sp = self.prev_token.span.between(self.token.span);
let full_sp = self.prev_token.span.to(self.token.span);
let ident_sp = self.token.span;
- if self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace)) {
+
+ let ident = if self.look_ahead(1, |t| {
+ [
+ token::Lt,
+ token::OpenDelim(Delimiter::Brace),
+ token::OpenDelim(Delimiter::Parenthesis),
+ ]
+ .contains(&t.kind)
+ }) {
+ self.parse_ident().unwrap()
+ } else {
+ return Ok(());
+ };
+
+ let mut found_generics = false;
+ if self.check(&token::Lt) {
+ found_generics = true;
+ self.eat_to_tokens(&[&token::Gt]);
+ self.bump(); // `>`
+ }
+
+ let err = if self.check(&token::OpenDelim(Delimiter::Brace)) {
// possible public struct definition where `struct` was forgotten
- let ident = self.parse_ident().unwrap();
- let msg = format!("add `struct` here to parse `{ident}` as a public struct");
- let mut err = self.struct_span_err(sp, "missing `struct` for struct definition");
- err.span_suggestion_short(
- sp,
- &msg,
- " struct ",
- Applicability::MaybeIncorrect, // speculative
- );
- Err(err)
- } else if self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Parenthesis)) {
- let ident = self.parse_ident().unwrap();
+ Some(errors::MissingKeywordForItemDefinition::Struct { span: sp, ident })
+ } else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
+ // possible public function or tuple struct definition where `fn`/`struct` was
+ // forgotten
self.bump(); // `(`
- let kw_name = self.recover_first_param();
+ let is_method = self.recover_self_param();
+
self.consume_block(Delimiter::Parenthesis, ConsumeClosingDelim::Yes);
- let (kw, kw_name, ambiguous) = if self.check(&token::RArrow) {
- self.eat_to_tokens(&[&token::OpenDelim(Delimiter::Brace)]);
- self.bump(); // `{`
- ("fn", kw_name, false)
- } else if self.check(&token::OpenDelim(Delimiter::Brace)) {
- self.bump(); // `{`
- ("fn", kw_name, false)
- } else if self.check(&token::Colon) {
- let kw = "struct";
- (kw, kw, false)
- } else {
- ("fn` or `struct", "function or struct", true)
- };
- let msg = format!("missing `{kw}` for {kw_name} definition");
- let mut err = self.struct_span_err(sp, &msg);
- if !ambiguous {
- self.consume_block(Delimiter::Brace, ConsumeClosingDelim::Yes);
- let suggestion =
- format!("add `{kw}` here to parse `{ident}` as a public {kw_name}");
- err.span_suggestion_short(
- sp,
- &suggestion,
- format!(" {kw} "),
- Applicability::MachineApplicable,
- );
- } else if let Ok(snippet) = self.span_to_snippet(ident_sp) {
- err.span_suggestion(
- full_sp,
- "if you meant to call a macro, try",
- format!("{}!", snippet),
- // this is the `ambiguous` conditional branch
- Applicability::MaybeIncorrect,
- );
- } else {
- err.help(
- "if you meant to call a macro, remove the `pub` \
- and add a trailing `!` after the identifier",
- );
- }
- Err(err)
- } else if self.look_ahead(1, |t| *t == token::Lt) {
- let ident = self.parse_ident().unwrap();
- self.eat_to_tokens(&[&token::Gt]);
- self.bump(); // `>`
- let (kw, kw_name, ambiguous) = if self.eat(&token::OpenDelim(Delimiter::Parenthesis)) {
- ("fn", self.recover_first_param(), false)
- } else if self.check(&token::OpenDelim(Delimiter::Brace)) {
- ("struct", "struct", false)
- } else {
- ("fn` or `struct", "function or struct", true)
- };
- let msg = format!("missing `{kw}` for {kw_name} definition");
- let mut err = self.struct_span_err(sp, &msg);
- if !ambiguous {
- err.span_suggestion_short(
- sp,
- &format!("add `{kw}` here to parse `{ident}` as a public {kw_name}"),
- format!(" {} ", kw),
- Applicability::MachineApplicable,
- );
- }
- Err(err)
+ let err =
+ if self.check(&token::RArrow) || self.check(&token::OpenDelim(Delimiter::Brace)) {
+ self.eat_to_tokens(&[&token::OpenDelim(Delimiter::Brace)]);
+ self.bump(); // `{`
+ self.consume_block(Delimiter::Brace, ConsumeClosingDelim::Yes);
+ if is_method {
+ errors::MissingKeywordForItemDefinition::Method { span: sp, ident }
+ } else {
+ errors::MissingKeywordForItemDefinition::Function { span: sp, ident }
+ }
+ } else if self.check(&token::Semi) {
+ errors::MissingKeywordForItemDefinition::Struct { span: sp, ident }
+ } else {
+ errors::MissingKeywordForItemDefinition::Ambiguous {
+ span: sp,
+ subdiag: if found_generics {
+ None
+ } else if let Ok(snippet) = self.span_to_snippet(ident_sp) {
+ Some(errors::AmbiguousMissingKwForItemSub::SuggestMacro {
+ span: full_sp,
+ snippet,
+ })
+ } else {
+ Some(errors::AmbiguousMissingKwForItemSub::HelpMacro)
+ },
+ }
+ };
+ Some(err)
+ } else if found_generics {
+ Some(errors::MissingKeywordForItemDefinition::Ambiguous { span: sp, subdiag: None })
+ } else {
+ None
+ };
+
+ if let Some(err) = err {
+ Err(err.into_diagnostic(&self.sess.span_diagnostic))
} else {
Ok(())
}
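// Assumed examples for the arms above: `pub S { .. }` suggests `struct`, `pub f() {}`
// suggests `fn`, `pub f(&self) {}` is reported as a method, and `pub S(u8);` as a
// tuple struct; otherwise the keyword stays ambiguous, with a macro-call hint only
// when no generics were seen.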
@@ -485,7 +459,8 @@ impl<'a> Parser<'a> {
// Maybe the user misspelled `macro_rules` (issue #91227)
if self.token.is_ident()
&& path.segments.len() == 1
- && lev_distance("macro_rules", &path.segments[0].ident.to_string(), 3).is_some()
+ && edit_distance("macro_rules", &path.segments[0].ident.to_string(), 2)
+ .is_some()
{
err.span_suggestion(
path.span,
@@ -512,16 +487,13 @@ impl<'a> Parser<'a> {
let mut err = self.struct_span_err(end.span, msg);
if end.is_doc_comment() {
err.span_label(end.span, "this doc comment doesn't document anything");
- }
- if end.meta_kind().is_some() {
- if self.token.kind == TokenKind::Semi {
- err.span_suggestion_verbose(
- self.token.span,
- "consider removing this semicolon",
- "",
- Applicability::MaybeIncorrect,
- );
- }
+ } else if self.token.kind == TokenKind::Semi {
+ err.span_suggestion_verbose(
+ self.token.span,
+ "consider removing this semicolon",
+ "",
+ Applicability::MaybeIncorrect,
+ );
}
if let [.., penultimate, _] = attrs {
err.span_label(start.span.to(penultimate.span), "other attributes here");
@@ -588,20 +560,11 @@ impl<'a> Parser<'a> {
let ty_first = if self.token.is_keyword(kw::For) && self.look_ahead(1, |t| t != &token::Lt)
{
let span = self.prev_token.span.between(self.token.span);
- self.struct_span_err(span, "missing trait in a trait impl")
- .span_suggestion(
- span,
- "add a trait here",
- " Trait ",
- Applicability::HasPlaceholders,
- )
- .span_suggestion(
- span.to(self.token.span),
- "for an inherent impl, drop this `for`",
- "",
- Applicability::MaybeIncorrect,
- )
- .emit();
+ self.sess.emit_err(errors::MissingTraitInTraitImpl {
+ span,
+ for_span: span.to(self.token.span),
+ });
+
P(Ty {
kind: TyKind::Path(None, err_path(span)),
span,
@@ -634,14 +597,7 @@ impl<'a> Parser<'a> {
Some(ty_second) => {
// impl Trait for Type
if !has_for {
- self.struct_span_err(missing_for_span, "missing `for` in a trait impl")
- .span_suggestion_short(
- missing_for_span,
- "add `for` here",
- " for ",
- Applicability::MachineApplicable,
- )
- .emit();
+ self.sess.emit_err(errors::MissingForInTraitImpl { span: missing_for_span });
}
let ty_first = ty_first.into_inner();
@@ -649,7 +605,9 @@ impl<'a> Parser<'a> {
// This notably includes paths passed through `ty` macro fragments (#46438).
TyKind::Path(None, path) => path,
_ => {
- self.struct_span_err(ty_first.span, "expected a trait, found type").emit();
+ self.sess.emit_err(errors::ExpectedTraitInTraitImplFoundType {
+ span: ty_first.span,
+ });
err_path(ty_first.span)
}
};
@@ -688,20 +646,20 @@ impl<'a> Parser<'a> {
&mut self,
attrs: &mut AttrVec,
mut parse_item: impl FnMut(&mut Parser<'a>) -> PResult<'a, Option<Option<T>>>,
- ) -> PResult<'a, Vec<T>> {
+ ) -> PResult<'a, ThinVec<T>> {
let open_brace_span = self.token.span;
// Recover `impl Ty;` instead of `impl Ty {}`
if self.token == TokenKind::Semi {
- self.sess.emit_err(UseEmptyBlockNotSemi { span: self.token.span });
+ self.sess.emit_err(errors::UseEmptyBlockNotSemi { span: self.token.span });
self.bump();
- return Ok(vec![]);
+ return Ok(ThinVec::new());
}
self.expect(&token::OpenDelim(Delimiter::Brace))?;
attrs.extend(self.parse_inner_attributes()?);
- let mut items = Vec::new();
+ let mut items = ThinVec::new();
while !self.eat(&token::CloseDelim(Delimiter::Brace)) {
if self.recover_doc_comment_before_brace() {
continue;
@@ -783,6 +741,7 @@ impl<'a> Parser<'a> {
fn recover_doc_comment_before_brace(&mut self) -> bool {
if let token::DocComment(..) = self.token.kind {
if self.look_ahead(1, |tok| tok == &token::CloseDelim(Delimiter::Brace)) {
+ // FIXME: merge with `DocCommentDoesNotDocumentAnything` (E0585)
struct_span_err!(
self.diagnostic(),
self.token.span,
@@ -849,7 +808,7 @@ impl<'a> Parser<'a> {
// It's a trait alias.
if had_colon {
let span = span_at_colon.to(span_before_eq);
- self.struct_span_err(span, "bounds are not allowed on trait aliases").emit();
+ self.sess.emit_err(errors::BoundsNotAllowedOnTraitAliases { span });
}
let bounds = self.parse_generic_bounds(None)?;
@@ -858,12 +817,10 @@ impl<'a> Parser<'a> {
let whole_span = lo.to(self.prev_token.span);
if is_auto == IsAuto::Yes {
- let msg = "trait aliases cannot be `auto`";
- self.struct_span_err(whole_span, msg).span_label(whole_span, msg).emit();
+ self.sess.emit_err(errors::TraitAliasCannotBeAuto { span: whole_span });
}
if let Unsafe::Yes(_) = unsafety {
- let msg = "trait aliases cannot be `unsafe`";
- self.struct_span_err(whole_span, msg).span_label(whole_span, msg).emit();
+ self.sess.emit_err(errors::TraitAliasCannotBeUnsafe { span: whole_span });
}
self.sess.gated_spans.gate(sym::trait_alias, whole_span);
@@ -909,8 +866,7 @@ impl<'a> Parser<'a> {
Ok(kind) => kind,
Err(kind) => match kind {
ItemKind::Static(a, _, b) => {
- self.struct_span_err(span, "associated `static` items are not allowed")
- .emit();
+ self.sess.emit_err(errors::AssociatedStaticItemNotAllowed { span });
AssocItemKind::Const(Defaultness::Final, a, b)
}
_ => return self.error_bad_item_kind(span, &kind, "`trait`s or `impl`s"),
@@ -1041,7 +997,7 @@ impl<'a> Parser<'a> {
/// ```text
/// USE_TREE_LIST = Ø | (USE_TREE `,`)* USE_TREE [`,`]
/// ```
- fn parse_use_tree_list(&mut self) -> PResult<'a, Vec<(UseTree, ast::NodeId)>> {
+ fn parse_use_tree_list(&mut self) -> PResult<'a, ThinVec<(UseTree, ast::NodeId)>> {
self.parse_delim_comma_seq(Delimiter::Brace, |p| {
p.recover_diff_marker();
Ok((p.parse_use_tree()?, DUMMY_NODE_ID))
@@ -1084,41 +1040,37 @@ impl<'a> Parser<'a> {
}
fn parse_crate_name_with_dashes(&mut self) -> PResult<'a, Ident> {
- let error_msg = "crate name using dashes are not valid in `extern crate` statements";
- let suggestion_msg = "if the original crate name uses dashes you need to use underscores \
- in the code";
- let mut ident = if self.token.is_keyword(kw::SelfLower) {
+ let ident = if self.token.is_keyword(kw::SelfLower) {
self.parse_path_segment_ident()
} else {
self.parse_ident()
}?;
- let mut idents = vec![];
- let mut replacement = vec![];
- let mut fixed_crate_name = false;
- // Accept `extern crate name-like-this` for better diagnostics.
+
let dash = token::BinOp(token::BinOpToken::Minus);
- if self.token == dash {
- // Do not include `-` as part of the expected tokens list.
- while self.eat(&dash) {
- fixed_crate_name = true;
- replacement.push((self.prev_token.span, "_".to_string()));
- idents.push(self.parse_ident()?);
- }
+ if self.token != dash {
+ return Ok(ident);
}
- if fixed_crate_name {
- let fixed_name_sp = ident.span.to(idents.last().unwrap().span);
- let mut fixed_name = ident.name.to_string();
- for part in idents {
- fixed_name.push_str(&format!("_{}", part.name));
- }
- ident = Ident::from_str_and_span(&fixed_name, fixed_name_sp);
- self.struct_span_err(fixed_name_sp, error_msg)
- .span_label(fixed_name_sp, "dash-separated idents are not valid")
- .multipart_suggestion(suggestion_msg, replacement, Applicability::MachineApplicable)
- .emit();
+ // Accept `extern crate name-like-this` for better diagnostics.
+ let mut dashes = vec![];
+ let mut idents = vec![];
+ while self.eat(&dash) {
+ dashes.push(self.prev_token.span);
+ idents.push(self.parse_ident()?);
}
- Ok(ident)
+
+ let fixed_name_sp = ident.span.to(idents.last().unwrap().span);
+ let mut fixed_name = ident.name.to_string();
+ for part in idents {
+ write!(fixed_name, "_{}", part.name).unwrap();
+ }
+
+ self.sess.emit_err(errors::ExternCrateNameWithDashes {
+ span: fixed_name_sp,
+ sugg: errors::ExternCrateNameWithDashesSugg { dashes },
+ });
+
+ Ok(Ident::from_str_and_span(&fixed_name, fixed_name_sp))
}
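// Assumed example: `extern crate foo-bar;` is recovered as `foo_bar`, with the emitted
// error suggesting an underscore for each recorded dash.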
/// Parses `extern` for foreign ABIs modules.
@@ -1166,7 +1118,10 @@ impl<'a> Parser<'a> {
Ok(kind) => kind,
Err(kind) => match kind {
ItemKind::Const(_, a, b) => {
- self.error_on_foreign_const(span, ident);
+ self.sess.emit_err(errors::ExternItemCannotBeConst {
+ ident_span: ident.span,
+ const_span: span.with_hi(ident.span.lo()),
+ });
ForeignItemKind::Static(a, Mutability::Not, b)
}
_ => return self.error_bad_item_kind(span, &kind, "`extern` blocks"),
@@ -1178,6 +1133,7 @@ impl<'a> Parser<'a> {
}
fn error_bad_item_kind<T>(&self, span: Span, kind: &ItemKind, ctx: &str) -> Option<T> {
+ // FIXME(#100717): needs variant for each `ItemKind` (instead of using `ItemKind::descr()`)
let span = self.sess.source_map().guess_head_span(span);
let descr = kind.descr();
self.struct_span_err(span, &format!("{descr} is not supported in {ctx}"))
@@ -1186,18 +1142,6 @@ impl<'a> Parser<'a> {
None
}
- fn error_on_foreign_const(&self, span: Span, ident: Ident) {
- self.struct_span_err(ident.span, "extern items cannot be `const`")
- .span_suggestion(
- span.with_hi(ident.span.lo()),
- "try using a static value",
- "static ",
- Applicability::MachineApplicable,
- )
- .note("for more information, visit https://doc.rust-lang.org/std/keyword.extern.html")
- .emit();
- }
-
fn is_unsafe_foreign_mod(&self) -> bool {
self.token.is_keyword(kw::Unsafe)
&& self.is_keyword_ahead(1, &[kw::Extern])
@@ -1225,25 +1169,10 @@ impl<'a> Parser<'a> {
fn recover_const_mut(&mut self, const_span: Span) {
if self.eat_keyword(kw::Mut) {
let span = self.prev_token.span;
- self.struct_span_err(span, "const globals cannot be mutable")
- .span_label(span, "cannot be mutable")
- .span_suggestion(
- const_span,
- "you might want to declare a static instead",
- "static",
- Applicability::MaybeIncorrect,
- )
- .emit();
+ self.sess.emit_err(errors::ConstGlobalCannotBeMutable { ident_span: span, const_span });
} else if self.eat_keyword(kw::Let) {
let span = self.prev_token.span;
- self.struct_span_err(const_span.to(span), "`const` and `let` are mutually exclusive")
- .span_suggestion(
- const_span.to(span),
- "remove `let`",
- "const",
- Applicability::MaybeIncorrect,
- )
- .emit();
+ self.sess.emit_err(errors::ConstLetMutuallyExclusive { span: const_span.to(span) });
}
}
@@ -1328,13 +1257,9 @@ impl<'a> Parser<'a> {
};
let span = self.prev_token.span.shrink_to_hi();
- let mut err = self.struct_span_err(span, &format!("missing type for `{kind}` item"));
- err.span_suggestion(
- span,
- "provide a type for the item",
- format!("{colon} <type>"),
- Applicability::HasPlaceholders,
- );
+ let err: DiagnosticBuilder<'_, ErrorGuaranteed> =
+ errors::MissingConstType { span, colon, kind }
+ .into_diagnostic(&self.sess.span_diagnostic);
err.stash(span, StashKey::ItemNoType);
// The user intended that the type be inferred,
@@ -1346,18 +1271,12 @@ impl<'a> Parser<'a> {
fn parse_item_enum(&mut self) -> PResult<'a, ItemInfo> {
if self.token.is_keyword(kw::Struct) {
let span = self.prev_token.span.to(self.token.span);
- let mut err = self.struct_span_err(span, "`enum` and `struct` are mutually exclusive");
- err.span_suggestion(
- span,
- "replace `enum struct` with",
- "enum",
- Applicability::MachineApplicable,
- );
+ let err = errors::EnumStructMutuallyExclusive { span };
if self.look_ahead(1, |t| t.is_ident()) {
self.bump();
- err.emit();
+ self.sess.emit_err(err);
} else {
- return Err(err);
+ return Err(err.into_diagnostic(&self.sess.span_diagnostic));
}
}
@@ -1367,9 +1286,9 @@ impl<'a> Parser<'a> {
// Possibly recover `enum Foo;` instead of `enum Foo {}`
let (variants, _) = if self.token == TokenKind::Semi {
- self.sess.emit_err(UseEmptyBlockNotSemi { span: self.token.span });
+ self.sess.emit_err(errors::UseEmptyBlockNotSemi { span: self.token.span });
self.bump();
- (vec![], false)
+ (thin_vec![], false)
} else {
self.parse_delim_comma_seq(Delimiter::Brace, |p| p.parse_enum_variant()).map_err(
|mut e| {
@@ -1412,7 +1331,7 @@ impl<'a> Parser<'a> {
};
let disr_expr =
- if this.eat(&token::Eq) { Some(this.parse_anon_const_expr()?) } else { None };
+ if this.eat(&token::Eq) { Some(this.parse_expr_anon_const()?) } else { None };
let vr = ast::Variant {
ident,
@@ -1493,13 +1412,9 @@ impl<'a> Parser<'a> {
self.expect_semi()?;
body
} else {
- let token_str = super::token_descr(&self.token);
- let msg = &format!(
- "expected `where`, `{{`, `(`, or `;` after struct name, found {token_str}"
- );
- let mut err = self.struct_span_err(self.token.span, msg);
- err.span_label(self.token.span, "expected `where`, `{`, `(`, or `;` after struct name");
- return Err(err);
+ let err =
+ errors::UnexpectedTokenAfterStructName::new(self.token.span, self.token.clone());
+ return Err(err.into_diagnostic(&self.sess.span_diagnostic));
};
Ok((class_name, ItemKind::Struct(vdata, generics)))
@@ -1542,8 +1457,8 @@ impl<'a> Parser<'a> {
adt_ty: &str,
ident_span: Span,
parsed_where: bool,
- ) -> PResult<'a, (Vec<FieldDef>, /* recovered */ bool)> {
- let mut fields = Vec::new();
+ ) -> PResult<'a, (ThinVec<FieldDef>, /* recovered */ bool)> {
+ let mut fields = ThinVec::new();
let mut recovered = false;
if self.eat(&token::OpenDelim(Delimiter::Brace)) {
while self.token != token::CloseDelim(Delimiter::Brace) {
@@ -1583,7 +1498,7 @@ impl<'a> Parser<'a> {
Ok((fields, recovered))
}
- pub(super) fn parse_tuple_struct_body(&mut self) -> PResult<'a, Vec<FieldDef>> {
+ pub(super) fn parse_tuple_struct_body(&mut self) -> PResult<'a, ThinVec<FieldDef>> {
// This is the case where we find `struct Foo<T>(T) where T: Copy;`
// Unit like structs are handled in parse_item_struct function
self.parse_paren_comma_seq(|p| {
@@ -1676,7 +1591,7 @@ impl<'a> Parser<'a> {
token::CloseDelim(Delimiter::Brace) => {}
token::DocComment(..) => {
let previous_span = self.prev_token.span;
- let mut err = DocCommentDoesNotDocumentAnything {
+ let mut err = errors::DocCommentDoesNotDocumentAnything {
span: self.token.span,
missing_comma: None,
};
@@ -1807,7 +1722,7 @@ impl<'a> Parser<'a> {
}
if self.token.kind == token::Eq {
self.bump();
- let const_expr = self.parse_anon_const_expr()?;
+ let const_expr = self.parse_expr_anon_const()?;
let sp = ty.span.shrink_to_hi().to(const_expr.value.span);
self.struct_span_err(sp, "default values on `struct` fields aren't supported")
.span_suggestion(
@@ -2186,7 +2101,7 @@ impl<'a> Parser<'a> {
// If we see `for Ty ...` then user probably meant `impl` item.
if self.token.is_keyword(kw::For) {
old_err.cancel();
- return Err(self.sess.create_err(FnTypoWithImpl { fn_span }));
+ return Err(self.sess.create_err(errors::FnTypoWithImpl { fn_span }));
} else {
return Err(old_err);
}
@@ -2330,7 +2245,12 @@ impl<'a> Parser<'a> {
let ext = self.parse_extern(case);
if let Async::Yes { span, .. } = asyncness {
- self.ban_async_in_2015(span);
+ if span.is_rust_2015() {
+ self.sess.emit_err(errors::AsyncFnIn2015 {
+ span,
+ help: errors::HelpUseLatestEdition::new(),
+ });
+ }
}
if !self.eat_keyword_case(kw::Fn, case) {
@@ -2440,17 +2360,6 @@ impl<'a> Parser<'a> {
Ok(FnHeader { constness, unsafety, asyncness, ext })
}
- /// We are parsing `async fn`. If we are on Rust 2015, emit an error.
- fn ban_async_in_2015(&self, span: Span) {
- if span.rust_2015() {
- let diag = self.diagnostic();
- struct_span_err!(diag, span, E0670, "`async fn` is not permitted in Rust 2015")
- .span_label(span, "to use `async fn`, switch to Rust 2018 or later")
- .help_use_latest_edition()
- .emit();
- }
- }
-
/// Parses the parameter list and result type of a function declaration.
pub(super) fn parse_fn_decl(
&mut self,
@@ -2465,7 +2374,7 @@ impl<'a> Parser<'a> {
}
/// Parses the parameter list of a function, including the `(` and `)` delimiters.
- pub(super) fn parse_fn_params(&mut self, req_name: ReqName) -> PResult<'a, Vec<Param>> {
+ pub(super) fn parse_fn_params(&mut self, req_name: ReqName) -> PResult<'a, ThinVec<Param>> {
let mut first_param = true;
// Parse the arguments, starting out with `self` being allowed...
let (mut params, _) = self.parse_paren_comma_seq(|p| {
@@ -2593,9 +2502,7 @@ impl<'a> Parser<'a> {
};
// Recover for the grammar `*self`, `*const self`, and `*mut self`.
let recover_self_ptr = |this: &mut Self| {
- let msg = "cannot pass `self` by raw pointer";
- let span = this.token.span;
- this.struct_span_err(span, msg).span_label(span, msg).emit();
+ this.sess.emit_err(errors::SelfArgumentPointer { span: this.token.span });
Ok((SelfKind::Value(Mutability::Not), expect_self_ident(this), this.prev_token.span))
};
@@ -2676,14 +2583,14 @@ impl<'a> Parser<'a> {
&& self.look_ahead(offset + 1, |t| t == &token::Colon)
}
- fn recover_first_param(&mut self) -> &'static str {
+ fn recover_self_param(&mut self) -> bool {
match self
.parse_outer_attributes()
.and_then(|_| self.parse_self_param())
.map_err(|e| e.cancel())
{
- Ok(Some(_)) => "method",
- _ => "function",
+ Ok(Some(_)) => true,
+ _ => false,
}
}
}
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index ffb23b50a..da82e4724 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -10,7 +10,7 @@ mod path;
mod stmt;
mod ty;
-use crate::lexer::UnmatchedBrace;
+use crate::lexer::UnmatchedDelim;
pub use attr_wrapper::AttrWrapper;
pub use diagnostics::AttemptLocalParseRecovery;
pub(crate) use item::FnParseMode;
@@ -19,9 +19,8 @@ pub use path::PathStyle;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, Nonterminal, Token, TokenKind};
-use rustc_ast::tokenstream::AttributesData;
-use rustc_ast::tokenstream::{self, DelimSpan, Spacing};
-use rustc_ast::tokenstream::{TokenStream, TokenTree};
+use rustc_ast::tokenstream::{AttributesData, DelimSpan, Spacing};
+use rustc_ast::tokenstream::{TokenStream, TokenTree, TokenTreeCursor};
use rustc_ast::util::case::Case;
use rustc_ast::AttrId;
use rustc_ast::DUMMY_NODE_ID;
@@ -37,9 +36,10 @@ use rustc_errors::{
use rustc_session::parse::ParseSess;
use rustc_span::source_map::{Span, DUMMY_SP};
use rustc_span::symbol::{kw, sym, Ident, Symbol};
-
use std::ops::Range;
use std::{cmp, mem, slice};
+use thin_vec::ThinVec;
+use tracing::debug;
use crate::errors::{
DocCommentDoesNotDocumentAnything, IncorrectVisibilityRestriction, MismatchedClosingDelimiter,
@@ -149,7 +149,7 @@ pub struct Parser<'a> {
/// A list of all unclosed delimiters found by the lexer. If an entry is used for error recovery
/// it gets removed from here. Every entry left at the end gets emitted as an independent
/// error.
- pub(super) unclosed_delims: Vec<UnmatchedBrace>,
+ pub(super) unclosed_delims: Vec<UnmatchedDelim>,
last_unexpected_token_span: Option<Span>,
/// Span pointing at the `:` for the last type ascription the parser has seen, and whether it
/// looked like it could have been a mistyped path or literal `Option:Some(42)`).
@@ -168,7 +168,7 @@ pub struct Parser<'a> {
// This type is used a lot, e.g. it's cloned when matching many declarative macro rules with nonterminals. Make sure
// it doesn't unintentionally get bigger.
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
-rustc_data_structures::static_assert_size!(Parser<'_>, 336);
+rustc_data_structures::static_assert_size!(Parser<'_>, 312);
/// Stores span information about a closure.
#[derive(Clone)]
@@ -221,18 +221,27 @@ impl<'a> Drop for Parser<'a> {
}
}
+/// Iterator over a `TokenStream` that produces `Token`s. It's a bit odd that
+/// we (a) lex tokens into a nice tree structure (`TokenStream`), and then (b)
+/// use this type to emit them as a linear sequence. But a linear sequence is
+/// what the parser expects, for the most part.
#[derive(Clone)]
struct TokenCursor {
- // The current (innermost) frame. `frame` and `stack` could be combined,
- // but it's faster to have them separately to access `frame` directly
- // rather than via something like `stack.last().unwrap()` or
- // `stack[stack.len() - 1]`.
- frame: TokenCursorFrame,
- // Additional frames that enclose `frame`.
- stack: Vec<TokenCursorFrame>,
+ // Cursor for the current (innermost) token stream. The delimiters for this
+ // token stream are found in `self.stack.last()`; when that is `None` then
+ // we are in the outermost token stream which never has delimiters.
+ tree_cursor: TokenTreeCursor,
+
+ // Token streams surrounding the current one. The delimiters for stack[n]'s
+ // tokens are in `stack[n-1]`. `stack[0]` (when present) has no delimiters
+ // because it's the outermost token stream which never has delimiters.
+ stack: Vec<(TokenTreeCursor, Delimiter, DelimSpan)>,
+
desugar_doc_comments: bool,
+
// Counts the number of calls to `{,inlined_}next`.
num_next_calls: usize,
+
// During parsing, we may sometimes need to 'unglue' a
// glued token into two component tokens
// (e.g. '>>' into '>' and '>'), so that the parser
@@ -257,18 +266,6 @@ struct TokenCursor {
break_last_token: bool,
}
-#[derive(Clone)]
-struct TokenCursorFrame {
- delim_sp: Option<(Delimiter, DelimSpan)>,
- tree_cursor: tokenstream::Cursor,
-}
-
-impl TokenCursorFrame {
- fn new(delim_sp: Option<(Delimiter, DelimSpan)>, tts: TokenStream) -> Self {
- TokenCursorFrame { delim_sp, tree_cursor: tts.into_trees() }
- }
-}
-
impl TokenCursor {
fn next(&mut self, desugar_doc_comments: bool) -> (Token, Spacing) {
self.inlined_next(desugar_doc_comments)
@@ -281,38 +278,47 @@ impl TokenCursor {
// FIXME: we currently don't return `Delimiter` open/close delims. To fix #67062 we will
// need to, whereupon the `delim != Delimiter::Invisible` conditions below can be
// removed.
- if let Some(tree) = self.frame.tree_cursor.next_ref() {
+ if let Some(tree) = self.tree_cursor.next_ref() {
match tree {
&TokenTree::Token(ref token, spacing) => match (desugar_doc_comments, token) {
(true, &Token { kind: token::DocComment(_, attr_style, data), span }) => {
- return self.desugar(attr_style, data, span);
+ let desugared = self.desugar(attr_style, data, span);
+ self.tree_cursor.replace_prev_and_rewind(desugared);
+ // Continue to get the first token of the desugared doc comment.
+ }
+ _ => {
+ debug_assert!(!matches!(
+ token.kind,
+ token::OpenDelim(_) | token::CloseDelim(_)
+ ));
+ return (token.clone(), spacing);
}
- _ => return (token.clone(), spacing),
},
&TokenTree::Delimited(sp, delim, ref tts) => {
- // Set `open_delim` to true here because we deal with it immediately.
- let frame = TokenCursorFrame::new(Some((delim, sp)), tts.clone());
- self.stack.push(mem::replace(&mut self.frame, frame));
+ let trees = tts.clone().into_trees();
+ self.stack.push((mem::replace(&mut self.tree_cursor, trees), delim, sp));
if delim != Delimiter::Invisible {
return (Token::new(token::OpenDelim(delim), sp.open), Spacing::Alone);
}
// No open delimiter to return; continue on to the next iteration.
}
};
- } else if let Some(frame) = self.stack.pop() {
- if let Some((delim, span)) = self.frame.delim_sp && delim != Delimiter::Invisible {
- self.frame = frame;
+ } else if let Some((tree_cursor, delim, span)) = self.stack.pop() {
+ // We have exhausted this token stream. Move back to its parent token stream.
+ self.tree_cursor = tree_cursor;
+ if delim != Delimiter::Invisible {
return (Token::new(token::CloseDelim(delim), span.close), Spacing::Alone);
}
- self.frame = frame;
// No close delimiter to return; continue on to the next iteration.
} else {
+ // We have exhausted the outermost token stream.
return (Token::new(token::Eof, DUMMY_SP), Spacing::Alone);
}
}
}
- fn desugar(&mut self, attr_style: AttrStyle, data: Symbol, span: Span) -> (Token, Spacing) {
+ // Desugar a doc comment into something like `#[doc = r"foo"]`.
+ fn desugar(&mut self, attr_style: AttrStyle, data: Symbol, span: Span) -> Vec<TokenTree> {
// Searches for the occurrences of `"#*` and returns the minimum number of `#`s
// required to wrap the text. E.g.
// - `abc d` is wrapped as `r"abc d"` (num_of_hashes = 0)
@@ -329,7 +335,7 @@ impl TokenCursor {
num_of_hashes = cmp::max(num_of_hashes, count);
}
- // `/// foo` becomes `doc = r"foo".
+ // `/// foo` becomes `doc = r"foo"`.
let delim_span = DelimSpan::from_single(span);
let body = TokenTree::Delimited(
delim_span,
@@ -346,27 +352,15 @@ impl TokenCursor {
.collect::<TokenStream>(),
);
- self.stack.push(mem::replace(
- &mut self.frame,
- TokenCursorFrame::new(
- None,
- if attr_style == AttrStyle::Inner {
- [
- TokenTree::token_alone(token::Pound, span),
- TokenTree::token_alone(token::Not, span),
- body,
- ]
- .into_iter()
- .collect::<TokenStream>()
- } else {
- [TokenTree::token_alone(token::Pound, span), body]
- .into_iter()
- .collect::<TokenStream>()
- },
- ),
- ));
-
- self.next(/* desugar_doc_comments */ false)
+ if attr_style == AttrStyle::Inner {
+ vec![
+ TokenTree::token_alone(token::Pound, span),
+ TokenTree::token_alone(token::Not, span),
+ body,
+ ]
+ } else {
+ vec![TokenTree::token_alone(token::Pound, span), body]
+ }
}
}
@@ -475,7 +469,7 @@ impl<'a> Parser<'a> {
restrictions: Restrictions::empty(),
expected_tokens: Vec::new(),
token_cursor: TokenCursor {
- frame: TokenCursorFrame::new(None, tokens),
+ tree_cursor: tokens.into_trees(),
stack: Vec::new(),
num_next_calls: 0,
desugar_doc_comments,
@@ -739,9 +733,10 @@ impl<'a> Parser<'a> {
fn check_const_closure(&self) -> bool {
self.is_keyword_ahead(0, &[kw::Const])
&& self.look_ahead(1, |t| match &t.kind {
- token::Ident(kw::Move | kw::Static | kw::Async, _)
- | token::OrOr
- | token::BinOp(token::Or) => true,
+ // async closures do not work with const closures, so we do not parse that here.
+ token::Ident(kw::Move | kw::Static, _) | token::OrOr | token::BinOp(token::Or) => {
+ true
+ }
_ => false,
})
}
@@ -859,11 +854,11 @@ impl<'a> Parser<'a> {
sep: SeqSep,
expect: TokenExpectType,
mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
- ) -> PResult<'a, (Vec<T>, bool /* trailing */, bool /* recovered */)> {
+ ) -> PResult<'a, (ThinVec<T>, bool /* trailing */, bool /* recovered */)> {
let mut first = true;
let mut recovered = false;
let mut trailing = false;
- let mut v = vec![];
+ let mut v = ThinVec::new();
let unclosed_delims = !self.unclosed_delims.is_empty();
while !self.expect_any_with_type(kets, expect) {
@@ -987,7 +982,11 @@ impl<'a> Parser<'a> {
let initial_semicolon = self.token.span;
while self.eat(&TokenKind::Semi) {
- let _ = self.parse_stmt(ForceCollect::Yes)?;
+ let _ =
+ self.parse_stmt_without_recovery(false, ForceCollect::Yes).unwrap_or_else(|e| {
+ e.cancel();
+ None
+ });
}
expect_err.set_primary_message(
@@ -1043,7 +1042,7 @@ impl<'a> Parser<'a> {
ket: &TokenKind,
sep: SeqSep,
f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
- ) -> PResult<'a, (Vec<T>, bool, bool)> {
+ ) -> PResult<'a, (ThinVec<T>, bool, bool)> {
self.parse_seq_to_before_tokens(&[ket], sep, TokenExpectType::Expect, f)
}
@@ -1055,7 +1054,7 @@ impl<'a> Parser<'a> {
ket: &TokenKind,
sep: SeqSep,
f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
- ) -> PResult<'a, (Vec<T>, bool /* trailing */)> {
+ ) -> PResult<'a, (ThinVec<T>, bool /* trailing */)> {
let (val, trailing, recovered) = self.parse_seq_to_before_end(ket, sep, f)?;
if !recovered {
self.eat(ket);
@@ -1072,7 +1071,7 @@ impl<'a> Parser<'a> {
ket: &TokenKind,
sep: SeqSep,
f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
- ) -> PResult<'a, (Vec<T>, bool)> {
+ ) -> PResult<'a, (ThinVec<T>, bool)> {
self.expect(bra)?;
self.parse_seq_to_end(ket, sep, f)
}
@@ -1081,7 +1080,7 @@ impl<'a> Parser<'a> {
&mut self,
delim: Delimiter,
f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
- ) -> PResult<'a, (Vec<T>, bool)> {
+ ) -> PResult<'a, (ThinVec<T>, bool)> {
self.parse_unspanned_seq(
&token::OpenDelim(delim),
&token::CloseDelim(delim),
@@ -1093,7 +1092,7 @@ impl<'a> Parser<'a> {
fn parse_paren_comma_seq<T>(
&mut self,
f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
- ) -> PResult<'a, (Vec<T>, bool)> {
+ ) -> PResult<'a, (ThinVec<T>, bool)> {
self.parse_delim_comma_seq(Delimiter::Parenthesis, f)
}
@@ -1142,14 +1141,16 @@ impl<'a> Parser<'a> {
return looker(&self.token);
}
- let frame = &self.token_cursor.frame;
- if let Some((delim, span)) = frame.delim_sp && delim != Delimiter::Invisible {
+ let tree_cursor = &self.token_cursor.tree_cursor;
+ if let Some(&(_, delim, span)) = self.token_cursor.stack.last()
+ && delim != Delimiter::Invisible
+ {
let all_normal = (0..dist).all(|i| {
- let token = frame.tree_cursor.look_ahead(i);
+ let token = tree_cursor.look_ahead(i);
!matches!(token, Some(TokenTree::Delimited(_, Delimiter::Invisible, _)))
});
if all_normal {
- return match frame.tree_cursor.look_ahead(dist - 1) {
+ return match tree_cursor.look_ahead(dist - 1) {
Some(tree) => match tree {
TokenTree::Token(token, _) => looker(token),
TokenTree::Delimited(dspan, delim, _) => {
@@ -1203,8 +1204,18 @@ impl<'a> Parser<'a> {
/// Parses constness: `const` or nothing.
fn parse_constness(&mut self, case: Case) -> Const {
- // Avoid const blocks to be parsed as const items
- if self.look_ahead(1, |t| t != &token::OpenDelim(Delimiter::Brace))
+ self.parse_constness_(case, false)
+ }
+
+ /// Parses constness for closures
+ fn parse_closure_constness(&mut self, case: Case) -> Const {
+ self.parse_constness_(case, true)
+ }
+
+ fn parse_constness_(&mut self, case: Case, is_closure: bool) -> Const {
+ // Avoid parsing const blocks and const closures as const items
+ if (self.check_const_closure() == is_closure)
+ && self.look_ahead(1, |t| t != &token::OpenDelim(Delimiter::Brace))
&& self.eat_keyword_case(kw::Const, case)
{
Const::Yes(self.prev_token.uninterpolated_span())
@@ -1277,22 +1288,16 @@ impl<'a> Parser<'a> {
}
fn parse_delim_args_inner(&mut self) -> Option<DelimArgs> {
- if self.check(&token::OpenDelim(Delimiter::Parenthesis))
+ let delimited = self.check(&token::OpenDelim(Delimiter::Parenthesis))
|| self.check(&token::OpenDelim(Delimiter::Bracket))
- || self.check(&token::OpenDelim(Delimiter::Brace))
- {
- match self.parse_token_tree() {
- // We've confirmed above that there is a delimiter so unwrapping is OK.
- TokenTree::Delimited(dspan, delim, tokens) => Some(DelimArgs {
- dspan,
- delim: MacDelimiter::from_token(delim).unwrap(),
- tokens,
- }),
- _ => unreachable!(),
- }
- } else {
- None
- }
+ || self.check(&token::OpenDelim(Delimiter::Brace));
+
+ delimited.then(|| {
+ // We've confirmed above that there is a delimiter so unwrapping is OK.
+ let TokenTree::Delimited(dspan, delim, tokens) = self.parse_token_tree() else { unreachable!() };
+
+ DelimArgs { dspan, delim: MacDelimiter::from_token(delim).unwrap(), tokens }
+ })
}
fn parse_or_use_outer_attributes(
@@ -1310,10 +1315,10 @@ impl<'a> Parser<'a> {
pub(crate) fn parse_token_tree(&mut self) -> TokenTree {
match self.token.kind {
token::OpenDelim(..) => {
- // Grab the tokens from this frame.
- let frame = &self.token_cursor.frame;
- let stream = frame.tree_cursor.stream.clone();
- let (delim, span) = frame.delim_sp.unwrap();
+ // Grab the tokens within the delimiters.
+ let tree_cursor = &self.token_cursor.tree_cursor;
+ let stream = tree_cursor.stream.clone();
+ let (_, delim, span) = *self.token_cursor.stack.last().unwrap();
// Advance the token cursor through the entire delimited
// sequence. After getting the `OpenDelim` we are *within* the
@@ -1516,11 +1521,11 @@ impl<'a> Parser<'a> {
}
pub(crate) fn make_unclosed_delims_error(
- unmatched: UnmatchedBrace,
+ unmatched: UnmatchedDelim,
sess: &ParseSess,
) -> Option<DiagnosticBuilder<'_, ErrorGuaranteed>> {
// `None` here means an `Eof` was found. We already emit those errors elsewhere; we add them to
- // `unmatched_braces` only for error recovery in the `Parser`.
+ // `unmatched_delims` only for error recovery in the `Parser`.
let found_delim = unmatched.found_delim?;
let mut spans = vec![unmatched.found_span];
if let Some(sp) = unmatched.unclosed_span {
@@ -1537,7 +1542,7 @@ pub(crate) fn make_unclosed_delims_error(
Some(err)
}
-pub fn emit_unclosed_delims(unclosed_delims: &mut Vec<UnmatchedBrace>, sess: &ParseSess) {
+pub fn emit_unclosed_delims(unclosed_delims: &mut Vec<UnmatchedDelim>, sess: &ParseSess) {
*sess.reached_eof.borrow_mut() |=
unclosed_delims.iter().any(|unmatched_delim| unmatched_delim.found_delim.is_none());
for unmatched in unclosed_delims.drain(..) {
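
For readers unfamiliar with the new `TokenCursor` shape, here is a small standalone sketch (ordinary Rust, not compiler code) of the same traversal pattern: a cursor into the current stream plus an explicit stack of parent cursors, emitting open and close tokens as the walk descends into and climbs out of delimited groups. `Tok`, `Tree`, and `Cursor` are illustrative stand-ins, not rustc types.

#[derive(Debug, PartialEq)]
enum Tok {
    Ident(String),
    Open(char),
    Close(char),
    Eof,
}

enum Tree {
    Token(Tok),
    Delimited(char, Vec<Tree>),
}

struct Cursor {
    // Cursor into the current (innermost) stream.
    current: std::vec::IntoIter<Tree>,
    // Enclosing streams, each paired with the delimiter of the stream we
    // descended into, so we know which `Close` to emit on the way back out.
    stack: Vec<(std::vec::IntoIter<Tree>, char)>,
}

impl Cursor {
    fn new(trees: Vec<Tree>) -> Self {
        Cursor { current: trees.into_iter(), stack: Vec::new() }
    }

    fn next(&mut self) -> Tok {
        if let Some(tree) = self.current.next() {
            match tree {
                Tree::Token(tok) => tok,
                Tree::Delimited(delim, trees) => {
                    // Descend: stash the parent cursor and emit the open delimiter.
                    let parent = std::mem::replace(&mut self.current, trees.into_iter());
                    self.stack.push((parent, delim));
                    Tok::Open(delim)
                }
            }
        } else if let Some((parent, delim)) = self.stack.pop() {
            // Current stream exhausted: move back to the parent and emit the close.
            self.current = parent;
            Tok::Close(delim)
        } else {
            Tok::Eof
        }
    }
}

fn main() {
    let trees = vec![
        Tree::Token(Tok::Ident("fn".into())),
        Tree::Delimited('(', vec![Tree::Token(Tok::Ident("x".into()))]),
    ];
    let mut cursor = Cursor::new(trees);
    loop {
        let tok = cursor.next();
        println!("{:?}", tok);
        if tok == Tok::Eof {
            break;
        }
    }
}
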
diff --git a/compiler/rustc_parse/src/parser/nonterminal.rs b/compiler/rustc_parse/src/parser/nonterminal.rs
index 239ed79ce..7a4d53ed8 100644
--- a/compiler/rustc_parse/src/parser/nonterminal.rs
+++ b/compiler/rustc_parse/src/parser/nonterminal.rs
@@ -2,9 +2,11 @@ use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, NonterminalKind, Token};
use rustc_ast::HasTokens;
use rustc_ast_pretty::pprust;
+use rustc_errors::IntoDiagnostic;
use rustc_errors::PResult;
use rustc_span::symbol::{kw, Ident};
+use crate::errors::UnexpectedNonterminal;
use crate::parser::pat::{CommaRecoveryMode, RecoverColon, RecoverComma};
use crate::parser::{FollowedByType, ForceCollect, NtOrTt, Parser, PathStyle};
@@ -113,7 +115,8 @@ impl<'a> Parser<'a> {
NonterminalKind::Item => match self.parse_item(ForceCollect::Yes)? {
Some(item) => token::NtItem(item),
None => {
- return Err(self.struct_span_err(self.token.span, "expected an item keyword"));
+ return Err(UnexpectedNonterminal::Item(self.token.span)
+ .into_diagnostic(&self.sess.span_diagnostic));
}
},
NonterminalKind::Block => {
@@ -124,7 +127,8 @@ impl<'a> Parser<'a> {
NonterminalKind::Stmt => match self.parse_stmt(ForceCollect::Yes)? {
Some(s) => token::NtStmt(P(s)),
None => {
- return Err(self.struct_span_err(self.token.span, "expected a statement"));
+ return Err(UnexpectedNonterminal::Statement(self.token.span)
+ .into_diagnostic(&self.sess.span_diagnostic));
}
},
NonterminalKind::PatParam { .. } | NonterminalKind::PatWithOr { .. } => {
@@ -160,9 +164,10 @@ impl<'a> Parser<'a> {
token::NtIdent(ident, is_raw)
}
NonterminalKind::Ident => {
- let token_str = pprust::token_to_string(&self.token);
- let msg = &format!("expected ident, found {}", &token_str);
- return Err(self.struct_span_err(self.token.span, msg));
+ return Err(UnexpectedNonterminal::Ident {
+ span: self.token.span,
+ token: self.token.clone(),
+ }.into_diagnostic(&self.sess.span_diagnostic));
}
NonterminalKind::Path => token::NtPath(
P(self.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type))?),
@@ -175,9 +180,10 @@ impl<'a> Parser<'a> {
if self.check_lifetime() {
token::NtLifetime(self.expect_lifetime().ident)
} else {
- let token_str = pprust::token_to_string(&self.token);
- let msg = &format!("expected a lifetime, found `{}`", &token_str);
- return Err(self.struct_span_err(self.token.span, msg));
+ return Err(UnexpectedNonterminal::Lifetime {
+ span: self.token.span,
+ token: self.token.clone(),
+ }.into_diagnostic(&self.sess.span_diagnostic));
}
}
};
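
The nonterminal.rs hunk swaps hand-formatted `struct_span_err` calls for typed error values that are turned into diagnostics at the point of emission. Below is a minimal standalone sketch of that pattern; `Span`, `Diag`, and `IntoDiag` are simplified stand-ins for the rustc_errors machinery, and the enum collapses the separate `UnexpectedNonterminal` cases purely for illustration.

#[derive(Clone, Copy, Debug)]
struct Span {
    lo: usize,
    hi: usize,
}

struct Diag {
    span: Span,
    message: String,
}

// Stand-in for the conversion trait; each error type knows how to render itself.
trait IntoDiag {
    fn into_diag(self) -> Diag;
}

enum UnexpectedNonterminal {
    Item(Span),
    Statement(Span),
    Ident { span: Span, token: String },
}

impl IntoDiag for UnexpectedNonterminal {
    fn into_diag(self) -> Diag {
        match self {
            UnexpectedNonterminal::Item(span) => Diag {
                span,
                message: "expected an item keyword".to_string(),
            },
            UnexpectedNonterminal::Statement(span) => Diag {
                span,
                message: "expected a statement".to_string(),
            },
            UnexpectedNonterminal::Ident { span, token } => Diag {
                span,
                message: format!("expected ident, found {token}"),
            },
        }
    }
}

fn main() {
    let errors = vec![
        UnexpectedNonterminal::Item(Span { lo: 0, hi: 4 }),
        UnexpectedNonterminal::Statement(Span { lo: 0, hi: 4 }),
        UnexpectedNonterminal::Ident { span: Span { lo: 3, hi: 7 }, token: "`1`".into() },
    ];
    for err in errors {
        let diag = err.into_diag();
        println!("{:?}: {}", diag.span, diag.message);
    }
}
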
diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs
index e73a17ced..8e920f1c4 100644
--- a/compiler/rustc_parse/src/parser/pat.rs
+++ b/compiler/rustc_parse/src/parser/pat.rs
@@ -1,5 +1,14 @@
use super::{ForceCollect, Parser, PathStyle, TrailingToken};
-use crate::errors::RemoveLet;
+use crate::errors::{
+ AmbiguousRangePattern, DotDotDotForRemainingFields, DotDotDotRangeToPatternNotAllowed,
+ DotDotDotRestPattern, EnumPatternInsteadOfIdentifier, ExpectedBindingLeftOfAt,
+ ExpectedCommaAfterPatternField, InclusiveRangeExtraEquals, InclusiveRangeMatchArrow,
+ InclusiveRangeNoEnd, InvalidMutInPattern, PatternOnWrongSideOfAt, RefMutOrderIncorrect,
+ RemoveLet, RepeatedMutInPattern, TopLevelOrPatternNotAllowed, TopLevelOrPatternNotAllowedSugg,
+ TrailingVertNotAllowed, UnexpectedLifetimeInPattern, UnexpectedVertVertBeforeFunctionParam,
+ UnexpectedVertVertInPattern,
+};
+use crate::fluent_generated as fluent;
use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole};
use rustc_ast::mut_visit::{noop_visit_pat, MutVisitor};
use rustc_ast::ptr::P;
@@ -9,15 +18,32 @@ use rustc_ast::{
PatField, PatKind, Path, QSelf, RangeEnd, RangeSyntax,
};
use rustc_ast_pretty::pprust;
-use rustc_errors::{struct_span_err, Applicability, DiagnosticBuilder, ErrorGuaranteed, PResult};
+use rustc_errors::{Applicability, DiagnosticBuilder, ErrorGuaranteed, IntoDiagnostic, PResult};
use rustc_session::errors::ExprParenthesesNeeded;
use rustc_span::source_map::{respan, Span, Spanned};
use rustc_span::symbol::{kw, sym, Ident};
+use thin_vec::{thin_vec, ThinVec};
-pub(super) type Expected = Option<&'static str>;
+#[derive(PartialEq, Copy, Clone)]
+pub enum Expected {
+ ParameterName,
+ ArgumentName,
+ Identifier,
+ BindingPattern,
+}
-/// `Expected` for function and lambda parameter patterns.
-pub(super) const PARAM_EXPECTED: Expected = Some("parameter name");
+impl Expected {
+ // FIXME(#100717): migrate users of this to proper localization
+ fn to_string_or_fallback(expected: Option<Expected>) -> &'static str {
+ match expected {
+ Some(Expected::ParameterName) => "parameter name",
+ Some(Expected::ArgumentName) => "argument name",
+ Some(Expected::Identifier) => "identifier",
+ Some(Expected::BindingPattern) => "binding pattern",
+ None => "pattern",
+ }
+ }
+}
const WHILE_PARSING_OR_MSG: &str = "while parsing this or-pattern starting here";
@@ -54,13 +80,19 @@ enum EatOrResult {
None,
}
+/// The syntax location of a given pattern. Used for diagnostics.
+pub(super) enum PatternLocation {
+ LetBinding,
+ FunctionParameter,
+}
+
impl<'a> Parser<'a> {
/// Parses a pattern.
///
/// Corresponds to `pat<no_top_alt>` in RFC 2535 and does not admit or-patterns
/// at the top level. Used when parsing the parameters of lambda expressions,
/// functions, function pointers, and `pat` macro fragments.
- pub fn parse_pat_no_top_alt(&mut self, expected: Expected) -> PResult<'a, P<Pat>> {
+ pub fn parse_pat_no_top_alt(&mut self, expected: Option<Expected>) -> PResult<'a, P<Pat>> {
self.parse_pat_with_range_pat(true, expected)
}
@@ -74,7 +106,7 @@ impl<'a> Parser<'a> {
/// simplify the grammar somewhat.
pub fn parse_pat_allow_top_alt(
&mut self,
- expected: Expected,
+ expected: Option<Expected>,
rc: RecoverComma,
ra: RecoverColon,
rt: CommaRecoveryMode,
@@ -86,7 +118,7 @@ impl<'a> Parser<'a> {
/// recovered).
fn parse_pat_allow_top_alt_inner(
&mut self,
- expected: Expected,
+ expected: Option<Expected>,
rc: RecoverComma,
ra: RecoverColon,
rt: CommaRecoveryMode,
@@ -123,7 +155,7 @@ impl<'a> Parser<'a> {
// If there was a leading vert, treat this as an or-pattern. This improves
// diagnostics.
let span = leading_vert_span.to(self.prev_token.span);
- return Ok((self.mk_pat(span, PatKind::Or(vec![first_pat])), trailing_vert));
+ return Ok((self.mk_pat(span, PatKind::Or(thin_vec![first_pat])), trailing_vert));
}
return Ok((first_pat, trailing_vert));
@@ -131,7 +163,7 @@ impl<'a> Parser<'a> {
// Parse the patterns `p_1 | ... | p_n` where `n > 0`.
let lo = leading_vert_span.unwrap_or(first_pat.span);
- let mut pats = vec![first_pat];
+ let mut pats = thin_vec![first_pat];
loop {
match self.eat_or_separator(Some(lo)) {
EatOrResult::AteOr => {}
@@ -165,9 +197,9 @@ impl<'a> Parser<'a> {
/// otherwise).
pub(super) fn parse_pat_before_ty(
&mut self,
- expected: Expected,
+ expected: Option<Expected>,
rc: RecoverComma,
- syntax_loc: &str,
+ syntax_loc: PatternLocation,
) -> PResult<'a, (P<Pat>, bool)> {
// We use `parse_pat_allow_top_alt` regardless of whether we actually want top-level
// or-patterns so that we can detect when a user tries to use it. This allows us to print a
@@ -181,27 +213,41 @@ impl<'a> Parser<'a> {
let colon = self.eat(&token::Colon);
if let PatKind::Or(pats) = &pat.kind {
- let msg = format!("top-level or-patterns are not allowed in {}", syntax_loc);
- let (help, fix) = if pats.len() == 1 {
- // If all we have is a leading vert, then print a special message. This is the case
- // if `parse_pat_allow_top_alt` returns an or-pattern with one variant.
- let msg = "remove the `|`";
- let fix = pprust::pat_to_string(&pat);
- (msg, fix)
- } else {
- let msg = "wrap the pattern in parentheses";
- let fix = format!("({})", pprust::pat_to_string(&pat));
- (msg, fix)
- };
+ let span = pat.span;
if trailing_vert {
// We already emitted an error and suggestion to remove the trailing vert. Don't
// emit again.
- self.sess.span_diagnostic.delay_span_bug(pat.span, &msg);
+
+ // FIXME(#100717): pass `TopLevelOrPatternNotAllowed::* { sub: None }` to
+ // `delay_span_bug()` instead of fluent message
+ self.sess.span_diagnostic.delay_span_bug(
+ span,
+ match syntax_loc {
+ PatternLocation::LetBinding => {
+ fluent::parse_or_pattern_not_allowed_in_let_binding
+ }
+ PatternLocation::FunctionParameter => {
+ fluent::parse_or_pattern_not_allowed_in_fn_parameters
+ }
+ },
+ );
} else {
- self.struct_span_err(pat.span, &msg)
- .span_suggestion(pat.span, help, fix, Applicability::MachineApplicable)
- .emit();
+ let pat = pprust::pat_to_string(&pat);
+ let sub = if pats.len() == 1 {
+ Some(TopLevelOrPatternNotAllowedSugg::RemoveLeadingVert { span, pat })
+ } else {
+ Some(TopLevelOrPatternNotAllowedSugg::WrapInParens { span, pat })
+ };
+
+ self.sess.emit_err(match syntax_loc {
+ PatternLocation::LetBinding => {
+ TopLevelOrPatternNotAllowed::LetBinding { span, sub }
+ }
+ PatternLocation::FunctionParameter => {
+ TopLevelOrPatternNotAllowed::FunctionParameter { span, sub }
+ }
+ });
}
}
@@ -218,15 +264,15 @@ impl<'a> Parser<'a> {
// a leading `||` probably doesn't indicate an or-pattern attempt, so we handle that
// separately.
if let token::OrOr = self.token.kind {
- let span = self.token.span;
- let mut err = self.struct_span_err(span, "unexpected `||` before function parameter");
- err.span_suggestion(span, "remove the `||`", "", Applicability::MachineApplicable);
- err.note("alternatives in or-patterns are separated with `|`, not `||`");
- err.emit();
+ self.sess.emit_err(UnexpectedVertVertBeforeFunctionParam { span: self.token.span });
self.bump();
}
- self.parse_pat_before_ty(PARAM_EXPECTED, RecoverComma::No, "function parameters")
+ self.parse_pat_before_ty(
+ Some(Expected::ParameterName),
+ RecoverComma::No,
+ PatternLocation::FunctionParameter,
+ )
}
/// Eat the or-pattern `|` separator.
@@ -236,7 +282,7 @@ impl<'a> Parser<'a> {
EatOrResult::TrailingVert
} else if matches!(self.token.kind, token::OrOr) {
// Found `||`; Recover and pretend we parsed `|`.
- self.ban_unexpected_or_or(lo);
+ self.sess.emit_err(UnexpectedVertVertInPattern { span: self.token.span, start: lo });
self.bump();
EatOrResult::AteOr
} else if self.eat(&token::BinOp(token::Or)) {
@@ -270,7 +316,13 @@ impl<'a> Parser<'a> {
});
match (is_end_ahead, &self.token.kind) {
(true, token::BinOp(token::Or) | token::OrOr) => {
- self.ban_illegal_vert(lo, "trailing", "not allowed in an or-pattern");
+ // A `|` or possibly `||` token shouldn't be here. Ban it.
+ self.sess.emit_err(TrailingVertNotAllowed {
+ span: self.token.span,
+ start: lo,
+ token: self.token.clone(),
+ note_double_vert: matches!(self.token.kind, token::OrOr).then_some(()),
+ });
self.bump();
true
}
@@ -278,46 +330,12 @@ impl<'a> Parser<'a> {
}
}
- /// We have parsed `||` instead of `|`. Error and suggest `|` instead.
- fn ban_unexpected_or_or(&mut self, lo: Option<Span>) {
- let mut err = self.struct_span_err(self.token.span, "unexpected token `||` in pattern");
- err.span_suggestion(
- self.token.span,
- "use a single `|` to separate multiple alternative patterns",
- "|",
- Applicability::MachineApplicable,
- );
- if let Some(lo) = lo {
- err.span_label(lo, WHILE_PARSING_OR_MSG);
- }
- err.emit();
- }
-
- /// A `|` or possibly `||` token shouldn't be here. Ban it.
- fn ban_illegal_vert(&mut self, lo: Option<Span>, pos: &str, ctx: &str) {
- let span = self.token.span;
- let mut err = self.struct_span_err(span, &format!("a {} `|` is {}", pos, ctx));
- err.span_suggestion(
- span,
- &format!("remove the `{}`", pprust::token_to_string(&self.token)),
- "",
- Applicability::MachineApplicable,
- );
- if let Some(lo) = lo {
- err.span_label(lo, WHILE_PARSING_OR_MSG);
- }
- if let token::OrOr = self.token.kind {
- err.note("alternatives in or-patterns are separated with `|`, not `||`");
- }
- err.emit();
- }
-
/// Parses a pattern, with a setting whether modern range patterns (e.g., `a..=b`, `a..b` are
/// allowed).
fn parse_pat_with_range_pat(
&mut self,
allow_range_pat: bool,
- expected: Expected,
+ expected: Option<Expected>,
) -> PResult<'a, P<Pat>> {
maybe_recover_from_interpolated_ty_qpath!(self, true);
maybe_whole!(self, NtPat, |x| x);
@@ -413,7 +431,7 @@ impl<'a> Parser<'a> {
let lt = self.expect_lifetime();
let (lit, _) =
self.recover_unclosed_char(lt.ident, Parser::mk_token_lit_char, |self_| {
- let expected = expected.unwrap_or("pattern");
+ let expected = Expected::to_string_or_fallback(expected);
let msg = format!(
"expected {}, found {}",
expected,
@@ -454,15 +472,7 @@ impl<'a> Parser<'a> {
self.bump(); // `...`
// The user probably mistook `...` for a rest pattern `..`.
- self.struct_span_err(lo, "unexpected `...`")
- .span_label(lo, "not a valid pattern")
- .span_suggestion_short(
- lo,
- "for a rest pattern, use `..` instead of `...`",
- "..",
- Applicability::MachineApplicable,
- )
- .emit();
+ self.sess.emit_err(DotDotDotRestPattern { span: lo });
PatKind::Rest
}
@@ -487,7 +497,7 @@ impl<'a> Parser<'a> {
// At this point we attempt to parse `@ $pat_rhs` and emit an error.
self.bump(); // `@`
let mut rhs = self.parse_pat_no_top_alt(None)?;
- let sp = lhs.span.to(rhs.span);
+ let whole_span = lhs.span.to(rhs.span);
if let PatKind::Ident(_, _, sub @ None) = &mut rhs.kind {
// The user inverted the order, so help them fix that.
@@ -496,27 +506,23 @@ impl<'a> Parser<'a> {
// The RHS is now the full pattern.
*sub = Some(lhs);
- self.struct_span_err(sp, "pattern on wrong side of `@`")
- .span_label(lhs_span, "pattern on the left, should be on the right")
- .span_label(rhs.span, "binding on the right, should be on the left")
- .span_suggestion(
- sp,
- "switch the order",
- pprust::pat_to_string(&rhs),
- Applicability::MachineApplicable,
- )
- .emit();
+ self.sess.emit_err(PatternOnWrongSideOfAt {
+ whole_span,
+ whole_pat: pprust::pat_to_string(&rhs),
+ pattern: lhs_span,
+ binding: rhs.span,
+ });
} else {
// The special case above doesn't apply so we may have e.g. `A(x) @ B(y)`.
rhs.kind = PatKind::Wild;
- self.struct_span_err(sp, "left-hand side of `@` must be a binding")
- .span_label(lhs.span, "interpreted as a pattern, not a binding")
- .span_label(rhs.span, "also a pattern")
- .note("bindings are `x`, `mut x`, `ref x`, and `ref mut x`")
- .emit();
+ self.sess.emit_err(ExpectedBindingLeftOfAt {
+ whole_span,
+ lhs: lhs.span,
+ rhs: rhs.span,
+ });
}
- rhs.span = sp;
+ rhs.span = whole_span;
Ok(rhs)
}
@@ -531,35 +537,23 @@ impl<'a> Parser<'a> {
_ => return,
}
- self.struct_span_err(pat.span, "the range pattern here has ambiguous interpretation")
- .span_suggestion(
- pat.span,
- "add parentheses to clarify the precedence",
- format!("({})", pprust::pat_to_string(&pat)),
- // "ambiguous interpretation" implies that we have to be guessing
- Applicability::MaybeIncorrect,
- )
- .emit();
+ self.sess
+ .emit_err(AmbiguousRangePattern { span: pat.span, pat: pprust::pat_to_string(&pat) });
}
/// Parse `&pat` / `&mut pat`.
- fn parse_pat_deref(&mut self, expected: Expected) -> PResult<'a, PatKind> {
+ fn parse_pat_deref(&mut self, expected: Option<Expected>) -> PResult<'a, PatKind> {
self.expect_and()?;
- self.recover_lifetime_in_deref_pat();
- let mutbl = self.parse_mutability();
- let subpat = self.parse_pat_with_range_pat(false, expected)?;
- Ok(PatKind::Ref(subpat, mutbl))
- }
-
- fn recover_lifetime_in_deref_pat(&mut self) {
if let token::Lifetime(name) = self.token.kind {
self.bump(); // `'a`
- let span = self.prev_token.span;
- self.struct_span_err(span, &format!("unexpected lifetime `{}` in pattern", name))
- .span_suggestion(span, "remove the lifetime", "", Applicability::MachineApplicable)
- .emit();
+ self.sess
+ .emit_err(UnexpectedLifetimeInPattern { span: self.prev_token.span, symbol: name });
}
+
+ let mutbl = self.parse_mutability();
+ let subpat = self.parse_pat_with_range_pat(false, expected)?;
+ Ok(PatKind::Ref(subpat, mutbl))
}
/// Parse a tuple or parenthesis pattern.
@@ -587,7 +581,8 @@ impl<'a> Parser<'a> {
let mut_span = self.prev_token.span;
if self.eat_keyword(kw::Ref) {
- return self.recover_mut_ref_ident(mut_span);
+ self.sess.emit_err(RefMutOrderIncorrect { span: mut_span.to(self.prev_token.span) });
+ return self.parse_pat_ident(BindingAnnotation::REF_MUT);
}
self.recover_additional_muts();
@@ -600,7 +595,7 @@ impl<'a> Parser<'a> {
}
// Parse the pattern we hope to be an identifier.
- let mut pat = self.parse_pat_no_top_alt(Some("identifier"))?;
+ let mut pat = self.parse_pat_no_top_alt(Some(Expected::Identifier))?;
// If we don't have `mut $ident (@ pat)?`, error.
if let PatKind::Ident(BindingAnnotation(ByRef::No, m @ Mutability::Not), ..) = &mut pat.kind
@@ -617,22 +612,6 @@ impl<'a> Parser<'a> {
Ok(pat.into_inner().kind)
}
- /// Recover on `mut ref? ident @ pat` and suggest
- /// that the order of `mut` and `ref` is incorrect.
- fn recover_mut_ref_ident(&mut self, lo: Span) -> PResult<'a, PatKind> {
- let mutref_span = lo.to(self.prev_token.span);
- self.struct_span_err(mutref_span, "the order of `mut` and `ref` is incorrect")
- .span_suggestion(
- mutref_span,
- "try switching the order",
- "ref mut",
- Applicability::MachineApplicable,
- )
- .emit();
-
- self.parse_pat_ident(BindingAnnotation::REF_MUT)
- }
-
/// Turn all by-value immutable bindings in a pattern into mutable bindings.
/// Returns `true` if any change was made.
fn make_all_value_bindings_mutable(pat: &mut P<Pat>) -> bool {
@@ -657,16 +636,13 @@ impl<'a> Parser<'a> {
/// Error on `mut $pat` where `$pat` is not an ident.
fn ban_mut_general_pat(&self, lo: Span, pat: &Pat, changed_any_binding: bool) {
let span = lo.to(pat.span);
- let fix = pprust::pat_to_string(&pat);
- let (problem, suggestion) = if changed_any_binding {
- ("`mut` must be attached to each individual binding", "add `mut` to each binding")
+ let pat = pprust::pat_to_string(&pat);
+
+ self.sess.emit_err(if changed_any_binding {
+ InvalidMutInPattern::NestedIdent { span, pat }
} else {
- ("`mut` must be followed by a named binding", "remove the `mut` prefix")
- };
- self.struct_span_err(span, problem)
- .span_suggestion(span, suggestion, fix, Applicability::MachineApplicable)
- .note("`mut` may be followed by `variable` and `variable @ pattern`")
- .emit();
+ InvalidMutInPattern::NonIdent { span, pat }
+ });
}
/// Eat any extraneous `mut`s and error + recover if we ate any.
@@ -677,15 +653,7 @@ impl<'a> Parser<'a> {
return;
}
- let span = lo.to(self.prev_token.span);
- self.struct_span_err(span, "`mut` on a binding may not be repeated")
- .span_suggestion(
- span,
- "remove the additional `mut`s",
- "",
- Applicability::MachineApplicable,
- )
- .emit();
+ self.sess.emit_err(RepeatedMutInPattern { span: lo.to(self.prev_token.span) });
}
/// Parse macro invocation
@@ -699,11 +667,11 @@ impl<'a> Parser<'a> {
fn fatal_unexpected_non_pat(
&mut self,
err: DiagnosticBuilder<'a, ErrorGuaranteed>,
- expected: Expected,
+ expected: Option<Expected>,
) -> PResult<'a, P<Pat>> {
err.cancel();
- let expected = expected.unwrap_or("pattern");
+ let expected = Expected::to_string_or_fallback(expected);
let msg = format!("expected {}, found {}", expected, super::token_descr(&self.token));
let mut err = self.struct_span_err(self.token.span, &msg);
@@ -745,49 +713,44 @@ impl<'a> Parser<'a> {
// Parsing e.g. `X..`.
if let RangeEnd::Included(_) = re.node {
// FIXME(Centril): Consider semantic errors instead in `ast_validation`.
- self.inclusive_range_with_incorrect_end(re.span);
+ self.inclusive_range_with_incorrect_end();
}
None
};
Ok(PatKind::Range(Some(begin), end, re))
}
- pub(super) fn inclusive_range_with_incorrect_end(&mut self, span: Span) {
+ pub(super) fn inclusive_range_with_incorrect_end(&mut self) {
let tok = &self.token;
-
+ let span = self.prev_token.span;
// If the user typed "..==" instead of "..=", we want to give them
// a specific error message telling them to use "..=".
+ // If they typed "..=>", suggest they use ".. =>".
// Otherwise, we assume that they meant to type a half open exclusive
// range and give them an error telling them to do that instead.
- if matches!(tok.kind, token::Eq) && tok.span.lo() == span.hi() {
- let span_with_eq = span.to(tok.span);
+ let no_space = tok.span.lo() == span.hi();
+ match tok.kind {
+ token::Eq if no_space => {
+ let span_with_eq = span.to(tok.span);
- // Ensure the user doesn't receive unhelpful unexpected token errors
- self.bump();
- if self.is_pat_range_end_start(0) {
- let _ = self.parse_pat_range_end().map_err(|e| e.cancel());
- }
+ // Ensure the user doesn't receive unhelpful unexpected token errors
+ self.bump();
+ if self.is_pat_range_end_start(0) {
+ let _ = self.parse_pat_range_end().map_err(|e| e.cancel());
+ }
- self.error_inclusive_range_with_extra_equals(span_with_eq);
- } else {
- self.error_inclusive_range_with_no_end(span);
+ self.sess.emit_err(InclusiveRangeExtraEquals { span: span_with_eq });
+ }
+ token::Gt if no_space => {
+ let after_pat = span.with_hi(span.hi() - rustc_span::BytePos(1)).shrink_to_hi();
+ self.sess.emit_err(InclusiveRangeMatchArrow { span, arrow: tok.span, after_pat });
+ }
+ _ => {
+ self.sess.emit_err(InclusiveRangeNoEnd { span });
+ }
}
}
- fn error_inclusive_range_with_extra_equals(&self, span: Span) {
- self.struct_span_err(span, "unexpected `=` after inclusive range")
- .span_suggestion_short(span, "use `..=` instead", "..=", Applicability::MaybeIncorrect)
- .note("inclusive ranges end with a single equals sign (`..=`)")
- .emit();
- }
-
- fn error_inclusive_range_with_no_end(&self, span: Span) {
- struct_span_err!(self.sess.span_diagnostic, span, E0586, "inclusive range with no end")
- .span_suggestion_short(span, "use `..` instead", "..", Applicability::MachineApplicable)
- .note("inclusive ranges must be bounded at the end (`..=b` or `a..=b`)")
- .emit();
- }
-
/// Parse a range-to pattern, `..X` or `..=X` where `X` remains to be parsed.
///
/// The form `...X` is prohibited to reduce confusion with the potential
@@ -796,14 +759,7 @@ impl<'a> Parser<'a> {
let end = self.parse_pat_range_end()?;
if let RangeEnd::Included(syn @ RangeSyntax::DotDotDot) = &mut re.node {
*syn = RangeSyntax::DotDotEq;
- self.struct_span_err(re.span, "range-to patterns with `...` are not allowed")
- .span_suggestion_short(
- re.span,
- "use `..=` instead",
- "..=",
- Applicability::MachineApplicable,
- )
- .emit();
+ self.sess.emit_err(DotDotDotRangeToPatternNotAllowed { span: re.span });
}
Ok(PatKind::Range(None, Some(end), re))
}
@@ -868,7 +824,7 @@ impl<'a> Parser<'a> {
fn parse_pat_ident(&mut self, binding_annotation: BindingAnnotation) -> PResult<'a, PatKind> {
let ident = self.parse_ident()?;
let sub = if self.eat(&token::At) {
- Some(self.parse_pat_no_top_alt(Some("binding pattern"))?)
+ Some(self.parse_pat_no_top_alt(Some(Expected::BindingPattern))?)
} else {
None
};
@@ -879,8 +835,8 @@ impl<'a> Parser<'a> {
// binding mode then we do not end up here, because the lookahead
// will direct us over to `parse_enum_variant()`.
if self.token == token::OpenDelim(Delimiter::Parenthesis) {
- return Err(self
- .struct_span_err(self.prev_token.span, "expected identifier, found enum pattern"));
+ return Err(EnumPatternInsteadOfIdentifier { span: self.prev_token.span }
+ .into_diagnostic(&self.sess.span_diagnostic));
}
Ok(PatKind::Ident(binding_annotation, ident, sub))
@@ -897,7 +853,7 @@ impl<'a> Parser<'a> {
e.span_label(path.span, "while parsing the fields for this pattern");
e.emit();
self.recover_stmt();
- (vec![], true)
+ (ThinVec::new(), true)
});
self.bump();
Ok(PatKind::Struct(qself, path, fields, etc))
@@ -962,7 +918,7 @@ impl<'a> Parser<'a> {
// We cannot use `parse_pat_ident()` since it will complain `box`
// is not an identifier.
let sub = if self.eat(&token::At) {
- Some(self.parse_pat_no_top_alt(Some("binding pattern"))?)
+ Some(self.parse_pat_no_top_alt(Some(Expected::BindingPattern))?)
} else {
None
};
@@ -976,8 +932,8 @@ impl<'a> Parser<'a> {
}
/// Parses the fields of a struct-like pattern.
- fn parse_pat_fields(&mut self) -> PResult<'a, (Vec<PatField>, bool)> {
- let mut fields = Vec::new();
+ fn parse_pat_fields(&mut self) -> PResult<'a, (ThinVec<PatField>, bool)> {
+ let mut fields = ThinVec::new();
let mut etc = false;
let mut ate_comma = true;
let mut delayed_err: Option<DiagnosticBuilder<'a, ErrorGuaranteed>> = None;
@@ -997,7 +953,8 @@ impl<'a> Parser<'a> {
// check that a comma comes after every field
if !ate_comma {
- let err = self.struct_span_err(self.token.span, "expected `,`");
+ let err = ExpectedCommaAfterPatternField { span: self.token.span }
+ .into_diagnostic(&self.sess.span_diagnostic);
if let Some(mut delayed) = delayed_err {
delayed.emit();
}
@@ -1005,12 +962,15 @@ impl<'a> Parser<'a> {
}
ate_comma = false;
- if self.check(&token::DotDot) || self.token == token::DotDotDot {
+ if self.check(&token::DotDot)
+ || self.check_noexpect(&token::DotDotDot)
+ || self.check_keyword(kw::Underscore)
+ {
etc = true;
let mut etc_sp = self.token.span;
- self.recover_one_fewer_dotdot();
- self.bump(); // `..` || `...`
+ self.recover_bad_dot_dot();
+ self.bump(); // `..` || `...` || `_`
if self.token == token::CloseDelim(Delimiter::Brace) {
etc_span = Some(etc_sp);
@@ -1103,21 +1063,15 @@ impl<'a> Parser<'a> {
Ok((fields, etc))
}
- /// Recover on `...` as if it were `..` to avoid further errors.
+ /// Recover on `...` or `_` as if it were `..` to avoid further errors.
/// See issue #46718.
- fn recover_one_fewer_dotdot(&self) {
- if self.token != token::DotDotDot {
+ fn recover_bad_dot_dot(&self) {
+ if self.token == token::DotDot {
return;
}
- self.struct_span_err(self.token.span, "expected field pattern, found `...`")
- .span_suggestion(
- self.token.span,
- "to omit remaining fields, use one fewer `.`",
- "..",
- Applicability::MachineApplicable,
- )
- .emit();
+ let token_str = pprust::token_to_string(&self.token);
+ self.sess.emit_err(DotDotDotForRemainingFields { span: self.token.span, token_str });
}
fn parse_pat_field(&mut self, lo: Span, attrs: AttrVec) -> PResult<'a, PatField> {
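
The `Expected` change above replaces stringly-typed `Option<&'static str>` values with an enum that is rendered to text only at the diagnostic boundary. A standalone toy reproduction of `to_string_or_fallback` (not the compiler's own code) shows the lookup:

#[derive(PartialEq, Copy, Clone)]
enum Expected {
    ParameterName,
    ArgumentName,
    Identifier,
    BindingPattern,
}

impl Expected {
    // `None` falls back to the generic word "pattern", mirroring the diff.
    fn to_string_or_fallback(expected: Option<Expected>) -> &'static str {
        match expected {
            Some(Expected::ParameterName) => "parameter name",
            Some(Expected::ArgumentName) => "argument name",
            Some(Expected::Identifier) => "identifier",
            Some(Expected::BindingPattern) => "binding pattern",
            None => "pattern",
        }
    }
}

fn main() {
    for expected in [
        None,
        Some(Expected::ParameterName),
        Some(Expected::ArgumentName),
        Some(Expected::Identifier),
        Some(Expected::BindingPattern),
    ] {
        println!(
            "expected {}, found `1 + 2`",
            Expected::to_string_or_fallback(expected)
        );
    }
}
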
diff --git a/compiler/rustc_parse/src/parser/path.rs b/compiler/rustc_parse/src/parser/path.rs
index 5333d3b85..b50d2984a 100644
--- a/compiler/rustc_parse/src/parser/path.rs
+++ b/compiler/rustc_parse/src/parser/path.rs
@@ -332,7 +332,7 @@ impl<'a> Parser<'a> {
style: PathStyle,
lo: Span,
ty_generics: Option<&Generics>,
- ) -> PResult<'a, Vec<AngleBracketedArg>> {
+ ) -> PResult<'a, ThinVec<AngleBracketedArg>> {
// We need to detect whether there are extra leading left angle brackets and produce an
// appropriate error and suggestion. This cannot be implemented by looking ahead at
// upcoming tokens for a matching `>` character - if there are unmatched `<` tokens
@@ -404,7 +404,7 @@ impl<'a> Parser<'a> {
let is_first_invocation = style == PathStyle::Expr;
// Take a snapshot before attempting to parse - we can restore this later.
- let snapshot = if is_first_invocation { Some(self.clone()) } else { None };
+ let snapshot = is_first_invocation.then(|| self.clone());
debug!("parse_generic_args_with_leading_angle_bracket_recovery: (snapshotting)");
match self.parse_angle_args(ty_generics) {
@@ -472,8 +472,8 @@ impl<'a> Parser<'a> {
pub(super) fn parse_angle_args(
&mut self,
ty_generics: Option<&Generics>,
- ) -> PResult<'a, Vec<AngleBracketedArg>> {
- let mut args = Vec::new();
+ ) -> PResult<'a, ThinVec<AngleBracketedArg>> {
+ let mut args = ThinVec::new();
while let Some(arg) = self.parse_angle_arg(ty_generics)? {
args.push(arg);
if !self.eat(&token::Comma) {
@@ -653,7 +653,7 @@ impl<'a> Parser<'a> {
pub(super) fn parse_const_arg(&mut self) -> PResult<'a, AnonConst> {
// Parse const argument.
let value = if let token::OpenDelim(Delimiter::Brace) = self.token.kind {
- self.parse_block_expr(None, self.token.span, BlockCheckMode::Default)?
+ self.parse_expr_block(None, self.token.span, BlockCheckMode::Default)?
} else {
self.handle_unambiguous_unbraced_const_arg()?
};
@@ -675,22 +675,42 @@ impl<'a> Parser<'a> {
GenericArg::Const(self.parse_const_arg()?)
} else if self.check_type() {
// Parse type argument.
- let is_const_fn =
- self.look_ahead(1, |t| t.kind == token::OpenDelim(Delimiter::Parenthesis));
- let mut snapshot = self.create_snapshot_for_diagnostic();
+
+ // Proactively create a parser snapshot enabling us to rewind and try to reparse the
+ // input as a const expression in case we fail to parse a type. If we successfully
+ // do so, we will report an error that it needs to be wrapped in braces.
+ let mut snapshot = None;
+ if self.may_recover() && self.token.can_begin_expr() {
+ snapshot = Some(self.create_snapshot_for_diagnostic());
+ }
+
match self.parse_ty() {
- Ok(ty) => GenericArg::Type(ty),
+ Ok(ty) => {
+ // Since the type parser recovers from some malformed slice and array types and
+ // successfully returns a type, we need to look for `TyKind::Err`s in the
+ // type to determine if error recovery has occurred and if the input is not a
+ // syntactically valid type after all.
+ if let ast::TyKind::Slice(inner_ty) | ast::TyKind::Array(inner_ty, _) = &ty.kind
+ && let ast::TyKind::Err = inner_ty.kind
+ && let Some(snapshot) = snapshot
+ && let Some(expr) = self.recover_unbraced_const_arg_that_can_begin_ty(snapshot)
+ {
+ return Ok(Some(self.dummy_const_arg_needs_braces(
+ self.struct_span_err(expr.span, "invalid const generic expression"),
+ expr.span,
+ )));
+ }
+
+ GenericArg::Type(ty)
+ }
Err(err) => {
- if is_const_fn {
- match (*snapshot).parse_expr_res(Restrictions::CONST_EXPR, None) {
- Ok(expr) => {
- self.restore_snapshot(snapshot);
- return Ok(Some(self.dummy_const_arg_needs_braces(err, expr.span)));
- }
- Err(err) => {
- err.cancel();
- }
- }
+ if let Some(snapshot) = snapshot
+ && let Some(expr) = self.recover_unbraced_const_arg_that_can_begin_ty(snapshot)
+ {
+ return Ok(Some(self.dummy_const_arg_needs_braces(
+ err,
+ expr.span,
+ )));
}
// Try to recover from possible `const` arg without braces.
return self.recover_const_arg(start, err).map(Some);
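
The `parse_generic_arg` hunk relies on the parser's snapshot-and-rewind recovery: clone the state before an attempt that may fail, then restore the clone if a fallback parse of the same tokens succeeds. The following is a minimal standalone sketch of that idea with a toy `Parser`; the token and grammar rules are invented for illustration only.

#[derive(Clone)]
struct Parser {
    tokens: Vec<&'static str>,
    pos: usize,
}

impl Parser {
    fn peek(&self) -> Option<&'static str> {
        self.tokens.get(self.pos).copied()
    }

    fn bump(&mut self) -> Option<&'static str> {
        let tok = self.peek();
        self.pos += 1;
        tok
    }

    // Pretend "types" are single alphabetic identifiers.
    fn parse_ty(&mut self) -> Result<&'static str, String> {
        match self.bump() {
            Some(tok) if tok.chars().all(char::is_alphabetic) => Ok(tok),
            other => Err(format!("expected type, found {:?}", other)),
        }
    }

    // Pretend "const expressions" are integer literals.
    fn parse_const_expr(&mut self) -> Result<&'static str, String> {
        match self.bump() {
            Some(tok) if tok.chars().all(|c| c.is_ascii_digit()) => Ok(tok),
            other => Err(format!("expected const expression, found {:?}", other)),
        }
    }

    fn parse_generic_arg(&mut self) -> Result<String, String> {
        // Only snapshot when recovery might be useful, mirroring
        // `let mut snapshot = None; if self.may_recover() && ... { snapshot = Some(...) }`.
        let snapshot = self.peek().is_some().then(|| self.clone());

        match self.parse_ty() {
            Ok(ty) => Ok(format!("type `{ty}`")),
            Err(err) => {
                if let Some(snapshot) = snapshot {
                    // Rewind and retry the same tokens as a const argument.
                    *self = snapshot;
                    if let Ok(expr) = self.parse_const_expr() {
                        return Ok(format!("const arg `{expr}` (needs braces)"));
                    }
                }
                Err(err)
            }
        }
    }
}

fn main() {
    let mut p = Parser { tokens: vec!["3"], pos: 0 };
    println!("{:?}", p.parse_generic_arg());
}
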
diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs
index 4ff9927aa..92a22ffc2 100644
--- a/compiler/rustc_parse/src/parser/stmt.rs
+++ b/compiler/rustc_parse/src/parser/stmt.rs
@@ -1,18 +1,13 @@
use super::attr::InnerAttrForbiddenReason;
use super::diagnostics::AttemptLocalParseRecovery;
use super::expr::LhsExpr;
-use super::pat::RecoverComma;
+use super::pat::{PatternLocation, RecoverComma};
use super::path::PathStyle;
use super::TrailingToken;
use super::{
AttrWrapper, BlockMode, FnParseMode, ForceCollect, Parser, Restrictions, SemiColonMode,
};
-use crate::errors::{
- AssignmentElseNotAllowed, CompoundAssignmentExpressionInLet, ConstLetMutuallyExclusive,
- DocCommentDoesNotDocumentAnything, ExpectedStatementAfterOuterAttr, InvalidCurlyInLetElse,
- InvalidExpressionInLetElse, InvalidIdentiferStartsWithNumber, InvalidVariableDeclaration,
- InvalidVariableDeclarationSub, WrapExpressionInParentheses,
-};
+use crate::errors;
use crate::maybe_whole;
use rustc_ast as ast;
@@ -25,8 +20,8 @@ use rustc_ast::{StmtKind, DUMMY_NODE_ID};
use rustc_errors::{Applicability, DiagnosticBuilder, ErrorGuaranteed, PResult};
use rustc_span::source_map::{BytePos, Span};
use rustc_span::symbol::{kw, sym};
-
use std::mem;
+use thin_vec::{thin_vec, ThinVec};
impl<'a> Parser<'a> {
/// Parses a statement. This stops just before trailing semicolons on everything but items.
@@ -64,40 +59,45 @@ impl<'a> Parser<'a> {
if self.token.is_keyword(kw::Mut) && self.is_keyword_ahead(1, &[kw::Let]) {
self.bump();
let mut_let_span = lo.to(self.token.span);
- self.sess.emit_err(InvalidVariableDeclaration {
+ self.sess.emit_err(errors::InvalidVariableDeclaration {
span: mut_let_span,
- sub: InvalidVariableDeclarationSub::SwitchMutLetOrder(mut_let_span),
+ sub: errors::InvalidVariableDeclarationSub::SwitchMutLetOrder(mut_let_span),
});
}
Ok(Some(if self.token.is_keyword(kw::Let) {
self.parse_local_mk(lo, attrs, capture_semi, force_collect)?
} else if self.is_kw_followed_by_ident(kw::Mut) && self.may_recover() {
- self.recover_stmt_local_after_let(lo, attrs, InvalidVariableDeclarationSub::MissingLet)?
+ self.recover_stmt_local_after_let(
+ lo,
+ attrs,
+ errors::InvalidVariableDeclarationSub::MissingLet,
+ )?
} else if self.is_kw_followed_by_ident(kw::Auto) && self.may_recover() {
self.bump(); // `auto`
self.recover_stmt_local_after_let(
lo,
attrs,
- InvalidVariableDeclarationSub::UseLetNotAuto,
+ errors::InvalidVariableDeclarationSub::UseLetNotAuto,
)?
} else if self.is_kw_followed_by_ident(sym::var) && self.may_recover() {
self.bump(); // `var`
self.recover_stmt_local_after_let(
lo,
attrs,
- InvalidVariableDeclarationSub::UseLetNotVar,
+ errors::InvalidVariableDeclarationSub::UseLetNotVar,
)?
} else if self.check_path() && !self.token.is_qpath_start() && !self.is_path_start_item() {
// We have avoided contextual keywords like `union`, items with `crate` visibility,
// or `auto trait` items. We aim to parse an arbitrary path `a::b` but not something
// that starts like a path (1 token) but is in fact not a path.
// Also, we avoid stealing syntax from `parse_item_`.
- if force_collect == ForceCollect::Yes {
- self.collect_tokens_no_attrs(|this| this.parse_stmt_path_start(lo, attrs))
- } else {
- self.parse_stmt_path_start(lo, attrs)
- }?
+ match force_collect {
+ ForceCollect::Yes => {
+ self.collect_tokens_no_attrs(|this| this.parse_stmt_path_start(lo, attrs))?
+ }
+ ForceCollect::No => self.parse_stmt_path_start(lo, attrs)?,
+ }
} else if let Some(item) = self.parse_item_common(
attrs.clone(),
false,
@@ -113,18 +113,17 @@ impl<'a> Parser<'a> {
self.mk_stmt(lo, StmtKind::Empty)
} else if self.token != token::CloseDelim(Delimiter::Brace) {
// Remainder are line-expr stmts.
- let e = if force_collect == ForceCollect::Yes {
- self.collect_tokens_no_attrs(|this| {
+ let e = match force_collect {
+ ForceCollect::Yes => self.collect_tokens_no_attrs(|this| {
this.parse_expr_res(Restrictions::STMT_EXPR, Some(attrs))
- })
- } else {
- self.parse_expr_res(Restrictions::STMT_EXPR, Some(attrs))
- }?;
+ })?,
+ ForceCollect::No => self.parse_expr_res(Restrictions::STMT_EXPR, Some(attrs))?,
+ };
if matches!(e.kind, ExprKind::Assign(..)) && self.eat_keyword(kw::Else) {
let bl = self.parse_block()?;
// Destructuring assignment ... else.
// This is not allowed, but point it out in a nice way.
- self.sess.emit_err(AssignmentElseNotAllowed { span: e.span.to(bl.span) });
+ self.sess.emit_err(errors::AssignmentElseNotAllowed { span: e.span.to(bl.span) });
}
self.mk_stmt(lo.to(e.span), StmtKind::Expr(e))
} else {
@@ -147,14 +146,14 @@ impl<'a> Parser<'a> {
}
let expr = if this.eat(&token::OpenDelim(Delimiter::Brace)) {
- this.parse_struct_expr(None, path, true)?
+ this.parse_expr_struct(None, path, true)?
} else {
let hi = this.prev_token.span;
this.mk_expr(lo.to(hi), ExprKind::Path(None, path))
};
let expr = this.with_res(Restrictions::STMT_EXPR, |this| {
- this.parse_dot_or_call_expr_with(expr, lo, attrs)
+ this.parse_expr_dot_or_call_with(expr, lo, attrs)
})?;
// `DUMMY_SP` will get overwritten later in this function
Ok((this.mk_stmt(rustc_span::DUMMY_SP, StmtKind::Expr(expr)), TrailingToken::None))
@@ -164,7 +163,7 @@ impl<'a> Parser<'a> {
// Perform this outside of the `collect_tokens_trailing_token` closure,
// since our outer attributes do not apply to this part of the expression
let expr = self.with_res(Restrictions::STMT_EXPR, |this| {
- this.parse_assoc_expr_with(
+ this.parse_expr_assoc_with(
0,
LhsExpr::AlreadyParsed { expr, starts_statement: true },
)
@@ -200,8 +199,8 @@ impl<'a> Parser<'a> {
// Since none of the above applied, this is an expression statement macro.
let e = self.mk_expr(lo.to(hi), ExprKind::MacCall(mac));
let e = self.maybe_recover_from_bad_qpath(e)?;
- let e = self.parse_dot_or_call_expr_with(e, lo, attrs)?;
- let e = self.parse_assoc_expr_with(
+ let e = self.parse_expr_dot_or_call_with(e, lo, attrs)?;
+ let e = self.parse_expr_assoc_with(
0,
LhsExpr::AlreadyParsed { expr: e, starts_statement: false },
)?;
@@ -217,12 +216,12 @@ impl<'a> Parser<'a> {
&& let attrs = attrs.take_for_recovery(self.sess)
&& let attrs @ [.., last] = &*attrs {
if last.is_doc_comment() {
- self.sess.emit_err(DocCommentDoesNotDocumentAnything {
+ self.sess.emit_err(errors::DocCommentDoesNotDocumentAnything {
span: last.span,
missing_comma: None,
});
} else if attrs.iter().any(|a| a.style == AttrStyle::Outer) {
- self.sess.emit_err(ExpectedStatementAfterOuterAttr { span: last.span });
+ self.sess.emit_err(errors::ExpectedStatementAfterOuterAttr { span: last.span });
}
}
}
@@ -231,7 +230,7 @@ impl<'a> Parser<'a> {
&mut self,
lo: Span,
attrs: AttrWrapper,
- subdiagnostic: fn(Span) -> InvalidVariableDeclarationSub,
+ subdiagnostic: fn(Span) -> errors::InvalidVariableDeclarationSub,
) -> PResult<'a, Stmt> {
let stmt =
self.collect_tokens_trailing_token(attrs, ForceCollect::Yes, |this, attrs| {
@@ -242,7 +241,7 @@ impl<'a> Parser<'a> {
TrailingToken::None,
))
})?;
- self.sess.emit_err(InvalidVariableDeclaration { span: lo, sub: subdiagnostic(lo) });
+ self.sess.emit_err(errors::InvalidVariableDeclaration { span: lo, sub: subdiagnostic(lo) });
Ok(stmt)
}
@@ -270,12 +269,13 @@ impl<'a> Parser<'a> {
let lo = self.prev_token.span;
if self.token.is_keyword(kw::Const) && self.look_ahead(1, |t| t.is_ident()) {
- self.sess.emit_err(ConstLetMutuallyExclusive { span: lo.to(self.token.span) });
+ self.sess.emit_err(errors::ConstLetMutuallyExclusive { span: lo.to(self.token.span) });
self.bump();
}
self.report_invalid_identifier_error()?;
- let (pat, colon) = self.parse_pat_before_ty(None, RecoverComma::Yes, "`let` bindings")?;
+ let (pat, colon) =
+ self.parse_pat_before_ty(None, RecoverComma::Yes, PatternLocation::LetBinding)?;
let (err, ty) = if colon {
// Save the state of the parser before parsing type normally, in case there is a `:`
@@ -372,7 +372,7 @@ impl<'a> Parser<'a> {
rustc_ast::MetaItemLit::from_token(&self.token).is_none() &&
(lit.kind == token::LitKind::Integer || lit.kind == token::LitKind::Float) &&
self.look_ahead(1, |t| matches!(t.kind, token::Eq) || matches!(t.kind, token::Colon ) ) {
- return Err(self.sess.create_err(InvalidIdentiferStartsWithNumber { span: self.token.span }));
+ return Err(self.sess.create_err(errors::InvalidIdentiferStartsWithNumber { span: self.token.span }));
}
Ok(())
}
@@ -380,10 +380,10 @@ impl<'a> Parser<'a> {
fn check_let_else_init_bool_expr(&self, init: &ast::Expr) {
if let ast::ExprKind::Binary(op, ..) = init.kind {
if op.node.lazy() {
- self.sess.emit_err(InvalidExpressionInLetElse {
+ self.sess.emit_err(errors::InvalidExpressionInLetElse {
span: init.span,
operator: op.node.to_string(),
- sugg: WrapExpressionInParentheses {
+ sugg: errors::WrapExpressionInParentheses {
left: init.span.shrink_to_lo(),
right: init.span.shrink_to_hi(),
},
@@ -394,9 +394,9 @@ impl<'a> Parser<'a> {
fn check_let_else_init_trailing_brace(&self, init: &ast::Expr) {
if let Some(trailing) = classify::expr_trailing_brace(init) {
- self.sess.emit_err(InvalidCurlyInLetElse {
+ self.sess.emit_err(errors::InvalidCurlyInLetElse {
span: trailing.span.with_lo(trailing.span.hi() - BytePos(1)),
- sugg: WrapExpressionInParentheses {
+ sugg: errors::WrapExpressionInParentheses {
left: trailing.span.shrink_to_lo(),
right: trailing.span.shrink_to_hi(),
},
@@ -409,7 +409,8 @@ impl<'a> Parser<'a> {
let eq_consumed = match self.token.kind {
token::BinOpEq(..) => {
// Recover `let x <op>= 1` as `let x = 1`
- self.sess.emit_err(CompoundAssignmentExpressionInLet { span: self.token.span });
+ self.sess
+ .emit_err(errors::CompoundAssignmentExpressionInLet { span: self.token.span });
self.bump();
true
}
@@ -543,7 +544,7 @@ impl<'a> Parser<'a> {
s: BlockCheckMode,
recover: AttemptLocalParseRecovery,
) -> PResult<'a, P<Block>> {
- let mut stmts = vec![];
+ let mut stmts = ThinVec::new();
let mut snapshot = None;
while !self.eat(&token::CloseDelim(Delimiter::Brace)) {
if self.token == token::Eof {
@@ -661,7 +662,12 @@ impl<'a> Parser<'a> {
Ok(Some(stmt))
}
- pub(super) fn mk_block(&self, stmts: Vec<Stmt>, rules: BlockCheckMode, span: Span) -> P<Block> {
+ pub(super) fn mk_block(
+ &self,
+ stmts: ThinVec<Stmt>,
+ rules: BlockCheckMode,
+ span: Span,
+ ) -> P<Block> {
P(Block {
stmts,
id: DUMMY_NODE_ID,
@@ -681,6 +687,6 @@ impl<'a> Parser<'a> {
}
pub(super) fn mk_block_err(&self, span: Span) -> P<Block> {
- self.mk_block(vec![self.mk_stmt_err(span)], BlockCheckMode::Default, span)
+ self.mk_block(thin_vec![self.mk_stmt_err(span)], BlockCheckMode::Default, span)
}
}
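
Several of these hunks change `Vec<T>` to `ThinVec<T>` from the thin-vec crate that these files now import: a `ThinVec` keeps its length and capacity on the heap, so the handle itself is a single pointer wide, which matters for AST nodes that are moved and cloned frequently. A small usage sketch, assuming the thin-vec dependency is available:

use thin_vec::{thin_vec, ThinVec};

fn main() {
    // Drop-in construction, matching `let mut stmts = ThinVec::new();`.
    let mut stmts: ThinVec<&str> = ThinVec::new();
    stmts.push("let x = 1;");
    stmts.push("x + 1");

    // Macro form, matching `thin_vec![self.mk_stmt_err(span)]`.
    let single = thin_vec!["only statement"];

    // The handle is one pointer wide, versus three for Vec.
    println!(
        "size_of::<ThinVec<&str>>() = {}, size_of::<Vec<&str>>() = {}",
        std::mem::size_of::<ThinVec<&str>>(),
        std::mem::size_of::<Vec<&str>>()
    );
    println!("{} + {} statements", stmts.len(), single.len());
}
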
diff --git a/compiler/rustc_parse/src/parser/ty.rs b/compiler/rustc_parse/src/parser/ty.rs
index 867974672..6fe4da71f 100644
--- a/compiler/rustc_parse/src/parser/ty.rs
+++ b/compiler/rustc_parse/src/parser/ty.rs
@@ -1,6 +1,12 @@
use super::{Parser, PathStyle, TokenType};
-use crate::errors::{ExpectedFnPathFoundFnKeyword, FnPtrWithGenerics, FnPtrWithGenericsSugg};
+use crate::errors::{
+ DynAfterMut, ExpectedFnPathFoundFnKeyword, ExpectedMutOrConstInRawPointerType,
+ FnPointerCannotBeAsync, FnPointerCannotBeConst, FnPtrWithGenerics, FnPtrWithGenericsSugg,
+ InvalidDynKeyword, LifetimeAfterMut, NeedPlusAfterTraitObjectLifetime,
+ NegativeBoundsNotSupported, NegativeBoundsNotSupportedSugg, NestedCVariadicType,
+ ReturnTypesUseThinArrow,
+};
use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole};
use ast::DUMMY_NODE_ID;
@@ -11,11 +17,11 @@ use rustc_ast::{
self as ast, BareFnTy, FnRetTy, GenericBound, GenericBounds, GenericParam, Generics, Lifetime,
MacCall, MutTy, Mutability, PolyTraitRef, TraitBoundModifier, TraitObjectSyntax, Ty, TyKind,
};
-use rustc_errors::{pluralize, struct_span_err, Applicability, PResult};
+use rustc_errors::{Applicability, PResult};
use rustc_span::source_map::Span;
use rustc_span::symbol::{kw, sym, Ident};
use rustc_span::Symbol;
-use thin_vec::thin_vec;
+use thin_vec::{thin_vec, ThinVec};
/// Any `?` or `~const` modifiers that appear at the start of a bound.
struct BoundModifiers {
@@ -217,14 +223,7 @@ impl<'a> Parser<'a> {
// Don't `eat` to prevent `=>` from being added as an expected token which isn't
// actually expected and could only confuse users
self.bump();
- self.struct_span_err(self.prev_token.span, "return types are denoted using `->`")
- .span_suggestion_short(
- self.prev_token.span,
- "use `->` instead",
- "->",
- Applicability::MachineApplicable,
- )
- .emit();
+ self.sess.emit_err(ReturnTypesUseThinArrow { span: self.prev_token.span });
let ty = self.parse_ty_common(
allow_plus,
AllowCVariadic::No,
@@ -274,7 +273,7 @@ impl<'a> Parser<'a> {
TyKind::Infer
} else if self.check_fn_front_matter(false, Case::Sensitive) {
// Function pointer type
- self.parse_ty_bare_fn(lo, Vec::new(), None, recover_return_sign)?
+ self.parse_ty_bare_fn(lo, ThinVec::new(), None, recover_return_sign)?
} else if self.check_keyword(kw::For) {
// Function pointer type or bound list (trait object type) starting with a poly-trait.
// `for<'lt> [unsafe] [extern "ABI"] fn (&'lt S) -> T`
@@ -305,13 +304,14 @@ impl<'a> Parser<'a> {
} else if self.can_begin_bound() {
self.parse_bare_trait_object(lo, allow_plus)?
} else if self.eat(&token::DotDotDot) {
- if allow_c_variadic == AllowCVariadic::Yes {
- TyKind::CVarArgs
- } else {
- // FIXME(Centril): Should we just allow `...` syntactically
- // anywhere in a type and use semantic restrictions instead?
- self.error_illegal_c_varadic_ty(lo);
- TyKind::Err
+ match allow_c_variadic {
+ AllowCVariadic::Yes => TyKind::CVarArgs,
+ AllowCVariadic::No => {
+ // FIXME(Centril): Should we just allow `...` syntactically
+ // anywhere in a type and use semantic restrictions instead?
+ self.sess.emit_err(NestedCVariadicType { span: lo.to(self.prev_token.span) });
+ TyKind::Err
+ }
}
} else {
let msg = format!("expected type, found {}", super::token_descr(&self.token));
@@ -325,10 +325,9 @@ impl<'a> Parser<'a> {
let mut ty = self.mk_ty(span, kind);
// Try to recover from use of `+` with incorrect priority.
- if matches!(allow_plus, AllowPlus::Yes) {
- self.maybe_recover_from_bad_type_plus(&ty)?;
- } else {
- self.maybe_report_ambiguous_plus(impl_dyn_multi, &ty);
+ match allow_plus {
+ AllowPlus::Yes => self.maybe_recover_from_bad_type_plus(&ty)?,
+ AllowPlus::No => self.maybe_report_ambiguous_plus(impl_dyn_multi, &ty),
}
if let RecoverQuestionMark::Yes = recover_question_mark {
ty = self.maybe_recover_from_question_mark(ty);
@@ -353,7 +352,7 @@ impl<'a> Parser<'a> {
match ty.kind {
// `(TY_BOUND_NOPAREN) + BOUND + ...`.
TyKind::Path(None, path) if maybe_bounds => {
- self.parse_remaining_bounds_path(Vec::new(), path, lo, true)
+ self.parse_remaining_bounds_path(ThinVec::new(), path, lo, true)
}
TyKind::TraitObject(bounds, TraitObjectSyntax::None)
if maybe_bounds && bounds.len() == 1 && !trailing_plus =>
@@ -372,15 +371,14 @@ impl<'a> Parser<'a> {
let lt_no_plus = self.check_lifetime() && !self.look_ahead(1, |t| t.is_like_plus());
let bounds = self.parse_generic_bounds_common(allow_plus, None)?;
if lt_no_plus {
- self.struct_span_err(lo, "lifetime in trait object type must be followed by `+`")
- .emit();
+ self.sess.emit_err(NeedPlusAfterTraitObjectLifetime { span: lo });
}
Ok(TyKind::TraitObject(bounds, TraitObjectSyntax::None))
}
fn parse_remaining_bounds_path(
&mut self,
- generic_params: Vec<GenericParam>,
+ generic_params: ThinVec<GenericParam>,
path: ast::Path,
lo: Span,
parse_plus: bool,
@@ -407,14 +405,10 @@ impl<'a> Parser<'a> {
fn parse_ty_ptr(&mut self) -> PResult<'a, TyKind> {
let mutbl = self.parse_const_or_mut().unwrap_or_else(|| {
let span = self.prev_token.span;
- self.struct_span_err(span, "expected `mut` or `const` keyword in raw pointer type")
- .span_suggestions(
- span.shrink_to_hi(),
- "add `mut` or `const` here",
- ["mut ".to_string(), "const ".to_string()],
- Applicability::HasPlaceholders,
- )
- .emit();
+ self.sess.emit_err(ExpectedMutOrConstInRawPointerType {
+ span,
+ after_asterisk: span.shrink_to_hi(),
+ });
Mutability::Not
});
let ty = self.parse_ty_no_plus()?;
@@ -439,7 +433,7 @@ impl<'a> Parser<'a> {
};
let ty = if self.eat(&token::Semi) {
- let mut length = self.parse_anon_const_expr()?;
+ let mut length = self.parse_expr_anon_const()?;
if let Err(e) = self.expect(&token::CloseDelim(Delimiter::Bracket)) {
// Try to recover from `X<Y, ...>` when `X::<Y, ...>` works
self.check_mistyped_turbofish_with_multiple_type_params(e, &mut length.value)?;
@@ -456,8 +450,7 @@ impl<'a> Parser<'a> {
fn parse_borrowed_pointee(&mut self) -> PResult<'a, TyKind> {
let and_span = self.prev_token.span;
- let mut opt_lifetime =
- if self.check_lifetime() { Some(self.expect_lifetime()) } else { None };
+ let mut opt_lifetime = self.check_lifetime().then(|| self.expect_lifetime());
let mut mutbl = self.parse_mutability();
if self.token.is_lifetime() && mutbl == Mutability::Mut && opt_lifetime.is_none() {
// A lifetime is invalid here: it would be part of a bare trait bound, which requires
@@ -469,16 +462,13 @@ impl<'a> Parser<'a> {
let lifetime_span = self.token.span;
let span = and_span.to(lifetime_span);
- let mut err = self.struct_span_err(span, "lifetime must precede `mut`");
- if let Ok(lifetime_src) = self.span_to_snippet(lifetime_span) {
- err.span_suggestion(
- span,
- "place the lifetime before `mut`",
- format!("&{} mut", lifetime_src),
- Applicability::MaybeIncorrect,
- );
- }
- err.emit();
+ let (suggest_lifetime, snippet) =
+ if let Ok(lifetime_src) = self.span_to_snippet(lifetime_span) {
+ (Some(span), lifetime_src)
+ } else {
+ (None, String::new())
+ };
+ self.sess.emit_err(LifetimeAfterMut { span, suggest_lifetime, snippet });
opt_lifetime = Some(self.expect_lifetime());
}
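The `LifetimeAfterMut` call above passes an `Option<Span>` plus the captured snippet, which is how derive-based diagnostics express a suggestion that is only emitted when a snippet could be extracted. A hedged sketch of the corresponding struct; the slug, attribute details, and applicability are assumptions:

// Sketch only; not part of this diff.
use rustc_macros::Diagnostic;
use rustc_span::Span;

#[derive(Diagnostic)]
#[diag(parse_lifetime_after_mut)] // assumed slug
pub(crate) struct LifetimeAfterMut {
    #[primary_span]
    pub span: Span,
    // A `None` span suppresses the suggestion, mirroring the old `if let Ok(..)` guard.
    #[suggestion(code = "&{snippet} mut", applicability = "maybe-incorrect")]
    pub suggest_lifetime: Option<Span>,
    pub snippet: String,
}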
@@ -488,14 +478,7 @@ impl<'a> Parser<'a> {
{
// We have `&dyn mut ...`, which is invalid and should be `&mut dyn ...`.
let span = and_span.to(self.look_ahead(1, |t| t.span));
- let mut err = self.struct_span_err(span, "`mut` must precede `dyn`");
- err.span_suggestion(
- span,
- "place `mut` before `dyn`",
- "&mut dyn",
- Applicability::MachineApplicable,
- );
- err.emit();
+ self.sess.emit_err(DynAfterMut { span });
// Recovery
mutbl = Mutability::Mut;
@@ -511,7 +494,7 @@ impl<'a> Parser<'a> {
// To avoid ambiguity, the type is surrounded by parentheses.
fn parse_typeof_ty(&mut self) -> PResult<'a, TyKind> {
self.expect(&token::OpenDelim(Delimiter::Parenthesis))?;
- let expr = self.parse_anon_const_expr()?;
+ let expr = self.parse_expr_anon_const()?;
self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
Ok(TyKind::Typeof(expr))
}
@@ -528,7 +511,7 @@ impl<'a> Parser<'a> {
fn parse_ty_bare_fn(
&mut self,
lo: Span,
- mut params: Vec<GenericParam>,
+ mut params: ThinVec<GenericParam>,
param_insertion_point: Option<Span>,
recover_return_sign: RecoverReturnSign,
) -> PResult<'a, TyKind> {
@@ -549,10 +532,10 @@ impl<'a> Parser<'a> {
// If we ever start to allow `const fn()`, then update
// feature gating for `#![feature(const_extern_fn)]` to
// cover it.
- self.error_fn_ptr_bad_qualifier(whole_span, span, "const");
+ self.sess.emit_err(FnPointerCannotBeConst { span: whole_span, qualifier: span });
}
if let ast::Async::Yes { span, .. } = asyncness {
- self.error_fn_ptr_bad_qualifier(whole_span, span, "async");
+ self.sess.emit_err(FnPointerCannotBeAsync { span: whole_span, qualifier: span });
}
let decl_span = span_start.to(self.token.span);
Ok(TyKind::BareFn(P(BareFnTy { ext, unsafety, generic_params: params, decl, decl_span })))
@@ -562,13 +545,13 @@ impl<'a> Parser<'a> {
fn recover_fn_ptr_with_generics(
&mut self,
lo: Span,
- params: &mut Vec<GenericParam>,
+ params: &mut ThinVec<GenericParam>,
param_insertion_point: Option<Span>,
) -> PResult<'a, ()> {
let generics = self.parse_generics()?;
let arity = generics.params.len();
- let mut lifetimes: Vec<_> = generics
+ let mut lifetimes: ThinVec<_> = generics
.params
.into_iter()
.filter(|param| matches!(param.kind, ast::GenericParamKind::Lifetime))
@@ -600,19 +583,6 @@ impl<'a> Parser<'a> {
Ok(())
}
- /// Emit an error for the given bad function pointer qualifier.
- fn error_fn_ptr_bad_qualifier(&self, span: Span, qual_span: Span, qual: &str) {
- self.struct_span_err(span, &format!("an `fn` pointer type cannot be `{}`", qual))
- .span_label(qual_span, format!("`{}` because of this", qual))
- .span_suggestion_short(
- qual_span,
- &format!("remove the `{}` qualifier", qual),
- "",
- Applicability::MaybeIncorrect,
- )
- .emit();
- }
-
/// Parses an `impl B0 + ... + Bn` type.
fn parse_impl_ty(&mut self, impl_dyn_multi: &mut bool) -> PResult<'a, TyKind> {
// Always parse bounds greedily for better error recovery.
@@ -643,7 +613,7 @@ impl<'a> Parser<'a> {
/// Is a `dyn B0 + ... + Bn` type allowed here?
fn is_explicit_dyn_type(&mut self) -> bool {
self.check_keyword(kw::Dyn)
- && (!self.token.uninterpolated_span().rust_2015()
+ && (self.token.uninterpolated_span().rust_2018()
|| self.look_ahead(1, |t| {
(t.can_begin_bound() || t.kind == TokenKind::BinOp(token::Star))
&& !can_continue_type_after_non_fn_ident(t)
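The edition-check rewrite in this hunk (`!rust_2015()` to `rust_2018()`) is behavior-preserving: 2015 is the lowest edition, so "not 2015" and "2018 or later" select the same editions. A tiny standalone model of that equivalence; the enum below is illustrative, not the compiler's `Edition` type:

// Illustrative stand-in; assumes `rust_2015()` means "exactly the 2015 edition"
// and `rust_2018()` means "2018 or later".
#[derive(PartialEq, PartialOrd)]
enum Edition {
    E2015,
    E2018,
    E2021,
}

fn main() {
    for e in [Edition::E2015, Edition::E2018, Edition::E2021] {
        let old_check = !(e == Edition::E2015); // `!span.rust_2015()`
        let new_check = e >= Edition::E2018;    // `span.rust_2018()`
        assert_eq!(old_check, new_check);       // both pick out 2018 and later
    }
}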
@@ -692,23 +662,13 @@ impl<'a> Parser<'a> {
})))
} else if allow_plus == AllowPlus::Yes && self.check_plus() {
// `Trait1 + Trait2 + 'a`
- self.parse_remaining_bounds_path(Vec::new(), path, lo, true)
+ self.parse_remaining_bounds_path(ThinVec::new(), path, lo, true)
} else {
// Just a type path.
Ok(TyKind::Path(None, path))
}
}
- fn error_illegal_c_varadic_ty(&self, lo: Span) {
- struct_span_err!(
- self.sess.span_diagnostic,
- lo.to(self.prev_token.span),
- E0743,
- "C-variadic type `...` may not be nested inside another type",
- )
- .emit();
- }
-
pub(super) fn parse_generic_bounds(
&mut self,
colon_span: Option<Span>,
@@ -739,15 +699,7 @@ impl<'a> Parser<'a> {
{
if self.token.is_keyword(kw::Dyn) {
// Account for `&dyn Trait + dyn Other`.
- self.struct_span_err(self.token.span, "invalid `dyn` keyword")
- .help("`dyn` is only needed at the start of a trait `+`-separated list")
- .span_suggestion(
- self.token.span,
- "remove this keyword",
- "",
- Applicability::MachineApplicable,
- )
- .emit();
+ self.sess.emit_err(InvalidDynKeyword { span: self.token.span });
self.bump();
}
match self.parse_generic_bound()? {
@@ -784,11 +736,7 @@ impl<'a> Parser<'a> {
bounds: &[GenericBound],
negative_bounds: Vec<Span>,
) {
- let negative_bounds_len = negative_bounds.len();
- let last_span = *negative_bounds.last().expect("no negative bounds, but still error?");
- let mut err = self.struct_span_err(negative_bounds, "negative bounds are not supported");
- err.span_label(last_span, "negative bounds are not supported");
- if let Some(bound_list) = colon_span {
+ let sub = if let Some(bound_list) = colon_span {
let bound_list = bound_list.to(self.prev_token.span);
let mut new_bound_list = String::new();
if !bounds.is_empty() {
@@ -799,14 +747,18 @@ impl<'a> Parser<'a> {
}
new_bound_list = new_bound_list.replacen(" +", ":", 1);
}
- err.tool_only_span_suggestion(
+
+ Some(NegativeBoundsNotSupportedSugg {
bound_list,
- &format!("remove the bound{}", pluralize!(negative_bounds_len)),
- new_bound_list,
- Applicability::MachineApplicable,
- );
- }
- err.emit();
+ num_bounds: negative_bounds.len(),
+ fixed: new_bound_list,
+ })
+ } else {
+ None
+ };
+
+ let last_span = *negative_bounds.last().expect("no negative bounds, but still error?");
+ self.sess.emit_err(NegativeBoundsNotSupported { negative_bounds, last_span, sub });
}
/// Parses a bound according to the grammar:
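The negative-bounds rewrite above splits the old hand-built error into a main diagnostic plus an optional subdiagnostic (`sub`). A rough sketch of how those types might be declared, with field names taken from the `emit_err` call and everything else (slugs, attributes, applicability) assumed:

// Sketch only; not part of this diff.
use rustc_macros::{Diagnostic, Subdiagnostic};
use rustc_span::Span;

#[derive(Diagnostic)]
#[diag(parse_negative_bounds_not_supported)] // assumed slug
pub(crate) struct NegativeBoundsNotSupported {
    #[primary_span]
    pub negative_bounds: Vec<Span>,
    #[label]
    pub last_span: Span,
    #[subdiagnostic]
    pub sub: Option<NegativeBoundsNotSupportedSugg>,
}

#[derive(Subdiagnostic)]
#[suggestion(
    parse_negative_bounds_not_supported_sugg, // assumed slug
    applicability = "machine-applicable",
    code = "{fixed}"
)]
pub(crate) struct NegativeBoundsNotSupportedSugg {
    #[primary_span]
    pub bound_list: Span,
    pub num_bounds: usize,
    pub fixed: String,
}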
@@ -918,7 +870,7 @@ impl<'a> Parser<'a> {
None
};
- let maybe = if self.eat(&token::Question) { Some(self.prev_token.span) } else { None };
+ let maybe = self.eat(&token::Question).then_some(self.prev_token.span);
Ok(BoundModifiers { maybe, maybe_const })
}
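The rewrites in this file that turn `if cond { Some(x) } else { None }` into `cond.then_some(x)` here, or `cond.then(|| ...)` for the lifetime case earlier, use plain std helpers. A small standalone illustration of the eager/lazy difference between the two:

fn main() {
    let cond = true;
    // `then_some` takes a value that is already computed...
    let eager: Option<&str> = cond.then_some("?");
    // ...while `then` takes a closure that only runs when the bool is true.
    let lazy: Option<String> = cond.then(|| "?".repeat(3));
    assert_eq!(eager, Some("?"));
    assert_eq!(lazy.as_deref(), Some("???"));
}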
@@ -990,7 +942,7 @@ impl<'a> Parser<'a> {
self.parse_remaining_bounds(bounds, true)?;
self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
let sp = vec![lo, self.prev_token.span];
- let sugg: Vec<_> = sp.iter().map(|sp| (*sp, String::new())).collect();
+ let sugg = vec![(lo, String::from(" ")), (self.prev_token.span, String::new())];
self.struct_span_err(sp, "incorrect braces around trait bounds")
.multipart_suggestion(
"remove the parentheses",
@@ -1041,7 +993,7 @@ impl<'a> Parser<'a> {
}
/// Optionally parses `for<$generic_params>`.
- pub(super) fn parse_late_bound_lifetime_defs(&mut self) -> PResult<'a, Vec<GenericParam>> {
+ pub(super) fn parse_late_bound_lifetime_defs(&mut self) -> PResult<'a, ThinVec<GenericParam>> {
if self.eat_keyword(kw::For) {
self.expect_lt()?;
let params = self.parse_generic_params()?;
@@ -1050,7 +1002,7 @@ impl<'a> Parser<'a> {
// parameters, and the lifetime parameters must not have bounds.
Ok(params)
} else {
- Ok(Vec::new())
+ Ok(ThinVec::new())
}
}
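The `Vec<GenericParam>` to `ThinVec<GenericParam>` changes throughout this file lean on `thin-vec` mirroring `Vec`'s API (`new`, `push`, `append`, `collect`), so call sites only change in their types. A quick standalone check of that assumption against the thin-vec crate:

// Requires the external `thin-vec` crate.
use thin_vec::{thin_vec, ThinVec};

fn main() {
    let mut params: ThinVec<u32> = ThinVec::new();
    params.push(1);
    // `FromIterator` is implemented, so `.collect()` works just like with `Vec`.
    let mut more: ThinVec<u32> = (2..=3).collect();
    params.append(&mut more);
    assert_eq!(params, thin_vec![1, 2, 3]);
    // The payoff: a ThinVec is one pointer wide, shrinking AST nodes that embed it.
    assert_eq!(std::mem::size_of::<ThinVec<u32>>(), std::mem::size_of::<usize>());
}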
@@ -1060,7 +1012,7 @@ impl<'a> Parser<'a> {
fn recover_fn_trait_with_lifetime_params(
&mut self,
fn_path: &mut ast::Path,
- lifetime_defs: &mut Vec<GenericParam>,
+ lifetime_defs: &mut ThinVec<GenericParam>,
) -> PResult<'a, ()> {
let fn_path_segment = fn_path.segments.last_mut().unwrap();
let generic_args = if let Some(p_args) = &fn_path_segment.args {
@@ -1094,7 +1046,7 @@ impl<'a> Parser<'a> {
// Parse `(T, U) -> R`.
let inputs_lo = self.token.span;
- let inputs: Vec<_> =
+ let inputs: ThinVec<_> =
self.parse_fn_params(|_| false)?.into_iter().map(|input| input.ty).collect();
let inputs_span = inputs_lo.to(self.prev_token.span);
let output = self.parse_ret_ty(AllowPlus::No, RecoverQPath::No, RecoverReturnSign::No)?;
@@ -1120,7 +1072,7 @@ impl<'a> Parser<'a> {
kind: ast::GenericParamKind::Lifetime,
colon_span: None,
})
- .collect::<Vec<GenericParam>>();
+ .collect::<ThinVec<GenericParam>>();
lifetime_defs.append(&mut generic_params);
let generic_args_span = generic_args.span();