From 698f8c2f01ea549d77d7dc3338a12e04c11057b9 Mon Sep 17 00:00:00 2001 From: Daniel Baumann Date: Wed, 17 Apr 2024 14:02:58 +0200 Subject: Adding upstream version 1.64.0+dfsg1. Signed-off-by: Daniel Baumann --- src/tools/rust-analyzer/crates/syntax/Cargo.toml | 39 + .../rust-analyzer/crates/syntax/fuzz/Cargo.toml | 27 + .../crates/syntax/fuzz/fuzz_targets/parser.rs | 11 + .../crates/syntax/fuzz/fuzz_targets/reparse.rs | 11 + src/tools/rust-analyzer/crates/syntax/rust.ungram | 667 +++ src/tools/rust-analyzer/crates/syntax/src/algo.rs | 660 +++ src/tools/rust-analyzer/crates/syntax/src/ast.rs | 367 ++ .../rust-analyzer/crates/syntax/src/ast/edit.rs | 174 + .../crates/syntax/src/ast/edit_in_place.rs | 717 +++ .../crates/syntax/src/ast/expr_ext.rs | 410 ++ .../crates/syntax/src/ast/generated.rs | 41 + .../crates/syntax/src/ast/generated/nodes.rs | 4806 ++++++++++++++++++++ .../crates/syntax/src/ast/generated/tokens.rs | 196 + .../rust-analyzer/crates/syntax/src/ast/make.rs | 901 ++++ .../crates/syntax/src/ast/node_ext.rs | 875 ++++ .../crates/syntax/src/ast/operators.rs | 122 + .../crates/syntax/src/ast/token_ext.rs | 472 ++ .../rust-analyzer/crates/syntax/src/ast/traits.rs | 136 + src/tools/rust-analyzer/crates/syntax/src/fuzz.rs | 75 + src/tools/rust-analyzer/crates/syntax/src/hacks.rs | 15 + src/tools/rust-analyzer/crates/syntax/src/lib.rs | 358 ++ .../rust-analyzer/crates/syntax/src/parsing.rs | 46 + .../crates/syntax/src/parsing/reparsing.rs | 441 ++ src/tools/rust-analyzer/crates/syntax/src/ptr.rs | 104 + .../crates/syntax/src/syntax_error.rs | 44 + .../rust-analyzer/crates/syntax/src/syntax_node.rs | 75 + src/tools/rust-analyzer/crates/syntax/src/ted.rs | 206 + src/tools/rust-analyzer/crates/syntax/src/tests.rs | 186 + .../crates/syntax/src/tests/ast_src.rs | 252 + .../crates/syntax/src/tests/sourcegen_ast.rs | 862 ++++ .../rust-analyzer/crates/syntax/src/token_text.rs | 95 + src/tools/rust-analyzer/crates/syntax/src/utils.rs | 43 + 
.../rust-analyzer/crates/syntax/src/validation.rs | 378 ++ .../crates/syntax/src/validation/block.rs | 24 + .../syntax/test_data/parser/fuzz-failures/0000.rs | 199 + .../syntax/test_data/parser/fuzz-failures/0001.rs | 106 + .../syntax/test_data/parser/fuzz-failures/0002.rs | 1 + .../syntax/test_data/parser/fuzz-failures/0003.rs | 1 + .../syntax/test_data/parser/fuzz-failures/0004.rs | 1 + .../parser/validation/0031_block_inner_attrs.rast | 127 + .../parser/validation/0031_block_inner_attrs.rs | 15 + .../validation/0037_visibility_in_traits.rast | 105 + .../parser/validation/0037_visibility_in_traits.rs | 6 + .../validation/0038_endless_inclusive_range.rast | 30 + .../validation/0038_endless_inclusive_range.rs | 4 + .../validation/0040_illegal_crate_kw_location.rast | 96 + .../validation/0040_illegal_crate_kw_location.rs | 4 + .../0041_illegal_self_keyword_location.rast | 29 + .../0041_illegal_self_keyword_location.rs | 2 + .../validation/0045_ambiguous_trait_object.rast | 196 + .../validation/0045_ambiguous_trait_object.rs | 6 + .../parser/validation/0046_mutable_const_item.rast | 22 + .../parser/validation/0046_mutable_const_item.rs | 1 + .../parser/validation/invalid_let_expr.rast | 216 + .../parser/validation/invalid_let_expr.rs | 14 + .../syntax/test_data/reparse/fuzz-failures/0000.rs | 6 + .../syntax/test_data/reparse/fuzz-failures/0001.rs | 4 + .../syntax/test_data/reparse/fuzz-failures/0002.rs | 4 + .../syntax/test_data/reparse/fuzz-failures/0003.rs | Bin 0 -> 8 bytes .../syntax/test_data/reparse/fuzz-failures/0004.rs | 4 + .../syntax/test_data/reparse/fuzz-failures/0005.rs | 7 + 61 files changed, 15042 insertions(+) create mode 100644 src/tools/rust-analyzer/crates/syntax/Cargo.toml create mode 100644 src/tools/rust-analyzer/crates/syntax/fuzz/Cargo.toml create mode 100644 src/tools/rust-analyzer/crates/syntax/fuzz/fuzz_targets/parser.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/fuzz/fuzz_targets/reparse.rs create mode 100644 
src/tools/rust-analyzer/crates/syntax/rust.ungram create mode 100644 src/tools/rust-analyzer/crates/syntax/src/algo.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/src/ast.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/src/ast/edit.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/src/ast/expr_ext.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/src/ast/generated.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/src/ast/generated/tokens.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/src/ast/make.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/src/ast/operators.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/src/fuzz.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/src/hacks.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/src/lib.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/src/parsing.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/src/parsing/reparsing.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/src/ptr.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/src/syntax_error.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/src/syntax_node.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/src/ted.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/src/tests.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/src/tests/ast_src.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/src/tests/sourcegen_ast.rs create mode 100644 
src/tools/rust-analyzer/crates/syntax/src/token_text.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/src/utils.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/src/validation.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/src/validation/block.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0000.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0001.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0002.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0003.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures/0004.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0031_block_inner_attrs.rast create mode 100644 src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0031_block_inner_attrs.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0037_visibility_in_traits.rast create mode 100644 src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0037_visibility_in_traits.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0038_endless_inclusive_range.rast create mode 100644 src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0038_endless_inclusive_range.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0040_illegal_crate_kw_location.rast create mode 100644 src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0040_illegal_crate_kw_location.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0041_illegal_self_keyword_location.rast create mode 100644 src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0041_illegal_self_keyword_location.rs create mode 100644 
src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0045_ambiguous_trait_object.rast create mode 100644 src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0045_ambiguous_trait_object.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0046_mutable_const_item.rast create mode 100644 src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0046_mutable_const_item.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/invalid_let_expr.rast create mode 100644 src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/invalid_let_expr.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0000.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0001.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0002.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0003.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0004.rs create mode 100644 src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures/0005.rs (limited to 'src/tools/rust-analyzer/crates/syntax') diff --git a/src/tools/rust-analyzer/crates/syntax/Cargo.toml b/src/tools/rust-analyzer/crates/syntax/Cargo.toml new file mode 100644 index 000000000..0e2dec386 --- /dev/null +++ b/src/tools/rust-analyzer/crates/syntax/Cargo.toml @@ -0,0 +1,39 @@ +[package] +name = "syntax" +version = "0.0.0" +description = "Comment and whitespace preserving parser for the Rust language" +license = "MIT OR Apache-2.0" +repository = "https://github.com/rust-lang/rust-analyzer" +edition = "2021" +rust-version = "1.57" + +[lib] +doctest = false + +[dependencies] +cov-mark = "2.0.0-pre.1" +itertools = "0.10.3" +rowan = "0.15.8" +rustc_lexer = { version = "725.0.0", package = "rustc-ap-rustc_lexer" } +rustc-hash = "1.1.0" 
+once_cell = "1.12.0" +indexmap = "1.9.1" +smol_str = "0.1.23" + +stdx = { path = "../stdx", version = "0.0.0" } +text-edit = { path = "../text-edit", version = "0.0.0" } +parser = { path = "../parser", version = "0.0.0" } +profile = { path = "../profile", version = "0.0.0" } + +[dev-dependencies] +rayon = "1.5.3" +expect-test = "1.4.0" +proc-macro2 = "1.0.39" +quote = "1.0.20" +ungrammar = "1.16.1" + +test-utils = { path = "../test-utils" } +sourcegen = { path = "../sourcegen" } + +[features] +in-rust-tree = [] diff --git a/src/tools/rust-analyzer/crates/syntax/fuzz/Cargo.toml b/src/tools/rust-analyzer/crates/syntax/fuzz/Cargo.toml new file mode 100644 index 000000000..ba2f515b0 --- /dev/null +++ b/src/tools/rust-analyzer/crates/syntax/fuzz/Cargo.toml @@ -0,0 +1,27 @@ + +[package] +name = "syntax-fuzz" +version = "0.0.1" +publish = false +edition = "2021" +rust-version = "1.57" + +[package.metadata] +cargo-fuzz = true + +[dependencies] +syntax = { path = "..", version = "0.0.0" } +text_edit = { path = "../../text_edit", version = "0.0.0" } +libfuzzer-sys = { git = "https://github.com/rust-fuzz/libfuzzer-sys.git" } + +# Prevent this from interfering with workspaces +[workspace] +members = ["."] + +[[bin]] +name = "parser" +path = "fuzz_targets/parser.rs" + +[[bin]] +name = "reparse" +path = "fuzz_targets/reparse.rs" diff --git a/src/tools/rust-analyzer/crates/syntax/fuzz/fuzz_targets/parser.rs b/src/tools/rust-analyzer/crates/syntax/fuzz/fuzz_targets/parser.rs new file mode 100644 index 000000000..f80e13002 --- /dev/null +++ b/src/tools/rust-analyzer/crates/syntax/fuzz/fuzz_targets/parser.rs @@ -0,0 +1,11 @@ +//! Fuzzing for from-scratch parsing. 
+ +#![no_main] +use libfuzzer_sys::fuzz_target; +use syntax::fuzz::check_parser; + +fuzz_target!(|data: &[u8]| { + if let Ok(text) = std::str::from_utf8(data) { + check_parser(text) + } +}); diff --git a/src/tools/rust-analyzer/crates/syntax/fuzz/fuzz_targets/reparse.rs b/src/tools/rust-analyzer/crates/syntax/fuzz/fuzz_targets/reparse.rs new file mode 100644 index 000000000..f865ce8d6 --- /dev/null +++ b/src/tools/rust-analyzer/crates/syntax/fuzz/fuzz_targets/reparse.rs @@ -0,0 +1,11 @@ +//! Fuzzing for incremental parsing. + +#![no_main] +use libfuzzer_sys::fuzz_target; +use syntax::fuzz::CheckReparse; + +fuzz_target!(|data: &[u8]| { + if let Some(check) = CheckReparse::from_data(data) { + check.run(); + } +}); diff --git a/src/tools/rust-analyzer/crates/syntax/rust.ungram b/src/tools/rust-analyzer/crates/syntax/rust.ungram new file mode 100644 index 000000000..62aa47839 --- /dev/null +++ b/src/tools/rust-analyzer/crates/syntax/rust.ungram @@ -0,0 +1,667 @@ +// Rust Un-Grammar. +// +// This grammar specifies the structure of Rust's concrete syntax tree. +// It does not specify parsing rules (ambiguities, precedence, etc are out of scope). +// Tokens are processed -- contextual keywords are recognised, compound operators glued. +// +// Legend: +// +// // -- comment +// Name = -- non-terminal definition +// 'ident' -- token (terminal) +// A B -- sequence +// A | B -- alternation +// A* -- zero or more repetition +// A? -- zero or one repetition +// (A) -- same as A +// label:A -- suggested name for field of AST node + +//*************************// +// Names, Paths and Macros // +//*************************// + +Name = + 'ident' | 'self' + +NameRef = + 'ident' | 'int_number' | 'self' | 'super' | 'crate' | 'Self' + +Lifetime = + 'lifetime_ident' + +Path = + (qualifier:Path '::')? segment:PathSegment + +PathSegment = + '::'? NameRef +| NameRef GenericArgList? +| NameRef ParamList RetType? +| '<' PathType ('as' PathType)? '>' + +GenericArgList = + '::'? 
'<' (GenericArg (',' GenericArg)* ','?)? '>' + +GenericArg = + TypeArg +| AssocTypeArg +| LifetimeArg +| ConstArg + +TypeArg = + Type + +AssocTypeArg = + NameRef GenericParamList? (':' TypeBoundList | ('=' Type | ConstArg)) + +LifetimeArg = + Lifetime + +ConstArg = + Expr + +MacroCall = + Attr* Path '!' TokenTree ';'? + +TokenTree = + '(' ')' +| '{' '}' +| '[' ']' + +MacroItems = + Item* + +MacroStmts = + statements:Stmt* + Expr? + +//*************************// +// Items // +//*************************// + +SourceFile = + 'shebang'? + Attr* + Item* + +Item = + Const +| Enum +| ExternBlock +| ExternCrate +| Fn +| Impl +| MacroCall +| MacroRules +| MacroDef +| Module +| Static +| Struct +| Trait +| TypeAlias +| Union +| Use + +MacroRules = + Attr* Visibility? + 'macro_rules' '!' Name + TokenTree + +MacroDef = + Attr* Visibility? + 'macro' Name args:TokenTree? + body:TokenTree + +Module = + Attr* Visibility? + 'mod' Name + (ItemList | ';') + +ItemList = + '{' Attr* Item* '}' + +ExternCrate = + Attr* Visibility? + 'extern' 'crate' NameRef Rename? ';' + +Rename = + 'as' (Name | '_') + +Use = + Attr* Visibility? + 'use' UseTree ';' + +UseTree = + (Path? '::')? ('*' | UseTreeList) +| Path Rename? + +UseTreeList = + '{' (UseTree (',' UseTree)* ','?)? '}' + +Fn = + Attr* Visibility? + 'default'? 'const'? 'async'? 'unsafe'? Abi? + 'fn' Name GenericParamList? ParamList RetType? WhereClause? + (body:BlockExpr | ';') + +Abi = + 'extern' 'string'? + +ParamList = + '('( + SelfParam + | (SelfParam ',')? (Param (',' Param)* ','?)? + )')' +| '|' (Param (',' Param)* ','?)? '|' + +SelfParam = + Attr* ( + ('&' Lifetime?)? 'mut'? Name + | 'mut'? Name ':' Type + ) + +Param = + Attr* ( + Pat (':' Type)? + | Type + | '...' + ) + +RetType = + '->' Type + +TypeAlias = + Attr* Visibility? + 'default'? + 'type' Name GenericParamList? (':' TypeBoundList?)? WhereClause? + ('=' Type)? ';' + +Struct = + Attr* Visibility? + 'struct' Name GenericParamList? ( + WhereClause? 
(RecordFieldList | ';') + | TupleFieldList WhereClause? ';' + ) + +RecordFieldList = + '{' fields:(RecordField (',' RecordField)* ','?)? '}' + +RecordField = + Attr* Visibility? + Name ':' Type + +TupleFieldList = + '(' fields:(TupleField (',' TupleField)* ','?)? ')' + +TupleField = + Attr* Visibility? + Type + +FieldList = + RecordFieldList +| TupleFieldList + +Enum = + Attr* Visibility? + 'enum' Name GenericParamList? WhereClause? + VariantList + +VariantList = + '{' (Variant (',' Variant)* ','?)? '}' + +Variant = + Attr* Visibility? + Name FieldList? ('=' Expr)? + +Union = + Attr* Visibility? + 'union' Name GenericParamList? WhereClause? + RecordFieldList + +// A Data Type. +// +// Not used directly in the grammar, but handy to have anyway. +Adt = + Enum +| Struct +| Union + +Const = + Attr* Visibility? + 'default'? + 'const' (Name | '_') ':' Type + ('=' body:Expr)? ';' + +Static = + Attr* Visibility? + 'static' 'mut'? Name ':' Type + ('=' body:Expr)? ';' + +Trait = + Attr* Visibility? + 'unsafe'? 'auto'? + 'trait' Name GenericParamList? (':' TypeBoundList?)? WhereClause? + AssocItemList + +AssocItemList = + '{' Attr* AssocItem* '}' + +AssocItem = + Const +| Fn +| MacroCall +| TypeAlias + +Impl = + Attr* Visibility? + 'default'? 'unsafe'? + 'impl' GenericParamList? ('const'? '!'? trait:Type 'for')? self_ty:Type WhereClause? + AssocItemList + +ExternBlock = + Attr* 'unsafe'? Abi ExternItemList + +ExternItemList = + '{' Attr* ExternItem* '}' + +ExternItem = + Fn +| MacroCall +| Static +| TypeAlias + +GenericParamList = + '<' (GenericParam (',' GenericParam)* ','?)? '>' + +GenericParam = + ConstParam +| LifetimeParam +| TypeParam + +TypeParam = + Attr* Name (':' TypeBoundList?)? + ('=' default_type:Type)? + +ConstParam = + Attr* 'const' Name ':' Type + ('=' default_val:Expr)? + +LifetimeParam = + Attr* Lifetime (':' TypeBoundList?)? + +WhereClause = + 'where' predicates:(WherePred (',' WherePred)* ','?) + +WherePred = + ('for' GenericParamList)? 
(Lifetime | Type) ':' TypeBoundList? + +Visibility = + 'pub' ('(' 'in'? Path ')')? + +Attr = + '#' '!'? '[' Meta ']' + +Meta = + Path ('=' Expr | TokenTree)? + +//****************************// +// Statements and Expressions // +//****************************// + +Stmt = + ';' +| ExprStmt +| Item +| LetStmt + +LetStmt = + Attr* 'let' Pat (':' Type)? + '=' initializer:Expr + LetElse? + ';' + +LetElse = + 'else' BlockExpr + +ExprStmt = + Expr ';'? + +Expr = + ArrayExpr +| AwaitExpr +| BinExpr +| BlockExpr +| BoxExpr +| BreakExpr +| CallExpr +| CastExpr +| ClosureExpr +| ContinueExpr +| FieldExpr +| ForExpr +| IfExpr +| IndexExpr +| Literal +| LoopExpr +| MacroExpr +| MacroStmts +| MatchExpr +| MethodCallExpr +| ParenExpr +| PathExpr +| PrefixExpr +| RangeExpr +| RecordExpr +| RefExpr +| ReturnExpr +| TryExpr +| TupleExpr +| WhileExpr +| YieldExpr +| LetExpr +| UnderscoreExpr + +MacroExpr = + MacroCall + +Literal = + Attr* value:( + 'int_number' | 'float_number' + | 'string' | 'raw_string' + | 'byte_string' | 'raw_byte_string' + | 'true' | 'false' + | 'char' | 'byte' + ) + +PathExpr = + Attr* Path + +StmtList = + '{' + Attr* + statements:Stmt* + tail_expr:Expr? + '}' + +RefExpr = + Attr* '&' ('raw' | 'mut' | 'const') Expr + +TryExpr = + Attr* Expr '?' + +BlockExpr = + Attr* Label? ('try' | 'unsafe' | 'async' | 'const') StmtList + +PrefixExpr = + Attr* op:('-' | '!' | '*') Expr + +BinExpr = + Attr* + lhs:Expr + op:( + '||' | '&&' + | '==' | '!=' | '<=' | '>=' | '<' | '>' + | '+' | '*' | '-' | '/' | '%' | '<<' | '>>' | '^' | '|' | '&' + | '=' | '+=' | '/=' | '*=' | '%=' | '>>=' | '<<=' | '-=' | '|=' | '&=' | '^=' + ) + rhs:Expr + +CastExpr = + Attr* Expr 'as' Type + +ParenExpr = + Attr* '(' Attr* Expr ')' + +ArrayExpr = + Attr* '[' Attr* ( + (Expr (',' Expr)* ','?)? + | Expr ';' Expr + ) ']' + +IndexExpr = + Attr* base:Expr '[' index:Expr ']' + +TupleExpr = + Attr* '(' Attr* fields:(Expr (',' Expr)* ','?)? 
')' + +RecordExpr = + Path RecordExprFieldList + +RecordExprFieldList = + '{' + Attr* + fields:(RecordExprField (',' RecordExprField)* ','?)? + ('..' spread:Expr?)? + '}' + +RecordExprField = + Attr* (NameRef ':')? Expr + +CallExpr = + Attr* Expr ArgList + +ArgList = + '(' args:(Expr (',' Expr)* ','?)? ')' + +MethodCallExpr = + Attr* receiver:Expr '.' NameRef GenericArgList? ArgList + +FieldExpr = + Attr* Expr '.' NameRef + +ClosureExpr = + Attr* ('for' GenericParamList)? 'static'? 'async'? 'move'? ParamList RetType? + body:Expr + +IfExpr = + Attr* 'if' condition:Expr then_branch:BlockExpr + ('else' else_branch:(IfExpr | BlockExpr))? + +LoopExpr = + Attr* Label? 'loop' + loop_body:BlockExpr + +ForExpr = + Attr* Label? 'for' Pat 'in' iterable:Expr + loop_body:BlockExpr + +WhileExpr = + Attr* Label? 'while' condition:Expr + loop_body:BlockExpr + +Label = + Lifetime ':' + +BreakExpr = + Attr* 'break' Lifetime? Expr? + +ContinueExpr = + Attr* 'continue' Lifetime? + +RangeExpr = + Attr* start:Expr? op:('..' | '..=') end:Expr? + +MatchExpr = + Attr* 'match' Expr MatchArmList + +MatchArmList = + '{' + Attr* + arms:MatchArm* + '}' + +MatchArm = + Attr* Pat guard:MatchGuard? '=>' Expr ','? + +MatchGuard = + 'if' condition:Expr + +ReturnExpr = + Attr* 'return' Expr? + +YieldExpr = + Attr* 'yield' Expr? + +LetExpr = + Attr* 'let' Pat '=' Expr + +UnderscoreExpr = + Attr* '_' + +AwaitExpr = + Attr* Expr '.' 'await' + +BoxExpr = + Attr* 'box' Expr + +//*************************// +// Types // +//*************************// + +Type = + ArrayType +| DynTraitType +| FnPtrType +| ForType +| ImplTraitType +| InferType +| MacroType +| NeverType +| ParenType +| PathType +| PtrType +| RefType +| SliceType +| TupleType + +ParenType = + '(' Type ')' + +NeverType = + '!' + +MacroType = + MacroCall + +PathType = + Path + +TupleType = + '(' fields:(Type (',' Type)* ','?)? ')' + +PtrType = + '*' ('const' | 'mut') Type + +RefType = + '&' Lifetime? 'mut'? 
Type + +ArrayType = + '[' Type ';' Expr ']' + +SliceType = + '[' Type ']' + +InferType = + '_' + +FnPtrType = + 'const'? 'async'? 'unsafe'? Abi? 'fn' ParamList RetType? + +ForType = + 'for' GenericParamList Type + +ImplTraitType = + 'impl' TypeBoundList + +DynTraitType = + 'dyn' TypeBoundList + +TypeBoundList = + bounds:(TypeBound ('+' TypeBound)* '+'?) + +TypeBound = + Lifetime +| ('?' | '~' 'const')? Type + +//************************// +// Patterns // +//************************// + +Pat = + IdentPat +| BoxPat +| RestPat +| LiteralPat +| MacroPat +| OrPat +| ParenPat +| PathPat +| WildcardPat +| RangePat +| RecordPat +| RefPat +| SlicePat +| TuplePat +| TupleStructPat +| ConstBlockPat + +LiteralPat = + Literal + +IdentPat = + Attr* 'ref'? 'mut'? Name ('@' Pat)? + +WildcardPat = + '_' + +RangePat = + // 1.. + start:Pat op:('..' | '..=') + // 1..2 + | start:Pat op:('..' | '..=') end:Pat + // ..2 + | op:('..' | '..=') end:Pat + +RefPat = + '&' 'mut'? Pat + +RecordPat = + Path RecordPatFieldList + +RecordPatFieldList = + '{' + fields:(RecordPatField (',' RecordPatField)* ','?)? + RestPat? + '}' + +RecordPatField = + Attr* (NameRef ':')? Pat + +TupleStructPat = + Path '(' fields:(Pat (',' Pat)* ','?)? ')' + +TuplePat = + '(' fields:(Pat (',' Pat)* ','?)? ')' + +ParenPat = + '(' Pat ')' + +SlicePat = + '[' (Pat (',' Pat)* ','?)? ']' + +PathPat = + Path + +OrPat = + (Pat ('|' Pat)* '|'?) + +BoxPat = + 'box' Pat + +RestPat = + Attr* '..' + +MacroPat = + MacroCall + +ConstBlockPat = + 'const' BlockExpr diff --git a/src/tools/rust-analyzer/crates/syntax/src/algo.rs b/src/tools/rust-analyzer/crates/syntax/src/algo.rs new file mode 100644 index 000000000..8b14789dd --- /dev/null +++ b/src/tools/rust-analyzer/crates/syntax/src/algo.rs @@ -0,0 +1,660 @@ +//! Collection of assorted algorithms for syntax trees. 
+ +use std::hash::BuildHasherDefault; + +use indexmap::IndexMap; +use itertools::Itertools; +use rustc_hash::FxHashMap; +use text_edit::TextEditBuilder; + +use crate::{ + AstNode, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken, TextRange, + TextSize, +}; + +/// Returns ancestors of the node at the offset, sorted by length. This should +/// do the right thing at an edge, e.g. when searching for expressions at `{ +/// $0foo }` we will get the name reference instead of the whole block, which +/// we would get if we just did `find_token_at_offset(...).flat_map(|t| +/// t.parent().ancestors())`. +pub fn ancestors_at_offset( + node: &SyntaxNode, + offset: TextSize, +) -> impl Iterator { + node.token_at_offset(offset) + .map(|token| token.parent_ancestors()) + .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len()) +} + +/// Finds a node of specific Ast type at offset. Note that this is slightly +/// imprecise: if the cursor is strictly between two nodes of the desired type, +/// as in +/// +/// ```no_run +/// struct Foo {}|struct Bar; +/// ``` +/// +/// then the shorter node will be silently preferred. 
+pub fn find_node_at_offset(syntax: &SyntaxNode, offset: TextSize) -> Option { + ancestors_at_offset(syntax, offset).find_map(N::cast) +} + +pub fn find_node_at_range(syntax: &SyntaxNode, range: TextRange) -> Option { + syntax.covering_element(range).ancestors().find_map(N::cast) +} + +/// Skip to next non `trivia` token +pub fn skip_trivia_token(mut token: SyntaxToken, direction: Direction) -> Option { + while token.kind().is_trivia() { + token = match direction { + Direction::Next => token.next_token()?, + Direction::Prev => token.prev_token()?, + } + } + Some(token) +} +/// Skip to next non `whitespace` token +pub fn skip_whitespace_token(mut token: SyntaxToken, direction: Direction) -> Option { + while token.kind() == SyntaxKind::WHITESPACE { + token = match direction { + Direction::Next => token.next_token()?, + Direction::Prev => token.prev_token()?, + } + } + Some(token) +} + +/// Finds the first sibling in the given direction which is not `trivia` +pub fn non_trivia_sibling(element: SyntaxElement, direction: Direction) -> Option { + return match element { + NodeOrToken::Node(node) => node.siblings_with_tokens(direction).skip(1).find(not_trivia), + NodeOrToken::Token(token) => token.siblings_with_tokens(direction).skip(1).find(not_trivia), + }; + + fn not_trivia(element: &SyntaxElement) -> bool { + match element { + NodeOrToken::Node(_) => true, + NodeOrToken::Token(token) => !token.kind().is_trivia(), + } + } +} + +pub fn least_common_ancestor(u: &SyntaxNode, v: &SyntaxNode) -> Option { + if u == v { + return Some(u.clone()); + } + + let u_depth = u.ancestors().count(); + let v_depth = v.ancestors().count(); + let keep = u_depth.min(v_depth); + + let u_candidates = u.ancestors().skip(u_depth - keep); + let v_candidates = v.ancestors().skip(v_depth - keep); + let (res, _) = u_candidates.zip(v_candidates).find(|(x, y)| x == y)?; + Some(res) +} + +pub fn neighbor(me: &T, direction: Direction) -> Option { + 
me.syntax().siblings(direction).skip(1).find_map(T::cast) +} + +pub fn has_errors(node: &SyntaxNode) -> bool { + node.children().any(|it| it.kind() == SyntaxKind::ERROR) +} + +type FxIndexMap = IndexMap>; + +#[derive(Debug, Hash, PartialEq, Eq)] +enum TreeDiffInsertPos { + After(SyntaxElement), + AsFirstChild(SyntaxElement), +} + +#[derive(Debug)] +pub struct TreeDiff { + replacements: FxHashMap, + deletions: Vec, + // the vec as well as the indexmap are both here to preserve order + insertions: FxIndexMap>, +} + +impl TreeDiff { + pub fn into_text_edit(&self, builder: &mut TextEditBuilder) { + let _p = profile::span("into_text_edit"); + + for (anchor, to) in &self.insertions { + let offset = match anchor { + TreeDiffInsertPos::After(it) => it.text_range().end(), + TreeDiffInsertPos::AsFirstChild(it) => it.text_range().start(), + }; + to.iter().for_each(|to| builder.insert(offset, to.to_string())); + } + for (from, to) in &self.replacements { + builder.replace(from.text_range(), to.to_string()); + } + for text_range in self.deletions.iter().map(SyntaxElement::text_range) { + builder.delete(text_range); + } + } + + pub fn is_empty(&self) -> bool { + self.replacements.is_empty() && self.deletions.is_empty() && self.insertions.is_empty() + } +} + +/// Finds a (potentially minimal) diff, which, applied to `from`, will result in `to`. +/// +/// Specifically, returns a structure that consists of a replacements, insertions and deletions +/// such that applying this map on `from` will result in `to`. +/// +/// This function tries to find a fine-grained diff. 
+pub fn diff(from: &SyntaxNode, to: &SyntaxNode) -> TreeDiff { + let _p = profile::span("diff"); + + let mut diff = TreeDiff { + replacements: FxHashMap::default(), + insertions: FxIndexMap::default(), + deletions: Vec::new(), + }; + let (from, to) = (from.clone().into(), to.clone().into()); + + if !syntax_element_eq(&from, &to) { + go(&mut diff, from, to); + } + return diff; + + fn syntax_element_eq(lhs: &SyntaxElement, rhs: &SyntaxElement) -> bool { + lhs.kind() == rhs.kind() + && lhs.text_range().len() == rhs.text_range().len() + && match (&lhs, &rhs) { + (NodeOrToken::Node(lhs), NodeOrToken::Node(rhs)) => { + lhs == rhs || lhs.text() == rhs.text() + } + (NodeOrToken::Token(lhs), NodeOrToken::Token(rhs)) => lhs.text() == rhs.text(), + _ => false, + } + } + + // FIXME: this is horribly inefficient. I bet there's a cool algorithm to diff trees properly. + fn go(diff: &mut TreeDiff, lhs: SyntaxElement, rhs: SyntaxElement) { + let (lhs, rhs) = match lhs.as_node().zip(rhs.as_node()) { + Some((lhs, rhs)) => (lhs, rhs), + _ => { + cov_mark::hit!(diff_node_token_replace); + diff.replacements.insert(lhs, rhs); + return; + } + }; + + let mut look_ahead_scratch = Vec::default(); + + let mut rhs_children = rhs.children_with_tokens(); + let mut lhs_children = lhs.children_with_tokens(); + let mut last_lhs = None; + loop { + let lhs_child = lhs_children.next(); + match (lhs_child.clone(), rhs_children.next()) { + (None, None) => break, + (None, Some(element)) => { + let insert_pos = match last_lhs.clone() { + Some(prev) => { + cov_mark::hit!(diff_insert); + TreeDiffInsertPos::After(prev) + } + // first iteration, insert into out parent as the first child + None => { + cov_mark::hit!(diff_insert_as_first_child); + TreeDiffInsertPos::AsFirstChild(lhs.clone().into()) + } + }; + diff.insertions.entry(insert_pos).or_insert_with(Vec::new).push(element); + } + (Some(element), None) => { + cov_mark::hit!(diff_delete); + diff.deletions.push(element); + } + (Some(ref lhs_ele), Some(ref 
rhs_ele)) if syntax_element_eq(lhs_ele, rhs_ele) => {} + (Some(lhs_ele), Some(rhs_ele)) => { + // nodes differ, look for lhs_ele in rhs, if its found we can mark everything up + // until that element as insertions. This is important to keep the diff minimal + // in regards to insertions that have been actually done, this is important for + // use insertions as we do not want to replace the entire module node. + look_ahead_scratch.push(rhs_ele.clone()); + let mut rhs_children_clone = rhs_children.clone(); + let mut insert = false; + for rhs_child in &mut rhs_children_clone { + if syntax_element_eq(&lhs_ele, &rhs_child) { + cov_mark::hit!(diff_insertions); + insert = true; + break; + } + look_ahead_scratch.push(rhs_child); + } + let drain = look_ahead_scratch.drain(..); + if insert { + let insert_pos = if let Some(prev) = last_lhs.clone().filter(|_| insert) { + TreeDiffInsertPos::After(prev) + } else { + cov_mark::hit!(insert_first_child); + TreeDiffInsertPos::AsFirstChild(lhs.clone().into()) + }; + + diff.insertions.entry(insert_pos).or_insert_with(Vec::new).extend(drain); + rhs_children = rhs_children_clone; + } else { + go(diff, lhs_ele, rhs_ele); + } + } + } + last_lhs = lhs_child.or(last_lhs); + } + } +} + +#[cfg(test)] +mod tests { + use expect_test::{expect, Expect}; + use itertools::Itertools; + use parser::SyntaxKind; + use text_edit::TextEdit; + + use crate::{AstNode, SyntaxElement}; + + #[test] + fn replace_node_token() { + cov_mark::check!(diff_node_token_replace); + check_diff( + r#"use node;"#, + r#"ident"#, + expect![[r#" + insertions: + + + + replacements: + + Line 0: Token(USE_KW@0..3 "use") -> ident + + deletions: + + Line 1: " " + Line 1: node + Line 1: ; + "#]], + ); + } + + #[test] + fn replace_parent() { + cov_mark::check!(diff_insert_as_first_child); + check_diff( + r#""#, + r#"use foo::bar;"#, + expect![[r#" + insertions: + + Line 0: AsFirstChild(Node(SOURCE_FILE@0..0)) + -> use foo::bar; + + replacements: + + + + deletions: + + + "#]], + ); + 
} + + #[test] + fn insert_last() { + cov_mark::check!(diff_insert); + check_diff( + r#" +use foo; +use bar;"#, + r#" +use foo; +use bar; +use baz;"#, + expect![[r#" + insertions: + + Line 2: After(Node(USE@10..18)) + -> "\n" + -> use baz; + + replacements: + + + + deletions: + + + "#]], + ); + } + + #[test] + fn insert_middle() { + check_diff( + r#" +use foo; +use baz;"#, + r#" +use foo; +use bar; +use baz;"#, + expect![[r#" + insertions: + + Line 2: After(Token(WHITESPACE@9..10 "\n")) + -> use bar; + -> "\n" + + replacements: + + + + deletions: + + + "#]], + ) + } + + #[test] + fn insert_first() { + check_diff( + r#" +use bar; +use baz;"#, + r#" +use foo; +use bar; +use baz;"#, + expect![[r#" + insertions: + + Line 0: After(Token(WHITESPACE@0..1 "\n")) + -> use foo; + -> "\n" + + replacements: + + + + deletions: + + + "#]], + ) + } + + #[test] + fn first_child_insertion() { + cov_mark::check!(insert_first_child); + check_diff( + r#"fn main() { + stdi + }"#, + r#"use foo::bar; + + fn main() { + stdi + }"#, + expect![[r#" + insertions: + + Line 0: AsFirstChild(Node(SOURCE_FILE@0..30)) + -> use foo::bar; + -> "\n\n " + + replacements: + + + + deletions: + + + "#]], + ); + } + + #[test] + fn delete_last() { + cov_mark::check!(diff_delete); + check_diff( + r#"use foo; + use bar;"#, + r#"use foo;"#, + expect![[r#" + insertions: + + + + replacements: + + + + deletions: + + Line 1: "\n " + Line 2: use bar; + "#]], + ); + } + + #[test] + fn delete_middle() { + cov_mark::check!(diff_insertions); + check_diff( + r#" +use expect_test::{expect, Expect}; +use text_edit::TextEdit; + +use crate::AstNode; +"#, + r#" +use expect_test::{expect, Expect}; + +use crate::AstNode; +"#, + expect![[r#" + insertions: + + Line 1: After(Node(USE@1..35)) + -> "\n\n" + -> use crate::AstNode; + + replacements: + + + + deletions: + + Line 2: use text_edit::TextEdit; + Line 3: "\n\n" + Line 4: use crate::AstNode; + Line 5: "\n" + "#]], + ) + } + + #[test] + fn delete_first() { + check_diff( + r#" 
+use text_edit::TextEdit; + +use crate::AstNode; +"#, + r#" +use crate::AstNode; +"#, + expect![[r#" + insertions: + + + + replacements: + + Line 2: Token(IDENT@5..14 "text_edit") -> crate + Line 2: Token(IDENT@16..24 "TextEdit") -> AstNode + Line 2: Token(WHITESPACE@25..27 "\n\n") -> "\n" + + deletions: + + Line 3: use crate::AstNode; + Line 4: "\n" + "#]], + ) + } + + #[test] + fn merge_use() { + check_diff( + r#" +use std::{ + fmt, + hash::BuildHasherDefault, + ops::{self, RangeInclusive}, +}; +"#, + r#" +use std::fmt; +use std::hash::BuildHasherDefault; +use std::ops::{self, RangeInclusive}; +"#, + expect![[r#" + insertions: + + Line 2: After(Node(PATH_SEGMENT@5..8)) + -> :: + -> fmt + Line 6: After(Token(WHITESPACE@86..87 "\n")) + -> use std::hash::BuildHasherDefault; + -> "\n" + -> use std::ops::{self, RangeInclusive}; + -> "\n" + + replacements: + + Line 2: Token(IDENT@5..8 "std") -> std + + deletions: + + Line 2: :: + Line 2: { + fmt, + hash::BuildHasherDefault, + ops::{self, RangeInclusive}, + } + "#]], + ) + } + + #[test] + fn early_return_assist() { + check_diff( + r#" +fn main() { + if let Ok(x) = Err(92) { + foo(x); + } +} + "#, + r#" +fn main() { + let x = match Err(92) { + Ok(it) => it, + _ => return, + }; + foo(x); +} + "#, + expect![[r#" + insertions: + + Line 3: After(Node(BLOCK_EXPR@40..63)) + -> " " + -> match Err(92) { + Ok(it) => it, + _ => return, + } + -> ; + Line 3: After(Node(IF_EXPR@17..63)) + -> "\n " + -> foo(x); + + replacements: + + Line 3: Token(IF_KW@17..19 "if") -> let + Line 3: Token(LET_KW@20..23 "let") -> x + Line 3: Node(BLOCK_EXPR@40..63) -> = + + deletions: + + Line 3: " " + Line 3: Ok(x) + Line 3: " " + Line 3: = + Line 3: " " + Line 3: Err(92) + "#]], + ) + } + + fn check_diff(from: &str, to: &str, expected_diff: Expect) { + let from_node = crate::SourceFile::parse(from).tree().syntax().clone(); + let to_node = crate::SourceFile::parse(to).tree().syntax().clone(); + let diff = super::diff(&from_node, &to_node); + + let 
line_number = + |syn: &SyntaxElement| from[..syn.text_range().start().into()].lines().count(); + + let fmt_syntax = |syn: &SyntaxElement| match syn.kind() { + SyntaxKind::WHITESPACE => format!("{:?}", syn.to_string()), + _ => format!("{}", syn), + }; + + let insertions = + diff.insertions.iter().format_with("\n", |(k, v), f| -> Result<(), std::fmt::Error> { + f(&format!( + "Line {}: {:?}\n-> {}", + line_number(match k { + super::TreeDiffInsertPos::After(syn) => syn, + super::TreeDiffInsertPos::AsFirstChild(syn) => syn, + }), + k, + v.iter().format_with("\n-> ", |v, f| f(&fmt_syntax(v))) + )) + }); + + let replacements = diff + .replacements + .iter() + .sorted_by_key(|(syntax, _)| syntax.text_range().start()) + .format_with("\n", |(k, v), f| { + f(&format!("Line {}: {:?} -> {}", line_number(k), k, fmt_syntax(v))) + }); + + let deletions = diff + .deletions + .iter() + .format_with("\n", |v, f| f(&format!("Line {}: {}", line_number(v), &fmt_syntax(v)))); + + let actual = format!( + "insertions:\n\n{}\n\nreplacements:\n\n{}\n\ndeletions:\n\n{}\n", + insertions, replacements, deletions + ); + expected_diff.assert_eq(&actual); + + let mut from = from.to_owned(); + let mut text_edit = TextEdit::builder(); + diff.into_text_edit(&mut text_edit); + text_edit.finish().apply(&mut from); + assert_eq!(&*from, to, "diff did not turn `from` to `to`"); + } +} diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast.rs b/src/tools/rust-analyzer/crates/syntax/src/ast.rs new file mode 100644 index 000000000..4aa64d0d6 --- /dev/null +++ b/src/tools/rust-analyzer/crates/syntax/src/ast.rs @@ -0,0 +1,367 @@ +//! 
Abstract Syntax Tree, layered on top of untyped `SyntaxNode`s + +mod generated; +mod traits; +mod token_ext; +mod node_ext; +mod expr_ext; +mod operators; +pub mod edit; +pub mod edit_in_place; +pub mod make; + +use std::marker::PhantomData; + +use crate::{ + syntax_node::{SyntaxNode, SyntaxNodeChildren, SyntaxToken}, + SyntaxKind, +}; + +pub use self::{ + expr_ext::{ArrayExprKind, BlockModifier, CallableExpr, ElseBranch, LiteralKind}, + generated::{nodes::*, tokens::*}, + node_ext::{ + AttrKind, FieldKind, Macro, NameLike, NameOrNameRef, PathSegmentKind, SelfParamKind, + SlicePatComponents, StructKind, TypeBoundKind, TypeOrConstParam, VisibilityKind, + }, + operators::{ArithOp, BinaryOp, CmpOp, LogicOp, Ordering, RangeOp, UnaryOp}, + token_ext::{CommentKind, CommentPlacement, CommentShape, IsString, QuoteOffsets, Radix}, + traits::{ + AttrDocCommentIter, DocCommentIter, HasArgList, HasAttrs, HasDocComments, HasGenericParams, + HasLoopBody, HasModuleItem, HasName, HasTypeBounds, HasVisibility, + }, +}; + +/// The main trait to go from untyped `SyntaxNode` to a typed ast. The +/// conversion itself has zero runtime cost: ast and syntax nodes have exactly +/// the same representation: a pointer to the tree root and a pointer to the +/// node itself. +pub trait AstNode { + fn can_cast(kind: SyntaxKind) -> bool + where + Self: Sized; + + fn cast(syntax: SyntaxNode) -> Option + where + Self: Sized; + + fn syntax(&self) -> &SyntaxNode; + fn clone_for_update(&self) -> Self + where + Self: Sized, + { + Self::cast(self.syntax().clone_for_update()).unwrap() + } + fn clone_subtree(&self) -> Self + where + Self: Sized, + { + Self::cast(self.syntax().clone_subtree()).unwrap() + } +} + +/// Like `AstNode`, but wraps tokens rather than interior nodes. 
+pub trait AstToken { + fn can_cast(token: SyntaxKind) -> bool + where + Self: Sized; + + fn cast(syntax: SyntaxToken) -> Option + where + Self: Sized; + + fn syntax(&self) -> &SyntaxToken; + + fn text(&self) -> &str { + self.syntax().text() + } +} + +/// An iterator over `SyntaxNode` children of a particular AST type. +#[derive(Debug, Clone)] +pub struct AstChildren { + inner: SyntaxNodeChildren, + ph: PhantomData, +} + +impl AstChildren { + fn new(parent: &SyntaxNode) -> Self { + AstChildren { inner: parent.children(), ph: PhantomData } + } +} + +impl Iterator for AstChildren { + type Item = N; + fn next(&mut self) -> Option { + self.inner.find_map(N::cast) + } +} + +mod support { + use super::{AstChildren, AstNode, SyntaxKind, SyntaxNode, SyntaxToken}; + + pub(super) fn child(parent: &SyntaxNode) -> Option { + parent.children().find_map(N::cast) + } + + pub(super) fn children(parent: &SyntaxNode) -> AstChildren { + AstChildren::new(parent) + } + + pub(super) fn token(parent: &SyntaxNode, kind: SyntaxKind) -> Option { + parent.children_with_tokens().filter_map(|it| it.into_token()).find(|it| it.kind() == kind) + } +} + +#[test] +fn assert_ast_is_object_safe() { + fn _f(_: &dyn AstNode, _: &dyn HasName) {} +} + +#[test] +fn test_doc_comment_none() { + let file = SourceFile::parse( + r#" + // non-doc + mod foo {} + "#, + ) + .ok() + .unwrap(); + let module = file.syntax().descendants().find_map(Module::cast).unwrap(); + assert!(module.doc_comments().doc_comment_text().is_none()); +} + +#[test] +fn test_outer_doc_comment_of_items() { + let file = SourceFile::parse( + r#" + /// doc + // non-doc + mod foo {} + "#, + ) + .ok() + .unwrap(); + let module = file.syntax().descendants().find_map(Module::cast).unwrap(); + assert_eq!(" doc", module.doc_comments().doc_comment_text().unwrap()); +} + +#[test] +fn test_inner_doc_comment_of_items() { + let file = SourceFile::parse( + r#" + //! 
doc + // non-doc + mod foo {} + "#, + ) + .ok() + .unwrap(); + let module = file.syntax().descendants().find_map(Module::cast).unwrap(); + assert!(module.doc_comments().doc_comment_text().is_none()); +} + +#[test] +fn test_doc_comment_of_statics() { + let file = SourceFile::parse( + r#" + /// Number of levels + static LEVELS: i32 = 0; + "#, + ) + .ok() + .unwrap(); + let st = file.syntax().descendants().find_map(Static::cast).unwrap(); + assert_eq!(" Number of levels", st.doc_comments().doc_comment_text().unwrap()); +} + +#[test] +fn test_doc_comment_preserves_indents() { + let file = SourceFile::parse( + r#" + /// doc1 + /// ``` + /// fn foo() { + /// // ... + /// } + /// ``` + mod foo {} + "#, + ) + .ok() + .unwrap(); + let module = file.syntax().descendants().find_map(Module::cast).unwrap(); + assert_eq!( + " doc1\n ```\n fn foo() {\n // ...\n }\n ```", + module.doc_comments().doc_comment_text().unwrap() + ); +} + +#[test] +fn test_doc_comment_preserves_newlines() { + let file = SourceFile::parse( + r#" + /// this + /// is + /// mod + /// foo + mod foo {} + "#, + ) + .ok() + .unwrap(); + let module = file.syntax().descendants().find_map(Module::cast).unwrap(); + assert_eq!(" this\n is\n mod\n foo", module.doc_comments().doc_comment_text().unwrap()); +} + +#[test] +fn test_doc_comment_single_line_block_strips_suffix() { + let file = SourceFile::parse( + r#" + /** this is mod foo*/ + mod foo {} + "#, + ) + .ok() + .unwrap(); + let module = file.syntax().descendants().find_map(Module::cast).unwrap(); + assert_eq!(" this is mod foo", module.doc_comments().doc_comment_text().unwrap()); +} + +#[test] +fn test_doc_comment_single_line_block_strips_suffix_whitespace() { + let file = SourceFile::parse( + r#" + /** this is mod foo */ + mod foo {} + "#, + ) + .ok() + .unwrap(); + let module = file.syntax().descendants().find_map(Module::cast).unwrap(); + assert_eq!(" this is mod foo ", module.doc_comments().doc_comment_text().unwrap()); +} + +#[test] +fn 
test_doc_comment_multi_line_block_strips_suffix() { + let file = SourceFile::parse( + r#" + /** + this + is + mod foo + */ + mod foo {} + "#, + ) + .ok() + .unwrap(); + let module = file.syntax().descendants().find_map(Module::cast).unwrap(); + assert_eq!( + "\n this\n is\n mod foo\n ", + module.doc_comments().doc_comment_text().unwrap() + ); +} + +#[test] +fn test_comments_preserve_trailing_whitespace() { + let file = SourceFile::parse( + "\n/// Representation of a Realm. \n/// In the specification these are called Realm Records.\nstruct Realm {}", + ) + .ok() + .unwrap(); + let def = file.syntax().descendants().find_map(Struct::cast).unwrap(); + assert_eq!( + " Representation of a Realm. \n In the specification these are called Realm Records.", + def.doc_comments().doc_comment_text().unwrap() + ); +} + +#[test] +fn test_four_slash_line_comment() { + let file = SourceFile::parse( + r#" + //// too many slashes to be a doc comment + /// doc comment + mod foo {} + "#, + ) + .ok() + .unwrap(); + let module = file.syntax().descendants().find_map(Module::cast).unwrap(); + assert_eq!(" doc comment", module.doc_comments().doc_comment_text().unwrap()); +} + +#[test] +fn test_where_predicates() { + fn assert_bound(text: &str, bound: Option) { + assert_eq!(text, bound.unwrap().syntax().text().to_string()); + } + + let file = SourceFile::parse( + r#" +fn foo() +where + T: Clone + Copy + Debug + 'static, + 'a: 'b + 'c, + Iterator::Item: 'a + Debug, + Iterator::Item: Debug + 'a, + ::Item: Debug + 'a, + for<'a> F: Fn(&'a str) +{} + "#, + ) + .ok() + .unwrap(); + let where_clause = file.syntax().descendants().find_map(WhereClause::cast).unwrap(); + + let mut predicates = where_clause.predicates(); + + let pred = predicates.next().unwrap(); + let mut bounds = pred.type_bound_list().unwrap().bounds(); + + assert!(pred.for_token().is_none()); + assert!(pred.generic_param_list().is_none()); + assert_eq!("T", pred.ty().unwrap().syntax().text().to_string()); + assert_bound("Clone", 
bounds.next()); + assert_bound("Copy", bounds.next()); + assert_bound("Debug", bounds.next()); + assert_bound("'static", bounds.next()); + + let pred = predicates.next().unwrap(); + let mut bounds = pred.type_bound_list().unwrap().bounds(); + + assert_eq!("'a", pred.lifetime().unwrap().lifetime_ident_token().unwrap().text()); + + assert_bound("'b", bounds.next()); + assert_bound("'c", bounds.next()); + + let pred = predicates.next().unwrap(); + let mut bounds = pred.type_bound_list().unwrap().bounds(); + + assert_eq!("Iterator::Item", pred.ty().unwrap().syntax().text().to_string()); + assert_bound("'a", bounds.next()); + + let pred = predicates.next().unwrap(); + let mut bounds = pred.type_bound_list().unwrap().bounds(); + + assert_eq!("Iterator::Item", pred.ty().unwrap().syntax().text().to_string()); + assert_bound("Debug", bounds.next()); + assert_bound("'a", bounds.next()); + + let pred = predicates.next().unwrap(); + let mut bounds = pred.type_bound_list().unwrap().bounds(); + + assert_eq!("::Item", pred.ty().unwrap().syntax().text().to_string()); + assert_bound("Debug", bounds.next()); + assert_bound("'a", bounds.next()); + + let pred = predicates.next().unwrap(); + let mut bounds = pred.type_bound_list().unwrap().bounds(); + + assert!(pred.for_token().is_some()); + assert_eq!("<'a>", pred.generic_param_list().unwrap().syntax().text().to_string()); + assert_eq!("F", pred.ty().unwrap().syntax().text().to_string()); + assert_bound("Fn(&'a str)", bounds.next()); +} diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/edit.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/edit.rs new file mode 100644 index 000000000..15805dfc8 --- /dev/null +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/edit.rs @@ -0,0 +1,174 @@ +//! This module contains functions for editing syntax trees. As the trees are +//! immutable, all function here return a fresh copy of the tree, instead of +//! doing an in-place modification. 
+use std::{fmt, iter, ops}; + +use crate::{ + ast::{self, make, AstNode}, + ted, AstToken, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxToken, +}; + +#[derive(Debug, Clone, Copy)] +pub struct IndentLevel(pub u8); + +impl From for IndentLevel { + fn from(level: u8) -> IndentLevel { + IndentLevel(level) + } +} + +impl fmt::Display for IndentLevel { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let spaces = " "; + let buf; + let len = self.0 as usize * 4; + let indent = if len <= spaces.len() { + &spaces[..len] + } else { + buf = " ".repeat(len); + &buf + }; + fmt::Display::fmt(indent, f) + } +} + +impl ops::Add for IndentLevel { + type Output = IndentLevel; + fn add(self, rhs: u8) -> IndentLevel { + IndentLevel(self.0 + rhs) + } +} + +impl IndentLevel { + pub fn single() -> IndentLevel { + IndentLevel(0) + } + pub fn is_zero(&self) -> bool { + self.0 == 0 + } + pub fn from_element(element: &SyntaxElement) -> IndentLevel { + match element { + rowan::NodeOrToken::Node(it) => IndentLevel::from_node(it), + rowan::NodeOrToken::Token(it) => IndentLevel::from_token(it), + } + } + + pub fn from_node(node: &SyntaxNode) -> IndentLevel { + match node.first_token() { + Some(it) => Self::from_token(&it), + None => IndentLevel(0), + } + } + + pub fn from_token(token: &SyntaxToken) -> IndentLevel { + for ws in prev_tokens(token.clone()).filter_map(ast::Whitespace::cast) { + let text = ws.syntax().text(); + if let Some(pos) = text.rfind('\n') { + let level = text[pos + 1..].chars().count() / 4; + return IndentLevel(level as u8); + } + } + IndentLevel(0) + } + + /// XXX: this intentionally doesn't change the indent of the very first token. + /// Ie, in something like + /// ``` + /// fn foo() { + /// 92 + /// } + /// ``` + /// if you indent the block, the `{` token would stay put. 
+ pub(super) fn increase_indent(self, node: &SyntaxNode) { + let tokens = node.preorder_with_tokens().filter_map(|event| match event { + rowan::WalkEvent::Leave(NodeOrToken::Token(it)) => Some(it), + _ => None, + }); + for token in tokens { + if let Some(ws) = ast::Whitespace::cast(token) { + if ws.text().contains('\n') { + let new_ws = make::tokens::whitespace(&format!("{}{}", ws.syntax(), self)); + ted::replace(ws.syntax(), &new_ws); + } + } + } + } + + pub(super) fn decrease_indent(self, node: &SyntaxNode) { + let tokens = node.preorder_with_tokens().filter_map(|event| match event { + rowan::WalkEvent::Leave(NodeOrToken::Token(it)) => Some(it), + _ => None, + }); + for token in tokens { + if let Some(ws) = ast::Whitespace::cast(token) { + if ws.text().contains('\n') { + let new_ws = make::tokens::whitespace( + &ws.syntax().text().replace(&format!("\n{}", self), "\n"), + ); + ted::replace(ws.syntax(), &new_ws); + } + } + } + } +} + +fn prev_tokens(token: SyntaxToken) -> impl Iterator { + iter::successors(Some(token), |token| token.prev_token()) +} + +/// Soft-deprecated in favor of mutable tree editing API `edit_in_place::Ident`. 
+pub trait AstNodeEdit: AstNode + Clone + Sized { + fn indent_level(&self) -> IndentLevel { + IndentLevel::from_node(self.syntax()) + } + #[must_use] + fn indent(&self, level: IndentLevel) -> Self { + fn indent_inner(node: &SyntaxNode, level: IndentLevel) -> SyntaxNode { + let res = node.clone_subtree().clone_for_update(); + level.increase_indent(&res); + res.clone_subtree() + } + + Self::cast(indent_inner(self.syntax(), level)).unwrap() + } + #[must_use] + fn dedent(&self, level: IndentLevel) -> Self { + fn dedent_inner(node: &SyntaxNode, level: IndentLevel) -> SyntaxNode { + let res = node.clone_subtree().clone_for_update(); + level.decrease_indent(&res); + res.clone_subtree() + } + + Self::cast(dedent_inner(self.syntax(), level)).unwrap() + } + #[must_use] + fn reset_indent(&self) -> Self { + let level = IndentLevel::from_node(self.syntax()); + self.dedent(level) + } +} + +impl AstNodeEdit for N {} + +#[test] +fn test_increase_indent() { + let arm_list = { + let arm = make::match_arm(iter::once(make::wildcard_pat().into()), None, make::expr_unit()); + make::match_arm_list(vec![arm.clone(), arm]) + }; + assert_eq!( + arm_list.syntax().to_string(), + "{ + _ => (), + _ => (), +}" + ); + let indented = arm_list.indent(IndentLevel(2)); + assert_eq!( + indented.syntax().to_string(), + "{ + _ => (), + _ => (), + }" + ); +} diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs new file mode 100644 index 000000000..e3e928aec --- /dev/null +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs @@ -0,0 +1,717 @@ +//! Structural editing for ast. 
+ +use std::iter::{empty, successors}; + +use parser::{SyntaxKind, T}; +use rowan::SyntaxElement; + +use crate::{ + algo::{self, neighbor}, + ast::{self, edit::IndentLevel, make, HasGenericParams}, + ted::{self, Position}, + AstNode, AstToken, Direction, + SyntaxKind::{ATTR, COMMENT, WHITESPACE}, + SyntaxNode, +}; + +use super::HasName; + +pub trait GenericParamsOwnerEdit: ast::HasGenericParams { + fn get_or_create_generic_param_list(&self) -> ast::GenericParamList; + fn get_or_create_where_clause(&self) -> ast::WhereClause; +} + +impl GenericParamsOwnerEdit for ast::Fn { + fn get_or_create_generic_param_list(&self) -> ast::GenericParamList { + match self.generic_param_list() { + Some(it) => it, + None => { + let position = if let Some(name) = self.name() { + Position::after(name.syntax) + } else if let Some(fn_token) = self.fn_token() { + Position::after(fn_token) + } else if let Some(param_list) = self.param_list() { + Position::before(param_list.syntax) + } else { + Position::last_child_of(self.syntax()) + }; + create_generic_param_list(position) + } + } + } + + fn get_or_create_where_clause(&self) -> ast::WhereClause { + if self.where_clause().is_none() { + let position = if let Some(ty) = self.ret_type() { + Position::after(ty.syntax()) + } else if let Some(param_list) = self.param_list() { + Position::after(param_list.syntax()) + } else { + Position::last_child_of(self.syntax()) + }; + create_where_clause(position); + } + self.where_clause().unwrap() + } +} + +impl GenericParamsOwnerEdit for ast::Impl { + fn get_or_create_generic_param_list(&self) -> ast::GenericParamList { + match self.generic_param_list() { + Some(it) => it, + None => { + let position = match self.impl_token() { + Some(imp_token) => Position::after(imp_token), + None => Position::last_child_of(self.syntax()), + }; + create_generic_param_list(position) + } + } + } + + fn get_or_create_where_clause(&self) -> ast::WhereClause { + if self.where_clause().is_none() { + let position = match 
self.assoc_item_list() { + Some(items) => Position::before(items.syntax()), + None => Position::last_child_of(self.syntax()), + }; + create_where_clause(position); + } + self.where_clause().unwrap() + } +} + +impl GenericParamsOwnerEdit for ast::Trait { + fn get_or_create_generic_param_list(&self) -> ast::GenericParamList { + match self.generic_param_list() { + Some(it) => it, + None => { + let position = if let Some(name) = self.name() { + Position::after(name.syntax) + } else if let Some(trait_token) = self.trait_token() { + Position::after(trait_token) + } else { + Position::last_child_of(self.syntax()) + }; + create_generic_param_list(position) + } + } + } + + fn get_or_create_where_clause(&self) -> ast::WhereClause { + if self.where_clause().is_none() { + let position = match self.assoc_item_list() { + Some(items) => Position::before(items.syntax()), + None => Position::last_child_of(self.syntax()), + }; + create_where_clause(position); + } + self.where_clause().unwrap() + } +} + +impl GenericParamsOwnerEdit for ast::Struct { + fn get_or_create_generic_param_list(&self) -> ast::GenericParamList { + match self.generic_param_list() { + Some(it) => it, + None => { + let position = if let Some(name) = self.name() { + Position::after(name.syntax) + } else if let Some(struct_token) = self.struct_token() { + Position::after(struct_token) + } else { + Position::last_child_of(self.syntax()) + }; + create_generic_param_list(position) + } + } + } + + fn get_or_create_where_clause(&self) -> ast::WhereClause { + if self.where_clause().is_none() { + let tfl = self.field_list().and_then(|fl| match fl { + ast::FieldList::RecordFieldList(_) => None, + ast::FieldList::TupleFieldList(it) => Some(it), + }); + let position = if let Some(tfl) = tfl { + Position::after(tfl.syntax()) + } else if let Some(gpl) = self.generic_param_list() { + Position::after(gpl.syntax()) + } else if let Some(name) = self.name() { + Position::after(name.syntax()) + } else { + 
Position::last_child_of(self.syntax()) + }; + create_where_clause(position); + } + self.where_clause().unwrap() + } +} + +impl GenericParamsOwnerEdit for ast::Enum { + fn get_or_create_generic_param_list(&self) -> ast::GenericParamList { + match self.generic_param_list() { + Some(it) => it, + None => { + let position = if let Some(name) = self.name() { + Position::after(name.syntax) + } else if let Some(enum_token) = self.enum_token() { + Position::after(enum_token) + } else { + Position::last_child_of(self.syntax()) + }; + create_generic_param_list(position) + } + } + } + + fn get_or_create_where_clause(&self) -> ast::WhereClause { + if self.where_clause().is_none() { + let position = if let Some(gpl) = self.generic_param_list() { + Position::after(gpl.syntax()) + } else if let Some(name) = self.name() { + Position::after(name.syntax()) + } else { + Position::last_child_of(self.syntax()) + }; + create_where_clause(position); + } + self.where_clause().unwrap() + } +} + +fn create_where_clause(position: Position) { + let where_clause = make::where_clause(empty()).clone_for_update(); + ted::insert(position, where_clause.syntax()); +} + +fn create_generic_param_list(position: Position) -> ast::GenericParamList { + let gpl = make::generic_param_list(empty()).clone_for_update(); + ted::insert_raw(position, gpl.syntax()); + gpl +} + +pub trait AttrsOwnerEdit: ast::HasAttrs { + fn remove_attrs_and_docs(&self) { + remove_attrs_and_docs(self.syntax()); + + fn remove_attrs_and_docs(node: &SyntaxNode) { + let mut remove_next_ws = false; + for child in node.children_with_tokens() { + match child.kind() { + ATTR | COMMENT => { + remove_next_ws = true; + child.detach(); + continue; + } + WHITESPACE if remove_next_ws => { + child.detach(); + } + _ => (), + } + remove_next_ws = false; + } + } + } +} + +impl AttrsOwnerEdit for T {} + +impl ast::GenericParamList { + pub fn add_generic_param(&self, generic_param: ast::GenericParam) { + match self.generic_params().last() { + 
Some(last_param) => { + let position = Position::after(last_param.syntax()); + let elements = vec![ + make::token(T![,]).into(), + make::tokens::single_space().into(), + generic_param.syntax().clone().into(), + ]; + ted::insert_all(position, elements); + } + None => { + let after_l_angle = Position::after(self.l_angle_token().unwrap()); + ted::insert(after_l_angle, generic_param.syntax()); + } + } + } +} + +impl ast::WhereClause { + pub fn add_predicate(&self, predicate: ast::WherePred) { + if let Some(pred) = self.predicates().last() { + if !pred.syntax().siblings_with_tokens(Direction::Next).any(|it| it.kind() == T![,]) { + ted::append_child_raw(self.syntax(), make::token(T![,])); + } + } + ted::append_child(self.syntax(), predicate.syntax()); + } +} + +impl ast::TypeBoundList { + pub fn remove(&self) { + match self.syntax().siblings_with_tokens(Direction::Prev).find(|it| it.kind() == T![:]) { + Some(colon) => ted::remove_all(colon..=self.syntax().clone().into()), + None => ted::remove(self.syntax()), + } + } +} + +impl ast::PathSegment { + pub fn get_or_create_generic_arg_list(&self) -> ast::GenericArgList { + if self.generic_arg_list().is_none() { + let arg_list = make::generic_arg_list().clone_for_update(); + ted::append_child(self.syntax(), arg_list.syntax()); + } + self.generic_arg_list().unwrap() + } +} + +impl ast::UseTree { + pub fn remove(&self) { + for dir in [Direction::Next, Direction::Prev] { + if let Some(next_use_tree) = neighbor(self, dir) { + let separators = self + .syntax() + .siblings_with_tokens(dir) + .skip(1) + .take_while(|it| it.as_node() != Some(next_use_tree.syntax())); + ted::remove_all_iter(separators); + break; + } + } + ted::remove(self.syntax()); + } + + pub fn get_or_create_use_tree_list(&self) -> ast::UseTreeList { + match self.use_tree_list() { + Some(it) => it, + None => { + let position = Position::last_child_of(self.syntax()); + let use_tree_list = make::use_tree_list(empty()).clone_for_update(); + let mut elements = 
Vec::with_capacity(2); + if self.coloncolon_token().is_none() { + elements.push(make::token(T![::]).into()); + } + elements.push(use_tree_list.syntax().clone().into()); + ted::insert_all_raw(position, elements); + use_tree_list + } + } + } + + /// Splits off the given prefix, making it the path component of the use tree, + /// appending the rest of the path to all UseTreeList items. + /// + /// # Examples + /// + /// `prefix$0::suffix` -> `prefix::{suffix}` + /// + /// `prefix$0` -> `prefix::{self}` + /// + /// `prefix$0::*` -> `prefix::{*}` + pub fn split_prefix(&self, prefix: &ast::Path) { + debug_assert_eq!(self.path(), Some(prefix.top_path())); + let path = self.path().unwrap(); + if &path == prefix && self.use_tree_list().is_none() { + if self.star_token().is_some() { + // path$0::* -> * + self.coloncolon_token().map(ted::remove); + ted::remove(prefix.syntax()); + } else { + // path$0 -> self + let self_suffix = + make::path_unqualified(make::path_segment_self()).clone_for_update(); + ted::replace(path.syntax(), self_suffix.syntax()); + } + } else if split_path_prefix(prefix).is_none() { + return; + } + // At this point, prefix path is detached; _self_ use tree has suffix path. 
+ // Next, transform 'suffix' use tree into 'prefix::{suffix}' + let subtree = self.clone_subtree().clone_for_update(); + ted::remove_all_iter(self.syntax().children_with_tokens()); + ted::insert(Position::first_child_of(self.syntax()), prefix.syntax()); + self.get_or_create_use_tree_list().add_use_tree(subtree); + + fn split_path_prefix(prefix: &ast::Path) -> Option<()> { + let parent = prefix.parent_path()?; + let segment = parent.segment()?; + if algo::has_errors(segment.syntax()) { + return None; + } + for p in successors(parent.parent_path(), |it| it.parent_path()) { + p.segment()?; + } + prefix.parent_path().and_then(|p| p.coloncolon_token()).map(ted::remove); + ted::remove(prefix.syntax()); + Some(()) + } + } +} + +impl ast::UseTreeList { + pub fn add_use_tree(&self, use_tree: ast::UseTree) { + let (position, elements) = match self.use_trees().last() { + Some(last_tree) => ( + Position::after(last_tree.syntax()), + vec![ + make::token(T![,]).into(), + make::tokens::single_space().into(), + use_tree.syntax.into(), + ], + ), + None => { + let position = match self.l_curly_token() { + Some(l_curly) => Position::after(l_curly), + None => Position::last_child_of(self.syntax()), + }; + (position, vec![use_tree.syntax.into()]) + } + }; + ted::insert_all_raw(position, elements); + } +} + +impl ast::Use { + pub fn remove(&self) { + let next_ws = self + .syntax() + .next_sibling_or_token() + .and_then(|it| it.into_token()) + .and_then(ast::Whitespace::cast); + if let Some(next_ws) = next_ws { + let ws_text = next_ws.syntax().text(); + if let Some(rest) = ws_text.strip_prefix('\n') { + if rest.is_empty() { + ted::remove(next_ws.syntax()); + } else { + ted::replace(next_ws.syntax(), make::tokens::whitespace(rest)); + } + } + } + ted::remove(self.syntax()); + } +} + +impl ast::Impl { + pub fn get_or_create_assoc_item_list(&self) -> ast::AssocItemList { + if self.assoc_item_list().is_none() { + let assoc_item_list = make::assoc_item_list().clone_for_update(); + 
ted::append_child(self.syntax(), assoc_item_list.syntax()); + } + self.assoc_item_list().unwrap() + } +} + +impl ast::AssocItemList { + pub fn add_item(&self, item: ast::AssocItem) { + let (indent, position, whitespace) = match self.assoc_items().last() { + Some(last_item) => ( + IndentLevel::from_node(last_item.syntax()), + Position::after(last_item.syntax()), + "\n\n", + ), + None => match self.l_curly_token() { + Some(l_curly) => { + normalize_ws_between_braces(self.syntax()); + (IndentLevel::from_token(&l_curly) + 1, Position::after(&l_curly), "\n") + } + None => (IndentLevel::single(), Position::last_child_of(self.syntax()), "\n"), + }, + }; + let elements: Vec> = vec![ + make::tokens::whitespace(&format!("{}{}", whitespace, indent)).into(), + item.syntax().clone().into(), + ]; + ted::insert_all(position, elements); + } +} + +impl ast::Fn { + pub fn get_or_create_body(&self) -> ast::BlockExpr { + if self.body().is_none() { + let body = make::ext::empty_block_expr().clone_for_update(); + match self.semicolon_token() { + Some(semi) => { + ted::replace(semi, body.syntax()); + ted::insert(Position::before(body.syntax), make::tokens::single_space()); + } + None => ted::append_child(self.syntax(), body.syntax()), + } + } + self.body().unwrap() + } +} + +impl ast::MatchArm { + pub fn remove(&self) { + if let Some(sibling) = self.syntax().prev_sibling_or_token() { + if sibling.kind() == SyntaxKind::WHITESPACE { + ted::remove(sibling); + } + } + if let Some(sibling) = self.syntax().next_sibling_or_token() { + if sibling.kind() == T![,] { + ted::remove(sibling); + } + } + ted::remove(self.syntax()); + } +} + +impl ast::MatchArmList { + pub fn add_arm(&self, arm: ast::MatchArm) { + normalize_ws_between_braces(self.syntax()); + let mut elements = Vec::new(); + let position = match self.arms().last() { + Some(last_arm) => { + if needs_comma(&last_arm) { + ted::append_child(last_arm.syntax(), make::token(SyntaxKind::COMMA)); + } + Position::after(last_arm.syntax().clone()) 
+ } + None => match self.l_curly_token() { + Some(it) => Position::after(it), + None => Position::last_child_of(self.syntax()), + }, + }; + let indent = IndentLevel::from_node(self.syntax()) + 1; + elements.push(make::tokens::whitespace(&format!("\n{}", indent)).into()); + elements.push(arm.syntax().clone().into()); + if needs_comma(&arm) { + ted::append_child(arm.syntax(), make::token(SyntaxKind::COMMA)); + } + ted::insert_all(position, elements); + + fn needs_comma(arm: &ast::MatchArm) -> bool { + arm.expr().map_or(false, |e| !e.is_block_like()) && arm.comma_token().is_none() + } + } +} + +impl ast::RecordExprFieldList { + pub fn add_field(&self, field: ast::RecordExprField) { + let is_multiline = self.syntax().text().contains_char('\n'); + let whitespace = if is_multiline { + let indent = IndentLevel::from_node(self.syntax()) + 1; + make::tokens::whitespace(&format!("\n{}", indent)) + } else { + make::tokens::single_space() + }; + + if is_multiline { + normalize_ws_between_braces(self.syntax()); + } + + let position = match self.fields().last() { + Some(last_field) => { + let comma = match last_field + .syntax() + .siblings_with_tokens(Direction::Next) + .filter_map(|it| it.into_token()) + .find(|it| it.kind() == T![,]) + { + Some(it) => it, + None => { + let comma = ast::make::token(T![,]); + ted::insert(Position::after(last_field.syntax()), &comma); + comma + } + }; + Position::after(comma) + } + None => match self.l_curly_token() { + Some(it) => Position::after(it), + None => Position::last_child_of(self.syntax()), + }, + }; + + ted::insert_all(position, vec![whitespace.into(), field.syntax().clone().into()]); + if is_multiline { + ted::insert(Position::after(field.syntax()), ast::make::token(T![,])); + } + } +} + +impl ast::RecordExprField { + /// This will either replace the initializer, or in the case that this is a shorthand convert + /// the initializer into the name ref and insert the expr as the new initializer. 
+ pub fn replace_expr(&self, expr: ast::Expr) { + if self.name_ref().is_some() { + match self.expr() { + Some(prev) => ted::replace(prev.syntax(), expr.syntax()), + None => ted::append_child(self.syntax(), expr.syntax()), + } + return; + } + // this is a shorthand + if let Some(ast::Expr::PathExpr(path_expr)) = self.expr() { + if let Some(path) = path_expr.path() { + if let Some(name_ref) = path.as_single_name_ref() { + path_expr.syntax().detach(); + let children = vec![ + name_ref.syntax().clone().into(), + ast::make::token(T![:]).into(), + ast::make::tokens::single_space().into(), + expr.syntax().clone().into(), + ]; + ted::insert_all_raw(Position::last_child_of(self.syntax()), children); + } + } + } + } +} + +impl ast::RecordPatFieldList { + pub fn add_field(&self, field: ast::RecordPatField) { + let is_multiline = self.syntax().text().contains_char('\n'); + let whitespace = if is_multiline { + let indent = IndentLevel::from_node(self.syntax()) + 1; + make::tokens::whitespace(&format!("\n{}", indent)) + } else { + make::tokens::single_space() + }; + + if is_multiline { + normalize_ws_between_braces(self.syntax()); + } + + let position = match self.fields().last() { + Some(last_field) => { + let comma = match last_field + .syntax() + .siblings_with_tokens(Direction::Next) + .filter_map(|it| it.into_token()) + .find(|it| it.kind() == T![,]) + { + Some(it) => it, + None => { + let comma = ast::make::token(T![,]); + ted::insert(Position::after(last_field.syntax()), &comma); + comma + } + }; + Position::after(comma) + } + None => match self.l_curly_token() { + Some(it) => Position::after(it), + None => Position::last_child_of(self.syntax()), + }, + }; + + ted::insert_all(position, vec![whitespace.into(), field.syntax().clone().into()]); + if is_multiline { + ted::insert(Position::after(field.syntax()), ast::make::token(T![,])); + } + } +} +impl ast::StmtList { + pub fn push_front(&self, statement: ast::Stmt) { + 
ted::insert(Position::after(self.l_curly_token().unwrap()), statement.syntax()); + } +} + +fn normalize_ws_between_braces(node: &SyntaxNode) -> Option<()> { + let l = node + .children_with_tokens() + .filter_map(|it| it.into_token()) + .find(|it| it.kind() == T!['{'])?; + let r = node + .children_with_tokens() + .filter_map(|it| it.into_token()) + .find(|it| it.kind() == T!['}'])?; + + let indent = IndentLevel::from_node(node); + + match l.next_sibling_or_token() { + Some(ws) if ws.kind() == SyntaxKind::WHITESPACE => { + if ws.next_sibling_or_token()?.into_token()? == r { + ted::replace(ws, make::tokens::whitespace(&format!("\n{}", indent))); + } + } + Some(ws) if ws.kind() == T!['}'] => { + ted::insert(Position::after(l), make::tokens::whitespace(&format!("\n{}", indent))); + } + _ => (), + } + Some(()) +} + +pub trait Indent: AstNode + Clone + Sized { + fn indent_level(&self) -> IndentLevel { + IndentLevel::from_node(self.syntax()) + } + fn indent(&self, by: IndentLevel) { + by.increase_indent(self.syntax()); + } + fn dedent(&self, by: IndentLevel) { + by.decrease_indent(self.syntax()); + } + fn reindent_to(&self, target_level: IndentLevel) { + let current_level = IndentLevel::from_node(self.syntax()); + self.dedent(current_level); + self.indent(target_level); + } +} + +impl Indent for N {} + +#[cfg(test)] +mod tests { + use std::fmt; + + use crate::SourceFile; + + use super::*; + + fn ast_mut_from_text(text: &str) -> N { + let parse = SourceFile::parse(text); + parse.tree().syntax().descendants().find_map(N::cast).unwrap().clone_for_update() + } + + #[test] + fn test_create_generic_param_list() { + fn check_create_gpl(before: &str, after: &str) { + let gpl_owner = ast_mut_from_text::(before); + gpl_owner.get_or_create_generic_param_list(); + assert_eq!(gpl_owner.to_string(), after); + } + + check_create_gpl::("fn foo", "fn foo<>"); + check_create_gpl::("fn foo() {}", "fn foo<>() {}"); + + check_create_gpl::("impl", "impl<>"); + check_create_gpl::("impl Struct 
{}", "impl<> Struct {}"); + check_create_gpl::("impl Trait for Struct {}", "impl<> Trait for Struct {}"); + + check_create_gpl::("trait Trait<>", "trait Trait<>"); + check_create_gpl::("trait Trait<> {}", "trait Trait<> {}"); + + check_create_gpl::("struct A", "struct A<>"); + check_create_gpl::("struct A;", "struct A<>;"); + check_create_gpl::("struct A();", "struct A<>();"); + check_create_gpl::("struct A {}", "struct A<> {}"); + + check_create_gpl::("enum E", "enum E<>"); + check_create_gpl::("enum E {", "enum E<> {"); + } + + #[test] + fn test_increase_indent() { + let arm_list = ast_mut_from_text::( + "fn foo() { + ; + ; +}", + ); + arm_list.indent(IndentLevel(2)); + assert_eq!( + arm_list.to_string(), + "fn foo() { + ; + ; + }", + ); + } +} diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/expr_ext.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/expr_ext.rs new file mode 100644 index 000000000..db66d08a7 --- /dev/null +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/expr_ext.rs @@ -0,0 +1,410 @@ +//! Various extension methods to ast Expr Nodes, which are hard to code-generate. +//! +//! These methods should only do simple, shallow tasks related to the syntax of the node itself. 
+ +use crate::{ + ast::{ + self, + operators::{ArithOp, BinaryOp, CmpOp, LogicOp, Ordering, RangeOp, UnaryOp}, + support, AstChildren, AstNode, + }, + AstToken, + SyntaxKind::*, + SyntaxNode, SyntaxToken, T, +}; + +impl ast::HasAttrs for ast::Expr {} + +impl ast::Expr { + pub fn is_block_like(&self) -> bool { + matches!( + self, + ast::Expr::IfExpr(_) + | ast::Expr::LoopExpr(_) + | ast::Expr::ForExpr(_) + | ast::Expr::WhileExpr(_) + | ast::Expr::BlockExpr(_) + | ast::Expr::MatchExpr(_) + ) + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum ElseBranch { + Block(ast::BlockExpr), + IfExpr(ast::IfExpr), +} + +impl From for ElseBranch { + fn from(block_expr: ast::BlockExpr) -> Self { + Self::Block(block_expr) + } +} + +impl From for ElseBranch { + fn from(if_expr: ast::IfExpr) -> Self { + Self::IfExpr(if_expr) + } +} + +impl ast::IfExpr { + pub fn then_branch(&self) -> Option { + self.children_after_condition().next() + } + + pub fn else_branch(&self) -> Option { + let res = match self.children_after_condition().nth(1) { + Some(block) => ElseBranch::Block(block), + None => { + let elif = self.children_after_condition().next()?; + ElseBranch::IfExpr(elif) + } + }; + Some(res) + } + + fn children_after_condition(&self) -> impl Iterator { + self.syntax().children().skip(1).filter_map(N::cast) + } +} + +#[test] +fn if_block_condition() { + let parse = ast::SourceFile::parse( + r#" + fn test() { + if { true } { "if" } + else if { false } { "first elif" } + else if true { "second elif" } + else if (true) { "third elif" } + else { "else" } + } + "#, + ); + let if_ = parse.tree().syntax().descendants().find_map(ast::IfExpr::cast).unwrap(); + assert_eq!(if_.then_branch().unwrap().syntax().text(), r#"{ "if" }"#); + let elif = match if_.else_branch().unwrap() { + ElseBranch::IfExpr(elif) => elif, + ElseBranch::Block(_) => panic!("should be `else if`"), + }; + assert_eq!(elif.then_branch().unwrap().syntax().text(), r#"{ "first elif" }"#); + let elif = match 
elif.else_branch().unwrap() { + ElseBranch::IfExpr(elif) => elif, + ElseBranch::Block(_) => panic!("should be `else if`"), + }; + assert_eq!(elif.then_branch().unwrap().syntax().text(), r#"{ "second elif" }"#); + let elif = match elif.else_branch().unwrap() { + ElseBranch::IfExpr(elif) => elif, + ElseBranch::Block(_) => panic!("should be `else if`"), + }; + assert_eq!(elif.then_branch().unwrap().syntax().text(), r#"{ "third elif" }"#); + let else_ = match elif.else_branch().unwrap() { + ElseBranch::Block(else_) => else_, + ElseBranch::IfExpr(_) => panic!("should be `else`"), + }; + assert_eq!(else_.syntax().text(), r#"{ "else" }"#); +} + +#[test] +fn if_condition_with_if_inside() { + let parse = ast::SourceFile::parse( + r#" + fn test() { + if if true { true } else { false } { "if" } + else { "else" } + } + "#, + ); + let if_ = parse.tree().syntax().descendants().find_map(ast::IfExpr::cast).unwrap(); + assert_eq!(if_.then_branch().unwrap().syntax().text(), r#"{ "if" }"#); + let else_ = match if_.else_branch().unwrap() { + ElseBranch::Block(else_) => else_, + ElseBranch::IfExpr(_) => panic!("should be `else`"), + }; + assert_eq!(else_.syntax().text(), r#"{ "else" }"#); +} + +impl ast::PrefixExpr { + pub fn op_kind(&self) -> Option { + let res = match self.op_token()?.kind() { + T![*] => UnaryOp::Deref, + T![!] 
=> UnaryOp::Not, + T![-] => UnaryOp::Neg, + _ => return None, + }; + Some(res) + } + + pub fn op_token(&self) -> Option { + self.syntax().first_child_or_token()?.into_token() + } +} + +impl ast::BinExpr { + pub fn op_details(&self) -> Option<(SyntaxToken, BinaryOp)> { + self.syntax().children_with_tokens().filter_map(|it| it.into_token()).find_map(|c| { + #[rustfmt::skip] + let bin_op = match c.kind() { + T![||] => BinaryOp::LogicOp(LogicOp::Or), + T![&&] => BinaryOp::LogicOp(LogicOp::And), + + T![==] => BinaryOp::CmpOp(CmpOp::Eq { negated: false }), + T![!=] => BinaryOp::CmpOp(CmpOp::Eq { negated: true }), + T![<=] => BinaryOp::CmpOp(CmpOp::Ord { ordering: Ordering::Less, strict: false }), + T![>=] => BinaryOp::CmpOp(CmpOp::Ord { ordering: Ordering::Greater, strict: false }), + T![<] => BinaryOp::CmpOp(CmpOp::Ord { ordering: Ordering::Less, strict: true }), + T![>] => BinaryOp::CmpOp(CmpOp::Ord { ordering: Ordering::Greater, strict: true }), + + T![+] => BinaryOp::ArithOp(ArithOp::Add), + T![*] => BinaryOp::ArithOp(ArithOp::Mul), + T![-] => BinaryOp::ArithOp(ArithOp::Sub), + T![/] => BinaryOp::ArithOp(ArithOp::Div), + T![%] => BinaryOp::ArithOp(ArithOp::Rem), + T![<<] => BinaryOp::ArithOp(ArithOp::Shl), + T![>>] => BinaryOp::ArithOp(ArithOp::Shr), + T![^] => BinaryOp::ArithOp(ArithOp::BitXor), + T![|] => BinaryOp::ArithOp(ArithOp::BitOr), + T![&] => BinaryOp::ArithOp(ArithOp::BitAnd), + + T![=] => BinaryOp::Assignment { op: None }, + T![+=] => BinaryOp::Assignment { op: Some(ArithOp::Add) }, + T![*=] => BinaryOp::Assignment { op: Some(ArithOp::Mul) }, + T![-=] => BinaryOp::Assignment { op: Some(ArithOp::Sub) }, + T![/=] => BinaryOp::Assignment { op: Some(ArithOp::Div) }, + T![%=] => BinaryOp::Assignment { op: Some(ArithOp::Rem) }, + T![<<=] => BinaryOp::Assignment { op: Some(ArithOp::Shl) }, + T![>>=] => BinaryOp::Assignment { op: Some(ArithOp::Shr) }, + T![^=] => BinaryOp::Assignment { op: Some(ArithOp::BitXor) }, + T![|=] => BinaryOp::Assignment { op: 
Some(ArithOp::BitOr) }, + T![&=] => BinaryOp::Assignment { op: Some(ArithOp::BitAnd) }, + + _ => return None, + }; + Some((c, bin_op)) + }) + } + + pub fn op_kind(&self) -> Option { + self.op_details().map(|t| t.1) + } + + pub fn op_token(&self) -> Option { + self.op_details().map(|t| t.0) + } + + pub fn lhs(&self) -> Option { + support::children(self.syntax()).next() + } + + pub fn rhs(&self) -> Option { + support::children(self.syntax()).nth(1) + } + + pub fn sub_exprs(&self) -> (Option, Option) { + let mut children = support::children(self.syntax()); + let first = children.next(); + let second = children.next(); + (first, second) + } +} + +impl ast::RangeExpr { + fn op_details(&self) -> Option<(usize, SyntaxToken, RangeOp)> { + self.syntax().children_with_tokens().enumerate().find_map(|(ix, child)| { + let token = child.into_token()?; + let bin_op = match token.kind() { + T![..] => RangeOp::Exclusive, + T![..=] => RangeOp::Inclusive, + _ => return None, + }; + Some((ix, token, bin_op)) + }) + } + + pub fn op_kind(&self) -> Option { + self.op_details().map(|t| t.2) + } + + pub fn op_token(&self) -> Option { + self.op_details().map(|t| t.1) + } + + pub fn start(&self) -> Option { + let op_ix = self.op_details()?.0; + self.syntax() + .children_with_tokens() + .take(op_ix) + .find_map(|it| ast::Expr::cast(it.into_node()?)) + } + + pub fn end(&self) -> Option { + let op_ix = self.op_details()?.0; + self.syntax() + .children_with_tokens() + .skip(op_ix + 1) + .find_map(|it| ast::Expr::cast(it.into_node()?)) + } +} + +impl ast::IndexExpr { + pub fn base(&self) -> Option { + support::children(self.syntax()).next() + } + pub fn index(&self) -> Option { + support::children(self.syntax()).nth(1) + } +} + +pub enum ArrayExprKind { + Repeat { initializer: Option, repeat: Option }, + ElementList(AstChildren), +} + +impl ast::ArrayExpr { + pub fn kind(&self) -> ArrayExprKind { + if self.is_repeat() { + ArrayExprKind::Repeat { + initializer: 
support::children(self.syntax()).next(), + repeat: support::children(self.syntax()).nth(1), + } + } else { + ArrayExprKind::ElementList(support::children(self.syntax())) + } + } + + fn is_repeat(&self) -> bool { + self.semicolon_token().is_some() + } +} + +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub enum LiteralKind { + String(ast::String), + ByteString(ast::ByteString), + IntNumber(ast::IntNumber), + FloatNumber(ast::FloatNumber), + Char(ast::Char), + Byte(ast::Byte), + Bool(bool), +} + +impl ast::Literal { + pub fn token(&self) -> SyntaxToken { + self.syntax() + .children_with_tokens() + .find(|e| e.kind() != ATTR && !e.kind().is_trivia()) + .and_then(|e| e.into_token()) + .unwrap() + } + + pub fn kind(&self) -> LiteralKind { + let token = self.token(); + + if let Some(t) = ast::IntNumber::cast(token.clone()) { + return LiteralKind::IntNumber(t); + } + if let Some(t) = ast::FloatNumber::cast(token.clone()) { + return LiteralKind::FloatNumber(t); + } + if let Some(t) = ast::String::cast(token.clone()) { + return LiteralKind::String(t); + } + if let Some(t) = ast::ByteString::cast(token.clone()) { + return LiteralKind::ByteString(t); + } + if let Some(t) = ast::Char::cast(token.clone()) { + return LiteralKind::Char(t); + } + if let Some(t) = ast::Byte::cast(token.clone()) { + return LiteralKind::Byte(t); + } + + match token.kind() { + T![true] => LiteralKind::Bool(true), + T![false] => LiteralKind::Bool(false), + _ => unreachable!(), + } + } +} + +pub enum BlockModifier { + Async(SyntaxToken), + Unsafe(SyntaxToken), + Try(SyntaxToken), + Const(SyntaxToken), + Label(ast::Label), +} + +impl ast::BlockExpr { + pub fn modifier(&self) -> Option { + self.async_token() + .map(BlockModifier::Async) + .or_else(|| self.unsafe_token().map(BlockModifier::Unsafe)) + .or_else(|| self.try_token().map(BlockModifier::Try)) + .or_else(|| self.const_token().map(BlockModifier::Const)) + .or_else(|| self.label().map(BlockModifier::Label)) + } + /// false if the block is an 
intrinsic part of the syntax and can't be + /// replaced with arbitrary expression. + /// + /// ```not_rust + /// fn foo() { not_stand_alone } + /// const FOO: () = { stand_alone }; + /// ``` + pub fn is_standalone(&self) -> bool { + let parent = match self.syntax().parent() { + Some(it) => it, + None => return true, + }; + !matches!(parent.kind(), FN | IF_EXPR | WHILE_EXPR | LOOP_EXPR) + } +} + +#[test] +fn test_literal_with_attr() { + let parse = ast::SourceFile::parse(r#"const _: &str = { #[attr] "Hello" };"#); + let lit = parse.tree().syntax().descendants().find_map(ast::Literal::cast).unwrap(); + assert_eq!(lit.token().text(), r#""Hello""#); +} + +impl ast::RecordExprField { + pub fn parent_record_lit(&self) -> ast::RecordExpr { + self.syntax().ancestors().find_map(ast::RecordExpr::cast).unwrap() + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum CallableExpr { + Call(ast::CallExpr), + MethodCall(ast::MethodCallExpr), +} + +impl ast::HasAttrs for CallableExpr {} +impl ast::HasArgList for CallableExpr {} + +impl AstNode for CallableExpr { + fn can_cast(kind: parser::SyntaxKind) -> bool + where + Self: Sized, + { + ast::CallExpr::can_cast(kind) || ast::MethodCallExpr::can_cast(kind) + } + + fn cast(syntax: SyntaxNode) -> Option + where + Self: Sized, + { + if let Some(it) = ast::CallExpr::cast(syntax.clone()) { + Some(Self::Call(it)) + } else { + ast::MethodCallExpr::cast(syntax).map(Self::MethodCall) + } + } + + fn syntax(&self) -> &SyntaxNode { + match self { + Self::Call(it) => it.syntax(), + Self::MethodCall(it) => it.syntax(), + } + } +} diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/generated.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/generated.rs new file mode 100644 index 000000000..843b43cf0 --- /dev/null +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/generated.rs @@ -0,0 +1,41 @@ +//! This file is actually hand-written, but the submodules are indeed generated. 
+#[rustfmt::skip] +pub(crate) mod nodes; +#[rustfmt::skip] +pub(crate) mod tokens; + +use crate::{ + AstNode, + SyntaxKind::{self, *}, + SyntaxNode, +}; + +pub(crate) use nodes::*; + +// Stmt is the only nested enum, so it's easier to just hand-write it +impl AstNode for Stmt { + fn can_cast(kind: SyntaxKind) -> bool { + match kind { + LET_STMT | EXPR_STMT => true, + _ => Item::can_cast(kind), + } + } + fn cast(syntax: SyntaxNode) -> Option { + let res = match syntax.kind() { + LET_STMT => Stmt::LetStmt(LetStmt { syntax }), + EXPR_STMT => Stmt::ExprStmt(ExprStmt { syntax }), + _ => { + let item = Item::cast(syntax)?; + Stmt::Item(item) + } + }; + Some(res) + } + fn syntax(&self) -> &SyntaxNode { + match self { + Stmt::LetStmt(it) => &it.syntax, + Stmt::ExprStmt(it) => &it.syntax, + Stmt::Item(it) => it.syntax(), + } + } +} diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs new file mode 100644 index 000000000..63309a155 --- /dev/null +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs @@ -0,0 +1,4806 @@ +//! Generated by `sourcegen_ast`, do not edit by hand. 
+ +#![allow(non_snake_case)] +use crate::{ + ast::{self, support, AstChildren, AstNode}, + SyntaxKind::{self, *}, + SyntaxNode, SyntaxToken, T, +}; + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct Name { + pub(crate) syntax: SyntaxNode, +} +impl Name { + pub fn ident_token(&self) -> Option { support::token(&self.syntax, T![ident]) } + pub fn self_token(&self) -> Option { support::token(&self.syntax, T![self]) } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct NameRef { + pub(crate) syntax: SyntaxNode, +} +impl NameRef { + pub fn ident_token(&self) -> Option { support::token(&self.syntax, T![ident]) } + pub fn self_token(&self) -> Option { support::token(&self.syntax, T![self]) } + pub fn super_token(&self) -> Option { support::token(&self.syntax, T![super]) } + pub fn crate_token(&self) -> Option { support::token(&self.syntax, T![crate]) } + pub fn Self_token(&self) -> Option { support::token(&self.syntax, T![Self]) } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct Lifetime { + pub(crate) syntax: SyntaxNode, +} +impl Lifetime { + pub fn lifetime_ident_token(&self) -> Option { + support::token(&self.syntax, T![lifetime_ident]) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct Path { + pub(crate) syntax: SyntaxNode, +} +impl Path { + pub fn qualifier(&self) -> Option { support::child(&self.syntax) } + pub fn coloncolon_token(&self) -> Option { support::token(&self.syntax, T![::]) } + pub fn segment(&self) -> Option { support::child(&self.syntax) } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct PathSegment { + pub(crate) syntax: SyntaxNode, +} +impl PathSegment { + pub fn coloncolon_token(&self) -> Option { support::token(&self.syntax, T![::]) } + pub fn name_ref(&self) -> Option { support::child(&self.syntax) } + pub fn generic_arg_list(&self) -> Option { support::child(&self.syntax) } + pub fn param_list(&self) -> Option { support::child(&self.syntax) } + pub fn ret_type(&self) -> Option { 
support::child(&self.syntax) } + pub fn l_angle_token(&self) -> Option { support::token(&self.syntax, T![<]) } + pub fn path_type(&self) -> Option { support::child(&self.syntax) } + pub fn as_token(&self) -> Option { support::token(&self.syntax, T![as]) } + pub fn r_angle_token(&self) -> Option { support::token(&self.syntax, T![>]) } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct GenericArgList { + pub(crate) syntax: SyntaxNode, +} +impl GenericArgList { + pub fn coloncolon_token(&self) -> Option { support::token(&self.syntax, T![::]) } + pub fn l_angle_token(&self) -> Option { support::token(&self.syntax, T![<]) } + pub fn generic_args(&self) -> AstChildren { support::children(&self.syntax) } + pub fn r_angle_token(&self) -> Option { support::token(&self.syntax, T![>]) } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct ParamList { + pub(crate) syntax: SyntaxNode, +} +impl ParamList { + pub fn l_paren_token(&self) -> Option { support::token(&self.syntax, T!['(']) } + pub fn self_param(&self) -> Option { support::child(&self.syntax) } + pub fn comma_token(&self) -> Option { support::token(&self.syntax, T![,]) } + pub fn params(&self) -> AstChildren { support::children(&self.syntax) } + pub fn r_paren_token(&self) -> Option { support::token(&self.syntax, T![')']) } + pub fn pipe_token(&self) -> Option { support::token(&self.syntax, T![|]) } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct RetType { + pub(crate) syntax: SyntaxNode, +} +impl RetType { + pub fn thin_arrow_token(&self) -> Option { support::token(&self.syntax, T![->]) } + pub fn ty(&self) -> Option { support::child(&self.syntax) } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct PathType { + pub(crate) syntax: SyntaxNode, +} +impl PathType { + pub fn path(&self) -> Option { support::child(&self.syntax) } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct TypeArg { + pub(crate) syntax: SyntaxNode, +} +impl TypeArg { + pub fn ty(&self) -> 
Option { support::child(&self.syntax) } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct AssocTypeArg { + pub(crate) syntax: SyntaxNode, +} +impl ast::HasTypeBounds for AssocTypeArg {} +impl AssocTypeArg { + pub fn name_ref(&self) -> Option { support::child(&self.syntax) } + pub fn generic_param_list(&self) -> Option { support::child(&self.syntax) } + pub fn eq_token(&self) -> Option { support::token(&self.syntax, T![=]) } + pub fn ty(&self) -> Option { support::child(&self.syntax) } + pub fn const_arg(&self) -> Option { support::child(&self.syntax) } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct LifetimeArg { + pub(crate) syntax: SyntaxNode, +} +impl LifetimeArg { + pub fn lifetime(&self) -> Option { support::child(&self.syntax) } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct ConstArg { + pub(crate) syntax: SyntaxNode, +} +impl ConstArg { + pub fn expr(&self) -> Option { support::child(&self.syntax) } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct GenericParamList { + pub(crate) syntax: SyntaxNode, +} +impl GenericParamList { + pub fn l_angle_token(&self) -> Option { support::token(&self.syntax, T![<]) } + pub fn generic_params(&self) -> AstChildren { support::children(&self.syntax) } + pub fn r_angle_token(&self) -> Option { support::token(&self.syntax, T![>]) } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct TypeBoundList { + pub(crate) syntax: SyntaxNode, +} +impl TypeBoundList { + pub fn bounds(&self) -> AstChildren { support::children(&self.syntax) } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct MacroCall { + pub(crate) syntax: SyntaxNode, +} +impl ast::HasAttrs for MacroCall {} +impl ast::HasDocComments for MacroCall {} +impl MacroCall { + pub fn path(&self) -> Option { support::child(&self.syntax) } + pub fn excl_token(&self) -> Option { support::token(&self.syntax, T![!]) } + pub fn token_tree(&self) -> Option { support::child(&self.syntax) } + pub fn 
semicolon_token(&self) -> Option { support::token(&self.syntax, T![;]) } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct Attr { + pub(crate) syntax: SyntaxNode, +} +impl Attr { + pub fn pound_token(&self) -> Option { support::token(&self.syntax, T![#]) } + pub fn excl_token(&self) -> Option { support::token(&self.syntax, T![!]) } + pub fn l_brack_token(&self) -> Option { support::token(&self.syntax, T!['[']) } + pub fn meta(&self) -> Option { support::child(&self.syntax) } + pub fn r_brack_token(&self) -> Option { support::token(&self.syntax, T![']']) } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct TokenTree { + pub(crate) syntax: SyntaxNode, +} +impl TokenTree { + pub fn l_paren_token(&self) -> Option { support::token(&self.syntax, T!['(']) } + pub fn r_paren_token(&self) -> Option { support::token(&self.syntax, T![')']) } + pub fn l_curly_token(&self) -> Option { support::token(&self.syntax, T!['{']) } + pub fn r_curly_token(&self) -> Option { support::token(&self.syntax, T!['}']) } + pub fn l_brack_token(&self) -> Option { support::token(&self.syntax, T!['[']) } + pub fn r_brack_token(&self) -> Option { support::token(&self.syntax, T![']']) } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct MacroItems { + pub(crate) syntax: SyntaxNode, +} +impl ast::HasModuleItem for MacroItems {} +impl MacroItems {} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct MacroStmts { + pub(crate) syntax: SyntaxNode, +} +impl MacroStmts { + pub fn statements(&self) -> AstChildren { support::children(&self.syntax) } + pub fn expr(&self) -> Option { support::child(&self.syntax) } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct SourceFile { + pub(crate) syntax: SyntaxNode, +} +impl ast::HasAttrs for SourceFile {} +impl ast::HasModuleItem for SourceFile {} +impl ast::HasDocComments for SourceFile {} +impl SourceFile { + pub fn shebang_token(&self) -> Option { support::token(&self.syntax, T![shebang]) } +} + 
+#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct Const { + pub(crate) syntax: SyntaxNode, +} +impl ast::HasAttrs for Const {} +impl ast::HasName for Const {} +impl ast::HasVisibility for Const {} +impl ast::HasDocComments for Const {} +impl Const { + pub fn default_token(&self) -> Option { support::token(&self.syntax, T![default]) } + pub fn const_token(&self) -> Option { support::token(&self.syntax, T![const]) } + pub fn underscore_token(&self) -> Option { support::token(&self.syntax, T![_]) } + pub fn colon_token(&self) -> Option { support::token(&self.syntax, T![:]) } + pub fn ty(&self) -> Option { support::child(&self.syntax) } + pub fn eq_token(&self) -> Option { support::token(&self.syntax, T![=]) } + pub fn body(&self) -> Option { support::child(&self.syntax) } + pub fn semicolon_token(&self) -> Option { support::token(&self.syntax, T![;]) } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct Enum { + pub(crate) syntax: SyntaxNode, +} +impl ast::HasAttrs for Enum {} +impl ast::HasName for Enum {} +impl ast::HasVisibility for Enum {} +impl ast::HasGenericParams for Enum {} +impl ast::HasDocComments for Enum {} +impl Enum { + pub fn enum_token(&self) -> Option { support::token(&self.syntax, T![enum]) } + pub fn variant_list(&self) -> Option { support::child(&self.syntax) } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct ExternBlock { + pub(crate) syntax: SyntaxNode, +} +impl ast::HasAttrs for ExternBlock {} +impl ast::HasDocComments for ExternBlock {} +impl ExternBlock { + pub fn unsafe_token(&self) -> Option { support::token(&self.syntax, T![unsafe]) } + pub fn abi(&self) -> Option { support::child(&self.syntax) } + pub fn extern_item_list(&self) -> Option { support::child(&self.syntax) } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct ExternCrate { + pub(crate) syntax: SyntaxNode, +} +impl ast::HasAttrs for ExternCrate {} +impl ast::HasVisibility for ExternCrate {} +impl ast::HasDocComments for ExternCrate {} 
+impl ExternCrate { + pub fn extern_token(&self) -> Option { support::token(&self.syntax, T![extern]) } + pub fn crate_token(&self) -> Option { support::token(&self.syntax, T![crate]) } + pub fn name_ref(&self) -> Option { support::child(&self.syntax) } + pub fn rename(&self) -> Option { support::child(&self.syntax) } + pub fn semicolon_token(&self) -> Option { support::token(&self.syntax, T![;]) } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct Fn { + pub(crate) syntax: SyntaxNode, +} +impl ast::HasAttrs for Fn {} +impl ast::HasName for Fn {} +impl ast::HasVisibility for Fn {} +impl ast::HasGenericParams for Fn {} +impl ast::HasDocComments for Fn {} +impl Fn { + pub fn default_token(&self) -> Option { support::token(&self.syntax, T![default]) } + pub fn const_token(&self) -> Option { support::token(&self.syntax, T![const]) } + pub fn async_token(&self) -> Option { support::token(&self.syntax, T![async]) } + pub fn unsafe_token(&self) -> Option { support::token(&self.syntax, T![unsafe]) } + pub fn abi(&self) -> Option { support::child(&self.syntax) } + pub fn fn_token(&self) -> Option { support::token(&self.syntax, T![fn]) } + pub fn param_list(&self) -> Option { support::child(&self.syntax) } + pub fn ret_type(&self) -> Option { support::child(&self.syntax) } + pub fn body(&self) -> Option { support::child(&self.syntax) } + pub fn semicolon_token(&self) -> Option { support::token(&self.syntax, T![;]) } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct Impl { + pub(crate) syntax: SyntaxNode, +} +impl ast::HasAttrs for Impl {} +impl ast::HasVisibility for Impl {} +impl ast::HasGenericParams for Impl {} +impl ast::HasDocComments for Impl {} +impl Impl { + pub fn default_token(&self) -> Option { support::token(&self.syntax, T![default]) } + pub fn unsafe_token(&self) -> Option { support::token(&self.syntax, T![unsafe]) } + pub fn impl_token(&self) -> Option { support::token(&self.syntax, T![impl]) } + pub fn const_token(&self) -> Option { 
support::token(&self.syntax, T![const]) } + pub fn excl_token(&self) -> Option { support::token(&self.syntax, T![!]) } + pub fn for_token(&self) -> Option { support::token(&self.syntax, T![for]) } + pub fn assoc_item_list(&self) -> Option { support::child(&self.syntax) } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct MacroRules { + pub(crate) syntax: SyntaxNode, +} +impl ast::HasAttrs for MacroRules {} +impl ast::HasName for MacroRules {} +impl ast::HasVisibility for MacroRules {} +impl ast::HasDocComments for MacroRules {} +impl MacroRules { + pub fn macro_rules_token(&self) -> Option { + support::token(&self.syntax, T![macro_rules]) + } + pub fn excl_token(&self) -> Option { support::token(&self.syntax, T![!]) } + pub fn token_tree(&self) -> Option { support::child(&self.syntax) } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct MacroDef { + pub(crate) syntax: SyntaxNode, +} +impl ast::HasAttrs for MacroDef {} +impl ast::HasName for MacroDef {} +impl ast::HasVisibility for MacroDef {} +impl ast::HasDocComments for MacroDef {} +impl MacroDef { + pub fn macro_token(&self) -> Option { support::token(&self.syntax, T![macro]) } + pub fn args(&self) -> Option { support::child(&self.syntax) } + pub fn body(&self) -> Option { support::child(&self.syntax) } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct Module { + pub(crate) syntax: SyntaxNode, +} +impl ast::HasAttrs for Module {} +impl ast::HasName for Module {} +impl ast::HasVisibility for Module {} +impl ast::HasDocComments for Module {} +impl Module { + pub fn mod_token(&self) -> Option { support::token(&self.syntax, T![mod]) } + pub fn item_list(&self) -> Option { support::child(&self.syntax) } + pub fn semicolon_token(&self) -> Option { support::token(&self.syntax, T![;]) } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct Static { + pub(crate) syntax: SyntaxNode, +} +impl ast::HasAttrs for Static {} +impl ast::HasName for Static {} +impl ast::HasVisibility 
for Static {} +impl ast::HasDocComments for Static {} +impl Static { + pub fn static_token(&self) -> Option { support::token(&self.syntax, T![static]) } + pub fn mut_token(&self) -> Option { support::token(&self.syntax, T![mut]) } + pub fn colon_token(&self) -> Option { support::token(&self.syntax, T![:]) } + pub fn ty(&self) -> Option { support::child(&self.syntax) } + pub fn eq_token(&self) -> Option { support::token(&self.syntax, T![=]) } + pub fn body(&self) -> Option { support::child(&self.syntax) } + pub fn semicolon_token(&self) -> Option { support::token(&self.syntax, T![;]) } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct Struct { + pub(crate) syntax: SyntaxNode, +} +impl ast::HasAttrs for Struct {} +impl ast::HasName for Struct {} +impl ast::HasVisibility for Struct {} +impl ast::HasGenericParams for Struct {} +impl ast::HasDocComments for Struct {} +impl Struct { + pub fn struct_token(&self) -> Option { support::token(&self.syntax, T![struct]) } + pub fn semicolon_token(&self) -> Option { support::token(&self.syntax, T![;]) } + pub fn field_list(&self) -> Option { support::child(&self.syntax) } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct Trait { + pub(crate) syntax: SyntaxNode, +} +impl ast::HasAttrs for Trait {} +impl ast::HasName for Trait {} +impl ast::HasVisibility for Trait {} +impl ast::HasGenericParams for Trait {} +impl ast::HasTypeBounds for Trait {} +impl ast::HasDocComments for Trait {} +impl Trait { + pub fn unsafe_token(&self) -> Option { support::token(&self.syntax, T![unsafe]) } + pub fn auto_token(&self) -> Option { support::token(&self.syntax, T![auto]) } + pub fn trait_token(&self) -> Option { support::token(&self.syntax, T![trait]) } + pub fn assoc_item_list(&self) -> Option { support::child(&self.syntax) } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct TypeAlias { + pub(crate) syntax: SyntaxNode, +} +impl ast::HasAttrs for TypeAlias {} +impl ast::HasName for TypeAlias {} +impl 
ast::HasVisibility for TypeAlias {}
impl ast::HasGenericParams for TypeAlias {}
impl ast::HasTypeBounds for TypeAlias {}
impl ast::HasDocComments for TypeAlias {}
impl TypeAlias {
    pub fn default_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![default]) }
    pub fn type_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![type]) }
    pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
    pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
    pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
}

/// A `union` declaration.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Union {
    pub(crate) syntax: SyntaxNode,
}
impl ast::HasAttrs for Union {}
impl ast::HasName for Union {}
impl ast::HasVisibility for Union {}
impl ast::HasGenericParams for Union {}
impl ast::HasDocComments for Union {}
impl Union {
    pub fn union_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![union]) }
    pub fn record_field_list(&self) -> Option<RecordFieldList> { support::child(&self.syntax) }
}

/// A `use ...;` declaration.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Use {
    pub(crate) syntax: SyntaxNode,
}
impl ast::HasAttrs for Use {}
impl ast::HasVisibility for Use {}
impl ast::HasDocComments for Use {}
impl Use {
    pub fn use_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![use]) }
    pub fn use_tree(&self) -> Option<UseTree> { support::child(&self.syntax) }
    pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
}

/// A visibility qualifier: `pub`, `pub(crate)`, `pub(in path)`, etc.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Visibility {
    pub(crate) syntax: SyntaxNode,
}
impl Visibility {
    pub fn pub_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![pub]) }
    pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
    pub fn in_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![in]) }
    pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
    pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
}
/// The braced list of items inside a `mod { ... }`.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct ItemList {
    pub(crate) syntax: SyntaxNode,
}
impl ast::HasAttrs for ItemList {}
impl ast::HasModuleItem for ItemList {}
impl ItemList {
    pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
    pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
}

/// An `as name` (or `as _`) rename in a use tree or extern crate.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Rename {
    pub(crate) syntax: SyntaxNode,
}
impl ast::HasName for Rename {}
impl Rename {
    pub fn as_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![as]) }
    pub fn underscore_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![_]) }
}

/// One element of a `use` declaration: `path`, `path::*`, `path::{...}`,
/// or `path as name`.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct UseTree {
    pub(crate) syntax: SyntaxNode,
}
impl UseTree {
    pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
    pub fn coloncolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![::]) }
    pub fn star_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![*]) }
    pub fn use_tree_list(&self) -> Option<UseTreeList> { support::child(&self.syntax) }
    pub fn rename(&self) -> Option<Rename> { support::child(&self.syntax) }
}

/// The braced `{ ... }` group of nested use trees.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct UseTreeList {
    pub(crate) syntax: SyntaxNode,
}
impl UseTreeList {
    pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
    pub fn use_trees(&self) -> AstChildren<UseTree> { support::children(&self.syntax) }
    pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
}

/// An `extern "abi"` specifier.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Abi {
    pub(crate) syntax: SyntaxNode,
}
impl Abi {
    pub fn extern_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![extern]) }
}

/// A `where ...` clause with its predicates.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct WhereClause {
    pub(crate) syntax: SyntaxNode,
}
impl WhereClause {
    pub fn where_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![where]) }
    pub fn predicates(&self) -> AstChildren<WherePred>
{ support::children(&self.syntax) }
}

/// A braced block expression `{ ... }`, optionally labeled.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct BlockExpr {
    pub(crate) syntax: SyntaxNode,
}
impl ast::HasAttrs for BlockExpr {}
impl BlockExpr {
    pub fn label(&self) -> Option<Label>