author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 12:02:58 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 12:02:58 +0000
commit     698f8c2f01ea549d77d7dc3338a12e04c11057b9
tree       173a775858bd501c378080a10dca74132f05bc50 /src/tools/rust-analyzer/crates/syntax/src
parent     Initial commit.
Adding upstream version 1.64.0+dfsg1. (tag: upstream/1.64.0+dfsg1)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'src/tools/rust-analyzer/crates/syntax/src')
-rw-r--r--  src/tools/rust-analyzer/crates/syntax/src/algo.rs                  |  660
-rw-r--r--  src/tools/rust-analyzer/crates/syntax/src/ast.rs                   |  367
-rw-r--r--  src/tools/rust-analyzer/crates/syntax/src/ast/edit.rs              |  174
-rw-r--r--  src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs     |  717
-rw-r--r--  src/tools/rust-analyzer/crates/syntax/src/ast/expr_ext.rs          |  410
-rw-r--r--  src/tools/rust-analyzer/crates/syntax/src/ast/generated.rs         |   41
-rw-r--r--  src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs   | 4806
-rw-r--r--  src/tools/rust-analyzer/crates/syntax/src/ast/generated/tokens.rs  |  196
-rw-r--r--  src/tools/rust-analyzer/crates/syntax/src/ast/make.rs              |  901
-rw-r--r--  src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs          |  875
-rw-r--r--  src/tools/rust-analyzer/crates/syntax/src/ast/operators.rs         |  122
-rw-r--r--  src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs         |  472
-rw-r--r--  src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs            |  136
-rw-r--r--  src/tools/rust-analyzer/crates/syntax/src/fuzz.rs                  |   75
-rw-r--r--  src/tools/rust-analyzer/crates/syntax/src/hacks.rs                 |   15
-rw-r--r--  src/tools/rust-analyzer/crates/syntax/src/lib.rs                   |  358
-rw-r--r--  src/tools/rust-analyzer/crates/syntax/src/parsing.rs               |   46
-rw-r--r--  src/tools/rust-analyzer/crates/syntax/src/parsing/reparsing.rs     |  441
-rw-r--r--  src/tools/rust-analyzer/crates/syntax/src/ptr.rs                   |  104
-rw-r--r--  src/tools/rust-analyzer/crates/syntax/src/syntax_error.rs          |   44
-rw-r--r--  src/tools/rust-analyzer/crates/syntax/src/syntax_node.rs           |   75
-rw-r--r--  src/tools/rust-analyzer/crates/syntax/src/ted.rs                   |  206
-rw-r--r--  src/tools/rust-analyzer/crates/syntax/src/tests.rs                 |  186
-rw-r--r--  src/tools/rust-analyzer/crates/syntax/src/tests/ast_src.rs         |  252
-rw-r--r--  src/tools/rust-analyzer/crates/syntax/src/tests/sourcegen_ast.rs   |  862
-rw-r--r--  src/tools/rust-analyzer/crates/syntax/src/token_text.rs            |   95
-rw-r--r--  src/tools/rust-analyzer/crates/syntax/src/utils.rs                 |   43
-rw-r--r--  src/tools/rust-analyzer/crates/syntax/src/validation.rs            |  378
-rw-r--r--  src/tools/rust-analyzer/crates/syntax/src/validation/block.rs      |   24
29 files changed, 13081 insertions, 0 deletions
diff --git a/src/tools/rust-analyzer/crates/syntax/src/algo.rs b/src/tools/rust-analyzer/crates/syntax/src/algo.rs
new file mode 100644
index 000000000..8b14789dd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/algo.rs
@@ -0,0 +1,660 @@
+//! Collection of assorted algorithms for syntax trees.
+
+use std::hash::BuildHasherDefault;
+
+use indexmap::IndexMap;
+use itertools::Itertools;
+use rustc_hash::FxHashMap;
+use text_edit::TextEditBuilder;
+
+use crate::{
+ AstNode, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken, TextRange,
+ TextSize,
+};
+
+/// Returns ancestors of the node at the offset, sorted by length. This should
+/// do the right thing at an edge, e.g. when searching for expressions at `{
+/// $0foo }` we will get the name reference instead of the whole block, which
+/// we would get if we just did `find_token_at_offset(...).flat_map(|t|
+/// t.parent().ancestors())`.
+pub fn ancestors_at_offset(
+ node: &SyntaxNode,
+ offset: TextSize,
+) -> impl Iterator<Item = SyntaxNode> {
+ node.token_at_offset(offset)
+ .map(|token| token.parent_ancestors())
+ .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len())
+}
+
+/// Finds a node of a specific AST type at the offset. Note that this is slightly
+/// imprecise: if the cursor is strictly between two nodes of the desired type,
+/// as in
+///
+/// ```no_run
+/// struct Foo {}|struct Bar;
+/// ```
+///
+/// then the shorter node will be silently preferred.
+pub fn find_node_at_offset<N: AstNode>(syntax: &SyntaxNode, offset: TextSize) -> Option<N> {
+ ancestors_at_offset(syntax, offset).find_map(N::cast)
+}
+
+pub fn find_node_at_range<N: AstNode>(syntax: &SyntaxNode, range: TextRange) -> Option<N> {
+ syntax.covering_element(range).ancestors().find_map(N::cast)
+}
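
A minimal usage sketch for the lookup helpers above (not part of the patch; assumes the crate is consumed under its workspace name `syntax`, which re-exports `TextSize`):

use syntax::{algo, ast, AstNode, SourceFile, TextSize};

#[test]
fn lookup_example() {
    let file = SourceFile::parse("fn foo() {}").tree();
    // Offset 4 points inside the identifier `foo`.
    let name: ast::Name =
        algo::find_node_at_offset(file.syntax(), TextSize::from(4)).unwrap();
    assert_eq!(name.syntax().text(), "foo");
}
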
+
+/// Skips to the next non-trivia token in the given direction.
+pub fn skip_trivia_token(mut token: SyntaxToken, direction: Direction) -> Option<SyntaxToken> {
+ while token.kind().is_trivia() {
+ token = match direction {
+ Direction::Next => token.next_token()?,
+ Direction::Prev => token.prev_token()?,
+ }
+ }
+ Some(token)
+}
+/// Skips to the next non-whitespace token in the given direction.
+pub fn skip_whitespace_token(mut token: SyntaxToken, direction: Direction) -> Option<SyntaxToken> {
+ while token.kind() == SyntaxKind::WHITESPACE {
+ token = match direction {
+ Direction::Next => token.next_token()?,
+ Direction::Prev => token.prev_token()?,
+ }
+ }
+ Some(token)
+}
+
+/// Finds the first sibling in the given direction which is not `trivia`
+pub fn non_trivia_sibling(element: SyntaxElement, direction: Direction) -> Option<SyntaxElement> {
+ return match element {
+ NodeOrToken::Node(node) => node.siblings_with_tokens(direction).skip(1).find(not_trivia),
+ NodeOrToken::Token(token) => token.siblings_with_tokens(direction).skip(1).find(not_trivia),
+ };
+
+ fn not_trivia(element: &SyntaxElement) -> bool {
+ match element {
+ NodeOrToken::Node(_) => true,
+ NodeOrToken::Token(token) => !token.kind().is_trivia(),
+ }
+ }
+}
+
+pub fn least_common_ancestor(u: &SyntaxNode, v: &SyntaxNode) -> Option<SyntaxNode> {
+ if u == v {
+ return Some(u.clone());
+ }
+
+ let u_depth = u.ancestors().count();
+ let v_depth = v.ancestors().count();
+ let keep = u_depth.min(v_depth);
+
+ let u_candidates = u.ancestors().skip(u_depth - keep);
+ let v_candidates = v.ancestors().skip(v_depth - keep);
+ let (res, _) = u_candidates.zip(v_candidates).find(|(x, y)| x == y)?;
+ Some(res)
+}
+
+pub fn neighbor<T: AstNode>(me: &T, direction: Direction) -> Option<T> {
+ me.syntax().siblings(direction).skip(1).find_map(T::cast)
+}
+
+pub fn has_errors(node: &SyntaxNode) -> bool {
+ node.children().any(|it| it.kind() == SyntaxKind::ERROR)
+}
+
+type FxIndexMap<K, V> = IndexMap<K, V, BuildHasherDefault<rustc_hash::FxHasher>>;
+
+#[derive(Debug, Hash, PartialEq, Eq)]
+enum TreeDiffInsertPos {
+ After(SyntaxElement),
+ AsFirstChild(SyntaxElement),
+}
+
+#[derive(Debug)]
+pub struct TreeDiff {
+ replacements: FxHashMap<SyntaxElement, SyntaxElement>,
+ deletions: Vec<SyntaxElement>,
+ // both the Vec and the IndexMap are used here to preserve insertion order
+ insertions: FxIndexMap<TreeDiffInsertPos, Vec<SyntaxElement>>,
+}
+
+impl TreeDiff {
+ pub fn into_text_edit(&self, builder: &mut TextEditBuilder) {
+ let _p = profile::span("into_text_edit");
+
+ for (anchor, to) in &self.insertions {
+ let offset = match anchor {
+ TreeDiffInsertPos::After(it) => it.text_range().end(),
+ TreeDiffInsertPos::AsFirstChild(it) => it.text_range().start(),
+ };
+ to.iter().for_each(|to| builder.insert(offset, to.to_string()));
+ }
+ for (from, to) in &self.replacements {
+ builder.replace(from.text_range(), to.to_string());
+ }
+ for text_range in self.deletions.iter().map(SyntaxElement::text_range) {
+ builder.delete(text_range);
+ }
+ }
+
+ pub fn is_empty(&self) -> bool {
+ self.replacements.is_empty() && self.deletions.is_empty() && self.insertions.is_empty()
+ }
+}
+
+/// Finds a (potentially minimal) diff, which, applied to `from`, will result in `to`.
+///
+/// Specifically, returns a structure that consists of replacements, insertions and deletions
+/// such that applying this map on `from` will result in `to`.
+///
+/// This function tries to find a fine-grained diff.
+pub fn diff(from: &SyntaxNode, to: &SyntaxNode) -> TreeDiff {
+ let _p = profile::span("diff");
+
+ let mut diff = TreeDiff {
+ replacements: FxHashMap::default(),
+ insertions: FxIndexMap::default(),
+ deletions: Vec::new(),
+ };
+ let (from, to) = (from.clone().into(), to.clone().into());
+
+ if !syntax_element_eq(&from, &to) {
+ go(&mut diff, from, to);
+ }
+ return diff;
+
+ fn syntax_element_eq(lhs: &SyntaxElement, rhs: &SyntaxElement) -> bool {
+ lhs.kind() == rhs.kind()
+ && lhs.text_range().len() == rhs.text_range().len()
+ && match (&lhs, &rhs) {
+ (NodeOrToken::Node(lhs), NodeOrToken::Node(rhs)) => {
+ lhs == rhs || lhs.text() == rhs.text()
+ }
+ (NodeOrToken::Token(lhs), NodeOrToken::Token(rhs)) => lhs.text() == rhs.text(),
+ _ => false,
+ }
+ }
+
+ // FIXME: this is horribly inefficient. I bet there's a cool algorithm to diff trees properly.
+ fn go(diff: &mut TreeDiff, lhs: SyntaxElement, rhs: SyntaxElement) {
+ let (lhs, rhs) = match lhs.as_node().zip(rhs.as_node()) {
+ Some((lhs, rhs)) => (lhs, rhs),
+ _ => {
+ cov_mark::hit!(diff_node_token_replace);
+ diff.replacements.insert(lhs, rhs);
+ return;
+ }
+ };
+
+ let mut look_ahead_scratch = Vec::default();
+
+ let mut rhs_children = rhs.children_with_tokens();
+ let mut lhs_children = lhs.children_with_tokens();
+ let mut last_lhs = None;
+ loop {
+ let lhs_child = lhs_children.next();
+ match (lhs_child.clone(), rhs_children.next()) {
+ (None, None) => break,
+ (None, Some(element)) => {
+ let insert_pos = match last_lhs.clone() {
+ Some(prev) => {
+ cov_mark::hit!(diff_insert);
+ TreeDiffInsertPos::After(prev)
+ }
+ // first iteration, insert into our parent as the first child
+ None => {
+ cov_mark::hit!(diff_insert_as_first_child);
+ TreeDiffInsertPos::AsFirstChild(lhs.clone().into())
+ }
+ };
+ diff.insertions.entry(insert_pos).or_insert_with(Vec::new).push(element);
+ }
+ (Some(element), None) => {
+ cov_mark::hit!(diff_delete);
+ diff.deletions.push(element);
+ }
+ (Some(ref lhs_ele), Some(ref rhs_ele)) if syntax_element_eq(lhs_ele, rhs_ele) => {}
+ (Some(lhs_ele), Some(rhs_ele)) => {
+ // The nodes differ; look for lhs_ele in rhs. If it is found, everything up
+ // to that element can be marked as an insertion. This keeps the diff minimal
+ // with regard to the insertions that were actually made, which matters for
+ // `use` insertions, as we do not want to replace the entire module node.
+ look_ahead_scratch.push(rhs_ele.clone());
+ let mut rhs_children_clone = rhs_children.clone();
+ let mut insert = false;
+ for rhs_child in &mut rhs_children_clone {
+ if syntax_element_eq(&lhs_ele, &rhs_child) {
+ cov_mark::hit!(diff_insertions);
+ insert = true;
+ break;
+ }
+ look_ahead_scratch.push(rhs_child);
+ }
+ let drain = look_ahead_scratch.drain(..);
+ if insert {
+ let insert_pos = if let Some(prev) = last_lhs.clone().filter(|_| insert) {
+ TreeDiffInsertPos::After(prev)
+ } else {
+ cov_mark::hit!(insert_first_child);
+ TreeDiffInsertPos::AsFirstChild(lhs.clone().into())
+ };
+
+ diff.insertions.entry(insert_pos).or_insert_with(Vec::new).extend(drain);
+ rhs_children = rhs_children_clone;
+ } else {
+ go(diff, lhs_ele, rhs_ele);
+ }
+ }
+ }
+ last_lhs = lhs_child.or(last_lhs);
+ }
+ }
+}
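
The typical consumer lowers a `TreeDiff` into a `TextEdit`, just as `check_diff` below does. A small sketch (not part of the patch; `TextEdit` comes from the `text_edit` crate already imported above):

use syntax::{algo, SourceFile};
use text_edit::TextEdit;

#[test]
fn diff_example() {
    let before = "use foo;";
    let after = "use foo;\nuse bar;";
    let from = SourceFile::parse(before).tree().syntax().clone();
    let to = SourceFile::parse(after).tree().syntax().clone();

    let tree_diff = algo::diff(&from, &to);
    let mut builder = TextEdit::builder();
    tree_diff.into_text_edit(&mut builder);

    let mut text = before.to_string();
    builder.finish().apply(&mut text);
    assert_eq!(text, after, "applying the diff turns `before` into `after`");
}
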
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
+ use itertools::Itertools;
+ use parser::SyntaxKind;
+ use text_edit::TextEdit;
+
+ use crate::{AstNode, SyntaxElement};
+
+ #[test]
+ fn replace_node_token() {
+ cov_mark::check!(diff_node_token_replace);
+ check_diff(
+ r#"use node;"#,
+ r#"ident"#,
+ expect![[r#"
+ insertions:
+
+
+
+ replacements:
+
+ Line 0: Token(USE_KW@0..3 "use") -> ident
+
+ deletions:
+
+ Line 1: " "
+ Line 1: node
+ Line 1: ;
+ "#]],
+ );
+ }
+
+ #[test]
+ fn replace_parent() {
+ cov_mark::check!(diff_insert_as_first_child);
+ check_diff(
+ r#""#,
+ r#"use foo::bar;"#,
+ expect![[r#"
+ insertions:
+
+ Line 0: AsFirstChild(Node(SOURCE_FILE@0..0))
+ -> use foo::bar;
+
+ replacements:
+
+
+
+ deletions:
+
+
+ "#]],
+ );
+ }
+
+ #[test]
+ fn insert_last() {
+ cov_mark::check!(diff_insert);
+ check_diff(
+ r#"
+use foo;
+use bar;"#,
+ r#"
+use foo;
+use bar;
+use baz;"#,
+ expect![[r#"
+ insertions:
+
+ Line 2: After(Node(USE@10..18))
+ -> "\n"
+ -> use baz;
+
+ replacements:
+
+
+
+ deletions:
+
+
+ "#]],
+ );
+ }
+
+ #[test]
+ fn insert_middle() {
+ check_diff(
+ r#"
+use foo;
+use baz;"#,
+ r#"
+use foo;
+use bar;
+use baz;"#,
+ expect![[r#"
+ insertions:
+
+ Line 2: After(Token(WHITESPACE@9..10 "\n"))
+ -> use bar;
+ -> "\n"
+
+ replacements:
+
+
+
+ deletions:
+
+
+ "#]],
+ )
+ }
+
+ #[test]
+ fn insert_first() {
+ check_diff(
+ r#"
+use bar;
+use baz;"#,
+ r#"
+use foo;
+use bar;
+use baz;"#,
+ expect![[r#"
+ insertions:
+
+ Line 0: After(Token(WHITESPACE@0..1 "\n"))
+ -> use foo;
+ -> "\n"
+
+ replacements:
+
+
+
+ deletions:
+
+
+ "#]],
+ )
+ }
+
+ #[test]
+ fn first_child_insertion() {
+ cov_mark::check!(insert_first_child);
+ check_diff(
+ r#"fn main() {
+ stdi
+ }"#,
+ r#"use foo::bar;
+
+ fn main() {
+ stdi
+ }"#,
+ expect![[r#"
+ insertions:
+
+ Line 0: AsFirstChild(Node(SOURCE_FILE@0..30))
+ -> use foo::bar;
+ -> "\n\n "
+
+ replacements:
+
+
+
+ deletions:
+
+
+ "#]],
+ );
+ }
+
+ #[test]
+ fn delete_last() {
+ cov_mark::check!(diff_delete);
+ check_diff(
+ r#"use foo;
+ use bar;"#,
+ r#"use foo;"#,
+ expect![[r#"
+ insertions:
+
+
+
+ replacements:
+
+
+
+ deletions:
+
+ Line 1: "\n "
+ Line 2: use bar;
+ "#]],
+ );
+ }
+
+ #[test]
+ fn delete_middle() {
+ cov_mark::check!(diff_insertions);
+ check_diff(
+ r#"
+use expect_test::{expect, Expect};
+use text_edit::TextEdit;
+
+use crate::AstNode;
+"#,
+ r#"
+use expect_test::{expect, Expect};
+
+use crate::AstNode;
+"#,
+ expect![[r#"
+ insertions:
+
+ Line 1: After(Node(USE@1..35))
+ -> "\n\n"
+ -> use crate::AstNode;
+
+ replacements:
+
+
+
+ deletions:
+
+ Line 2: use text_edit::TextEdit;
+ Line 3: "\n\n"
+ Line 4: use crate::AstNode;
+ Line 5: "\n"
+ "#]],
+ )
+ }
+
+ #[test]
+ fn delete_first() {
+ check_diff(
+ r#"
+use text_edit::TextEdit;
+
+use crate::AstNode;
+"#,
+ r#"
+use crate::AstNode;
+"#,
+ expect![[r#"
+ insertions:
+
+
+
+ replacements:
+
+ Line 2: Token(IDENT@5..14 "text_edit") -> crate
+ Line 2: Token(IDENT@16..24 "TextEdit") -> AstNode
+ Line 2: Token(WHITESPACE@25..27 "\n\n") -> "\n"
+
+ deletions:
+
+ Line 3: use crate::AstNode;
+ Line 4: "\n"
+ "#]],
+ )
+ }
+
+ #[test]
+ fn merge_use() {
+ check_diff(
+ r#"
+use std::{
+ fmt,
+ hash::BuildHasherDefault,
+ ops::{self, RangeInclusive},
+};
+"#,
+ r#"
+use std::fmt;
+use std::hash::BuildHasherDefault;
+use std::ops::{self, RangeInclusive};
+"#,
+ expect![[r#"
+ insertions:
+
+ Line 2: After(Node(PATH_SEGMENT@5..8))
+ -> ::
+ -> fmt
+ Line 6: After(Token(WHITESPACE@86..87 "\n"))
+ -> use std::hash::BuildHasherDefault;
+ -> "\n"
+ -> use std::ops::{self, RangeInclusive};
+ -> "\n"
+
+ replacements:
+
+ Line 2: Token(IDENT@5..8 "std") -> std
+
+ deletions:
+
+ Line 2: ::
+ Line 2: {
+ fmt,
+ hash::BuildHasherDefault,
+ ops::{self, RangeInclusive},
+ }
+ "#]],
+ )
+ }
+
+ #[test]
+ fn early_return_assist() {
+ check_diff(
+ r#"
+fn main() {
+ if let Ok(x) = Err(92) {
+ foo(x);
+ }
+}
+ "#,
+ r#"
+fn main() {
+ let x = match Err(92) {
+ Ok(it) => it,
+ _ => return,
+ };
+ foo(x);
+}
+ "#,
+ expect![[r#"
+ insertions:
+
+ Line 3: After(Node(BLOCK_EXPR@40..63))
+ -> " "
+ -> match Err(92) {
+ Ok(it) => it,
+ _ => return,
+ }
+ -> ;
+ Line 3: After(Node(IF_EXPR@17..63))
+ -> "\n "
+ -> foo(x);
+
+ replacements:
+
+ Line 3: Token(IF_KW@17..19 "if") -> let
+ Line 3: Token(LET_KW@20..23 "let") -> x
+ Line 3: Node(BLOCK_EXPR@40..63) -> =
+
+ deletions:
+
+ Line 3: " "
+ Line 3: Ok(x)
+ Line 3: " "
+ Line 3: =
+ Line 3: " "
+ Line 3: Err(92)
+ "#]],
+ )
+ }
+
+ fn check_diff(from: &str, to: &str, expected_diff: Expect) {
+ let from_node = crate::SourceFile::parse(from).tree().syntax().clone();
+ let to_node = crate::SourceFile::parse(to).tree().syntax().clone();
+ let diff = super::diff(&from_node, &to_node);
+
+ let line_number =
+ |syn: &SyntaxElement| from[..syn.text_range().start().into()].lines().count();
+
+ let fmt_syntax = |syn: &SyntaxElement| match syn.kind() {
+ SyntaxKind::WHITESPACE => format!("{:?}", syn.to_string()),
+ _ => format!("{}", syn),
+ };
+
+ let insertions =
+ diff.insertions.iter().format_with("\n", |(k, v), f| -> Result<(), std::fmt::Error> {
+ f(&format!(
+ "Line {}: {:?}\n-> {}",
+ line_number(match k {
+ super::TreeDiffInsertPos::After(syn) => syn,
+ super::TreeDiffInsertPos::AsFirstChild(syn) => syn,
+ }),
+ k,
+ v.iter().format_with("\n-> ", |v, f| f(&fmt_syntax(v)))
+ ))
+ });
+
+ let replacements = diff
+ .replacements
+ .iter()
+ .sorted_by_key(|(syntax, _)| syntax.text_range().start())
+ .format_with("\n", |(k, v), f| {
+ f(&format!("Line {}: {:?} -> {}", line_number(k), k, fmt_syntax(v)))
+ });
+
+ let deletions = diff
+ .deletions
+ .iter()
+ .format_with("\n", |v, f| f(&format!("Line {}: {}", line_number(v), &fmt_syntax(v))));
+
+ let actual = format!(
+ "insertions:\n\n{}\n\nreplacements:\n\n{}\n\ndeletions:\n\n{}\n",
+ insertions, replacements, deletions
+ );
+ expected_diff.assert_eq(&actual);
+
+ let mut from = from.to_owned();
+ let mut text_edit = TextEdit::builder();
+ diff.into_text_edit(&mut text_edit);
+ text_edit.finish().apply(&mut from);
+ assert_eq!(&*from, to, "diff did not turn `from` to `to`");
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast.rs b/src/tools/rust-analyzer/crates/syntax/src/ast.rs
new file mode 100644
index 000000000..4aa64d0d6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast.rs
@@ -0,0 +1,367 @@
+//! Abstract Syntax Tree, layered on top of untyped `SyntaxNode`s
+
+mod generated;
+mod traits;
+mod token_ext;
+mod node_ext;
+mod expr_ext;
+mod operators;
+pub mod edit;
+pub mod edit_in_place;
+pub mod make;
+
+use std::marker::PhantomData;
+
+use crate::{
+ syntax_node::{SyntaxNode, SyntaxNodeChildren, SyntaxToken},
+ SyntaxKind,
+};
+
+pub use self::{
+ expr_ext::{ArrayExprKind, BlockModifier, CallableExpr, ElseBranch, LiteralKind},
+ generated::{nodes::*, tokens::*},
+ node_ext::{
+ AttrKind, FieldKind, Macro, NameLike, NameOrNameRef, PathSegmentKind, SelfParamKind,
+ SlicePatComponents, StructKind, TypeBoundKind, TypeOrConstParam, VisibilityKind,
+ },
+ operators::{ArithOp, BinaryOp, CmpOp, LogicOp, Ordering, RangeOp, UnaryOp},
+ token_ext::{CommentKind, CommentPlacement, CommentShape, IsString, QuoteOffsets, Radix},
+ traits::{
+ AttrDocCommentIter, DocCommentIter, HasArgList, HasAttrs, HasDocComments, HasGenericParams,
+ HasLoopBody, HasModuleItem, HasName, HasTypeBounds, HasVisibility,
+ },
+};
+
+/// The main trait to go from untyped `SyntaxNode` to a typed ast. The
+/// conversion itself has zero runtime cost: ast and syntax nodes have exactly
+/// the same representation: a pointer to the tree root and a pointer to the
+/// node itself.
+pub trait AstNode {
+ fn can_cast(kind: SyntaxKind) -> bool
+ where
+ Self: Sized;
+
+ fn cast(syntax: SyntaxNode) -> Option<Self>
+ where
+ Self: Sized;
+
+ fn syntax(&self) -> &SyntaxNode;
+ fn clone_for_update(&self) -> Self
+ where
+ Self: Sized,
+ {
+ Self::cast(self.syntax().clone_for_update()).unwrap()
+ }
+ fn clone_subtree(&self) -> Self
+ where
+ Self: Sized,
+ {
+ Self::cast(self.syntax().clone_subtree()).unwrap()
+ }
+}
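
A sketch of the zero-cost conversion described in the doc comment above (not part of the patch; `ast::Fn` and `HasName` are re-exported from this module):

use syntax::{ast, ast::HasName, AstNode, SourceFile};

#[test]
fn cast_example() {
    let file = SourceFile::parse("fn foo() {}").tree();
    // Walk the untyped tree and cast the first FN node into its typed wrapper.
    let func = file.syntax().descendants().find_map(ast::Fn::cast).unwrap();
    assert_eq!(func.name().unwrap().syntax().text(), "foo");
}
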
+
+/// Like `AstNode`, but wraps tokens rather than interior nodes.
+pub trait AstToken {
+ fn can_cast(token: SyntaxKind) -> bool
+ where
+ Self: Sized;
+
+ fn cast(syntax: SyntaxToken) -> Option<Self>
+ where
+ Self: Sized;
+
+ fn syntax(&self) -> &SyntaxToken;
+
+ fn text(&self) -> &str {
+ self.syntax().text()
+ }
+}
+
+/// An iterator over `SyntaxNode` children of a particular AST type.
+#[derive(Debug, Clone)]
+pub struct AstChildren<N> {
+ inner: SyntaxNodeChildren,
+ ph: PhantomData<N>,
+}
+
+impl<N> AstChildren<N> {
+ fn new(parent: &SyntaxNode) -> Self {
+ AstChildren { inner: parent.children(), ph: PhantomData }
+ }
+}
+
+impl<N: AstNode> Iterator for AstChildren<N> {
+ type Item = N;
+ fn next(&mut self) -> Option<N> {
+ self.inner.find_map(N::cast)
+ }
+}
+
+mod support {
+ use super::{AstChildren, AstNode, SyntaxKind, SyntaxNode, SyntaxToken};
+
+ pub(super) fn child<N: AstNode>(parent: &SyntaxNode) -> Option<N> {
+ parent.children().find_map(N::cast)
+ }
+
+ pub(super) fn children<N: AstNode>(parent: &SyntaxNode) -> AstChildren<N> {
+ AstChildren::new(parent)
+ }
+
+ pub(super) fn token(parent: &SyntaxNode, kind: SyntaxKind) -> Option<SyntaxToken> {
+ parent.children_with_tokens().filter_map(|it| it.into_token()).find(|it| it.kind() == kind)
+ }
+}
+
+#[test]
+fn assert_ast_is_object_safe() {
+ fn _f(_: &dyn AstNode, _: &dyn HasName) {}
+}
+
+#[test]
+fn test_doc_comment_none() {
+ let file = SourceFile::parse(
+ r#"
+ // non-doc
+ mod foo {}
+ "#,
+ )
+ .ok()
+ .unwrap();
+ let module = file.syntax().descendants().find_map(Module::cast).unwrap();
+ assert!(module.doc_comments().doc_comment_text().is_none());
+}
+
+#[test]
+fn test_outer_doc_comment_of_items() {
+ let file = SourceFile::parse(
+ r#"
+ /// doc
+ // non-doc
+ mod foo {}
+ "#,
+ )
+ .ok()
+ .unwrap();
+ let module = file.syntax().descendants().find_map(Module::cast).unwrap();
+ assert_eq!(" doc", module.doc_comments().doc_comment_text().unwrap());
+}
+
+#[test]
+fn test_inner_doc_comment_of_items() {
+ let file = SourceFile::parse(
+ r#"
+ //! doc
+ // non-doc
+ mod foo {}
+ "#,
+ )
+ .ok()
+ .unwrap();
+ let module = file.syntax().descendants().find_map(Module::cast).unwrap();
+ assert!(module.doc_comments().doc_comment_text().is_none());
+}
+
+#[test]
+fn test_doc_comment_of_statics() {
+ let file = SourceFile::parse(
+ r#"
+ /// Number of levels
+ static LEVELS: i32 = 0;
+ "#,
+ )
+ .ok()
+ .unwrap();
+ let st = file.syntax().descendants().find_map(Static::cast).unwrap();
+ assert_eq!(" Number of levels", st.doc_comments().doc_comment_text().unwrap());
+}
+
+#[test]
+fn test_doc_comment_preserves_indents() {
+ let file = SourceFile::parse(
+ r#"
+ /// doc1
+ /// ```
+ /// fn foo() {
+ /// // ...
+ /// }
+ /// ```
+ mod foo {}
+ "#,
+ )
+ .ok()
+ .unwrap();
+ let module = file.syntax().descendants().find_map(Module::cast).unwrap();
+ assert_eq!(
+ " doc1\n ```\n fn foo() {\n // ...\n }\n ```",
+ module.doc_comments().doc_comment_text().unwrap()
+ );
+}
+
+#[test]
+fn test_doc_comment_preserves_newlines() {
+ let file = SourceFile::parse(
+ r#"
+ /// this
+ /// is
+ /// mod
+ /// foo
+ mod foo {}
+ "#,
+ )
+ .ok()
+ .unwrap();
+ let module = file.syntax().descendants().find_map(Module::cast).unwrap();
+ assert_eq!(" this\n is\n mod\n foo", module.doc_comments().doc_comment_text().unwrap());
+}
+
+#[test]
+fn test_doc_comment_single_line_block_strips_suffix() {
+ let file = SourceFile::parse(
+ r#"
+ /** this is mod foo*/
+ mod foo {}
+ "#,
+ )
+ .ok()
+ .unwrap();
+ let module = file.syntax().descendants().find_map(Module::cast).unwrap();
+ assert_eq!(" this is mod foo", module.doc_comments().doc_comment_text().unwrap());
+}
+
+#[test]
+fn test_doc_comment_single_line_block_strips_suffix_whitespace() {
+ let file = SourceFile::parse(
+ r#"
+ /** this is mod foo */
+ mod foo {}
+ "#,
+ )
+ .ok()
+ .unwrap();
+ let module = file.syntax().descendants().find_map(Module::cast).unwrap();
+ assert_eq!(" this is mod foo ", module.doc_comments().doc_comment_text().unwrap());
+}
+
+#[test]
+fn test_doc_comment_multi_line_block_strips_suffix() {
+ let file = SourceFile::parse(
+ r#"
+ /**
+ this
+ is
+ mod foo
+ */
+ mod foo {}
+ "#,
+ )
+ .ok()
+ .unwrap();
+ let module = file.syntax().descendants().find_map(Module::cast).unwrap();
+ assert_eq!(
+ "\n this\n is\n mod foo\n ",
+ module.doc_comments().doc_comment_text().unwrap()
+ );
+}
+
+#[test]
+fn test_comments_preserve_trailing_whitespace() {
+ let file = SourceFile::parse(
+ "\n/// Representation of a Realm. \n/// In the specification these are called Realm Records.\nstruct Realm {}",
+ )
+ .ok()
+ .unwrap();
+ let def = file.syntax().descendants().find_map(Struct::cast).unwrap();
+ assert_eq!(
+ " Representation of a Realm. \n In the specification these are called Realm Records.",
+ def.doc_comments().doc_comment_text().unwrap()
+ );
+}
+
+#[test]
+fn test_four_slash_line_comment() {
+ let file = SourceFile::parse(
+ r#"
+ //// too many slashes to be a doc comment
+ /// doc comment
+ mod foo {}
+ "#,
+ )
+ .ok()
+ .unwrap();
+ let module = file.syntax().descendants().find_map(Module::cast).unwrap();
+ assert_eq!(" doc comment", module.doc_comments().doc_comment_text().unwrap());
+}
+
+#[test]
+fn test_where_predicates() {
+ fn assert_bound(text: &str, bound: Option<TypeBound>) {
+ assert_eq!(text, bound.unwrap().syntax().text().to_string());
+ }
+
+ let file = SourceFile::parse(
+ r#"
+fn foo()
+where
+ T: Clone + Copy + Debug + 'static,
+ 'a: 'b + 'c,
+ Iterator::Item: 'a + Debug,
+ Iterator::Item: Debug + 'a,
+ <T as Iterator>::Item: Debug + 'a,
+ for<'a> F: Fn(&'a str)
+{}
+ "#,
+ )
+ .ok()
+ .unwrap();
+ let where_clause = file.syntax().descendants().find_map(WhereClause::cast).unwrap();
+
+ let mut predicates = where_clause.predicates();
+
+ let pred = predicates.next().unwrap();
+ let mut bounds = pred.type_bound_list().unwrap().bounds();
+
+ assert!(pred.for_token().is_none());
+ assert!(pred.generic_param_list().is_none());
+ assert_eq!("T", pred.ty().unwrap().syntax().text().to_string());
+ assert_bound("Clone", bounds.next());
+ assert_bound("Copy", bounds.next());
+ assert_bound("Debug", bounds.next());
+ assert_bound("'static", bounds.next());
+
+ let pred = predicates.next().unwrap();
+ let mut bounds = pred.type_bound_list().unwrap().bounds();
+
+ assert_eq!("'a", pred.lifetime().unwrap().lifetime_ident_token().unwrap().text());
+
+ assert_bound("'b", bounds.next());
+ assert_bound("'c", bounds.next());
+
+ let pred = predicates.next().unwrap();
+ let mut bounds = pred.type_bound_list().unwrap().bounds();
+
+ assert_eq!("Iterator::Item", pred.ty().unwrap().syntax().text().to_string());
+ assert_bound("'a", bounds.next());
+
+ let pred = predicates.next().unwrap();
+ let mut bounds = pred.type_bound_list().unwrap().bounds();
+
+ assert_eq!("Iterator::Item", pred.ty().unwrap().syntax().text().to_string());
+ assert_bound("Debug", bounds.next());
+ assert_bound("'a", bounds.next());
+
+ let pred = predicates.next().unwrap();
+ let mut bounds = pred.type_bound_list().unwrap().bounds();
+
+ assert_eq!("<T as Iterator>::Item", pred.ty().unwrap().syntax().text().to_string());
+ assert_bound("Debug", bounds.next());
+ assert_bound("'a", bounds.next());
+
+ let pred = predicates.next().unwrap();
+ let mut bounds = pred.type_bound_list().unwrap().bounds();
+
+ assert!(pred.for_token().is_some());
+ assert_eq!("<'a>", pred.generic_param_list().unwrap().syntax().text().to_string());
+ assert_eq!("F", pred.ty().unwrap().syntax().text().to_string());
+ assert_bound("Fn(&'a str)", bounds.next());
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/edit.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/edit.rs
new file mode 100644
index 000000000..15805dfc8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/edit.rs
@@ -0,0 +1,174 @@
+//! This module contains functions for editing syntax trees. As the trees are
+//! immutable, all functions here return a fresh copy of the tree, instead of
+//! doing an in-place modification.
+use std::{fmt, iter, ops};
+
+use crate::{
+ ast::{self, make, AstNode},
+ ted, AstToken, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxToken,
+};
+
+#[derive(Debug, Clone, Copy)]
+pub struct IndentLevel(pub u8);
+
+impl From<u8> for IndentLevel {
+ fn from(level: u8) -> IndentLevel {
+ IndentLevel(level)
+ }
+}
+
+impl fmt::Display for IndentLevel {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let spaces = " ";
+ let buf;
+ let len = self.0 as usize * 4;
+ let indent = if len <= spaces.len() {
+ &spaces[..len]
+ } else {
+ buf = " ".repeat(len);
+ &buf
+ };
+ fmt::Display::fmt(indent, f)
+ }
+}
+
+impl ops::Add<u8> for IndentLevel {
+ type Output = IndentLevel;
+ fn add(self, rhs: u8) -> IndentLevel {
+ IndentLevel(self.0 + rhs)
+ }
+}
+
+impl IndentLevel {
+ pub fn single() -> IndentLevel {
+ IndentLevel(0)
+ }
+ pub fn is_zero(&self) -> bool {
+ self.0 == 0
+ }
+ pub fn from_element(element: &SyntaxElement) -> IndentLevel {
+ match element {
+ rowan::NodeOrToken::Node(it) => IndentLevel::from_node(it),
+ rowan::NodeOrToken::Token(it) => IndentLevel::from_token(it),
+ }
+ }
+
+ pub fn from_node(node: &SyntaxNode) -> IndentLevel {
+ match node.first_token() {
+ Some(it) => Self::from_token(&it),
+ None => IndentLevel(0),
+ }
+ }
+
+ pub fn from_token(token: &SyntaxToken) -> IndentLevel {
+ for ws in prev_tokens(token.clone()).filter_map(ast::Whitespace::cast) {
+ let text = ws.syntax().text();
+ if let Some(pos) = text.rfind('\n') {
+ let level = text[pos + 1..].chars().count() / 4;
+ return IndentLevel(level as u8);
+ }
+ }
+ IndentLevel(0)
+ }
+
+ /// XXX: this intentionally doesn't change the indent of the very first token.
+ /// I.e., in something like
+ /// ```
+ /// fn foo() {
+ /// 92
+ /// }
+ /// ```
+ /// if you indent the block, the `{` token would stay put.
+ pub(super) fn increase_indent(self, node: &SyntaxNode) {
+ let tokens = node.preorder_with_tokens().filter_map(|event| match event {
+ rowan::WalkEvent::Leave(NodeOrToken::Token(it)) => Some(it),
+ _ => None,
+ });
+ for token in tokens {
+ if let Some(ws) = ast::Whitespace::cast(token) {
+ if ws.text().contains('\n') {
+ let new_ws = make::tokens::whitespace(&format!("{}{}", ws.syntax(), self));
+ ted::replace(ws.syntax(), &new_ws);
+ }
+ }
+ }
+ }
+
+ pub(super) fn decrease_indent(self, node: &SyntaxNode) {
+ let tokens = node.preorder_with_tokens().filter_map(|event| match event {
+ rowan::WalkEvent::Leave(NodeOrToken::Token(it)) => Some(it),
+ _ => None,
+ });
+ for token in tokens {
+ if let Some(ws) = ast::Whitespace::cast(token) {
+ if ws.text().contains('\n') {
+ let new_ws = make::tokens::whitespace(
+ &ws.syntax().text().replace(&format!("\n{}", self), "\n"),
+ );
+ ted::replace(ws.syntax(), &new_ws);
+ }
+ }
+ }
+ }
+}
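
A small sketch of how `IndentLevel` is derived from leading whitespace, four spaces per level as implemented in `from_token` (not part of the patch):

use syntax::{ast, ast::edit::IndentLevel, AstNode, SourceFile};

#[test]
fn indent_level_example() {
    let file = SourceFile::parse("fn f() {\n    let x = 1;\n}").tree();
    let stmt = file.syntax().descendants().find_map(ast::LetStmt::cast).unwrap();
    // The statement is indented by four spaces, i.e. one level.
    assert_eq!(IndentLevel::from_node(stmt.syntax()).0, 1);
}
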
+
+fn prev_tokens(token: SyntaxToken) -> impl Iterator<Item = SyntaxToken> {
+ iter::successors(Some(token), |token| token.prev_token())
+}
+
+/// Soft-deprecated in favor of the mutable tree editing API `edit_in_place::Indent`.
+pub trait AstNodeEdit: AstNode + Clone + Sized {
+ fn indent_level(&self) -> IndentLevel {
+ IndentLevel::from_node(self.syntax())
+ }
+ #[must_use]
+ fn indent(&self, level: IndentLevel) -> Self {
+ fn indent_inner(node: &SyntaxNode, level: IndentLevel) -> SyntaxNode {
+ let res = node.clone_subtree().clone_for_update();
+ level.increase_indent(&res);
+ res.clone_subtree()
+ }
+
+ Self::cast(indent_inner(self.syntax(), level)).unwrap()
+ }
+ #[must_use]
+ fn dedent(&self, level: IndentLevel) -> Self {
+ fn dedent_inner(node: &SyntaxNode, level: IndentLevel) -> SyntaxNode {
+ let res = node.clone_subtree().clone_for_update();
+ level.decrease_indent(&res);
+ res.clone_subtree()
+ }
+
+ Self::cast(dedent_inner(self.syntax(), level)).unwrap()
+ }
+ #[must_use]
+ fn reset_indent(&self) -> Self {
+ let level = IndentLevel::from_node(self.syntax());
+ self.dedent(level)
+ }
+}
+
+impl<N: AstNode + Clone> AstNodeEdit for N {}
+
+#[test]
+fn test_increase_indent() {
+ let arm_list = {
+ let arm = make::match_arm(iter::once(make::wildcard_pat().into()), None, make::expr_unit());
+ make::match_arm_list(vec![arm.clone(), arm])
+ };
+ assert_eq!(
+ arm_list.syntax().to_string(),
+ "{
+ _ => (),
+ _ => (),
+}"
+ );
+ let indented = arm_list.indent(IndentLevel(2));
+ assert_eq!(
+ indented.syntax().to_string(),
+ "{
+ _ => (),
+ _ => (),
+ }"
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs
new file mode 100644
index 000000000..e3e928aec
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs
@@ -0,0 +1,717 @@
+//! Structural editing for ast.
+
+use std::iter::{empty, successors};
+
+use parser::{SyntaxKind, T};
+use rowan::SyntaxElement;
+
+use crate::{
+ algo::{self, neighbor},
+ ast::{self, edit::IndentLevel, make, HasGenericParams},
+ ted::{self, Position},
+ AstNode, AstToken, Direction,
+ SyntaxKind::{ATTR, COMMENT, WHITESPACE},
+ SyntaxNode,
+};
+
+use super::HasName;
+
+pub trait GenericParamsOwnerEdit: ast::HasGenericParams {
+ fn get_or_create_generic_param_list(&self) -> ast::GenericParamList;
+ fn get_or_create_where_clause(&self) -> ast::WhereClause;
+}
+
+impl GenericParamsOwnerEdit for ast::Fn {
+ fn get_or_create_generic_param_list(&self) -> ast::GenericParamList {
+ match self.generic_param_list() {
+ Some(it) => it,
+ None => {
+ let position = if let Some(name) = self.name() {
+ Position::after(name.syntax)
+ } else if let Some(fn_token) = self.fn_token() {
+ Position::after(fn_token)
+ } else if let Some(param_list) = self.param_list() {
+ Position::before(param_list.syntax)
+ } else {
+ Position::last_child_of(self.syntax())
+ };
+ create_generic_param_list(position)
+ }
+ }
+ }
+
+ fn get_or_create_where_clause(&self) -> ast::WhereClause {
+ if self.where_clause().is_none() {
+ let position = if let Some(ty) = self.ret_type() {
+ Position::after(ty.syntax())
+ } else if let Some(param_list) = self.param_list() {
+ Position::after(param_list.syntax())
+ } else {
+ Position::last_child_of(self.syntax())
+ };
+ create_where_clause(position);
+ }
+ self.where_clause().unwrap()
+ }
+}
+
+impl GenericParamsOwnerEdit for ast::Impl {
+ fn get_or_create_generic_param_list(&self) -> ast::GenericParamList {
+ match self.generic_param_list() {
+ Some(it) => it,
+ None => {
+ let position = match self.impl_token() {
+ Some(imp_token) => Position::after(imp_token),
+ None => Position::last_child_of(self.syntax()),
+ };
+ create_generic_param_list(position)
+ }
+ }
+ }
+
+ fn get_or_create_where_clause(&self) -> ast::WhereClause {
+ if self.where_clause().is_none() {
+ let position = match self.assoc_item_list() {
+ Some(items) => Position::before(items.syntax()),
+ None => Position::last_child_of(self.syntax()),
+ };
+ create_where_clause(position);
+ }
+ self.where_clause().unwrap()
+ }
+}
+
+impl GenericParamsOwnerEdit for ast::Trait {
+ fn get_or_create_generic_param_list(&self) -> ast::GenericParamList {
+ match self.generic_param_list() {
+ Some(it) => it,
+ None => {
+ let position = if let Some(name) = self.name() {
+ Position::after(name.syntax)
+ } else if let Some(trait_token) = self.trait_token() {
+ Position::after(trait_token)
+ } else {
+ Position::last_child_of(self.syntax())
+ };
+ create_generic_param_list(position)
+ }
+ }
+ }
+
+ fn get_or_create_where_clause(&self) -> ast::WhereClause {
+ if self.where_clause().is_none() {
+ let position = match self.assoc_item_list() {
+ Some(items) => Position::before(items.syntax()),
+ None => Position::last_child_of(self.syntax()),
+ };
+ create_where_clause(position);
+ }
+ self.where_clause().unwrap()
+ }
+}
+
+impl GenericParamsOwnerEdit for ast::Struct {
+ fn get_or_create_generic_param_list(&self) -> ast::GenericParamList {
+ match self.generic_param_list() {
+ Some(it) => it,
+ None => {
+ let position = if let Some(name) = self.name() {
+ Position::after(name.syntax)
+ } else if let Some(struct_token) = self.struct_token() {
+ Position::after(struct_token)
+ } else {
+ Position::last_child_of(self.syntax())
+ };
+ create_generic_param_list(position)
+ }
+ }
+ }
+
+ fn get_or_create_where_clause(&self) -> ast::WhereClause {
+ if self.where_clause().is_none() {
+ let tfl = self.field_list().and_then(|fl| match fl {
+ ast::FieldList::RecordFieldList(_) => None,
+ ast::FieldList::TupleFieldList(it) => Some(it),
+ });
+ let position = if let Some(tfl) = tfl {
+ Position::after(tfl.syntax())
+ } else if let Some(gpl) = self.generic_param_list() {
+ Position::after(gpl.syntax())
+ } else if let Some(name) = self.name() {
+ Position::after(name.syntax())
+ } else {
+ Position::last_child_of(self.syntax())
+ };
+ create_where_clause(position);
+ }
+ self.where_clause().unwrap()
+ }
+}
+
+impl GenericParamsOwnerEdit for ast::Enum {
+ fn get_or_create_generic_param_list(&self) -> ast::GenericParamList {
+ match self.generic_param_list() {
+ Some(it) => it,
+ None => {
+ let position = if let Some(name) = self.name() {
+ Position::after(name.syntax)
+ } else if let Some(enum_token) = self.enum_token() {
+ Position::after(enum_token)
+ } else {
+ Position::last_child_of(self.syntax())
+ };
+ create_generic_param_list(position)
+ }
+ }
+ }
+
+ fn get_or_create_where_clause(&self) -> ast::WhereClause {
+ if self.where_clause().is_none() {
+ let position = if let Some(gpl) = self.generic_param_list() {
+ Position::after(gpl.syntax())
+ } else if let Some(name) = self.name() {
+ Position::after(name.syntax())
+ } else {
+ Position::last_child_of(self.syntax())
+ };
+ create_where_clause(position);
+ }
+ self.where_clause().unwrap()
+ }
+}
+
+fn create_where_clause(position: Position) {
+ let where_clause = make::where_clause(empty()).clone_for_update();
+ ted::insert(position, where_clause.syntax());
+}
+
+fn create_generic_param_list(position: Position) -> ast::GenericParamList {
+ let gpl = make::generic_param_list(empty()).clone_for_update();
+ ted::insert_raw(position, gpl.syntax());
+ gpl
+}
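
A sketch of the `get_or_create_*` pattern on a mutable tree, mirroring the tests at the end of this file (not part of the patch):

use syntax::{ast, ast::edit_in_place::GenericParamsOwnerEdit, AstNode, SourceFile};

#[test]
fn get_or_create_example() {
    let func = SourceFile::parse("fn foo() {}")
        .tree()
        .syntax()
        .descendants()
        .find_map(ast::Fn::cast)
        .unwrap()
        .clone_for_update();
    // Inserts an empty `<>` right after the function name.
    func.get_or_create_generic_param_list();
    assert_eq!(func.to_string(), "fn foo<>() {}");
}
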
+
+pub trait AttrsOwnerEdit: ast::HasAttrs {
+ fn remove_attrs_and_docs(&self) {
+ remove_attrs_and_docs(self.syntax());
+
+ fn remove_attrs_and_docs(node: &SyntaxNode) {
+ let mut remove_next_ws = false;
+ for child in node.children_with_tokens() {
+ match child.kind() {
+ ATTR | COMMENT => {
+ remove_next_ws = true;
+ child.detach();
+ continue;
+ }
+ WHITESPACE if remove_next_ws => {
+ child.detach();
+ }
+ _ => (),
+ }
+ remove_next_ws = false;
+ }
+ }
+ }
+}
+
+impl<T: ast::HasAttrs> AttrsOwnerEdit for T {}
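
A sketch of `remove_attrs_and_docs` on a mutable node (not part of the patch):

use syntax::{ast, ast::edit_in_place::AttrsOwnerEdit, AstNode, SourceFile};

#[test]
fn strip_attrs_example() {
    let strukt = SourceFile::parse("#[derive(Debug)]\n/// Docs.\nstruct S;")
        .tree()
        .syntax()
        .descendants()
        .find_map(ast::Struct::cast)
        .unwrap()
        .clone_for_update();
    // Attributes, doc comments, and the whitespace that followed them are removed.
    strukt.remove_attrs_and_docs();
    assert_eq!(strukt.to_string(), "struct S;");
}
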
+
+impl ast::GenericParamList {
+ pub fn add_generic_param(&self, generic_param: ast::GenericParam) {
+ match self.generic_params().last() {
+ Some(last_param) => {
+ let position = Position::after(last_param.syntax());
+ let elements = vec![
+ make::token(T![,]).into(),
+ make::tokens::single_space().into(),
+ generic_param.syntax().clone().into(),
+ ];
+ ted::insert_all(position, elements);
+ }
+ None => {
+ let after_l_angle = Position::after(self.l_angle_token().unwrap());
+ ted::insert(after_l_angle, generic_param.syntax());
+ }
+ }
+ }
+}
+
+impl ast::WhereClause {
+ pub fn add_predicate(&self, predicate: ast::WherePred) {
+ if let Some(pred) = self.predicates().last() {
+ if !pred.syntax().siblings_with_tokens(Direction::Next).any(|it| it.kind() == T![,]) {
+ ted::append_child_raw(self.syntax(), make::token(T![,]));
+ }
+ }
+ ted::append_child(self.syntax(), predicate.syntax());
+ }
+}
+
+impl ast::TypeBoundList {
+ pub fn remove(&self) {
+ match self.syntax().siblings_with_tokens(Direction::Prev).find(|it| it.kind() == T![:]) {
+ Some(colon) => ted::remove_all(colon..=self.syntax().clone().into()),
+ None => ted::remove(self.syntax()),
+ }
+ }
+}
+
+impl ast::PathSegment {
+ pub fn get_or_create_generic_arg_list(&self) -> ast::GenericArgList {
+ if self.generic_arg_list().is_none() {
+ let arg_list = make::generic_arg_list().clone_for_update();
+ ted::append_child(self.syntax(), arg_list.syntax());
+ }
+ self.generic_arg_list().unwrap()
+ }
+}
+
+impl ast::UseTree {
+ pub fn remove(&self) {
+ for dir in [Direction::Next, Direction::Prev] {
+ if let Some(next_use_tree) = neighbor(self, dir) {
+ let separators = self
+ .syntax()
+ .siblings_with_tokens(dir)
+ .skip(1)
+ .take_while(|it| it.as_node() != Some(next_use_tree.syntax()));
+ ted::remove_all_iter(separators);
+ break;
+ }
+ }
+ ted::remove(self.syntax());
+ }
+
+ pub fn get_or_create_use_tree_list(&self) -> ast::UseTreeList {
+ match self.use_tree_list() {
+ Some(it) => it,
+ None => {
+ let position = Position::last_child_of(self.syntax());
+ let use_tree_list = make::use_tree_list(empty()).clone_for_update();
+ let mut elements = Vec::with_capacity(2);
+ if self.coloncolon_token().is_none() {
+ elements.push(make::token(T![::]).into());
+ }
+ elements.push(use_tree_list.syntax().clone().into());
+ ted::insert_all_raw(position, elements);
+ use_tree_list
+ }
+ }
+ }
+
+ /// Splits off the given prefix, making it the path component of the use tree,
+ /// appending the rest of the path to all UseTreeList items.
+ ///
+ /// # Examples
+ ///
+ /// `prefix$0::suffix` -> `prefix::{suffix}`
+ ///
+ /// `prefix$0` -> `prefix::{self}`
+ ///
+ /// `prefix$0::*` -> `prefix::{*}`
+ pub fn split_prefix(&self, prefix: &ast::Path) {
+ debug_assert_eq!(self.path(), Some(prefix.top_path()));
+ let path = self.path().unwrap();
+ if &path == prefix && self.use_tree_list().is_none() {
+ if self.star_token().is_some() {
+ // path$0::* -> *
+ self.coloncolon_token().map(ted::remove);
+ ted::remove(prefix.syntax());
+ } else {
+ // path$0 -> self
+ let self_suffix =
+ make::path_unqualified(make::path_segment_self()).clone_for_update();
+ ted::replace(path.syntax(), self_suffix.syntax());
+ }
+ } else if split_path_prefix(prefix).is_none() {
+ return;
+ }
+ // At this point, prefix path is detached; _self_ use tree has suffix path.
+ // Next, transform 'suffix' use tree into 'prefix::{suffix}'
+ let subtree = self.clone_subtree().clone_for_update();
+ ted::remove_all_iter(self.syntax().children_with_tokens());
+ ted::insert(Position::first_child_of(self.syntax()), prefix.syntax());
+ self.get_or_create_use_tree_list().add_use_tree(subtree);
+
+ fn split_path_prefix(prefix: &ast::Path) -> Option<()> {
+ let parent = prefix.parent_path()?;
+ let segment = parent.segment()?;
+ if algo::has_errors(segment.syntax()) {
+ return None;
+ }
+ for p in successors(parent.parent_path(), |it| it.parent_path()) {
+ p.segment()?;
+ }
+ prefix.parent_path().and_then(|p| p.coloncolon_token()).map(ted::remove);
+ ted::remove(prefix.syntax());
+ Some(())
+ }
+ }
+}
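
An illustrative sketch of `split_prefix` (not part of the patch; `qualifier` is the generated `ast::Path` accessor, and the expected output follows the doc comment above):

use syntax::{ast, AstNode, SourceFile};

#[test]
fn split_prefix_example() {
    let use_item = SourceFile::parse("use foo::bar;")
        .tree()
        .syntax()
        .descendants()
        .find_map(ast::Use::cast)
        .unwrap()
        .clone_for_update();
    let tree = use_item.use_tree().unwrap();
    // `foo` is the qualifier of `foo::bar`; splitting it off yields `foo::{bar}`.
    let prefix = tree.path().unwrap().qualifier().unwrap();
    tree.split_prefix(&prefix);
    assert_eq!(use_item.to_string(), "use foo::{bar};");
}
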
+
+impl ast::UseTreeList {
+ pub fn add_use_tree(&self, use_tree: ast::UseTree) {
+ let (position, elements) = match self.use_trees().last() {
+ Some(last_tree) => (
+ Position::after(last_tree.syntax()),
+ vec![
+ make::token(T![,]).into(),
+ make::tokens::single_space().into(),
+ use_tree.syntax.into(),
+ ],
+ ),
+ None => {
+ let position = match self.l_curly_token() {
+ Some(l_curly) => Position::after(l_curly),
+ None => Position::last_child_of(self.syntax()),
+ };
+ (position, vec![use_tree.syntax.into()])
+ }
+ };
+ ted::insert_all_raw(position, elements);
+ }
+}
+
+impl ast::Use {
+ pub fn remove(&self) {
+ let next_ws = self
+ .syntax()
+ .next_sibling_or_token()
+ .and_then(|it| it.into_token())
+ .and_then(ast::Whitespace::cast);
+ if let Some(next_ws) = next_ws {
+ let ws_text = next_ws.syntax().text();
+ if let Some(rest) = ws_text.strip_prefix('\n') {
+ if rest.is_empty() {
+ ted::remove(next_ws.syntax());
+ } else {
+ ted::replace(next_ws.syntax(), make::tokens::whitespace(rest));
+ }
+ }
+ }
+ ted::remove(self.syntax());
+ }
+}
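
A sketch of removing a `use` item together with its trailing newline (not part of the patch):

use syntax::{ast, AstNode, SourceFile};

#[test]
fn remove_use_example() {
    let file = SourceFile::parse("use foo;\nuse bar;\n").tree().clone_for_update();
    let first = file.syntax().descendants().find_map(ast::Use::cast).unwrap();
    // The item and the newline that follows it are both removed.
    first.remove();
    assert_eq!(file.syntax().to_string(), "use bar;\n");
}
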
+
+impl ast::Impl {
+ pub fn get_or_create_assoc_item_list(&self) -> ast::AssocItemList {
+ if self.assoc_item_list().is_none() {
+ let assoc_item_list = make::assoc_item_list().clone_for_update();
+ ted::append_child(self.syntax(), assoc_item_list.syntax());
+ }
+ self.assoc_item_list().unwrap()
+ }
+}
+
+impl ast::AssocItemList {
+ pub fn add_item(&self, item: ast::AssocItem) {
+ let (indent, position, whitespace) = match self.assoc_items().last() {
+ Some(last_item) => (
+ IndentLevel::from_node(last_item.syntax()),
+ Position::after(last_item.syntax()),
+ "\n\n",
+ ),
+ None => match self.l_curly_token() {
+ Some(l_curly) => {
+ normalize_ws_between_braces(self.syntax());
+ (IndentLevel::from_token(&l_curly) + 1, Position::after(&l_curly), "\n")
+ }
+ None => (IndentLevel::single(), Position::last_child_of(self.syntax()), "\n"),
+ },
+ };
+ let elements: Vec<SyntaxElement<_>> = vec![
+ make::tokens::whitespace(&format!("{}{}", whitespace, indent)).into(),
+ item.syntax().clone().into(),
+ ];
+ ted::insert_all(position, elements);
+ }
+}
+
+impl ast::Fn {
+ pub fn get_or_create_body(&self) -> ast::BlockExpr {
+ if self.body().is_none() {
+ let body = make::ext::empty_block_expr().clone_for_update();
+ match self.semicolon_token() {
+ Some(semi) => {
+ ted::replace(semi, body.syntax());
+ ted::insert(Position::before(body.syntax), make::tokens::single_space());
+ }
+ None => ted::append_child(self.syntax(), body.syntax()),
+ }
+ }
+ self.body().unwrap()
+ }
+}
+
+impl ast::MatchArm {
+ pub fn remove(&self) {
+ if let Some(sibling) = self.syntax().prev_sibling_or_token() {
+ if sibling.kind() == SyntaxKind::WHITESPACE {
+ ted::remove(sibling);
+ }
+ }
+ if let Some(sibling) = self.syntax().next_sibling_or_token() {
+ if sibling.kind() == T![,] {
+ ted::remove(sibling);
+ }
+ }
+ ted::remove(self.syntax());
+ }
+}
+
+impl ast::MatchArmList {
+ pub fn add_arm(&self, arm: ast::MatchArm) {
+ normalize_ws_between_braces(self.syntax());
+ let mut elements = Vec::new();
+ let position = match self.arms().last() {
+ Some(last_arm) => {
+ if needs_comma(&last_arm) {
+ ted::append_child(last_arm.syntax(), make::token(SyntaxKind::COMMA));
+ }
+ Position::after(last_arm.syntax().clone())
+ }
+ None => match self.l_curly_token() {
+ Some(it) => Position::after(it),
+ None => Position::last_child_of(self.syntax()),
+ },
+ };
+ let indent = IndentLevel::from_node(self.syntax()) + 1;
+ elements.push(make::tokens::whitespace(&format!("\n{}", indent)).into());
+ elements.push(arm.syntax().clone().into());
+ if needs_comma(&arm) {
+ ted::append_child(arm.syntax(), make::token(SyntaxKind::COMMA));
+ }
+ ted::insert_all(position, elements);
+
+ fn needs_comma(arm: &ast::MatchArm) -> bool {
+ arm.expr().map_or(false, |e| !e.is_block_like()) && arm.comma_token().is_none()
+ }
+ }
+}
+
+impl ast::RecordExprFieldList {
+ pub fn add_field(&self, field: ast::RecordExprField) {
+ let is_multiline = self.syntax().text().contains_char('\n');
+ let whitespace = if is_multiline {
+ let indent = IndentLevel::from_node(self.syntax()) + 1;
+ make::tokens::whitespace(&format!("\n{}", indent))
+ } else {
+ make::tokens::single_space()
+ };
+
+ if is_multiline {
+ normalize_ws_between_braces(self.syntax());
+ }
+
+ let position = match self.fields().last() {
+ Some(last_field) => {
+ let comma = match last_field
+ .syntax()
+ .siblings_with_tokens(Direction::Next)
+ .filter_map(|it| it.into_token())
+ .find(|it| it.kind() == T![,])
+ {
+ Some(it) => it,
+ None => {
+ let comma = ast::make::token(T![,]);
+ ted::insert(Position::after(last_field.syntax()), &comma);
+ comma
+ }
+ };
+ Position::after(comma)
+ }
+ None => match self.l_curly_token() {
+ Some(it) => Position::after(it),
+ None => Position::last_child_of(self.syntax()),
+ },
+ };
+
+ ted::insert_all(position, vec![whitespace.into(), field.syntax().clone().into()]);
+ if is_multiline {
+ ted::insert(Position::after(field.syntax()), ast::make::token(T![,]));
+ }
+ }
+}
+
+impl ast::RecordExprField {
+ /// This will either replace the initializer, or in the case that this is a shorthand convert
+ /// the initializer into the name ref and insert the expr as the new initializer.
+ pub fn replace_expr(&self, expr: ast::Expr) {
+ if self.name_ref().is_some() {
+ match self.expr() {
+ Some(prev) => ted::replace(prev.syntax(), expr.syntax()),
+ None => ted::append_child(self.syntax(), expr.syntax()),
+ }
+ return;
+ }
+ // this is a shorthand
+ if let Some(ast::Expr::PathExpr(path_expr)) = self.expr() {
+ if let Some(path) = path_expr.path() {
+ if let Some(name_ref) = path.as_single_name_ref() {
+ path_expr.syntax().detach();
+ let children = vec![
+ name_ref.syntax().clone().into(),
+ ast::make::token(T![:]).into(),
+ ast::make::tokens::single_space().into(),
+ expr.syntax().clone().into(),
+ ];
+ ted::insert_all_raw(Position::last_child_of(self.syntax()), children);
+ }
+ }
+ }
+ }
+}
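
A sketch of the shorthand conversion described above (not part of the patch; `make::expr_unit` stands in for an arbitrary new initializer):

use syntax::{ast, ast::make, AstNode, SourceFile};

#[test]
fn replace_expr_example() {
    let field = SourceFile::parse("fn f() { S { x } }")
        .tree()
        .syntax()
        .descendants()
        .find_map(ast::RecordExprField::cast)
        .unwrap()
        .clone_for_update();
    // `x` is a shorthand; replacing its expression expands it to `x: ()`.
    field.replace_expr(make::expr_unit().clone_for_update());
    assert_eq!(field.to_string(), "x: ()");
}
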
+
+impl ast::RecordPatFieldList {
+ pub fn add_field(&self, field: ast::RecordPatField) {
+ let is_multiline = self.syntax().text().contains_char('\n');
+ let whitespace = if is_multiline {
+ let indent = IndentLevel::from_node(self.syntax()) + 1;
+ make::tokens::whitespace(&format!("\n{}", indent))
+ } else {
+ make::tokens::single_space()
+ };
+
+ if is_multiline {
+ normalize_ws_between_braces(self.syntax());
+ }
+
+ let position = match self.fields().last() {
+ Some(last_field) => {
+ let comma = match last_field
+ .syntax()
+ .siblings_with_tokens(Direction::Next)
+ .filter_map(|it| it.into_token())
+ .find(|it| it.kind() == T![,])
+ {
+ Some(it) => it,
+ None => {
+ let comma = ast::make::token(T![,]);
+ ted::insert(Position::after(last_field.syntax()), &comma);
+ comma
+ }
+ };
+ Position::after(comma)
+ }
+ None => match self.l_curly_token() {
+ Some(it) => Position::after(it),
+ None => Position::last_child_of(self.syntax()),
+ },
+ };
+
+ ted::insert_all(position, vec![whitespace.into(), field.syntax().clone().into()]);
+ if is_multiline {
+ ted::insert(Position::after(field.syntax()), ast::make::token(T![,]));
+ }
+ }
+}
+impl ast::StmtList {
+ pub fn push_front(&self, statement: ast::Stmt) {
+ ted::insert(Position::after(self.l_curly_token().unwrap()), statement.syntax());
+ }
+}
+
+fn normalize_ws_between_braces(node: &SyntaxNode) -> Option<()> {
+ let l = node
+ .children_with_tokens()
+ .filter_map(|it| it.into_token())
+ .find(|it| it.kind() == T!['{'])?;
+ let r = node
+ .children_with_tokens()
+ .filter_map(|it| it.into_token())
+ .find(|it| it.kind() == T!['}'])?;
+
+ let indent = IndentLevel::from_node(node);
+
+ match l.next_sibling_or_token() {
+ Some(ws) if ws.kind() == SyntaxKind::WHITESPACE => {
+ if ws.next_sibling_or_token()?.into_token()? == r {
+ ted::replace(ws, make::tokens::whitespace(&format!("\n{}", indent)));
+ }
+ }
+ Some(ws) if ws.kind() == T!['}'] => {
+ ted::insert(Position::after(l), make::tokens::whitespace(&format!("\n{}", indent)));
+ }
+ _ => (),
+ }
+ Some(())
+}
+
+pub trait Indent: AstNode + Clone + Sized {
+ fn indent_level(&self) -> IndentLevel {
+ IndentLevel::from_node(self.syntax())
+ }
+ fn indent(&self, by: IndentLevel) {
+ by.increase_indent(self.syntax());
+ }
+ fn dedent(&self, by: IndentLevel) {
+ by.decrease_indent(self.syntax());
+ }
+ fn reindent_to(&self, target_level: IndentLevel) {
+ let current_level = IndentLevel::from_node(self.syntax());
+ self.dedent(current_level);
+ self.indent(target_level);
+ }
+}
+
+impl<N: AstNode + Clone> Indent for N {}
+
+#[cfg(test)]
+mod tests {
+ use std::fmt;
+
+ use crate::SourceFile;
+
+ use super::*;
+
+ fn ast_mut_from_text<N: AstNode>(text: &str) -> N {
+ let parse = SourceFile::parse(text);
+ parse.tree().syntax().descendants().find_map(N::cast).unwrap().clone_for_update()
+ }
+
+ #[test]
+ fn test_create_generic_param_list() {
+ fn check_create_gpl<N: GenericParamsOwnerEdit + fmt::Display>(before: &str, after: &str) {
+ let gpl_owner = ast_mut_from_text::<N>(before);
+ gpl_owner.get_or_create_generic_param_list();
+ assert_eq!(gpl_owner.to_string(), after);
+ }
+
+ check_create_gpl::<ast::Fn>("fn foo", "fn foo<>");
+ check_create_gpl::<ast::Fn>("fn foo() {}", "fn foo<>() {}");
+
+ check_create_gpl::<ast::Impl>("impl", "impl<>");
+ check_create_gpl::<ast::Impl>("impl Struct {}", "impl<> Struct {}");
+ check_create_gpl::<ast::Impl>("impl Trait for Struct {}", "impl<> Trait for Struct {}");
+
+ check_create_gpl::<ast::Trait>("trait Trait<>", "trait Trait<>");
+ check_create_gpl::<ast::Trait>("trait Trait<> {}", "trait Trait<> {}");
+
+ check_create_gpl::<ast::Struct>("struct A", "struct A<>");
+ check_create_gpl::<ast::Struct>("struct A;", "struct A<>;");
+ check_create_gpl::<ast::Struct>("struct A();", "struct A<>();");
+ check_create_gpl::<ast::Struct>("struct A {}", "struct A<> {}");
+
+ check_create_gpl::<ast::Enum>("enum E", "enum E<>");
+ check_create_gpl::<ast::Enum>("enum E {", "enum E<> {");
+ }
+
+ #[test]
+ fn test_increase_indent() {
+ let arm_list = ast_mut_from_text::<ast::Fn>(
+ "fn foo() {
+ ;
+ ;
+}",
+ );
+ arm_list.indent(IndentLevel(2));
+ assert_eq!(
+ arm_list.to_string(),
+ "fn foo() {
+ ;
+ ;
+ }",
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/expr_ext.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/expr_ext.rs
new file mode 100644
index 000000000..db66d08a7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/expr_ext.rs
@@ -0,0 +1,410 @@
+//! Various extension methods to ast Expr Nodes, which are hard to code-generate.
+//!
+//! These methods should only do simple, shallow tasks related to the syntax of the node itself.
+
+use crate::{
+ ast::{
+ self,
+ operators::{ArithOp, BinaryOp, CmpOp, LogicOp, Ordering, RangeOp, UnaryOp},
+ support, AstChildren, AstNode,
+ },
+ AstToken,
+ SyntaxKind::*,
+ SyntaxNode, SyntaxToken, T,
+};
+
+impl ast::HasAttrs for ast::Expr {}
+
+impl ast::Expr {
+ pub fn is_block_like(&self) -> bool {
+ matches!(
+ self,
+ ast::Expr::IfExpr(_)
+ | ast::Expr::LoopExpr(_)
+ | ast::Expr::ForExpr(_)
+ | ast::Expr::WhileExpr(_)
+ | ast::Expr::BlockExpr(_)
+ | ast::Expr::MatchExpr(_)
+ )
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum ElseBranch {
+ Block(ast::BlockExpr),
+ IfExpr(ast::IfExpr),
+}
+
+impl From<ast::BlockExpr> for ElseBranch {
+ fn from(block_expr: ast::BlockExpr) -> Self {
+ Self::Block(block_expr)
+ }
+}
+
+impl From<ast::IfExpr> for ElseBranch {
+ fn from(if_expr: ast::IfExpr) -> Self {
+ Self::IfExpr(if_expr)
+ }
+}
+
+impl ast::IfExpr {
+ pub fn then_branch(&self) -> Option<ast::BlockExpr> {
+ self.children_after_condition().next()
+ }
+
+ pub fn else_branch(&self) -> Option<ElseBranch> {
+ let res = match self.children_after_condition().nth(1) {
+ Some(block) => ElseBranch::Block(block),
+ None => {
+ let elif = self.children_after_condition().next()?;
+ ElseBranch::IfExpr(elif)
+ }
+ };
+ Some(res)
+ }
+
+ fn children_after_condition<N: AstNode>(&self) -> impl Iterator<Item = N> {
+ self.syntax().children().skip(1).filter_map(N::cast)
+ }
+}
+
+#[test]
+fn if_block_condition() {
+ let parse = ast::SourceFile::parse(
+ r#"
+ fn test() {
+ if { true } { "if" }
+ else if { false } { "first elif" }
+ else if true { "second elif" }
+ else if (true) { "third elif" }
+ else { "else" }
+ }
+ "#,
+ );
+ let if_ = parse.tree().syntax().descendants().find_map(ast::IfExpr::cast).unwrap();
+ assert_eq!(if_.then_branch().unwrap().syntax().text(), r#"{ "if" }"#);
+ let elif = match if_.else_branch().unwrap() {
+ ElseBranch::IfExpr(elif) => elif,
+ ElseBranch::Block(_) => panic!("should be `else if`"),
+ };
+ assert_eq!(elif.then_branch().unwrap().syntax().text(), r#"{ "first elif" }"#);
+ let elif = match elif.else_branch().unwrap() {
+ ElseBranch::IfExpr(elif) => elif,
+ ElseBranch::Block(_) => panic!("should be `else if`"),
+ };
+ assert_eq!(elif.then_branch().unwrap().syntax().text(), r#"{ "second elif" }"#);
+ let elif = match elif.else_branch().unwrap() {
+ ElseBranch::IfExpr(elif) => elif,
+ ElseBranch::Block(_) => panic!("should be `else if`"),
+ };
+ assert_eq!(elif.then_branch().unwrap().syntax().text(), r#"{ "third elif" }"#);
+ let else_ = match elif.else_branch().unwrap() {
+ ElseBranch::Block(else_) => else_,
+ ElseBranch::IfExpr(_) => panic!("should be `else`"),
+ };
+ assert_eq!(else_.syntax().text(), r#"{ "else" }"#);
+}
+
+#[test]
+fn if_condition_with_if_inside() {
+ let parse = ast::SourceFile::parse(
+ r#"
+ fn test() {
+ if if true { true } else { false } { "if" }
+ else { "else" }
+ }
+ "#,
+ );
+ let if_ = parse.tree().syntax().descendants().find_map(ast::IfExpr::cast).unwrap();
+ assert_eq!(if_.then_branch().unwrap().syntax().text(), r#"{ "if" }"#);
+ let else_ = match if_.else_branch().unwrap() {
+ ElseBranch::Block(else_) => else_,
+ ElseBranch::IfExpr(_) => panic!("should be `else`"),
+ };
+ assert_eq!(else_.syntax().text(), r#"{ "else" }"#);
+}
+
+impl ast::PrefixExpr {
+ pub fn op_kind(&self) -> Option<UnaryOp> {
+ let res = match self.op_token()?.kind() {
+ T![*] => UnaryOp::Deref,
+ T![!] => UnaryOp::Not,
+ T![-] => UnaryOp::Neg,
+ _ => return None,
+ };
+ Some(res)
+ }
+
+ pub fn op_token(&self) -> Option<SyntaxToken> {
+ self.syntax().first_child_or_token()?.into_token()
+ }
+}
+
+impl ast::BinExpr {
+ pub fn op_details(&self) -> Option<(SyntaxToken, BinaryOp)> {
+ self.syntax().children_with_tokens().filter_map(|it| it.into_token()).find_map(|c| {
+ #[rustfmt::skip]
+ let bin_op = match c.kind() {
+ T![||] => BinaryOp::LogicOp(LogicOp::Or),
+ T![&&] => BinaryOp::LogicOp(LogicOp::And),
+
+ T![==] => BinaryOp::CmpOp(CmpOp::Eq { negated: false }),
+ T![!=] => BinaryOp::CmpOp(CmpOp::Eq { negated: true }),
+ T![<=] => BinaryOp::CmpOp(CmpOp::Ord { ordering: Ordering::Less, strict: false }),
+ T![>=] => BinaryOp::CmpOp(CmpOp::Ord { ordering: Ordering::Greater, strict: false }),
+ T![<] => BinaryOp::CmpOp(CmpOp::Ord { ordering: Ordering::Less, strict: true }),
+ T![>] => BinaryOp::CmpOp(CmpOp::Ord { ordering: Ordering::Greater, strict: true }),
+
+ T![+] => BinaryOp::ArithOp(ArithOp::Add),
+ T![*] => BinaryOp::ArithOp(ArithOp::Mul),
+ T![-] => BinaryOp::ArithOp(ArithOp::Sub),
+ T![/] => BinaryOp::ArithOp(ArithOp::Div),
+ T![%] => BinaryOp::ArithOp(ArithOp::Rem),
+ T![<<] => BinaryOp::ArithOp(ArithOp::Shl),
+ T![>>] => BinaryOp::ArithOp(ArithOp::Shr),
+ T![^] => BinaryOp::ArithOp(ArithOp::BitXor),
+ T![|] => BinaryOp::ArithOp(ArithOp::BitOr),
+ T![&] => BinaryOp::ArithOp(ArithOp::BitAnd),
+
+ T![=] => BinaryOp::Assignment { op: None },
+ T![+=] => BinaryOp::Assignment { op: Some(ArithOp::Add) },
+ T![*=] => BinaryOp::Assignment { op: Some(ArithOp::Mul) },
+ T![-=] => BinaryOp::Assignment { op: Some(ArithOp::Sub) },
+ T![/=] => BinaryOp::Assignment { op: Some(ArithOp::Div) },
+ T![%=] => BinaryOp::Assignment { op: Some(ArithOp::Rem) },
+ T![<<=] => BinaryOp::Assignment { op: Some(ArithOp::Shl) },
+ T![>>=] => BinaryOp::Assignment { op: Some(ArithOp::Shr) },
+ T![^=] => BinaryOp::Assignment { op: Some(ArithOp::BitXor) },
+ T![|=] => BinaryOp::Assignment { op: Some(ArithOp::BitOr) },
+ T![&=] => BinaryOp::Assignment { op: Some(ArithOp::BitAnd) },
+
+ _ => return None,
+ };
+ Some((c, bin_op))
+ })
+ }
+
+ pub fn op_kind(&self) -> Option<BinaryOp> {
+ self.op_details().map(|t| t.1)
+ }
+
+ pub fn op_token(&self) -> Option<SyntaxToken> {
+ self.op_details().map(|t| t.0)
+ }
+
+ pub fn lhs(&self) -> Option<ast::Expr> {
+ support::children(self.syntax()).next()
+ }
+
+ pub fn rhs(&self) -> Option<ast::Expr> {
+ support::children(self.syntax()).nth(1)
+ }
+
+ pub fn sub_exprs(&self) -> (Option<ast::Expr>, Option<ast::Expr>) {
+ let mut children = support::children(self.syntax());
+ let first = children.next();
+ let second = children.next();
+ (first, second)
+ }
+}
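+
+#[test]
+fn bin_expr_op_kind() {
+ // Minimal usage sketch for the operator helpers above, written in the same
+ // style as the other tests in this file.
+ let parse = ast::SourceFile::parse("fn f() { 1 + 2; }");
+ let bin = parse.tree().syntax().descendants().find_map(ast::BinExpr::cast).unwrap();
+ assert!(matches!(bin.op_kind(), Some(BinaryOp::ArithOp(ArithOp::Add))));
+ assert_eq!(bin.op_token().unwrap().text(), "+");
+}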
+
+impl ast::RangeExpr {
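+ // Locates the `..` / `..=` token and returns its index among the children so
+ // that `start`/`end` can split the operands around it.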
+ fn op_details(&self) -> Option<(usize, SyntaxToken, RangeOp)> {
+ self.syntax().children_with_tokens().enumerate().find_map(|(ix, child)| {
+ let token = child.into_token()?;
+ let bin_op = match token.kind() {
+ T![..] => RangeOp::Exclusive,
+ T![..=] => RangeOp::Inclusive,
+ _ => return None,
+ };
+ Some((ix, token, bin_op))
+ })
+ }
+
+ pub fn op_kind(&self) -> Option<RangeOp> {
+ self.op_details().map(|t| t.2)
+ }
+
+ pub fn op_token(&self) -> Option<SyntaxToken> {
+ self.op_details().map(|t| t.1)
+ }
+
+ pub fn start(&self) -> Option<ast::Expr> {
+ let op_ix = self.op_details()?.0;
+ self.syntax()
+ .children_with_tokens()
+ .take(op_ix)
+ .find_map(|it| ast::Expr::cast(it.into_node()?))
+ }
+
+ pub fn end(&self) -> Option<ast::Expr> {
+ let op_ix = self.op_details()?.0;
+ self.syntax()
+ .children_with_tokens()
+ .skip(op_ix + 1)
+ .find_map(|it| ast::Expr::cast(it.into_node()?))
+ }
+}
+
+impl ast::IndexExpr {
+ pub fn base(&self) -> Option<ast::Expr> {
+ support::children(self.syntax()).next()
+ }
+ pub fn index(&self) -> Option<ast::Expr> {
+ support::children(self.syntax()).nth(1)
+ }
+}
+
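+/// The two shapes of an array expression: a `[initializer; repeat]` repeat
+/// expression or an explicit element list such as `[a, b, c]`.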
+pub enum ArrayExprKind {
+ Repeat { initializer: Option<ast::Expr>, repeat: Option<ast::Expr> },
+ ElementList(AstChildren<ast::Expr>),
+}
+
+impl ast::ArrayExpr {
+ pub fn kind(&self) -> ArrayExprKind {
+ if self.is_repeat() {
+ ArrayExprKind::Repeat {
+ initializer: support::children(self.syntax()).next(),
+ repeat: support::children(self.syntax()).nth(1),
+ }
+ } else {
+ ArrayExprKind::ElementList(support::children(self.syntax()))
+ }
+ }
+
+ fn is_repeat(&self) -> bool {
+ self.semicolon_token().is_some()
+ }
+}
+
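+/// The kind of token backing an `ast::Literal`, as returned by
+/// `ast::Literal::kind`.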
+#[derive(Clone, Debug, PartialEq, Eq, Hash)]
+pub enum LiteralKind {
+ String(ast::String),
+ ByteString(ast::ByteString),
+ IntNumber(ast::IntNumber),
+ FloatNumber(ast::FloatNumber),
+ Char(ast::Char),
+ Byte(ast::Byte),
+ Bool(bool),
+}
+
+impl ast::Literal {
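+ /// The token that makes up the literal itself, skipping attributes and trivia.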
+ pub fn token(&self) -> SyntaxToken {
+ self.syntax()
+ .children_with_tokens()
+ .find(|e| e.kind() != ATTR && !e.kind().is_trivia())
+ .and_then(|e| e.into_token())
+ .unwrap()
+ }
+
+ pub fn kind(&self) -> LiteralKind {
+ let token = self.token();
+
+ if let Some(t) = ast::IntNumber::cast(token.clone()) {
+ return LiteralKind::IntNumber(t);
+ }
+ if let Some(t) = ast::FloatNumber::cast(token.clone()) {
+ return LiteralKind::FloatNumber(t);
+ }
+ if let Some(t) = ast::String::cast(token.clone()) {
+ return LiteralKind::String(t);
+ }
+ if let Some(t) = ast::ByteString::cast(token.clone()) {
+ return LiteralKind::ByteString(t);
+ }
+ if let Some(t) = ast::Char::cast(token.clone()) {
+ return LiteralKind::Char(t);
+ }
+ if let Some(t) = ast::Byte::cast(token.clone()) {
+ return LiteralKind::Byte(t);
+ }
+
+ match token.kind() {
+ T![true] => LiteralKind::Bool(true),
+ T![false] => LiteralKind::Bool(false),
+ _ => unreachable!(),
+ }
+ }
+}
+
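+/// A keyword or label that can prefix a block expression.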
+pub enum BlockModifier {
+ Async(SyntaxToken),
+ Unsafe(SyntaxToken),
+ Try(SyntaxToken),
+ Const(SyntaxToken),
+ Label(ast::Label),
+}
+
+impl ast::BlockExpr {
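+ /// The block's modifier, if any: `async`, `unsafe`, `try`, `const`, or a label.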
+ pub fn modifier(&self) -> Option<BlockModifier> {
+ self.async_token()
+ .map(BlockModifier::Async)
+ .or_else(|| self.unsafe_token().map(BlockModifier::Unsafe))
+ .or_else(|| self.try_token().map(BlockModifier::Try))
+ .or_else(|| self.const_token().map(BlockModifier::Const))
+ .or_else(|| self.label().map(BlockModifier::Label))
+ }
+ /// Returns `false` if the block is an intrinsic part of the syntax and can't
+ /// be replaced with an arbitrary expression.
+ ///
+ /// ```not_rust
+ /// fn foo() { not_stand_alone }
+ /// const FOO: () = { stand_alone };
+ /// ```
+ pub fn is_standalone(&self) -> bool {
+ let parent = match self.syntax().parent() {
+ Some(it) => it,
+ None => return true,
+ };
+ !matches!(parent.kind(), FN | IF_EXPR | WHILE_EXPR | LOOP_EXPR)
+ }
+}
+
+#[test]
+fn test_literal_with_attr() {
+ let parse = ast::SourceFile::parse(r#"const _: &str = { #[attr] "Hello" };"#);
+ let lit = parse.tree().syntax().descendants().find_map(ast::Literal::cast).unwrap();
+ assert_eq!(lit.token().text(), r#""Hello""#);
+}
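+
+#[test]
+fn test_literal_kind() {
+ // Illustrative companion to the test above: `kind` classifies the token.
+ let parse = ast::SourceFile::parse("const N: i32 = 92;");
+ let lit = parse.tree().syntax().descendants().find_map(ast::Literal::cast).unwrap();
+ assert!(matches!(lit.kind(), LiteralKind::IntNumber(_)));
+}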
+
+impl ast::RecordExprField {
+ pub fn parent_record_lit(&self) -> ast::RecordExpr {
+ self.syntax().ancestors().find_map(ast::RecordExpr::cast).unwrap()
+ }
+}
+
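+/// A call-like expression: either a plain `CallExpr` or a `MethodCallExpr`,
+/// so that callers can treat both uniformly (e.g. via `ast::HasArgList`).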
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum CallableExpr {
+ Call(ast::CallExpr),
+ MethodCall(ast::MethodCallExpr),
+}
+
+impl ast::HasAttrs for CallableExpr {}
+impl ast::HasArgList for CallableExpr {}
+
+impl AstNode for CallableExpr {
+ fn can_cast(kind: parser::SyntaxKind) -> bool
+ where
+ Self: Sized,
+ {
+ ast::CallExpr::can_cast(kind) || ast::MethodCallExpr::can_cast(kind)
+ }
+
+ fn cast(syntax: SyntaxNode) -> Option<Self>
+ where
+ Self: Sized,
+ {
+ if let Some(it) = ast::CallExpr::cast(syntax.clone()) {
+ Some(Self::Call(it))
+ } else {
+ ast::MethodCallExpr::cast(syntax).map(Self::MethodCall)
+ }
+ }
+
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ Self::Call(it) => it.syntax(),
+ Self::MethodCall(it) => it.syntax(),
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/generated.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/generated.rs
new file mode 100644
index 000000000..843b43cf0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/generated.rs
@@ -0,0 +1,41 @@
+//! This file is actually hand-written, but the submodules are indeed generated.
+#[rustfmt::skip]
+pub(crate) mod nodes;
+#[rustfmt::skip]
+pub(crate) mod tokens;
+
+use crate::{
+ AstNode,
+ SyntaxKind::{self, *},
+ SyntaxNode,
+};
+
+pub(crate) use nodes::*;
+
+// `Stmt` is the only nested enum, so it's easier to just hand-write its `AstNode` impl.
+impl AstNode for Stmt {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ LET_STMT | EXPR_STMT => true,
+ _ => Item::can_cast(kind),
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ LET_STMT => Stmt::LetStmt(LetStmt { syntax }),
+ EXPR_STMT => Stmt::ExprStmt(ExprStmt { syntax }),
+ _ => {
+ let item = Item::cast(syntax)?;
+ Stmt::Item(item)
+ }
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ Stmt::LetStmt(it) => &it.syntax,
+ Stmt::ExprStmt(it) => &it.syntax,
+ Stmt::Item(it) => it.syntax(),
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs
new file mode 100644
index 000000000..63309a155
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs
@@ -0,0 +1,4806 @@
+//! Generated by `sourcegen_ast`, do not edit by hand.
+
+#![allow(non_snake_case)]
+use crate::{
+ ast::{self, support, AstChildren, AstNode},
+ SyntaxKind::{self, *},
+ SyntaxNode, SyntaxToken, T,
+};
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Name {
+ pub(crate) syntax: SyntaxNode,
+}
+impl Name {
+ pub fn ident_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![ident]) }
+ pub fn self_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![self]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct NameRef {
+ pub(crate) syntax: SyntaxNode,
+}
+impl NameRef {
+ pub fn ident_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![ident]) }
+ pub fn self_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![self]) }
+ pub fn super_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![super]) }
+ pub fn crate_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![crate]) }
+ pub fn Self_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![Self]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Lifetime {
+ pub(crate) syntax: SyntaxNode,
+}
+impl Lifetime {
+ pub fn lifetime_ident_token(&self) -> Option<SyntaxToken> {
+ support::token(&self.syntax, T![lifetime_ident])
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Path {
+ pub(crate) syntax: SyntaxNode,
+}
+impl Path {
+ pub fn qualifier(&self) -> Option<Path> { support::child(&self.syntax) }
+ pub fn coloncolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![::]) }
+ pub fn segment(&self) -> Option<PathSegment> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct PathSegment {
+ pub(crate) syntax: SyntaxNode,
+}
+impl PathSegment {
+ pub fn coloncolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![::]) }
+ pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) }
+ pub fn generic_arg_list(&self) -> Option<GenericArgList> { support::child(&self.syntax) }
+ pub fn param_list(&self) -> Option<ParamList> { support::child(&self.syntax) }
+ pub fn ret_type(&self) -> Option<RetType> { support::child(&self.syntax) }
+ pub fn l_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![<]) }
+ pub fn path_type(&self) -> Option<PathType> { support::child(&self.syntax) }
+ pub fn as_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![as]) }
+ pub fn r_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![>]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct GenericArgList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl GenericArgList {
+ pub fn coloncolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![::]) }
+ pub fn l_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![<]) }
+ pub fn generic_args(&self) -> AstChildren<GenericArg> { support::children(&self.syntax) }
+ pub fn r_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![>]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ParamList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ParamList {
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn self_param(&self) -> Option<SelfParam> { support::child(&self.syntax) }
+ pub fn comma_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![,]) }
+ pub fn params(&self) -> AstChildren<Param> { support::children(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+ pub fn pipe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![|]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RetType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl RetType {
+ pub fn thin_arrow_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![->]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct PathType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl PathType {
+ pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TypeArg {
+ pub(crate) syntax: SyntaxNode,
+}
+impl TypeArg {
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AssocTypeArg {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasTypeBounds for AssocTypeArg {}
+impl AssocTypeArg {
+ pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) }
+ pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) }
+ pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+ pub fn const_arg(&self) -> Option<ConstArg> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct LifetimeArg {
+ pub(crate) syntax: SyntaxNode,
+}
+impl LifetimeArg {
+ pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ConstArg {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ConstArg {
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct GenericParamList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl GenericParamList {
+ pub fn l_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![<]) }
+ pub fn generic_params(&self) -> AstChildren<GenericParam> { support::children(&self.syntax) }
+ pub fn r_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![>]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TypeBoundList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl TypeBoundList {
+ pub fn bounds(&self) -> AstChildren<TypeBound> { support::children(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MacroCall {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for MacroCall {}
+impl ast::HasDocComments for MacroCall {}
+impl MacroCall {
+ pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+ pub fn excl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![!]) }
+ pub fn token_tree(&self) -> Option<TokenTree> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Attr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl Attr {
+ pub fn pound_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![#]) }
+ pub fn excl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![!]) }
+ pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) }
+ pub fn meta(&self) -> Option<Meta> { support::child(&self.syntax) }
+ pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TokenTree {
+ pub(crate) syntax: SyntaxNode,
+}
+impl TokenTree {
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+ pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) }
+ pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MacroItems {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasModuleItem for MacroItems {}
+impl MacroItems {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MacroStmts {
+ pub(crate) syntax: SyntaxNode,
+}
+impl MacroStmts {
+ pub fn statements(&self) -> AstChildren<Stmt> { support::children(&self.syntax) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct SourceFile {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for SourceFile {}
+impl ast::HasModuleItem for SourceFile {}
+impl ast::HasDocComments for SourceFile {}
+impl SourceFile {
+ pub fn shebang_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![shebang]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Const {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Const {}
+impl ast::HasName for Const {}
+impl ast::HasVisibility for Const {}
+impl ast::HasDocComments for Const {}
+impl Const {
+ pub fn default_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![default]) }
+ pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+ pub fn underscore_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![_]) }
+ pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+ pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+ pub fn body(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Enum {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Enum {}
+impl ast::HasName for Enum {}
+impl ast::HasVisibility for Enum {}
+impl ast::HasGenericParams for Enum {}
+impl ast::HasDocComments for Enum {}
+impl Enum {
+ pub fn enum_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![enum]) }
+ pub fn variant_list(&self) -> Option<VariantList> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ExternBlock {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for ExternBlock {}
+impl ast::HasDocComments for ExternBlock {}
+impl ExternBlock {
+ pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
+ pub fn abi(&self) -> Option<Abi> { support::child(&self.syntax) }
+ pub fn extern_item_list(&self) -> Option<ExternItemList> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ExternCrate {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for ExternCrate {}
+impl ast::HasVisibility for ExternCrate {}
+impl ast::HasDocComments for ExternCrate {}
+impl ExternCrate {
+ pub fn extern_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![extern]) }
+ pub fn crate_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![crate]) }
+ pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) }
+ pub fn rename(&self) -> Option<Rename> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Fn {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Fn {}
+impl ast::HasName for Fn {}
+impl ast::HasVisibility for Fn {}
+impl ast::HasGenericParams for Fn {}
+impl ast::HasDocComments for Fn {}
+impl Fn {
+ pub fn default_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![default]) }
+ pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+ pub fn async_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![async]) }
+ pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
+ pub fn abi(&self) -> Option<Abi> { support::child(&self.syntax) }
+ pub fn fn_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![fn]) }
+ pub fn param_list(&self) -> Option<ParamList> { support::child(&self.syntax) }
+ pub fn ret_type(&self) -> Option<RetType> { support::child(&self.syntax) }
+ pub fn body(&self) -> Option<BlockExpr> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Impl {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Impl {}
+impl ast::HasVisibility for Impl {}
+impl ast::HasGenericParams for Impl {}
+impl ast::HasDocComments for Impl {}
+impl Impl {
+ pub fn default_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![default]) }
+ pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
+ pub fn impl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![impl]) }
+ pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+ pub fn excl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![!]) }
+ pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) }
+ pub fn assoc_item_list(&self) -> Option<AssocItemList> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MacroRules {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for MacroRules {}
+impl ast::HasName for MacroRules {}
+impl ast::HasVisibility for MacroRules {}
+impl ast::HasDocComments for MacroRules {}
+impl MacroRules {
+ pub fn macro_rules_token(&self) -> Option<SyntaxToken> {
+ support::token(&self.syntax, T![macro_rules])
+ }
+ pub fn excl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![!]) }
+ pub fn token_tree(&self) -> Option<TokenTree> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MacroDef {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for MacroDef {}
+impl ast::HasName for MacroDef {}
+impl ast::HasVisibility for MacroDef {}
+impl ast::HasDocComments for MacroDef {}
+impl MacroDef {
+ pub fn macro_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![macro]) }
+ pub fn args(&self) -> Option<TokenTree> { support::child(&self.syntax) }
+ pub fn body(&self) -> Option<TokenTree> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Module {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Module {}
+impl ast::HasName for Module {}
+impl ast::HasVisibility for Module {}
+impl ast::HasDocComments for Module {}
+impl Module {
+ pub fn mod_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mod]) }
+ pub fn item_list(&self) -> Option<ItemList> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Static {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Static {}
+impl ast::HasName for Static {}
+impl ast::HasVisibility for Static {}
+impl ast::HasDocComments for Static {}
+impl Static {
+ pub fn static_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![static]) }
+ pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) }
+ pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+ pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+ pub fn body(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Struct {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Struct {}
+impl ast::HasName for Struct {}
+impl ast::HasVisibility for Struct {}
+impl ast::HasGenericParams for Struct {}
+impl ast::HasDocComments for Struct {}
+impl Struct {
+ pub fn struct_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![struct]) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+ pub fn field_list(&self) -> Option<FieldList> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Trait {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Trait {}
+impl ast::HasName for Trait {}
+impl ast::HasVisibility for Trait {}
+impl ast::HasGenericParams for Trait {}
+impl ast::HasTypeBounds for Trait {}
+impl ast::HasDocComments for Trait {}
+impl Trait {
+ pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
+ pub fn auto_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![auto]) }
+ pub fn trait_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![trait]) }
+ pub fn assoc_item_list(&self) -> Option<AssocItemList> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TypeAlias {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for TypeAlias {}
+impl ast::HasName for TypeAlias {}
+impl ast::HasVisibility for TypeAlias {}
+impl ast::HasGenericParams for TypeAlias {}
+impl ast::HasTypeBounds for TypeAlias {}
+impl ast::HasDocComments for TypeAlias {}
+impl TypeAlias {
+ pub fn default_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![default]) }
+ pub fn type_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![type]) }
+ pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Union {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Union {}
+impl ast::HasName for Union {}
+impl ast::HasVisibility for Union {}
+impl ast::HasGenericParams for Union {}
+impl ast::HasDocComments for Union {}
+impl Union {
+ pub fn union_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![union]) }
+ pub fn record_field_list(&self) -> Option<RecordFieldList> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Use {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Use {}
+impl ast::HasVisibility for Use {}
+impl ast::HasDocComments for Use {}
+impl Use {
+ pub fn use_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![use]) }
+ pub fn use_tree(&self) -> Option<UseTree> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Visibility {
+ pub(crate) syntax: SyntaxNode,
+}
+impl Visibility {
+ pub fn pub_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![pub]) }
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn in_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![in]) }
+ pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ItemList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for ItemList {}
+impl ast::HasModuleItem for ItemList {}
+impl ItemList {
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Rename {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasName for Rename {}
+impl Rename {
+ pub fn as_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![as]) }
+ pub fn underscore_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![_]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct UseTree {
+ pub(crate) syntax: SyntaxNode,
+}
+impl UseTree {
+ pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+ pub fn coloncolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![::]) }
+ pub fn star_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![*]) }
+ pub fn use_tree_list(&self) -> Option<UseTreeList> { support::child(&self.syntax) }
+ pub fn rename(&self) -> Option<Rename> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct UseTreeList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl UseTreeList {
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn use_trees(&self) -> AstChildren<UseTree> { support::children(&self.syntax) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Abi {
+ pub(crate) syntax: SyntaxNode,
+}
+impl Abi {
+ pub fn extern_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![extern]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct WhereClause {
+ pub(crate) syntax: SyntaxNode,
+}
+impl WhereClause {
+ pub fn where_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![where]) }
+ pub fn predicates(&self) -> AstChildren<WherePred> { support::children(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct BlockExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for BlockExpr {}
+impl BlockExpr {
+ pub fn label(&self) -> Option<Label> { support::child(&self.syntax) }
+ pub fn try_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![try]) }
+ pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
+ pub fn async_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![async]) }
+ pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+ pub fn stmt_list(&self) -> Option<StmtList> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct SelfParam {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for SelfParam {}
+impl ast::HasName for SelfParam {}
+impl SelfParam {
+ pub fn amp_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![&]) }
+ pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) }
+ pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) }
+ pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Param {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Param {}
+impl Param {
+ pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+ pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+ pub fn dotdotdot_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![...]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RecordFieldList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl RecordFieldList {
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn fields(&self) -> AstChildren<RecordField> { support::children(&self.syntax) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TupleFieldList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl TupleFieldList {
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn fields(&self) -> AstChildren<TupleField> { support::children(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RecordField {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for RecordField {}
+impl ast::HasName for RecordField {}
+impl ast::HasVisibility for RecordField {}
+impl ast::HasDocComments for RecordField {}
+impl RecordField {
+ pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TupleField {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for TupleField {}
+impl ast::HasVisibility for TupleField {}
+impl ast::HasDocComments for TupleField {}
+impl TupleField {
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct VariantList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl VariantList {
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn variants(&self) -> AstChildren<Variant> { support::children(&self.syntax) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Variant {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Variant {}
+impl ast::HasName for Variant {}
+impl ast::HasVisibility for Variant {}
+impl ast::HasDocComments for Variant {}
+impl Variant {
+ pub fn field_list(&self) -> Option<FieldList> { support::child(&self.syntax) }
+ pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AssocItemList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for AssocItemList {}
+impl AssocItemList {
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn assoc_items(&self) -> AstChildren<AssocItem> { support::children(&self.syntax) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ExternItemList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for ExternItemList {}
+impl ExternItemList {
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn extern_items(&self) -> AstChildren<ExternItem> { support::children(&self.syntax) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ConstParam {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for ConstParam {}
+impl ast::HasName for ConstParam {}
+impl ConstParam {
+ pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+ pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+ pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+ pub fn default_val(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct LifetimeParam {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for LifetimeParam {}
+impl ast::HasTypeBounds for LifetimeParam {}
+impl LifetimeParam {
+ pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TypeParam {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for TypeParam {}
+impl ast::HasName for TypeParam {}
+impl ast::HasTypeBounds for TypeParam {}
+impl TypeParam {
+ pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+ pub fn default_type(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct WherePred {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasTypeBounds for WherePred {}
+impl WherePred {
+ pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) }
+ pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) }
+ pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Meta {
+ pub(crate) syntax: SyntaxNode,
+}
+impl Meta {
+ pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+ pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn token_tree(&self) -> Option<TokenTree> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ExprStmt {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ExprStmt {
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct LetStmt {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for LetStmt {}
+impl LetStmt {
+ pub fn let_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![let]) }
+ pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+ pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+ pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+ pub fn initializer(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn let_else(&self) -> Option<LetElse> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct LetElse {
+ pub(crate) syntax: SyntaxNode,
+}
+impl LetElse {
+ pub fn else_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![else]) }
+ pub fn block_expr(&self) -> Option<BlockExpr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ArrayExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for ArrayExpr {}
+impl ArrayExpr {
+ pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) }
+ pub fn exprs(&self) -> AstChildren<Expr> { support::children(&self.syntax) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+ pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AwaitExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for AwaitExpr {}
+impl AwaitExpr {
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn dot_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![.]) }
+ pub fn await_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![await]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct BinExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for BinExpr {}
+impl BinExpr {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct BoxExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for BoxExpr {}
+impl BoxExpr {
+ pub fn box_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![box]) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct BreakExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for BreakExpr {}
+impl BreakExpr {
+ pub fn break_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![break]) }
+ pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct CallExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for CallExpr {}
+impl ast::HasArgList for CallExpr {}
+impl CallExpr {
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct CastExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for CastExpr {}
+impl CastExpr {
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn as_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![as]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ClosureExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for ClosureExpr {}
+impl ClosureExpr {
+ pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) }
+ pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) }
+ pub fn static_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![static]) }
+ pub fn async_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![async]) }
+ pub fn move_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![move]) }
+ pub fn param_list(&self) -> Option<ParamList> { support::child(&self.syntax) }
+ pub fn ret_type(&self) -> Option<RetType> { support::child(&self.syntax) }
+ pub fn body(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ContinueExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for ContinueExpr {}
+impl ContinueExpr {
+ pub fn continue_token(&self) -> Option<SyntaxToken> {
+ support::token(&self.syntax, T![continue])
+ }
+ pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct FieldExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for FieldExpr {}
+impl FieldExpr {
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn dot_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![.]) }
+ pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ForExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for ForExpr {}
+impl ForExpr {
+ pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) }
+ pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+ pub fn in_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![in]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct IfExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for IfExpr {}
+impl IfExpr {
+ pub fn if_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![if]) }
+ pub fn else_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![else]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct IndexExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for IndexExpr {}
+impl IndexExpr {
+ pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) }
+ pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Literal {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Literal {}
+impl Literal {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct LoopExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for LoopExpr {}
+impl ast::HasLoopBody for LoopExpr {}
+impl LoopExpr {
+ pub fn loop_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![loop]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MacroExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl MacroExpr {
+ pub fn macro_call(&self) -> Option<MacroCall> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MatchExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for MatchExpr {}
+impl MatchExpr {
+ pub fn match_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![match]) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn match_arm_list(&self) -> Option<MatchArmList> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MethodCallExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for MethodCallExpr {}
+impl ast::HasArgList for MethodCallExpr {}
+impl MethodCallExpr {
+ pub fn receiver(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn dot_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![.]) }
+ pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) }
+ pub fn generic_arg_list(&self) -> Option<GenericArgList> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ParenExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for ParenExpr {}
+impl ParenExpr {
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct PathExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for PathExpr {}
+impl PathExpr {
+ pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct PrefixExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for PrefixExpr {}
+impl PrefixExpr {
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RangeExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for RangeExpr {}
+impl RangeExpr {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RecordExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl RecordExpr {
+ pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+ pub fn record_expr_field_list(&self) -> Option<RecordExprFieldList> {
+ support::child(&self.syntax)
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RefExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for RefExpr {}
+impl RefExpr {
+ pub fn amp_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![&]) }
+ pub fn raw_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![raw]) }
+ pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) }
+ pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ReturnExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for ReturnExpr {}
+impl ReturnExpr {
+ pub fn return_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![return]) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TryExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for TryExpr {}
+impl TryExpr {
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn question_mark_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![?]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TupleExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for TupleExpr {}
+impl TupleExpr {
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn fields(&self) -> AstChildren<Expr> { support::children(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct WhileExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for WhileExpr {}
+impl WhileExpr {
+ pub fn while_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![while]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct YieldExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for YieldExpr {}
+impl YieldExpr {
+ pub fn yield_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![yield]) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct LetExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for LetExpr {}
+impl LetExpr {
+ pub fn let_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![let]) }
+ pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+ pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct UnderscoreExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for UnderscoreExpr {}
+impl UnderscoreExpr {
+ pub fn underscore_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![_]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct StmtList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for StmtList {}
+impl StmtList {
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn statements(&self) -> AstChildren<Stmt> { support::children(&self.syntax) }
+ pub fn tail_expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Label {
+ pub(crate) syntax: SyntaxNode,
+}
+impl Label {
+ pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) }
+ pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RecordExprFieldList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for RecordExprFieldList {}
+impl RecordExprFieldList {
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn fields(&self) -> AstChildren<RecordExprField> { support::children(&self.syntax) }
+ pub fn dotdot_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![..]) }
+ pub fn spread(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RecordExprField {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for RecordExprField {}
+impl RecordExprField {
+ pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) }
+ pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ArgList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ArgList {
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn args(&self) -> AstChildren<Expr> { support::children(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MatchArmList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for MatchArmList {}
+impl MatchArmList {
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn arms(&self) -> AstChildren<MatchArm> { support::children(&self.syntax) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MatchArm {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for MatchArm {}
+impl MatchArm {
+ pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+ pub fn guard(&self) -> Option<MatchGuard> { support::child(&self.syntax) }
+ pub fn fat_arrow_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=>]) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn comma_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![,]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MatchGuard {
+ pub(crate) syntax: SyntaxNode,
+}
+impl MatchGuard {
+ pub fn if_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![if]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ArrayType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ArrayType {
+ pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct DynTraitType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl DynTraitType {
+ pub fn dyn_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![dyn]) }
+ pub fn type_bound_list(&self) -> Option<TypeBoundList> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct FnPtrType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl FnPtrType {
+ pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+ pub fn async_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![async]) }
+ pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
+ pub fn abi(&self) -> Option<Abi> { support::child(&self.syntax) }
+ pub fn fn_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![fn]) }
+ pub fn param_list(&self) -> Option<ParamList> { support::child(&self.syntax) }
+ pub fn ret_type(&self) -> Option<RetType> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ForType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ForType {
+ pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) }
+ pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ImplTraitType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ImplTraitType {
+ pub fn impl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![impl]) }
+ pub fn type_bound_list(&self) -> Option<TypeBoundList> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct InferType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl InferType {
+ pub fn underscore_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![_]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MacroType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl MacroType {
+ pub fn macro_call(&self) -> Option<MacroCall> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct NeverType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl NeverType {
+ pub fn excl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![!]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ParenType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ParenType {
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct PtrType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl PtrType {
+ pub fn star_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![*]) }
+ pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+ pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RefType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl RefType {
+ pub fn amp_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![&]) }
+ pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) }
+ pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct SliceType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl SliceType {
+ pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+ pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TupleType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl TupleType {
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn fields(&self) -> AstChildren<Type> { support::children(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TypeBound {
+ pub(crate) syntax: SyntaxNode,
+}
+impl TypeBound {
+ pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) }
+ pub fn question_mark_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![?]) }
+ pub fn tilde_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![~]) }
+ pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct IdentPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for IdentPat {}
+impl ast::HasName for IdentPat {}
+impl IdentPat {
+ pub fn ref_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![ref]) }
+ pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) }
+ pub fn at_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![@]) }
+ pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct BoxPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl BoxPat {
+ pub fn box_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![box]) }
+ pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RestPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for RestPat {}
+impl RestPat {
+ pub fn dotdot_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![..]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct LiteralPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl LiteralPat {
+ pub fn literal(&self) -> Option<Literal> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MacroPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl MacroPat {
+ pub fn macro_call(&self) -> Option<MacroCall> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct OrPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl OrPat {
+ pub fn pats(&self) -> AstChildren<Pat> { support::children(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ParenPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ParenPat {
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct PathPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl PathPat {
+ pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct WildcardPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl WildcardPat {
+ pub fn underscore_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![_]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RangePat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl RangePat {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RecordPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl RecordPat {
+ pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+ pub fn record_pat_field_list(&self) -> Option<RecordPatFieldList> {
+ support::child(&self.syntax)
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RefPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl RefPat {
+ pub fn amp_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![&]) }
+ pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) }
+ pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct SlicePat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl SlicePat {
+ pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) }
+ pub fn pats(&self) -> AstChildren<Pat> { support::children(&self.syntax) }
+ pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TuplePat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl TuplePat {
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn fields(&self) -> AstChildren<Pat> { support::children(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TupleStructPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl TupleStructPat {
+ pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn fields(&self) -> AstChildren<Pat> { support::children(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ConstBlockPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ConstBlockPat {
+ pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+ pub fn block_expr(&self) -> Option<BlockExpr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RecordPatFieldList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl RecordPatFieldList {
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn fields(&self) -> AstChildren<RecordPatField> { support::children(&self.syntax) }
+ pub fn rest_pat(&self) -> Option<RestPat> { support::child(&self.syntax) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RecordPatField {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for RecordPatField {}
+impl RecordPatField {
+ pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) }
+ pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+ pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+}
+
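+// The enums below group related node kinds (generic arguments, types, expressions,
+// items, statements, patterns, and so on). Further down in this file each variant
+// gets a `From` conversion into its enum and the enum gets a dispatching `AstNode`
+// impl, so callers can work either with a concrete node or with the grouped enum.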
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum GenericArg {
+ TypeArg(TypeArg),
+ AssocTypeArg(AssocTypeArg),
+ LifetimeArg(LifetimeArg),
+ ConstArg(ConstArg),
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum Type {
+ ArrayType(ArrayType),
+ DynTraitType(DynTraitType),
+ FnPtrType(FnPtrType),
+ ForType(ForType),
+ ImplTraitType(ImplTraitType),
+ InferType(InferType),
+ MacroType(MacroType),
+ NeverType(NeverType),
+ ParenType(ParenType),
+ PathType(PathType),
+ PtrType(PtrType),
+ RefType(RefType),
+ SliceType(SliceType),
+ TupleType(TupleType),
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum Expr {
+ ArrayExpr(ArrayExpr),
+ AwaitExpr(AwaitExpr),
+ BinExpr(BinExpr),
+ BlockExpr(BlockExpr),
+ BoxExpr(BoxExpr),
+ BreakExpr(BreakExpr),
+ CallExpr(CallExpr),
+ CastExpr(CastExpr),
+ ClosureExpr(ClosureExpr),
+ ContinueExpr(ContinueExpr),
+ FieldExpr(FieldExpr),
+ ForExpr(ForExpr),
+ IfExpr(IfExpr),
+ IndexExpr(IndexExpr),
+ Literal(Literal),
+ LoopExpr(LoopExpr),
+ MacroExpr(MacroExpr),
+ MacroStmts(MacroStmts),
+ MatchExpr(MatchExpr),
+ MethodCallExpr(MethodCallExpr),
+ ParenExpr(ParenExpr),
+ PathExpr(PathExpr),
+ PrefixExpr(PrefixExpr),
+ RangeExpr(RangeExpr),
+ RecordExpr(RecordExpr),
+ RefExpr(RefExpr),
+ ReturnExpr(ReturnExpr),
+ TryExpr(TryExpr),
+ TupleExpr(TupleExpr),
+ WhileExpr(WhileExpr),
+ YieldExpr(YieldExpr),
+ LetExpr(LetExpr),
+ UnderscoreExpr(UnderscoreExpr),
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum Item {
+ Const(Const),
+ Enum(Enum),
+ ExternBlock(ExternBlock),
+ ExternCrate(ExternCrate),
+ Fn(Fn),
+ Impl(Impl),
+ MacroCall(MacroCall),
+ MacroRules(MacroRules),
+ MacroDef(MacroDef),
+ Module(Module),
+ Static(Static),
+ Struct(Struct),
+ Trait(Trait),
+ TypeAlias(TypeAlias),
+ Union(Union),
+ Use(Use),
+}
+impl ast::HasAttrs for Item {}
+impl ast::HasDocComments for Item {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum Stmt {
+ ExprStmt(ExprStmt),
+ Item(Item),
+ LetStmt(LetStmt),
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum Pat {
+ IdentPat(IdentPat),
+ BoxPat(BoxPat),
+ RestPat(RestPat),
+ LiteralPat(LiteralPat),
+ MacroPat(MacroPat),
+ OrPat(OrPat),
+ ParenPat(ParenPat),
+ PathPat(PathPat),
+ WildcardPat(WildcardPat),
+ RangePat(RangePat),
+ RecordPat(RecordPat),
+ RefPat(RefPat),
+ SlicePat(SlicePat),
+ TuplePat(TuplePat),
+ TupleStructPat(TupleStructPat),
+ ConstBlockPat(ConstBlockPat),
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum FieldList {
+ RecordFieldList(RecordFieldList),
+ TupleFieldList(TupleFieldList),
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum Adt {
+ Enum(Enum),
+ Struct(Struct),
+ Union(Union),
+}
+impl ast::HasAttrs for Adt {}
+impl ast::HasDocComments for Adt {}
+impl ast::HasGenericParams for Adt {}
+impl ast::HasName for Adt {}
+impl ast::HasVisibility for Adt {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum AssocItem {
+ Const(Const),
+ Fn(Fn),
+ MacroCall(MacroCall),
+ TypeAlias(TypeAlias),
+}
+impl ast::HasAttrs for AssocItem {}
+impl ast::HasDocComments for AssocItem {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum ExternItem {
+ Fn(Fn),
+ MacroCall(MacroCall),
+ Static(Static),
+ TypeAlias(TypeAlias),
+}
+impl ast::HasAttrs for ExternItem {}
+impl ast::HasDocComments for ExternItem {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum GenericParam {
+ ConstParam(ConstParam),
+ LifetimeParam(LifetimeParam),
+ TypeParam(TypeParam),
+}
+impl ast::HasAttrs for GenericParam {}
+
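+// The `AnyHas*` wrappers below are kind-erased helpers: each one holds a bare
+// `SyntaxNode` and implements a single `ast::Has*` trait, so code that only needs
+// that trait's accessors can use it without naming a concrete node type.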
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AnyHasArgList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasArgList for AnyHasArgList {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AnyHasAttrs {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for AnyHasAttrs {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AnyHasDocComments {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasDocComments for AnyHasDocComments {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AnyHasGenericParams {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasGenericParams for AnyHasGenericParams {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AnyHasLoopBody {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasLoopBody for AnyHasLoopBody {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AnyHasModuleItem {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasModuleItem for AnyHasModuleItem {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AnyHasName {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasName for AnyHasName {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AnyHasTypeBounds {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasTypeBounds for AnyHasTypeBounds {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AnyHasVisibility {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasVisibility for AnyHasVisibility {}
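+// A minimal, hypothetical usage sketch of the `AstNode` impls that follow (the
+// function name and the idea of collecting names are illustrative, not part of
+// this API): parse a file, walk the untyped tree, and recover typed wrappers
+// via `cast`.
+//
+// fn collect_fn_names(text: &str) -> Vec<String> {
+//     let file = SourceFile::parse(text).tree();
+//     file.syntax()
+//         .descendants()
+//         // `Fn::cast` succeeds only for nodes whose kind is `FN`.
+//         .filter_map(Fn::cast)
+//         // `.name()` comes from `ast::HasName` (bring the trait into scope).
+//         .filter_map(|f| f.name())
+//         .map(|name| name.text().to_string())
+//         .collect()
+// }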
+impl AstNode for Name {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == NAME }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for NameRef {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == NAME_REF }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Lifetime {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == LIFETIME }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Path {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == PATH }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for PathSegment {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == PATH_SEGMENT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for GenericArgList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == GENERIC_ARG_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ParamList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == PARAM_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RetType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RET_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for PathType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == PATH_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TypeArg {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TYPE_ARG }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for AssocTypeArg {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == ASSOC_TYPE_ARG }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for LifetimeArg {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == LIFETIME_ARG }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ConstArg {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == CONST_ARG }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for GenericParamList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == GENERIC_PARAM_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TypeBoundList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TYPE_BOUND_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MacroCall {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_CALL }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Attr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == ATTR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TokenTree {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TOKEN_TREE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MacroItems {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_ITEMS }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MacroStmts {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_STMTS }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for SourceFile {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == SOURCE_FILE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Const {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == CONST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Enum {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == ENUM }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ExternBlock {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == EXTERN_BLOCK }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ExternCrate {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == EXTERN_CRATE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Fn {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == FN }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Impl {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == IMPL }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MacroRules {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_RULES }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MacroDef {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_DEF }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Module {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MODULE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Static {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == STATIC }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Struct {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == STRUCT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Trait {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TRAIT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TypeAlias {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TYPE_ALIAS }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Union {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == UNION }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Use {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == USE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Visibility {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == VISIBILITY }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ItemList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == ITEM_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Rename {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RENAME }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for UseTree {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == USE_TREE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for UseTreeList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == USE_TREE_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Abi {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == ABI }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for WhereClause {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == WHERE_CLAUSE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for BlockExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == BLOCK_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for SelfParam {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == SELF_PARAM }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Param {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == PARAM }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RecordFieldList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_FIELD_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TupleFieldList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TUPLE_FIELD_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RecordField {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_FIELD }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TupleField {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TUPLE_FIELD }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for VariantList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == VARIANT_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Variant {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == VARIANT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for AssocItemList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == ASSOC_ITEM_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ExternItemList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == EXTERN_ITEM_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ConstParam {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == CONST_PARAM }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for LifetimeParam {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == LIFETIME_PARAM }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TypeParam {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TYPE_PARAM }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for WherePred {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == WHERE_PRED }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Meta {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == META }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ExprStmt {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == EXPR_STMT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for LetStmt {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == LET_STMT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for LetElse {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == LET_ELSE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ArrayExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == ARRAY_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for AwaitExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == AWAIT_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for BinExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == BIN_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for BoxExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == BOX_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for BreakExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == BREAK_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for CallExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == CALL_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for CastExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == CAST_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ClosureExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == CLOSURE_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ContinueExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == CONTINUE_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for FieldExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == FIELD_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ForExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == FOR_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for IfExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == IF_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for IndexExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == INDEX_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Literal {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == LITERAL }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for LoopExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == LOOP_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MacroExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MatchExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MATCH_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MethodCallExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == METHOD_CALL_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ParenExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == PAREN_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for PathExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == PATH_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for PrefixExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == PREFIX_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RangeExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RANGE_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RecordExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RefExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == REF_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ReturnExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RETURN_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TryExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TRY_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TupleExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TUPLE_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for WhileExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == WHILE_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for YieldExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == YIELD_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for LetExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == LET_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for UnderscoreExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == UNDERSCORE_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for StmtList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == STMT_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Label {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == LABEL }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RecordExprFieldList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_EXPR_FIELD_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RecordExprField {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_EXPR_FIELD }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ArgList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == ARG_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MatchArmList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MATCH_ARM_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MatchArm {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MATCH_ARM }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MatchGuard {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MATCH_GUARD }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ArrayType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == ARRAY_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for DynTraitType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == DYN_TRAIT_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for FnPtrType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == FN_PTR_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ForType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == FOR_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ImplTraitType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == IMPL_TRAIT_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for InferType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == INFER_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MacroType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for NeverType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == NEVER_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ParenType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == PAREN_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for PtrType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == PTR_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RefType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == REF_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for SliceType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == SLICE_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TupleType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TUPLE_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TypeBound {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TYPE_BOUND }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for IdentPat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == IDENT_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for BoxPat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == BOX_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RestPat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == REST_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for LiteralPat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == LITERAL_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MacroPat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for OrPat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == OR_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ParenPat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == PAREN_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for PathPat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == PATH_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for WildcardPat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == WILDCARD_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RangePat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RANGE_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RecordPat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RefPat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == REF_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for SlicePat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == SLICE_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TuplePat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TUPLE_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TupleStructPat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TUPLE_STRUCT_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ConstBlockPat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == CONST_BLOCK_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RecordPatFieldList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_PAT_FIELD_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RecordPatField {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_PAT_FIELD }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl From<TypeArg> for GenericArg {
+ fn from(node: TypeArg) -> GenericArg { GenericArg::TypeArg(node) }
+}
+impl From<AssocTypeArg> for GenericArg {
+ fn from(node: AssocTypeArg) -> GenericArg { GenericArg::AssocTypeArg(node) }
+}
+impl From<LifetimeArg> for GenericArg {
+ fn from(node: LifetimeArg) -> GenericArg { GenericArg::LifetimeArg(node) }
+}
+impl From<ConstArg> for GenericArg {
+ fn from(node: ConstArg) -> GenericArg { GenericArg::ConstArg(node) }
+}
+impl AstNode for GenericArg {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ TYPE_ARG | ASSOC_TYPE_ARG | LIFETIME_ARG | CONST_ARG => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ TYPE_ARG => GenericArg::TypeArg(TypeArg { syntax }),
+ ASSOC_TYPE_ARG => GenericArg::AssocTypeArg(AssocTypeArg { syntax }),
+ LIFETIME_ARG => GenericArg::LifetimeArg(LifetimeArg { syntax }),
+ CONST_ARG => GenericArg::ConstArg(ConstArg { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ GenericArg::TypeArg(it) => &it.syntax,
+ GenericArg::AssocTypeArg(it) => &it.syntax,
+ GenericArg::LifetimeArg(it) => &it.syntax,
+ GenericArg::ConstArg(it) => &it.syntax,
+ }
+ }
+}
+impl From<ArrayType> for Type {
+ fn from(node: ArrayType) -> Type { Type::ArrayType(node) }
+}
+impl From<DynTraitType> for Type {
+ fn from(node: DynTraitType) -> Type { Type::DynTraitType(node) }
+}
+impl From<FnPtrType> for Type {
+ fn from(node: FnPtrType) -> Type { Type::FnPtrType(node) }
+}
+impl From<ForType> for Type {
+ fn from(node: ForType) -> Type { Type::ForType(node) }
+}
+impl From<ImplTraitType> for Type {
+ fn from(node: ImplTraitType) -> Type { Type::ImplTraitType(node) }
+}
+impl From<InferType> for Type {
+ fn from(node: InferType) -> Type { Type::InferType(node) }
+}
+impl From<MacroType> for Type {
+ fn from(node: MacroType) -> Type { Type::MacroType(node) }
+}
+impl From<NeverType> for Type {
+ fn from(node: NeverType) -> Type { Type::NeverType(node) }
+}
+impl From<ParenType> for Type {
+ fn from(node: ParenType) -> Type { Type::ParenType(node) }
+}
+impl From<PathType> for Type {
+ fn from(node: PathType) -> Type { Type::PathType(node) }
+}
+impl From<PtrType> for Type {
+ fn from(node: PtrType) -> Type { Type::PtrType(node) }
+}
+impl From<RefType> for Type {
+ fn from(node: RefType) -> Type { Type::RefType(node) }
+}
+impl From<SliceType> for Type {
+ fn from(node: SliceType) -> Type { Type::SliceType(node) }
+}
+impl From<TupleType> for Type {
+ fn from(node: TupleType) -> Type { Type::TupleType(node) }
+}
+impl AstNode for Type {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ ARRAY_TYPE | DYN_TRAIT_TYPE | FN_PTR_TYPE | FOR_TYPE | IMPL_TRAIT_TYPE | INFER_TYPE
+ | MACRO_TYPE | NEVER_TYPE | PAREN_TYPE | PATH_TYPE | PTR_TYPE | REF_TYPE
+ | SLICE_TYPE | TUPLE_TYPE => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ ARRAY_TYPE => Type::ArrayType(ArrayType { syntax }),
+ DYN_TRAIT_TYPE => Type::DynTraitType(DynTraitType { syntax }),
+ FN_PTR_TYPE => Type::FnPtrType(FnPtrType { syntax }),
+ FOR_TYPE => Type::ForType(ForType { syntax }),
+ IMPL_TRAIT_TYPE => Type::ImplTraitType(ImplTraitType { syntax }),
+ INFER_TYPE => Type::InferType(InferType { syntax }),
+ MACRO_TYPE => Type::MacroType(MacroType { syntax }),
+ NEVER_TYPE => Type::NeverType(NeverType { syntax }),
+ PAREN_TYPE => Type::ParenType(ParenType { syntax }),
+ PATH_TYPE => Type::PathType(PathType { syntax }),
+ PTR_TYPE => Type::PtrType(PtrType { syntax }),
+ REF_TYPE => Type::RefType(RefType { syntax }),
+ SLICE_TYPE => Type::SliceType(SliceType { syntax }),
+ TUPLE_TYPE => Type::TupleType(TupleType { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ Type::ArrayType(it) => &it.syntax,
+ Type::DynTraitType(it) => &it.syntax,
+ Type::FnPtrType(it) => &it.syntax,
+ Type::ForType(it) => &it.syntax,
+ Type::ImplTraitType(it) => &it.syntax,
+ Type::InferType(it) => &it.syntax,
+ Type::MacroType(it) => &it.syntax,
+ Type::NeverType(it) => &it.syntax,
+ Type::ParenType(it) => &it.syntax,
+ Type::PathType(it) => &it.syntax,
+ Type::PtrType(it) => &it.syntax,
+ Type::RefType(it) => &it.syntax,
+ Type::SliceType(it) => &it.syntax,
+ Type::TupleType(it) => &it.syntax,
+ }
+ }
+}
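+// Hypothetical sketch of how the enum impls above are consumed: `Type::cast`
+// dispatches an untyped node to the matching variant, and the `From` impls let a
+// concrete node widen into `Type`. `describe` is an illustrative helper only.
+//
+// fn describe(node: SyntaxNode) -> &'static str {
+//     match Type::cast(node) {
+//         Some(Type::RefType(_)) => "reference type",
+//         Some(Type::SliceType(_)) => "slice type",
+//         Some(_) => "some other type node",
+//         None => "not a type node",
+//     }
+// }
+//
+// // Widening with `From`/`Into`:
+// // let ty: Type = ref_type.into();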
+impl From<ArrayExpr> for Expr {
+ fn from(node: ArrayExpr) -> Expr { Expr::ArrayExpr(node) }
+}
+impl From<AwaitExpr> for Expr {
+ fn from(node: AwaitExpr) -> Expr { Expr::AwaitExpr(node) }
+}
+impl From<BinExpr> for Expr {
+ fn from(node: BinExpr) -> Expr { Expr::BinExpr(node) }
+}
+impl From<BlockExpr> for Expr {
+ fn from(node: BlockExpr) -> Expr { Expr::BlockExpr(node) }
+}
+impl From<BoxExpr> for Expr {
+ fn from(node: BoxExpr) -> Expr { Expr::BoxExpr(node) }
+}
+impl From<BreakExpr> for Expr {
+ fn from(node: BreakExpr) -> Expr { Expr::BreakExpr(node) }
+}
+impl From<CallExpr> for Expr {
+ fn from(node: CallExpr) -> Expr { Expr::CallExpr(node) }
+}
+impl From<CastExpr> for Expr {
+ fn from(node: CastExpr) -> Expr { Expr::CastExpr(node) }
+}
+impl From<ClosureExpr> for Expr {
+ fn from(node: ClosureExpr) -> Expr { Expr::ClosureExpr(node) }
+}
+impl From<ContinueExpr> for Expr {
+ fn from(node: ContinueExpr) -> Expr { Expr::ContinueExpr(node) }
+}
+impl From<FieldExpr> for Expr {
+ fn from(node: FieldExpr) -> Expr { Expr::FieldExpr(node) }
+}
+impl From<ForExpr> for Expr {
+ fn from(node: ForExpr) -> Expr { Expr::ForExpr(node) }
+}
+impl From<IfExpr> for Expr {
+ fn from(node: IfExpr) -> Expr { Expr::IfExpr(node) }
+}
+impl From<IndexExpr> for Expr {
+ fn from(node: IndexExpr) -> Expr { Expr::IndexExpr(node) }
+}
+impl From<Literal> for Expr {
+ fn from(node: Literal) -> Expr { Expr::Literal(node) }
+}
+impl From<LoopExpr> for Expr {
+ fn from(node: LoopExpr) -> Expr { Expr::LoopExpr(node) }
+}
+impl From<MacroExpr> for Expr {
+ fn from(node: MacroExpr) -> Expr { Expr::MacroExpr(node) }
+}
+impl From<MacroStmts> for Expr {
+ fn from(node: MacroStmts) -> Expr { Expr::MacroStmts(node) }
+}
+impl From<MatchExpr> for Expr {
+ fn from(node: MatchExpr) -> Expr { Expr::MatchExpr(node) }
+}
+impl From<MethodCallExpr> for Expr {
+ fn from(node: MethodCallExpr) -> Expr { Expr::MethodCallExpr(node) }
+}
+impl From<ParenExpr> for Expr {
+ fn from(node: ParenExpr) -> Expr { Expr::ParenExpr(node) }
+}
+impl From<PathExpr> for Expr {
+ fn from(node: PathExpr) -> Expr { Expr::PathExpr(node) }
+}
+impl From<PrefixExpr> for Expr {
+ fn from(node: PrefixExpr) -> Expr { Expr::PrefixExpr(node) }
+}
+impl From<RangeExpr> for Expr {
+ fn from(node: RangeExpr) -> Expr { Expr::RangeExpr(node) }
+}
+impl From<RecordExpr> for Expr {
+ fn from(node: RecordExpr) -> Expr { Expr::RecordExpr(node) }
+}
+impl From<RefExpr> for Expr {
+ fn from(node: RefExpr) -> Expr { Expr::RefExpr(node) }
+}
+impl From<ReturnExpr> for Expr {
+ fn from(node: ReturnExpr) -> Expr { Expr::ReturnExpr(node) }
+}
+impl From<TryExpr> for Expr {
+ fn from(node: TryExpr) -> Expr { Expr::TryExpr(node) }
+}
+impl From<TupleExpr> for Expr {
+ fn from(node: TupleExpr) -> Expr { Expr::TupleExpr(node) }
+}
+impl From<WhileExpr> for Expr {
+ fn from(node: WhileExpr) -> Expr { Expr::WhileExpr(node) }
+}
+impl From<YieldExpr> for Expr {
+ fn from(node: YieldExpr) -> Expr { Expr::YieldExpr(node) }
+}
+impl From<LetExpr> for Expr {
+ fn from(node: LetExpr) -> Expr { Expr::LetExpr(node) }
+}
+impl From<UnderscoreExpr> for Expr {
+ fn from(node: UnderscoreExpr) -> Expr { Expr::UnderscoreExpr(node) }
+}
+impl AstNode for Expr {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ ARRAY_EXPR | AWAIT_EXPR | BIN_EXPR | BLOCK_EXPR | BOX_EXPR | BREAK_EXPR | CALL_EXPR
+ | CAST_EXPR | CLOSURE_EXPR | CONTINUE_EXPR | FIELD_EXPR | FOR_EXPR | IF_EXPR
+ | INDEX_EXPR | LITERAL | LOOP_EXPR | MACRO_EXPR | MACRO_STMTS | MATCH_EXPR
+ | METHOD_CALL_EXPR | PAREN_EXPR | PATH_EXPR | PREFIX_EXPR | RANGE_EXPR
+ | RECORD_EXPR | REF_EXPR | RETURN_EXPR | TRY_EXPR | TUPLE_EXPR | WHILE_EXPR
+ | YIELD_EXPR | LET_EXPR | UNDERSCORE_EXPR => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ ARRAY_EXPR => Expr::ArrayExpr(ArrayExpr { syntax }),
+ AWAIT_EXPR => Expr::AwaitExpr(AwaitExpr { syntax }),
+ BIN_EXPR => Expr::BinExpr(BinExpr { syntax }),
+ BLOCK_EXPR => Expr::BlockExpr(BlockExpr { syntax }),
+ BOX_EXPR => Expr::BoxExpr(BoxExpr { syntax }),
+ BREAK_EXPR => Expr::BreakExpr(BreakExpr { syntax }),
+ CALL_EXPR => Expr::CallExpr(CallExpr { syntax }),
+ CAST_EXPR => Expr::CastExpr(CastExpr { syntax }),
+ CLOSURE_EXPR => Expr::ClosureExpr(ClosureExpr { syntax }),
+ CONTINUE_EXPR => Expr::ContinueExpr(ContinueExpr { syntax }),
+ FIELD_EXPR => Expr::FieldExpr(FieldExpr { syntax }),
+ FOR_EXPR => Expr::ForExpr(ForExpr { syntax }),
+ IF_EXPR => Expr::IfExpr(IfExpr { syntax }),
+ INDEX_EXPR => Expr::IndexExpr(IndexExpr { syntax }),
+ LITERAL => Expr::Literal(Literal { syntax }),
+ LOOP_EXPR => Expr::LoopExpr(LoopExpr { syntax }),
+ MACRO_EXPR => Expr::MacroExpr(MacroExpr { syntax }),
+ MACRO_STMTS => Expr::MacroStmts(MacroStmts { syntax }),
+ MATCH_EXPR => Expr::MatchExpr(MatchExpr { syntax }),
+ METHOD_CALL_EXPR => Expr::MethodCallExpr(MethodCallExpr { syntax }),
+ PAREN_EXPR => Expr::ParenExpr(ParenExpr { syntax }),
+ PATH_EXPR => Expr::PathExpr(PathExpr { syntax }),
+ PREFIX_EXPR => Expr::PrefixExpr(PrefixExpr { syntax }),
+ RANGE_EXPR => Expr::RangeExpr(RangeExpr { syntax }),
+ RECORD_EXPR => Expr::RecordExpr(RecordExpr { syntax }),
+ REF_EXPR => Expr::RefExpr(RefExpr { syntax }),
+ RETURN_EXPR => Expr::ReturnExpr(ReturnExpr { syntax }),
+ TRY_EXPR => Expr::TryExpr(TryExpr { syntax }),
+ TUPLE_EXPR => Expr::TupleExpr(TupleExpr { syntax }),
+ WHILE_EXPR => Expr::WhileExpr(WhileExpr { syntax }),
+ YIELD_EXPR => Expr::YieldExpr(YieldExpr { syntax }),
+ LET_EXPR => Expr::LetExpr(LetExpr { syntax }),
+ UNDERSCORE_EXPR => Expr::UnderscoreExpr(UnderscoreExpr { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ Expr::ArrayExpr(it) => &it.syntax,
+ Expr::AwaitExpr(it) => &it.syntax,
+ Expr::BinExpr(it) => &it.syntax,
+ Expr::BlockExpr(it) => &it.syntax,
+ Expr::BoxExpr(it) => &it.syntax,
+ Expr::BreakExpr(it) => &it.syntax,
+ Expr::CallExpr(it) => &it.syntax,
+ Expr::CastExpr(it) => &it.syntax,
+ Expr::ClosureExpr(it) => &it.syntax,
+ Expr::ContinueExpr(it) => &it.syntax,
+ Expr::FieldExpr(it) => &it.syntax,
+ Expr::ForExpr(it) => &it.syntax,
+ Expr::IfExpr(it) => &it.syntax,
+ Expr::IndexExpr(it) => &it.syntax,
+ Expr::Literal(it) => &it.syntax,
+ Expr::LoopExpr(it) => &it.syntax,
+ Expr::MacroExpr(it) => &it.syntax,
+ Expr::MacroStmts(it) => &it.syntax,
+ Expr::MatchExpr(it) => &it.syntax,
+ Expr::MethodCallExpr(it) => &it.syntax,
+ Expr::ParenExpr(it) => &it.syntax,
+ Expr::PathExpr(it) => &it.syntax,
+ Expr::PrefixExpr(it) => &it.syntax,
+ Expr::RangeExpr(it) => &it.syntax,
+ Expr::RecordExpr(it) => &it.syntax,
+ Expr::RefExpr(it) => &it.syntax,
+ Expr::ReturnExpr(it) => &it.syntax,
+ Expr::TryExpr(it) => &it.syntax,
+ Expr::TupleExpr(it) => &it.syntax,
+ Expr::WhileExpr(it) => &it.syntax,
+ Expr::YieldExpr(it) => &it.syntax,
+ Expr::LetExpr(it) => &it.syntax,
+ Expr::UnderscoreExpr(it) => &it.syntax,
+ }
+ }
+}
+impl From<Const> for Item {
+ fn from(node: Const) -> Item { Item::Const(node) }
+}
+impl From<Enum> for Item {
+ fn from(node: Enum) -> Item { Item::Enum(node) }
+}
+impl From<ExternBlock> for Item {
+ fn from(node: ExternBlock) -> Item { Item::ExternBlock(node) }
+}
+impl From<ExternCrate> for Item {
+ fn from(node: ExternCrate) -> Item { Item::ExternCrate(node) }
+}
+impl From<Fn> for Item {
+ fn from(node: Fn) -> Item { Item::Fn(node) }
+}
+impl From<Impl> for Item {
+ fn from(node: Impl) -> Item { Item::Impl(node) }
+}
+impl From<MacroCall> for Item {
+ fn from(node: MacroCall) -> Item { Item::MacroCall(node) }
+}
+impl From<MacroRules> for Item {
+ fn from(node: MacroRules) -> Item { Item::MacroRules(node) }
+}
+impl From<MacroDef> for Item {
+ fn from(node: MacroDef) -> Item { Item::MacroDef(node) }
+}
+impl From<Module> for Item {
+ fn from(node: Module) -> Item { Item::Module(node) }
+}
+impl From<Static> for Item {
+ fn from(node: Static) -> Item { Item::Static(node) }
+}
+impl From<Struct> for Item {
+ fn from(node: Struct) -> Item { Item::Struct(node) }
+}
+impl From<Trait> for Item {
+ fn from(node: Trait) -> Item { Item::Trait(node) }
+}
+impl From<TypeAlias> for Item {
+ fn from(node: TypeAlias) -> Item { Item::TypeAlias(node) }
+}
+impl From<Union> for Item {
+ fn from(node: Union) -> Item { Item::Union(node) }
+}
+impl From<Use> for Item {
+ fn from(node: Use) -> Item { Item::Use(node) }
+}
+impl AstNode for Item {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ CONST | ENUM | EXTERN_BLOCK | EXTERN_CRATE | FN | IMPL | MACRO_CALL | MACRO_RULES
+ | MACRO_DEF | MODULE | STATIC | STRUCT | TRAIT | TYPE_ALIAS | UNION | USE => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ CONST => Item::Const(Const { syntax }),
+ ENUM => Item::Enum(Enum { syntax }),
+ EXTERN_BLOCK => Item::ExternBlock(ExternBlock { syntax }),
+ EXTERN_CRATE => Item::ExternCrate(ExternCrate { syntax }),
+ FN => Item::Fn(Fn { syntax }),
+ IMPL => Item::Impl(Impl { syntax }),
+ MACRO_CALL => Item::MacroCall(MacroCall { syntax }),
+ MACRO_RULES => Item::MacroRules(MacroRules { syntax }),
+ MACRO_DEF => Item::MacroDef(MacroDef { syntax }),
+ MODULE => Item::Module(Module { syntax }),
+ STATIC => Item::Static(Static { syntax }),
+ STRUCT => Item::Struct(Struct { syntax }),
+ TRAIT => Item::Trait(Trait { syntax }),
+ TYPE_ALIAS => Item::TypeAlias(TypeAlias { syntax }),
+ UNION => Item::Union(Union { syntax }),
+ USE => Item::Use(Use { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ Item::Const(it) => &it.syntax,
+ Item::Enum(it) => &it.syntax,
+ Item::ExternBlock(it) => &it.syntax,
+ Item::ExternCrate(it) => &it.syntax,
+ Item::Fn(it) => &it.syntax,
+ Item::Impl(it) => &it.syntax,
+ Item::MacroCall(it) => &it.syntax,
+ Item::MacroRules(it) => &it.syntax,
+ Item::MacroDef(it) => &it.syntax,
+ Item::Module(it) => &it.syntax,
+ Item::Static(it) => &it.syntax,
+ Item::Struct(it) => &it.syntax,
+ Item::Trait(it) => &it.syntax,
+ Item::TypeAlias(it) => &it.syntax,
+ Item::Union(it) => &it.syntax,
+ Item::Use(it) => &it.syntax,
+ }
+ }
+}
+impl From<ExprStmt> for Stmt {
+ fn from(node: ExprStmt) -> Stmt { Stmt::ExprStmt(node) }
+}
+impl From<Item> for Stmt {
+ fn from(node: Item) -> Stmt { Stmt::Item(node) }
+}
+impl From<LetStmt> for Stmt {
+ fn from(node: LetStmt) -> Stmt { Stmt::LetStmt(node) }
+}
+impl From<IdentPat> for Pat {
+ fn from(node: IdentPat) -> Pat { Pat::IdentPat(node) }
+}
+impl From<BoxPat> for Pat {
+ fn from(node: BoxPat) -> Pat { Pat::BoxPat(node) }
+}
+impl From<RestPat> for Pat {
+ fn from(node: RestPat) -> Pat { Pat::RestPat(node) }
+}
+impl From<LiteralPat> for Pat {
+ fn from(node: LiteralPat) -> Pat { Pat::LiteralPat(node) }
+}
+impl From<MacroPat> for Pat {
+ fn from(node: MacroPat) -> Pat { Pat::MacroPat(node) }
+}
+impl From<OrPat> for Pat {
+ fn from(node: OrPat) -> Pat { Pat::OrPat(node) }
+}
+impl From<ParenPat> for Pat {
+ fn from(node: ParenPat) -> Pat { Pat::ParenPat(node) }
+}
+impl From<PathPat> for Pat {
+ fn from(node: PathPat) -> Pat { Pat::PathPat(node) }
+}
+impl From<WildcardPat> for Pat {
+ fn from(node: WildcardPat) -> Pat { Pat::WildcardPat(node) }
+}
+impl From<RangePat> for Pat {
+ fn from(node: RangePat) -> Pat { Pat::RangePat(node) }
+}
+impl From<RecordPat> for Pat {
+ fn from(node: RecordPat) -> Pat { Pat::RecordPat(node) }
+}
+impl From<RefPat> for Pat {
+ fn from(node: RefPat) -> Pat { Pat::RefPat(node) }
+}
+impl From<SlicePat> for Pat {
+ fn from(node: SlicePat) -> Pat { Pat::SlicePat(node) }
+}
+impl From<TuplePat> for Pat {
+ fn from(node: TuplePat) -> Pat { Pat::TuplePat(node) }
+}
+impl From<TupleStructPat> for Pat {
+ fn from(node: TupleStructPat) -> Pat { Pat::TupleStructPat(node) }
+}
+impl From<ConstBlockPat> for Pat {
+ fn from(node: ConstBlockPat) -> Pat { Pat::ConstBlockPat(node) }
+}
+impl AstNode for Pat {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ IDENT_PAT | BOX_PAT | REST_PAT | LITERAL_PAT | MACRO_PAT | OR_PAT | PAREN_PAT
+ | PATH_PAT | WILDCARD_PAT | RANGE_PAT | RECORD_PAT | REF_PAT | SLICE_PAT
+ | TUPLE_PAT | TUPLE_STRUCT_PAT | CONST_BLOCK_PAT => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ IDENT_PAT => Pat::IdentPat(IdentPat { syntax }),
+ BOX_PAT => Pat::BoxPat(BoxPat { syntax }),
+ REST_PAT => Pat::RestPat(RestPat { syntax }),
+ LITERAL_PAT => Pat::LiteralPat(LiteralPat { syntax }),
+ MACRO_PAT => Pat::MacroPat(MacroPat { syntax }),
+ OR_PAT => Pat::OrPat(OrPat { syntax }),
+ PAREN_PAT => Pat::ParenPat(ParenPat { syntax }),
+ PATH_PAT => Pat::PathPat(PathPat { syntax }),
+ WILDCARD_PAT => Pat::WildcardPat(WildcardPat { syntax }),
+ RANGE_PAT => Pat::RangePat(RangePat { syntax }),
+ RECORD_PAT => Pat::RecordPat(RecordPat { syntax }),
+ REF_PAT => Pat::RefPat(RefPat { syntax }),
+ SLICE_PAT => Pat::SlicePat(SlicePat { syntax }),
+ TUPLE_PAT => Pat::TuplePat(TuplePat { syntax }),
+ TUPLE_STRUCT_PAT => Pat::TupleStructPat(TupleStructPat { syntax }),
+ CONST_BLOCK_PAT => Pat::ConstBlockPat(ConstBlockPat { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ Pat::IdentPat(it) => &it.syntax,
+ Pat::BoxPat(it) => &it.syntax,
+ Pat::RestPat(it) => &it.syntax,
+ Pat::LiteralPat(it) => &it.syntax,
+ Pat::MacroPat(it) => &it.syntax,
+ Pat::OrPat(it) => &it.syntax,
+ Pat::ParenPat(it) => &it.syntax,
+ Pat::PathPat(it) => &it.syntax,
+ Pat::WildcardPat(it) => &it.syntax,
+ Pat::RangePat(it) => &it.syntax,
+ Pat::RecordPat(it) => &it.syntax,
+ Pat::RefPat(it) => &it.syntax,
+ Pat::SlicePat(it) => &it.syntax,
+ Pat::TuplePat(it) => &it.syntax,
+ Pat::TupleStructPat(it) => &it.syntax,
+ Pat::ConstBlockPat(it) => &it.syntax,
+ }
+ }
+}
+impl From<RecordFieldList> for FieldList {
+ fn from(node: RecordFieldList) -> FieldList { FieldList::RecordFieldList(node) }
+}
+impl From<TupleFieldList> for FieldList {
+ fn from(node: TupleFieldList) -> FieldList { FieldList::TupleFieldList(node) }
+}
+impl AstNode for FieldList {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ RECORD_FIELD_LIST | TUPLE_FIELD_LIST => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ RECORD_FIELD_LIST => FieldList::RecordFieldList(RecordFieldList { syntax }),
+ TUPLE_FIELD_LIST => FieldList::TupleFieldList(TupleFieldList { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ FieldList::RecordFieldList(it) => &it.syntax,
+ FieldList::TupleFieldList(it) => &it.syntax,
+ }
+ }
+}
+impl From<Enum> for Adt {
+ fn from(node: Enum) -> Adt { Adt::Enum(node) }
+}
+impl From<Struct> for Adt {
+ fn from(node: Struct) -> Adt { Adt::Struct(node) }
+}
+impl From<Union> for Adt {
+ fn from(node: Union) -> Adt { Adt::Union(node) }
+}
+impl AstNode for Adt {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ ENUM | STRUCT | UNION => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ ENUM => Adt::Enum(Enum { syntax }),
+ STRUCT => Adt::Struct(Struct { syntax }),
+ UNION => Adt::Union(Union { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ Adt::Enum(it) => &it.syntax,
+ Adt::Struct(it) => &it.syntax,
+ Adt::Union(it) => &it.syntax,
+ }
+ }
+}
+impl From<Const> for AssocItem {
+ fn from(node: Const) -> AssocItem { AssocItem::Const(node) }
+}
+impl From<Fn> for AssocItem {
+ fn from(node: Fn) -> AssocItem { AssocItem::Fn(node) }
+}
+impl From<MacroCall> for AssocItem {
+ fn from(node: MacroCall) -> AssocItem { AssocItem::MacroCall(node) }
+}
+impl From<TypeAlias> for AssocItem {
+ fn from(node: TypeAlias) -> AssocItem { AssocItem::TypeAlias(node) }
+}
+impl AstNode for AssocItem {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ CONST | FN | MACRO_CALL | TYPE_ALIAS => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ CONST => AssocItem::Const(Const { syntax }),
+ FN => AssocItem::Fn(Fn { syntax }),
+ MACRO_CALL => AssocItem::MacroCall(MacroCall { syntax }),
+ TYPE_ALIAS => AssocItem::TypeAlias(TypeAlias { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ AssocItem::Const(it) => &it.syntax,
+ AssocItem::Fn(it) => &it.syntax,
+ AssocItem::MacroCall(it) => &it.syntax,
+ AssocItem::TypeAlias(it) => &it.syntax,
+ }
+ }
+}
+impl From<Fn> for ExternItem {
+ fn from(node: Fn) -> ExternItem { ExternItem::Fn(node) }
+}
+impl From<MacroCall> for ExternItem {
+ fn from(node: MacroCall) -> ExternItem { ExternItem::MacroCall(node) }
+}
+impl From<Static> for ExternItem {
+ fn from(node: Static) -> ExternItem { ExternItem::Static(node) }
+}
+impl From<TypeAlias> for ExternItem {
+ fn from(node: TypeAlias) -> ExternItem { ExternItem::TypeAlias(node) }
+}
+impl AstNode for ExternItem {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ FN | MACRO_CALL | STATIC | TYPE_ALIAS => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ FN => ExternItem::Fn(Fn { syntax }),
+ MACRO_CALL => ExternItem::MacroCall(MacroCall { syntax }),
+ STATIC => ExternItem::Static(Static { syntax }),
+ TYPE_ALIAS => ExternItem::TypeAlias(TypeAlias { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ ExternItem::Fn(it) => &it.syntax,
+ ExternItem::MacroCall(it) => &it.syntax,
+ ExternItem::Static(it) => &it.syntax,
+ ExternItem::TypeAlias(it) => &it.syntax,
+ }
+ }
+}
+impl From<ConstParam> for GenericParam {
+ fn from(node: ConstParam) -> GenericParam { GenericParam::ConstParam(node) }
+}
+impl From<LifetimeParam> for GenericParam {
+ fn from(node: LifetimeParam) -> GenericParam { GenericParam::LifetimeParam(node) }
+}
+impl From<TypeParam> for GenericParam {
+ fn from(node: TypeParam) -> GenericParam { GenericParam::TypeParam(node) }
+}
+impl AstNode for GenericParam {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ CONST_PARAM | LIFETIME_PARAM | TYPE_PARAM => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ CONST_PARAM => GenericParam::ConstParam(ConstParam { syntax }),
+ LIFETIME_PARAM => GenericParam::LifetimeParam(LifetimeParam { syntax }),
+ TYPE_PARAM => GenericParam::TypeParam(TypeParam { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ GenericParam::ConstParam(it) => &it.syntax,
+ GenericParam::LifetimeParam(it) => &it.syntax,
+ GenericParam::TypeParam(it) => &it.syntax,
+ }
+ }
+}
+impl AnyHasArgList {
+ #[inline]
+ pub fn new<T: ast::HasArgList>(node: T) -> AnyHasArgList {
+ AnyHasArgList { syntax: node.syntax().clone() }
+ }
+}
+impl AstNode for AnyHasArgList {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ CALL_EXPR | METHOD_CALL_EXPR => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ Self::can_cast(syntax.kind()).then(|| AnyHasArgList { syntax })
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AnyHasAttrs {
+ #[inline]
+ pub fn new<T: ast::HasAttrs>(node: T) -> AnyHasAttrs {
+ AnyHasAttrs { syntax: node.syntax().clone() }
+ }
+}
+impl AstNode for AnyHasAttrs {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ MACRO_CALL
+ | SOURCE_FILE
+ | CONST
+ | ENUM
+ | EXTERN_BLOCK
+ | EXTERN_CRATE
+ | FN
+ | IMPL
+ | MACRO_RULES
+ | MACRO_DEF
+ | MODULE
+ | STATIC
+ | STRUCT
+ | TRAIT
+ | TYPE_ALIAS
+ | UNION
+ | USE
+ | ITEM_LIST
+ | BLOCK_EXPR
+ | SELF_PARAM
+ | PARAM
+ | RECORD_FIELD
+ | TUPLE_FIELD
+ | VARIANT
+ | ASSOC_ITEM_LIST
+ | EXTERN_ITEM_LIST
+ | CONST_PARAM
+ | LIFETIME_PARAM
+ | TYPE_PARAM
+ | LET_STMT
+ | ARRAY_EXPR
+ | AWAIT_EXPR
+ | BIN_EXPR
+ | BOX_EXPR
+ | BREAK_EXPR
+ | CALL_EXPR
+ | CAST_EXPR
+ | CLOSURE_EXPR
+ | CONTINUE_EXPR
+ | FIELD_EXPR
+ | FOR_EXPR
+ | IF_EXPR
+ | INDEX_EXPR
+ | LITERAL
+ | LOOP_EXPR
+ | MATCH_EXPR
+ | METHOD_CALL_EXPR
+ | PAREN_EXPR
+ | PATH_EXPR
+ | PREFIX_EXPR
+ | RANGE_EXPR
+ | REF_EXPR
+ | RETURN_EXPR
+ | TRY_EXPR
+ | TUPLE_EXPR
+ | WHILE_EXPR
+ | YIELD_EXPR
+ | LET_EXPR
+ | UNDERSCORE_EXPR
+ | STMT_LIST
+ | RECORD_EXPR_FIELD_LIST
+ | RECORD_EXPR_FIELD
+ | MATCH_ARM_LIST
+ | MATCH_ARM
+ | IDENT_PAT
+ | REST_PAT
+ | RECORD_PAT_FIELD => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ Self::can_cast(syntax.kind()).then(|| AnyHasAttrs { syntax })
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AnyHasDocComments {
+ #[inline]
+ pub fn new<T: ast::HasDocComments>(node: T) -> AnyHasDocComments {
+ AnyHasDocComments { syntax: node.syntax().clone() }
+ }
+}
+impl AstNode for AnyHasDocComments {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ MACRO_CALL | SOURCE_FILE | CONST | ENUM | EXTERN_BLOCK | EXTERN_CRATE | FN | IMPL
+ | MACRO_RULES | MACRO_DEF | MODULE | STATIC | STRUCT | TRAIT | TYPE_ALIAS | UNION
+ | USE | RECORD_FIELD | TUPLE_FIELD | VARIANT => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ Self::can_cast(syntax.kind()).then(|| AnyHasDocComments { syntax })
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AnyHasGenericParams {
+ #[inline]
+ pub fn new<T: ast::HasGenericParams>(node: T) -> AnyHasGenericParams {
+ AnyHasGenericParams { syntax: node.syntax().clone() }
+ }
+}
+impl AstNode for AnyHasGenericParams {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ ENUM | FN | IMPL | STRUCT | TRAIT | TYPE_ALIAS | UNION => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ Self::can_cast(syntax.kind()).then(|| AnyHasGenericParams { syntax })
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AnyHasLoopBody {
+ #[inline]
+ pub fn new<T: ast::HasLoopBody>(node: T) -> AnyHasLoopBody {
+ AnyHasLoopBody { syntax: node.syntax().clone() }
+ }
+}
+impl AstNode for AnyHasLoopBody {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ FOR_EXPR | LOOP_EXPR | WHILE_EXPR => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ Self::can_cast(syntax.kind()).then(|| AnyHasLoopBody { syntax })
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AnyHasModuleItem {
+ #[inline]
+ pub fn new<T: ast::HasModuleItem>(node: T) -> AnyHasModuleItem {
+ AnyHasModuleItem { syntax: node.syntax().clone() }
+ }
+}
+impl AstNode for AnyHasModuleItem {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ MACRO_ITEMS | SOURCE_FILE | ITEM_LIST => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ Self::can_cast(syntax.kind()).then(|| AnyHasModuleItem { syntax })
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AnyHasName {
+ #[inline]
+ pub fn new<T: ast::HasName>(node: T) -> AnyHasName {
+ AnyHasName { syntax: node.syntax().clone() }
+ }
+}
+impl AstNode for AnyHasName {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ CONST | ENUM | FN | MACRO_RULES | MACRO_DEF | MODULE | STATIC | STRUCT | TRAIT
+ | TYPE_ALIAS | UNION | RENAME | SELF_PARAM | RECORD_FIELD | VARIANT | CONST_PARAM
+ | TYPE_PARAM | IDENT_PAT => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ Self::can_cast(syntax.kind()).then(|| AnyHasName { syntax })
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AnyHasTypeBounds {
+ #[inline]
+ pub fn new<T: ast::HasTypeBounds>(node: T) -> AnyHasTypeBounds {
+ AnyHasTypeBounds { syntax: node.syntax().clone() }
+ }
+}
+impl AstNode for AnyHasTypeBounds {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ ASSOC_TYPE_ARG | TRAIT | TYPE_ALIAS | LIFETIME_PARAM | TYPE_PARAM | WHERE_PRED => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ Self::can_cast(syntax.kind()).then(|| AnyHasTypeBounds { syntax })
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AnyHasVisibility {
+ #[inline]
+ pub fn new<T: ast::HasVisibility>(node: T) -> AnyHasVisibility {
+ AnyHasVisibility { syntax: node.syntax().clone() }
+ }
+}
+impl AstNode for AnyHasVisibility {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ CONST | ENUM | EXTERN_CRATE | FN | IMPL | MACRO_RULES | MACRO_DEF | MODULE | STATIC
+ | STRUCT | TRAIT | TYPE_ALIAS | UNION | USE | RECORD_FIELD | TUPLE_FIELD | VARIANT => {
+ true
+ }
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ Self::can_cast(syntax.kind()).then(|| AnyHasVisibility { syntax })
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl std::fmt::Display for GenericArg {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Type {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Expr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Item {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Stmt {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Pat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for FieldList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Adt {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for AssocItem {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ExternItem {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for GenericParam {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Name {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for NameRef {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Lifetime {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Path {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for PathSegment {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for GenericArgList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ParamList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RetType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for PathType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TypeArg {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for AssocTypeArg {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for LifetimeArg {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ConstArg {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for GenericParamList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TypeBoundList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MacroCall {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Attr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TokenTree {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MacroItems {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MacroStmts {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for SourceFile {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Const {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Enum {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ExternBlock {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ExternCrate {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Fn {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Impl {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MacroRules {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MacroDef {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Module {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Static {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Struct {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Trait {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TypeAlias {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Union {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Use {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Visibility {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ItemList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Rename {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for UseTree {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for UseTreeList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Abi {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for WhereClause {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for BlockExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for SelfParam {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Param {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RecordFieldList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TupleFieldList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RecordField {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TupleField {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for VariantList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Variant {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for AssocItemList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ExternItemList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ConstParam {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for LifetimeParam {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TypeParam {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for WherePred {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Meta {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ExprStmt {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for LetStmt {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for LetElse {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ArrayExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for AwaitExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for BinExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for BoxExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for BreakExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for CallExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for CastExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ClosureExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ContinueExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for FieldExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ForExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for IfExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for IndexExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Literal {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for LoopExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MacroExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MatchExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MethodCallExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ParenExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for PathExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for PrefixExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RangeExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RecordExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RefExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ReturnExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TryExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TupleExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for WhileExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for YieldExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for LetExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for UnderscoreExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for StmtList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Label {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RecordExprFieldList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RecordExprField {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ArgList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MatchArmList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MatchArm {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MatchGuard {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ArrayType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for DynTraitType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for FnPtrType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ForType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ImplTraitType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for InferType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MacroType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for NeverType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ParenType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for PtrType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RefType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for SliceType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TupleType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TypeBound {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for IdentPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for BoxPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RestPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for LiteralPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MacroPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for OrPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ParenPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for PathPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for WildcardPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RangePat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RecordPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RefPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for SlicePat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TuplePat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TupleStructPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ConstBlockPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RecordPatFieldList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RecordPatField {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/tokens.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/tokens.rs
new file mode 100644
index 000000000..a3209c5ab
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/tokens.rs
@@ -0,0 +1,196 @@
+//! Generated by `sourcegen_ast`, do not edit by hand.
+
+use crate::{
+ ast::AstToken,
+ SyntaxKind::{self, *},
+ SyntaxToken,
+};
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Whitespace {
+ pub(crate) syntax: SyntaxToken,
+}
+impl std::fmt::Display for Whitespace {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(&self.syntax, f)
+ }
+}
+impl AstToken for Whitespace {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == WHITESPACE }
+ fn cast(syntax: SyntaxToken) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxToken { &self.syntax }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Comment {
+ pub(crate) syntax: SyntaxToken,
+}
+impl std::fmt::Display for Comment {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(&self.syntax, f)
+ }
+}
+impl AstToken for Comment {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == COMMENT }
+ fn cast(syntax: SyntaxToken) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxToken { &self.syntax }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct String {
+ pub(crate) syntax: SyntaxToken,
+}
+impl std::fmt::Display for String {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(&self.syntax, f)
+ }
+}
+impl AstToken for String {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == STRING }
+ fn cast(syntax: SyntaxToken) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxToken { &self.syntax }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ByteString {
+ pub(crate) syntax: SyntaxToken,
+}
+impl std::fmt::Display for ByteString {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(&self.syntax, f)
+ }
+}
+impl AstToken for ByteString {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == BYTE_STRING }
+ fn cast(syntax: SyntaxToken) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxToken { &self.syntax }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct IntNumber {
+ pub(crate) syntax: SyntaxToken,
+}
+impl std::fmt::Display for IntNumber {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(&self.syntax, f)
+ }
+}
+impl AstToken for IntNumber {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == INT_NUMBER }
+ fn cast(syntax: SyntaxToken) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxToken { &self.syntax }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct FloatNumber {
+ pub(crate) syntax: SyntaxToken,
+}
+impl std::fmt::Display for FloatNumber {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(&self.syntax, f)
+ }
+}
+impl AstToken for FloatNumber {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == FLOAT_NUMBER }
+ fn cast(syntax: SyntaxToken) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxToken { &self.syntax }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Char {
+ pub(crate) syntax: SyntaxToken,
+}
+impl std::fmt::Display for Char {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(&self.syntax, f)
+ }
+}
+impl AstToken for Char {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == CHAR }
+ fn cast(syntax: SyntaxToken) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxToken { &self.syntax }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Byte {
+ pub(crate) syntax: SyntaxToken,
+}
+impl std::fmt::Display for Byte {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(&self.syntax, f)
+ }
+}
+impl AstToken for Byte {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == BYTE }
+ fn cast(syntax: SyntaxToken) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxToken { &self.syntax }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Ident {
+ pub(crate) syntax: SyntaxToken,
+}
+impl std::fmt::Display for Ident {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(&self.syntax, f)
+ }
+}
+impl AstToken for Ident {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == IDENT }
+ fn cast(syntax: SyntaxToken) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxToken { &self.syntax }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs
new file mode 100644
index 000000000..5908dda8e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs
@@ -0,0 +1,901 @@
+//! This module contains free-standing functions for creating AST fragments out
+//! of smaller pieces.
+//!
+//! Note that all functions here are intended to be stupid constructors, which
+//! just assemble a finished node from immediate children. If you want to do
+//! something smarter than that, it belongs in the `ext` submodule.
+//!
+//! Keep in mind that `from_text` functions should be kept private. The public
+//! API should require assembling every node piecewise. The trick of
+//! `parse(format!())` we use internally is an implementation detail -- long
+//! term, it will be replaced with direct tree manipulation.
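+
+// A minimal usage sketch (illustrative only, not exhaustive): the constructors
+// below compose by nesting, and the resulting nodes render back to source text
+// via `Display`. Everything referenced here is defined later in this module:
+//
+//     let path = path_unqualified(path_segment(name_ref("foo")));
+//     assert_eq!(path.to_string(), "foo");
+//
+//     let ref_ty = ty_ref(ty_path(path), /* exclusive */ true);
+//     assert_eq!(ref_ty.to_string(), "&mut foo");
+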
+use itertools::Itertools;
+use stdx::{format_to, never};
+
+use crate::{ast, AstNode, SourceFile, SyntaxKind, SyntaxToken};
+
+/// While the parent module defines basic atomic "constructors", the `ext`
+/// module defines shortcuts for common things.
+///
+/// It's named `ext` rather than `shortcuts` just to keep it short.
+pub mod ext {
+ use super::*;
+
+ pub fn simple_ident_pat(name: ast::Name) -> ast::IdentPat {
+ return from_text(&name.text());
+
+ fn from_text(text: &str) -> ast::IdentPat {
+ ast_from_text(&format!("fn f({}: ())", text))
+ }
+ }
+ pub fn ident_path(ident: &str) -> ast::Path {
+ path_unqualified(path_segment(name_ref(ident)))
+ }
+
+ pub fn path_from_idents<'a>(
+ parts: impl std::iter::IntoIterator<Item = &'a str>,
+ ) -> Option<ast::Path> {
+ let mut iter = parts.into_iter();
+ let base = ext::ident_path(iter.next()?);
+ let path = iter.fold(base, |base, s| {
+ let path = ext::ident_path(s);
+ path_concat(base, path)
+ });
+ Some(path)
+ }
+
+ pub fn field_from_idents<'a>(
+ parts: impl std::iter::IntoIterator<Item = &'a str>,
+ ) -> Option<ast::Expr> {
+ let mut iter = parts.into_iter();
+ let base = expr_path(ext::ident_path(iter.next()?));
+ let expr = iter.fold(base, expr_field);
+ Some(expr)
+ }
+
+ pub fn expr_unreachable() -> ast::Expr {
+ expr_from_text("unreachable!()")
+ }
+ pub fn expr_todo() -> ast::Expr {
+ expr_from_text("todo!()")
+ }
+ pub fn expr_ty_default(ty: &ast::Type) -> ast::Expr {
+ expr_from_text(&format!("{}::default()", ty))
+ }
+ pub fn expr_ty_new(ty: &ast::Type) -> ast::Expr {
+ expr_from_text(&format!("{}::new()", ty))
+ }
+
+ pub fn zero_number() -> ast::Expr {
+ expr_from_text("0")
+ }
+ pub fn zero_float() -> ast::Expr {
+ expr_from_text("0.0")
+ }
+ pub fn empty_str() -> ast::Expr {
+ expr_from_text(r#""""#)
+ }
+ pub fn empty_char() -> ast::Expr {
+ expr_from_text("'\x00'")
+ }
+ pub fn default_bool() -> ast::Expr {
+ expr_from_text("false")
+ }
+ pub fn option_none() -> ast::Expr {
+ expr_from_text("None")
+ }
+ pub fn empty_block_expr() -> ast::BlockExpr {
+ block_expr(None, None)
+ }
+
+ pub fn ty_bool() -> ast::Type {
+ ty_path(ident_path("bool"))
+ }
+ pub fn ty_option(t: ast::Type) -> ast::Type {
+ ty_from_text(&format!("Option<{}>", t))
+ }
+ pub fn ty_result(t: ast::Type, e: ast::Type) -> ast::Type {
+ ty_from_text(&format!("Result<{}, {}>", t, e))
+ }
+}
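+
+// A small sketch of what the shortcuts above expand to; they can be freely
+// combined with each other and with the atomic constructors below:
+//
+//     let opt = ext::ty_option(ext::ty_bool());      // Option<bool>
+//     let res = ext::ty_result(opt, ext::ty_bool()); // Result<Option<bool>, bool>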
+
+pub fn name(text: &str) -> ast::Name {
+ ast_from_text(&format!("mod {}{};", raw_ident_esc(text), text))
+}
+pub fn name_ref(text: &str) -> ast::NameRef {
+ ast_from_text(&format!("fn f() {{ {}{}; }}", raw_ident_esc(text), text))
+}
+fn raw_ident_esc(ident: &str) -> &'static str {
+ let is_keyword = parser::SyntaxKind::from_keyword(ident).is_some();
+ if is_keyword && !matches!(ident, "self" | "crate" | "super" | "Self") {
+ "r#"
+ } else {
+ ""
+ }
+}
+
+pub fn lifetime(text: &str) -> ast::Lifetime {
+ let mut text = text;
+ let tmp;
+ if never!(!text.starts_with('\'')) {
+ tmp = format!("'{}", text);
+ text = &tmp;
+ }
+ ast_from_text(&format!("fn f<{}>() {{ }}", text))
+}
+
+// FIXME: replace stringly-typed constructor with a family of typed ctors, a-la
+// `expr_xxx`.
+pub fn ty(text: &str) -> ast::Type {
+ ty_from_text(text)
+}
+pub fn ty_placeholder() -> ast::Type {
+ ty_from_text("_")
+}
+pub fn ty_unit() -> ast::Type {
+ ty_from_text("()")
+}
+pub fn ty_tuple(types: impl IntoIterator<Item = ast::Type>) -> ast::Type {
+ let mut count: usize = 0;
+ let mut contents = types.into_iter().inspect(|_| count += 1).join(", ");
+ if count == 1 {
+ contents.push(',');
+ }
+
+ ty_from_text(&format!("({})", contents))
+}
+pub fn ty_ref(target: ast::Type, exclusive: bool) -> ast::Type {
+ ty_from_text(&if exclusive { format!("&mut {}", target) } else { format!("&{}", target) })
+}
+pub fn ty_path(path: ast::Path) -> ast::Type {
+ ty_from_text(&path.to_string())
+}
+fn ty_from_text(text: &str) -> ast::Type {
+ ast_from_text(&format!("type _T = {};", text))
+}
+
+pub fn assoc_item_list() -> ast::AssocItemList {
+ ast_from_text("impl C for D {}")
+}
+
+pub fn impl_(
+ ty: ast::Path,
+ params: Option<ast::GenericParamList>,
+ ty_params: Option<ast::GenericParamList>,
+) -> ast::Impl {
+ let params = match params {
+ Some(params) => params.to_string(),
+ None => String::new(),
+ };
+ let ty_params = match ty_params {
+ Some(params) => params.to_string(),
+ None => String::new(),
+ };
+ ast_from_text(&format!("impl{} {}{} {{}}", params, ty, ty_params))
+}
+
+pub fn impl_trait(
+ trait_: ast::Path,
+ ty: ast::Path,
+ ty_params: Option<ast::GenericParamList>,
+) -> ast::Impl {
+ let ty_params = ty_params.map_or_else(String::new, |params| params.to_string());
+ ast_from_text(&format!("impl{2} {} for {}{2} {{}}", trait_, ty, ty_params))
+}
+
+pub(crate) fn generic_arg_list() -> ast::GenericArgList {
+ ast_from_text("const S: T<> = ();")
+}
+
+pub fn path_segment(name_ref: ast::NameRef) -> ast::PathSegment {
+ ast_from_text(&format!("type __ = {};", name_ref))
+}
+
+pub fn path_segment_ty(type_ref: ast::Type, trait_ref: Option<ast::PathType>) -> ast::PathSegment {
+ let text = match trait_ref {
+ Some(trait_ref) => format!("fn f(x: <{} as {}>) {{}}", type_ref, trait_ref),
+ None => format!("fn f(x: <{}>) {{}}", type_ref),
+ };
+ ast_from_text(&text)
+}
+
+pub fn path_segment_self() -> ast::PathSegment {
+ ast_from_text("use self;")
+}
+
+pub fn path_segment_super() -> ast::PathSegment {
+ ast_from_text("use super;")
+}
+
+pub fn path_segment_crate() -> ast::PathSegment {
+ ast_from_text("use crate;")
+}
+
+pub fn path_unqualified(segment: ast::PathSegment) -> ast::Path {
+ ast_from_text(&format!("type __ = {};", segment))
+}
+
+pub fn path_qualified(qual: ast::Path, segment: ast::PathSegment) -> ast::Path {
+ ast_from_text(&format!("{}::{}", qual, segment))
+}
+// FIXME: path concatenation operation doesn't make sense as AST op.
+pub fn path_concat(first: ast::Path, second: ast::Path) -> ast::Path {
+ ast_from_text(&format!("type __ = {}::{};", first, second))
+}
+
+pub fn path_from_segments(
+ segments: impl IntoIterator<Item = ast::PathSegment>,
+ is_abs: bool,
+) -> ast::Path {
+ let segments = segments.into_iter().map(|it| it.syntax().clone()).join("::");
+ ast_from_text(&if is_abs {
+ format!("fn f(x: ::{}) {{}}", segments)
+ } else {
+ format!("fn f(x: {}) {{}}", segments)
+ })
+}
+
+pub fn join_paths(paths: impl IntoIterator<Item = ast::Path>) -> ast::Path {
+ let paths = paths.into_iter().map(|it| it.syntax().clone()).join("::");
+ ast_from_text(&format!("type __ = {};", paths))
+}
+
+// FIXME: should not be pub
+pub fn path_from_text(text: &str) -> ast::Path {
+ ast_from_text(&format!("fn main() {{ let test = {}; }}", text))
+}
+
+pub fn use_tree_glob() -> ast::UseTree {
+ ast_from_text("use *;")
+}
+pub fn use_tree(
+ path: ast::Path,
+ use_tree_list: Option<ast::UseTreeList>,
+ alias: Option<ast::Rename>,
+ add_star: bool,
+) -> ast::UseTree {
+ let mut buf = "use ".to_string();
+ buf += &path.syntax().to_string();
+ if let Some(use_tree_list) = use_tree_list {
+ format_to!(buf, "::{}", use_tree_list);
+ }
+ if add_star {
+ buf += "::*";
+ }
+
+ if let Some(alias) = alias {
+ format_to!(buf, " {}", alias);
+ }
+ ast_from_text(&buf)
+}
+
+pub fn use_tree_list(use_trees: impl IntoIterator<Item = ast::UseTree>) -> ast::UseTreeList {
+ let use_trees = use_trees.into_iter().map(|it| it.syntax().clone()).join(", ");
+ ast_from_text(&format!("use {{{}}};", use_trees))
+}
+
+pub fn use_(visibility: Option<ast::Visibility>, use_tree: ast::UseTree) -> ast::Use {
+ let visibility = match visibility {
+ None => String::new(),
+ Some(it) => format!("{} ", it),
+ };
+ ast_from_text(&format!("{}use {};", visibility, use_tree))
+}
+
+pub fn record_expr(path: ast::Path, fields: ast::RecordExprFieldList) -> ast::RecordExpr {
+ ast_from_text(&format!("fn f() {{ {} {} }}", path, fields))
+}
+
+pub fn record_expr_field_list(
+ fields: impl IntoIterator<Item = ast::RecordExprField>,
+) -> ast::RecordExprFieldList {
+ let fields = fields.into_iter().join(", ");
+ ast_from_text(&format!("fn f() {{ S {{ {} }} }}", fields))
+}
+
+pub fn record_expr_field(name: ast::NameRef, expr: Option<ast::Expr>) -> ast::RecordExprField {
+ return match expr {
+ Some(expr) => from_text(&format!("{}: {}", name, expr)),
+ None => from_text(&name.to_string()),
+ };
+
+ fn from_text(text: &str) -> ast::RecordExprField {
+ ast_from_text(&format!("fn f() {{ S {{ {}, }} }}", text))
+ }
+}
+
+pub fn record_field(
+ visibility: Option<ast::Visibility>,
+ name: ast::Name,
+ ty: ast::Type,
+) -> ast::RecordField {
+ let visibility = match visibility {
+ None => String::new(),
+ Some(it) => format!("{} ", it),
+ };
+ ast_from_text(&format!("struct S {{ {}{}: {}, }}", visibility, name, ty))
+}
+
+pub fn block_expr(
+ stmts: impl IntoIterator<Item = ast::Stmt>,
+ tail_expr: Option<ast::Expr>,
+) -> ast::BlockExpr {
+ let mut buf = "{\n".to_string();
+ for stmt in stmts.into_iter() {
+ format_to!(buf, " {}\n", stmt);
+ }
+ if let Some(tail_expr) = tail_expr {
+ format_to!(buf, " {}\n", tail_expr);
+ }
+ buf += "}";
+ ast_from_text(&format!("fn f() {}", buf))
+}
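A hedged sketch of how `block_expr` composes with the other constructors in this file (`expr_literal`, `let_stmt` and friends appear further down; the helper name is invented):

    use syntax::ast::{self, make};

    fn body_returning_x() -> ast::BlockExpr {
        // Builds `{ let x = 92; x }`, with the statement and the tail
        // expression each on their own indented line.
        let pat = make::ident_pat(false, false, make::name("x")).into();
        let init = make::expr_literal("92").into();
        let stmt = make::let_stmt(pat, None, Some(init)).into();
        make::block_expr([stmt], Some(make::expr_path(make::path_from_text("x"))))
    }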
+
+/// Ideally this function wouldn't exist since it involves manual indenting.
+/// It differs from `make::block_expr` by also supporting comments.
+///
+/// FIXME: replace usages of this with the mutable syntax tree API
+pub fn hacky_block_expr_with_comments(
+ elements: impl IntoIterator<Item = crate::SyntaxElement>,
+ tail_expr: Option<ast::Expr>,
+) -> ast::BlockExpr {
+ let mut buf = "{\n".to_string();
+ for node_or_token in elements.into_iter() {
+ match node_or_token {
+ rowan::NodeOrToken::Node(n) => format_to!(buf, " {}\n", n),
+ rowan::NodeOrToken::Token(t) if t.kind() == SyntaxKind::COMMENT => {
+ format_to!(buf, " {}\n", t)
+ }
+ _ => (),
+ }
+ }
+ if let Some(tail_expr) = tail_expr {
+ format_to!(buf, " {}\n", tail_expr);
+ }
+ buf += "}";
+ ast_from_text(&format!("fn f() {}", buf))
+}
+
+pub fn expr_unit() -> ast::Expr {
+ expr_from_text("()")
+}
+pub fn expr_literal(text: &str) -> ast::Literal {
+ assert_eq!(text.trim(), text);
+ ast_from_text(&format!("fn f() {{ let _ = {}; }}", text))
+}
+
+pub fn expr_empty_block() -> ast::Expr {
+ expr_from_text("{}")
+}
+pub fn expr_path(path: ast::Path) -> ast::Expr {
+ expr_from_text(&path.to_string())
+}
+pub fn expr_continue(label: Option<ast::Lifetime>) -> ast::Expr {
+ match label {
+ Some(label) => expr_from_text(&format!("continue {}", label)),
+ None => expr_from_text("continue"),
+ }
+}
+// Consider `op: SyntaxKind` instead for nicer syntax at the call-site?
+pub fn expr_bin_op(lhs: ast::Expr, op: ast::BinaryOp, rhs: ast::Expr) -> ast::Expr {
+ expr_from_text(&format!("{} {} {}", lhs, op, rhs))
+}
+pub fn expr_break(label: Option<ast::Lifetime>, expr: Option<ast::Expr>) -> ast::Expr {
+ let mut s = String::from("break");
+
+ if let Some(label) = label {
+ format_to!(s, " {}", label);
+ }
+
+ if let Some(expr) = expr {
+ format_to!(s, " {}", expr);
+ }
+
+ expr_from_text(&s)
+}
+pub fn expr_return(expr: Option<ast::Expr>) -> ast::Expr {
+ match expr {
+ Some(expr) => expr_from_text(&format!("return {}", expr)),
+ None => expr_from_text("return"),
+ }
+}
+pub fn expr_try(expr: ast::Expr) -> ast::Expr {
+ expr_from_text(&format!("{}?", expr))
+}
+pub fn expr_await(expr: ast::Expr) -> ast::Expr {
+ expr_from_text(&format!("{}.await", expr))
+}
+pub fn expr_match(expr: ast::Expr, match_arm_list: ast::MatchArmList) -> ast::Expr {
+ expr_from_text(&format!("match {} {}", expr, match_arm_list))
+}
+pub fn expr_if(
+ condition: ast::Expr,
+ then_branch: ast::BlockExpr,
+ else_branch: Option<ast::ElseBranch>,
+) -> ast::Expr {
+ let else_branch = match else_branch {
+ Some(ast::ElseBranch::Block(block)) => format!("else {}", block),
+ Some(ast::ElseBranch::IfExpr(if_expr)) => format!("else {}", if_expr),
+ None => String::new(),
+ };
+ expr_from_text(&format!("if {} {} {}", condition, then_branch, else_branch))
+}
+pub fn expr_for_loop(pat: ast::Pat, expr: ast::Expr, block: ast::BlockExpr) -> ast::Expr {
+ expr_from_text(&format!("for {} in {} {}", pat, expr, block))
+}
+
+pub fn expr_loop(block: ast::BlockExpr) -> ast::Expr {
+ expr_from_text(&format!("loop {}", block))
+}
+
+pub fn expr_prefix(op: SyntaxKind, expr: ast::Expr) -> ast::Expr {
+ let token = token(op);
+ expr_from_text(&format!("{}{}", token, expr))
+}
+pub fn expr_call(f: ast::Expr, arg_list: ast::ArgList) -> ast::Expr {
+ expr_from_text(&format!("{}{}", f, arg_list))
+}
+pub fn expr_method_call(
+ receiver: ast::Expr,
+ method: ast::NameRef,
+ arg_list: ast::ArgList,
+) -> ast::Expr {
+ expr_from_text(&format!("{}.{}{}", receiver, method, arg_list))
+}
+pub fn expr_macro_call(f: ast::Expr, arg_list: ast::ArgList) -> ast::Expr {
+ expr_from_text(&format!("{}!{}", f, arg_list))
+}
+pub fn expr_ref(expr: ast::Expr, exclusive: bool) -> ast::Expr {
+ expr_from_text(&if exclusive { format!("&mut {}", expr) } else { format!("&{}", expr) })
+}
+pub fn expr_closure(pats: impl IntoIterator<Item = ast::Param>, expr: ast::Expr) -> ast::Expr {
+ let params = pats.into_iter().join(", ");
+ expr_from_text(&format!("|{}| {}", params, expr))
+}
+pub fn expr_field(receiver: ast::Expr, field: &str) -> ast::Expr {
+ expr_from_text(&format!("{}.{}", receiver, field))
+}
+pub fn expr_paren(expr: ast::Expr) -> ast::Expr {
+ expr_from_text(&format!("({})", expr))
+}
+pub fn expr_tuple(elements: impl IntoIterator<Item = ast::Expr>) -> ast::Expr {
+ let expr = elements.into_iter().format(", ");
+ expr_from_text(&format!("({})", expr))
+}
+pub fn expr_assignment(lhs: ast::Expr, rhs: ast::Expr) -> ast::Expr {
+ expr_from_text(&format!("{} = {}", lhs, rhs))
+}
+fn expr_from_text(text: &str) -> ast::Expr {
+ ast_from_text(&format!("const C: () = {};", text))
+}
+pub fn expr_let(pattern: ast::Pat, expr: ast::Expr) -> ast::LetExpr {
+ ast_from_text(&format!("const _: () = while let {} = {} {{}};", pattern, expr))
+}
+
+pub fn arg_list(args: impl IntoIterator<Item = ast::Expr>) -> ast::ArgList {
+ ast_from_text(&format!("fn main() {{ ()({}) }}", args.into_iter().format(", ")))
+}
+
+pub fn ident_pat(ref_: bool, mut_: bool, name: ast::Name) -> ast::IdentPat {
+ let mut s = String::from("fn f(");
+ if ref_ {
+ s.push_str("ref ");
+ }
+ if mut_ {
+ s.push_str("mut ");
+ }
+ format_to!(s, "{}", name);
+ s.push_str(": ())");
+ ast_from_text(&s)
+}
+
+pub fn wildcard_pat() -> ast::WildcardPat {
+ return from_text("_");
+
+ fn from_text(text: &str) -> ast::WildcardPat {
+ ast_from_text(&format!("fn f({}: ())", text))
+ }
+}
+
+pub fn literal_pat(lit: &str) -> ast::LiteralPat {
+ return from_text(lit);
+
+ fn from_text(text: &str) -> ast::LiteralPat {
+ ast_from_text(&format!("fn f() {{ match x {{ {} => {{}} }} }}", text))
+ }
+}
+
+/// Creates a tuple of patterns from an iterator of patterns.
+///
+/// Invariant: `pats` must not be empty.
+pub fn tuple_pat(pats: impl IntoIterator<Item = ast::Pat>) -> ast::TuplePat {
+ let mut count: usize = 0;
+ let mut pats_str = pats.into_iter().inspect(|_| count += 1).join(", ");
+ if count == 1 {
+ pats_str.push(',');
+ }
+ return from_text(&format!("({})", pats_str));
+
+ fn from_text(text: &str) -> ast::TuplePat {
+ ast_from_text(&format!("fn f({}: ())", text))
+ }
+}
+
+pub fn tuple_struct_pat(
+ path: ast::Path,
+ pats: impl IntoIterator<Item = ast::Pat>,
+) -> ast::TupleStructPat {
+ let pats_str = pats.into_iter().join(", ");
+ return from_text(&format!("{}({})", path, pats_str));
+
+ fn from_text(text: &str) -> ast::TupleStructPat {
+ ast_from_text(&format!("fn f({}: ())", text))
+ }
+}
+
+pub fn record_pat(path: ast::Path, pats: impl IntoIterator<Item = ast::Pat>) -> ast::RecordPat {
+ let pats_str = pats.into_iter().join(", ");
+ return from_text(&format!("{} {{ {} }}", path, pats_str));
+
+ fn from_text(text: &str) -> ast::RecordPat {
+ ast_from_text(&format!("fn f({}: ())", text))
+ }
+}
+
+pub fn record_pat_with_fields(path: ast::Path, fields: ast::RecordPatFieldList) -> ast::RecordPat {
+ ast_from_text(&format!("fn f({} {}: ()))", path, fields))
+}
+
+pub fn record_pat_field_list(
+ fields: impl IntoIterator<Item = ast::RecordPatField>,
+) -> ast::RecordPatFieldList {
+ let fields = fields.into_iter().join(", ");
+ ast_from_text(&format!("fn f(S {{ {} }}: ()))", fields))
+}
+
+pub fn record_pat_field(name_ref: ast::NameRef, pat: ast::Pat) -> ast::RecordPatField {
+ ast_from_text(&format!("fn f(S {{ {}: {} }}: ()))", name_ref, pat))
+}
+
+pub fn record_pat_field_shorthand(name_ref: ast::NameRef) -> ast::RecordPatField {
+ ast_from_text(&format!("fn f(S {{ {} }}: ()))", name_ref))
+}
+
+/// Returns a `BindPat` if the path has just one segment, a `PathPat` otherwise.
+pub fn path_pat(path: ast::Path) -> ast::Pat {
+ return from_text(&path.to_string());
+ fn from_text(text: &str) -> ast::Pat {
+ ast_from_text(&format!("fn f({}: ())", text))
+ }
+}
+
+pub fn match_arm(
+ pats: impl IntoIterator<Item = ast::Pat>,
+ guard: Option<ast::Expr>,
+ expr: ast::Expr,
+) -> ast::MatchArm {
+ let pats_str = pats.into_iter().join(" | ");
+ return match guard {
+ Some(guard) => from_text(&format!("{} if {} => {}", pats_str, guard, expr)),
+ None => from_text(&format!("{} => {}", pats_str, expr)),
+ };
+
+ fn from_text(text: &str) -> ast::MatchArm {
+ ast_from_text(&format!("fn f() {{ match () {{{}}} }}", text))
+ }
+}
+
+pub fn match_arm_with_guard(
+ pats: impl IntoIterator<Item = ast::Pat>,
+ guard: ast::Expr,
+ expr: ast::Expr,
+) -> ast::MatchArm {
+ let pats_str = pats.into_iter().join(" | ");
+ return from_text(&format!("{} if {} => {}", pats_str, guard, expr));
+
+ fn from_text(text: &str) -> ast::MatchArm {
+ ast_from_text(&format!("fn f() {{ match () {{{}}} }}", text))
+ }
+}
+
+pub fn match_arm_list(arms: impl IntoIterator<Item = ast::MatchArm>) -> ast::MatchArmList {
+ let arms_str = arms
+ .into_iter()
+ .map(|arm| {
+ let needs_comma = arm.expr().map_or(true, |it| !it.is_block_like());
+ let comma = if needs_comma { "," } else { "" };
+ format!(" {}{}\n", arm.syntax(), comma)
+ })
+ .collect::<String>();
+ return from_text(&arms_str);
+
+ fn from_text(text: &str) -> ast::MatchArmList {
+ ast_from_text(&format!("fn f() {{ match () {{\n{}}} }}", text))
+ }
+}
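A sketch combining `match_arm`, `match_arm_list` and `expr_match` (the scrutinee name `it` and the helper name are invented; `syntax` is again assumed to be in scope):

    use syntax::ast::{self, make};

    fn is_some_expr() -> ast::Expr {
        // Builds `match it { Some(_) => true, None => false }` (modulo line breaks).
        let some_pat = make::tuple_struct_pat(
            make::path_from_text("Some"),
            [make::wildcard_pat().into()],
        );
        let some_arm = make::match_arm([some_pat.into()], None, make::expr_literal("true").into());
        let none_arm = make::match_arm(
            [make::path_pat(make::path_from_text("None"))],
            None,
            make::expr_literal("false").into(),
        );
        make::expr_match(
            make::expr_path(make::path_from_text("it")),
            make::match_arm_list([some_arm, none_arm]),
        )
    }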
+
+pub fn where_pred(
+ path: ast::Path,
+ bounds: impl IntoIterator<Item = ast::TypeBound>,
+) -> ast::WherePred {
+ let bounds = bounds.into_iter().join(" + ");
+ return from_text(&format!("{}: {}", path, bounds));
+
+ fn from_text(text: &str) -> ast::WherePred {
+ ast_from_text(&format!("fn f() where {} {{ }}", text))
+ }
+}
+
+pub fn where_clause(preds: impl IntoIterator<Item = ast::WherePred>) -> ast::WhereClause {
+ let preds = preds.into_iter().join(", ");
+ return from_text(preds.as_str());
+
+ fn from_text(text: &str) -> ast::WhereClause {
+ ast_from_text(&format!("fn f() where {} {{ }}", text))
+ }
+}
+
+pub fn let_stmt(
+ pattern: ast::Pat,
+ ty: Option<ast::Type>,
+ initializer: Option<ast::Expr>,
+) -> ast::LetStmt {
+ let mut text = String::new();
+ format_to!(text, "let {}", pattern);
+ if let Some(ty) = ty {
+ format_to!(text, ": {}", ty);
+ }
+ match initializer {
+ Some(it) => format_to!(text, " = {};", it),
+ None => format_to!(text, ";"),
+ };
+ ast_from_text(&format!("fn f() {{ {} }}", text))
+}
+pub fn expr_stmt(expr: ast::Expr) -> ast::ExprStmt {
+ let semi = if expr.is_block_like() { "" } else { ";" };
+ ast_from_text(&format!("fn f() {{ {}{} (); }}", expr, semi))
+}
+
+pub fn item_const(
+ visibility: Option<ast::Visibility>,
+ name: ast::Name,
+ ty: ast::Type,
+ expr: ast::Expr,
+) -> ast::Const {
+ let visibility = match visibility {
+ None => String::new(),
+ Some(it) => format!("{} ", it),
+ };
+ ast_from_text(&format!("{} const {}: {} = {};", visibility, name, ty, expr))
+}
+
+pub fn param(pat: ast::Pat, ty: ast::Type) -> ast::Param {
+ ast_from_text(&format!("fn f({}: {}) {{ }}", pat, ty))
+}
+
+pub fn self_param() -> ast::SelfParam {
+ ast_from_text("fn f(&self) { }")
+}
+
+pub fn ret_type(ty: ast::Type) -> ast::RetType {
+ ast_from_text(&format!("fn f() -> {} {{ }}", ty))
+}
+
+pub fn param_list(
+ self_param: Option<ast::SelfParam>,
+ pats: impl IntoIterator<Item = ast::Param>,
+) -> ast::ParamList {
+ let args = pats.into_iter().join(", ");
+ let list = match self_param {
+ Some(self_param) if args.is_empty() => format!("fn f({}) {{ }}", self_param),
+ Some(self_param) => format!("fn f({}, {}) {{ }}", self_param, args),
+ None => format!("fn f({}) {{ }}", args),
+ };
+ ast_from_text(&list)
+}
+
+pub fn type_param(name: ast::Name, ty: Option<ast::TypeBoundList>) -> ast::TypeParam {
+ let bound = match ty {
+ Some(it) => format!(": {}", it),
+ None => String::new(),
+ };
+ ast_from_text(&format!("fn f<{}{}>() {{ }}", name, bound))
+}
+
+pub fn lifetime_param(lifetime: ast::Lifetime) -> ast::LifetimeParam {
+ ast_from_text(&format!("fn f<{}>() {{ }}", lifetime))
+}
+
+pub fn generic_param_list(
+ pats: impl IntoIterator<Item = ast::GenericParam>,
+) -> ast::GenericParamList {
+ let args = pats.into_iter().join(", ");
+ ast_from_text(&format!("fn f<{}>() {{ }}", args))
+}
+
+pub fn visibility_pub_crate() -> ast::Visibility {
+ ast_from_text("pub(crate) struct S")
+}
+
+pub fn visibility_pub() -> ast::Visibility {
+ ast_from_text("pub struct S")
+}
+
+pub fn tuple_field_list(fields: impl IntoIterator<Item = ast::TupleField>) -> ast::TupleFieldList {
+ let fields = fields.into_iter().join(", ");
+ ast_from_text(&format!("struct f({});", fields))
+}
+
+pub fn record_field_list(
+ fields: impl IntoIterator<Item = ast::RecordField>,
+) -> ast::RecordFieldList {
+ let fields = fields.into_iter().join(", ");
+ ast_from_text(&format!("struct f {{ {} }}", fields))
+}
+
+pub fn tuple_field(visibility: Option<ast::Visibility>, ty: ast::Type) -> ast::TupleField {
+ let visibility = match visibility {
+ None => String::new(),
+ Some(it) => format!("{} ", it),
+ };
+ ast_from_text(&format!("struct f({}{});", visibility, ty))
+}
+
+pub fn variant(name: ast::Name, field_list: Option<ast::FieldList>) -> ast::Variant {
+ let field_list = match field_list {
+ None => String::new(),
+ Some(it) => format!("{}", it),
+ };
+ ast_from_text(&format!("enum f {{ {}{} }}", name, field_list))
+}
+
+pub fn fn_(
+ visibility: Option<ast::Visibility>,
+ fn_name: ast::Name,
+ type_params: Option<ast::GenericParamList>,
+ params: ast::ParamList,
+ body: ast::BlockExpr,
+ ret_type: Option<ast::RetType>,
+ is_async: bool,
+) -> ast::Fn {
+ let type_params = match type_params {
+ Some(type_params) => format!("{}", type_params),
+ None => "".into(),
+ };
+ let ret_type = match ret_type {
+ Some(ret_type) => format!("{} ", ret_type),
+ None => "".into(),
+ };
+ let visibility = match visibility {
+ None => String::new(),
+ Some(it) => format!("{} ", it),
+ };
+
+ let async_literal = if is_async { "async " } else { "" };
+
+ ast_from_text(&format!(
+ "{}{}fn {}{}{} {}{}",
+ visibility, async_literal, fn_name, type_params, params, ret_type, body
+ ))
+}
+
+pub fn struct_(
+ visibility: Option<ast::Visibility>,
+ strukt_name: ast::Name,
+ generic_param_list: Option<ast::GenericParamList>,
+ field_list: ast::FieldList,
+) -> ast::Struct {
+ let semicolon = if matches!(field_list, ast::FieldList::TupleFieldList(_)) { ";" } else { "" };
+ let type_params = generic_param_list.map_or_else(String::new, |it| it.to_string());
+ let visibility = match visibility {
+ None => String::new(),
+ Some(it) => format!("{} ", it),
+ };
+
+ ast_from_text(&format!(
+ "{}struct {}{}{}{}",
+ visibility, strukt_name, type_params, field_list, semicolon
+ ))
+}
+
+#[track_caller]
+fn ast_from_text<N: AstNode>(text: &str) -> N {
+ let parse = SourceFile::parse(text);
+ let node = match parse.tree().syntax().descendants().find_map(N::cast) {
+ Some(it) => it,
+ None => {
+ panic!("Failed to make ast node `{}` from text {}", std::any::type_name::<N>(), text)
+ }
+ };
+ let node = node.clone_subtree();
+ assert_eq!(node.syntax().text_range().start(), 0.into());
+ node
+}
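The trick above — parse a throwaway snippet, then fish the wanted node back out — is easy to reuse outside this module. A non-panicking sketch (helper name invented; `syntax` assumed in scope):

    use syntax::{AstNode, SourceFile};

    fn first_node_of<N: AstNode>(text: &str) -> Option<N> {
        // e.g. `first_node_of::<syntax::ast::RetType>("fn f() -> u32 {}")`
        let parse = SourceFile::parse(text);
        parse.tree().syntax().descendants().find_map(N::cast)
    }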
+
+pub fn token(kind: SyntaxKind) -> SyntaxToken {
+ tokens::SOURCE_FILE
+ .tree()
+ .syntax()
+ .clone_for_update()
+ .descendants_with_tokens()
+ .filter_map(|it| it.into_token())
+ .find(|it| it.kind() == kind)
+ .unwrap_or_else(|| panic!("unhandled token: {:?}", kind))
+}
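For example (illustrative only), grabbing a `,` token — asking for a kind that the snippet below does not contain hits the panic branch:

    use syntax::{ast::make, SyntaxToken, T};

    fn comma_token() -> SyntaxToken {
        // The pre-parsed `tokens::SOURCE_FILE` below contains a comma,
        // so this lookup succeeds.
        make::token(T![,])
    }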
+
+pub mod tokens {
+ use once_cell::sync::Lazy;
+
+ use crate::{ast, AstNode, Parse, SourceFile, SyntaxKind::*, SyntaxToken};
+
+ pub(super) static SOURCE_FILE: Lazy<Parse<SourceFile>> = Lazy::new(|| {
+ SourceFile::parse(
+ "const C: <()>::Item = (1 != 1, 2 == 2, 3 < 3, 4 <= 4, 5 > 5, 6 >= 6, !true, *p)\n;\n\n",
+ )
+ });
+
+ pub fn single_space() -> SyntaxToken {
+ SOURCE_FILE
+ .tree()
+ .syntax()
+ .clone_for_update()
+ .descendants_with_tokens()
+ .filter_map(|it| it.into_token())
+ .find(|it| it.kind() == WHITESPACE && it.text() == " ")
+ .unwrap()
+ }
+
+ pub fn whitespace(text: &str) -> SyntaxToken {
+ assert!(text.trim().is_empty());
+ let sf = SourceFile::parse(text).ok().unwrap();
+ sf.syntax().clone_for_update().first_child_or_token().unwrap().into_token().unwrap()
+ }
+
+ pub fn doc_comment(text: &str) -> SyntaxToken {
+ assert!(!text.trim().is_empty());
+ let sf = SourceFile::parse(text).ok().unwrap();
+ sf.syntax().first_child_or_token().unwrap().into_token().unwrap()
+ }
+
+ pub fn literal(text: &str) -> SyntaxToken {
+ assert_eq!(text.trim(), text);
+ let lit: ast::Literal = super::ast_from_text(&format!("fn f() {{ let _ = {}; }}", text));
+ lit.syntax().first_child_or_token().unwrap().into_token().unwrap()
+ }
+
+ pub fn single_newline() -> SyntaxToken {
+ let res = SOURCE_FILE
+ .tree()
+ .syntax()
+ .clone_for_update()
+ .descendants_with_tokens()
+ .filter_map(|it| it.into_token())
+ .find(|it| it.kind() == WHITESPACE && it.text() == "\n")
+ .unwrap();
+ res.detach();
+ res
+ }
+
+ pub fn blank_line() -> SyntaxToken {
+ SOURCE_FILE
+ .tree()
+ .syntax()
+ .clone_for_update()
+ .descendants_with_tokens()
+ .filter_map(|it| it.into_token())
+ .find(|it| it.kind() == WHITESPACE && it.text() == "\n\n")
+ .unwrap()
+ }
+
+ pub struct WsBuilder(SourceFile);
+
+ impl WsBuilder {
+ pub fn new(text: &str) -> WsBuilder {
+ WsBuilder(SourceFile::parse(text).ok().unwrap())
+ }
+ pub fn ws(&self) -> SyntaxToken {
+ self.0.syntax().first_child_or_token().unwrap().into_token().unwrap()
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs
new file mode 100644
index 000000000..bb92c51e9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs
@@ -0,0 +1,875 @@
+//! Various extension methods to ast Nodes, which are hard to code-generate.
+//! Extensions for various expressions live in the sibling `expr_ext` module.
+//!
+//! These methods should only do simple, shallow tasks related to the syntax of the node itself.
+
+use std::{borrow::Cow, fmt, iter::successors};
+
+use itertools::Itertools;
+use parser::SyntaxKind;
+use rowan::{GreenNodeData, GreenTokenData};
+
+use crate::{
+ ast::{self, support, AstNode, AstToken, HasAttrs, HasGenericParams, HasName, SyntaxNode},
+ NodeOrToken, SmolStr, SyntaxElement, SyntaxToken, TokenText, T,
+};
+
+impl ast::Lifetime {
+ pub fn text(&self) -> TokenText<'_> {
+ text_of_first_token(self.syntax())
+ }
+}
+
+impl ast::Name {
+ pub fn text(&self) -> TokenText<'_> {
+ text_of_first_token(self.syntax())
+ }
+}
+
+impl ast::NameRef {
+ pub fn text(&self) -> TokenText<'_> {
+ text_of_first_token(self.syntax())
+ }
+
+ pub fn as_tuple_field(&self) -> Option<usize> {
+ self.text().parse().ok()
+ }
+
+ pub fn token_kind(&self) -> SyntaxKind {
+ self.syntax().first_token().map_or(SyntaxKind::ERROR, |it| it.kind())
+ }
+}
+
+fn text_of_first_token(node: &SyntaxNode) -> TokenText<'_> {
+ fn first_token(green_ref: &GreenNodeData) -> &GreenTokenData {
+ green_ref.children().next().and_then(NodeOrToken::into_token).unwrap()
+ }
+
+ match node.green() {
+ Cow::Borrowed(green_ref) => TokenText::borrowed(first_token(green_ref).text()),
+ Cow::Owned(green) => TokenText::owned(first_token(&green).to_owned()),
+ }
+}
+
+impl ast::HasModuleItem for ast::StmtList {}
+
+impl ast::BlockExpr {
+ // FIXME: remove all these methods, they belong to ast::StmtList
+ pub fn statements(&self) -> impl Iterator<Item = ast::Stmt> {
+ self.stmt_list().into_iter().flat_map(|it| it.statements())
+ }
+ pub fn tail_expr(&self) -> Option<ast::Expr> {
+ self.stmt_list()?.tail_expr()
+ }
+}
+
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum Macro {
+ MacroRules(ast::MacroRules),
+ MacroDef(ast::MacroDef),
+}
+
+impl From<ast::MacroRules> for Macro {
+ fn from(it: ast::MacroRules) -> Self {
+ Macro::MacroRules(it)
+ }
+}
+
+impl From<ast::MacroDef> for Macro {
+ fn from(it: ast::MacroDef) -> Self {
+ Macro::MacroDef(it)
+ }
+}
+
+impl AstNode for Macro {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ matches!(kind, SyntaxKind::MACRO_RULES | SyntaxKind::MACRO_DEF)
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ SyntaxKind::MACRO_RULES => Macro::MacroRules(ast::MacroRules { syntax }),
+ SyntaxKind::MACRO_DEF => Macro::MacroDef(ast::MacroDef { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ Macro::MacroRules(it) => it.syntax(),
+ Macro::MacroDef(it) => it.syntax(),
+ }
+ }
+}
+
+impl HasName for Macro {
+ fn name(&self) -> Option<ast::Name> {
+ match self {
+ Macro::MacroRules(mac) => mac.name(),
+ Macro::MacroDef(mac) => mac.name(),
+ }
+ }
+}
+
+impl HasAttrs for Macro {}
+
+impl From<ast::AssocItem> for ast::Item {
+ fn from(assoc: ast::AssocItem) -> Self {
+ match assoc {
+ ast::AssocItem::Const(it) => ast::Item::Const(it),
+ ast::AssocItem::Fn(it) => ast::Item::Fn(it),
+ ast::AssocItem::MacroCall(it) => ast::Item::MacroCall(it),
+ ast::AssocItem::TypeAlias(it) => ast::Item::TypeAlias(it),
+ }
+ }
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+pub enum AttrKind {
+ Inner,
+ Outer,
+}
+
+impl AttrKind {
+ /// Returns `true` if the attr_kind is [`Inner`](Self::Inner).
+ pub fn is_inner(&self) -> bool {
+ matches!(self, Self::Inner)
+ }
+
+ /// Returns `true` if the attr_kind is [`Outer`](Self::Outer).
+ pub fn is_outer(&self) -> bool {
+ matches!(self, Self::Outer)
+ }
+}
+
+impl ast::Attr {
+ pub fn as_simple_atom(&self) -> Option<SmolStr> {
+ let meta = self.meta()?;
+ if meta.eq_token().is_some() || meta.token_tree().is_some() {
+ return None;
+ }
+ self.simple_name()
+ }
+
+ pub fn as_simple_call(&self) -> Option<(SmolStr, ast::TokenTree)> {
+ let tt = self.meta()?.token_tree()?;
+ Some((self.simple_name()?, tt))
+ }
+
+ pub fn simple_name(&self) -> Option<SmolStr> {
+ let path = self.meta()?.path()?;
+ match (path.segment(), path.qualifier()) {
+ (Some(segment), None) => Some(segment.syntax().first_token()?.text().into()),
+ _ => None,
+ }
+ }
+
+ pub fn kind(&self) -> AttrKind {
+ match self.excl_token() {
+ Some(_) => AttrKind::Inner,
+ None => AttrKind::Outer,
+ }
+ }
+
+ pub fn path(&self) -> Option<ast::Path> {
+ self.meta()?.path()
+ }
+
+ pub fn expr(&self) -> Option<ast::Expr> {
+ self.meta()?.expr()
+ }
+
+ pub fn token_tree(&self) -> Option<ast::TokenTree> {
+ self.meta()?.token_tree()
+ }
+}
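Roughly how these helpers classify common attributes (a sketch; the parsing helper is invented and `syntax` is assumed to be in scope):

    use syntax::{ast, AstNode, SourceFile};

    fn first_attr(source: &str) -> Option<ast::Attr> {
        let parse = SourceFile::parse(source);
        parse.tree().syntax().descendants().find_map(ast::Attr::cast)
    }

    // `#[test] fn f() {}`:          as_simple_atom() == Some("test"), kind() == Outer
    // `#[derive(Debug)] struct S;`: as_simple_call() == Some(("derive", <token tree>))
    // `#![no_std]`:                 kind() == Inner (note the `!`)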
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum PathSegmentKind {
+ Name(ast::NameRef),
+ Type { type_ref: Option<ast::Type>, trait_ref: Option<ast::PathType> },
+ SelfTypeKw,
+ SelfKw,
+ SuperKw,
+ CrateKw,
+}
+
+impl ast::PathSegment {
+ pub fn parent_path(&self) -> ast::Path {
+ self.syntax()
+ .parent()
+ .and_then(ast::Path::cast)
+ .expect("segments are always nested in paths")
+ }
+
+ pub fn crate_token(&self) -> Option<SyntaxToken> {
+ self.name_ref().and_then(|it| it.crate_token())
+ }
+
+ pub fn self_token(&self) -> Option<SyntaxToken> {
+ self.name_ref().and_then(|it| it.self_token())
+ }
+
+ pub fn self_type_token(&self) -> Option<SyntaxToken> {
+ self.name_ref().and_then(|it| it.Self_token())
+ }
+
+ pub fn super_token(&self) -> Option<SyntaxToken> {
+ self.name_ref().and_then(|it| it.super_token())
+ }
+
+ pub fn kind(&self) -> Option<PathSegmentKind> {
+ let res = if let Some(name_ref) = self.name_ref() {
+ match name_ref.token_kind() {
+ T![Self] => PathSegmentKind::SelfTypeKw,
+ T![self] => PathSegmentKind::SelfKw,
+ T![super] => PathSegmentKind::SuperKw,
+ T![crate] => PathSegmentKind::CrateKw,
+ _ => PathSegmentKind::Name(name_ref),
+ }
+ } else {
+ match self.syntax().first_child_or_token()?.kind() {
+ T![<] => {
+ // <T> or <T as Trait>
+ // T is any TypeRef, Trait has to be a PathType
+ let mut type_refs =
+ self.syntax().children().filter(|node| ast::Type::can_cast(node.kind()));
+ let type_ref = type_refs.next().and_then(ast::Type::cast);
+ let trait_ref = type_refs.next().and_then(ast::PathType::cast);
+ PathSegmentKind::Type { type_ref, trait_ref }
+ }
+ _ => return None,
+ }
+ };
+ Some(res)
+ }
+}
+
+impl ast::Path {
+ pub fn parent_path(&self) -> Option<ast::Path> {
+ self.syntax().parent().and_then(ast::Path::cast)
+ }
+
+ pub fn as_single_segment(&self) -> Option<ast::PathSegment> {
+ match self.qualifier() {
+ Some(_) => None,
+ None => self.segment(),
+ }
+ }
+
+ pub fn as_single_name_ref(&self) -> Option<ast::NameRef> {
+ match self.qualifier() {
+ Some(_) => None,
+ None => self.segment()?.name_ref(),
+ }
+ }
+
+ pub fn first_qualifier_or_self(&self) -> ast::Path {
+ successors(Some(self.clone()), ast::Path::qualifier).last().unwrap()
+ }
+
+ pub fn first_segment(&self) -> Option<ast::PathSegment> {
+ self.first_qualifier_or_self().segment()
+ }
+
+ pub fn segments(&self) -> impl Iterator<Item = ast::PathSegment> + Clone {
+ successors(self.first_segment(), |p| {
+ p.parent_path().parent_path().and_then(|p| p.segment())
+ })
+ }
+
+ pub fn qualifiers(&self) -> impl Iterator<Item = ast::Path> + Clone {
+ successors(self.qualifier(), |p| p.qualifier())
+ }
+
+ pub fn top_path(&self) -> ast::Path {
+ let mut this = self.clone();
+ while let Some(path) = this.parent_path() {
+ this = path;
+ }
+ this
+ }
+}
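A sketch of walking a path with these iterators (the helper and the sample path are invented; `syntax` assumed in scope):

    use syntax::ast;

    fn segment_texts(path: &ast::Path) -> Vec<String> {
        // For `ast::make::path_from_text("std::mem::swap")` this yields
        // ["std", "mem", "swap"], while `qualifiers()` on the same path
        // yields `std::mem` and then `std`.
        path.segments().map(|segment| segment.to_string()).collect()
    }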
+
+impl ast::Use {
+ pub fn is_simple_glob(&self) -> bool {
+ self.use_tree().map_or(false, |use_tree| {
+ use_tree.use_tree_list().is_none() && use_tree.star_token().is_some()
+ })
+ }
+}
+
+impl ast::UseTree {
+ pub fn is_simple_path(&self) -> bool {
+ self.use_tree_list().is_none() && self.star_token().is_none()
+ }
+}
+
+impl ast::UseTreeList {
+ pub fn parent_use_tree(&self) -> ast::UseTree {
+ self.syntax()
+ .parent()
+ .and_then(ast::UseTree::cast)
+ .expect("UseTreeLists are always nested in UseTrees")
+ }
+
+ pub fn has_inner_comment(&self) -> bool {
+ self.syntax()
+ .children_with_tokens()
+ .filter_map(|it| it.into_token())
+ .find_map(ast::Comment::cast)
+ .is_some()
+ }
+}
+
+impl ast::Impl {
+ pub fn self_ty(&self) -> Option<ast::Type> {
+ match self.target() {
+ (Some(t), None) | (_, Some(t)) => Some(t),
+ _ => None,
+ }
+ }
+
+ pub fn trait_(&self) -> Option<ast::Type> {
+ match self.target() {
+ (Some(t), Some(_)) => Some(t),
+ _ => None,
+ }
+ }
+
+ fn target(&self) -> (Option<ast::Type>, Option<ast::Type>) {
+ let mut types = support::children(self.syntax());
+ let first = types.next();
+ let second = types.next();
+ (first, second)
+ }
+
+ pub fn for_trait_name_ref(name_ref: &ast::NameRef) -> Option<ast::Impl> {
+ let this = name_ref.syntax().ancestors().find_map(ast::Impl::cast)?;
+ if this.trait_()?.syntax().text_range().start() == name_ref.syntax().text_range().start() {
+ Some(this)
+ } else {
+ None
+ }
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum StructKind {
+ Record(ast::RecordFieldList),
+ Tuple(ast::TupleFieldList),
+ Unit,
+}
+
+impl StructKind {
+ fn from_node<N: AstNode>(node: &N) -> StructKind {
+ if let Some(nfdl) = support::child::<ast::RecordFieldList>(node.syntax()) {
+ StructKind::Record(nfdl)
+ } else if let Some(pfl) = support::child::<ast::TupleFieldList>(node.syntax()) {
+ StructKind::Tuple(pfl)
+ } else {
+ StructKind::Unit
+ }
+ }
+}
+
+impl ast::Struct {
+ pub fn kind(&self) -> StructKind {
+ StructKind::from_node(self)
+ }
+}
+
+impl ast::RecordExprField {
+ pub fn for_field_name(field_name: &ast::NameRef) -> Option<ast::RecordExprField> {
+ let candidate = Self::for_name_ref(field_name)?;
+ if candidate.field_name().as_ref() == Some(field_name) {
+ Some(candidate)
+ } else {
+ None
+ }
+ }
+
+ pub fn for_name_ref(name_ref: &ast::NameRef) -> Option<ast::RecordExprField> {
+ let syn = name_ref.syntax();
+ syn.parent()
+ .and_then(ast::RecordExprField::cast)
+ .or_else(|| syn.ancestors().nth(4).and_then(ast::RecordExprField::cast))
+ }
+
+ /// Deals with field init shorthand
+ pub fn field_name(&self) -> Option<ast::NameRef> {
+ if let Some(name_ref) = self.name_ref() {
+ return Some(name_ref);
+ }
+ if let ast::Expr::PathExpr(expr) = self.expr()? {
+ let path = expr.path()?;
+ let segment = path.segment()?;
+ let name_ref = segment.name_ref()?;
+ if path.qualifier().is_none() {
+ return Some(name_ref);
+ }
+ }
+ None
+ }
+}
+
+#[derive(Debug, Clone)]
+pub enum NameLike {
+ NameRef(ast::NameRef),
+ Name(ast::Name),
+ Lifetime(ast::Lifetime),
+}
+
+impl NameLike {
+ pub fn as_name_ref(&self) -> Option<&ast::NameRef> {
+ match self {
+ NameLike::NameRef(name_ref) => Some(name_ref),
+ _ => None,
+ }
+ }
+ pub fn as_lifetime(&self) -> Option<&ast::Lifetime> {
+ match self {
+ NameLike::Lifetime(lifetime) => Some(lifetime),
+ _ => None,
+ }
+ }
+ pub fn text(&self) -> TokenText<'_> {
+ match self {
+ NameLike::NameRef(name_ref) => name_ref.text(),
+ NameLike::Name(name) => name.text(),
+ NameLike::Lifetime(lifetime) => lifetime.text(),
+ }
+ }
+}
+
+impl ast::AstNode for NameLike {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ matches!(kind, SyntaxKind::NAME | SyntaxKind::NAME_REF | SyntaxKind::LIFETIME)
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ SyntaxKind::NAME => NameLike::Name(ast::Name { syntax }),
+ SyntaxKind::NAME_REF => NameLike::NameRef(ast::NameRef { syntax }),
+ SyntaxKind::LIFETIME => NameLike::Lifetime(ast::Lifetime { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ NameLike::NameRef(it) => it.syntax(),
+ NameLike::Name(it) => it.syntax(),
+ NameLike::Lifetime(it) => it.syntax(),
+ }
+ }
+}
+
+const _: () = {
+ use ast::{Lifetime, Name, NameRef};
+ stdx::impl_from!(NameRef, Name, Lifetime for NameLike);
+};
+
+#[derive(Debug, Clone, PartialEq)]
+pub enum NameOrNameRef {
+ Name(ast::Name),
+ NameRef(ast::NameRef),
+}
+
+impl fmt::Display for NameOrNameRef {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ NameOrNameRef::Name(it) => fmt::Display::fmt(it, f),
+ NameOrNameRef::NameRef(it) => fmt::Display::fmt(it, f),
+ }
+ }
+}
+
+impl NameOrNameRef {
+ pub fn text(&self) -> TokenText<'_> {
+ match self {
+ NameOrNameRef::Name(name) => name.text(),
+ NameOrNameRef::NameRef(name_ref) => name_ref.text(),
+ }
+ }
+}
+
+impl ast::RecordPatField {
+ pub fn for_field_name_ref(field_name: &ast::NameRef) -> Option<ast::RecordPatField> {
+ let candidate = field_name.syntax().parent().and_then(ast::RecordPatField::cast)?;
+ match candidate.field_name()? {
+ NameOrNameRef::NameRef(name_ref) if name_ref == *field_name => Some(candidate),
+ _ => None,
+ }
+ }
+
+ pub fn for_field_name(field_name: &ast::Name) -> Option<ast::RecordPatField> {
+ let candidate =
+ field_name.syntax().ancestors().nth(2).and_then(ast::RecordPatField::cast)?;
+ match candidate.field_name()? {
+ NameOrNameRef::Name(name) if name == *field_name => Some(candidate),
+ _ => None,
+ }
+ }
+
+ pub fn parent_record_pat(&self) -> ast::RecordPat {
+ self.syntax().ancestors().find_map(ast::RecordPat::cast).unwrap()
+ }
+
+ /// Deals with field init shorthand
+ pub fn field_name(&self) -> Option<NameOrNameRef> {
+ if let Some(name_ref) = self.name_ref() {
+ return Some(NameOrNameRef::NameRef(name_ref));
+ }
+ match self.pat() {
+ Some(ast::Pat::IdentPat(pat)) => {
+ let name = pat.name()?;
+ Some(NameOrNameRef::Name(name))
+ }
+ Some(ast::Pat::BoxPat(pat)) => match pat.pat() {
+ Some(ast::Pat::IdentPat(pat)) => {
+ let name = pat.name()?;
+ Some(NameOrNameRef::Name(name))
+ }
+ _ => None,
+ },
+ _ => None,
+ }
+ }
+}
+
+impl ast::Variant {
+ pub fn parent_enum(&self) -> ast::Enum {
+ self.syntax()
+ .parent()
+ .and_then(|it| it.parent())
+ .and_then(ast::Enum::cast)
+ .expect("EnumVariants are always nested in Enums")
+ }
+ pub fn kind(&self) -> StructKind {
+ StructKind::from_node(self)
+ }
+}
+
+impl ast::Item {
+ pub fn generic_param_list(&self) -> Option<ast::GenericParamList> {
+ ast::AnyHasGenericParams::cast(self.syntax().clone())?.generic_param_list()
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum FieldKind {
+ Name(ast::NameRef),
+ Index(SyntaxToken),
+}
+
+impl ast::FieldExpr {
+ pub fn index_token(&self) -> Option<SyntaxToken> {
+ self.syntax
+ .children_with_tokens()
+ // FIXME: Accepting floats here to reject them in validation later
+ .find(|c| c.kind() == SyntaxKind::INT_NUMBER || c.kind() == SyntaxKind::FLOAT_NUMBER)
+ .as_ref()
+ .and_then(SyntaxElement::as_token)
+ .cloned()
+ }
+
+ pub fn field_access(&self) -> Option<FieldKind> {
+ match self.name_ref() {
+ Some(nr) => Some(FieldKind::Name(nr)),
+ None => self.index_token().map(FieldKind::Index),
+ }
+ }
+}
+
+pub struct SlicePatComponents {
+ pub prefix: Vec<ast::Pat>,
+ pub slice: Option<ast::Pat>,
+ pub suffix: Vec<ast::Pat>,
+}
+
+impl ast::SlicePat {
+ pub fn components(&self) -> SlicePatComponents {
+ let mut args = self.pats().peekable();
+ let prefix = args
+ .peeking_take_while(|p| match p {
+ ast::Pat::RestPat(_) => false,
+ ast::Pat::IdentPat(bp) => !matches!(bp.pat(), Some(ast::Pat::RestPat(_))),
+ ast::Pat::RefPat(rp) => match rp.pat() {
+ Some(ast::Pat::RestPat(_)) => false,
+ Some(ast::Pat::IdentPat(bp)) => !matches!(bp.pat(), Some(ast::Pat::RestPat(_))),
+ _ => true,
+ },
+ _ => true,
+ })
+ .collect();
+ let slice = args.next();
+ let suffix = args.collect();
+
+ SlicePatComponents { prefix, slice, suffix }
+ }
+}
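For a pattern like `[head, mid @ .., tail]` the split works out as follows (a sketch that parses a throwaway snippet; `syntax` assumed in scope):

    use syntax::{ast, AstNode, SourceFile};

    fn slice_pat_split() {
        let parse = SourceFile::parse("fn f() { let [head, mid @ .., tail] = xs; }");
        let pat = parse.tree().syntax().descendants().find_map(ast::SlicePat::cast).unwrap();
        let parts = pat.components();
        assert_eq!(parts.prefix.len(), 1); // `head`
        assert!(parts.slice.is_some());    // `mid @ ..`
        assert_eq!(parts.suffix.len(), 1); // `tail`
    }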
+
+impl ast::IdentPat {
+ pub fn is_simple_ident(&self) -> bool {
+ self.at_token().is_none()
+ && self.mut_token().is_none()
+ && self.ref_token().is_none()
+ && self.pat().is_none()
+ }
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
+pub enum SelfParamKind {
+ /// self
+ Owned,
+ /// &self
+ Ref,
+ /// &mut self
+ MutRef,
+}
+
+impl ast::SelfParam {
+ pub fn kind(&self) -> SelfParamKind {
+ if self.amp_token().is_some() {
+ if self.mut_token().is_some() {
+ SelfParamKind::MutRef
+ } else {
+ SelfParamKind::Ref
+ }
+ } else {
+ SelfParamKind::Owned
+ }
+ }
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, Hash)]
+pub enum TypeBoundKind {
+ /// Trait
+ PathType(ast::PathType),
+ /// for<'a> ...
+ ForType(ast::ForType),
+ /// 'a
+ Lifetime(ast::Lifetime),
+}
+
+impl ast::TypeBound {
+ pub fn kind(&self) -> TypeBoundKind {
+ if let Some(path_type) = support::children(self.syntax()).next() {
+ TypeBoundKind::PathType(path_type)
+ } else if let Some(for_type) = support::children(self.syntax()).next() {
+ TypeBoundKind::ForType(for_type)
+ } else if let Some(lifetime) = self.lifetime() {
+ TypeBoundKind::Lifetime(lifetime)
+ } else {
+ unreachable!()
+ }
+ }
+}
+
+#[derive(Debug, Clone)]
+pub enum TypeOrConstParam {
+ Type(ast::TypeParam),
+ Const(ast::ConstParam),
+}
+
+impl TypeOrConstParam {
+ pub fn name(&self) -> Option<ast::Name> {
+ match self {
+ TypeOrConstParam::Type(x) => x.name(),
+ TypeOrConstParam::Const(x) => x.name(),
+ }
+ }
+}
+
+pub enum VisibilityKind {
+ In(ast::Path),
+ PubCrate,
+ PubSuper,
+ PubSelf,
+ Pub,
+}
+
+impl ast::Visibility {
+ pub fn kind(&self) -> VisibilityKind {
+ match self.path() {
+ Some(path) => {
+ if let Some(segment) =
+ path.as_single_segment().filter(|it| it.coloncolon_token().is_none())
+ {
+ if segment.crate_token().is_some() {
+ return VisibilityKind::PubCrate;
+ } else if segment.super_token().is_some() {
+ return VisibilityKind::PubSuper;
+ } else if segment.self_token().is_some() {
+ return VisibilityKind::PubSelf;
+ }
+ }
+ VisibilityKind::In(path)
+ }
+ None => VisibilityKind::Pub,
+ }
+ }
+}
+
+impl ast::LifetimeParam {
+ pub fn lifetime_bounds(&self) -> impl Iterator<Item = SyntaxToken> {
+ self.syntax()
+ .children_with_tokens()
+ .filter_map(|it| it.into_token())
+ .skip_while(|x| x.kind() != T![:])
+ .filter(|it| it.kind() == T![lifetime_ident])
+ }
+}
+
+impl ast::Module {
+    /// Returns the parent ast::Module. Unlike the semantic parent, this only
+    /// considers parent declarations in the AST.
+ pub fn parent(&self) -> Option<ast::Module> {
+ self.syntax().ancestors().nth(2).and_then(ast::Module::cast)
+ }
+}
+
+impl ast::RangePat {
+ pub fn start(&self) -> Option<ast::Pat> {
+ self.syntax()
+ .children_with_tokens()
+ .take_while(|it| !(it.kind() == T![..] || it.kind() == T![..=]))
+ .filter_map(|it| it.into_node())
+ .find_map(ast::Pat::cast)
+ }
+
+ pub fn end(&self) -> Option<ast::Pat> {
+ self.syntax()
+ .children_with_tokens()
+ .skip_while(|it| !(it.kind() == T![..] || it.kind() == T![..=]))
+ .filter_map(|it| it.into_node())
+ .find_map(ast::Pat::cast)
+ }
+}
+
+impl ast::TokenTree {
+ pub fn token_trees_and_tokens(
+ &self,
+ ) -> impl Iterator<Item = NodeOrToken<ast::TokenTree, SyntaxToken>> {
+ self.syntax().children_with_tokens().filter_map(|not| match not {
+ NodeOrToken::Node(node) => ast::TokenTree::cast(node).map(NodeOrToken::Node),
+ NodeOrToken::Token(t) => Some(NodeOrToken::Token(t)),
+ })
+ }
+
+ pub fn left_delimiter_token(&self) -> Option<SyntaxToken> {
+ self.syntax()
+ .first_child_or_token()?
+ .into_token()
+ .filter(|it| matches!(it.kind(), T!['{'] | T!['('] | T!['[']))
+ }
+
+ pub fn right_delimiter_token(&self) -> Option<SyntaxToken> {
+ self.syntax()
+ .last_child_or_token()?
+ .into_token()
+ .filter(|it| matches!(it.kind(), T!['}'] | T![')'] | T![']']))
+ }
+
+ pub fn parent_meta(&self) -> Option<ast::Meta> {
+ self.syntax().parent().and_then(ast::Meta::cast)
+ }
+}
+
+impl ast::Meta {
+ pub fn parent_attr(&self) -> Option<ast::Attr> {
+ self.syntax().parent().and_then(ast::Attr::cast)
+ }
+}
+
+impl ast::GenericArgList {
+ pub fn lifetime_args(&self) -> impl Iterator<Item = ast::LifetimeArg> {
+ self.generic_args().filter_map(|arg| match arg {
+ ast::GenericArg::LifetimeArg(it) => Some(it),
+ _ => None,
+ })
+ }
+}
+
+impl ast::GenericParamList {
+ pub fn lifetime_params(&self) -> impl Iterator<Item = ast::LifetimeParam> {
+ self.generic_params().filter_map(|param| match param {
+ ast::GenericParam::LifetimeParam(it) => Some(it),
+ ast::GenericParam::TypeParam(_) | ast::GenericParam::ConstParam(_) => None,
+ })
+ }
+ pub fn type_or_const_params(&self) -> impl Iterator<Item = ast::TypeOrConstParam> {
+ self.generic_params().filter_map(|param| match param {
+ ast::GenericParam::TypeParam(it) => Some(ast::TypeOrConstParam::Type(it)),
+ ast::GenericParam::LifetimeParam(_) => None,
+ ast::GenericParam::ConstParam(it) => Some(ast::TypeOrConstParam::Const(it)),
+ })
+ }
+}
+
+impl ast::ForExpr {
+ pub fn iterable(&self) -> Option<ast::Expr> {
+ // If the iterable is a BlockExpr, check if the body is missing.
+        // If it is, assume the iterable is the expression that is missing instead.
+ let mut exprs = support::children(self.syntax());
+ let first = exprs.next();
+ match first {
+ Some(ast::Expr::BlockExpr(_)) => exprs.next().and(first),
+ first => first,
+ }
+ }
+}
+
+impl ast::HasLoopBody for ast::ForExpr {
+ fn loop_body(&self) -> Option<ast::BlockExpr> {
+ let mut exprs = support::children(self.syntax());
+ let first = exprs.next();
+ let second = exprs.next();
+ second.or(first)
+ }
+}
+
+impl ast::WhileExpr {
+ pub fn condition(&self) -> Option<ast::Expr> {
+ // If the condition is a BlockExpr, check if the body is missing.
+        // If it is, assume the condition is the expression that is missing instead.
+ let mut exprs = support::children(self.syntax());
+ let first = exprs.next();
+ match first {
+ Some(ast::Expr::BlockExpr(_)) => exprs.next().and(first),
+ first => first,
+ }
+ }
+}
+
+impl ast::HasLoopBody for ast::WhileExpr {
+ fn loop_body(&self) -> Option<ast::BlockExpr> {
+ let mut exprs = support::children(self.syntax());
+ let first = exprs.next();
+ let second = exprs.next();
+ second.or(first)
+ }
+}
+
+impl ast::HasAttrs for ast::AnyHasDocComments {}
+
+impl From<ast::Adt> for ast::Item {
+ fn from(it: ast::Adt) -> Self {
+ match it {
+ ast::Adt::Enum(it) => ast::Item::Enum(it),
+ ast::Adt::Struct(it) => ast::Item::Struct(it),
+ ast::Adt::Union(it) => ast::Item::Union(it),
+ }
+ }
+}
+
+impl ast::IfExpr {
+ pub fn condition(&self) -> Option<ast::Expr> {
+ support::child(&self.syntax)
+ }
+}
+
+impl ast::MatchGuard {
+ pub fn condition(&self) -> Option<ast::Expr> {
+ support::child(&self.syntax)
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/operators.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/operators.rs
new file mode 100644
index 000000000..a687ba0b7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/operators.rs
@@ -0,0 +1,122 @@
+//! Defines a bunch of data-less enums for unary and binary operators.
+//!
+//! Types here don't know about AST, this allows re-using them for both AST and
+//! HIR.
+use std::fmt;
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
+pub enum RangeOp {
+ /// `..`
+ Exclusive,
+ /// `..=`
+ Inclusive,
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
+pub enum UnaryOp {
+ /// `*`
+ Deref,
+ /// `!`
+ Not,
+ /// `-`
+ Neg,
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
+pub enum BinaryOp {
+ LogicOp(LogicOp),
+ ArithOp(ArithOp),
+ CmpOp(CmpOp),
+ Assignment { op: Option<ArithOp> },
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
+pub enum LogicOp {
+ And,
+ Or,
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
+pub enum CmpOp {
+ Eq { negated: bool },
+ Ord { ordering: Ordering, strict: bool },
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
+pub enum Ordering {
+ Less,
+ Greater,
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
+pub enum ArithOp {
+ Add,
+ Mul,
+ Sub,
+ Div,
+ Rem,
+ Shl,
+ Shr,
+ BitXor,
+ BitOr,
+ BitAnd,
+}
+
+impl fmt::Display for LogicOp {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let res = match self {
+ LogicOp::And => "&&",
+ LogicOp::Or => "||",
+ };
+ f.write_str(res)
+ }
+}
+
+impl fmt::Display for ArithOp {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let res = match self {
+ ArithOp::Add => "+",
+ ArithOp::Mul => "*",
+ ArithOp::Sub => "-",
+ ArithOp::Div => "/",
+ ArithOp::Rem => "%",
+ ArithOp::Shl => "<<",
+ ArithOp::Shr => ">>",
+ ArithOp::BitXor => "^",
+ ArithOp::BitOr => "|",
+ ArithOp::BitAnd => "&",
+ };
+ f.write_str(res)
+ }
+}
+
+impl fmt::Display for CmpOp {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let res = match self {
+ CmpOp::Eq { negated: false } => "==",
+ CmpOp::Eq { negated: true } => "!=",
+ CmpOp::Ord { ordering: Ordering::Less, strict: false } => "<=",
+ CmpOp::Ord { ordering: Ordering::Less, strict: true } => "<",
+ CmpOp::Ord { ordering: Ordering::Greater, strict: false } => ">=",
+ CmpOp::Ord { ordering: Ordering::Greater, strict: true } => ">",
+ };
+ f.write_str(res)
+ }
+}
+
+impl fmt::Display for BinaryOp {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ BinaryOp::LogicOp(op) => fmt::Display::fmt(op, f),
+ BinaryOp::ArithOp(op) => fmt::Display::fmt(op, f),
+ BinaryOp::CmpOp(op) => fmt::Display::fmt(op, f),
+            BinaryOp::Assignment { op } => {
+                if let Some(op) = op {
+                    fmt::Display::fmt(op, f)?;
+                }
+                f.write_str("=")?;
+                Ok(())
+            }
+ }
+ }
+}
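These `Display` impls render each operator exactly as it appears in source, which is what `make::expr_bin_op` relies on. A sketch (the `ast::` re-export paths are an assumption here):

    use syntax::ast::{ArithOp, BinaryOp, CmpOp, LogicOp, Ordering};

    fn render_ops() {
        assert_eq!(BinaryOp::LogicOp(LogicOp::And).to_string(), "&&");
        assert_eq!(BinaryOp::CmpOp(CmpOp::Eq { negated: true }).to_string(), "!=");
        assert_eq!(
            BinaryOp::CmpOp(CmpOp::Ord { ordering: Ordering::Less, strict: false }).to_string(),
            "<="
        );
        // Compound assignment renders as the arithmetic operator followed by `=`.
        assert_eq!(BinaryOp::Assignment { op: Some(ArithOp::Add) }.to_string(), "+=");
    }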
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs
new file mode 100644
index 000000000..28976d837
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs
@@ -0,0 +1,472 @@
+//! There are many AstNodes, but only a few tokens, so we hand-write them here.
+
+use std::borrow::Cow;
+
+use rustc_lexer::unescape::{unescape_byte, unescape_char, unescape_literal, Mode};
+
+use crate::{
+ ast::{self, AstToken},
+ TextRange, TextSize,
+};
+
+impl ast::Comment {
+ pub fn kind(&self) -> CommentKind {
+ CommentKind::from_text(self.text())
+ }
+
+ pub fn is_doc(&self) -> bool {
+ self.kind().doc.is_some()
+ }
+
+ pub fn is_inner(&self) -> bool {
+ self.kind().doc == Some(CommentPlacement::Inner)
+ }
+
+ pub fn is_outer(&self) -> bool {
+ self.kind().doc == Some(CommentPlacement::Outer)
+ }
+
+ pub fn prefix(&self) -> &'static str {
+ let &(prefix, _kind) = CommentKind::BY_PREFIX
+ .iter()
+ .find(|&(prefix, kind)| self.kind() == *kind && self.text().starts_with(prefix))
+ .unwrap();
+ prefix
+ }
+
+ /// Returns the textual content of a doc comment node as a single string with prefix and suffix
+ /// removed.
+ pub fn doc_comment(&self) -> Option<&str> {
+ let kind = self.kind();
+ match kind {
+ CommentKind { shape, doc: Some(_) } => {
+ let prefix = kind.prefix();
+ let text = &self.text()[prefix.len()..];
+ let text = if shape == CommentShape::Block {
+ text.strip_suffix("*/").unwrap_or(text)
+ } else {
+ text
+ };
+ Some(text)
+ }
+ _ => None,
+ }
+ }
+}
+
+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+pub struct CommentKind {
+ pub shape: CommentShape,
+ pub doc: Option<CommentPlacement>,
+}
+
+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+pub enum CommentShape {
+ Line,
+ Block,
+}
+
+impl CommentShape {
+ pub fn is_line(self) -> bool {
+ self == CommentShape::Line
+ }
+
+ pub fn is_block(self) -> bool {
+ self == CommentShape::Block
+ }
+}
+
+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+pub enum CommentPlacement {
+ Inner,
+ Outer,
+}
+
+impl CommentKind {
+ const BY_PREFIX: [(&'static str, CommentKind); 9] = [
+ ("/**/", CommentKind { shape: CommentShape::Block, doc: None }),
+ ("/***", CommentKind { shape: CommentShape::Block, doc: None }),
+ ("////", CommentKind { shape: CommentShape::Line, doc: None }),
+ ("///", CommentKind { shape: CommentShape::Line, doc: Some(CommentPlacement::Outer) }),
+ ("//!", CommentKind { shape: CommentShape::Line, doc: Some(CommentPlacement::Inner) }),
+ ("/**", CommentKind { shape: CommentShape::Block, doc: Some(CommentPlacement::Outer) }),
+ ("/*!", CommentKind { shape: CommentShape::Block, doc: Some(CommentPlacement::Inner) }),
+ ("//", CommentKind { shape: CommentShape::Line, doc: None }),
+ ("/*", CommentKind { shape: CommentShape::Block, doc: None }),
+ ];
+
+ pub(crate) fn from_text(text: &str) -> CommentKind {
+ let &(_prefix, kind) = CommentKind::BY_PREFIX
+ .iter()
+ .find(|&(prefix, _kind)| text.starts_with(prefix))
+ .unwrap();
+ kind
+ }
+
+ pub fn prefix(&self) -> &'static str {
+ let &(prefix, _) =
+ CommentKind::BY_PREFIX.iter().rev().find(|(_, kind)| kind == self).unwrap();
+ prefix
+ }
+}
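The prefix table is ordered longest-first, so `////` is classified as a plain line comment before `///` gets a chance to match. A sketch of observing this from outside the crate (helper invented; `syntax` assumed in scope):

    use syntax::{ast::{self, AstToken}, SourceFile};

    fn first_comment(text: &str) -> Option<ast::Comment> {
        // Parse a snippet that is just the comment and pull the token back out.
        let parse = SourceFile::parse(text);
        parse
            .tree()
            .syntax()
            .descendants_with_tokens()
            .filter_map(|element| element.into_token())
            .find_map(ast::Comment::cast)
    }

    // first_comment("/// docs")  -> is_doc() && is_outer(), prefix() == "///"
    // first_comment("//// line") -> !is_doc()              (the longer `////` wins)
    // first_comment("/*! hi */") -> is_doc() && is_inner(), prefix() == "/*!"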
+
+impl ast::Whitespace {
+ pub fn spans_multiple_lines(&self) -> bool {
+ let text = self.text();
+ text.find('\n').map_or(false, |idx| text[idx + 1..].contains('\n'))
+ }
+}
+
+pub struct QuoteOffsets {
+ pub quotes: (TextRange, TextRange),
+ pub contents: TextRange,
+}
+
+impl QuoteOffsets {
+ fn new(literal: &str) -> Option<QuoteOffsets> {
+ let left_quote = literal.find('"')?;
+ let right_quote = literal.rfind('"')?;
+ if left_quote == right_quote {
+ // `literal` only contains one quote
+ return None;
+ }
+
+ let start = TextSize::from(0);
+ let left_quote = TextSize::try_from(left_quote).unwrap() + TextSize::of('"');
+ let right_quote = TextSize::try_from(right_quote).unwrap();
+ let end = TextSize::of(literal);
+
+ let res = QuoteOffsets {
+ quotes: (TextRange::new(start, left_quote), TextRange::new(right_quote, end)),
+ contents: TextRange::new(left_quote, right_quote),
+ };
+ Some(res)
+ }
+}
+
+pub trait IsString: AstToken {
+ fn quote_offsets(&self) -> Option<QuoteOffsets> {
+ let text = self.text();
+ let offsets = QuoteOffsets::new(text)?;
+ let o = self.syntax().text_range().start();
+ let offsets = QuoteOffsets {
+ quotes: (offsets.quotes.0 + o, offsets.quotes.1 + o),
+ contents: offsets.contents + o,
+ };
+ Some(offsets)
+ }
+ fn text_range_between_quotes(&self) -> Option<TextRange> {
+ self.quote_offsets().map(|it| it.contents)
+ }
+ fn open_quote_text_range(&self) -> Option<TextRange> {
+ self.quote_offsets().map(|it| it.quotes.0)
+ }
+ fn close_quote_text_range(&self) -> Option<TextRange> {
+ self.quote_offsets().map(|it| it.quotes.1)
+ }
+ fn escaped_char_ranges(
+ &self,
+ cb: &mut dyn FnMut(TextRange, Result<char, rustc_lexer::unescape::EscapeError>),
+ ) {
+ let text_range_no_quotes = match self.text_range_between_quotes() {
+ Some(it) => it,
+ None => return,
+ };
+
+ let start = self.syntax().text_range().start();
+ let text = &self.text()[text_range_no_quotes - start];
+ let offset = text_range_no_quotes.start() - start;
+
+ unescape_literal(text, Mode::Str, &mut |range, unescaped_char| {
+ let text_range =
+ TextRange::new(range.start.try_into().unwrap(), range.end.try_into().unwrap());
+ cb(text_range + offset, unescaped_char);
+ });
+ }
+}
+
+impl IsString for ast::String {}
+
+impl ast::String {
+ pub fn is_raw(&self) -> bool {
+ self.text().starts_with('r')
+ }
+ pub fn map_range_up(&self, range: TextRange) -> Option<TextRange> {
+ let contents_range = self.text_range_between_quotes()?;
+ assert!(TextRange::up_to(contents_range.len()).contains_range(range));
+ Some(range + contents_range.start())
+ }
+
+ pub fn value(&self) -> Option<Cow<'_, str>> {
+ if self.is_raw() {
+ let text = self.text();
+ let text =
+ &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
+ return Some(Cow::Borrowed(text));
+ }
+
+ let text = self.text();
+ let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
+
+ let mut buf = String::new();
+ let mut text_iter = text.chars();
+ let mut has_error = false;
+ unescape_literal(text, Mode::Str, &mut |char_range, unescaped_char| match (
+ unescaped_char,
+ buf.capacity() == 0,
+ ) {
+ (Ok(c), false) => buf.push(c),
+ (Ok(c), true) if char_range.len() == 1 && Some(c) == text_iter.next() => (),
+ (Ok(c), true) => {
+ buf.reserve_exact(text.len());
+ buf.push_str(&text[..char_range.start]);
+ buf.push(c);
+ }
+ (Err(_), _) => has_error = true,
+ });
+
+ match (has_error, buf.capacity() == 0) {
+ (true, _) => None,
+ (false, true) => Some(Cow::Borrowed(text)),
+ (false, false) => Some(Cow::Owned(buf)),
+ }
+ }
+}
+
+impl IsString for ast::ByteString {}
+
+impl ast::ByteString {
+ pub fn is_raw(&self) -> bool {
+ self.text().starts_with("br")
+ }
+
+ pub fn value(&self) -> Option<Cow<'_, [u8]>> {
+ if self.is_raw() {
+ let text = self.text();
+ let text =
+ &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
+ return Some(Cow::Borrowed(text.as_bytes()));
+ }
+
+ let text = self.text();
+ let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
+
+ let mut buf: Vec<u8> = Vec::new();
+ let mut text_iter = text.chars();
+ let mut has_error = false;
+ unescape_literal(text, Mode::ByteStr, &mut |char_range, unescaped_char| match (
+ unescaped_char,
+ buf.capacity() == 0,
+ ) {
+ (Ok(c), false) => buf.push(c as u8),
+ (Ok(c), true) if char_range.len() == 1 && Some(c) == text_iter.next() => (),
+ (Ok(c), true) => {
+ buf.reserve_exact(text.len());
+ buf.extend_from_slice(text[..char_range.start].as_bytes());
+ buf.push(c as u8);
+ }
+ (Err(_), _) => has_error = true,
+ });
+
+ match (has_error, buf.capacity() == 0) {
+ (true, _) => None,
+ (false, true) => Some(Cow::Borrowed(text.as_bytes())),
+ (false, false) => Some(Cow::Owned(buf)),
+ }
+ }
+}
+
+impl ast::IntNumber {
+ pub fn radix(&self) -> Radix {
+ match self.text().get(..2).unwrap_or_default() {
+ "0b" => Radix::Binary,
+ "0o" => Radix::Octal,
+ "0x" => Radix::Hexadecimal,
+ _ => Radix::Decimal,
+ }
+ }
+
+ pub fn split_into_parts(&self) -> (&str, &str, &str) {
+ let radix = self.radix();
+ let (prefix, mut text) = self.text().split_at(radix.prefix_len());
+
+ let is_suffix_start: fn(&(usize, char)) -> bool = match radix {
+ Radix::Hexadecimal => |(_, c)| matches!(c, 'g'..='z' | 'G'..='Z'),
+ _ => |(_, c)| c.is_ascii_alphabetic(),
+ };
+
+ let mut suffix = "";
+ if let Some((suffix_start, _)) = text.char_indices().find(is_suffix_start) {
+ let (text2, suffix2) = text.split_at(suffix_start);
+ text = text2;
+ suffix = suffix2;
+ };
+
+ (prefix, text, suffix)
+ }
+
+ pub fn value(&self) -> Option<u128> {
+ let (_, text, _) = self.split_into_parts();
+ let value = u128::from_str_radix(&text.replace('_', ""), self.radix() as u32).ok()?;
+ Some(value)
+ }
+
+ pub fn suffix(&self) -> Option<&str> {
+ let (_, _, suffix) = self.split_into_parts();
+ if suffix.is_empty() {
+ None
+ } else {
+ Some(suffix)
+ }
+ }
+
+ pub fn float_value(&self) -> Option<f64> {
+ let (_, text, _) = self.split_into_parts();
+ text.parse::<f64>().ok()
+ }
+}
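A sketch of the split on a concrete literal, using `make::tokens::literal` from earlier in this diff to manufacture the token (the `ast::Radix` re-export is an assumption):

    use syntax::ast::{self, make, AstToken};

    fn int_literal_parts() {
        let lit = ast::IntNumber::cast(make::tokens::literal("0xff_u32")).unwrap();
        assert_eq!(lit.radix(), ast::Radix::Hexadecimal);
        assert_eq!(lit.split_into_parts(), ("0x", "ff_", "u32"));
        assert_eq!(lit.value(), Some(0xff));
        assert_eq!(lit.suffix(), Some("u32"));
    }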
+
+impl ast::FloatNumber {
+ pub fn split_into_parts(&self) -> (&str, &str) {
+ let text = self.text();
+ let mut float_text = self.text();
+ let mut suffix = "";
+ let mut indices = text.char_indices();
+ if let Some((mut suffix_start, c)) = indices.by_ref().find(|(_, c)| c.is_ascii_alphabetic())
+ {
+ if c == 'e' || c == 'E' {
+ if let Some(suffix_start_tuple) = indices.find(|(_, c)| c.is_ascii_alphabetic()) {
+ suffix_start = suffix_start_tuple.0;
+
+ float_text = &text[..suffix_start];
+ suffix = &text[suffix_start..];
+ }
+ } else {
+ float_text = &text[..suffix_start];
+ suffix = &text[suffix_start..];
+ }
+ }
+
+ (float_text, suffix)
+ }
+
+ pub fn suffix(&self) -> Option<&str> {
+ let (_, suffix) = self.split_into_parts();
+ if suffix.is_empty() {
+ None
+ } else {
+ Some(suffix)
+ }
+ }
+
+ pub fn value(&self) -> Option<f64> {
+ let (text, _) = self.split_into_parts();
+ text.parse::<f64>().ok()
+ }
+}
+
+#[derive(Debug, PartialEq, Eq, Copy, Clone)]
+pub enum Radix {
+ Binary = 2,
+ Octal = 8,
+ Decimal = 10,
+ Hexadecimal = 16,
+}
+
+impl Radix {
+ pub const ALL: &'static [Radix] =
+ &[Radix::Binary, Radix::Octal, Radix::Decimal, Radix::Hexadecimal];
+
+ const fn prefix_len(self) -> usize {
+ match self {
+ Self::Decimal => 0,
+ _ => 2,
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::ast::{self, make, FloatNumber, IntNumber};
+
+ fn check_float_suffix<'a>(lit: &str, expected: impl Into<Option<&'a str>>) {
+ assert_eq!(FloatNumber { syntax: make::tokens::literal(lit) }.suffix(), expected.into());
+ }
+
+ fn check_int_suffix<'a>(lit: &str, expected: impl Into<Option<&'a str>>) {
+ assert_eq!(IntNumber { syntax: make::tokens::literal(lit) }.suffix(), expected.into());
+ }
+
+ #[test]
+ fn test_float_number_suffix() {
+ check_float_suffix("123.0", None);
+ check_float_suffix("123f32", "f32");
+ check_float_suffix("123.0e", None);
+ check_float_suffix("123.0e4", None);
+ check_float_suffix("123.0ef32", "f32");
+ check_float_suffix("123.0E4f32", "f32");
+ check_float_suffix("1_2_3.0_f32", "f32");
+ }
+
+ #[test]
+ fn test_int_number_suffix() {
+ check_int_suffix("123", None);
+ check_int_suffix("123i32", "i32");
+ check_int_suffix("1_0_1_l_o_l", "l_o_l");
+ check_int_suffix("0b11", None);
+ check_int_suffix("0o11", None);
+ check_int_suffix("0xff", None);
+ check_int_suffix("0b11u32", "u32");
+ check_int_suffix("0o11u32", "u32");
+ check_int_suffix("0xffu32", "u32");
+ }
+
+ fn check_string_value<'a>(lit: &str, expected: impl Into<Option<&'a str>>) {
+ assert_eq!(
+ ast::String { syntax: make::tokens::literal(&format!("\"{}\"", lit)) }
+ .value()
+ .as_deref(),
+ expected.into()
+ );
+ }
+
+ #[test]
+ fn test_string_escape() {
+ check_string_value(r"foobar", "foobar");
+ check_string_value(r"\foobar", None);
+ check_string_value(r"\nfoobar", "\nfoobar");
+ check_string_value(r"C:\\Windows\\System32\\", "C:\\Windows\\System32\\");
+ }
+}
+
+impl ast::Char {
+ pub fn value(&self) -> Option<char> {
+ let mut text = self.text();
+ if text.starts_with('\'') {
+ text = &text[1..];
+ } else {
+ return None;
+ }
+ if text.ends_with('\'') {
+ text = &text[0..text.len() - 1];
+ }
+
+ unescape_char(text).ok()
+ }
+}
+
+impl ast::Byte {
+ pub fn value(&self) -> Option<u8> {
+ let mut text = self.text();
+ if text.starts_with("b\'") {
+ text = &text[2..];
+ } else {
+ return None;
+ }
+ if text.ends_with('\'') {
+ text = &text[0..text.len() - 1];
+ }
+
+ unescape_byte(text).ok()
+ }
+}
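
As a standalone illustration of the splitting rule above (hex digits `a..=f` must not be mistaken for a type suffix), here is a minimal sketch in plain Rust; the helper name `split_int_literal` is made up for illustration and is not part of the crate:

```rust
// Split an integer literal into (radix prefix, digits, type suffix), mirroring
// the logic of `IntNumber::split_into_parts`: for hex literals only `g..=z`
// may start a suffix, because `a..=f` are valid digits.
fn split_int_literal(text: &str) -> (&str, &str, &str) {
    let (prefix, rest) = match text.get(..2) {
        Some("0b") | Some("0o") | Some("0x") => text.split_at(2),
        _ => ("", text),
    };
    let is_suffix_start = |c: char| {
        if prefix == "0x" {
            matches!(c, 'g'..='z' | 'G'..='Z')
        } else {
            c.is_ascii_alphabetic()
        }
    };
    match rest.char_indices().find(|&(_, c)| is_suffix_start(c)) {
        Some((i, _)) => (prefix, &rest[..i], &rest[i..]),
        None => (prefix, rest, ""),
    }
}

fn main() {
    assert_eq!(split_int_literal("0xffu32"), ("0x", "ff", "u32"));
    assert_eq!(split_int_literal("1_0_1_l_o_l"), ("", "1_0_1_", "l_o_l"));
    assert_eq!(split_int_literal("123"), ("", "123", ""));
}
```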
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs
new file mode 100644
index 000000000..aa2b7ed5c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs
@@ -0,0 +1,136 @@
+//! Various traits that are implemented by ast nodes.
+//!
+//! The implementations are usually trivial, and live in `generated.rs`.
+use itertools::Either;
+
+use crate::{
+ ast::{self, support, AstChildren, AstNode, AstToken},
+ syntax_node::SyntaxElementChildren,
+ SyntaxElement, SyntaxToken, T,
+};
+
+pub trait HasName: AstNode {
+ fn name(&self) -> Option<ast::Name> {
+ support::child(self.syntax())
+ }
+}
+
+pub trait HasVisibility: AstNode {
+ fn visibility(&self) -> Option<ast::Visibility> {
+ support::child(self.syntax())
+ }
+}
+
+pub trait HasLoopBody: AstNode {
+ fn loop_body(&self) -> Option<ast::BlockExpr> {
+ support::child(self.syntax())
+ }
+
+ fn label(&self) -> Option<ast::Label> {
+ support::child(self.syntax())
+ }
+}
+
+pub trait HasArgList: AstNode {
+ fn arg_list(&self) -> Option<ast::ArgList> {
+ support::child(self.syntax())
+ }
+}
+
+pub trait HasModuleItem: AstNode {
+ fn items(&self) -> AstChildren<ast::Item> {
+ support::children(self.syntax())
+ }
+}
+
+pub trait HasGenericParams: AstNode {
+ fn generic_param_list(&self) -> Option<ast::GenericParamList> {
+ support::child(self.syntax())
+ }
+
+ fn where_clause(&self) -> Option<ast::WhereClause> {
+ support::child(self.syntax())
+ }
+}
+
+pub trait HasTypeBounds: AstNode {
+ fn type_bound_list(&self) -> Option<ast::TypeBoundList> {
+ support::child(self.syntax())
+ }
+
+ fn colon_token(&self) -> Option<SyntaxToken> {
+ support::token(self.syntax(), T![:])
+ }
+}
+
+pub trait HasAttrs: AstNode {
+ fn attrs(&self) -> AstChildren<ast::Attr> {
+ support::children(self.syntax())
+ }
+ fn has_atom_attr(&self, atom: &str) -> bool {
+ self.attrs().filter_map(|x| x.as_simple_atom()).any(|x| x == atom)
+ }
+}
+
+pub trait HasDocComments: HasAttrs {
+ fn doc_comments(&self) -> DocCommentIter {
+ DocCommentIter { iter: self.syntax().children_with_tokens() }
+ }
+ fn doc_comments_and_attrs(&self) -> AttrDocCommentIter {
+ AttrDocCommentIter { iter: self.syntax().children_with_tokens() }
+ }
+}
+
+impl DocCommentIter {
+ pub fn from_syntax_node(syntax_node: &ast::SyntaxNode) -> DocCommentIter {
+ DocCommentIter { iter: syntax_node.children_with_tokens() }
+ }
+
+ #[cfg(test)]
+ pub fn doc_comment_text(self) -> Option<String> {
+ let docs = itertools::Itertools::join(
+ &mut self.filter_map(|comment| comment.doc_comment().map(ToOwned::to_owned)),
+ "\n",
+ );
+ if docs.is_empty() {
+ None
+ } else {
+ Some(docs)
+ }
+ }
+}
+
+pub struct DocCommentIter {
+ iter: SyntaxElementChildren,
+}
+
+impl Iterator for DocCommentIter {
+ type Item = ast::Comment;
+ fn next(&mut self) -> Option<ast::Comment> {
+ self.iter.by_ref().find_map(|el| {
+ el.into_token().and_then(ast::Comment::cast).filter(ast::Comment::is_doc)
+ })
+ }
+}
+
+pub struct AttrDocCommentIter {
+ iter: SyntaxElementChildren,
+}
+
+impl AttrDocCommentIter {
+ pub fn from_syntax_node(syntax_node: &ast::SyntaxNode) -> AttrDocCommentIter {
+ AttrDocCommentIter { iter: syntax_node.children_with_tokens() }
+ }
+}
+
+impl Iterator for AttrDocCommentIter {
+ type Item = Either<ast::Attr, ast::Comment>;
+ fn next(&mut self) -> Option<Self::Item> {
+ self.iter.by_ref().find_map(|el| match el {
+ SyntaxElement::Node(node) => ast::Attr::cast(node).map(Either::Left),
+ SyntaxElement::Token(tok) => {
+ ast::Comment::cast(tok).filter(ast::Comment::is_doc).map(Either::Right)
+ }
+ })
+ }
+}
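
A small usage sketch of these traits, assuming the crate is consumed under its in-repo name `syntax`: `SourceFile` provides `items()` through `HasModuleItem`, and concrete item nodes such as `ast::Fn` and `ast::Struct` provide `name()` through `HasName`:

```rust
use syntax::ast::{self, HasModuleItem, HasName};
use syntax::SourceFile;

fn main() {
    let file = SourceFile::parse("fn foo() {}\nstruct Bar;").tree();
    for item in file.items() {
        // Both `ast::Fn` and `ast::Struct` implement `HasName` in generated.rs.
        let name = match &item {
            ast::Item::Fn(it) => it.name(),
            ast::Item::Struct(it) => it.name(),
            _ => None,
        };
        if let Some(name) = name {
            println!("{}", name.text());
        }
    }
}
```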
diff --git a/src/tools/rust-analyzer/crates/syntax/src/fuzz.rs b/src/tools/rust-analyzer/crates/syntax/src/fuzz.rs
new file mode 100644
index 000000000..256999fe0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/fuzz.rs
@@ -0,0 +1,75 @@
+//! Some infrastructure for fuzz testing.
+//!
+//! We don't normally run fuzzing, so this is hopelessly bitrotten :(
+
+use std::{
+ convert::TryInto,
+ str::{self, FromStr},
+};
+
+use text_edit::Indel;
+
+use crate::{validation, AstNode, SourceFile, TextRange};
+
+fn check_file_invariants(file: &SourceFile) {
+ let root = file.syntax();
+ validation::validate_block_structure(root);
+}
+
+pub fn check_parser(text: &str) {
+ let file = SourceFile::parse(text);
+ check_file_invariants(&file.tree());
+}
+
+#[derive(Debug, Clone)]
+pub struct CheckReparse {
+ text: String,
+ edit: Indel,
+ edited_text: String,
+}
+
+impl CheckReparse {
+ pub fn from_data(data: &[u8]) -> Option<Self> {
+ const PREFIX: &str = "fn main(){\n\t";
+ const SUFFIX: &str = "\n}";
+
+ let data = str::from_utf8(data).ok()?;
+ let mut lines = data.lines();
+ let delete_start = usize::from_str(lines.next()?).ok()? + PREFIX.len();
+ let delete_len = usize::from_str(lines.next()?).ok()?;
+ let insert = lines.next()?.to_string();
+ let text = lines.collect::<Vec<_>>().join("\n");
+ let text = format!("{}{}{}", PREFIX, text, SUFFIX);
+ text.get(delete_start..delete_start.checked_add(delete_len)?)?; // make sure delete is a valid range
+ let delete =
+ TextRange::at(delete_start.try_into().unwrap(), delete_len.try_into().unwrap());
+ let edited_text =
+ format!("{}{}{}", &text[..delete_start], &insert, &text[delete_start + delete_len..]);
+ let edit = Indel { insert, delete };
+ Some(CheckReparse { text, edit, edited_text })
+ }
+
+ pub fn run(&self) {
+ let parse = SourceFile::parse(&self.text);
+ let new_parse = parse.reparse(&self.edit);
+ check_file_invariants(&new_parse.tree());
+ assert_eq!(&new_parse.tree().syntax().text().to_string(), &self.edited_text);
+ let full_reparse = SourceFile::parse(&self.edited_text);
+ for (a, b) in
+ new_parse.tree().syntax().descendants().zip(full_reparse.tree().syntax().descendants())
+ {
+ if (a.kind(), a.text_range()) != (b.kind(), b.text_range()) {
+ eprint!("original:\n{:#?}", parse.tree().syntax());
+ eprint!("reparsed:\n{:#?}", new_parse.tree().syntax());
+ eprint!("full reparse:\n{:#?}", full_reparse.tree().syntax());
+ assert_eq!(
+ format!("{:?}", a),
+ format!("{:?}", b),
+ "different syntax tree produced by the full reparse"
+ );
+ }
+ }
+ // FIXME
+ // assert_eq!(new_file.errors(), full_reparse.errors());
+ }
+}
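
The line-oriented format decoded by `CheckReparse::from_data` can be sketched as follows (the byte string is illustrative input, not a real fuzz corpus entry; the crate path `syntax` is assumed):

```rust
fn main() {
    // line 1: delete offset (relative to the text below, before PREFIX is prepended)
    // line 2: length of the deleted range
    // line 3: replacement text to insert
    // remaining lines: the source text, wrapped in `fn main(){\n\t ... \n}`
    let data = b"0\n3\nbaz\nfoo + bar";
    let check = syntax::fuzz::CheckReparse::from_data(data).unwrap();
    check.run(); // asserts that incremental and full reparse produce the same tree
}
```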
diff --git a/src/tools/rust-analyzer/crates/syntax/src/hacks.rs b/src/tools/rust-analyzer/crates/syntax/src/hacks.rs
new file mode 100644
index 000000000..a047f61fa
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/hacks.rs
@@ -0,0 +1,15 @@
+//! Things which exist to solve practical issues, but which shouldn't exist.
+//!
+//! Please avoid adding new usages of the functions in this module
+
+use crate::{ast, AstNode};
+
+pub fn parse_expr_from_str(s: &str) -> Option<ast::Expr> {
+ let s = s.trim();
+ let file = ast::SourceFile::parse(&format!("const _: () = {};", s));
+ let expr = file.syntax_node().descendants().find_map(ast::Expr::cast)?;
+ if expr.syntax().text() != s {
+ return None;
+ }
+ Some(expr)
+}
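
A short usage sketch, assuming the crate path `syntax`; note that the input is trimmed before being wrapped into a `const` item and parsed:

```rust
use syntax::{hacks, AstNode};

fn main() {
    // Leading/trailing whitespace is trimmed before the expression is wrapped
    // into `const _: () = ...;` and parsed.
    let expr = hacks::parse_expr_from_str(" 1 + 1 ").unwrap();
    assert_eq!(expr.syntax().text().to_string(), "1 + 1");
}
```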
diff --git a/src/tools/rust-analyzer/crates/syntax/src/lib.rs b/src/tools/rust-analyzer/crates/syntax/src/lib.rs
new file mode 100644
index 000000000..7fa354c0c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/lib.rs
@@ -0,0 +1,358 @@
+//! Syntax Tree library used throughout the rust analyzer.
+//!
+//! Properties:
+//! - easy and fast incremental re-parsing
+//! - graceful handling of errors
+//! - full-fidelity representation (*any* text can be precisely represented as
+//! a syntax tree)
+//!
+//! For more information, see the [RFC]. Current implementation is inspired by
+//! the [Swift] one.
+//!
+//! The most interesting modules here are `syntax_node` (which defines the concrete
+//! syntax tree) and `ast` (which defines the abstract syntax tree on top of the
+//! CST). The actual parser lives in a separate `parser` crate, though the
+//! lexer lives in this crate.
+//!
+//! See `api_walkthrough` test in this file for a quick API tour!
+//!
+//! [RFC]: <https://github.com/rust-lang/rfcs/pull/2256>
+//! [Swift]: <https://github.com/apple/swift/blob/13d593df6f359d0cb2fc81cfaac273297c539455/lib/Syntax/README.md>
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+#[allow(unused)]
+macro_rules! eprintln {
+ ($($tt:tt)*) => { stdx::eprintln!($($tt)*) };
+}
+
+mod syntax_node;
+mod syntax_error;
+mod parsing;
+mod validation;
+mod ptr;
+mod token_text;
+#[cfg(test)]
+mod tests;
+
+pub mod algo;
+pub mod ast;
+#[doc(hidden)]
+pub mod fuzz;
+pub mod utils;
+pub mod ted;
+pub mod hacks;
+
+use std::{marker::PhantomData, sync::Arc};
+
+use stdx::format_to;
+use text_edit::Indel;
+
+pub use crate::{
+ ast::{AstNode, AstToken},
+ ptr::{AstPtr, SyntaxNodePtr},
+ syntax_error::SyntaxError,
+ syntax_node::{
+ PreorderWithTokens, RustLanguage, SyntaxElement, SyntaxElementChildren, SyntaxNode,
+ SyntaxNodeChildren, SyntaxToken, SyntaxTreeBuilder,
+ },
+ token_text::TokenText,
+};
+pub use parser::{SyntaxKind, T};
+pub use rowan::{
+ api::Preorder, Direction, GreenNode, NodeOrToken, SyntaxText, TextRange, TextSize,
+ TokenAtOffset, WalkEvent,
+};
+pub use smol_str::SmolStr;
+
+/// `Parse` is the result of the parsing: a syntax tree and a collection of
+/// errors.
+///
+/// Note that we always produce a syntax tree, even for completely invalid
+/// files.
+#[derive(Debug, PartialEq, Eq)]
+pub struct Parse<T> {
+ green: GreenNode,
+ errors: Arc<Vec<SyntaxError>>,
+ _ty: PhantomData<fn() -> T>,
+}
+
+impl<T> Clone for Parse<T> {
+ fn clone(&self) -> Parse<T> {
+ Parse { green: self.green.clone(), errors: self.errors.clone(), _ty: PhantomData }
+ }
+}
+
+impl<T> Parse<T> {
+ fn new(green: GreenNode, errors: Vec<SyntaxError>) -> Parse<T> {
+ Parse { green, errors: Arc::new(errors), _ty: PhantomData }
+ }
+
+ pub fn syntax_node(&self) -> SyntaxNode {
+ SyntaxNode::new_root(self.green.clone())
+ }
+ pub fn errors(&self) -> &[SyntaxError] {
+ &*self.errors
+ }
+}
+
+impl<T: AstNode> Parse<T> {
+ pub fn to_syntax(self) -> Parse<SyntaxNode> {
+ Parse { green: self.green, errors: self.errors, _ty: PhantomData }
+ }
+
+ pub fn tree(&self) -> T {
+ T::cast(self.syntax_node()).unwrap()
+ }
+
+ pub fn ok(self) -> Result<T, Arc<Vec<SyntaxError>>> {
+ if self.errors.is_empty() {
+ Ok(self.tree())
+ } else {
+ Err(self.errors)
+ }
+ }
+}
+
+impl Parse<SyntaxNode> {
+ pub fn cast<N: AstNode>(self) -> Option<Parse<N>> {
+ if N::cast(self.syntax_node()).is_some() {
+ Some(Parse { green: self.green, errors: self.errors, _ty: PhantomData })
+ } else {
+ None
+ }
+ }
+}
+
+impl Parse<SourceFile> {
+ pub fn debug_dump(&self) -> String {
+ let mut buf = format!("{:#?}", self.tree().syntax());
+ for err in self.errors.iter() {
+ format_to!(buf, "error {:?}: {}\n", err.range(), err);
+ }
+ buf
+ }
+
+ pub fn reparse(&self, indel: &Indel) -> Parse<SourceFile> {
+ self.incremental_reparse(indel).unwrap_or_else(|| self.full_reparse(indel))
+ }
+
+ fn incremental_reparse(&self, indel: &Indel) -> Option<Parse<SourceFile>> {
+ // FIXME: validation errors are not handled here
+ parsing::incremental_reparse(self.tree().syntax(), indel, self.errors.to_vec()).map(
+ |(green_node, errors, _reparsed_range)| Parse {
+ green: green_node,
+ errors: Arc::new(errors),
+ _ty: PhantomData,
+ },
+ )
+ }
+
+ fn full_reparse(&self, indel: &Indel) -> Parse<SourceFile> {
+ let mut text = self.tree().syntax().text().to_string();
+ indel.apply(&mut text);
+ SourceFile::parse(&text)
+ }
+}
+
+/// `SourceFile` represents a parse tree for a single Rust file.
+pub use crate::ast::SourceFile;
+
+impl SourceFile {
+ pub fn parse(text: &str) -> Parse<SourceFile> {
+ let (green, mut errors) = parsing::parse_text(text);
+ let root = SyntaxNode::new_root(green.clone());
+
+ errors.extend(validation::validate(&root));
+
+ assert_eq!(root.kind(), SyntaxKind::SOURCE_FILE);
+ Parse { green, errors: Arc::new(errors), _ty: PhantomData }
+ }
+}
+
+/// Matches a `SyntaxNode` against an `ast` type.
+///
+/// # Example:
+///
+/// ```ignore
+/// match_ast! {
+/// match node {
+/// ast::CallExpr(it) => { ... },
+/// ast::MethodCallExpr(it) => { ... },
+/// ast::MacroCall(it) => { ... },
+/// _ => None,
+/// }
+/// }
+/// ```
+#[macro_export]
+macro_rules! match_ast {
+ (match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) };
+
+ (match ($node:expr) {
+ $( $( $path:ident )::+ ($it:pat) => $res:expr, )*
+ _ => $catch_all:expr $(,)?
+ }) => {{
+ $( if let Some($it) = $($path::)+cast($node.clone()) { $res } else )*
+ { $catch_all }
+ }};
+}
+
+/// This test does not assert anything and instead just shows off the crate's
+/// API.
+#[test]
+fn api_walkthrough() {
+ use ast::{HasModuleItem, HasName};
+
+ let source_code = "
+ fn foo() {
+ 1 + 1
+ }
+ ";
+ // `SourceFile` is the main entry point.
+ //
+ // The `parse` method returns a `Parse` -- a pair of a syntax tree and a list
+ // of errors. That is, a syntax tree is constructed even in the presence of errors.
+ let parse = SourceFile::parse(source_code);
+ assert!(parse.errors().is_empty());
+
+ // The `tree` method returns an owned syntax node of type `SourceFile`.
+ // Owned nodes are cheap: inside, they are `Rc` handles to the underlying data.
+ let file: SourceFile = parse.tree();
+
+ // `SourceFile` is the root of the syntax tree. We can iterate file's items.
+ // Let's fetch the `foo` function.
+ let mut func = None;
+ for item in file.items() {
+ match item {
+ ast::Item::Fn(f) => func = Some(f),
+ _ => unreachable!(),
+ }
+ }
+ let func: ast::Fn = func.unwrap();
+
+ // Each AST node has a bunch of getters for children. All getters return
+ // `Option`s though, to account for incomplete code. Some getters are common
+ // for several kinds of node. In this case, a trait like `ast::HasName`
+ // usually exists. By convention, all ast types should be used with `ast::`
+ // qualifier.
+ let name: Option<ast::Name> = func.name();
+ let name = name.unwrap();
+ assert_eq!(name.text(), "foo");
+
+ // Let's get the `1 + 1` expression!
+ let body: ast::BlockExpr = func.body().unwrap();
+ let stmt_list: ast::StmtList = body.stmt_list().unwrap();
+ let expr: ast::Expr = stmt_list.tail_expr().unwrap();
+
+ // Enums are used to group related ast nodes together, and can be used for
+ // matching. However, because there are no public fields, it's possible to
+ // match only the top level enum: that is the price we pay for increased API
+ // flexibility
+ let bin_expr: &ast::BinExpr = match &expr {
+ ast::Expr::BinExpr(e) => e,
+ _ => unreachable!(),
+ };
+
+ // Besides the "typed" AST API, there's an untyped CST one as well.
+ // To switch from AST to CST, call `.syntax()` method:
+ let expr_syntax: &SyntaxNode = expr.syntax();
+
+ // Note how `expr` and `bin_expr` are in fact the same node underneath:
+ assert!(expr_syntax == bin_expr.syntax());
+
+ // To go from CST to AST, `AstNode::cast` function is used:
+ let _expr: ast::Expr = match ast::Expr::cast(expr_syntax.clone()) {
+ Some(e) => e,
+ None => unreachable!(),
+ };
+
+ // Each syntax node has two properties. The first is its `SyntaxKind`:
+ assert_eq!(expr_syntax.kind(), SyntaxKind::BIN_EXPR);
+
+ // And text range:
+ assert_eq!(expr_syntax.text_range(), TextRange::new(32.into(), 37.into()));
+
+ // You can get a node's text as a `SyntaxText` object, which will traverse the
+ // tree collecting the tokens' text:
+ let text: SyntaxText = expr_syntax.text();
+ assert_eq!(text.to_string(), "1 + 1");
+
+ // There's a bunch of traversal methods on `SyntaxNode`:
+ assert_eq!(expr_syntax.parent().as_ref(), Some(stmt_list.syntax()));
+ assert_eq!(stmt_list.syntax().first_child_or_token().map(|it| it.kind()), Some(T!['{']));
+ assert_eq!(
+ expr_syntax.next_sibling_or_token().map(|it| it.kind()),
+ Some(SyntaxKind::WHITESPACE)
+ );
+
+ // As well as some iterator helpers:
+ let f = expr_syntax.ancestors().find_map(ast::Fn::cast);
+ assert_eq!(f, Some(func));
+ assert!(expr_syntax.siblings_with_tokens(Direction::Next).any(|it| it.kind() == T!['}']));
+ assert_eq!(
+ expr_syntax.descendants_with_tokens().count(),
+ 8, // 5 tokens `1`, ` `, `+`, ` `, `1`
+ // 2 child literal expressions: `1`, `1`
+ // 1 the node itself: `1 + 1`
+ );
+
+ // There's also a `preorder` method with a more fine-grained iteration control:
+ let mut buf = String::new();
+ let mut indent = 0;
+ for event in expr_syntax.preorder_with_tokens() {
+ match event {
+ WalkEvent::Enter(node) => {
+ let text = match &node {
+ NodeOrToken::Node(it) => it.text().to_string(),
+ NodeOrToken::Token(it) => it.text().to_string(),
+ };
+ format_to!(buf, "{:indent$}{:?} {:?}\n", " ", text, node.kind(), indent = indent);
+ indent += 2;
+ }
+ WalkEvent::Leave(_) => indent -= 2,
+ }
+ }
+ assert_eq!(indent, 0);
+ assert_eq!(
+ buf.trim(),
+ r#"
+"1 + 1" BIN_EXPR
+ "1" LITERAL
+ "1" INT_NUMBER
+ " " WHITESPACE
+ "+" PLUS
+ " " WHITESPACE
+ "1" LITERAL
+ "1" INT_NUMBER
+"#
+ .trim()
+ );
+
+ // To recursively process the tree, there are three approaches:
+ // 1. explicitly call getter methods on AST nodes.
+ // 2. use descendants and `AstNode::cast`.
+ // 3. use descendants and `match_ast!`.
+ //
+ // Here's how the first one looks:
+ let exprs_cast: Vec<String> = file
+ .syntax()
+ .descendants()
+ .filter_map(ast::Expr::cast)
+ .map(|expr| expr.syntax().text().to_string())
+ .collect();
+
+ // An alternative is to use a macro.
+ let mut exprs_visit = Vec::new();
+ for node in file.syntax().descendants() {
+ match_ast! {
+ match node {
+ ast::Expr(it) => {
+ let res = it.syntax().text().to_string();
+ exprs_visit.push(res);
+ },
+ _ => (),
+ }
+ }
+ }
+ assert_eq!(exprs_cast, exprs_visit);
+}
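
A minimal end-to-end sketch of `SourceFile::parse` followed by `Parse::reparse`, assuming the in-repo `text_edit` crate for `Indel` (byte offsets `3..6` cover `foo` in the snippet):

```rust
use syntax::{AstNode, SourceFile, TextRange};
use text_edit::Indel;

fn main() {
    let parse = SourceFile::parse("fn foo() { 1 + 1 }");
    assert!(parse.errors().is_empty());

    // Replace `foo` with `bar`; `reparse` tries the incremental path first
    // and silently falls back to a full reparse when that is not possible.
    let edit = Indel::replace(TextRange::new(3.into(), 6.into()), "bar".to_string());
    let new_parse = parse.reparse(&edit);
    assert_eq!(new_parse.tree().syntax().text().to_string(), "fn bar() { 1 + 1 }");
}
```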
diff --git a/src/tools/rust-analyzer/crates/syntax/src/parsing.rs b/src/tools/rust-analyzer/crates/syntax/src/parsing.rs
new file mode 100644
index 000000000..047e670c9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/parsing.rs
@@ -0,0 +1,46 @@
+//! Lexing, bridging to parser (which does the actual parsing) and
+//! incremental reparsing.
+
+mod reparsing;
+
+use rowan::TextRange;
+
+use crate::{syntax_node::GreenNode, SyntaxError, SyntaxTreeBuilder};
+
+pub(crate) use crate::parsing::reparsing::incremental_reparse;
+
+pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) {
+ let lexed = parser::LexedStr::new(text);
+ let parser_input = lexed.to_input();
+ let parser_output = parser::TopEntryPoint::SourceFile.parse(&parser_input);
+ let (node, errors, _eof) = build_tree(lexed, parser_output);
+ (node, errors)
+}
+
+pub(crate) fn build_tree(
+ lexed: parser::LexedStr<'_>,
+ parser_output: parser::Output,
+) -> (GreenNode, Vec<SyntaxError>, bool) {
+ let mut builder = SyntaxTreeBuilder::default();
+
+ let is_eof = lexed.intersperse_trivia(&parser_output, &mut |step| match step {
+ parser::StrStep::Token { kind, text } => builder.token(kind, text),
+ parser::StrStep::Enter { kind } => builder.start_node(kind),
+ parser::StrStep::Exit => builder.finish_node(),
+ parser::StrStep::Error { msg, pos } => {
+ builder.error(msg.to_string(), pos.try_into().unwrap())
+ }
+ });
+
+ let (node, mut errors) = builder.finish_raw();
+ for (i, err) in lexed.errors() {
+ let text_range = lexed.text_range(i);
+ let text_range = TextRange::new(
+ text_range.start.try_into().unwrap(),
+ text_range.end.try_into().unwrap(),
+ );
+ errors.push(SyntaxError::new(err, text_range))
+ }
+
+ (node, errors, is_eof)
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/parsing/reparsing.rs b/src/tools/rust-analyzer/crates/syntax/src/parsing/reparsing.rs
new file mode 100644
index 000000000..701e6232d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/parsing/reparsing.rs
@@ -0,0 +1,441 @@
+//! Implementation of incremental re-parsing.
+//!
+//! We use two simple strategies for this:
+//! - if the edit modifies only a single token (like changing an identifier's
+//! letter), we replace only this token.
+//! - otherwise, we search for the nearest `{}` block which contains the edit
+//! and try to parse only this block.
+
+use parser::Reparser;
+use text_edit::Indel;
+
+use crate::{
+ parsing::build_tree,
+ syntax_node::{GreenNode, GreenToken, NodeOrToken, SyntaxElement, SyntaxNode},
+ SyntaxError,
+ SyntaxKind::*,
+ TextRange, TextSize, T,
+};
+
+pub(crate) fn incremental_reparse(
+ node: &SyntaxNode,
+ edit: &Indel,
+ errors: Vec<SyntaxError>,
+) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
+ if let Some((green, new_errors, old_range)) = reparse_token(node, edit) {
+ return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range));
+ }
+
+ if let Some((green, new_errors, old_range)) = reparse_block(node, edit) {
+ return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range));
+ }
+ None
+}
+
+fn reparse_token(
+ root: &SyntaxNode,
+ edit: &Indel,
+) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
+ let prev_token = root.covering_element(edit.delete).as_token()?.clone();
+ let prev_token_kind = prev_token.kind();
+ match prev_token_kind {
+ WHITESPACE | COMMENT | IDENT | STRING => {
+ if prev_token_kind == WHITESPACE || prev_token_kind == COMMENT {
+ // removing a newline may extend the previous token
+ let deleted_range = edit.delete - prev_token.text_range().start();
+ if prev_token.text()[deleted_range].contains('\n') {
+ return None;
+ }
+ }
+
+ let mut new_text = get_text_after_edit(prev_token.clone().into(), edit);
+ let (new_token_kind, new_err) = parser::LexedStr::single_token(&new_text)?;
+
+ if new_token_kind != prev_token_kind
+ || (new_token_kind == IDENT && is_contextual_kw(&new_text))
+ {
+ return None;
+ }
+
+ // Check that the edited token is not part of a bigger token.
+ // E.g. if, for the source code `bruh"str"`, the user removed `ruh`, then
+ // `b` no longer remains an identifier, but becomes part of a byte string literal.
+ if let Some(next_char) = root.text().char_at(prev_token.text_range().end()) {
+ new_text.push(next_char);
+ let token_with_next_char = parser::LexedStr::single_token(&new_text);
+ if let Some((_kind, _error)) = token_with_next_char {
+ return None;
+ }
+ new_text.pop();
+ }
+
+ let new_token = GreenToken::new(rowan::SyntaxKind(prev_token_kind.into()), &new_text);
+ let range = TextRange::up_to(TextSize::of(&new_text));
+ Some((
+ prev_token.replace_with(new_token),
+ new_err.into_iter().map(|msg| SyntaxError::new(msg, range)).collect(),
+ prev_token.text_range(),
+ ))
+ }
+ _ => None,
+ }
+}
+
+fn reparse_block(
+ root: &SyntaxNode,
+ edit: &Indel,
+) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
+ let (node, reparser) = find_reparsable_node(root, edit.delete)?;
+ let text = get_text_after_edit(node.clone().into(), edit);
+
+ let lexed = parser::LexedStr::new(text.as_str());
+ let parser_input = lexed.to_input();
+ if !is_balanced(&lexed) {
+ return None;
+ }
+
+ let tree_traversal = reparser.parse(&parser_input);
+
+ let (green, new_parser_errors, _eof) = build_tree(lexed, tree_traversal);
+
+ Some((node.replace_with(green), new_parser_errors, node.text_range()))
+}
+
+fn get_text_after_edit(element: SyntaxElement, edit: &Indel) -> String {
+ let edit = Indel::replace(edit.delete - element.text_range().start(), edit.insert.clone());
+
+ let mut text = match element {
+ NodeOrToken::Token(token) => token.text().to_string(),
+ NodeOrToken::Node(node) => node.text().to_string(),
+ };
+ edit.apply(&mut text);
+ text
+}
+
+fn is_contextual_kw(text: &str) -> bool {
+ matches!(text, "auto" | "default" | "union")
+}
+
+fn find_reparsable_node(node: &SyntaxNode, range: TextRange) -> Option<(SyntaxNode, Reparser)> {
+ let node = node.covering_element(range);
+
+ node.ancestors().find_map(|node| {
+ let first_child = node.first_child_or_token().map(|it| it.kind());
+ let parent = node.parent().map(|it| it.kind());
+ Reparser::for_node(node.kind(), first_child, parent).map(|r| (node, r))
+ })
+}
+
+fn is_balanced(lexed: &parser::LexedStr<'_>) -> bool {
+ if lexed.is_empty() || lexed.kind(0) != T!['{'] || lexed.kind(lexed.len() - 1) != T!['}'] {
+ return false;
+ }
+ let mut balance = 0usize;
+ for i in 1..lexed.len() - 1 {
+ match lexed.kind(i) {
+ T!['{'] => balance += 1,
+ T!['}'] => {
+ balance = match balance.checked_sub(1) {
+ Some(b) => b,
+ None => return false,
+ }
+ }
+ _ => (),
+ }
+ }
+ balance == 0
+}
+
+fn merge_errors(
+ old_errors: Vec<SyntaxError>,
+ new_errors: Vec<SyntaxError>,
+ range_before_reparse: TextRange,
+ edit: &Indel,
+) -> Vec<SyntaxError> {
+ let mut res = Vec::new();
+
+ for old_err in old_errors {
+ let old_err_range = old_err.range();
+ if old_err_range.end() <= range_before_reparse.start() {
+ res.push(old_err);
+ } else if old_err_range.start() >= range_before_reparse.end() {
+ let inserted_len = TextSize::of(&edit.insert);
+ res.push(old_err.with_range((old_err_range + inserted_len) - edit.delete.len()));
+ // Note: extra parens are intentional to prevent uint underflow, HWAB (here was a bug)
+ }
+ }
+ res.extend(new_errors.into_iter().map(|new_err| {
+ // fighting borrow checker with a variable ;)
+ let offseted_range = new_err.range() + range_before_reparse.start();
+ new_err.with_range(offseted_range)
+ }));
+ res
+}
+
+#[cfg(test)]
+mod tests {
+ use test_utils::{assert_eq_text, extract_range};
+
+ use super::*;
+ use crate::{AstNode, Parse, SourceFile};
+
+ fn do_check(before: &str, replace_with: &str, reparsed_len: u32) {
+ let (range, before) = extract_range(before);
+ let edit = Indel::replace(range, replace_with.to_owned());
+ let after = {
+ let mut after = before.clone();
+ edit.apply(&mut after);
+ after
+ };
+
+ let fully_reparsed = SourceFile::parse(&after);
+ let incrementally_reparsed: Parse<SourceFile> = {
+ let before = SourceFile::parse(&before);
+ let (green, new_errors, range) =
+ incremental_reparse(before.tree().syntax(), &edit, before.errors.to_vec()).unwrap();
+ assert_eq!(range.len(), reparsed_len.into(), "reparsed fragment has wrong length");
+ Parse::new(green, new_errors)
+ };
+
+ assert_eq_text!(
+ &format!("{:#?}", fully_reparsed.tree().syntax()),
+ &format!("{:#?}", incrementally_reparsed.tree().syntax()),
+ );
+ assert_eq!(fully_reparsed.errors(), incrementally_reparsed.errors());
+ }
+
+ #[test] // FIXME: some tests here actually test token reparsing
+ fn reparse_block_tests() {
+ do_check(
+ r"
+fn foo() {
+ let x = foo + $0bar$0
+}
+",
+ "baz",
+ 3,
+ );
+ do_check(
+ r"
+fn foo() {
+ let x = foo$0 + bar$0
+}
+",
+ "baz",
+ 25,
+ );
+ do_check(
+ r"
+struct Foo {
+ f: foo$0$0
+}
+",
+ ",\n g: (),",
+ 14,
+ );
+ do_check(
+ r"
+fn foo {
+ let;
+ 1 + 1;
+ $092$0;
+}
+",
+ "62",
+ 31, // FIXME: reparse only int literal here
+ );
+ do_check(
+ r"
+mod foo {
+ fn $0$0
+}
+",
+ "bar",
+ 11,
+ );
+
+ do_check(
+ r"
+trait Foo {
+ type $0Foo$0;
+}
+",
+ "Output",
+ 3,
+ );
+ do_check(
+ r"
+impl IntoIterator<Item=i32> for Foo {
+ f$0$0
+}
+",
+ "n next(",
+ 9,
+ );
+ do_check(r"use a::b::{foo,$0,bar$0};", "baz", 10);
+ do_check(
+ r"
+pub enum A {
+ Foo$0$0
+}
+",
+ "\nBar;\n",
+ 11,
+ );
+ do_check(
+ r"
+foo!{a, b$0$0 d}
+",
+ ", c[3]",
+ 8,
+ );
+ do_check(
+ r"
+fn foo() {
+ vec![$0$0]
+}
+",
+ "123",
+ 14,
+ );
+ do_check(
+ r"
+extern {
+ fn$0;$0
+}
+",
+ " exit(code: c_int)",
+ 11,
+ );
+ }
+
+ #[test]
+ fn reparse_token_tests() {
+ do_check(
+ r"$0$0
+fn foo() -> i32 { 1 }
+",
+ "\n\n\n \n",
+ 1,
+ );
+ do_check(
+ r"
+fn foo() -> $0$0 {}
+",
+ " \n",
+ 2,
+ );
+ do_check(
+ r"
+fn $0foo$0() -> i32 { 1 }
+",
+ "bar",
+ 3,
+ );
+ do_check(
+ r"
+fn foo$0$0foo() { }
+",
+ "bar",
+ 6,
+ );
+ do_check(
+ r"
+fn foo /* $0$0 */ () {}
+",
+ "some comment",
+ 6,
+ );
+ do_check(
+ r"
+fn baz $0$0 () {}
+",
+ " \t\t\n\n",
+ 2,
+ );
+ do_check(
+ r"
+fn baz $0$0 () {}
+",
+ " \t\t\n\n",
+ 2,
+ );
+ do_check(
+ r"
+/// foo $0$0omment
+mod { }
+",
+ "c",
+ 14,
+ );
+ do_check(
+ r#"
+fn -> &str { "Hello$0$0" }
+"#,
+ ", world",
+ 7,
+ );
+ do_check(
+ r#"
+fn -> &str { // "Hello$0$0"
+"#,
+ ", world",
+ 10,
+ );
+ do_check(
+ r##"
+fn -> &str { r#"Hello$0$0"#
+"##,
+ ", world",
+ 10,
+ );
+ do_check(
+ r"
+#[derive($0Copy$0)]
+enum Foo {
+
+}
+",
+ "Clone",
+ 4,
+ );
+ }
+
+ #[test]
+ fn reparse_str_token_with_error_unchanged() {
+ do_check(r#""$0Unclosed$0 string literal"#, "Still unclosed", 24);
+ }
+
+ #[test]
+ fn reparse_str_token_with_error_fixed() {
+ do_check(r#""unterinated$0$0"#, "\"", 12);
+ }
+
+ #[test]
+ fn reparse_block_with_error_in_middle_unchanged() {
+ do_check(
+ r#"fn main() {
+ if {}
+ 32 + 4$0$0
+ return
+ if {}
+ }"#,
+ "23",
+ 105,
+ )
+ }
+
+ #[test]
+ fn reparse_block_with_error_in_middle_fixed() {
+ do_check(
+ r#"fn main() {
+ if {}
+ 32 + 4$0$0
+ return
+ if {}
+ }"#,
+ ";",
+ 105,
+ )
+ }
+}
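
The range arithmetic used by `merge_errors` for old errors located after the reparsed range, shown in isolation with the re-exported `TextRange`/`TextSize` types (the concrete lengths below are arbitrary):

```rust
use syntax::{TextRange, TextSize};

fn main() {
    // An old error that sits after the reparsed range keeps its position relative
    // to the text following the edit: `(range + inserted_len) - deleted_len`.
    let old_err_range = TextRange::new(20.into(), 25.into());
    let inserted_len = TextSize::of("baz"); // 3
    let deleted_len = TextSize::of("foobar"); // 6
    let new_range = (old_err_range + inserted_len) - deleted_len;
    assert_eq!(new_range, TextRange::new(17.into(), 22.into()));
}
```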
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ptr.rs b/src/tools/rust-analyzer/crates/syntax/src/ptr.rs
new file mode 100644
index 000000000..a886972ff
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/ptr.rs
@@ -0,0 +1,104 @@
+//! In rust-analyzer, syntax trees are transient objects.
+//!
+//! That means that we create trees when we need them, and tear them down to
+//! save memory. In this architecture, hanging on to a particular syntax node
+//! for a long time is ill-advised, as that keeps the whole tree resident.
+//!
+//! Instead, we provide a [`SyntaxNodePtr`] type, which stores information about
+//! *location* of a particular syntax node in a tree. It's a small type which can
+//! be cheaply stored, and which can be resolved to a real [`SyntaxNode`] when
+//! necessary.
+
+use std::{
+ hash::{Hash, Hasher},
+ marker::PhantomData,
+};
+
+use rowan::TextRange;
+
+use crate::{syntax_node::RustLanguage, AstNode, SyntaxNode};
+
+/// A "pointer" to a [`SyntaxNode`], via location in the source code.
+pub type SyntaxNodePtr = rowan::ast::SyntaxNodePtr<RustLanguage>;
+
+/// Like `SyntaxNodePtr`, but remembers the type of node.
+#[derive(Debug)]
+pub struct AstPtr<N: AstNode> {
+ raw: SyntaxNodePtr,
+ _ty: PhantomData<fn() -> N>,
+}
+
+impl<N: AstNode> Clone for AstPtr<N> {
+ fn clone(&self) -> AstPtr<N> {
+ AstPtr { raw: self.raw.clone(), _ty: PhantomData }
+ }
+}
+
+impl<N: AstNode> Eq for AstPtr<N> {}
+
+impl<N: AstNode> PartialEq for AstPtr<N> {
+ fn eq(&self, other: &AstPtr<N>) -> bool {
+ self.raw == other.raw
+ }
+}
+
+impl<N: AstNode> Hash for AstPtr<N> {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.raw.hash(state);
+ }
+}
+
+impl<N: AstNode> AstPtr<N> {
+ pub fn new(node: &N) -> AstPtr<N> {
+ AstPtr { raw: SyntaxNodePtr::new(node.syntax()), _ty: PhantomData }
+ }
+
+ pub fn to_node(&self, root: &SyntaxNode) -> N {
+ let syntax_node = self.raw.to_node(root);
+ N::cast(syntax_node).unwrap()
+ }
+
+ pub fn syntax_node_ptr(&self) -> SyntaxNodePtr {
+ self.raw.clone()
+ }
+
+ pub fn text_range(&self) -> TextRange {
+ self.raw.text_range()
+ }
+
+ pub fn cast<U: AstNode>(self) -> Option<AstPtr<U>> {
+ if !U::can_cast(self.raw.kind()) {
+ return None;
+ }
+ Some(AstPtr { raw: self.raw, _ty: PhantomData })
+ }
+
+ pub fn upcast<M: AstNode>(self) -> AstPtr<M>
+ where
+ N: Into<M>,
+ {
+ AstPtr { raw: self.raw, _ty: PhantomData }
+ }
+
+ /// Like `SyntaxNodePtr::cast` but the trait bounds work out.
+ pub fn try_from_raw(raw: SyntaxNodePtr) -> Option<AstPtr<N>> {
+ N::can_cast(raw.kind()).then(|| AstPtr { raw, _ty: PhantomData })
+ }
+}
+
+impl<N: AstNode> From<AstPtr<N>> for SyntaxNodePtr {
+ fn from(ptr: AstPtr<N>) -> SyntaxNodePtr {
+ ptr.raw
+ }
+}
+
+#[test]
+fn test_local_syntax_ptr() {
+ use crate::{ast, AstNode, SourceFile};
+
+ let file = SourceFile::parse("struct Foo { f: u32, }").ok().unwrap();
+ let field = file.syntax().descendants().find_map(ast::RecordField::cast).unwrap();
+ let ptr = SyntaxNodePtr::new(field.syntax());
+ let field_syntax = ptr.to_node(file.syntax());
+ assert_eq!(field.syntax(), &field_syntax);
+}
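
A sketch of the `AstPtr` round trip, mirroring the `test_local_syntax_ptr` test above but through the typed wrapper (crate path `syntax` assumed):

```rust
use syntax::{ast, AstNode, AstPtr, SourceFile};

fn main() {
    let parse = SourceFile::parse("fn foo() {}");
    let file = parse.tree();
    let func = file.syntax().descendants().find_map(ast::Fn::cast).unwrap();

    // The pointer is small and does not keep the tree alive; it can be resolved
    // later against a root re-created from the same parse result.
    let ptr: AstPtr<ast::Fn> = AstPtr::new(&func);
    drop(func);
    drop(file);

    let root = parse.syntax_node();
    let func_again = ptr.to_node(&root);
    assert_eq!(func_again.syntax().text().to_string(), "fn foo() {}");
}
```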
diff --git a/src/tools/rust-analyzer/crates/syntax/src/syntax_error.rs b/src/tools/rust-analyzer/crates/syntax/src/syntax_error.rs
new file mode 100644
index 000000000..dc6130bd6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/syntax_error.rs
@@ -0,0 +1,44 @@
+//! See docs for `SyntaxError`.
+
+use std::fmt;
+
+use crate::{TextRange, TextSize};
+
+/// Represents the result of unsuccessful tokenization, parsing
+/// or tree validation.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct SyntaxError(String, TextRange);
+
+// FIXME: there was an unused SyntaxErrorKind previously (before this enum was removed)
+// It was introduced in this PR: https://github.com/rust-lang/rust-analyzer/pull/846/files#diff-827da9b03b8f9faa1bade5cdd44d5dafR95
+// but it was not removed by a mistake.
+//
+// So, we need to find a place to put the validation for attributes in match clauses.
+// Code before refactor:
+// InvalidMatchInnerAttr => {
+// write!(f, "Inner attributes are only allowed directly after the opening brace of the match expression")
+// }
+
+impl SyntaxError {
+ pub fn new(message: impl Into<String>, range: TextRange) -> Self {
+ Self(message.into(), range)
+ }
+ pub fn new_at_offset(message: impl Into<String>, offset: TextSize) -> Self {
+ Self(message.into(), TextRange::empty(offset))
+ }
+
+ pub fn range(&self) -> TextRange {
+ self.1
+ }
+
+ pub fn with_range(mut self, range: TextRange) -> Self {
+ self.1 = range;
+ self
+ }
+}
+
+impl fmt::Display for SyntaxError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.0.fmt(f)
+ }
+}
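
A tiny sketch of constructing and inspecting a `SyntaxError`; the message and offsets are arbitrary:

```rust
use syntax::{SyntaxError, TextRange};

fn main() {
    let err = SyntaxError::new("expected an item", TextRange::new(0.into(), 2.into()));
    assert_eq!(err.range(), TextRange::new(0.into(), 2.into()));
    assert_eq!(err.to_string(), "expected an item"); // via the Display impl
}
```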
diff --git a/src/tools/rust-analyzer/crates/syntax/src/syntax_node.rs b/src/tools/rust-analyzer/crates/syntax/src/syntax_node.rs
new file mode 100644
index 000000000..a08c01597
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/syntax_node.rs
@@ -0,0 +1,75 @@
+//! This module defines Concrete Syntax Tree (CST), used by rust-analyzer.
+//!
+//! The CST includes comments and whitespace, provides a single node type,
+//! `SyntaxNode`, and a basic traversal API (parent, children, siblings).
+//!
+//! The *real* implementation is in the (language-agnostic) `rowan` crate; this
+//! module just wraps its API.
+
+use rowan::{GreenNodeBuilder, Language};
+
+use crate::{Parse, SyntaxError, SyntaxKind, TextSize};
+
+pub(crate) use rowan::{GreenNode, GreenToken, NodeOrToken};
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub enum RustLanguage {}
+impl Language for RustLanguage {
+ type Kind = SyntaxKind;
+
+ fn kind_from_raw(raw: rowan::SyntaxKind) -> SyntaxKind {
+ SyntaxKind::from(raw.0)
+ }
+
+ fn kind_to_raw(kind: SyntaxKind) -> rowan::SyntaxKind {
+ rowan::SyntaxKind(kind.into())
+ }
+}
+
+pub type SyntaxNode = rowan::SyntaxNode<RustLanguage>;
+pub type SyntaxToken = rowan::SyntaxToken<RustLanguage>;
+pub type SyntaxElement = rowan::SyntaxElement<RustLanguage>;
+pub type SyntaxNodeChildren = rowan::SyntaxNodeChildren<RustLanguage>;
+pub type SyntaxElementChildren = rowan::SyntaxElementChildren<RustLanguage>;
+pub type PreorderWithTokens = rowan::api::PreorderWithTokens<RustLanguage>;
+
+#[derive(Default)]
+pub struct SyntaxTreeBuilder {
+ errors: Vec<SyntaxError>,
+ inner: GreenNodeBuilder<'static>,
+}
+
+impl SyntaxTreeBuilder {
+ pub(crate) fn finish_raw(self) -> (GreenNode, Vec<SyntaxError>) {
+ let green = self.inner.finish();
+ (green, self.errors)
+ }
+
+ pub fn finish(self) -> Parse<SyntaxNode> {
+ let (green, errors) = self.finish_raw();
+ // Disable block validation, see https://github.com/rust-lang/rust-analyzer/pull/10357
+ if cfg!(debug_assertions) && false {
+ let node = SyntaxNode::new_root(green.clone());
+ crate::validation::validate_block_structure(&node);
+ }
+ Parse::new(green, errors)
+ }
+
+ pub fn token(&mut self, kind: SyntaxKind, text: &str) {
+ let kind = RustLanguage::kind_to_raw(kind);
+ self.inner.token(kind, text);
+ }
+
+ pub fn start_node(&mut self, kind: SyntaxKind) {
+ let kind = RustLanguage::kind_to_raw(kind);
+ self.inner.start_node(kind);
+ }
+
+ pub fn finish_node(&mut self) {
+ self.inner.finish_node();
+ }
+
+ pub fn error(&mut self, error: String, text_pos: TextSize) {
+ self.errors.push(SyntaxError::new_at_offset(error, text_pos));
+ }
+}
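
A minimal sketch of driving `SyntaxTreeBuilder` by hand; in practice the parser output drives it through `build_tree`, and the kinds and text below are arbitrary:

```rust
use syntax::{SyntaxKind, SyntaxTreeBuilder};

fn main() {
    let mut builder = SyntaxTreeBuilder::default();
    builder.start_node(SyntaxKind::SOURCE_FILE);
    builder.token(SyntaxKind::WHITESPACE, "\n");
    builder.finish_node();

    let parse = builder.finish();
    assert_eq!(parse.syntax_node().kind(), SyntaxKind::SOURCE_FILE);
    assert_eq!(parse.syntax_node().text().to_string(), "\n");
    assert!(parse.errors().is_empty());
}
```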
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ted.rs b/src/tools/rust-analyzer/crates/syntax/src/ted.rs
new file mode 100644
index 000000000..a47b4b11c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/ted.rs
@@ -0,0 +1,206 @@
+//! Primitive tree editor, ed for trees.
+//!
+//! The `_raw`-suffixed functions insert elements as is, unsuffixed versions fix
+//! up elements around the edges.
+use std::{mem, ops::RangeInclusive};
+
+use parser::T;
+
+use crate::{
+ ast::{self, edit::IndentLevel, make, AstNode},
+ SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken,
+};
+
+/// Utility trait to allow calling `ted` functions with references or owned
+/// nodes. Do not use outside of this module.
+pub trait Element {
+ fn syntax_element(self) -> SyntaxElement;
+}
+
+impl<E: Element + Clone> Element for &'_ E {
+ fn syntax_element(self) -> SyntaxElement {
+ self.clone().syntax_element()
+ }
+}
+impl Element for SyntaxElement {
+ fn syntax_element(self) -> SyntaxElement {
+ self
+ }
+}
+impl Element for SyntaxNode {
+ fn syntax_element(self) -> SyntaxElement {
+ self.into()
+ }
+}
+impl Element for SyntaxToken {
+ fn syntax_element(self) -> SyntaxElement {
+ self.into()
+ }
+}
+
+#[derive(Debug)]
+pub struct Position {
+ repr: PositionRepr,
+}
+
+#[derive(Debug)]
+enum PositionRepr {
+ FirstChild(SyntaxNode),
+ After(SyntaxElement),
+}
+
+impl Position {
+ pub fn after(elem: impl Element) -> Position {
+ let repr = PositionRepr::After(elem.syntax_element());
+ Position { repr }
+ }
+ pub fn before(elem: impl Element) -> Position {
+ let elem = elem.syntax_element();
+ let repr = match elem.prev_sibling_or_token() {
+ Some(it) => PositionRepr::After(it),
+ None => PositionRepr::FirstChild(elem.parent().unwrap()),
+ };
+ Position { repr }
+ }
+ pub fn first_child_of(node: &(impl Into<SyntaxNode> + Clone)) -> Position {
+ let repr = PositionRepr::FirstChild(node.clone().into());
+ Position { repr }
+ }
+ pub fn last_child_of(node: &(impl Into<SyntaxNode> + Clone)) -> Position {
+ let node = node.clone().into();
+ let repr = match node.last_child_or_token() {
+ Some(it) => PositionRepr::After(it),
+ None => PositionRepr::FirstChild(node),
+ };
+ Position { repr }
+ }
+}
+
+pub fn insert(position: Position, elem: impl Element) {
+ insert_all(position, vec![elem.syntax_element()]);
+}
+pub fn insert_raw(position: Position, elem: impl Element) {
+ insert_all_raw(position, vec![elem.syntax_element()]);
+}
+pub fn insert_all(position: Position, mut elements: Vec<SyntaxElement>) {
+ if let Some(first) = elements.first() {
+ if let Some(ws) = ws_before(&position, first) {
+ elements.insert(0, ws.into());
+ }
+ }
+ if let Some(last) = elements.last() {
+ if let Some(ws) = ws_after(&position, last) {
+ elements.push(ws.into());
+ }
+ }
+ insert_all_raw(position, elements);
+}
+pub fn insert_all_raw(position: Position, elements: Vec<SyntaxElement>) {
+ let (parent, index) = match position.repr {
+ PositionRepr::FirstChild(parent) => (parent, 0),
+ PositionRepr::After(child) => (child.parent().unwrap(), child.index() + 1),
+ };
+ parent.splice_children(index..index, elements);
+}
+
+pub fn remove(elem: impl Element) {
+ elem.syntax_element().detach();
+}
+pub fn remove_all(range: RangeInclusive<SyntaxElement>) {
+ replace_all(range, Vec::new());
+}
+pub fn remove_all_iter(range: impl IntoIterator<Item = SyntaxElement>) {
+ let mut it = range.into_iter();
+ if let Some(mut first) = it.next() {
+ match it.last() {
+ Some(mut last) => {
+ if first.index() > last.index() {
+ mem::swap(&mut first, &mut last);
+ }
+ remove_all(first..=last);
+ }
+ None => remove(first),
+ }
+ }
+}
+
+pub fn replace(old: impl Element, new: impl Element) {
+ replace_with_many(old, vec![new.syntax_element()]);
+}
+pub fn replace_with_many(old: impl Element, new: Vec<SyntaxElement>) {
+ let old = old.syntax_element();
+ replace_all(old.clone()..=old, new);
+}
+pub fn replace_all(range: RangeInclusive<SyntaxElement>, new: Vec<SyntaxElement>) {
+ let start = range.start().index();
+ let end = range.end().index();
+ let parent = range.start().parent().unwrap();
+ parent.splice_children(start..end + 1, new);
+}
+
+pub fn append_child(node: &(impl Into<SyntaxNode> + Clone), child: impl Element) {
+ let position = Position::last_child_of(node);
+ insert(position, child);
+}
+pub fn append_child_raw(node: &(impl Into<SyntaxNode> + Clone), child: impl Element) {
+ let position = Position::last_child_of(node);
+ insert_raw(position, child);
+}
+
+fn ws_before(position: &Position, new: &SyntaxElement) -> Option<SyntaxToken> {
+ let prev = match &position.repr {
+ PositionRepr::FirstChild(_) => return None,
+ PositionRepr::After(it) => it,
+ };
+
+ if prev.kind() == T!['{'] && new.kind() == SyntaxKind::USE {
+ if let Some(item_list) = prev.parent().and_then(ast::ItemList::cast) {
+ let mut indent = IndentLevel::from_element(&item_list.syntax().clone().into());
+ indent.0 += 1;
+ return Some(make::tokens::whitespace(&format!("\n{}", indent)));
+ }
+ }
+
+ if prev.kind() == T!['{'] && ast::Stmt::can_cast(new.kind()) {
+ if let Some(stmt_list) = prev.parent().and_then(ast::StmtList::cast) {
+ let mut indent = IndentLevel::from_element(&stmt_list.syntax().clone().into());
+ indent.0 += 1;
+ return Some(make::tokens::whitespace(&format!("\n{}", indent)));
+ }
+ }
+
+ ws_between(prev, new)
+}
+fn ws_after(position: &Position, new: &SyntaxElement) -> Option<SyntaxToken> {
+ let next = match &position.repr {
+ PositionRepr::FirstChild(parent) => parent.first_child_or_token()?,
+ PositionRepr::After(sibling) => sibling.next_sibling_or_token()?,
+ };
+ ws_between(new, &next)
+}
+fn ws_between(left: &SyntaxElement, right: &SyntaxElement) -> Option<SyntaxToken> {
+ if left.kind() == SyntaxKind::WHITESPACE || right.kind() == SyntaxKind::WHITESPACE {
+ return None;
+ }
+ if right.kind() == T![;] || right.kind() == T![,] {
+ return None;
+ }
+ if left.kind() == T![<] || right.kind() == T![>] {
+ return None;
+ }
+ if left.kind() == T![&] && right.kind() == SyntaxKind::LIFETIME {
+ return None;
+ }
+ if right.kind() == SyntaxKind::GENERIC_ARG_LIST {
+ return None;
+ }
+
+ if right.kind() == SyntaxKind::USE {
+ let mut indent = IndentLevel::from_element(left);
+ if left.kind() == SyntaxKind::USE {
+ indent.0 = IndentLevel::from_element(right).0.max(indent.0);
+ }
+ return Some(make::tokens::whitespace(&format!("\n{}", indent)));
+ }
+ Some(make::tokens::single_space())
+}
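
A hedged sketch of an in-place `ted` edit; it assumes rowan's `clone_for_update` to obtain a mutable tree, which the `splice_children`/`detach` calls used by `ted` require:

```rust
use syntax::{ted, AstNode, SourceFile, SyntaxKind};

fn main() {
    let parse = SourceFile::parse("fn foo() { 1 + 1 }");
    // `ted` edits mutate the tree in place, so work on a mutable clone.
    let file = parse.tree().syntax().clone_for_update();

    // Remove the first whitespace token (between `fn` and `foo`).
    let ws = file
        .descendants_with_tokens()
        .find(|it| it.kind() == SyntaxKind::WHITESPACE)
        .unwrap();
    ted::remove(ws);

    assert_eq!(file.text().to_string(), "fnfoo() { 1 + 1 }");
}
```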
diff --git a/src/tools/rust-analyzer/crates/syntax/src/tests.rs b/src/tools/rust-analyzer/crates/syntax/src/tests.rs
new file mode 100644
index 000000000..58fba8cfa
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/tests.rs
@@ -0,0 +1,186 @@
+#[cfg(not(feature = "in-rust-tree"))]
+mod ast_src;
+#[cfg(not(feature = "in-rust-tree"))]
+mod sourcegen_ast;
+
+use std::{
+ fs,
+ path::{Path, PathBuf},
+};
+
+use ast::HasName;
+use expect_test::expect_file;
+use rayon::prelude::*;
+use test_utils::{bench, bench_fixture, project_root};
+
+use crate::{ast, fuzz, AstNode, SourceFile, SyntaxError};
+
+#[test]
+fn parse_smoke_test() {
+ let code = r##"
+fn main() {
+ println!("Hello, world!")
+}
+ "##;
+
+ let parse = SourceFile::parse(code);
+ // eprintln!("{:#?}", parse.syntax_node());
+ assert!(parse.ok().is_ok());
+}
+
+#[test]
+fn benchmark_parser() {
+ if std::env::var("RUN_SLOW_BENCHES").is_err() {
+ return;
+ }
+
+ let data = bench_fixture::glorious_old_parser();
+ let tree = {
+ let _b = bench("parsing");
+ let p = SourceFile::parse(&data);
+ assert!(p.errors.is_empty());
+ assert_eq!(p.tree().syntax.text_range().len(), 352474.into());
+ p.tree()
+ };
+
+ {
+ let _b = bench("tree traversal");
+ let fn_names =
+ tree.syntax().descendants().filter_map(ast::Fn::cast).filter_map(|f| f.name()).count();
+ assert_eq!(fn_names, 268);
+ }
+}
+
+#[test]
+fn validation_tests() {
+ dir_tests(&test_data_dir(), &["parser/validation"], "rast", |text, path| {
+ let parse = SourceFile::parse(text);
+ let errors = parse.errors();
+ assert_errors_are_present(errors, path);
+ parse.debug_dump()
+ });
+}
+
+#[test]
+fn parser_fuzz_tests() {
+ for (_, text) in collect_rust_files(&test_data_dir(), &["parser/fuzz-failures"]) {
+ fuzz::check_parser(&text)
+ }
+}
+
+#[test]
+fn reparse_fuzz_tests() {
+ for (_, text) in collect_rust_files(&test_data_dir(), &["reparse/fuzz-failures"]) {
+ let check = fuzz::CheckReparse::from_data(text.as_bytes()).unwrap();
+ check.run();
+ }
+}
+
+/// Test that rust-analyzer can parse and validate its own source code
+#[test]
+fn self_hosting_parsing() {
+ let crates_dir = project_root().join("crates");
+
+ let mut files = ::sourcegen::list_rust_files(&crates_dir);
+ files.retain(|path| {
+ // Get all files which are not in the crates/syntax/test_data folder
+ !path.components().any(|component| component.as_os_str() == "test_data")
+ });
+
+ assert!(
+ files.len() > 100,
+ "self_hosting_parsing found too few files - is it running in the right directory?"
+ );
+
+ let errors = files
+ .into_par_iter()
+ .filter_map(|file| {
+ let text = read_text(&file);
+ match SourceFile::parse(&text).ok() {
+ Ok(_) => None,
+ Err(err) => Some((file, err)),
+ }
+ })
+ .collect::<Vec<_>>();
+
+ if !errors.is_empty() {
+ let errors = errors
+ .into_iter()
+ .map(|(path, err)| format!("{}: {:?}\n", path.display(), err[0]))
+ .collect::<String>();
+ panic!("Parsing errors:\n{}\n", errors);
+ }
+}
+
+fn test_data_dir() -> PathBuf {
+ project_root().join("crates/syntax/test_data")
+}
+
+fn assert_errors_are_present(errors: &[SyntaxError], path: &Path) {
+ assert!(!errors.is_empty(), "There should be errors in the file {:?}", path.display());
+}
+
+/// Calls callback `f` with input code and file paths for each `.rs` file in `test_data_dir`
+/// subdirectories defined by `paths`.
+///
+/// If the content of the matching output file differs from the output of `f()`
+/// the test will fail.
+///
+/// If there is no matching output file it will be created and filled with the
+/// output of `f()`, but the test will fail.
+fn dir_tests<F>(test_data_dir: &Path, paths: &[&str], outfile_extension: &str, f: F)
+where
+ F: Fn(&str, &Path) -> String,
+{
+ for (path, input_code) in collect_rust_files(test_data_dir, paths) {
+ let actual = f(&input_code, &path);
+ let path = path.with_extension(outfile_extension);
+ expect_file![path].assert_eq(&actual)
+ }
+}
+
+/// Collects all `.rs` files from `dir` subdirectories defined by `paths`.
+fn collect_rust_files(root_dir: &Path, paths: &[&str]) -> Vec<(PathBuf, String)> {
+ paths
+ .iter()
+ .flat_map(|path| {
+ let path = root_dir.to_owned().join(path);
+ rust_files_in_dir(&path).into_iter()
+ })
+ .map(|path| {
+ let text = read_text(&path);
+ (path, text)
+ })
+ .collect()
+}
+
+/// Collects paths to all `.rs` files from `dir` in a sorted `Vec<PathBuf>`.
+fn rust_files_in_dir(dir: &Path) -> Vec<PathBuf> {
+ let mut acc = Vec::new();
+ for file in fs::read_dir(&dir).unwrap() {
+ let file = file.unwrap();
+ let path = file.path();
+ if path.extension().unwrap_or_default() == "rs" {
+ acc.push(path);
+ }
+ }
+ acc.sort();
+ acc
+}
+
+/// Read file and normalize newlines.
+///
+/// `rustc` seems to always normalize `\r\n` newlines to `\n`:
+///
+/// ```
+/// let s = "
+/// ";
+/// assert_eq!(s.as_bytes(), &[10]);
+/// ```
+///
+/// so this should always be correct.
+fn read_text(path: &Path) -> String {
+ fs::read_to_string(path)
+ .unwrap_or_else(|_| panic!("File at {:?} should be valid", path))
+ .replace("\r\n", "\n")
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/tests/ast_src.rs b/src/tools/rust-analyzer/crates/syntax/src/tests/ast_src.rs
new file mode 100644
index 000000000..cf5be1c30
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/tests/ast_src.rs
@@ -0,0 +1,252 @@
+//! Defines the input for the code generation process.
+
+pub(crate) struct KindsSrc<'a> {
+ pub(crate) punct: &'a [(&'a str, &'a str)],
+ pub(crate) keywords: &'a [&'a str],
+ pub(crate) contextual_keywords: &'a [&'a str],
+ pub(crate) literals: &'a [&'a str],
+ pub(crate) tokens: &'a [&'a str],
+ pub(crate) nodes: &'a [&'a str],
+}
+
+pub(crate) const KINDS_SRC: KindsSrc<'_> = KindsSrc {
+ punct: &[
+ (";", "SEMICOLON"),
+ (",", "COMMA"),
+ ("(", "L_PAREN"),
+ (")", "R_PAREN"),
+ ("{", "L_CURLY"),
+ ("}", "R_CURLY"),
+ ("[", "L_BRACK"),
+ ("]", "R_BRACK"),
+ ("<", "L_ANGLE"),
+ (">", "R_ANGLE"),
+ ("@", "AT"),
+ ("#", "POUND"),
+ ("~", "TILDE"),
+ ("?", "QUESTION"),
+ ("$", "DOLLAR"),
+ ("&", "AMP"),
+ ("|", "PIPE"),
+ ("+", "PLUS"),
+ ("*", "STAR"),
+ ("/", "SLASH"),
+ ("^", "CARET"),
+ ("%", "PERCENT"),
+ ("_", "UNDERSCORE"),
+ (".", "DOT"),
+ ("..", "DOT2"),
+ ("...", "DOT3"),
+ ("..=", "DOT2EQ"),
+ (":", "COLON"),
+ ("::", "COLON2"),
+ ("=", "EQ"),
+ ("==", "EQ2"),
+ ("=>", "FAT_ARROW"),
+ ("!", "BANG"),
+ ("!=", "NEQ"),
+ ("-", "MINUS"),
+ ("->", "THIN_ARROW"),
+ ("<=", "LTEQ"),
+ (">=", "GTEQ"),
+ ("+=", "PLUSEQ"),
+ ("-=", "MINUSEQ"),
+ ("|=", "PIPEEQ"),
+ ("&=", "AMPEQ"),
+ ("^=", "CARETEQ"),
+ ("/=", "SLASHEQ"),
+ ("*=", "STAREQ"),
+ ("%=", "PERCENTEQ"),
+ ("&&", "AMP2"),
+ ("||", "PIPE2"),
+ ("<<", "SHL"),
+ (">>", "SHR"),
+ ("<<=", "SHLEQ"),
+ (">>=", "SHREQ"),
+ ],
+ keywords: &[
+ "as", "async", "await", "box", "break", "const", "continue", "crate", "dyn", "else",
+ "enum", "extern", "false", "fn", "for", "if", "impl", "in", "let", "loop", "macro",
+ "match", "mod", "move", "mut", "pub", "ref", "return", "self", "Self", "static", "struct",
+ "super", "trait", "true", "try", "type", "unsafe", "use", "where", "while", "yield",
+ ],
+ contextual_keywords: &["auto", "default", "existential", "union", "raw", "macro_rules"],
+ literals: &["INT_NUMBER", "FLOAT_NUMBER", "CHAR", "BYTE", "STRING", "BYTE_STRING"],
+ tokens: &["ERROR", "IDENT", "WHITESPACE", "LIFETIME_IDENT", "COMMENT", "SHEBANG"],
+ nodes: &[
+ "SOURCE_FILE",
+ "STRUCT",
+ "UNION",
+ "ENUM",
+ "FN",
+ "RET_TYPE",
+ "EXTERN_CRATE",
+ "MODULE",
+ "USE",
+ "STATIC",
+ "CONST",
+ "TRAIT",
+ "IMPL",
+ "TYPE_ALIAS",
+ "MACRO_CALL",
+ "MACRO_RULES",
+ "MACRO_ARM",
+ "TOKEN_TREE",
+ "MACRO_DEF",
+ "PAREN_TYPE",
+ "TUPLE_TYPE",
+ "MACRO_TYPE",
+ "NEVER_TYPE",
+ "PATH_TYPE",
+ "PTR_TYPE",
+ "ARRAY_TYPE",
+ "SLICE_TYPE",
+ "REF_TYPE",
+ "INFER_TYPE",
+ "FN_PTR_TYPE",
+ "FOR_TYPE",
+ "IMPL_TRAIT_TYPE",
+ "DYN_TRAIT_TYPE",
+ "OR_PAT",
+ "PAREN_PAT",
+ "REF_PAT",
+ "BOX_PAT",
+ "IDENT_PAT",
+ "WILDCARD_PAT",
+ "REST_PAT",
+ "PATH_PAT",
+ "RECORD_PAT",
+ "RECORD_PAT_FIELD_LIST",
+ "RECORD_PAT_FIELD",
+ "TUPLE_STRUCT_PAT",
+ "TUPLE_PAT",
+ "SLICE_PAT",
+ "RANGE_PAT",
+ "LITERAL_PAT",
+ "MACRO_PAT",
+ "CONST_BLOCK_PAT",
+ // atoms
+ "TUPLE_EXPR",
+ "ARRAY_EXPR",
+ "PAREN_EXPR",
+ "PATH_EXPR",
+ "CLOSURE_EXPR",
+ "IF_EXPR",
+ "WHILE_EXPR",
+ "LOOP_EXPR",
+ "FOR_EXPR",
+ "CONTINUE_EXPR",
+ "BREAK_EXPR",
+ "LABEL",
+ "BLOCK_EXPR",
+ "STMT_LIST",
+ "RETURN_EXPR",
+ "YIELD_EXPR",
+ "LET_EXPR",
+ "UNDERSCORE_EXPR",
+ "MACRO_EXPR",
+ "MATCH_EXPR",
+ "MATCH_ARM_LIST",
+ "MATCH_ARM",
+ "MATCH_GUARD",
+ "RECORD_EXPR",
+ "RECORD_EXPR_FIELD_LIST",
+ "RECORD_EXPR_FIELD",
+ "BOX_EXPR",
+ // postfix
+ "CALL_EXPR",
+ "INDEX_EXPR",
+ "METHOD_CALL_EXPR",
+ "FIELD_EXPR",
+ "AWAIT_EXPR",
+ "TRY_EXPR",
+ "CAST_EXPR",
+ // unary
+ "REF_EXPR",
+ "PREFIX_EXPR",
+ "RANGE_EXPR", // just weird
+ "BIN_EXPR",
+ "EXTERN_BLOCK",
+ "EXTERN_ITEM_LIST",
+ "VARIANT",
+ "RECORD_FIELD_LIST",
+ "RECORD_FIELD",
+ "TUPLE_FIELD_LIST",
+ "TUPLE_FIELD",
+ "VARIANT_LIST",
+ "ITEM_LIST",
+ "ASSOC_ITEM_LIST",
+ "ATTR",
+ "META",
+ "USE_TREE",
+ "USE_TREE_LIST",
+ "PATH",
+ "PATH_SEGMENT",
+ "LITERAL",
+ "RENAME",
+ "VISIBILITY",
+ "WHERE_CLAUSE",
+ "WHERE_PRED",
+ "ABI",
+ "NAME",
+ "NAME_REF",
+ "LET_STMT",
+ "LET_ELSE",
+ "EXPR_STMT",
+ "GENERIC_PARAM_LIST",
+ "GENERIC_PARAM",
+ "LIFETIME_PARAM",
+ "TYPE_PARAM",
+ "CONST_PARAM",
+ "GENERIC_ARG_LIST",
+ "LIFETIME",
+ "LIFETIME_ARG",
+ "TYPE_ARG",
+ "ASSOC_TYPE_ARG",
+ "CONST_ARG",
+ "PARAM_LIST",
+ "PARAM",
+ "SELF_PARAM",
+ "ARG_LIST",
+ "TYPE_BOUND",
+ "TYPE_BOUND_LIST",
+ // macro related
+ "MACRO_ITEMS",
+ "MACRO_STMTS",
+ ],
+};
+
+#[derive(Default, Debug)]
+pub(crate) struct AstSrc {
+ pub(crate) tokens: Vec<String>,
+ pub(crate) nodes: Vec<AstNodeSrc>,
+ pub(crate) enums: Vec<AstEnumSrc>,
+}
+
+#[derive(Debug)]
+pub(crate) struct AstNodeSrc {
+ pub(crate) doc: Vec<String>,
+ pub(crate) name: String,
+ pub(crate) traits: Vec<String>,
+ pub(crate) fields: Vec<Field>,
+}
+
+#[derive(Debug, Eq, PartialEq)]
+pub(crate) enum Field {
+ Token(String),
+ Node { name: String, ty: String, cardinality: Cardinality },
+}
+
+#[derive(Debug, Eq, PartialEq)]
+pub(crate) enum Cardinality {
+ Optional,
+ Many,
+}
+
+#[derive(Debug)]
+pub(crate) struct AstEnumSrc {
+ pub(crate) doc: Vec<String>,
+ pub(crate) name: String,
+ pub(crate) traits: Vec<String>,
+ pub(crate) variants: Vec<String>,
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/tests/sourcegen_ast.rs b/src/tools/rust-analyzer/crates/syntax/src/tests/sourcegen_ast.rs
new file mode 100644
index 000000000..6d2766225
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/tests/sourcegen_ast.rs
@@ -0,0 +1,862 @@
+//! This module generates the AST datatypes used by rust-analyzer.
+//!
+//! Specifically, it generates the `SyntaxKind` enum and a number of newtype
+//! wrappers around `SyntaxNode` which implement `syntax::AstNode`.
+
+use std::{
+ collections::{BTreeSet, HashSet},
+ fmt::Write,
+};
+
+use itertools::Itertools;
+use proc_macro2::{Punct, Spacing};
+use quote::{format_ident, quote};
+use ungrammar::{Grammar, Rule};
+
+use crate::tests::ast_src::{
+ AstEnumSrc, AstNodeSrc, AstSrc, Cardinality, Field, KindsSrc, KINDS_SRC,
+};
+
+#[test]
+fn sourcegen_ast() {
+ let syntax_kinds = generate_syntax_kinds(KINDS_SRC);
+ let syntax_kinds_file =
+ sourcegen::project_root().join("crates/parser/src/syntax_kind/generated.rs");
+ sourcegen::ensure_file_contents(syntax_kinds_file.as_path(), &syntax_kinds);
+
+ let grammar =
+ include_str!(concat!(env!("CARGO_MANIFEST_DIR"), "/rust.ungram")).parse().unwrap();
+ let ast = lower(&grammar);
+
+ let ast_tokens = generate_tokens(&ast);
+ let ast_tokens_file =
+ sourcegen::project_root().join("crates/syntax/src/ast/generated/tokens.rs");
+ sourcegen::ensure_file_contents(ast_tokens_file.as_path(), &ast_tokens);
+
+ let ast_nodes = generate_nodes(KINDS_SRC, &ast);
+ let ast_nodes_file = sourcegen::project_root().join("crates/syntax/src/ast/generated/nodes.rs");
+ sourcegen::ensure_file_contents(ast_nodes_file.as_path(), &ast_nodes);
+}
+
+fn generate_tokens(grammar: &AstSrc) -> String {
+ let tokens = grammar.tokens.iter().map(|token| {
+ let name = format_ident!("{}", token);
+ let kind = format_ident!("{}", to_upper_snake_case(token));
+ quote! {
+ #[derive(Debug, Clone, PartialEq, Eq, Hash)]
+ pub struct #name {
+ pub(crate) syntax: SyntaxToken,
+ }
+ impl std::fmt::Display for #name {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(&self.syntax, f)
+ }
+ }
+ impl AstToken for #name {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == #kind }
+ fn cast(syntax: SyntaxToken) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
+ }
+ fn syntax(&self) -> &SyntaxToken { &self.syntax }
+ }
+ }
+ });
+
+ sourcegen::add_preamble(
+ "sourcegen_ast",
+ sourcegen::reformat(
+ quote! {
+ use crate::{SyntaxKind::{self, *}, SyntaxToken, ast::AstToken};
+ #(#tokens)*
+ }
+ .to_string(),
+ ),
+ )
+ .replace("#[derive", "\n#[derive")
+}
+
+fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> String {
+ let (node_defs, node_boilerplate_impls): (Vec<_>, Vec<_>) = grammar
+ .nodes
+ .iter()
+ .map(|node| {
+ let name = format_ident!("{}", node.name);
+ let kind = format_ident!("{}", to_upper_snake_case(&node.name));
+ let traits = node
+ .traits
+ .iter()
+ .filter(|trait_name| {
+ // Loops have two expressions, so this might collide; therefore we implement it manually.
+ node.name != "ForExpr" && node.name != "WhileExpr"
+ || trait_name.as_str() != "HasLoopBody"
+ })
+ .map(|trait_name| {
+ let trait_name = format_ident!("{}", trait_name);
+ quote!(impl ast::#trait_name for #name {})
+ });
+
+ let methods = node.fields.iter().map(|field| {
+ let method_name = field.method_name();
+ let ty = field.ty();
+
+ if field.is_many() {
+ quote! {
+ pub fn #method_name(&self) -> AstChildren<#ty> {
+ support::children(&self.syntax)
+ }
+ }
+ } else if let Some(token_kind) = field.token_kind() {
+ quote! {
+ pub fn #method_name(&self) -> Option<#ty> {
+ support::token(&self.syntax, #token_kind)
+ }
+ }
+ } else {
+ quote! {
+ pub fn #method_name(&self) -> Option<#ty> {
+ support::child(&self.syntax)
+ }
+ }
+ }
+ });
+ (
+ quote! {
+ #[pretty_doc_comment_placeholder_workaround]
+ #[derive(Debug, Clone, PartialEq, Eq, Hash)]
+ pub struct #name {
+ pub(crate) syntax: SyntaxNode,
+ }
+
+ #(#traits)*
+
+ impl #name {
+ #(#methods)*
+ }
+ },
+ quote! {
+ impl AstNode for #name {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ kind == #kind
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+ }
+ },
+ )
+ })
+ .unzip();
+
+ let (enum_defs, enum_boilerplate_impls): (Vec<_>, Vec<_>) = grammar
+ .enums
+ .iter()
+ .map(|en| {
+ let variants: Vec<_> = en.variants.iter().map(|var| format_ident!("{}", var)).collect();
+ let name = format_ident!("{}", en.name);
+ let kinds: Vec<_> = variants
+ .iter()
+ .map(|name| format_ident!("{}", to_upper_snake_case(&name.to_string())))
+ .collect();
+ let traits = en.traits.iter().map(|trait_name| {
+ let trait_name = format_ident!("{}", trait_name);
+ quote!(impl ast::#trait_name for #name {})
+ });
+
+ let ast_node = if en.name == "Stmt" {
+ quote! {}
+ } else {
+ quote! {
+ impl AstNode for #name {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ #(#kinds)|* => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ #(
+ #kinds => #name::#variants(#variants { syntax }),
+ )*
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ #(
+ #name::#variants(it) => &it.syntax,
+ )*
+ }
+ }
+ }
+ }
+ };
+
+ (
+ quote! {
+ #[pretty_doc_comment_placeholder_workaround]
+ #[derive(Debug, Clone, PartialEq, Eq, Hash)]
+ pub enum #name {
+ #(#variants(#variants),)*
+ }
+
+ #(#traits)*
+ },
+ quote! {
+ #(
+ impl From<#variants> for #name {
+ fn from(node: #variants) -> #name {
+ #name::#variants(node)
+ }
+ }
+ )*
+ #ast_node
+ },
+ )
+ })
+ .unzip();
+
+ let (any_node_defs, any_node_boilerplate_impls): (Vec<_>, Vec<_>) = grammar
+ .nodes
+ .iter()
+ .flat_map(|node| node.traits.iter().map(move |t| (t, node)))
+ .into_group_map()
+ .into_iter()
+ .sorted_by_key(|(k, _)| *k)
+ .map(|(trait_name, nodes)| {
+ let name = format_ident!("Any{}", trait_name);
+ let trait_name = format_ident!("{}", trait_name);
+ let kinds: Vec<_> = nodes
+ .iter()
+ .map(|name| format_ident!("{}", to_upper_snake_case(&name.name.to_string())))
+ .collect();
+
+ (
+ quote! {
+ #[pretty_doc_comment_placeholder_workaround]
+ #[derive(Debug, Clone, PartialEq, Eq, Hash)]
+ pub struct #name {
+ pub(crate) syntax: SyntaxNode,
+ }
+ impl ast::#trait_name for #name {}
+ },
+ quote! {
+ impl #name {
+ #[inline]
+ pub fn new<T: ast::#trait_name>(node: T) -> #name {
+ #name {
+ syntax: node.syntax().clone()
+ }
+ }
+ }
+ impl AstNode for #name {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ match kind {
+ #(#kinds)|* => true,
+ _ => false,
+ }
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ Self::can_cast(syntax.kind()).then(|| #name { syntax })
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ &self.syntax
+ }
+ }
+ },
+ )
+ })
+ .unzip();
+
+ let enum_names = grammar.enums.iter().map(|it| &it.name);
+ let node_names = grammar.nodes.iter().map(|it| &it.name);
+
+ let display_impls =
+ enum_names.chain(node_names.clone()).map(|it| format_ident!("{}", it)).map(|name| {
+ quote! {
+ impl std::fmt::Display for #name {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+ }
+ }
+ });
+
+ let defined_nodes: HashSet<_> = node_names.collect();
+
+ for node in kinds
+ .nodes
+ .iter()
+ .map(|kind| to_pascal_case(kind))
+ .filter(|name| !defined_nodes.iter().any(|&it| it == name))
+ {
+ drop(node)
+ // FIXME: restore this
+ // eprintln!("Warning: node {} not defined in ast source", node);
+ }
+
+ let ast = quote! {
+ #![allow(non_snake_case)]
+ use crate::{
+ SyntaxNode, SyntaxToken, SyntaxKind::{self, *},
+ ast::{self, AstNode, AstChildren, support},
+ T,
+ };
+
+ #(#node_defs)*
+ #(#enum_defs)*
+ #(#any_node_defs)*
+ #(#node_boilerplate_impls)*
+ #(#enum_boilerplate_impls)*
+ #(#any_node_boilerplate_impls)*
+ #(#display_impls)*
+ };
+
+ let ast = ast.to_string().replace("T ! [", "T![");
+
+ let mut res = String::with_capacity(ast.len() * 2);
+
+ let mut docs =
+ grammar.nodes.iter().map(|it| &it.doc).chain(grammar.enums.iter().map(|it| &it.doc));
+
+ for chunk in ast.split("# [pretty_doc_comment_placeholder_workaround] ") {
+ res.push_str(chunk);
+ if let Some(doc) = docs.next() {
+ write_doc_comment(doc, &mut res);
+ }
+ }
+
+ let res = sourcegen::add_preamble("sourcegen_ast", sourcegen::reformat(res));
+ res.replace("#[derive", "\n#[derive")
+}
+
+fn write_doc_comment(contents: &[String], dest: &mut String) {
+ for line in contents {
+ writeln!(dest, "///{}", line).unwrap();
+ }
+}
+
+fn generate_syntax_kinds(grammar: KindsSrc<'_>) -> String {
+ let (single_byte_tokens_values, single_byte_tokens): (Vec<_>, Vec<_>) = grammar
+ .punct
+ .iter()
+ .filter(|(token, _name)| token.len() == 1)
+ .map(|(token, name)| (token.chars().next().unwrap(), format_ident!("{}", name)))
+ .unzip();
+
+ let punctuation_values = grammar.punct.iter().map(|(token, _name)| {
+ if "{}[]()".contains(token) {
+ let c = token.chars().next().unwrap();
+ quote! { #c }
+ } else {
+ let cs = token.chars().map(|c| Punct::new(c, Spacing::Joint));
+ quote! { #(#cs)* }
+ }
+ });
+ let punctuation =
+ grammar.punct.iter().map(|(_token, name)| format_ident!("{}", name)).collect::<Vec<_>>();
+
+ let x = |&name| match name {
+ "Self" => format_ident!("SELF_TYPE_KW"),
+ name => format_ident!("{}_KW", to_upper_snake_case(name)),
+ };
+ let full_keywords_values = grammar.keywords;
+ let full_keywords = full_keywords_values.iter().map(x);
+
+ let contextual_keywords_values = &grammar.contextual_keywords;
+ let contextual_keywords = contextual_keywords_values.iter().map(x);
+
+ let all_keywords_values = grammar
+ .keywords
+ .iter()
+ .chain(grammar.contextual_keywords.iter())
+ .copied()
+ .collect::<Vec<_>>();
+ let all_keywords_idents = all_keywords_values.iter().map(|kw| format_ident!("{}", kw));
+ let all_keywords = all_keywords_values.iter().map(x).collect::<Vec<_>>();
+
+ let literals =
+ grammar.literals.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>();
+
+ let tokens = grammar.tokens.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>();
+
+ let nodes = grammar.nodes.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>();
+
+ let ast = quote! {
+ #![allow(bad_style, missing_docs, unreachable_pub)]
+ /// The kind of syntax node, e.g. `IDENT`, `USE_KW`, or `STRUCT`.
+ #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
+ #[repr(u16)]
+ pub enum SyntaxKind {
+ // Technical SyntaxKinds: they appear temporarily during parsing,
+ // but never end up in the final tree
+ #[doc(hidden)]
+ TOMBSTONE,
+ #[doc(hidden)]
+ EOF,
+ #(#punctuation,)*
+ #(#all_keywords,)*
+ #(#literals,)*
+ #(#tokens,)*
+ #(#nodes,)*
+
+ // Technical kind so that we can cast from u16 safely
+ #[doc(hidden)]
+ __LAST,
+ }
+ use self::SyntaxKind::*;
+
+ impl SyntaxKind {
+ pub fn is_keyword(self) -> bool {
+ match self {
+ #(#all_keywords)|* => true,
+ _ => false,
+ }
+ }
+
+ pub fn is_punct(self) -> bool {
+ match self {
+ #(#punctuation)|* => true,
+ _ => false,
+ }
+ }
+
+ pub fn is_literal(self) -> bool {
+ match self {
+ #(#literals)|* => true,
+ _ => false,
+ }
+ }
+
+ pub fn from_keyword(ident: &str) -> Option<SyntaxKind> {
+ let kw = match ident {
+ #(#full_keywords_values => #full_keywords,)*
+ _ => return None,
+ };
+ Some(kw)
+ }
+
+ pub fn from_contextual_keyword(ident: &str) -> Option<SyntaxKind> {
+ let kw = match ident {
+ #(#contextual_keywords_values => #contextual_keywords,)*
+ _ => return None,
+ };
+ Some(kw)
+ }
+
+ pub fn from_char(c: char) -> Option<SyntaxKind> {
+ let tok = match c {
+ #(#single_byte_tokens_values => #single_byte_tokens,)*
+ _ => return None,
+ };
+ Some(tok)
+ }
+ }
+
+ #[macro_export]
+ macro_rules! T {
+ #([#punctuation_values] => { $crate::SyntaxKind::#punctuation };)*
+ #([#all_keywords_idents] => { $crate::SyntaxKind::#all_keywords };)*
+ [lifetime_ident] => { $crate::SyntaxKind::LIFETIME_IDENT };
+ [ident] => { $crate::SyntaxKind::IDENT };
+ [shebang] => { $crate::SyntaxKind::SHEBANG };
+ }
+ pub use T;
+ };
+
+ sourcegen::add_preamble("sourcegen_ast", sourcegen::reformat(ast.to_string()))
+}
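+
+// For orientation (an illustrative note, not part of the generated output):
+// downstream code uses the generated `T!` macro as shorthand for `SyntaxKind`
+// values, e.g. `T![;]` for `SyntaxKind::SEMICOLON` and `T![match]` for
+// `SyntaxKind::MATCH_KW`.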
+
+fn to_upper_snake_case(s: &str) -> String {
+ let mut buf = String::with_capacity(s.len());
+ let mut prev = false;
+ for c in s.chars() {
+ if c.is_ascii_uppercase() && prev {
+ buf.push('_')
+ }
+ prev = true;
+
+ buf.push(c.to_ascii_uppercase());
+ }
+ buf
+}
+
+fn to_lower_snake_case(s: &str) -> String {
+ let mut buf = String::with_capacity(s.len());
+ let mut prev = false;
+ for c in s.chars() {
+ if c.is_ascii_uppercase() && prev {
+ buf.push('_')
+ }
+ prev = true;
+
+ buf.push(c.to_ascii_lowercase());
+ }
+ buf
+}
+
+fn to_pascal_case(s: &str) -> String {
+ let mut buf = String::with_capacity(s.len());
+ let mut prev_is_underscore = true;
+ for c in s.chars() {
+ if c == '_' {
+ prev_is_underscore = true;
+ } else if prev_is_underscore {
+ buf.push(c.to_ascii_uppercase());
+ prev_is_underscore = false;
+ } else {
+ buf.push(c.to_ascii_lowercase());
+ }
+ }
+ buf
+}
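+
+// Illustrative check of the case-conversion helpers above (a sketch added for
+// exposition, not part of the upstream source).
+#[test]
+fn case_conversion_examples() {
+ assert_eq!(to_upper_snake_case("MatchArm"), "MATCH_ARM");
+ assert_eq!(to_lower_snake_case("MatchArm"), "match_arm");
+ assert_eq!(to_pascal_case("match_arm"), "MatchArm");
+}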
+
+fn pluralize(s: &str) -> String {
+ format!("{}s", s)
+}
+
+impl Field {
+ fn is_many(&self) -> bool {
+ matches!(self, Field::Node { cardinality: Cardinality::Many, .. })
+ }
+ fn token_kind(&self) -> Option<proc_macro2::TokenStream> {
+ match self {
+ Field::Token(token) => {
+ let token: proc_macro2::TokenStream = token.parse().unwrap();
+ Some(quote! { T![#token] })
+ }
+ _ => None,
+ }
+ }
+ fn method_name(&self) -> proc_macro2::Ident {
+ match self {
+ Field::Token(name) => {
+ let name = match name.as_str() {
+ ";" => "semicolon",
+ "->" => "thin_arrow",
+ "'{'" => "l_curly",
+ "'}'" => "r_curly",
+ "'('" => "l_paren",
+ "')'" => "r_paren",
+ "'['" => "l_brack",
+ "']'" => "r_brack",
+ "<" => "l_angle",
+ ">" => "r_angle",
+ "=" => "eq",
+ "!" => "excl",
+ "*" => "star",
+ "&" => "amp",
+ "_" => "underscore",
+ "." => "dot",
+ ".." => "dotdot",
+ "..." => "dotdotdot",
+ "..=" => "dotdoteq",
+ "=>" => "fat_arrow",
+ "@" => "at",
+ ":" => "colon",
+ "::" => "coloncolon",
+ "#" => "pound",
+ "?" => "question_mark",
+ "," => "comma",
+ "|" => "pipe",
+ "~" => "tilde",
+ _ => name,
+ };
+ format_ident!("{}_token", name)
+ }
+ Field::Node { name, .. } => {
+ if name == "type" {
+ format_ident!("ty")
+ } else {
+ format_ident!("{}", name)
+ }
+ }
+ }
+ }
+ fn ty(&self) -> proc_macro2::Ident {
+ match self {
+ Field::Token(_) => format_ident!("SyntaxToken"),
+ Field::Node { ty, .. } => format_ident!("{}", ty),
+ }
+ }
+}
+
+fn lower(grammar: &Grammar) -> AstSrc {
+ let mut res = AstSrc {
+ tokens: "Whitespace Comment String ByteString IntNumber FloatNumber Char Byte Ident"
+ .split_ascii_whitespace()
+ .map(|it| it.to_string())
+ .collect::<Vec<_>>(),
+ ..Default::default()
+ };
+
+ let nodes = grammar.iter().collect::<Vec<_>>();
+
+ for &node in &nodes {
+ let name = grammar[node].name.clone();
+ let rule = &grammar[node].rule;
+ match lower_enum(grammar, rule) {
+ Some(variants) => {
+ let enum_src = AstEnumSrc { doc: Vec::new(), name, traits: Vec::new(), variants };
+ res.enums.push(enum_src);
+ }
+ None => {
+ let mut fields = Vec::new();
+ lower_rule(&mut fields, grammar, None, rule);
+ res.nodes.push(AstNodeSrc { doc: Vec::new(), name, traits: Vec::new(), fields });
+ }
+ }
+ }
+
+ deduplicate_fields(&mut res);
+ extract_enums(&mut res);
+ extract_struct_traits(&mut res);
+ extract_enum_traits(&mut res);
+ res
+}
+
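+// E.g. (illustrative): an ungrammar rule that is an alternation of plain nodes,
+// such as `Pat = IdentPat | WildcardPat | ..`, lowers to a list of variant names
+// for an `AstEnumSrc`; any other rule shape returns `None` and the node becomes
+// an `AstNodeSrc` instead.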
+fn lower_enum(grammar: &Grammar, rule: &Rule) -> Option<Vec<String>> {
+ let alternatives = match rule {
+ Rule::Alt(it) => it,
+ _ => return None,
+ };
+ let mut variants = Vec::new();
+ for alternative in alternatives {
+ match alternative {
+ Rule::Node(it) => variants.push(grammar[*it].name.clone()),
+ Rule::Token(it) if grammar[*it].name == ";" => (),
+ _ => return None,
+ }
+ }
+ Some(variants)
+}
+
+fn lower_rule(acc: &mut Vec<Field>, grammar: &Grammar, label: Option<&String>, rule: &Rule) {
+ if lower_comma_list(acc, grammar, label, rule) {
+ return;
+ }
+
+ match rule {
+ Rule::Node(node) => {
+ let ty = grammar[*node].name.clone();
+ let name = label.cloned().unwrap_or_else(|| to_lower_snake_case(&ty));
+ let field = Field::Node { name, ty, cardinality: Cardinality::Optional };
+ acc.push(field);
+ }
+ Rule::Token(token) => {
+ assert!(label.is_none());
+ let mut name = grammar[*token].name.clone();
+ if name != "int_number" && name != "string" {
+ if "[]{}()".contains(&name) {
+ name = format!("'{}'", name);
+ }
+ let field = Field::Token(name);
+ acc.push(field);
+ }
+ }
+ Rule::Rep(inner) => {
+ if let Rule::Node(node) = &**inner {
+ let ty = grammar[*node].name.clone();
+ let name = label.cloned().unwrap_or_else(|| pluralize(&to_lower_snake_case(&ty)));
+ let field = Field::Node { name, ty, cardinality: Cardinality::Many };
+ acc.push(field);
+ return;
+ }
+ panic!("unhandled rule: {:?}", rule)
+ }
+ Rule::Labeled { label: l, rule } => {
+ assert!(label.is_none());
+ let manually_implemented = matches!(
+ l.as_str(),
+ "lhs"
+ | "rhs"
+ | "then_branch"
+ | "else_branch"
+ | "start"
+ | "end"
+ | "op"
+ | "index"
+ | "base"
+ | "value"
+ | "trait"
+ | "self_ty"
+ | "iterable"
+ | "condition"
+ );
+ if manually_implemented {
+ return;
+ }
+ lower_rule(acc, grammar, Some(l), rule);
+ }
+ Rule::Seq(rules) | Rule::Alt(rules) => {
+ for rule in rules {
+ lower_rule(acc, grammar, label, rule)
+ }
+ }
+ Rule::Opt(rule) => lower_rule(acc, grammar, label, rule),
+ }
+}
+
+// (T (',' T)* ','?)
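+// E.g. (illustrative): a list rule shaped like `Param (',' Param)* ','?` lowers
+// to a single `Field::Node { name: "params", cardinality: Cardinality::Many, .. }`
+// rather than separate `Param` and `','` fields.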
+fn lower_comma_list(
+ acc: &mut Vec<Field>,
+ grammar: &Grammar,
+ label: Option<&String>,
+ rule: &Rule,
+) -> bool {
+ let rule = match rule {
+ Rule::Seq(it) => it,
+ _ => return false,
+ };
+ let (node, repeat, trailing_comma) = match rule.as_slice() {
+ [Rule::Node(node), Rule::Rep(repeat), Rule::Opt(trailing_comma)] => {
+ (node, repeat, trailing_comma)
+ }
+ _ => return false,
+ };
+ let repeat = match &**repeat {
+ Rule::Seq(it) => it,
+ _ => return false,
+ };
+ match repeat.as_slice() {
+ [comma, Rule::Node(n)] if comma == &**trailing_comma && n == node => (),
+ _ => return false,
+ }
+ let ty = grammar[*node].name.clone();
+ let name = label.cloned().unwrap_or_else(|| pluralize(&to_lower_snake_case(&ty)));
+ let field = Field::Node { name, ty, cardinality: Cardinality::Many };
+ acc.push(field);
+ true
+}
+
+fn deduplicate_fields(ast: &mut AstSrc) {
+ for node in &mut ast.nodes {
+ let mut i = 0;
+ 'outer: while i < node.fields.len() {
+ for j in 0..i {
+ let f1 = &node.fields[i];
+ let f2 = &node.fields[j];
+ if f1 == f2 {
+ node.fields.remove(i);
+ continue 'outer;
+ }
+ }
+ i += 1;
+ }
+ }
+}
+
+fn extract_enums(ast: &mut AstSrc) {
+ for node in &mut ast.nodes {
+ for enm in &ast.enums {
+ let mut to_remove = Vec::new();
+ for (i, field) in node.fields.iter().enumerate() {
+ let ty = field.ty().to_string();
+ if enm.variants.iter().any(|it| it == &ty) {
+ to_remove.push(i);
+ }
+ }
+ if to_remove.len() == enm.variants.len() {
+ node.remove_field(to_remove);
+ let ty = enm.name.clone();
+ let name = to_lower_snake_case(&ty);
+ node.fields.push(Field::Node { name, ty, cardinality: Cardinality::Optional });
+ }
+ }
+ }
+}
+
+fn extract_struct_traits(ast: &mut AstSrc) {
+ let traits: &[(&str, &[&str])] = &[
+ ("HasAttrs", &["attrs"]),
+ ("HasName", &["name"]),
+ ("HasVisibility", &["visibility"]),
+ ("HasGenericParams", &["generic_param_list", "where_clause"]),
+ ("HasTypeBounds", &["type_bound_list", "colon_token"]),
+ ("HasModuleItem", &["items"]),
+ ("HasLoopBody", &["label", "loop_body"]),
+ ("HasArgList", &["arg_list"]),
+ ];
+
+ for node in &mut ast.nodes {
+ for (name, methods) in traits {
+ extract_struct_trait(node, name, methods);
+ }
+ }
+
+ let nodes_with_doc_comments = [
+ "SourceFile",
+ "Fn",
+ "Struct",
+ "Union",
+ "RecordField",
+ "TupleField",
+ "Enum",
+ "Variant",
+ "Trait",
+ "Module",
+ "Static",
+ "Const",
+ "TypeAlias",
+ "Impl",
+ "ExternBlock",
+ "ExternCrate",
+ "MacroCall",
+ "MacroRules",
+ "MacroDef",
+ "Use",
+ ];
+
+ for node in &mut ast.nodes {
+ if nodes_with_doc_comments.contains(&&*node.name) {
+ node.traits.push("HasDocComments".into());
+ }
+ }
+}
+
+fn extract_struct_trait(node: &mut AstNodeSrc, trait_name: &str, methods: &[&str]) {
+ let mut to_remove = Vec::new();
+ for (i, field) in node.fields.iter().enumerate() {
+ let method_name = field.method_name().to_string();
+ if methods.iter().any(|&it| it == method_name) {
+ to_remove.push(i);
+ }
+ }
+ if to_remove.len() == methods.len() {
+ node.traits.push(trait_name.to_string());
+ node.remove_field(to_remove);
+ }
+}
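+
+// In effect (illustrative): a node whose fields include `name` gains the
+// `HasName` trait and the `name` field is removed, so `generate_nodes` emits
+// only `impl ast::HasName for ... {}` and the accessor comes from the trait.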
+
+fn extract_enum_traits(ast: &mut AstSrc) {
+ for enm in &mut ast.enums {
+ if enm.name == "Stmt" {
+ continue;
+ }
+ let nodes = &ast.nodes;
+ let mut variant_traits = enm
+ .variants
+ .iter()
+ .map(|var| nodes.iter().find(|it| &it.name == var).unwrap())
+ .map(|node| node.traits.iter().cloned().collect::<BTreeSet<_>>());
+
+ let mut enum_traits = match variant_traits.next() {
+ Some(it) => it,
+ None => continue,
+ };
+ for traits in variant_traits {
+ enum_traits = enum_traits.intersection(&traits).cloned().collect();
+ }
+ enm.traits = enum_traits.into_iter().collect();
+ }
+}
+
+impl AstNodeSrc {
+ fn remove_field(&mut self, to_remove: Vec<usize>) {
+ to_remove.into_iter().rev().for_each(|idx| {
+ self.fields.remove(idx);
+ });
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/token_text.rs b/src/tools/rust-analyzer/crates/syntax/src/token_text.rs
new file mode 100644
index 000000000..913b24d42
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/token_text.rs
@@ -0,0 +1,95 @@
+//! Yet another version of an owned string, backed by a syntax tree token.
+
+use std::{cmp::Ordering, fmt, ops};
+
+use rowan::GreenToken;
+
+pub struct TokenText<'a>(pub(crate) Repr<'a>);
+
+pub(crate) enum Repr<'a> {
+ Borrowed(&'a str),
+ Owned(GreenToken),
+}
+
+impl<'a> TokenText<'a> {
+ pub(crate) fn borrowed(text: &'a str) -> Self {
+ TokenText(Repr::Borrowed(text))
+ }
+
+ pub(crate) fn owned(green: GreenToken) -> Self {
+ TokenText(Repr::Owned(green))
+ }
+
+ pub fn as_str(&self) -> &str {
+ match &self.0 {
+ &Repr::Borrowed(it) => it,
+ Repr::Owned(green) => green.text(),
+ }
+ }
+}
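+
+// Note (illustrative): thanks to the `Deref`, `AsRef`, and `PartialEq` impls
+// below, a `TokenText` can be used much like a plain string slice, e.g.
+// compared directly against a `&str` or a `String`.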
+
+impl ops::Deref for TokenText<'_> {
+ type Target = str;
+
+ fn deref(&self) -> &str {
+ self.as_str()
+ }
+}
+impl AsRef<str> for TokenText<'_> {
+ fn as_ref(&self) -> &str {
+ self.as_str()
+ }
+}
+
+impl From<TokenText<'_>> for String {
+ fn from(token_text: TokenText<'_>) -> Self {
+ token_text.as_str().into()
+ }
+}
+
+impl PartialEq<&'_ str> for TokenText<'_> {
+ fn eq(&self, other: &&str) -> bool {
+ self.as_str() == *other
+ }
+}
+impl PartialEq<TokenText<'_>> for &'_ str {
+ fn eq(&self, other: &TokenText<'_>) -> bool {
+ other == self
+ }
+}
+impl PartialEq<String> for TokenText<'_> {
+ fn eq(&self, other: &String) -> bool {
+ self.as_str() == other.as_str()
+ }
+}
+impl PartialEq<TokenText<'_>> for String {
+ fn eq(&self, other: &TokenText<'_>) -> bool {
+ other == self
+ }
+}
+impl PartialEq for TokenText<'_> {
+ fn eq(&self, other: &TokenText<'_>) -> bool {
+ self.as_str() == other.as_str()
+ }
+}
+impl Eq for TokenText<'_> {}
+impl Ord for TokenText<'_> {
+ fn cmp(&self, other: &Self) -> Ordering {
+ self.as_str().cmp(other.as_str())
+ }
+}
+impl PartialOrd for TokenText<'_> {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ Some(self.cmp(other))
+ }
+}
+impl fmt::Display for TokenText<'_> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Display::fmt(self.as_str(), f)
+ }
+}
+impl fmt::Debug for TokenText<'_> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Debug::fmt(self.as_str(), f)
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/utils.rs b/src/tools/rust-analyzer/crates/syntax/src/utils.rs
new file mode 100644
index 000000000..f4c02518b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/utils.rs
@@ -0,0 +1,43 @@
+//! A set of utility methods for reuse at other abstraction levels
+
+use itertools::Itertools;
+
+use crate::{ast, match_ast, AstNode};
+
+pub fn path_to_string_stripping_turbo_fish(path: &ast::Path) -> String {
+ path.syntax()
+ .children()
+ .filter_map(|node| {
+ match_ast! {
+ match node {
+ ast::PathSegment(it) => {
+ Some(it.name_ref()?.to_string())
+ },
+ ast::Path(it) => {
+ Some(path_to_string_stripping_turbo_fish(&it))
+ },
+ _ => None,
+ }
+ }
+ })
+ .join("::")
+}
+
+#[cfg(test)]
+mod tests {
+ use super::path_to_string_stripping_turbo_fish;
+ use crate::ast::make;
+
+ #[test]
+ fn turbofishes_are_stripped() {
+ assert_eq!("Vec", path_to_string_stripping_turbo_fish(&make::path_from_text("Vec::<i32>")),);
+ assert_eq!(
+ "Vec::new",
+ path_to_string_stripping_turbo_fish(&make::path_from_text("Vec::<i32>::new")),
+ );
+ assert_eq!(
+ "Vec::new",
+ path_to_string_stripping_turbo_fish(&make::path_from_text("Vec::new()")),
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/validation.rs b/src/tools/rust-analyzer/crates/syntax/src/validation.rs
new file mode 100644
index 000000000..b9f2b5132
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/validation.rs
@@ -0,0 +1,378 @@
+//! This module implements syntax validation that the parser doesn't handle.
+//!
+//! A failed validation emits a diagnostic.
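+//!
+//! For example (illustrative), `0..=` parses into a `RangeExpr`, and
+//! `validate_range_expr` below then reports "An inclusive range must have an
+//! end expression".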
+
+mod block;
+
+use rowan::Direction;
+use rustc_lexer::unescape::{
+ self, unescape_byte, unescape_byte_literal, unescape_char, unescape_literal, Mode,
+};
+
+use crate::{
+ algo,
+ ast::{self, HasAttrs, HasVisibility},
+ match_ast, AstNode, SyntaxError,
+ SyntaxKind::{CONST, FN, INT_NUMBER, TYPE_ALIAS},
+ SyntaxNode, SyntaxToken, TextSize, T,
+};
+
+pub(crate) fn validate(root: &SyntaxNode) -> Vec<SyntaxError> {
+ // FIXME:
+ // * Add unescape validation of raw string literals and raw byte string literals
+ // * Add validation that doc comments are attached to nodes
+
+ let mut errors = Vec::new();
+ for node in root.descendants() {
+ match_ast! {
+ match node {
+ ast::Literal(it) => validate_literal(it, &mut errors),
+ ast::Const(it) => validate_const(it, &mut errors),
+ ast::BlockExpr(it) => block::validate_block_expr(it, &mut errors),
+ ast::FieldExpr(it) => validate_numeric_name(it.name_ref(), &mut errors),
+ ast::RecordExprField(it) => validate_numeric_name(it.name_ref(), &mut errors),
+ ast::Visibility(it) => validate_visibility(it, &mut errors),
+ ast::RangeExpr(it) => validate_range_expr(it, &mut errors),
+ ast::PathSegment(it) => validate_path_keywords(it, &mut errors),
+ ast::RefType(it) => validate_trait_object_ref_ty(it, &mut errors),
+ ast::PtrType(it) => validate_trait_object_ptr_ty(it, &mut errors),
+ ast::FnPtrType(it) => validate_trait_object_fn_ptr_ret_ty(it, &mut errors),
+ ast::MacroRules(it) => validate_macro_rules(it, &mut errors),
+ ast::LetExpr(it) => validate_let_expr(it, &mut errors),
+ _ => (),
+ }
+ }
+ }
+ errors
+}
+
+fn rustc_unescape_error_to_string(err: unescape::EscapeError) -> &'static str {
+ use unescape::EscapeError as EE;
+
+ #[rustfmt::skip]
+ let err_message = match err {
+ EE::ZeroChars => {
+ "Literal must not be empty"
+ }
+ EE::MoreThanOneChar => {
+ "Literal must be one character long"
+ }
+ EE::LoneSlash => {
+ "Character must be escaped: `\\`"
+ }
+ EE::InvalidEscape => {
+ "Invalid escape"
+ }
+ EE::BareCarriageReturn | EE::BareCarriageReturnInRawString => {
+ "Character must be escaped: `\r`"
+ }
+ EE::EscapeOnlyChar => {
+ "Escape character `\\` must be escaped itself"
+ }
+ EE::TooShortHexEscape => {
+ "ASCII hex escape code must have exactly two digits"
+ }
+ EE::InvalidCharInHexEscape => {
+ "ASCII hex escape code must contain only hex characters"
+ }
+ EE::OutOfRangeHexEscape => {
+ "ASCII hex escape code must be at most 0x7F"
+ }
+ EE::NoBraceInUnicodeEscape => {
+ "Missing `{` to begin the unicode escape"
+ }
+ EE::InvalidCharInUnicodeEscape => {
+ "Unicode escape must contain only hex characters and underscores"
+ }
+ EE::EmptyUnicodeEscape => {
+ "Unicode escape must not be empty"
+ }
+ EE::UnclosedUnicodeEscape => {
+ "Missing `}` to terminate the unicode escape"
+ }
+ EE::LeadingUnderscoreUnicodeEscape => {
+ "Unicode escape code must not begin with an underscore"
+ }
+ EE::OverlongUnicodeEscape => {
+ "Unicode escape code must have at most 6 digits"
+ }
+ EE::LoneSurrogateUnicodeEscape => {
+ "Unicode escape code must not be a surrogate"
+ }
+ EE::OutOfRangeUnicodeEscape => {
+ "Unicode escape code must be at most 0x10FFFF"
+ }
+ EE::UnicodeEscapeInByte => {
+ "Byte literals must not contain unicode escapes"
+ }
+ EE::NonAsciiCharInByte | EE::NonAsciiCharInByteString => {
+ "Byte literals must not contain non-ASCII characters"
+ }
+ };
+
+ err_message
+}
+
+fn validate_literal(literal: ast::Literal, acc: &mut Vec<SyntaxError>) {
+ // FIXME: move this function to outer scope (https://github.com/rust-lang/rust-analyzer/pull/2834#discussion_r366196658)
+ fn unquote(text: &str, prefix_len: usize, end_delimiter: char) -> Option<&str> {
+ text.rfind(end_delimiter).and_then(|end| text.get(prefix_len..end))
+ }
+
+ let token = literal.token();
+ let text = token.text();
+
+ // FIXME: lift this lambda refactor to `fn` (https://github.com/rust-lang/rust-analyzer/pull/2834#discussion_r366199205)
+ let mut push_err = |prefix_len, (off, err): (usize, unescape::EscapeError)| {
+ let off = token.text_range().start() + TextSize::try_from(off + prefix_len).unwrap();
+ acc.push(SyntaxError::new_at_offset(rustc_unescape_error_to_string(err), off));
+ };
+
+ match literal.kind() {
+ ast::LiteralKind::String(s) => {
+ if !s.is_raw() {
+ if let Some(without_quotes) = unquote(text, 1, '"') {
+ unescape_literal(without_quotes, Mode::Str, &mut |range, char| {
+ if let Err(err) = char {
+ push_err(1, (range.start, err));
+ }
+ });
+ }
+ }
+ }
+ ast::LiteralKind::ByteString(s) => {
+ if !s.is_raw() {
+ if let Some(without_quotes) = unquote(text, 2, '"') {
+ unescape_byte_literal(without_quotes, Mode::ByteStr, &mut |range, char| {
+ if let Err(err) = char {
+ push_err(2, (range.start, err));
+ }
+ });
+ }
+ }
+ }
+ ast::LiteralKind::Char(_) => {
+ if let Some(Err(e)) = unquote(text, 1, '\'').map(unescape_char) {
+ push_err(1, e);
+ }
+ }
+ ast::LiteralKind::Byte(_) => {
+ if let Some(Err(e)) = unquote(text, 2, '\'').map(unescape_byte) {
+ push_err(2, e);
+ }
+ }
+ ast::LiteralKind::IntNumber(_)
+ | ast::LiteralKind::FloatNumber(_)
+ | ast::LiteralKind::Bool(_) => {}
+ }
+}
+
+pub(crate) fn validate_block_structure(root: &SyntaxNode) {
+ let mut stack = Vec::new();
+ for node in root.descendants_with_tokens() {
+ match node.kind() {
+ T!['{'] => stack.push(node),
+ T!['}'] => {
+ if let Some(pair) = stack.pop() {
+ assert_eq!(
+ node.parent(),
+ pair.parent(),
+ "\nunpaired curlys:\n{}\n{:#?}\n",
+ root.text(),
+ root,
+ );
+ assert!(
+ node.next_sibling_or_token().is_none()
+ && pair.prev_sibling_or_token().is_none(),
+ "\nfloating curlys at {:?}\nfile:\n{}\nerror:\n{}\n",
+ node,
+ root.text(),
+ node,
+ );
+ }
+ }
+ _ => (),
+ }
+ }
+}
+
+fn validate_numeric_name(name_ref: Option<ast::NameRef>, errors: &mut Vec<SyntaxError>) {
+ if let Some(int_token) = int_token(name_ref) {
+ if int_token.text().chars().any(|c| !c.is_digit(10)) {
+ errors.push(SyntaxError::new(
+ "Tuple (struct) field access is only allowed through \
+ decimal integers with no underscores or suffix",
+ int_token.text_range(),
+ ));
+ }
+ }
+
+ fn int_token(name_ref: Option<ast::NameRef>) -> Option<SyntaxToken> {
+ name_ref?.syntax().first_child_or_token()?.into_token().filter(|it| it.kind() == INT_NUMBER)
+ }
+}
+
+fn validate_visibility(vis: ast::Visibility, errors: &mut Vec<SyntaxError>) {
+ let path_without_in_token = vis.in_token().is_none()
+ && vis.path().and_then(|p| p.as_single_name_ref()).and_then(|n| n.ident_token()).is_some();
+ if path_without_in_token {
+ errors.push(SyntaxError::new("incorrect visibility restriction", vis.syntax.text_range()));
+ }
+ let parent = match vis.syntax().parent() {
+ Some(it) => it,
+ None => return,
+ };
+ match parent.kind() {
+ FN | CONST | TYPE_ALIAS => (),
+ _ => return,
+ }
+
+ let impl_def = match parent.parent().and_then(|it| it.parent()).and_then(ast::Impl::cast) {
+ Some(it) => it,
+ None => return,
+ };
+ // FIXME: disable validation if there's an attribute, since some proc macros use this syntax.
+ // Ideally the validation would run only on the fully expanded code; then this wouldn't be necessary.
+ if impl_def.trait_().is_some() && impl_def.attrs().next().is_none() {
+ errors.push(SyntaxError::new("Unnecessary visibility qualifier", vis.syntax.text_range()));
+ }
+}
+
+fn validate_range_expr(expr: ast::RangeExpr, errors: &mut Vec<SyntaxError>) {
+ if expr.op_kind() == Some(ast::RangeOp::Inclusive) && expr.end().is_none() {
+ errors.push(SyntaxError::new(
+ "An inclusive range must have an end expression",
+ expr.syntax().text_range(),
+ ));
+ }
+}
+
+fn validate_path_keywords(segment: ast::PathSegment, errors: &mut Vec<SyntaxError>) {
+ let path = segment.parent_path();
+ let is_path_start = segment.coloncolon_token().is_none() && path.qualifier().is_none();
+
+ if let Some(token) = segment.self_token() {
+ if !is_path_start {
+ errors.push(SyntaxError::new(
+ "The `self` keyword is only allowed as the first segment of a path",
+ token.text_range(),
+ ));
+ }
+ } else if let Some(token) = segment.crate_token() {
+ if !is_path_start || use_prefix(path).is_some() {
+ errors.push(SyntaxError::new(
+ "The `crate` keyword is only allowed as the first segment of a path",
+ token.text_range(),
+ ));
+ }
+ }
+
+ fn use_prefix(mut path: ast::Path) -> Option<ast::Path> {
+ for node in path.syntax().ancestors().skip(1) {
+ match_ast! {
+ match node {
+ ast::UseTree(it) => if let Some(tree_path) = it.path() {
+ // Even a top-level path exists within a `UseTree`, so we must explicitly
+ // allow our own path while disallowing anything else.
+ if tree_path != path {
+ return Some(tree_path);
+ }
+ },
+ ast::UseTreeList(_) => continue,
+ ast::Path(parent) => path = parent,
+ _ => return None,
+ }
+ };
+ }
+ None
+ }
+}
+
+fn validate_trait_object_ref_ty(ty: ast::RefType, errors: &mut Vec<SyntaxError>) {
+ if let Some(ast::Type::DynTraitType(ty)) = ty.ty() {
+ if let Some(err) = validate_trait_object_ty(ty) {
+ errors.push(err);
+ }
+ }
+}
+
+fn validate_trait_object_ptr_ty(ty: ast::PtrType, errors: &mut Vec<SyntaxError>) {
+ if let Some(ast::Type::DynTraitType(ty)) = ty.ty() {
+ if let Some(err) = validate_trait_object_ty(ty) {
+ errors.push(err);
+ }
+ }
+}
+
+fn validate_trait_object_fn_ptr_ret_ty(ty: ast::FnPtrType, errors: &mut Vec<SyntaxError>) {
+ if let Some(ast::Type::DynTraitType(ty)) = ty.ret_type().and_then(|ty| ty.ty()) {
+ if let Some(err) = validate_trait_object_ty(ty) {
+ errors.push(err);
+ }
+ }
+}
+
+fn validate_trait_object_ty(ty: ast::DynTraitType) -> Option<SyntaxError> {
+ let tbl = ty.type_bound_list()?;
+
+ if tbl.bounds().count() > 1 {
+ let dyn_token = ty.dyn_token()?;
+ let potential_parenthesis =
+ algo::skip_trivia_token(dyn_token.prev_token()?, Direction::Prev)?;
+ let kind = potential_parenthesis.kind();
+ if !matches!(kind, T!['('] | T![<] | T![=]) {
+ return Some(SyntaxError::new("ambiguous `+` in a type", ty.syntax().text_range()));
+ }
+ }
+ None
+}
+
+fn validate_macro_rules(mac: ast::MacroRules, errors: &mut Vec<SyntaxError>) {
+ if let Some(vis) = mac.visibility() {
+ errors.push(SyntaxError::new(
+ "visibilities are not allowed on `macro_rules!` items",
+ vis.syntax().text_range(),
+ ));
+ }
+}
+
+fn validate_const(const_: ast::Const, errors: &mut Vec<SyntaxError>) {
+ if let Some(mut_token) = const_
+ .const_token()
+ .and_then(|t| t.next_token())
+ .and_then(|t| algo::skip_trivia_token(t, Direction::Next))
+ .filter(|t| t.kind() == T![mut])
+ {
+ errors.push(SyntaxError::new("const globals cannot be mutable", mut_token.text_range()));
+ }
+}
+
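+// E.g. (illustrative): `if let Some(it) = opt && flag { .. }` is accepted, since
+// the `let` sits in the condition behind `&&`, while a `let` in an arbitrary
+// expression position such as `foo(let x = 1)` gets the error below.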
+fn validate_let_expr(let_: ast::LetExpr, errors: &mut Vec<SyntaxError>) {
+ let mut token = let_.syntax().clone();
+ loop {
+ token = match token.parent() {
+ Some(it) => it,
+ None => break,
+ };
+
+ if ast::ParenExpr::can_cast(token.kind()) {
+ continue;
+ } else if let Some(it) = ast::BinExpr::cast(token.clone()) {
+ if it.op_kind() == Some(ast::BinaryOp::LogicOp(ast::LogicOp::And)) {
+ continue;
+ }
+ } else if ast::IfExpr::can_cast(token.kind())
+ || ast::WhileExpr::can_cast(token.kind())
+ || ast::MatchGuard::can_cast(token.kind())
+ {
+ // It must be part of the condition, since expressions in the body are nested inside a block.
+ return;
+ }
+
+ break;
+ }
+ errors.push(SyntaxError::new(
+ "`let` expressions are not supported here",
+ let_.syntax().text_range(),
+ ));
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/validation/block.rs b/src/tools/rust-analyzer/crates/syntax/src/validation/block.rs
new file mode 100644
index 000000000..8eb4a10a3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/validation/block.rs
@@ -0,0 +1,24 @@
+//! Logic for validating block expressions, i.e. `ast::BlockExpr`.
+
+use crate::{
+ ast::{self, AstNode, HasAttrs},
+ SyntaxError,
+ SyntaxKind::*,
+};
+
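+// E.g. (illustrative): `let x = { #![allow(unused)] 92 };` is flagged, because
+// the block's parent is a `LET_STMT` rather than `FN`, `EXPR_STMT`, or
+// `STMT_LIST`.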
+pub(crate) fn validate_block_expr(block: ast::BlockExpr, errors: &mut Vec<SyntaxError>) {
+ if let Some(parent) = block.syntax().parent() {
+ match parent.kind() {
+ FN | EXPR_STMT | STMT_LIST => return,
+ _ => {}
+ }
+ }
+ if let Some(stmt_list) = block.stmt_list() {
+ errors.extend(stmt_list.attrs().filter(|attr| attr.kind().is_inner()).map(|attr| {
+ SyntaxError::new(
+ "A block in this position cannot accept inner attributes",
+ attr.syntax().text_range(),
+ )
+ }));
+ }
+}