author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 12:18:32 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 12:18:32 +0000
commit     4547b622d8d29df964fa2914213088b148c498fc (patch)
tree       9fc6b25f3c3add6b745be9a2400a6e96140046e9 /src/tools/rust-analyzer/crates/hir-expand
parent     Releasing progress-linux version 1.66.0+dfsg1-1~progress7.99u1. (diff)
Merging upstream version 1.67.1+dfsg1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'src/tools/rust-analyzer/crates/hir-expand')
-rw-r--r--  src/tools/rust-analyzer/crates/hir-expand/Cargo.toml      2
-rw-r--r--  src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs   96
-rw-r--r--  src/tools/rust-analyzer/crates/hir-expand/src/lib.rs      4
3 files changed, 69 insertions, 33 deletions
diff --git a/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml b/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml
index 3359c99b3..77eb1fd45 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml
@@ -4,7 +4,7 @@ version = "0.0.0"
description = "TBD"
license = "MIT OR Apache-2.0"
edition = "2021"
-rust-version = "1.57"
+rust-version = "1.65"
[lib]
doctest = false
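
[Editorial note, not part of the upstream patch: the rust-version bump to 1.65 is carried over from upstream as-is; presumably it reflects language features stabilized in Rust 1.65, such as let-else and generic associated types. A hypothetical snippet, not taken from this diff, that only compiles under the new floor:]

// Hypothetical example of a 1.65-only construct (`let`-`else`):
// bail out of the function early when the pattern does not match.
fn first_word(input: &str) -> &str {
    let Some(word) = input.split_whitespace().next() else {
        return "";
    };
    word
}

fn main() {
    assert_eq!(first_word("hello world"), "hello");
    assert_eq!(first_word("   "), "");
}
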
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs
index 893e6fe4b..a4abe7562 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs
@@ -4,6 +4,7 @@ use std::mem;
use mbe::{SyntheticToken, SyntheticTokenId, TokenMap};
use rustc_hash::FxHashMap;
+use smallvec::SmallVec;
use syntax::{
ast::{self, AstNode, HasLoopBody},
match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange,
@@ -292,25 +293,34 @@ pub(crate) fn reverse_fixups(
token_map: &TokenMap,
undo_info: &SyntaxFixupUndoInfo,
) {
- tt.token_trees.retain(|tt| match tt {
- tt::TokenTree::Leaf(leaf) => {
- token_map.synthetic_token_id(leaf.id()).is_none()
- || token_map.synthetic_token_id(leaf.id()) != Some(EMPTY_ID)
- }
- tt::TokenTree::Subtree(st) => st.delimiter.map_or(true, |d| {
- token_map.synthetic_token_id(d.id).is_none()
- || token_map.synthetic_token_id(d.id) != Some(EMPTY_ID)
- }),
- });
- tt.token_trees.iter_mut().for_each(|tt| match tt {
- tt::TokenTree::Subtree(tt) => reverse_fixups(tt, token_map, undo_info),
- tt::TokenTree::Leaf(leaf) => {
- if let Some(id) = token_map.synthetic_token_id(leaf.id()) {
- let original = &undo_info.original[id.0 as usize];
- *tt = tt::TokenTree::Subtree(original.clone());
+ let tts = std::mem::take(&mut tt.token_trees);
+ tt.token_trees = tts
+ .into_iter()
+ .filter(|tt| match tt {
+ tt::TokenTree::Leaf(leaf) => token_map.synthetic_token_id(leaf.id()) != Some(EMPTY_ID),
+ tt::TokenTree::Subtree(st) => {
+ st.delimiter.map_or(true, |d| token_map.synthetic_token_id(d.id) != Some(EMPTY_ID))
}
- }
- });
+ })
+ .flat_map(|tt| match tt {
+ tt::TokenTree::Subtree(mut tt) => {
+ reverse_fixups(&mut tt, token_map, undo_info);
+ SmallVec::from_const([tt.into()])
+ }
+ tt::TokenTree::Leaf(leaf) => {
+ if let Some(id) = token_map.synthetic_token_id(leaf.id()) {
+ let original = undo_info.original[id.0 as usize].clone();
+ if original.delimiter.is_none() {
+ original.token_trees.into()
+ } else {
+ SmallVec::from_const([original.into()])
+ }
+ } else {
+ SmallVec::from_const([leaf.into()])
+ }
+ }
+ })
+ .collect();
}
#[cfg(test)]
@@ -319,6 +329,31 @@ mod tests {
use super::reverse_fixups;
+ // The following three functions are only meant to check partial structural equivalence of
+ // `TokenTree`s, see the last assertion in `check()`.
+ fn check_leaf_eq(a: &tt::Leaf, b: &tt::Leaf) -> bool {
+ match (a, b) {
+ (tt::Leaf::Literal(a), tt::Leaf::Literal(b)) => a.text == b.text,
+ (tt::Leaf::Punct(a), tt::Leaf::Punct(b)) => a.char == b.char,
+ (tt::Leaf::Ident(a), tt::Leaf::Ident(b)) => a.text == b.text,
+ _ => false,
+ }
+ }
+
+ fn check_subtree_eq(a: &tt::Subtree, b: &tt::Subtree) -> bool {
+ a.delimiter.map(|it| it.kind) == b.delimiter.map(|it| it.kind)
+ && a.token_trees.len() == b.token_trees.len()
+ && a.token_trees.iter().zip(&b.token_trees).all(|(a, b)| check_tt_eq(a, b))
+ }
+
+ fn check_tt_eq(a: &tt::TokenTree, b: &tt::TokenTree) -> bool {
+ match (a, b) {
+ (tt::TokenTree::Leaf(a), tt::TokenTree::Leaf(b)) => check_leaf_eq(a, b),
+ (tt::TokenTree::Subtree(a), tt::TokenTree::Subtree(b)) => check_subtree_eq(a, b),
+ _ => false,
+ }
+ }
+
#[track_caller]
fn check(ra_fixture: &str, mut expect: Expect) {
let parsed = syntax::SourceFile::parse(ra_fixture);
@@ -331,17 +366,15 @@ mod tests {
fixups.append,
);
- let mut actual = tt.to_string();
- actual.push('\n');
+ let actual = format!("{}\n", tt);
expect.indent(false);
expect.assert_eq(&actual);
// the fixed-up tree should be syntactically valid
let (parse, _) = mbe::token_tree_to_syntax_node(&tt, ::mbe::TopEntryPoint::MacroItems);
- assert_eq!(
- parse.errors(),
- &[],
+ assert!(
+ parse.errors().is_empty(),
"parse has syntax errors. parse tree:\n{:#?}",
parse.syntax_node()
);
@@ -349,9 +382,12 @@ mod tests {
reverse_fixups(&mut tt, &tmap, &fixups.undo_info);
// the fixed-up + reversed version should be equivalent to the original input
- // (but token IDs don't matter)
+ // modulo token IDs and `Punct`s' spacing.
let (original_as_tt, _) = mbe::syntax_node_to_token_tree(&parsed.syntax_node());
- assert_eq!(tt.to_string(), original_as_tt.to_string());
+ assert!(
+ check_subtree_eq(&tt, &original_as_tt),
+ "different token tree: {tt:?}, {original_as_tt:?}"
+ );
}
#[test]
@@ -468,7 +504,7 @@ fn foo() {
}
"#,
expect![[r#"
-fn foo () {a .__ra_fixup}
+fn foo () {a . __ra_fixup}
"#]],
)
}
@@ -482,7 +518,7 @@ fn foo() {
}
"#,
expect![[r#"
-fn foo () {a .__ra_fixup ;}
+fn foo () {a . __ra_fixup ;}
"#]],
)
}
@@ -497,7 +533,7 @@ fn foo() {
}
"#,
expect![[r#"
-fn foo () {a .__ra_fixup ; bar () ;}
+fn foo () {a . __ra_fixup ; bar () ;}
"#]],
)
}
@@ -525,7 +561,7 @@ fn foo() {
}
"#,
expect![[r#"
-fn foo () {let x = a .__ra_fixup ;}
+fn foo () {let x = a . __ra_fixup ;}
"#]],
)
}
@@ -541,7 +577,7 @@ fn foo() {
}
"#,
expect![[r#"
-fn foo () {a .b ; bar () ;}
+fn foo () {a . b ; bar () ;}
"#]],
)
}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
index a5b499fe8..7352b003a 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
@@ -814,7 +814,7 @@ impl<'a> InFile<&'a SyntaxNode> {
pub fn original_syntax_node(self, db: &dyn db::AstDatabase) -> Option<InFile<SyntaxNode>> {
// This kind of upmapping can only be achieved in attribute expanded files,
- // as we don't have node inputs otherwise and therefor can't find an `N` node in the input
+ // as we don't have node inputs otherwise and therefore can't find an `N` node in the input
if !self.file_id.is_macro() {
return Some(self.map(Clone::clone));
} else if !self.file_id.is_attr_macro(db) {
@@ -926,7 +926,7 @@ impl<N: AstNode> InFile<N> {
pub fn original_ast_node(self, db: &dyn db::AstDatabase) -> Option<InFile<N>> {
// This kind of upmapping can only be achieved in attribute expanded files,
- // as we don't have node inputs otherwise and therefor can't find an `N` node in the input
+ // as we don't have node inputs otherwise and therefore can't find an `N` node in the input
if !self.file_id.is_macro() {
return Some(self);
} else if !self.file_id.is_attr_macro(db) {