Diffstat (limited to 'src/tools/rust-analyzer/crates/proc-macro-api')
-rw-r--r--   src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml       |  10
-rw-r--r--   src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs       |  41
-rw-r--r--   src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs       | 121
-rw-r--r--   src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs  | 196
-rw-r--r--   src/tools/rust-analyzer/crates/proc-macro-api/src/version.rs   |  31
5 files changed, 309 insertions(+), 90 deletions(-)
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml
index 4229f2891..2cbbc9489 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml
@@ -21,15 +21,19 @@ object = { version = "0.32.0", default-features = false, features = [
] }
serde.workspace = true
serde_json = { workspace = true, features = ["unbounded_depth"] }
-tracing = "0.1.37"
+tracing.workspace = true
triomphe.workspace = true
memmap2 = "0.5.4"
snap = "1.1.0"
+indexmap = "2.1.0"
# local deps
paths.workspace = true
tt.workspace = true
stdx.workspace = true
profile.workspace = true
-# Intentionally *not* depend on anything salsa-related
-# base-db.workspace = true
+text-size.workspace = true
+# Ideally this crate would not depend on salsa things, but we need span information here which wraps
+# InternIds for the syntax context
+base-db.workspace = true
+la-arena.workspace = true
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
index 1603458f7..f697ecd35 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
@@ -5,22 +5,22 @@
//! is used to provide basic infrastructure for communication between two
//! processes: Client (RA itself), Server (the external program)
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
pub mod msg;
mod process;
mod version;
+use base_db::span::SpanData;
+use indexmap::IndexSet;
use paths::AbsPathBuf;
use std::{fmt, io, sync::Mutex};
use triomphe::Arc;
use serde::{Deserialize, Serialize};
-use ::tt::token_id as tt;
-
use crate::{
- msg::{ExpandMacro, FlatTree, PanicMessage},
+ msg::{ExpandMacro, ExpnGlobals, FlatTree, PanicMessage, HAS_GLOBAL_SPANS},
process::ProcMacroProcessSrv,
};
@@ -136,30 +136,47 @@ impl ProcMacro {
pub fn expand(
&self,
- subtree: &tt::Subtree,
- attr: Option<&tt::Subtree>,
+ subtree: &tt::Subtree<SpanData>,
+ attr: Option<&tt::Subtree<SpanData>>,
env: Vec<(String, String)>,
- ) -> Result<Result<tt::Subtree, PanicMessage>, ServerError> {
+ def_site: SpanData,
+ call_site: SpanData,
+ mixed_site: SpanData,
+ ) -> Result<Result<tt::Subtree<SpanData>, PanicMessage>, ServerError> {
let version = self.process.lock().unwrap_or_else(|e| e.into_inner()).version();
let current_dir = env
.iter()
.find(|(name, _)| name == "CARGO_MANIFEST_DIR")
.map(|(_, value)| value.clone());
+ let mut span_data_table = IndexSet::default();
+ let def_site = span_data_table.insert_full(def_site).0;
+ let call_site = span_data_table.insert_full(call_site).0;
+ let mixed_site = span_data_table.insert_full(mixed_site).0;
let task = ExpandMacro {
- macro_body: FlatTree::new(subtree, version),
+ macro_body: FlatTree::new(subtree, version, &mut span_data_table),
macro_name: self.name.to_string(),
- attributes: attr.map(|subtree| FlatTree::new(subtree, version)),
+ attributes: attr.map(|subtree| FlatTree::new(subtree, version, &mut span_data_table)),
lib: self.dylib_path.to_path_buf().into(),
env,
current_dir,
+ has_global_spans: ExpnGlobals {
+ serialize: version >= HAS_GLOBAL_SPANS,
+ def_site,
+ call_site,
+ mixed_site,
+ },
};
- let request = msg::Request::ExpandMacro(task);
- let response = self.process.lock().unwrap_or_else(|e| e.into_inner()).send_task(request)?;
+ let response = self
+ .process
+ .lock()
+ .unwrap_or_else(|e| e.into_inner())
+ .send_task(msg::Request::ExpandMacro(task))?;
+
match response {
msg::Response::ExpandMacro(it) => {
- Ok(it.map(|tree| FlatTree::to_subtree(tree, version)))
+ Ok(it.map(|tree| FlatTree::to_subtree_resolved(tree, version, &span_data_table)))
}
msg::Response::ListMacros(..) | msg::Response::ApiVersionCheck(..) => {
Err(ServerError { message: "unexpected response".to_string(), io: None })
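
The new expand signature above deduplicates spans through an IndexSet before anything crosses the process boundary: only small indices are serialized, and the same table resolves them on the way back. A minimal sketch of that interning pattern, with DemoSpan as a stand-in for base_db's SpanData (not the real type):

// Sketch: spans are interned into an IndexSet and only their indices are
// sent to the proc-macro server; the table resolves them on return.
use indexmap::IndexSet;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct DemoSpan {
    start: u32,
    end: u32,
}

fn main() {
    let mut table: IndexSet<DemoSpan> = IndexSet::default();

    // insert_full returns (index, newly_inserted); duplicates reuse the
    // existing index, keeping the wire format compact.
    let def_site = table.insert_full(DemoSpan { start: 0, end: 0 }).0;
    let call_site = table.insert_full(DemoSpan { start: 5, end: 8 }).0;
    let again = table.insert_full(DemoSpan { start: 0, end: 0 }).0;
    assert_eq!(def_site, again);

    // On the way back, indices resolve through the same table.
    let resolved = table.get_index(call_site).copied().unwrap();
    println!("{def_site} {call_site} -> {resolved:?}");
}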
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs
index 4b01643c2..1d3e45aff 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs
@@ -10,14 +10,15 @@ use serde::{de::DeserializeOwned, Deserialize, Serialize};
use crate::ProcMacroKind;
-pub use crate::msg::flat::FlatTree;
+pub use crate::msg::flat::{FlatTree, TokenId};
// The versions of the server protocol
pub const NO_VERSION_CHECK_VERSION: u32 = 0;
pub const VERSION_CHECK_VERSION: u32 = 1;
pub const ENCODE_CLOSE_SPAN_VERSION: u32 = 2;
+pub const HAS_GLOBAL_SPANS: u32 = 3;
-pub const CURRENT_API_VERSION: u32 = ENCODE_CLOSE_SPAN_VERSION;
+pub const CURRENT_API_VERSION: u32 = HAS_GLOBAL_SPANS;
#[derive(Debug, Serialize, Deserialize)]
pub enum Request {
@@ -59,6 +60,26 @@ pub struct ExpandMacro {
pub env: Vec<(String, String)>,
pub current_dir: Option<String>,
+ /// marker for serde skip stuff
+ #[serde(skip_serializing_if = "ExpnGlobals::skip_serializing_if")]
+ #[serde(default)]
+ pub has_global_spans: ExpnGlobals,
+}
+
+#[derive(Default, Debug, Serialize, Deserialize)]
+pub struct ExpnGlobals {
+ #[serde(skip_serializing)]
+ #[serde(default)]
+ pub serialize: bool,
+ pub def_site: usize,
+ pub call_site: usize,
+ pub mixed_site: usize,
+}
+
+impl ExpnGlobals {
+ fn skip_serializing_if(&self) -> bool {
+ !self.serialize
+ }
}
pub trait Message: Serialize + DeserializeOwned {
@@ -115,30 +136,89 @@ fn write_json(out: &mut impl Write, msg: &str) -> io::Result<()> {
#[cfg(test)]
mod tests {
+ use base_db::{
+ span::{ErasedFileAstId, SpanAnchor, SpanData, SyntaxContextId},
+ FileId,
+ };
+ use la_arena::RawIdx;
+ use text_size::{TextRange, TextSize};
+ use tt::{Delimiter, DelimiterKind, Ident, Leaf, Literal, Punct, Spacing, Subtree, TokenTree};
+
use super::*;
- use crate::tt::*;
- fn fixture_token_tree() -> Subtree {
- let mut subtree = Subtree { delimiter: Delimiter::unspecified(), token_trees: Vec::new() };
- subtree
- .token_trees
- .push(TokenTree::Leaf(Ident { text: "struct".into(), span: TokenId(0) }.into()));
- subtree
- .token_trees
- .push(TokenTree::Leaf(Ident { text: "Foo".into(), span: TokenId(1) }.into()));
+ fn fixture_token_tree() -> Subtree<SpanData> {
+ let anchor = SpanAnchor {
+ file_id: FileId::from_raw(0),
+ ast_id: ErasedFileAstId::from_raw(RawIdx::from(0)),
+ };
+ let mut subtree = Subtree {
+ delimiter: Delimiter {
+ open: SpanData {
+ range: TextRange::empty(TextSize::new(0)),
+ anchor,
+ ctx: SyntaxContextId::ROOT,
+ },
+ close: SpanData {
+ range: TextRange::empty(TextSize::new(13)),
+ anchor,
+ ctx: SyntaxContextId::ROOT,
+ },
+ kind: DelimiterKind::Invisible,
+ },
+ token_trees: Vec::new(),
+ };
+ subtree.token_trees.push(TokenTree::Leaf(
+ Ident {
+ text: "struct".into(),
+ span: SpanData {
+ range: TextRange::at(TextSize::new(0), TextSize::of("struct")),
+ anchor,
+ ctx: SyntaxContextId::ROOT,
+ },
+ }
+ .into(),
+ ));
+ subtree.token_trees.push(TokenTree::Leaf(
+ Ident {
+ text: "Foo".into(),
+ span: SpanData {
+ range: TextRange::at(TextSize::new(5), TextSize::of("Foo")),
+ anchor,
+ ctx: SyntaxContextId::ROOT,
+ },
+ }
+ .into(),
+ ));
subtree.token_trees.push(TokenTree::Leaf(Leaf::Literal(Literal {
text: "Foo".into(),
- span: TokenId::unspecified(),
+
+ span: SpanData {
+ range: TextRange::at(TextSize::new(8), TextSize::of("Foo")),
+ anchor,
+ ctx: SyntaxContextId::ROOT,
+ },
})));
subtree.token_trees.push(TokenTree::Leaf(Leaf::Punct(Punct {
char: '@',
- span: TokenId::unspecified(),
+ span: SpanData {
+ range: TextRange::at(TextSize::new(11), TextSize::of('@')),
+ anchor,
+ ctx: SyntaxContextId::ROOT,
+ },
spacing: Spacing::Joint,
})));
subtree.token_trees.push(TokenTree::Subtree(Subtree {
delimiter: Delimiter {
- open: TokenId(2),
- close: TokenId::UNSPECIFIED,
+ open: SpanData {
+ range: TextRange::at(TextSize::new(12), TextSize::of('{')),
+ anchor,
+ ctx: SyntaxContextId::ROOT,
+ },
+ close: SpanData {
+ range: TextRange::at(TextSize::new(13), TextSize::of('}')),
+ anchor,
+ ctx: SyntaxContextId::ROOT,
+ },
kind: DelimiterKind::Brace,
},
token_trees: vec![],
@@ -149,19 +229,26 @@ mod tests {
#[test]
fn test_proc_macro_rpc_works() {
let tt = fixture_token_tree();
+ let mut span_data_table = Default::default();
let task = ExpandMacro {
- macro_body: FlatTree::new(&tt, CURRENT_API_VERSION),
+ macro_body: FlatTree::new(&tt, CURRENT_API_VERSION, &mut span_data_table),
macro_name: Default::default(),
attributes: None,
lib: std::env::current_dir().unwrap(),
env: Default::default(),
current_dir: Default::default(),
+ has_global_spans: ExpnGlobals {
+ serialize: true,
+ def_site: 0,
+ call_site: 0,
+ mixed_site: 0,
+ },
};
let json = serde_json::to_string(&task).unwrap();
// println!("{}", json);
let back: ExpandMacro = serde_json::from_str(&json).unwrap();
- assert_eq!(tt, back.macro_body.to_subtree(CURRENT_API_VERSION));
+ assert_eq!(tt, back.macro_body.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table));
}
}
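
The ExpnGlobals field added to ExpandMacro stays off the wire when the server predates HAS_GLOBAL_SPANS, and #[serde(default)] tolerates peers that never send it. A self-contained sketch of that serde pattern, with Globals and Task as stand-ins for the real message types:

// Sketch of the backwards-compatibility trick: the field is skipped during
// serialization when `serialize` is false, and filled in by `default` when
// the other side omits it.
use serde::{Deserialize, Serialize};

#[derive(Default, Debug, Serialize, Deserialize)]
struct Globals {
    // Never serialized itself; only steers skip_serializing_if below.
    #[serde(skip_serializing, default)]
    serialize: bool,
    def_site: usize,
    call_site: usize,
    mixed_site: usize,
}

impl Globals {
    fn skip(&self) -> bool {
        !self.serialize
    }
}

#[derive(Debug, Serialize, Deserialize)]
struct Task {
    name: String,
    #[serde(skip_serializing_if = "Globals::skip", default)]
    globals: Globals,
}

fn main() {
    // New protocol: globals are on the wire.
    let new = Task {
        name: "m".into(),
        globals: Globals { serialize: true, def_site: 0, call_site: 1, mixed_site: 2 },
    };
    println!("{}", serde_json::to_string(&new).unwrap());

    // Old protocol: the field is omitted, so an old server still parses the request.
    let old = Task { name: "m".into(), globals: Globals::default() };
    println!("{}", serde_json::to_string(&old).unwrap());

    // JSON from an old client (no "globals" key) still deserializes on new servers.
    let back: Task = serde_json::from_str(r#"{"name":"m"}"#).unwrap();
    println!("{back:?}");
}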
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs
index 44245336f..583571862 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs
@@ -37,12 +37,26 @@
use std::collections::{HashMap, VecDeque};
+use base_db::span::SpanData;
+use indexmap::IndexSet;
use serde::{Deserialize, Serialize};
-use crate::{
- msg::ENCODE_CLOSE_SPAN_VERSION,
- tt::{self, TokenId},
-};
+use crate::msg::ENCODE_CLOSE_SPAN_VERSION;
+
+type SpanDataIndexMap = IndexSet<SpanData>;
+
+#[derive(Clone, Copy, PartialEq, Eq, Hash)]
+pub struct TokenId(pub u32);
+
+impl std::fmt::Debug for TokenId {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ self.0.fmt(f)
+ }
+}
+
+impl tt::Span for TokenId {
+ const DUMMY: Self = TokenId(!0);
+}
#[derive(Serialize, Deserialize, Debug)]
pub struct FlatTree {
@@ -55,33 +69,38 @@ pub struct FlatTree {
}
struct SubtreeRepr {
- open: tt::TokenId,
- close: tt::TokenId,
+ open: TokenId,
+ close: TokenId,
kind: tt::DelimiterKind,
tt: [u32; 2],
}
struct LiteralRepr {
- id: tt::TokenId,
+ id: TokenId,
text: u32,
}
struct PunctRepr {
- id: tt::TokenId,
+ id: TokenId,
char: char,
spacing: tt::Spacing,
}
struct IdentRepr {
- id: tt::TokenId,
+ id: TokenId,
text: u32,
}
impl FlatTree {
- pub fn new(subtree: &tt::Subtree, version: u32) -> FlatTree {
+ pub fn new(
+ subtree: &tt::Subtree<SpanData>,
+ version: u32,
+ span_data_table: &mut SpanDataIndexMap,
+ ) -> FlatTree {
let mut w = Writer {
string_table: HashMap::new(),
work: VecDeque::new(),
+ span_data_table,
subtree: Vec::new(),
literal: Vec::new(),
@@ -92,7 +111,7 @@ impl FlatTree {
};
w.write(subtree);
- return FlatTree {
+ FlatTree {
subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
write_vec(w.subtree, SubtreeRepr::write_with_close_span)
} else {
@@ -103,15 +122,44 @@ impl FlatTree {
ident: write_vec(w.ident, IdentRepr::write),
token_tree: w.token_tree,
text: w.text,
+ }
+ }
+
+ pub fn new_raw(subtree: &tt::Subtree<TokenId>, version: u32) -> FlatTree {
+ let mut w = Writer {
+ string_table: HashMap::new(),
+ work: VecDeque::new(),
+ span_data_table: &mut (),
+
+ subtree: Vec::new(),
+ literal: Vec::new(),
+ punct: Vec::new(),
+ ident: Vec::new(),
+ token_tree: Vec::new(),
+ text: Vec::new(),
};
+ w.write(subtree);
- fn write_vec<T, F: Fn(T) -> [u32; N], const N: usize>(xs: Vec<T>, f: F) -> Vec<u32> {
- xs.into_iter().flat_map(f).collect()
+ FlatTree {
+ subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
+ write_vec(w.subtree, SubtreeRepr::write_with_close_span)
+ } else {
+ write_vec(w.subtree, SubtreeRepr::write)
+ },
+ literal: write_vec(w.literal, LiteralRepr::write),
+ punct: write_vec(w.punct, PunctRepr::write),
+ ident: write_vec(w.ident, IdentRepr::write),
+ token_tree: w.token_tree,
+ text: w.text,
}
}
- pub fn to_subtree(self, version: u32) -> tt::Subtree {
- return Reader {
+ pub fn to_subtree_resolved(
+ self,
+ version: u32,
+ span_data_table: &SpanDataIndexMap,
+ ) -> tt::Subtree<SpanData> {
+ Reader {
subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
read_vec(self.subtree, SubtreeRepr::read_with_close_span)
} else {
@@ -122,18 +170,40 @@ impl FlatTree {
ident: read_vec(self.ident, IdentRepr::read),
token_tree: self.token_tree,
text: self.text,
+ span_data_table,
}
- .read();
+ .read()
+ }
- fn read_vec<T, F: Fn([u32; N]) -> T, const N: usize>(xs: Vec<u32>, f: F) -> Vec<T> {
- let mut chunks = xs.chunks_exact(N);
- let res = chunks.by_ref().map(|chunk| f(chunk.try_into().unwrap())).collect();
- assert!(chunks.remainder().is_empty());
- res
+ pub fn to_subtree_unresolved(self, version: u32) -> tt::Subtree<TokenId> {
+ Reader {
+ subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
+ read_vec(self.subtree, SubtreeRepr::read_with_close_span)
+ } else {
+ read_vec(self.subtree, SubtreeRepr::read)
+ },
+ literal: read_vec(self.literal, LiteralRepr::read),
+ punct: read_vec(self.punct, PunctRepr::read),
+ ident: read_vec(self.ident, IdentRepr::read),
+ token_tree: self.token_tree,
+ text: self.text,
+ span_data_table: &(),
}
+ .read()
}
}
+fn read_vec<T, F: Fn([u32; N]) -> T, const N: usize>(xs: Vec<u32>, f: F) -> Vec<T> {
+ let mut chunks = xs.chunks_exact(N);
+ let res = chunks.by_ref().map(|chunk| f(chunk.try_into().unwrap())).collect();
+ assert!(chunks.remainder().is_empty());
+ res
+}
+
+fn write_vec<T, F: Fn(T) -> [u32; N], const N: usize>(xs: Vec<T>, f: F) -> Vec<u32> {
+ xs.into_iter().flat_map(f).collect()
+}
+
impl SubtreeRepr {
fn write(self) -> [u32; 4] {
let kind = match self.kind {
@@ -152,7 +222,7 @@ impl SubtreeRepr {
3 => tt::DelimiterKind::Bracket,
other => panic!("bad kind {other}"),
};
- SubtreeRepr { open: TokenId(open), close: TokenId::UNSPECIFIED, kind, tt: [lo, len] }
+ SubtreeRepr { open: TokenId(open), close: TokenId(!0), kind, tt: [lo, len] }
}
fn write_with_close_span(self) -> [u32; 5] {
let kind = match self.kind {
@@ -211,9 +281,36 @@ impl IdentRepr {
}
}
-struct Writer<'a> {
- work: VecDeque<(usize, &'a tt::Subtree)>,
+trait Span: Copy {
+ type Table;
+ fn token_id_of(table: &mut Self::Table, s: Self) -> TokenId;
+ fn span_for_token_id(table: &Self::Table, id: TokenId) -> Self;
+}
+
+impl Span for TokenId {
+ type Table = ();
+ fn token_id_of((): &mut Self::Table, token_id: Self) -> TokenId {
+ token_id
+ }
+
+ fn span_for_token_id((): &Self::Table, id: TokenId) -> Self {
+ id
+ }
+}
+impl Span for SpanData {
+ type Table = IndexSet<SpanData>;
+ fn token_id_of(table: &mut Self::Table, span: Self) -> TokenId {
+ TokenId(table.insert_full(span).0 as u32)
+ }
+ fn span_for_token_id(table: &Self::Table, id: TokenId) -> Self {
+ *table.get_index(id.0 as usize).unwrap_or_else(|| &table[0])
+ }
+}
+
+struct Writer<'a, 'span, S: Span> {
+ work: VecDeque<(usize, &'a tt::Subtree<S>)>,
string_table: HashMap<&'a str, u32>,
+ span_data_table: &'span mut S::Table,
subtree: Vec<SubtreeRepr>,
literal: Vec<LiteralRepr>,
@@ -223,15 +320,19 @@ struct Writer<'a> {
text: Vec<String>,
}
-impl<'a> Writer<'a> {
- fn write(&mut self, root: &'a tt::Subtree) {
+impl<'a, 'span, S: Span> Writer<'a, 'span, S> {
+ fn write(&mut self, root: &'a tt::Subtree<S>) {
self.enqueue(root);
while let Some((idx, subtree)) = self.work.pop_front() {
self.subtree(idx, subtree);
}
}
- fn subtree(&mut self, idx: usize, subtree: &'a tt::Subtree) {
+ fn token_id_of(&mut self, span: S) -> TokenId {
+ S::token_id_of(self.span_data_table, span)
+ }
+
+ fn subtree(&mut self, idx: usize, subtree: &'a tt::Subtree<S>) {
let mut first_tt = self.token_tree.len();
let n_tt = subtree.token_trees.len();
self.token_tree.resize(first_tt + n_tt, !0);
@@ -248,22 +349,21 @@ impl<'a> Writer<'a> {
tt::Leaf::Literal(lit) => {
let idx = self.literal.len() as u32;
let text = self.intern(&lit.text);
- self.literal.push(LiteralRepr { id: lit.span, text });
+ let id = self.token_id_of(lit.span);
+ self.literal.push(LiteralRepr { id, text });
idx << 2 | 0b01
}
tt::Leaf::Punct(punct) => {
let idx = self.punct.len() as u32;
- self.punct.push(PunctRepr {
- char: punct.char,
- spacing: punct.spacing,
- id: punct.span,
- });
+ let id = self.token_id_of(punct.span);
+ self.punct.push(PunctRepr { char: punct.char, spacing: punct.spacing, id });
idx << 2 | 0b10
}
tt::Leaf::Ident(ident) => {
let idx = self.ident.len() as u32;
let text = self.intern(&ident.text);
- self.ident.push(IdentRepr { id: ident.span, text });
+ let id = self.token_id_of(ident.span);
+ self.ident.push(IdentRepr { id, text });
idx << 2 | 0b11
}
},
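
The writer above packs each token-tree reference into a single u32, with the low two bits tagging which side table the remaining bits index (01 literal, 10 punct, 11 ident, leaving 00 for subtrees). An illustrative encode/decode pair for that scheme; TokenTreeRef and the helpers are hypothetical, not the crate's actual Reader/Writer code:

// Sketch of the 2-bit tag packing used for the flat `token_tree` array.
#[derive(Debug)]
enum TokenTreeRef {
    Subtree(usize),
    Literal(usize),
    Punct(usize),
    Ident(usize),
}

fn encode(tt: &TokenTreeRef) -> u32 {
    match *tt {
        TokenTreeRef::Subtree(i) => (i as u32) << 2,
        TokenTreeRef::Literal(i) => (i as u32) << 2 | 0b01,
        TokenTreeRef::Punct(i) => (i as u32) << 2 | 0b10,
        TokenTreeRef::Ident(i) => (i as u32) << 2 | 0b11,
    }
}

fn decode(word: u32) -> TokenTreeRef {
    let idx = (word >> 2) as usize;
    match word & 0b11 {
        0b00 => TokenTreeRef::Subtree(idx),
        0b01 => TokenTreeRef::Literal(idx),
        0b10 => TokenTreeRef::Punct(idx),
        _ => TokenTreeRef::Ident(idx),
    }
}

fn main() {
    let word = encode(&TokenTreeRef::Punct(7));
    println!("{word:#b} -> {:?}", decode(word)); // 0b11110 -> Punct(7)
}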
@@ -273,10 +373,10 @@ impl<'a> Writer<'a> {
}
}
- fn enqueue(&mut self, subtree: &'a tt::Subtree) -> u32 {
+ fn enqueue(&mut self, subtree: &'a tt::Subtree<S>) -> u32 {
let idx = self.subtree.len();
- let open = subtree.delimiter.open;
- let close = subtree.delimiter.close;
+ let open = self.token_id_of(subtree.delimiter.open);
+ let close = self.token_id_of(subtree.delimiter.close);
let delimiter_kind = subtree.delimiter.kind;
self.subtree.push(SubtreeRepr { open, close, kind: delimiter_kind, tt: [!0, !0] });
self.work.push_back((idx, subtree));
@@ -293,23 +393,29 @@ impl<'a> Writer<'a> {
}
}
-struct Reader {
+struct Reader<'span, S: Span> {
subtree: Vec<SubtreeRepr>,
literal: Vec<LiteralRepr>,
punct: Vec<PunctRepr>,
ident: Vec<IdentRepr>,
token_tree: Vec<u32>,
text: Vec<String>,
+ span_data_table: &'span S::Table,
}
-impl Reader {
- pub(crate) fn read(self) -> tt::Subtree {
- let mut res: Vec<Option<tt::Subtree>> = vec![None; self.subtree.len()];
+impl<'span, S: Span> Reader<'span, S> {
+ pub(crate) fn read(self) -> tt::Subtree<S> {
+ let mut res: Vec<Option<tt::Subtree<S>>> = vec![None; self.subtree.len()];
+ let read_span = |id| S::span_for_token_id(self.span_data_table, id);
for i in (0..self.subtree.len()).rev() {
let repr = &self.subtree[i];
let token_trees = &self.token_tree[repr.tt[0] as usize..repr.tt[1] as usize];
let s = tt::Subtree {
- delimiter: tt::Delimiter { open: repr.open, close: repr.close, kind: repr.kind },
+ delimiter: tt::Delimiter {
+ open: read_span(repr.open),
+ close: read_span(repr.close),
+ kind: repr.kind,
+ },
token_trees: token_trees
.iter()
.copied()
@@ -324,7 +430,7 @@ impl Reader {
let repr = &self.literal[idx];
tt::Leaf::Literal(tt::Literal {
text: self.text[repr.text as usize].as_str().into(),
- span: repr.id,
+ span: read_span(repr.id),
})
.into()
}
@@ -333,7 +439,7 @@ impl Reader {
tt::Leaf::Punct(tt::Punct {
char: repr.char,
spacing: repr.spacing,
- span: repr.id,
+ span: read_span(repr.id),
})
.into()
}
@@ -341,7 +447,7 @@ impl Reader {
let repr = &self.ident[idx];
tt::Leaf::Ident(tt::Ident {
text: self.text[repr.text as usize].as_str().into(),
- span: repr.id,
+ span: read_span(repr.id),
})
.into()
}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/version.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/version.rs
index 48efbf589..5ff1f36c5 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/version.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/version.rs
@@ -85,8 +85,8 @@ fn read_section<'a>(dylib_binary: &'a [u8], section_name: &str) -> io::Result<&'
}
/// Check the version of rustc that was used to compile a proc macro crate's
-///
/// binary file.
+///
/// A proc macro crate binary's ".rustc" section has following byte layout:
/// * [b'r',b'u',b's',b't',0,0,0,5] is the first 8 bytes
/// * ff060000 734e6150 is followed, it's the snappy format magic bytes,
@@ -96,8 +96,8 @@ fn read_section<'a>(dylib_binary: &'a [u8], section_name: &str) -> io::Result<&'
/// The bytes you get after decompressing the snappy format portion has
/// following layout:
/// * [b'r',b'u',b's',b't',0,0,0,5] is the first 8 bytes(again)
-/// * [crate root bytes] next 4 bytes is to store crate root position,
-/// according to rustc's source code comment
+/// * [crate root bytes] next 8 bytes (4 in old versions) is to store
+/// crate root position, according to rustc's source code comment
/// * [length byte] next 1 byte tells us how many bytes we should read next
/// for the version string's utf8 bytes
/// * [version string bytes encoded in utf8] <- GET THIS BOI
@@ -119,13 +119,18 @@ pub fn read_version(dylib_path: &AbsPath) -> io::Result<String> {
}
let version = u32::from_be_bytes([dot_rustc[4], dot_rustc[5], dot_rustc[6], dot_rustc[7]]);
// Last supported version is:
- // https://github.com/rust-lang/rust/commit/0696e79f2740ad89309269b460579e548a5cd632
- let snappy_portion = match version {
- 5 | 6 => &dot_rustc[8..],
+ // https://github.com/rust-lang/rust/commit/b94cfefc860715fb2adf72a6955423d384c69318
+ let (snappy_portion, bytes_before_version) = match version {
+ 5 | 6 => (&dot_rustc[8..], 13),
7 | 8 => {
let len_bytes = &dot_rustc[8..12];
let data_len = u32::from_be_bytes(len_bytes.try_into().unwrap()) as usize;
- &dot_rustc[12..data_len + 12]
+ (&dot_rustc[12..data_len + 12], 13)
+ }
+ 9 => {
+ let len_bytes = &dot_rustc[8..16];
+ let data_len = u64::from_le_bytes(len_bytes.try_into().unwrap()) as usize;
+ (&dot_rustc[16..data_len + 12], 17)
}
_ => {
return Err(io::Error::new(
@@ -142,15 +147,15 @@ pub fn read_version(dylib_path: &AbsPath) -> io::Result<String> {
Box::new(SnapDecoder::new(snappy_portion))
};
- // the bytes before version string bytes, so this basically is:
+ // We're going to skip over the bytes before the version string, so basically:
// 8 bytes for [b'r',b'u',b's',b't',0,0,0,5]
- // 4 bytes for [crate root bytes]
+ // 4 or 8 bytes for [crate root bytes]
// 1 byte for length of version string
- // so 13 bytes in total, and we should check the 13th byte
+ // so 13 or 17 bytes in total, and we should check the last of those bytes
// to know the length
- let mut bytes_before_version = [0u8; 13];
- uncompressed.read_exact(&mut bytes_before_version)?;
- let length = bytes_before_version[12];
+ let mut bytes = [0u8; 17];
+ uncompressed.read_exact(&mut bytes[..bytes_before_version])?;
+ let length = bytes[bytes_before_version - 1];
let mut version_string_utf8 = vec![0u8; length as usize];
uncompressed.read_exact(&mut version_string_utf8)?;
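
After decompression the layout is: 8 magic bytes, the crate-root position (4 bytes in older formats, 8 bytes from format 9 on), a single length byte, then the UTF-8 rustc version string. A small sketch of reading that tail, assuming the 8-byte variant; read_rustc_version and the example buffer are illustrative, not the crate's actual helper:

// Sketch: skip magic + crate-root bytes, read the length byte, then the
// UTF-8 version string, mirroring the layout described in the doc comment.
use std::io::{self, Read};

fn read_rustc_version(mut decompressed: impl Read, root_bytes: usize) -> io::Result<String> {
    // 8 magic bytes + crate-root bytes + 1 length byte.
    let mut header = vec![0u8; 8 + root_bytes + 1];
    decompressed.read_exact(&mut header)?;
    let len = *header.last().unwrap() as usize;

    let mut version = vec![0u8; len];
    decompressed.read_exact(&mut version)?;
    String::from_utf8(version).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))
}

fn main() -> io::Result<()> {
    // Fabricated example buffer, not real .rustc contents.
    let mut buf = Vec::new();
    buf.extend_from_slice(b"rust\0\0\0\x05"); // magic
    buf.extend_from_slice(&[0u8; 8]);         // crate root position (format 9: 8 bytes)
    buf.push(6);                              // version string length
    buf.extend_from_slice(b"1.74.0");         // version string
    let v = read_rustc_version(&buf[..], 8)?;
    assert_eq!(v, "1.74.0");
    println!("{v}");
    Ok(())
}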