author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 12:02:58 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 12:02:58 +0000
commit     698f8c2f01ea549d77d7dc3338a12e04c11057b9 (patch)
tree       173a775858bd501c378080a10dca74132f05bc50 /src/tools/rust-analyzer/crates/proc-macro-srv
parent     Initial commit. (diff)
download   rustc-698f8c2f01ea549d77d7dc3338a12e04c11057b9.tar.xz
           rustc-698f8c2f01ea549d77d7dc3338a12e04c11057b9.zip

Adding upstream version 1.64.0+dfsg1. (upstream/1.64.0+dfsg1)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'src/tools/rust-analyzer/crates/proc-macro-srv')
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml | 36
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/build.rs | 25
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/mod.rs | 104
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/buffer.rs | 143
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/client.rs | 485
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/closure.rs | 24
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/handle.rs | 70
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/mod.rs | 429
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/rpc.rs | 305
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/scoped_cell.rs | 81
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/server.rs | 352
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/diagnostic.rs | 166
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/mod.rs | 1056
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/quote.rs | 140
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/ra_server.rs | 819
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/mod.rs | 105
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/buffer.rs | 156
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/client.rs | 510
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/closure.rs | 32
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/handle.rs | 89
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/mod.rs | 451
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/rpc.rs | 304
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/scoped_cell.rs | 81
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/selfless_reify.rs | 83
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/server.rs | 332
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/diagnostic.rs | 166
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/mod.rs | 1106
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/quote.rs | 139
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/ra_server.rs | 834
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/mod.rs | 105
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/buffer.rs | 156
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/client.rs | 529
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/closure.rs | 32
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/handle.rs | 89
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/mod.rs | 493
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/rpc.rs | 304
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/scoped_cell.rs | 81
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/selfless_reify.rs | 84
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/server.rs | 339
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/diagnostic.rs | 166
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/mod.rs | 1125
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/quote.rs | 139
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/ra_server.rs | 792
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/mod.rs | 102
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs | 518
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server/symbol.rs | 46
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server/token_stream.rs | 179
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/mod.rs | 155
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/cli.rs | 31
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs | 199
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs | 160
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs | 166
-rw-r--r--  src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs | 47
53 files changed, 14660 insertions, 0 deletions
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml
new file mode 100644
index 000000000..5746eac0b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml
@@ -0,0 +1,36 @@
+[package]
+name = "proc-macro-srv"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+object = { version = "0.29.0", default-features = false, features = [
+ "std",
+ "read_core",
+ "elf",
+ "macho",
+ "pe",
+] }
+libloading = "0.7.3"
+memmap2 = "0.5.4"
+
+tt = { path = "../tt", version = "0.0.0" }
+mbe = { path = "../mbe", version = "0.0.0" }
+paths = { path = "../paths", version = "0.0.0" }
+proc-macro-api = { path = "../proc-macro-api", version = "0.0.0" }
+crossbeam = "0.8.1"
+
+[dev-dependencies]
+expect-test = "1.4.0"
+
+# used as proc macro test targets
+proc-macro-test = { path = "../proc-macro-test" }
+
+[features]
+sysroot-abi = []
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/build.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/build.rs
new file mode 100644
index 000000000..a8c732f31
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/build.rs
@@ -0,0 +1,25 @@
+//! Determine the rustc version that `proc-macro-srv` (and thus the sysroot ABI) is
+//! built with, and make it accessible at runtime for ABI selection.
+
+use std::{env, fs::File, io::Write, path::PathBuf, process::Command};
+
+fn main() {
+ let mut path = PathBuf::from(env::var_os("OUT_DIR").unwrap());
+ path.push("rustc_version.rs");
+ let mut f = File::create(&path).unwrap();
+
+ let rustc = env::var("RUSTC").expect("proc-macro-srv's build script expects RUSTC to be set");
+ let output = Command::new(rustc).arg("--version").output().expect("rustc --version must run");
+ let version_string = std::str::from_utf8(&output.stdout[..])
+ .expect("rustc --version output must be UTF-8")
+ .trim();
+
+ write!(
+ f,
+ "
+ #[allow(dead_code)]
+ pub(crate) const RUSTC_VERSION_STRING: &str = {version_string:?};
+ "
+ )
+ .unwrap();
+}
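
The build script writes `rustc_version.rs` into `OUT_DIR`, so the crate can splice the generated constant in with `include!`. A minimal sketch of how such a build-script artifact is typically consumed; the `rustc_info` module name is illustrative and not something this diff defines, and the code only builds inside a crate that actually has this build script:

```rust
// Hypothetical consumer of the build-script output: `include!` splices the
// generated file (which defines `RUSTC_VERSION_STRING`) into the crate.
mod rustc_info {
    include!(concat!(env!("OUT_DIR"), "/rustc_version.rs"));
}

fn main() {
    // The constant holds the trimmed output of `rustc --version`,
    // e.g. "rustc 1.64.0 (a55dd71d5 2022-09-19)".
    println!("built against: {}", rustc_info::RUSTC_VERSION_STRING);
}
```
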
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/mod.rs
new file mode 100644
index 000000000..1c91ac0fa
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/mod.rs
@@ -0,0 +1,104 @@
+//! Macro ABI for version 1.58 of rustc
+
+#[allow(dead_code)]
+#[doc(hidden)]
+mod proc_macro;
+
+#[allow(dead_code)]
+#[doc(hidden)]
+mod ra_server;
+
+use libloading::Library;
+use proc_macro_api::ProcMacroKind;
+
+use super::PanicMessage;
+
+pub(crate) struct Abi {
+ exported_macros: Vec<proc_macro::bridge::client::ProcMacro>,
+}
+
+impl From<proc_macro::bridge::PanicMessage> for PanicMessage {
+ fn from(p: proc_macro::bridge::PanicMessage) -> Self {
+ Self { message: p.as_str().map(|s| s.to_string()) }
+ }
+}
+
+impl Abi {
+ pub unsafe fn from_lib(lib: &Library, symbol_name: String) -> Result<Abi, libloading::Error> {
+ let macros: libloading::Symbol<'_, &&[proc_macro::bridge::client::ProcMacro]> =
+ lib.get(symbol_name.as_bytes())?;
+ Ok(Self { exported_macros: macros.to_vec() })
+ }
+
+ pub fn expand(
+ &self,
+ macro_name: &str,
+ macro_body: &tt::Subtree,
+ attributes: Option<&tt::Subtree>,
+ ) -> Result<tt::Subtree, PanicMessage> {
+ let parsed_body = ra_server::TokenStream::with_subtree(macro_body.clone());
+
+ let parsed_attributes = attributes.map_or(ra_server::TokenStream::new(), |attr| {
+ ra_server::TokenStream::with_subtree(attr.clone())
+ });
+
+ for proc_macro in &self.exported_macros {
+ match proc_macro {
+ proc_macro::bridge::client::ProcMacro::CustomDerive {
+ trait_name, client, ..
+ } if *trait_name == macro_name => {
+ let res = client.run(
+ &proc_macro::bridge::server::SameThread,
+ ra_server::RustAnalyzer::default(),
+ parsed_body,
+ true,
+ );
+ return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
+ }
+ proc_macro::bridge::client::ProcMacro::Bang { name, client }
+ if *name == macro_name =>
+ {
+ let res = client.run(
+ &proc_macro::bridge::server::SameThread,
+ ra_server::RustAnalyzer::default(),
+ parsed_body,
+ true,
+ );
+ return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
+ }
+ proc_macro::bridge::client::ProcMacro::Attr { name, client }
+ if *name == macro_name =>
+ {
+ let res = client.run(
+ &proc_macro::bridge::server::SameThread,
+ ra_server::RustAnalyzer::default(),
+ parsed_attributes,
+ parsed_body,
+ true,
+ );
+ return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
+ }
+ _ => continue,
+ }
+ }
+
+ Err(proc_macro::bridge::PanicMessage::String("Nothing to expand".to_string()).into())
+ }
+
+ pub fn list_macros(&self) -> Vec<(String, ProcMacroKind)> {
+ self.exported_macros
+ .iter()
+ .map(|proc_macro| match proc_macro {
+ proc_macro::bridge::client::ProcMacro::CustomDerive { trait_name, .. } => {
+ (trait_name.to_string(), ProcMacroKind::CustomDerive)
+ }
+ proc_macro::bridge::client::ProcMacro::Bang { name, .. } => {
+ (name.to_string(), ProcMacroKind::FuncLike)
+ }
+ proc_macro::bridge::client::ProcMacro::Attr { name, .. } => {
+ (name.to_string(), ProcMacroKind::Attr)
+ }
+ })
+ .collect()
+ }
+}
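
A sketch of how this `Abi` wrapper is driven. Since `Abi` is `pub(crate)`, the code would have to live inside `proc-macro-srv` itself; the dylib path and the symbol name below are placeholders (the real registration symbol is presumably discovered from the dylib's exported symbols by `dylib.rs`):

```rust
// Sketch only: `Abi` is crate-private, and the symbol name here is hypothetical.
use libloading::Library;

fn list_exported_macros(dylib_path: &std::path::Path) -> Result<(), Box<dyn std::error::Error>> {
    // SAFETY: loading a dylib runs its initializers; the file must be a
    // proc-macro crate compiled by a rustc with a matching ABI.
    let lib = unsafe { Library::new(dylib_path)? };
    let abi = unsafe { Abi::from_lib(&lib, "__hypothetical_proc_macro_decls".to_string())? };

    for (name, _kind) in abi.list_macros() {
        println!("found proc macro: {name}");
    }
    Ok(())
}
```
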
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/buffer.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/buffer.rs
new file mode 100644
index 000000000..d82669d3e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/buffer.rs
@@ -0,0 +1,143 @@
+//! Buffer management for same-process client<->server communication.
+
+use std::io::{self, Write};
+use std::mem;
+use std::ops::{Deref, DerefMut};
+use std::slice;
+
+#[repr(C)]
+pub struct Buffer<T: Copy> {
+ data: *mut T,
+ len: usize,
+ capacity: usize,
+ reserve: extern "C" fn(Buffer<T>, usize) -> Buffer<T>,
+ drop: extern "C" fn(Buffer<T>),
+}
+
+unsafe impl<T: Copy + Sync> Sync for Buffer<T> {}
+unsafe impl<T: Copy + Send> Send for Buffer<T> {}
+
+impl<T: Copy> Default for Buffer<T> {
+ fn default() -> Self {
+ Self::from(vec![])
+ }
+}
+
+impl<T: Copy> Deref for Buffer<T> {
+ type Target = [T];
+ fn deref(&self) -> &[T] {
+ unsafe { slice::from_raw_parts(self.data as *const T, self.len) }
+ }
+}
+
+impl<T: Copy> DerefMut for Buffer<T> {
+ fn deref_mut(&mut self) -> &mut [T] {
+ unsafe { slice::from_raw_parts_mut(self.data, self.len) }
+ }
+}
+
+impl<T: Copy> Buffer<T> {
+ pub(super) fn new() -> Self {
+ Self::default()
+ }
+
+ pub(super) fn clear(&mut self) {
+ self.len = 0;
+ }
+
+ pub(super) fn take(&mut self) -> Self {
+ mem::take(self)
+ }
+
+ // We have the array method separate from extending from a slice. This is
+ // because in the case of small arrays, codegen can be more efficient
+ // (avoiding a memmove call). With extend_from_slice, LLVM at least
+ // currently is not able to make that optimization.
+ pub(super) fn extend_from_array<const N: usize>(&mut self, xs: &[T; N]) {
+ if xs.len() > (self.capacity - self.len) {
+ let b = self.take();
+ *self = (b.reserve)(b, xs.len());
+ }
+ unsafe {
+ xs.as_ptr().copy_to_nonoverlapping(self.data.add(self.len), xs.len());
+ self.len += xs.len();
+ }
+ }
+
+ pub(super) fn extend_from_slice(&mut self, xs: &[T]) {
+ if xs.len() > (self.capacity - self.len) {
+ let b = self.take();
+ *self = (b.reserve)(b, xs.len());
+ }
+ unsafe {
+ xs.as_ptr().copy_to_nonoverlapping(self.data.add(self.len), xs.len());
+ self.len += xs.len();
+ }
+ }
+
+ pub(super) fn push(&mut self, v: T) {
+ // The code here is taken from Vec::push, and we know that reserve()
+ // will panic if we're exceeding isize::MAX bytes and so there's no need
+ // to check for overflow.
+ if self.len == self.capacity {
+ let b = self.take();
+ *self = (b.reserve)(b, 1);
+ }
+ unsafe {
+ *self.data.add(self.len) = v;
+ self.len += 1;
+ }
+ }
+}
+
+impl Write for Buffer<u8> {
+ fn write(&mut self, xs: &[u8]) -> io::Result<usize> {
+ self.extend_from_slice(xs);
+ Ok(xs.len())
+ }
+
+ fn write_all(&mut self, xs: &[u8]) -> io::Result<()> {
+ self.extend_from_slice(xs);
+ Ok(())
+ }
+
+ fn flush(&mut self) -> io::Result<()> {
+ Ok(())
+ }
+}
+
+impl<T: Copy> Drop for Buffer<T> {
+ fn drop(&mut self) {
+ let b = self.take();
+ (b.drop)(b);
+ }
+}
+
+impl<T: Copy> From<Vec<T>> for Buffer<T> {
+ fn from(mut v: Vec<T>) -> Self {
+ let (data, len, capacity) = (v.as_mut_ptr(), v.len(), v.capacity());
+ mem::forget(v);
+
+ // This utility function is nested in here because it can *only*
+ // be safely called on `Buffer`s created by *this* `proc_macro`.
+ fn to_vec<T: Copy>(b: Buffer<T>) -> Vec<T> {
+ unsafe {
+ let Buffer { data, len, capacity, .. } = b;
+ mem::forget(b);
+ Vec::from_raw_parts(data, len, capacity)
+ }
+ }
+
+ extern "C" fn reserve<T: Copy>(b: Buffer<T>, additional: usize) -> Buffer<T> {
+ let mut v = to_vec(b);
+ v.reserve(additional);
+ Buffer::from(v)
+ }
+
+ extern "C" fn drop<T: Copy>(b: Buffer<T>) {
+ mem::drop(to_vec(b));
+ }
+
+ Buffer { data, len, capacity, reserve, drop }
+ }
+}
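
`Buffer` carries `reserve` and `drop` as C-ABI function pointers so that memory is always grown and freed by the same copy of the allocator (and `Vec` internals) that created it, even when client and server were built by different compilers. A stripped-down, standalone sketch of that ownership pattern (not the bridge's actual type):

```rust
use std::mem;

// Minimal owner-aware byte buffer: the creator supplies the `drop` function,
// so whichever side built the Vec is the side that frees it.
#[repr(C)]
struct RawBuf {
    data: *mut u8,
    len: usize,
    capacity: usize,
    drop: extern "C" fn(RawBuf),
}

extern "C" fn drop_raw(b: RawBuf) {
    // SAFETY: `b` was built from a Vec by `into_raw` below and is dropped exactly once.
    unsafe { drop(Vec::from_raw_parts(b.data, b.len, b.capacity)) }
}

fn into_raw(mut v: Vec<u8>) -> RawBuf {
    let buf = RawBuf { data: v.as_mut_ptr(), len: v.len(), capacity: v.capacity(), drop: drop_raw };
    mem::forget(v); // ownership moves into RawBuf
    buf
}

fn main() {
    let raw = into_raw(vec![1u8, 2, 3]);
    // ... hand `raw` across an extern "C" boundary ...
    (raw.drop)(raw); // freed by the allocator that created it
}
```

In the real `Buffer<T>`, `reserve` is carried the same way so growth also happens on the owning side.
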
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/client.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/client.rs
new file mode 100644
index 000000000..ed0e91da3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/client.rs
@@ -0,0 +1,485 @@
+//! Client-side types.
+
+use super::*;
+
+macro_rules! define_handles {
+ (
+ 'owned: $($oty:ident,)*
+ 'interned: $($ity:ident,)*
+ ) => {
+ #[repr(C)]
+ #[allow(non_snake_case)]
+ pub struct HandleCounters {
+ $($oty: AtomicUsize,)*
+ $($ity: AtomicUsize,)*
+ }
+
+ impl HandleCounters {
+ // FIXME(eddyb) use a reference to the `static COUNTERS`, instead of
+ // a wrapper `fn` pointer, once `const fn` can reference `static`s.
+ extern "C" fn get() -> &'static Self {
+ static COUNTERS: HandleCounters = HandleCounters {
+ $($oty: AtomicUsize::new(1),)*
+ $($ity: AtomicUsize::new(1),)*
+ };
+ &COUNTERS
+ }
+ }
+
+ // FIXME(eddyb) generate the definition of `HandleStore` in `server.rs`.
+ #[repr(C)]
+ #[allow(non_snake_case)]
+ pub(super) struct HandleStore<S: server::Types> {
+ $($oty: handle::OwnedStore<S::$oty>,)*
+ $($ity: handle::InternedStore<S::$ity>,)*
+ }
+
+ impl<S: server::Types> HandleStore<S> {
+ pub(super) fn new(handle_counters: &'static HandleCounters) -> Self {
+ HandleStore {
+ $($oty: handle::OwnedStore::new(&handle_counters.$oty),)*
+ $($ity: handle::InternedStore::new(&handle_counters.$ity),)*
+ }
+ }
+ }
+
+ $(
+ #[repr(C)]
+ pub(crate) struct $oty(handle::Handle);
+
+ // Forward `Drop::drop` to the inherent `drop` method.
+ impl Drop for $oty {
+ fn drop(&mut self) {
+ $oty(self.0).drop();
+ }
+ }
+
+ impl<S> Encode<S> for $oty {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ let handle = self.0;
+ mem::forget(self);
+ handle.encode(w, s);
+ }
+ }
+
+ impl<S: server::Types> DecodeMut<'_, '_, HandleStore<server::MarkedTypes<S>>>
+ for Marked<S::$oty, $oty>
+ {
+ fn decode(r: &mut Reader<'_>, s: &mut HandleStore<server::MarkedTypes<S>>) -> Self {
+ s.$oty.take(handle::Handle::decode(r, &mut ()))
+ }
+ }
+
+ impl<S> Encode<S> for &$oty {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.0.encode(w, s);
+ }
+ }
+
+ impl<'s, S: server::Types> Decode<'_, 's, HandleStore<server::MarkedTypes<S>>>
+ for &'s Marked<S::$oty, $oty>
+ {
+ fn decode(r: &mut Reader<'_>, s: &'s HandleStore<server::MarkedTypes<S>>) -> Self {
+ &s.$oty[handle::Handle::decode(r, &mut ())]
+ }
+ }
+
+ impl<S> Encode<S> for &mut $oty {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.0.encode(w, s);
+ }
+ }
+
+ impl<'s, S: server::Types> DecodeMut<'_, 's, HandleStore<server::MarkedTypes<S>>>
+ for &'s mut Marked<S::$oty, $oty>
+ {
+ fn decode(
+ r: &mut Reader<'_>,
+ s: &'s mut HandleStore<server::MarkedTypes<S>>
+ ) -> Self {
+ &mut s.$oty[handle::Handle::decode(r, &mut ())]
+ }
+ }
+
+ impl<S: server::Types> Encode<HandleStore<server::MarkedTypes<S>>>
+ for Marked<S::$oty, $oty>
+ {
+ fn encode(self, w: &mut Writer, s: &mut HandleStore<server::MarkedTypes<S>>) {
+ s.$oty.alloc(self).encode(w, s);
+ }
+ }
+
+ impl<S> DecodeMut<'_, '_, S> for $oty {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ $oty(handle::Handle::decode(r, s))
+ }
+ }
+ )*
+
+ $(
+ #[repr(C)]
+ #[derive(Copy, Clone, PartialEq, Eq, Hash)]
+ pub(crate) struct $ity(handle::Handle);
+
+ impl<S> Encode<S> for $ity {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.0.encode(w, s);
+ }
+ }
+
+ impl<S: server::Types> DecodeMut<'_, '_, HandleStore<server::MarkedTypes<S>>>
+ for Marked<S::$ity, $ity>
+ {
+ fn decode(r: &mut Reader<'_>, s: &mut HandleStore<server::MarkedTypes<S>>) -> Self {
+ s.$ity.copy(handle::Handle::decode(r, &mut ()))
+ }
+ }
+
+ impl<S: server::Types> Encode<HandleStore<server::MarkedTypes<S>>>
+ for Marked<S::$ity, $ity>
+ {
+ fn encode(self, w: &mut Writer, s: &mut HandleStore<server::MarkedTypes<S>>) {
+ s.$ity.alloc(self).encode(w, s);
+ }
+ }
+
+ impl<S> DecodeMut<'_, '_, S> for $ity {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ $ity(handle::Handle::decode(r, s))
+ }
+ }
+ )*
+ }
+}
+define_handles! {
+ 'owned:
+ FreeFunctions,
+ TokenStream,
+ TokenStreamBuilder,
+ TokenStreamIter,
+ Group,
+ Literal,
+ SourceFile,
+ MultiSpan,
+ Diagnostic,
+
+ 'interned:
+ Punct,
+ Ident,
+ Span,
+}
+
+// FIXME(eddyb) generate these impls by pattern-matching on the
+// names of methods - also could use the presence of `fn drop`
+// to distinguish between 'owned and 'interned, above.
+// Alternatively, special "modes" could be listed for types in with_api
+// instead of pattern matching on methods, here and in server decl.
+
+impl Clone for TokenStream {
+ fn clone(&self) -> Self {
+ self.clone()
+ }
+}
+
+impl Clone for TokenStreamIter {
+ fn clone(&self) -> Self {
+ self.clone()
+ }
+}
+
+impl Clone for Group {
+ fn clone(&self) -> Self {
+ self.clone()
+ }
+}
+
+impl Clone for Literal {
+ fn clone(&self) -> Self {
+ self.clone()
+ }
+}
+
+impl fmt::Debug for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("Literal")
+ // format the kind without quotes, as in `kind: Float`
+ .field("kind", &format_args!("{}", &self.debug_kind()))
+ .field("symbol", &self.symbol())
+ // format `Some("...")` on one line even in {:#?} mode
+ .field("suffix", &format_args!("{:?}", &self.suffix()))
+ .field("span", &self.span())
+ .finish()
+ }
+}
+
+impl Clone for SourceFile {
+ fn clone(&self) -> Self {
+ self.clone()
+ }
+}
+
+impl fmt::Debug for Span {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.debug())
+ }
+}
+
+macro_rules! define_client_side {
+ ($($name:ident {
+ $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)*;)*
+ }),* $(,)?) => {
+ $(impl $name {
+ $(pub(crate) fn $method($($arg: $arg_ty),*) $(-> $ret_ty)* {
+ Bridge::with(|bridge| {
+ let mut b = bridge.cached_buffer.take();
+
+ b.clear();
+ api_tags::Method::$name(api_tags::$name::$method).encode(&mut b, &mut ());
+ reverse_encode!(b; $($arg),*);
+
+ b = bridge.dispatch.call(b);
+
+ let r = Result::<_, PanicMessage>::decode(&mut &b[..], &mut ());
+
+ bridge.cached_buffer = b;
+
+ r.unwrap_or_else(|e| panic::resume_unwind(e.into()))
+ })
+ })*
+ })*
+ }
+}
+with_api!(self, self, define_client_side);
+
+enum BridgeState<'a> {
+ /// No server is currently connected to this client.
+ NotConnected,
+
+ /// A server is connected and available for requests.
+ Connected(Bridge<'a>),
+
+ /// Access to the bridge is being exclusively acquired
+ /// (e.g., during `BridgeState::with`).
+ InUse,
+}
+
+enum BridgeStateL {}
+
+impl<'a> scoped_cell::ApplyL<'a> for BridgeStateL {
+ type Out = BridgeState<'a>;
+}
+
+thread_local! {
+ static BRIDGE_STATE: scoped_cell::ScopedCell<BridgeStateL> =
+ scoped_cell::ScopedCell::new(BridgeState::NotConnected);
+}
+
+impl BridgeState<'_> {
+ /// Take exclusive control of the thread-local
+ /// `BridgeState`, and pass it to `f`, mutably.
+ /// The state will be restored after `f` exits, even
+ /// by panic, including modifications made to it by `f`.
+ ///
+ /// N.B., while `f` is running, the thread-local state
+ /// is `BridgeState::InUse`.
+ fn with<R>(f: impl FnOnce(&mut BridgeState<'_>) -> R) -> R {
+ BRIDGE_STATE.with(|state| {
+ state.replace(BridgeState::InUse, |mut state| {
+ // FIXME(#52812) pass `f` directly to `replace` when `RefMutL` is gone
+ f(&mut *state)
+ })
+ })
+ }
+}
+
+impl Bridge<'_> {
+ pub(crate) fn is_available() -> bool {
+ BridgeState::with(|state| match state {
+ BridgeState::Connected(_) | BridgeState::InUse => true,
+ BridgeState::NotConnected => false,
+ })
+ }
+
+ fn enter<R>(self, f: impl FnOnce() -> R) -> R {
+ let force_show_panics = self.force_show_panics;
+ // Hide the default panic output within `proc_macro` expansions.
+ // NB. the server can't do this because it may use a different libstd.
+ static HIDE_PANICS_DURING_EXPANSION: Once = Once::new();
+ HIDE_PANICS_DURING_EXPANSION.call_once(|| {
+ let prev = panic::take_hook();
+ panic::set_hook(Box::new(move |info| {
+ let show = BridgeState::with(|state| match state {
+ BridgeState::NotConnected => true,
+ BridgeState::Connected(_) | BridgeState::InUse => force_show_panics,
+ });
+ if show {
+ prev(info)
+ }
+ }));
+ });
+
+ BRIDGE_STATE.with(|state| state.set(BridgeState::Connected(self), f))
+ }
+
+ fn with<R>(f: impl FnOnce(&mut Bridge<'_>) -> R) -> R {
+ BridgeState::with(|state| match state {
+ BridgeState::NotConnected => {
+ panic!("procedural macro API is used outside of a procedural macro");
+ }
+ BridgeState::InUse => {
+ panic!("procedural macro API is used while it's already in use");
+ }
+ BridgeState::Connected(bridge) => f(bridge),
+ })
+ }
+}
+
+/// A client-side "global object" (usually a function pointer),
+/// which may be using a different `proc_macro` from the one
+/// used by the server, but can be interacted with compatibly.
+///
+/// N.B., `F` must have FFI-friendly memory layout (e.g., a pointer).
+/// The call ABI of function pointers used for `F` doesn't
+/// need to match between server and client, since it's only
+/// passed between them and (eventually) called by the client.
+#[repr(C)]
+#[derive(Copy, Clone)]
+pub struct Client<F> {
+ // FIXME(eddyb) use a reference to the `static COUNTERS`, instead of
+ // a wrapper `fn` pointer, once `const fn` can reference `static`s.
+ pub(super) get_handle_counters: extern "C" fn() -> &'static HandleCounters,
+ pub(super) run: extern "C" fn(Bridge<'_>, F) -> Buffer<u8>,
+ pub(super) f: F,
+}
+
+/// Client-side helper for handling client panics, entering the bridge,
+/// deserializing input and serializing output.
+// FIXME(eddyb) maybe replace `Bridge::enter` with this?
+fn run_client<A: for<'a, 's> DecodeMut<'a, 's, ()>, R: Encode<()>>(
+ mut bridge: Bridge<'_>,
+ f: impl FnOnce(A) -> R,
+) -> Buffer<u8> {
+ // The initial `cached_buffer` contains the input.
+ let mut b = bridge.cached_buffer.take();
+
+ panic::catch_unwind(panic::AssertUnwindSafe(|| {
+ bridge.enter(|| {
+ let reader = &mut &b[..];
+ let input = A::decode(reader, &mut ());
+
+ // Put the `cached_buffer` back in the `Bridge`, for requests.
+ Bridge::with(|bridge| bridge.cached_buffer = b.take());
+
+ let output = f(input);
+
+ // Take the `cached_buffer` back out, for the output value.
+ b = Bridge::with(|bridge| bridge.cached_buffer.take());
+
+ // HACK(eddyb) Separate encoding a success value (`Ok(output)`)
+ // from encoding a panic (`Err(e: PanicMessage)`) to avoid
+ // having handles outside the `bridge.enter(|| ...)` scope, and
+ // to catch panics that could happen while encoding the success.
+ //
+ // Note that panics should be impossible beyond this point, but
+ // this is defensively trying to avoid any accidental panicking
+ // reaching the `extern "C"` (which should `abort` but might not
+ // at the moment, so this is also potentially preventing UB).
+ b.clear();
+ Ok::<_, ()>(output).encode(&mut b, &mut ());
+ })
+ }))
+ .map_err(PanicMessage::from)
+ .unwrap_or_else(|e| {
+ b.clear();
+ Err::<(), _>(e).encode(&mut b, &mut ());
+ });
+ b
+}
+
+impl Client<fn(super::super::TokenStream) -> super::super::TokenStream> {
+ pub fn expand1(f: fn(super::super::TokenStream) -> super::super::TokenStream) -> Self {
+ extern "C" fn run(
+ bridge: Bridge<'_>,
+ f: impl FnOnce(super::super::TokenStream) -> super::super::TokenStream,
+ ) -> Buffer<u8> {
+ run_client(bridge, |input| f(super::super::TokenStream(input)).0)
+ }
+ Client { get_handle_counters: HandleCounters::get, run, f }
+ }
+}
+
+impl Client<fn(super::super::TokenStream, super::super::TokenStream) -> super::super::TokenStream> {
+ pub fn expand2(
+ f: fn(super::super::TokenStream, super::super::TokenStream) -> super::super::TokenStream,
+ ) -> Self {
+ extern "C" fn run(
+ bridge: Bridge<'_>,
+ f: impl FnOnce(
+ super::super::TokenStream,
+ super::super::TokenStream,
+ ) -> super::super::TokenStream,
+ ) -> Buffer<u8> {
+ run_client(bridge, |(input, input2)| {
+ f(super::super::TokenStream(input), super::super::TokenStream(input2)).0
+ })
+ }
+ Client { get_handle_counters: HandleCounters::get, run, f }
+ }
+}
+
+#[repr(C)]
+#[derive(Copy, Clone)]
+pub enum ProcMacro {
+ CustomDerive {
+ trait_name: &'static str,
+ attributes: &'static [&'static str],
+ client: Client<fn(super::super::TokenStream) -> super::super::TokenStream>,
+ },
+
+ Attr {
+ name: &'static str,
+ client: Client<
+ fn(super::super::TokenStream, super::super::TokenStream) -> super::super::TokenStream,
+ >,
+ },
+
+ Bang {
+ name: &'static str,
+ client: Client<fn(super::super::TokenStream) -> super::super::TokenStream>,
+ },
+}
+
+impl ProcMacro {
+ pub fn name(&self) -> &'static str {
+ match self {
+ ProcMacro::CustomDerive { trait_name, .. } => trait_name,
+ ProcMacro::Attr { name, .. } => name,
+ ProcMacro::Bang { name, .. } => name,
+ }
+ }
+
+ pub fn custom_derive(
+ trait_name: &'static str,
+ attributes: &'static [&'static str],
+ expand: fn(super::super::TokenStream) -> super::super::TokenStream,
+ ) -> Self {
+ ProcMacro::CustomDerive { trait_name, attributes, client: Client::expand1(expand) }
+ }
+
+ pub fn attr(
+ name: &'static str,
+ expand: fn(
+ super::super::TokenStream,
+ super::super::TokenStream,
+ ) -> super::super::TokenStream,
+ ) -> Self {
+ ProcMacro::Attr { name, client: Client::expand2(expand) }
+ }
+
+ pub fn bang(
+ name: &'static str,
+ expand: fn(super::super::TokenStream) -> super::super::TokenStream,
+ ) -> Self {
+ ProcMacro::Bang { name, client: Client::expand1(expand) }
+ }
+}
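
Each `ProcMacro` variant pairs a macro name with a `Client` wrapping the user's expander `fn`, which is essentially what `#[proc_macro]`, `#[proc_macro_attribute]` and `#[proc_macro_derive]` register; `Abi::expand` (in `mod.rs` above) then dispatches by comparing names. A hedged, standalone miniature of that registration-and-dispatch shape, with plain strings standing in for token streams:

```rust
// Simplified stand-in for the bridge's registration table: each entry pairs a
// macro name with an expander function pointer, like ProcMacro::{custom_derive, bang}.
#[derive(Copy, Clone)]
enum MacroEntry {
    Bang { name: &'static str, expand: fn(&str) -> String },
    CustomDerive { trait_name: &'static str, expand: fn(&str) -> String },
}

fn shout(body: &str) -> String {
    body.to_uppercase()
}

// In a real proc-macro dylib the analogous table is emitted by rustc under a
// per-crate symbol; here it is just a static slice searched by name.
static EXPORTED_MACROS: &[MacroEntry] = &[
    MacroEntry::Bang { name: "shout", expand: shout },
    MacroEntry::CustomDerive { trait_name: "Shout", expand: shout },
];

fn expand(macro_name: &str, body: &str) -> Option<String> {
    EXPORTED_MACROS.iter().find_map(|entry| match entry {
        MacroEntry::Bang { name, expand: run } if *name == macro_name => Some(run(body)),
        MacroEntry::CustomDerive { trait_name, expand: run } if *trait_name == macro_name => {
            Some(run(body))
        }
        _ => None,
    })
}

fn main() {
    assert_eq!(expand("shout", "hello"), Some("HELLO".to_string()));
    assert_eq!(expand("missing", "hello"), None);
}
```
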
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/closure.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/closure.rs
new file mode 100644
index 000000000..5be71cc3d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/closure.rs
@@ -0,0 +1,24 @@
+//! Closure type (equivalent to `&mut dyn FnMut(A) -> R`) that's `repr(C)`.
+
+#[repr(C)]
+pub struct Closure<'a, A, R> {
+ call: unsafe extern "C" fn(&mut Env, A) -> R,
+ env: &'a mut Env,
+}
+
+struct Env;
+
+impl<'a, A, R, F: FnMut(A) -> R> From<&'a mut F> for Closure<'a, A, R> {
+ fn from(f: &'a mut F) -> Self {
+ unsafe extern "C" fn call<A, R, F: FnMut(A) -> R>(env: &mut Env, arg: A) -> R {
+ (*(env as *mut _ as *mut F))(arg)
+ }
+ Closure { call: call::<A, R, F>, env: unsafe { &mut *(f as *mut _ as *mut Env) } }
+ }
+}
+
+impl<'a, A, R> Closure<'a, A, R> {
+ pub fn call(&mut self, arg: A) -> R {
+ unsafe { (self.call)(self.env, arg) }
+ }
+}
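
`Closure` erases a `&mut impl FnMut(A) -> R` behind a `repr(C)` pair of function pointer plus opaque environment pointer, so the server's dispatch closure can cross the C ABI between two separately built copies of `proc_macro`. A standalone sketch of the same trick, with a `main` exercising the captured state:

```rust
// Standalone re-creation of the trick above (types renamed to avoid implying
// they are the bridge's own).
#[repr(C)]
struct CClosure<'a, A, R> {
    call: unsafe extern "C" fn(&mut Env, A) -> R,
    env: &'a mut Env,
}

struct Env; // opaque: only ever cast back to the concrete closure type

impl<'a, A, R, F: FnMut(A) -> R> From<&'a mut F> for CClosure<'a, A, R> {
    fn from(f: &'a mut F) -> Self {
        unsafe extern "C" fn call<A, R, F: FnMut(A) -> R>(env: &mut Env, arg: A) -> R {
            // SAFETY: `env` was produced from an `&mut F` in `from` below.
            (*(env as *mut _ as *mut F))(arg)
        }
        CClosure { call: call::<A, R, F>, env: unsafe { &mut *(f as *mut _ as *mut Env) } }
    }
}

impl<'a, A, R> CClosure<'a, A, R> {
    fn call(&mut self, arg: A) -> R {
        unsafe { (self.call)(self.env, arg) }
    }
}

fn main() {
    let mut total = 0;
    let mut add = |x: i32| { total += x; total };
    let mut c: CClosure<'_, i32, i32> = CClosure::from(&mut add);
    // Each call goes through the extern "C" fn pointer, not a Rust trait object.
    assert_eq!(c.call(2), 2);
    assert_eq!(c.call(3), 5);
}
```
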
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/handle.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/handle.rs
new file mode 100644
index 000000000..bcbb86812
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/handle.rs
@@ -0,0 +1,70 @@
+//! Server-side handles and storage for per-handle data.
+
+use std::collections::{BTreeMap, HashMap};
+use std::hash::Hash;
+use std::num::NonZeroU32;
+use std::ops::{Index, IndexMut};
+use std::sync::atomic::{AtomicUsize, Ordering};
+
+pub(super) type Handle = NonZeroU32;
+
+pub(super) struct OwnedStore<T: 'static> {
+ counter: &'static AtomicUsize,
+ data: BTreeMap<Handle, T>,
+}
+
+impl<T> OwnedStore<T> {
+ pub(super) fn new(counter: &'static AtomicUsize) -> Self {
+ // Ensure the handle counter isn't 0, which would panic later,
+ // when `NonZeroU32::new` (aka `Handle::new`) is called in `alloc`.
+ assert_ne!(counter.load(Ordering::SeqCst), 0);
+
+ OwnedStore { counter, data: BTreeMap::new() }
+ }
+}
+
+impl<T> OwnedStore<T> {
+ pub(super) fn alloc(&mut self, x: T) -> Handle {
+ let counter = self.counter.fetch_add(1, Ordering::SeqCst);
+ let handle = Handle::new(counter as u32).expect("`proc_macro` handle counter overflowed");
+ assert!(self.data.insert(handle, x).is_none());
+ handle
+ }
+
+ pub(super) fn take(&mut self, h: Handle) -> T {
+ self.data.remove(&h).expect("use-after-free in `proc_macro` handle")
+ }
+}
+
+impl<T> Index<Handle> for OwnedStore<T> {
+ type Output = T;
+ fn index(&self, h: Handle) -> &T {
+ self.data.get(&h).expect("use-after-free in `proc_macro` handle")
+ }
+}
+
+impl<T> IndexMut<Handle> for OwnedStore<T> {
+ fn index_mut(&mut self, h: Handle) -> &mut T {
+ self.data.get_mut(&h).expect("use-after-free in `proc_macro` handle")
+ }
+}
+
+pub(super) struct InternedStore<T: 'static> {
+ owned: OwnedStore<T>,
+ interner: HashMap<T, Handle>,
+}
+
+impl<T: Copy + Eq + Hash> InternedStore<T> {
+ pub(super) fn new(counter: &'static AtomicUsize) -> Self {
+ InternedStore { owned: OwnedStore::new(counter), interner: HashMap::new() }
+ }
+
+ pub(super) fn alloc(&mut self, x: T) -> Handle {
+ let owned = &mut self.owned;
+ *self.interner.entry(x).or_insert_with(|| owned.alloc(x))
+ }
+
+ pub(super) fn copy(&mut self, h: Handle) -> T {
+ self.owned[h]
+ }
+}
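
`InternedStore` hands out one `NonZeroU32` handle per distinct `Copy` value, so only small integers cross the serialization boundary, while `OwnedStore` tracks handles whose values are moved in and out. A minimal standalone interner in the same spirit (the `char` payload is just an example):

```rust
use std::collections::HashMap;
use std::num::NonZeroU32;

// Equal values share one handle, and `copy` hands the value back out by value,
// with no lifetime attached to the handle.
#[derive(Default)]
struct Interner {
    values: Vec<char>,
    handles: HashMap<char, NonZeroU32>,
}

impl Interner {
    fn alloc(&mut self, value: char) -> NonZeroU32 {
        let values = &mut self.values;
        *self.handles.entry(value).or_insert_with(|| {
            values.push(value);
            // Handles start at 1, so NonZeroU32::new never sees 0 here.
            NonZeroU32::new(values.len() as u32).unwrap()
        })
    }

    fn copy(&self, handle: NonZeroU32) -> char {
        self.values[(handle.get() - 1) as usize]
    }
}

fn main() {
    let mut interner = Interner::default();
    let plus_a = interner.alloc('+');
    let plus_b = interner.alloc('+');
    let star = interner.alloc('*');
    assert_eq!(plus_a, plus_b); // same value, same handle
    assert_ne!(plus_a, star);
    assert_eq!(interner.copy(plus_a), '+');
}
```
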
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/mod.rs
new file mode 100644
index 000000000..b7968c529
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/mod.rs
@@ -0,0 +1,429 @@
+//! Internal interface for communicating between a `proc_macro` client
+//! (a proc macro crate) and a `proc_macro` server (a compiler front-end).
+//!
+//! Serialization (with C ABI buffers) and unique integer handles are employed
+//! to allow safely interfacing between two copies of `proc_macro` built
+//! (from the same source) by different compilers with potentially mismatching
+//! Rust ABIs (e.g., stage0/bin/rustc vs stage1/bin/rustc during bootstrap).
+
+#![deny(unsafe_code)]
+
+pub use super::{Delimiter, Level, LineColumn, Spacing};
+use std::fmt;
+use std::hash::Hash;
+use std::marker;
+use std::mem;
+use std::ops::Bound;
+use std::panic;
+use std::sync::atomic::AtomicUsize;
+use std::sync::Once;
+use std::thread;
+
+/// Higher-order macro describing the server RPC API, allowing automatic
+/// generation of type-safe Rust APIs, both client-side and server-side.
+///
+/// `with_api!(MySelf, my_self, my_macro)` expands to:
+/// ```rust,ignore (pseudo-code)
+/// my_macro! {
+/// // ...
+/// Literal {
+/// // ...
+/// fn character(ch: char) -> MySelf::Literal;
+/// // ...
+/// fn span(my_self: &MySelf::Literal) -> MySelf::Span;
+/// fn set_span(my_self: &mut MySelf::Literal, span: MySelf::Span);
+/// },
+/// // ...
+/// }
+/// ```
+///
+/// The first two arguments serve to customize the argument names
+/// and argument/return types, to enable several different use cases:
+///
+/// If `my_self` is just `self`, then each `fn` signature can be used
+/// as-is for a method. If it's anything else (`self_` in practice),
+/// then the signatures don't have a special `self` argument, and
+/// can, therefore, have a different one introduced.
+///
+/// If `MySelf` is just `Self`, then the types are only valid inside
+/// a trait or a trait impl, where the trait has associated types
+/// for each of the API types. If non-associated types are desired,
+/// a module name (`self` in practice) can be used instead of `Self`.
+macro_rules! with_api {
+ ($S:ident, $self:ident, $m:ident) => {
+ $m! {
+ FreeFunctions {
+ fn drop($self: $S::FreeFunctions);
+ fn track_env_var(var: &str, value: Option<&str>);
+ fn track_path(path: &str);
+ },
+ TokenStream {
+ fn drop($self: $S::TokenStream);
+ fn clone($self: &$S::TokenStream) -> $S::TokenStream;
+ fn new() -> $S::TokenStream;
+ fn is_empty($self: &$S::TokenStream) -> bool;
+ fn expand_expr($self: &$S::TokenStream) -> Result<$S::TokenStream, ()>;
+ fn from_str(src: &str) -> $S::TokenStream;
+ fn to_string($self: &$S::TokenStream) -> String;
+ fn from_token_tree(
+ tree: TokenTree<$S::Group, $S::Punct, $S::Ident, $S::Literal>,
+ ) -> $S::TokenStream;
+ fn into_iter($self: $S::TokenStream) -> $S::TokenStreamIter;
+ },
+ TokenStreamBuilder {
+ fn drop($self: $S::TokenStreamBuilder);
+ fn new() -> $S::TokenStreamBuilder;
+ fn push($self: &mut $S::TokenStreamBuilder, stream: $S::TokenStream);
+ fn build($self: $S::TokenStreamBuilder) -> $S::TokenStream;
+ },
+ TokenStreamIter {
+ fn drop($self: $S::TokenStreamIter);
+ fn clone($self: &$S::TokenStreamIter) -> $S::TokenStreamIter;
+ fn next(
+ $self: &mut $S::TokenStreamIter,
+ ) -> Option<TokenTree<$S::Group, $S::Punct, $S::Ident, $S::Literal>>;
+ },
+ Group {
+ fn drop($self: $S::Group);
+ fn clone($self: &$S::Group) -> $S::Group;
+ fn new(delimiter: Delimiter, stream: $S::TokenStream) -> $S::Group;
+ fn delimiter($self: &$S::Group) -> Delimiter;
+ fn stream($self: &$S::Group) -> $S::TokenStream;
+ fn span($self: &$S::Group) -> $S::Span;
+ fn span_open($self: &$S::Group) -> $S::Span;
+ fn span_close($self: &$S::Group) -> $S::Span;
+ fn set_span($self: &mut $S::Group, span: $S::Span);
+ },
+ Punct {
+ fn new(ch: char, spacing: Spacing) -> $S::Punct;
+ fn as_char($self: $S::Punct) -> char;
+ fn spacing($self: $S::Punct) -> Spacing;
+ fn span($self: $S::Punct) -> $S::Span;
+ fn with_span($self: $S::Punct, span: $S::Span) -> $S::Punct;
+ },
+ Ident {
+ fn new(string: &str, span: $S::Span, is_raw: bool) -> $S::Ident;
+ fn span($self: $S::Ident) -> $S::Span;
+ fn with_span($self: $S::Ident, span: $S::Span) -> $S::Ident;
+ },
+ Literal {
+ fn drop($self: $S::Literal);
+ fn clone($self: &$S::Literal) -> $S::Literal;
+ fn from_str(s: &str) -> Result<$S::Literal, ()>;
+ fn to_string($self: &$S::Literal) -> String;
+ fn debug_kind($self: &$S::Literal) -> String;
+ fn symbol($self: &$S::Literal) -> String;
+ fn suffix($self: &$S::Literal) -> Option<String>;
+ fn integer(n: &str) -> $S::Literal;
+ fn typed_integer(n: &str, kind: &str) -> $S::Literal;
+ fn float(n: &str) -> $S::Literal;
+ fn f32(n: &str) -> $S::Literal;
+ fn f64(n: &str) -> $S::Literal;
+ fn string(string: &str) -> $S::Literal;
+ fn character(ch: char) -> $S::Literal;
+ fn byte_string(bytes: &[u8]) -> $S::Literal;
+ fn span($self: &$S::Literal) -> $S::Span;
+ fn set_span($self: &mut $S::Literal, span: $S::Span);
+ fn subspan(
+ $self: &$S::Literal,
+ start: Bound<usize>,
+ end: Bound<usize>,
+ ) -> Option<$S::Span>;
+ },
+ SourceFile {
+ fn drop($self: $S::SourceFile);
+ fn clone($self: &$S::SourceFile) -> $S::SourceFile;
+ fn eq($self: &$S::SourceFile, other: &$S::SourceFile) -> bool;
+ fn path($self: &$S::SourceFile) -> String;
+ fn is_real($self: &$S::SourceFile) -> bool;
+ },
+ MultiSpan {
+ fn drop($self: $S::MultiSpan);
+ fn new() -> $S::MultiSpan;
+ fn push($self: &mut $S::MultiSpan, span: $S::Span);
+ },
+ Diagnostic {
+ fn drop($self: $S::Diagnostic);
+ fn new(level: Level, msg: &str, span: $S::MultiSpan) -> $S::Diagnostic;
+ fn sub(
+ $self: &mut $S::Diagnostic,
+ level: Level,
+ msg: &str,
+ span: $S::MultiSpan,
+ );
+ fn emit($self: $S::Diagnostic);
+ },
+ Span {
+ fn debug($self: $S::Span) -> String;
+ fn def_site() -> $S::Span;
+ fn call_site() -> $S::Span;
+ fn mixed_site() -> $S::Span;
+ fn source_file($self: $S::Span) -> $S::SourceFile;
+ fn parent($self: $S::Span) -> Option<$S::Span>;
+ fn source($self: $S::Span) -> $S::Span;
+ fn start($self: $S::Span) -> LineColumn;
+ fn end($self: $S::Span) -> LineColumn;
+ fn before($self: $S::Span) -> $S::Span;
+ fn after($self: $S::Span) -> $S::Span;
+ fn join($self: $S::Span, other: $S::Span) -> Option<$S::Span>;
+ fn resolved_at($self: $S::Span, at: $S::Span) -> $S::Span;
+ fn source_text($self: $S::Span) -> Option<String>;
+ fn save_span($self: $S::Span) -> usize;
+ fn recover_proc_macro_span(id: usize) -> $S::Span;
+ },
+ }
+ };
+}
+
+// FIXME(eddyb) this calls `encode` for each argument, but in reverse,
+// to avoid borrow conflicts from borrows started by `&mut` arguments.
+macro_rules! reverse_encode {
+ ($writer:ident;) => {};
+ ($writer:ident; $first:ident $(, $rest:ident)*) => {
+ reverse_encode!($writer; $($rest),*);
+ $first.encode(&mut $writer, &mut ());
+ }
+}
+
+// FIXME(eddyb) this calls `decode` for each argument, but in reverse,
+// to avoid borrow conflicts from borrows started by `&mut` arguments.
+macro_rules! reverse_decode {
+ ($reader:ident, $s:ident;) => {};
+ ($reader:ident, $s:ident; $first:ident: $first_ty:ty $(, $rest:ident: $rest_ty:ty)*) => {
+ reverse_decode!($reader, $s; $($rest: $rest_ty),*);
+ let $first = <$first_ty>::decode(&mut $reader, $s);
+ }
+}
+
+#[allow(unsafe_code)]
+mod buffer;
+#[forbid(unsafe_code)]
+pub mod client;
+#[allow(unsafe_code)]
+mod closure;
+#[forbid(unsafe_code)]
+mod handle;
+#[macro_use]
+#[forbid(unsafe_code)]
+mod rpc;
+#[allow(unsafe_code)]
+mod scoped_cell;
+#[forbid(unsafe_code)]
+pub mod server;
+
+use buffer::Buffer;
+pub use rpc::PanicMessage;
+use rpc::{Decode, DecodeMut, Encode, Reader, Writer};
+
+/// An active connection between a server and a client.
+/// The server creates the bridge (`Bridge::run_server` in `server.rs`),
+/// then passes it to the client through the function pointer in the `run`
+/// field of `client::Client`. The client holds its copy of the `Bridge`
+/// in TLS during its execution (`Bridge::{enter, with}` in `client.rs`).
+#[repr(C)]
+pub struct Bridge<'a> {
+ /// Reusable buffer (only `clear`-ed, never shrunk), primarily
+ /// used for making requests, but also for passing input to client.
+ cached_buffer: Buffer<u8>,
+
+ /// Server-side function that the client uses to make requests.
+ dispatch: closure::Closure<'a, Buffer<u8>, Buffer<u8>>,
+
+    /// If `true`, always invoke the default panic hook.
+ force_show_panics: bool,
+}
+
+#[forbid(unsafe_code)]
+#[allow(non_camel_case_types)]
+mod api_tags {
+ use super::rpc::{DecodeMut, Encode, Reader, Writer};
+
+ macro_rules! declare_tags {
+ ($($name:ident {
+ $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)*;)*
+ }),* $(,)?) => {
+ $(
+ pub(super) enum $name {
+ $($method),*
+ }
+ rpc_encode_decode!(enum $name { $($method),* });
+ )*
+
+
+ pub(super) enum Method {
+ $($name($name)),*
+ }
+ rpc_encode_decode!(enum Method { $($name(m)),* });
+ }
+ }
+ with_api!(self, self, declare_tags);
+}
+
+/// Helper to wrap associated types to allow trait impl dispatch.
+/// That is, normally a pair of impls for `T::Foo` and `T::Bar`
+/// can overlap, but if the impls are, instead, on types like
+/// `Marked<T::Foo, Foo>` and `Marked<T::Bar, Bar>`, they can't.
+trait Mark {
+ type Unmarked;
+ fn mark(unmarked: Self::Unmarked) -> Self;
+}
+
+/// Unwrap types wrapped by `Mark::mark` (see `Mark` for details).
+trait Unmark {
+ type Unmarked;
+ fn unmark(self) -> Self::Unmarked;
+}
+
+#[derive(Copy, Clone, PartialEq, Eq, Hash)]
+struct Marked<T, M> {
+ value: T,
+ _marker: marker::PhantomData<M>,
+}
+
+impl<T, M> Mark for Marked<T, M> {
+ type Unmarked = T;
+ fn mark(unmarked: Self::Unmarked) -> Self {
+ Marked { value: unmarked, _marker: marker::PhantomData }
+ }
+}
+impl<T, M> Unmark for Marked<T, M> {
+ type Unmarked = T;
+ fn unmark(self) -> Self::Unmarked {
+ self.value
+ }
+}
+impl<'a, T, M> Unmark for &'a Marked<T, M> {
+ type Unmarked = &'a T;
+ fn unmark(self) -> Self::Unmarked {
+ &self.value
+ }
+}
+impl<'a, T, M> Unmark for &'a mut Marked<T, M> {
+ type Unmarked = &'a mut T;
+ fn unmark(self) -> Self::Unmarked {
+ &mut self.value
+ }
+}
+
+impl<T: Mark> Mark for Option<T> {
+ type Unmarked = Option<T::Unmarked>;
+ fn mark(unmarked: Self::Unmarked) -> Self {
+ unmarked.map(T::mark)
+ }
+}
+impl<T: Unmark> Unmark for Option<T> {
+ type Unmarked = Option<T::Unmarked>;
+ fn unmark(self) -> Self::Unmarked {
+ self.map(T::unmark)
+ }
+}
+
+impl<T: Mark, E: Mark> Mark for Result<T, E> {
+ type Unmarked = Result<T::Unmarked, E::Unmarked>;
+ fn mark(unmarked: Self::Unmarked) -> Self {
+ unmarked.map(T::mark).map_err(E::mark)
+ }
+}
+impl<T: Unmark, E: Unmark> Unmark for Result<T, E> {
+ type Unmarked = Result<T::Unmarked, E::Unmarked>;
+ fn unmark(self) -> Self::Unmarked {
+ self.map(T::unmark).map_err(E::unmark)
+ }
+}
+
+macro_rules! mark_noop {
+ ($($ty:ty),* $(,)?) => {
+ $(
+ impl Mark for $ty {
+ type Unmarked = Self;
+ fn mark(unmarked: Self::Unmarked) -> Self {
+ unmarked
+ }
+ }
+ impl Unmark for $ty {
+ type Unmarked = Self;
+ fn unmark(self) -> Self::Unmarked {
+ self
+ }
+ }
+ )*
+ }
+}
+mark_noop! {
+ (),
+ bool,
+ char,
+ &'_ [u8],
+ &'_ str,
+ String,
+ usize,
+ Delimiter,
+ Level,
+ LineColumn,
+ Spacing,
+ Bound<usize>,
+}
+
+rpc_encode_decode!(
+ enum Delimiter {
+ Parenthesis,
+ Brace,
+ Bracket,
+ None,
+ }
+);
+rpc_encode_decode!(
+ enum Level {
+ Error,
+ Warning,
+ Note,
+ Help,
+ }
+);
+rpc_encode_decode!(struct LineColumn { line, column });
+rpc_encode_decode!(
+ enum Spacing {
+ Alone,
+ Joint,
+ }
+);
+
+#[derive(Clone)]
+pub enum TokenTree<G, P, I, L> {
+ Group(G),
+ Punct(P),
+ Ident(I),
+ Literal(L),
+}
+
+impl<G: Mark, P: Mark, I: Mark, L: Mark> Mark for TokenTree<G, P, I, L> {
+ type Unmarked = TokenTree<G::Unmarked, P::Unmarked, I::Unmarked, L::Unmarked>;
+ fn mark(unmarked: Self::Unmarked) -> Self {
+ match unmarked {
+ TokenTree::Group(tt) => TokenTree::Group(G::mark(tt)),
+ TokenTree::Punct(tt) => TokenTree::Punct(P::mark(tt)),
+ TokenTree::Ident(tt) => TokenTree::Ident(I::mark(tt)),
+ TokenTree::Literal(tt) => TokenTree::Literal(L::mark(tt)),
+ }
+ }
+}
+impl<G: Unmark, P: Unmark, I: Unmark, L: Unmark> Unmark for TokenTree<G, P, I, L> {
+ type Unmarked = TokenTree<G::Unmarked, P::Unmarked, I::Unmarked, L::Unmarked>;
+ fn unmark(self) -> Self::Unmarked {
+ match self {
+ TokenTree::Group(tt) => TokenTree::Group(tt.unmark()),
+ TokenTree::Punct(tt) => TokenTree::Punct(tt.unmark()),
+ TokenTree::Ident(tt) => TokenTree::Ident(tt.unmark()),
+ TokenTree::Literal(tt) => TokenTree::Literal(tt.unmark()),
+ }
+ }
+}
+
+rpc_encode_decode!(
+ enum TokenTree<G, P, I, L> {
+ Group(tt),
+ Punct(tt),
+ Ident(tt),
+ Literal(tt),
+ }
+);
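
The `Marked<T, M>` device deserves isolating: a zero-sized `PhantomData` marker records which API type a value plays, so impls written against `Marked<S::TokenStream, TokenStream>` and `Marked<S::Span, Span>` can never overlap even if a server picks the same concrete type for both. A standalone sketch of that pattern (marker names are illustrative):

```rust
use std::marker::PhantomData;

// Zero-cost newtype whose phantom parameter distinguishes otherwise identical
// representations at the type level.
struct Marked<T, M> {
    value: T,
    _marker: PhantomData<M>,
}

struct TokenStreamTag; // marker only, never instantiated
struct SpanTag;        // marker only, never instantiated

fn mark<T, M>(value: T) -> Marked<T, M> {
    Marked { value, _marker: PhantomData }
}

fn main() {
    // A server could represent both token streams and spans as plain u32 handles;
    // the marker still keeps the two apart at the type level.
    let ts: Marked<u32, TokenStreamTag> = mark(7);
    let sp: Marked<u32, SpanTag> = mark(7);
    assert_eq!(ts.value, sp.value); // same representation, distinct types
}
```
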
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/rpc.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/rpc.rs
new file mode 100644
index 000000000..d50564d01
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/rpc.rs
@@ -0,0 +1,305 @@
+//! Serialization for client-server communication.
+
+use std::any::Any;
+use std::char;
+use std::io::Write;
+use std::num::NonZeroU32;
+use std::ops::Bound;
+use std::str;
+
+pub(super) type Writer = super::buffer::Buffer<u8>;
+
+pub(super) trait Encode<S>: Sized {
+ fn encode(self, w: &mut Writer, s: &mut S);
+}
+
+pub(super) type Reader<'a> = &'a [u8];
+
+pub(super) trait Decode<'a, 's, S>: Sized {
+ fn decode(r: &mut Reader<'a>, s: &'s S) -> Self;
+}
+
+pub(super) trait DecodeMut<'a, 's, S>: Sized {
+ fn decode(r: &mut Reader<'a>, s: &'s mut S) -> Self;
+}
+
+macro_rules! rpc_encode_decode {
+ (le $ty:ty) => {
+ impl<S> Encode<S> for $ty {
+ fn encode(self, w: &mut Writer, _: &mut S) {
+ w.extend_from_array(&self.to_le_bytes());
+ }
+ }
+
+ impl<S> DecodeMut<'_, '_, S> for $ty {
+ fn decode(r: &mut Reader<'_>, _: &mut S) -> Self {
+ const N: usize = ::std::mem::size_of::<$ty>();
+
+ let mut bytes = [0; N];
+ bytes.copy_from_slice(&r[..N]);
+ *r = &r[N..];
+
+ Self::from_le_bytes(bytes)
+ }
+ }
+ };
+ (struct $name:ident { $($field:ident),* $(,)? }) => {
+ impl<S> Encode<S> for $name {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ $(self.$field.encode(w, s);)*
+ }
+ }
+
+ impl<S> DecodeMut<'_, '_, S> for $name {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ $name {
+ $($field: DecodeMut::decode(r, s)),*
+ }
+ }
+ }
+ };
+ (enum $name:ident $(<$($T:ident),+>)? { $($variant:ident $(($field:ident))*),* $(,)? }) => {
+ impl<S, $($($T: Encode<S>),+)?> Encode<S> for $name $(<$($T),+>)? {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ // HACK(eddyb): `Tag` enum duplicated between the
+ // two impls as there's no other place to stash it.
+ #[allow(non_upper_case_globals)]
+ mod tag {
+ #[repr(u8)] enum Tag { $($variant),* }
+
+ $(pub const $variant: u8 = Tag::$variant as u8;)*
+ }
+
+ match self {
+ $($name::$variant $(($field))* => {
+ tag::$variant.encode(w, s);
+ $($field.encode(w, s);)*
+ })*
+ }
+ }
+ }
+
+ impl<'a, S, $($($T: for<'s> DecodeMut<'a, 's, S>),+)?> DecodeMut<'a, '_, S>
+ for $name $(<$($T),+>)?
+ {
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ // HACK(eddyb): `Tag` enum duplicated between the
+ // two impls as there's no other place to stash it.
+ #[allow(non_upper_case_globals)]
+ mod tag {
+ #[repr(u8)] enum Tag { $($variant),* }
+
+ $(pub const $variant: u8 = Tag::$variant as u8;)*
+ }
+
+ match u8::decode(r, s) {
+ $(tag::$variant => {
+ $(let $field = DecodeMut::decode(r, s);)*
+ $name::$variant $(($field))*
+ })*
+ _ => unreachable!(),
+ }
+ }
+ }
+ }
+}
+
+impl<S> Encode<S> for () {
+ fn encode(self, _: &mut Writer, _: &mut S) {}
+}
+
+impl<S> DecodeMut<'_, '_, S> for () {
+ fn decode(_: &mut Reader<'_>, _: &mut S) -> Self {}
+}
+
+impl<S> Encode<S> for u8 {
+ fn encode(self, w: &mut Writer, _: &mut S) {
+ w.push(self);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for u8 {
+ fn decode(r: &mut Reader<'_>, _: &mut S) -> Self {
+ let x = r[0];
+ *r = &r[1..];
+ x
+ }
+}
+
+rpc_encode_decode!(le u32);
+rpc_encode_decode!(le usize);
+
+impl<S> Encode<S> for bool {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ (self as u8).encode(w, s);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for bool {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ match u8::decode(r, s) {
+ 0 => false,
+ 1 => true,
+ _ => unreachable!(),
+ }
+ }
+}
+
+impl<S> Encode<S> for char {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ (self as u32).encode(w, s);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for char {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ char::from_u32(u32::decode(r, s)).unwrap()
+ }
+}
+
+impl<S> Encode<S> for NonZeroU32 {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.get().encode(w, s);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for NonZeroU32 {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ Self::new(u32::decode(r, s)).unwrap()
+ }
+}
+
+impl<S, A: Encode<S>, B: Encode<S>> Encode<S> for (A, B) {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.0.encode(w, s);
+ self.1.encode(w, s);
+ }
+}
+
+impl<'a, S, A: for<'s> DecodeMut<'a, 's, S>, B: for<'s> DecodeMut<'a, 's, S>> DecodeMut<'a, '_, S>
+ for (A, B)
+{
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ (DecodeMut::decode(r, s), DecodeMut::decode(r, s))
+ }
+}
+
+rpc_encode_decode!(
+ enum Bound<T> {
+ Included(x),
+ Excluded(x),
+ Unbounded,
+ }
+);
+
+rpc_encode_decode!(
+ enum Option<T> {
+ None,
+ Some(x),
+ }
+);
+
+rpc_encode_decode!(
+ enum Result<T, E> {
+ Ok(x),
+ Err(e),
+ }
+);
+
+impl<S> Encode<S> for &[u8] {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.len().encode(w, s);
+ w.write_all(self).unwrap();
+ }
+}
+
+impl<'a, S> DecodeMut<'a, '_, S> for &'a [u8] {
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ let len = usize::decode(r, s);
+ let xs = &r[..len];
+ *r = &r[len..];
+ xs
+ }
+}
+
+impl<S> Encode<S> for &str {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.as_bytes().encode(w, s);
+ }
+}
+
+impl<'a, S> DecodeMut<'a, '_, S> for &'a str {
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ str::from_utf8(<&[u8]>::decode(r, s)).unwrap()
+ }
+}
+
+impl<S> Encode<S> for String {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self[..].encode(w, s);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for String {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ <&str>::decode(r, s).to_string()
+ }
+}
+
+/// Simplified version of panic payloads, ignoring
+/// types other than `&'static str` and `String`.
+pub enum PanicMessage {
+ StaticStr(&'static str),
+ String(String),
+ Unknown,
+}
+
+impl From<Box<dyn Any + Send>> for PanicMessage {
+ fn from(payload: Box<dyn Any + Send + 'static>) -> Self {
+ if let Some(s) = payload.downcast_ref::<&'static str>() {
+ return PanicMessage::StaticStr(s);
+ }
+ if let Ok(s) = payload.downcast::<String>() {
+ return PanicMessage::String(*s);
+ }
+ PanicMessage::Unknown
+ }
+}
+
+impl Into<Box<dyn Any + Send>> for PanicMessage {
+ fn into(self) -> Box<dyn Any + Send> {
+ match self {
+ PanicMessage::StaticStr(s) => Box::new(s),
+ PanicMessage::String(s) => Box::new(s),
+ PanicMessage::Unknown => {
+ struct UnknownPanicMessage;
+ Box::new(UnknownPanicMessage)
+ }
+ }
+ }
+}
+
+impl PanicMessage {
+ pub fn as_str(&self) -> Option<&str> {
+ match self {
+ PanicMessage::StaticStr(s) => Some(s),
+ PanicMessage::String(s) => Some(s),
+ PanicMessage::Unknown => None,
+ }
+ }
+}
+
+impl<S> Encode<S> for PanicMessage {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.as_str().encode(w, s);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for PanicMessage {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ match Option::<String>::decode(r, s) {
+ Some(s) => PanicMessage::String(s),
+ None => PanicMessage::Unknown,
+ }
+ }
+}
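
The wire format built here is deliberately simple: integers as little-endian bytes, byte strings and `str`s as a `usize` length prefix followed by the raw bytes, and `Reader` is just a `&[u8]` cursor advanced by reslicing. A standalone round-trip using the same conventions:

```rust
// Encode: usize length (little-endian) followed by the UTF-8 bytes.
fn encode_str(w: &mut Vec<u8>, s: &str) {
    w.extend_from_slice(&s.len().to_le_bytes());
    w.extend_from_slice(s.as_bytes());
}

// Decode: read the length, borrow the payload out of the underlying buffer,
// then advance the cursor by reassigning the slice (as the rpc module does).
fn decode_str<'a>(r: &mut &'a [u8]) -> &'a str {
    const N: usize = std::mem::size_of::<usize>();
    let mut len_bytes = [0u8; N];
    len_bytes.copy_from_slice(&r[..N]);
    let len = usize::from_le_bytes(len_bytes);
    let bytes = &r[N..N + len];
    *r = &r[N + len..];
    std::str::from_utf8(bytes).unwrap()
}

fn main() {
    let mut buf = Vec::new();
    encode_str(&mut buf, "proc_macro");
    encode_str(&mut buf, "bridge");

    let mut reader: &[u8] = &buf;
    assert_eq!(decode_str(&mut reader), "proc_macro");
    assert_eq!(decode_str(&mut reader), "bridge");
    assert!(reader.is_empty());
}
```
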
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/scoped_cell.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/scoped_cell.rs
new file mode 100644
index 000000000..b0c2e5b9c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/scoped_cell.rs
@@ -0,0 +1,81 @@
+//! `Cell` variant for (scoped) existential lifetimes.
+
+use std::cell::Cell;
+use std::mem;
+use std::ops::{Deref, DerefMut};
+
+/// Type lambda application, with a lifetime.
+#[allow(unused_lifetimes)]
+pub trait ApplyL<'a> {
+ type Out;
+}
+
+/// Type lambda taking a lifetime, i.e., `Lifetime -> Type`.
+pub trait LambdaL: for<'a> ApplyL<'a> {}
+
+impl<T: for<'a> ApplyL<'a>> LambdaL for T {}
+
+// HACK(eddyb) work around projection limitations with a newtype
+// FIXME(#52812) replace with `&'a mut <T as ApplyL<'b>>::Out`
+pub struct RefMutL<'a, 'b, T: LambdaL>(&'a mut <T as ApplyL<'b>>::Out);
+
+impl<'a, 'b, T: LambdaL> Deref for RefMutL<'a, 'b, T> {
+ type Target = <T as ApplyL<'b>>::Out;
+ fn deref(&self) -> &Self::Target {
+ self.0
+ }
+}
+
+impl<'a, 'b, T: LambdaL> DerefMut for RefMutL<'a, 'b, T> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ self.0
+ }
+}
+
+pub struct ScopedCell<T: LambdaL>(Cell<<T as ApplyL<'static>>::Out>);
+
+impl<T: LambdaL> ScopedCell<T> {
+ pub fn new(value: <T as ApplyL<'static>>::Out) -> Self {
+ ScopedCell(Cell::new(value))
+ }
+
+ /// Sets the value in `self` to `replacement` while
+ /// running `f`, which gets the old value, mutably.
+ /// The old value will be restored after `f` exits, even
+ /// by panic, including modifications made to it by `f`.
+ pub fn replace<'a, R>(
+ &self,
+ replacement: <T as ApplyL<'a>>::Out,
+ f: impl for<'b, 'c> FnOnce(RefMutL<'b, 'c, T>) -> R,
+ ) -> R {
+ /// Wrapper that ensures that the cell always gets filled
+ /// (with the original state, optionally changed by `f`),
+ /// even if `f` had panicked.
+ struct PutBackOnDrop<'a, T: LambdaL> {
+ cell: &'a ScopedCell<T>,
+ value: Option<<T as ApplyL<'static>>::Out>,
+ }
+
+ impl<'a, T: LambdaL> Drop for PutBackOnDrop<'a, T> {
+ fn drop(&mut self) {
+ self.cell.0.set(self.value.take().unwrap());
+ }
+ }
+
+ let mut put_back_on_drop = PutBackOnDrop {
+ cell: self,
+ value: Some(self.0.replace(unsafe {
+ let erased = mem::transmute_copy(&replacement);
+ mem::forget(replacement);
+ erased
+ })),
+ };
+
+ f(RefMutL(put_back_on_drop.value.as_mut().unwrap()))
+ }
+
+ /// Sets the value in `self` to `value` while running `f`.
+ pub fn set<R>(&self, value: <T as ApplyL<'_>>::Out, f: impl FnOnce() -> R) -> R {
+ self.replace(value, |_| f())
+ }
+}
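
Stripped of the lifetime-erasing machinery, `ScopedCell::{replace, set}` is the classic set-a-thread-local-and-restore-on-drop pattern that `client.rs` relies on for `BRIDGE_STATE`. A standalone sketch of that pattern with a plain `Cell` (the names here are illustrative):

```rust
use std::cell::Cell;

thread_local! {
    // Stand-in for BRIDGE_STATE: a plain thread-local Cell instead of a ScopedCell.
    static DEPTH: Cell<u32> = Cell::new(0);
}

// Restore the previous value even if `f` panics, mirroring the PutBackOnDrop
// guard inside ScopedCell::replace.
fn with_incremented_depth<R>(f: impl FnOnce() -> R) -> R {
    struct Restore(u32);
    impl Drop for Restore {
        fn drop(&mut self) {
            DEPTH.with(|d| d.set(self.0));
        }
    }

    let _guard = DEPTH.with(|d| {
        let old = d.get();
        d.set(old + 1);
        Restore(old)
    });
    f()
}

fn main() {
    assert_eq!(DEPTH.with(|d| d.get()), 0);
    with_incremented_depth(|| {
        assert_eq!(DEPTH.with(|d| d.get()), 1);
        with_incremented_depth(|| assert_eq!(DEPTH.with(|d| d.get()), 2));
    });
    assert_eq!(DEPTH.with(|d| d.get()), 0); // restored on scope exit
}
```
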
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/server.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/server.rs
new file mode 100644
index 000000000..06a197913
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/server.rs
@@ -0,0 +1,352 @@
+//! Server-side traits.
+
+use super::*;
+
+// FIXME(eddyb) generate the definition of `HandleStore` in `server.rs`.
+use super::client::HandleStore;
+
+/// Declare an associated item of one of the traits below, optionally
+/// adjusting it (i.e., adding bounds to types and default bodies to methods).
+macro_rules! associated_item {
+ (type FreeFunctions) =>
+ (type FreeFunctions: 'static;);
+ (type TokenStream) =>
+ (type TokenStream: 'static + Clone;);
+ (type TokenStreamBuilder) =>
+ (type TokenStreamBuilder: 'static;);
+ (type TokenStreamIter) =>
+ (type TokenStreamIter: 'static + Clone;);
+ (type Group) =>
+ (type Group: 'static + Clone;);
+ (type Punct) =>
+ (type Punct: 'static + Copy + Eq + Hash;);
+ (type Ident) =>
+ (type Ident: 'static + Copy + Eq + Hash;);
+ (type Literal) =>
+ (type Literal: 'static + Clone;);
+ (type SourceFile) =>
+ (type SourceFile: 'static + Clone;);
+ (type MultiSpan) =>
+ (type MultiSpan: 'static;);
+ (type Diagnostic) =>
+ (type Diagnostic: 'static;);
+ (type Span) =>
+ (type Span: 'static + Copy + Eq + Hash;);
+ (fn drop(&mut self, $arg:ident: $arg_ty:ty)) =>
+ (fn drop(&mut self, $arg: $arg_ty) { mem::drop($arg) });
+ (fn clone(&mut self, $arg:ident: $arg_ty:ty) -> $ret_ty:ty) =>
+ (fn clone(&mut self, $arg: $arg_ty) -> $ret_ty { $arg.clone() });
+ ($($item:tt)*) => ($($item)*;)
+}
+
+macro_rules! declare_server_traits {
+ ($($name:ident {
+ $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)?;)*
+ }),* $(,)?) => {
+ pub trait Types {
+ $(associated_item!(type $name);)*
+ }
+
+ $(pub trait $name: Types {
+ $(associated_item!(fn $method(&mut self, $($arg: $arg_ty),*) $(-> $ret_ty)?);)*
+ })*
+
+ pub trait Server: Types $(+ $name)* {}
+ impl<S: Types $(+ $name)*> Server for S {}
+ }
+}
+with_api!(Self, self_, declare_server_traits);
+
+pub(super) struct MarkedTypes<S: Types>(S);
+
+macro_rules! define_mark_types_impls {
+ ($($name:ident {
+ $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)?;)*
+ }),* $(,)?) => {
+ impl<S: Types> Types for MarkedTypes<S> {
+ $(type $name = Marked<S::$name, client::$name>;)*
+ }
+
+ $(impl<S: $name> $name for MarkedTypes<S> {
+ $(fn $method(&mut self, $($arg: $arg_ty),*) $(-> $ret_ty)? {
+ <_>::mark($name::$method(&mut self.0, $($arg.unmark()),*))
+ })*
+ })*
+ }
+}
+with_api!(Self, self_, define_mark_types_impls);
+
+struct Dispatcher<S: Types> {
+ handle_store: HandleStore<S>,
+ server: S,
+}
+
+macro_rules! define_dispatcher_impl {
+ ($($name:ident {
+ $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)?;)*
+ }),* $(,)?) => {
+ // FIXME(eddyb) `pub` only for `ExecutionStrategy` below.
+ pub trait DispatcherTrait {
+ // HACK(eddyb) these are here to allow `Self::$name` to work below.
+ $(type $name;)*
+ fn dispatch(&mut self, b: Buffer<u8>) -> Buffer<u8>;
+ }
+
+ impl<S: Server> DispatcherTrait for Dispatcher<MarkedTypes<S>> {
+ $(type $name = <MarkedTypes<S> as Types>::$name;)*
+ fn dispatch(&mut self, mut b: Buffer<u8>) -> Buffer<u8> {
+ let Dispatcher { handle_store, server } = self;
+
+ let mut reader = &b[..];
+ match api_tags::Method::decode(&mut reader, &mut ()) {
+ $(api_tags::Method::$name(m) => match m {
+ $(api_tags::$name::$method => {
+ let mut call_method = || {
+ reverse_decode!(reader, handle_store; $($arg: $arg_ty),*);
+ $name::$method(server, $($arg),*)
+ };
+ // HACK(eddyb) don't use `panic::catch_unwind` in a panic.
+ // If client and server happen to use the same `libstd`,
+ // `catch_unwind` asserts that the panic counter was 0,
+ // even when the closure passed to it didn't panic.
+ let r = if thread::panicking() {
+ Ok(call_method())
+ } else {
+ panic::catch_unwind(panic::AssertUnwindSafe(call_method))
+ .map_err(PanicMessage::from)
+ };
+
+ b.clear();
+ r.encode(&mut b, handle_store);
+ })*
+ }),*
+ }
+ b
+ }
+ }
+ }
+}
+with_api!(Self, self_, define_dispatcher_impl);
+
+pub trait ExecutionStrategy {
+ fn run_bridge_and_client<D: Copy + Send + 'static>(
+ &self,
+ dispatcher: &mut impl DispatcherTrait,
+ input: Buffer<u8>,
+ run_client: extern "C" fn(Bridge<'_>, D) -> Buffer<u8>,
+ client_data: D,
+ force_show_panics: bool,
+ ) -> Buffer<u8>;
+}
+
+pub struct SameThread;
+
+impl ExecutionStrategy for SameThread {
+ fn run_bridge_and_client<D: Copy + Send + 'static>(
+ &self,
+ dispatcher: &mut impl DispatcherTrait,
+ input: Buffer<u8>,
+ run_client: extern "C" fn(Bridge<'_>, D) -> Buffer<u8>,
+ client_data: D,
+ force_show_panics: bool,
+ ) -> Buffer<u8> {
+ let mut dispatch = |b| dispatcher.dispatch(b);
+
+ run_client(
+ Bridge { cached_buffer: input, dispatch: (&mut dispatch).into(), force_show_panics },
+ client_data,
+ )
+ }
+}
+
+// NOTE(eddyb) Two implementations are provided, the second one is a bit
+// faster but neither is anywhere near as fast as same-thread execution.
+
+pub struct CrossThread1;
+
+impl ExecutionStrategy for CrossThread1 {
+ fn run_bridge_and_client<D: Copy + Send + 'static>(
+ &self,
+ dispatcher: &mut impl DispatcherTrait,
+ input: Buffer<u8>,
+ run_client: extern "C" fn(Bridge<'_>, D) -> Buffer<u8>,
+ client_data: D,
+ force_show_panics: bool,
+ ) -> Buffer<u8> {
+ use std::sync::mpsc::channel;
+
+ let (req_tx, req_rx) = channel();
+ let (res_tx, res_rx) = channel();
+
+ let join_handle = thread::spawn(move || {
+ let mut dispatch = |b| {
+ req_tx.send(b).unwrap();
+ res_rx.recv().unwrap()
+ };
+
+ run_client(
+ Bridge {
+ cached_buffer: input,
+ dispatch: (&mut dispatch).into(),
+ force_show_panics,
+ },
+ client_data,
+ )
+ });
+
+ for b in req_rx {
+ res_tx.send(dispatcher.dispatch(b)).unwrap();
+ }
+
+ join_handle.join().unwrap()
+ }
+}
+
+pub struct CrossThread2;
+
+impl ExecutionStrategy for CrossThread2 {
+ fn run_bridge_and_client<D: Copy + Send + 'static>(
+ &self,
+ dispatcher: &mut impl DispatcherTrait,
+ input: Buffer<u8>,
+ run_client: extern "C" fn(Bridge<'_>, D) -> Buffer<u8>,
+ client_data: D,
+ force_show_panics: bool,
+ ) -> Buffer<u8> {
+ use std::sync::{Arc, Mutex};
+
+ enum State<T> {
+ Req(T),
+ Res(T),
+ }
+
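+ // `state` acts as a single-slot mailbox: the client thread stores `Req`,
+ // parks, and waits for this (server) thread to replace it with `Res`;
+ // park/unpark stands in for a condition variable here.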
+ let mut state = Arc::new(Mutex::new(State::Res(Buffer::new())));
+
+ let server_thread = thread::current();
+ let state2 = state.clone();
+ let join_handle = thread::spawn(move || {
+ let mut dispatch = |b| {
+ *state2.lock().unwrap() = State::Req(b);
+ server_thread.unpark();
+ loop {
+ thread::park();
+ if let State::Res(b) = &mut *state2.lock().unwrap() {
+ break b.take();
+ }
+ }
+ };
+
+ let r = run_client(
+ Bridge {
+ cached_buffer: input,
+ dispatch: (&mut dispatch).into(),
+ force_show_panics,
+ },
+ client_data,
+ );
+
+ // Wake up the server so it can exit the dispatch loop.
+ drop(state2);
+ server_thread.unpark();
+
+ r
+ });
+
+ // Check whether `state2` was dropped, to know when to stop.
+ while Arc::get_mut(&mut state).is_none() {
+ thread::park();
+ let mut b = match &mut *state.lock().unwrap() {
+ State::Req(b) => b.take(),
+ _ => continue,
+ };
+ b = dispatcher.dispatch(b.take());
+ *state.lock().unwrap() = State::Res(b);
+ join_handle.thread().unpark();
+ }
+
+ join_handle.join().unwrap()
+ }
+}
+
+fn run_server<
+ S: Server,
+ I: Encode<HandleStore<MarkedTypes<S>>>,
+ O: for<'a, 's> DecodeMut<'a, 's, HandleStore<MarkedTypes<S>>>,
+ D: Copy + Send + 'static,
+>(
+ strategy: &impl ExecutionStrategy,
+ handle_counters: &'static client::HandleCounters,
+ server: S,
+ input: I,
+ run_client: extern "C" fn(Bridge<'_>, D) -> Buffer<u8>,
+ client_data: D,
+ force_show_panics: bool,
+) -> Result<O, PanicMessage> {
+ let mut dispatcher =
+ Dispatcher { handle_store: HandleStore::new(handle_counters), server: MarkedTypes(server) };
+
+ let mut b = Buffer::new();
+ input.encode(&mut b, &mut dispatcher.handle_store);
+
+ b = strategy.run_bridge_and_client(
+ &mut dispatcher,
+ b,
+ run_client,
+ client_data,
+ force_show_panics,
+ );
+
+ Result::decode(&mut &b[..], &mut dispatcher.handle_store)
+}
+
+impl client::Client<fn(super::super::TokenStream) -> super::super::TokenStream> {
+ pub fn run<S: Server>(
+ &self,
+ strategy: &impl ExecutionStrategy,
+ server: S,
+ input: S::TokenStream,
+ force_show_panics: bool,
+ ) -> Result<S::TokenStream, PanicMessage> {
+ let client::Client { get_handle_counters, run, f } = *self;
+ run_server(
+ strategy,
+ get_handle_counters(),
+ server,
+ <MarkedTypes<S> as Types>::TokenStream::mark(input),
+ run,
+ f,
+ force_show_panics,
+ )
+ .map(<MarkedTypes<S> as Types>::TokenStream::unmark)
+ }
+}
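+
+// Illustrative usage note (names are a sketch, not code from this file): given
+// a `client` of the type above and a server type implementing `Server`,
+// expansion would be driven roughly as
+// `client.run(&SameThread, server, input, force_show_panics)`,
+// yielding `Result<S::TokenStream, PanicMessage>`.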
+
+impl
+ client::Client<
+ fn(super::super::TokenStream, super::super::TokenStream) -> super::super::TokenStream,
+ >
+{
+ pub fn run<S: Server>(
+ &self,
+ strategy: &impl ExecutionStrategy,
+ server: S,
+ input: S::TokenStream,
+ input2: S::TokenStream,
+ force_show_panics: bool,
+ ) -> Result<S::TokenStream, PanicMessage> {
+ let client::Client { get_handle_counters, run, f } = *self;
+ run_server(
+ strategy,
+ get_handle_counters(),
+ server,
+ (
+ <MarkedTypes<S> as Types>::TokenStream::mark(input),
+ <MarkedTypes<S> as Types>::TokenStream::mark(input2),
+ ),
+ run,
+ f,
+ force_show_panics,
+ )
+ .map(<MarkedTypes<S> as Types>::TokenStream::unmark)
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/diagnostic.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/diagnostic.rs
new file mode 100644
index 000000000..cda239f87
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/diagnostic.rs
@@ -0,0 +1,166 @@
+//! lib-proc-macro diagnostic
+//!
+//! Copy from <https://github.com/rust-lang/rust/blob/6050e523bae6de61de4e060facc43dc512adaccd/src/libproc_macro/diagnostic.rs>
+//! augmented with removing unstable features
+
+use super::Span;
+
+/// An enum representing a diagnostic level.
+#[derive(Copy, Clone, Debug)]
+#[non_exhaustive]
+pub enum Level {
+ /// An error.
+ Error,
+ /// A warning.
+ Warning,
+ /// A note.
+ Note,
+ /// A help message.
+ Help,
+}
+
+/// Trait implemented by types that can be converted into a set of `Span`s.
+pub trait MultiSpan {
+ /// Converts `self` into a `Vec<Span>`.
+ fn into_spans(self) -> Vec<Span>;
+}
+
+impl MultiSpan for Span {
+ fn into_spans(self) -> Vec<Span> {
+ vec![self]
+ }
+}
+
+impl MultiSpan for Vec<Span> {
+ fn into_spans(self) -> Vec<Span> {
+ self
+ }
+}
+
+impl<'a> MultiSpan for &'a [Span] {
+ fn into_spans(self) -> Vec<Span> {
+ self.to_vec()
+ }
+}
+
+/// A structure representing a diagnostic message and associated children
+/// messages.
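+///
+/// A usage sketch (spans omitted for brevity; real code would attach them via
+/// [`Diagnostic::spanned`] or [`set_spans`](Diagnostic::set_spans)):
+///
+/// ```ignore
+/// Diagnostic::new(Level::Warning, "something looks off")
+///     .note("extra context for the user")
+///     .emit();
+/// ```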
+#[derive(Clone, Debug)]
+pub struct Diagnostic {
+ level: Level,
+ message: String,
+ spans: Vec<Span>,
+ children: Vec<Diagnostic>,
+}
+
+macro_rules! diagnostic_child_methods {
+ ($spanned:ident, $regular:ident, $level:expr) => {
+ #[doc = concat!("Adds a new child diagnostics message to `self` with the [`",
+ stringify!($level), "`] level, and the given `spans` and `message`.")]
+ pub fn $spanned<S, T>(mut self, spans: S, message: T) -> Diagnostic
+ where
+ S: MultiSpan,
+ T: Into<String>,
+ {
+ self.children.push(Diagnostic::spanned(spans, $level, message));
+ self
+ }
+
+ #[doc = concat!("Adds a new child diagnostic message to `self` with the [`",
+ stringify!($level), "`] level, and the given `message`.")]
+ pub fn $regular<T: Into<String>>(mut self, message: T) -> Diagnostic {
+ self.children.push(Diagnostic::new($level, message));
+ self
+ }
+ };
+}
+
+/// Iterator over the children diagnostics of a `Diagnostic`.
+#[derive(Debug, Clone)]
+pub struct Children<'a>(std::slice::Iter<'a, Diagnostic>);
+
+impl<'a> Iterator for Children<'a> {
+ type Item = &'a Diagnostic;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.0.next()
+ }
+}
+
+impl Diagnostic {
+ /// Creates a new diagnostic with the given `level` and `message`.
+ pub fn new<T: Into<String>>(level: Level, message: T) -> Diagnostic {
+ Diagnostic { level, message: message.into(), spans: vec![], children: vec![] }
+ }
+
+ /// Creates a new diagnostic with the given `level` and `message` pointing to
+ /// the given set of `spans`.
+ pub fn spanned<S, T>(spans: S, level: Level, message: T) -> Diagnostic
+ where
+ S: MultiSpan,
+ T: Into<String>,
+ {
+ Diagnostic { level, message: message.into(), spans: spans.into_spans(), children: vec![] }
+ }
+
+ diagnostic_child_methods!(span_error, error, Level::Error);
+ diagnostic_child_methods!(span_warning, warning, Level::Warning);
+ diagnostic_child_methods!(span_note, note, Level::Note);
+ diagnostic_child_methods!(span_help, help, Level::Help);
+
+ /// Returns the diagnostic `level` for `self`.
+ pub fn level(&self) -> Level {
+ self.level
+ }
+
+ /// Sets the level in `self` to `level`.
+ pub fn set_level(&mut self, level: Level) {
+ self.level = level;
+ }
+
+ /// Returns the message in `self`.
+ pub fn message(&self) -> &str {
+ &self.message
+ }
+
+ /// Sets the message in `self` to `message`.
+ pub fn set_message<T: Into<String>>(&mut self, message: T) {
+ self.message = message.into();
+ }
+
+ /// Returns the `Span`s in `self`.
+ pub fn spans(&self) -> &[Span] {
+ &self.spans
+ }
+
+ /// Sets the `Span`s in `self` to `spans`.
+ pub fn set_spans<S: MultiSpan>(&mut self, spans: S) {
+ self.spans = spans.into_spans();
+ }
+
+ /// Returns an iterator over the children diagnostics of `self`.
+ pub fn children(&self) -> Children<'_> {
+ Children(self.children.iter())
+ }
+
+ /// Emit the diagnostic.
+ pub fn emit(self) {
+ fn to_internal(spans: Vec<Span>) -> super::bridge::client::MultiSpan {
+ let mut multi_span = super::bridge::client::MultiSpan::new();
+ for span in spans {
+ multi_span.push(span.0);
+ }
+ multi_span
+ }
+
+ let mut diag = super::bridge::client::Diagnostic::new(
+ self.level,
+ &self.message[..],
+ to_internal(self.spans),
+ );
+ for c in self.children {
+ diag.sub(c.level, &c.message[..], to_internal(c.spans));
+ }
+ diag.emit();
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/mod.rs
new file mode 100644
index 000000000..4a07f2277
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/mod.rs
@@ -0,0 +1,1056 @@
+//! A support library for macro authors when defining new macros.
+//!
+//! This library, provided by the standard distribution, provides the types
+//! consumed in the interfaces of procedurally defined macro definitions such as
+//! function-like macros `#[proc_macro]`, macro attributes `#[proc_macro_attribute]` and
+//! custom derive attributes `#[proc_macro_derive]`.
+//!
+//! See [the book] for more.
+//!
+//! [the book]: ../book/ch19-06-macros.html#procedural-macros-for-generating-code-from-attributes
+
+#[doc(hidden)]
+pub mod bridge;
+
+mod diagnostic;
+
+pub use diagnostic::{Diagnostic, Level, MultiSpan};
+
+use std::cmp::Ordering;
+use std::ops::RangeBounds;
+use std::path::PathBuf;
+use std::str::FromStr;
+use std::{error, fmt, iter, mem};
+
+/// Determines whether proc_macro has been made accessible to the currently
+/// running program.
+///
+/// The proc_macro crate is only intended for use inside the implementation of
+/// procedural macros. All the functions in this crate panic if invoked from
+/// outside of a procedural macro, such as from a build script or unit test or
+/// ordinary Rust binary.
+///
+/// With consideration for Rust libraries that are designed to support both
+/// macro and non-macro use cases, `proc_macro::is_available()` provides a
+/// non-panicking way to detect whether the infrastructure required to use the
+/// API of proc_macro is presently available. Returns true if invoked from
+/// inside of a procedural macro, false if invoked from any other binary.
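+///
+/// Illustrative sketch (meaningful only inside a procedural macro, so the
+/// example is marked `ignore`):
+///
+/// ```ignore
+/// if is_available() {
+///     // Token-manipulation APIs of this crate may be used here.
+/// }
+/// ```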
+pub fn is_available() -> bool {
+ bridge::Bridge::is_available()
+}
+
+/// The main type provided by this crate, representing an abstract stream of
+/// tokens, or, more specifically, a sequence of token trees.
+/// The type provides interfaces for iterating over those token trees and, conversely,
+/// collecting a number of token trees into one stream.
+///
+/// This is both the input and output of `#[proc_macro]`, `#[proc_macro_attribute]`
+/// and `#[proc_macro_derive]` definitions.
+#[derive(Clone)]
+pub struct TokenStream(bridge::client::TokenStream);
+
+/// Error returned from `TokenStream::from_str`.
+#[non_exhaustive]
+#[derive(Debug)]
+pub struct LexError;
+
+impl fmt::Display for LexError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str("cannot parse string into token stream")
+ }
+}
+
+impl error::Error for LexError {}
+
+/// Error returned from `TokenStream::expand_expr`.
+#[non_exhaustive]
+#[derive(Debug)]
+pub struct ExpandError;
+
+impl fmt::Display for ExpandError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str("macro expansion failed")
+ }
+}
+
+impl error::Error for ExpandError {}
+
+impl TokenStream {
+ /// Returns an empty `TokenStream` containing no token trees.
+ pub fn new() -> TokenStream {
+ TokenStream(bridge::client::TokenStream::new())
+ }
+
+ /// Checks if this `TokenStream` is empty.
+ pub fn is_empty(&self) -> bool {
+ self.0.is_empty()
+ }
+
+ /// Parses this `TokenStream` as an expression and attempts to expand any
+ /// macros within it. Returns the expanded `TokenStream`.
+ ///
+ /// Currently only expressions expanding to literals will succeed, although
+ /// this may be relaxed in the future.
+ ///
+ /// NOTE: In error conditions, `expand_expr` may leave macros unexpanded,
+ /// report an error, failing compilation, and/or return an `Err(..)`. The
+ /// specific behavior for any error condition, and what conditions are
+ /// considered errors, is unspecified and may change in the future.
+ pub fn expand_expr(&self) -> Result<TokenStream, ExpandError> {
+ match bridge::client::TokenStream::expand_expr(&self.0) {
+ Ok(stream) => Ok(TokenStream(stream)),
+ Err(_) => Err(ExpandError),
+ }
+ }
+}
+
+/// Attempts to break the string into tokens and parse those tokens into a token stream.
+/// May fail for a number of reasons, for example, if the string contains unbalanced delimiters
+/// or characters not existing in the language.
+/// All tokens in the parsed stream get `Span::call_site()` spans.
+///
+/// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
+/// change these errors into `LexError`s later.
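+///
+/// Illustrative sketch (a proc-macro context is assumed, hence `ignore`):
+///
+/// ```ignore
+/// let stream: TokenStream = "fn answer() -> u32 { 42 }".parse().unwrap();
+/// assert!(!stream.is_empty());
+/// ```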
+impl FromStr for TokenStream {
+ type Err = LexError;
+
+ fn from_str(src: &str) -> Result<TokenStream, LexError> {
+ Ok(TokenStream(bridge::client::TokenStream::from_str(src)))
+ }
+}
+
+/// Prints the token stream as a string that is supposed to be losslessly convertible back
+/// into the same token stream (modulo spans), except for possibly `TokenTree::Group`s
+/// with `Delimiter::None` delimiters and negative numeric literals.
+impl fmt::Display for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.to_string())
+ }
+}
+
+/// Prints the token stream in a form convenient for debugging.
+impl fmt::Debug for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str("TokenStream ")?;
+ f.debug_list().entries(self.clone()).finish()
+ }
+}
+
+impl Default for TokenStream {
+ fn default() -> Self {
+ TokenStream::new()
+ }
+}
+
+pub use quote::{quote, quote_span};
+
+/// Creates a token stream containing a single token tree.
+impl From<TokenTree> for TokenStream {
+ fn from(tree: TokenTree) -> TokenStream {
+ TokenStream(bridge::client::TokenStream::from_token_tree(match tree {
+ TokenTree::Group(tt) => bridge::TokenTree::Group(tt.0),
+ TokenTree::Punct(tt) => bridge::TokenTree::Punct(tt.0),
+ TokenTree::Ident(tt) => bridge::TokenTree::Ident(tt.0),
+ TokenTree::Literal(tt) => bridge::TokenTree::Literal(tt.0),
+ }))
+ }
+}
+
+/// Collects a number of token trees into a single stream.
+impl iter::FromIterator<TokenTree> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
+ trees.into_iter().map(TokenStream::from).collect()
+ }
+}
+
+/// A "flattening" operation on token streams, collects token trees
+/// from multiple token streams into a single stream.
+impl iter::FromIterator<TokenStream> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+ let mut builder = bridge::client::TokenStreamBuilder::new();
+ streams.into_iter().for_each(|stream| builder.push(stream.0));
+ TokenStream(builder.build())
+ }
+}
+
+impl Extend<TokenTree> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, trees: I) {
+ self.extend(trees.into_iter().map(TokenStream::from));
+ }
+}
+
+impl Extend<TokenStream> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+ // FIXME(eddyb) Use an optimized implementation if/when possible.
+ *self = iter::once(mem::replace(self, Self::new())).chain(streams).collect();
+ }
+}
+
+/// Public implementation details for the `TokenStream` type, such as iterators.
+pub mod token_stream {
+ use super::{bridge, Group, Ident, Literal, Punct, TokenStream, TokenTree};
+
+ /// An iterator over `TokenStream`'s `TokenTree`s.
+ /// The iteration is "shallow", e.g., the iterator doesn't recurse into delimited groups,
+ /// and returns whole groups as token trees.
+ #[derive(Clone)]
+ pub struct IntoIter(bridge::client::TokenStreamIter);
+
+ impl Iterator for IntoIter {
+ type Item = TokenTree;
+
+ fn next(&mut self) -> Option<TokenTree> {
+ self.0.next().map(|tree| match tree {
+ bridge::TokenTree::Group(tt) => TokenTree::Group(Group(tt)),
+ bridge::TokenTree::Punct(tt) => TokenTree::Punct(Punct(tt)),
+ bridge::TokenTree::Ident(tt) => TokenTree::Ident(Ident(tt)),
+ bridge::TokenTree::Literal(tt) => TokenTree::Literal(Literal(tt)),
+ })
+ }
+ }
+
+ impl IntoIterator for TokenStream {
+ type Item = TokenTree;
+ type IntoIter = IntoIter;
+
+ fn into_iter(self) -> IntoIter {
+ IntoIter(self.0.into_iter())
+ }
+ }
+}
+
+/// `quote!(..)` accepts arbitrary tokens and expands into a `TokenStream` describing the input.
+/// For example, `quote!(a + b)` will produce an expression, that, when evaluated, constructs
+/// the `TokenStream` `[Ident("a"), Punct('+', Alone), Ident("b")]`.
+///
+/// Unquoting is done with `$`, and works by taking the single next ident as the unquoted term.
+/// To quote `$` itself, use `$$`.
+//pub macro quote($($t:tt)*) {
+//[> compiler built-in <]
+//}
+
+#[doc(hidden)]
+mod quote;
+
+/// A region of source code, along with macro expansion information.
+#[derive(Copy, Clone)]
+pub struct Span(bridge::client::Span);
+
+macro_rules! diagnostic_method {
+ ($name:ident, $level:expr) => {
+ /// Creates a new `Diagnostic` with the given `message` at the span
+ /// `self`.
+ pub fn $name<T: Into<String>>(self, message: T) -> Diagnostic {
+ Diagnostic::spanned(self, $level, message)
+ }
+ };
+}
+
+impl Span {
+ /// A span that resolves at the macro definition site.
+ pub fn def_site() -> Span {
+ Span(bridge::client::Span::def_site())
+ }
+
+ /// The span of the invocation of the current procedural macro.
+ /// Identifiers created with this span will be resolved as if they were written
+ /// directly at the macro call location (call-site hygiene) and other code
+ /// at the macro call site will be able to refer to them as well.
+ pub fn call_site() -> Span {
+ Span(bridge::client::Span::call_site())
+ }
+
+ /// A span that represents `macro_rules` hygiene, and sometimes resolves at the macro
+ /// definition site (local variables, labels, `$crate`) and sometimes at the macro
+ /// call site (everything else).
+ /// The span location is taken from the call-site.
+ pub fn mixed_site() -> Span {
+ Span(bridge::client::Span::mixed_site())
+ }
+
+ /// The original source file into which this span points.
+ pub fn source_file(&self) -> SourceFile {
+ SourceFile(self.0.source_file())
+ }
+
+ /// The `Span` for the tokens in the previous macro expansion from which
+ /// `self` was generated, if any.
+ pub fn parent(&self) -> Option<Span> {
+ self.0.parent().map(Span)
+ }
+
+ /// The span for the origin source code that `self` was generated from. If
+ /// this `Span` wasn't generated from other macro expansions then the return
+ /// value is the same as `*self`.
+ pub fn source(&self) -> Span {
+ Span(self.0.source())
+ }
+
+ /// Gets the starting line/column in the source file for this span.
+ pub fn start(&self) -> LineColumn {
+ self.0.start().add_1_to_column()
+ }
+
+ /// Gets the ending line/column in the source file for this span.
+ pub fn end(&self) -> LineColumn {
+ self.0.end().add_1_to_column()
+ }
+
+ /// Creates an empty span pointing to directly before this span.
+ pub fn before(&self) -> Span {
+ Span(self.0.before())
+ }
+
+ /// Creates an empty span pointing to directly after this span.
+ pub fn after(&self) -> Span {
+ Span(self.0.after())
+ }
+
+ /// Creates a new span encompassing `self` and `other`.
+ ///
+ /// Returns `None` if `self` and `other` are from different files.
+ pub fn join(&self, other: Span) -> Option<Span> {
+ self.0.join(other.0).map(Span)
+ }
+
+ /// Creates a new span with the same line/column information as `self` but
+ /// that resolves symbols as though it were at `other`.
+ pub fn resolved_at(&self, other: Span) -> Span {
+ Span(self.0.resolved_at(other.0))
+ }
+
+ /// Creates a new span with the same name resolution behavior as `self` but
+ /// with the line/column information of `other`.
+ pub fn located_at(&self, other: Span) -> Span {
+ other.resolved_at(*self)
+ }
+
+ /// Compares two spans to see if they're equal.
+ pub fn eq(&self, other: &Span) -> bool {
+ self.0 == other.0
+ }
+
+ /// Returns the source text behind a span. This preserves the original source
+ /// code, including spaces and comments. It only returns a result if the span
+ /// corresponds to real source code.
+ ///
+ /// Note: The observable result of a macro should only rely on the tokens and
+ /// not on this source text. The result of this function is a best effort to
+ /// be used for diagnostics only.
+ pub fn source_text(&self) -> Option<String> {
+ self.0.source_text()
+ }
+
+ // Used by the implementation of `Span::quote`
+ #[doc(hidden)]
+ pub fn save_span(&self) -> usize {
+ self.0.save_span()
+ }
+
+ // Used by the implementation of `Span::quote`
+ #[doc(hidden)]
+ pub fn recover_proc_macro_span(id: usize) -> Span {
+ Span(bridge::client::Span::recover_proc_macro_span(id))
+ }
+
+ diagnostic_method!(error, Level::Error);
+ diagnostic_method!(warning, Level::Warning);
+ diagnostic_method!(note, Level::Note);
+ diagnostic_method!(help, Level::Help);
+}
+
+/// Prints a span in a form convenient for debugging.
+impl fmt::Debug for Span {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.0.fmt(f)
+ }
+}
+
+/// A line-column pair representing the start or end of a `Span`.
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub struct LineColumn {
+ /// The 1-indexed line in the source file on which the span starts or ends (inclusive).
+ pub line: usize,
+ /// The 1-indexed column (number of bytes in UTF-8 encoding) in the source
+ /// file on which the span starts or ends (inclusive).
+ pub column: usize,
+}
+
+impl LineColumn {
+ fn add_1_to_column(self) -> Self {
+ LineColumn { line: self.line, column: self.column + 1 }
+ }
+}
+
+impl Ord for LineColumn {
+ fn cmp(&self, other: &Self) -> Ordering {
+ self.line.cmp(&other.line).then(self.column.cmp(&other.column))
+ }
+}
+
+impl PartialOrd for LineColumn {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ Some(self.cmp(other))
+ }
+}
+
+/// The source file of a given `Span`.
+#[derive(Clone)]
+pub struct SourceFile(bridge::client::SourceFile);
+
+impl SourceFile {
+ /// Gets the path to this source file.
+ ///
+ /// ### Note
+ /// If the code span associated with this `SourceFile` was generated by an external
+ /// macro, this might not be an actual path on the filesystem. Use [`is_real`] to check.
+ ///
+ /// Also note that even if `is_real` returns `true`, if `--remap-path-prefix` was passed on
+ /// the command line, the path as given might not actually be valid.
+ ///
+ /// [`is_real`]: Self::is_real
+ pub fn path(&self) -> PathBuf {
+ PathBuf::from(self.0.path())
+ }
+
+ /// Returns `true` if this source file is a real source file, and not generated by an external
+ /// macro's expansion.
+ pub fn is_real(&self) -> bool {
+ // This is a hack until intercrate spans are implemented and we can have real source files
+ // for spans generated in external macros.
+ // https://github.com/rust-lang/rust/pull/43604#issuecomment-333334368
+ self.0.is_real()
+ }
+}
+
+impl fmt::Debug for SourceFile {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("SourceFile")
+ .field("path", &self.path())
+ .field("is_real", &self.is_real())
+ .finish()
+ }
+}
+
+impl PartialEq for SourceFile {
+ fn eq(&self, other: &Self) -> bool {
+ self.0.eq(&other.0)
+ }
+}
+
+impl Eq for SourceFile {}
+
+/// A single token or a delimited sequence of token trees (e.g., `[1, (), ..]`).
+#[derive(Clone)]
+pub enum TokenTree {
+ /// A token stream surrounded by bracket delimiters.
+ Group(Group),
+ /// An identifier.
+ Ident(Ident),
+ /// A single punctuation character (`+`, `,`, `$`, etc.).
+ Punct(Punct),
+ /// A literal character (`'a'`), string (`"hello"`), number (`2.3`), etc.
+ Literal(Literal),
+}
+
+impl TokenTree {
+ /// Returns the span of this tree, delegating to the `span` method of
+ /// the contained token or a delimited stream.
+ pub fn span(&self) -> Span {
+ match *self {
+ TokenTree::Group(ref t) => t.span(),
+ TokenTree::Ident(ref t) => t.span(),
+ TokenTree::Punct(ref t) => t.span(),
+ TokenTree::Literal(ref t) => t.span(),
+ }
+ }
+
+ /// Configures the span for *only this token*.
+ ///
+ /// Note that if this token is a `Group` then this method will not configure
+ /// the span of each of the internal tokens; it will simply delegate to
+ /// the `set_span` method of each variant.
+ pub fn set_span(&mut self, span: Span) {
+ match *self {
+ TokenTree::Group(ref mut t) => t.set_span(span),
+ TokenTree::Ident(ref mut t) => t.set_span(span),
+ TokenTree::Punct(ref mut t) => t.set_span(span),
+ TokenTree::Literal(ref mut t) => t.set_span(span),
+ }
+ }
+}
+
+/// Prints the token tree in a form convenient for debugging.
+impl fmt::Debug for TokenTree {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ // Each of these has the name in the struct type in the derived debug,
+ // so don't bother with an extra layer of indirection
+ match *self {
+ TokenTree::Group(ref tt) => tt.fmt(f),
+ TokenTree::Ident(ref tt) => tt.fmt(f),
+ TokenTree::Punct(ref tt) => tt.fmt(f),
+ TokenTree::Literal(ref tt) => tt.fmt(f),
+ }
+ }
+}
+
+impl From<Group> for TokenTree {
+ fn from(g: Group) -> TokenTree {
+ TokenTree::Group(g)
+ }
+}
+
+impl From<Ident> for TokenTree {
+ fn from(g: Ident) -> TokenTree {
+ TokenTree::Ident(g)
+ }
+}
+
+impl From<Punct> for TokenTree {
+ fn from(g: Punct) -> TokenTree {
+ TokenTree::Punct(g)
+ }
+}
+
+impl From<Literal> for TokenTree {
+ fn from(g: Literal) -> TokenTree {
+ TokenTree::Literal(g)
+ }
+}
+
+/// Prints the token tree as a string that is supposed to be losslessly convertible back
+/// into the same token tree (modulo spans), except for possibly `TokenTree::Group`s
+/// with `Delimiter::None` delimiters and negative numeric literals.
+impl fmt::Display for TokenTree {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.to_string())
+ }
+}
+
+/// A delimited token stream.
+///
+/// A `Group` internally contains a `TokenStream` which is surrounded by `Delimiter`s.
+#[derive(Clone)]
+pub struct Group(bridge::client::Group);
+
+/// Describes how a sequence of token trees is delimited.
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum Delimiter {
+ /// `( ... )`
+ Parenthesis,
+ /// `{ ... }`
+ Brace,
+ /// `[ ... ]`
+ Bracket,
+ /// `Ø ... Ø`
+ /// An implicit delimiter, that may, for example, appear around tokens coming from a
+ /// "macro variable" `$var`. It is important to preserve operator priorities in cases like
+ /// `$var * 3` where `$var` is `1 + 2`.
+ /// Implicit delimiters might not survive roundtrip of a token stream through a string.
+ None,
+}
+
+impl Group {
+ /// Creates a new `Group` with the given delimiter and token stream.
+ ///
+ /// This constructor will set the span for this group to
+ /// `Span::call_site()`. To change the span you can use the `set_span`
+ /// method below.
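+ ///
+ /// Illustrative sketch (a proc-macro context is assumed, hence `ignore`):
+ ///
+ /// ```ignore
+ /// let unit = Group::new(Delimiter::Parenthesis, TokenStream::new());
+ /// assert_eq!(unit.delimiter(), Delimiter::Parenthesis);
+ /// ```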
+ pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group {
+ Group(bridge::client::Group::new(delimiter, stream.0))
+ }
+
+ /// Returns the delimiter of this `Group`
+ pub fn delimiter(&self) -> Delimiter {
+ self.0.delimiter()
+ }
+
+ /// Returns the `TokenStream` of tokens that are delimited in this `Group`.
+ ///
+ /// Note that the returned token stream does not include the delimiter
+ /// returned above.
+ pub fn stream(&self) -> TokenStream {
+ TokenStream(self.0.stream())
+ }
+
+ /// Returns the span for the delimiters of this token stream, spanning the
+ /// entire `Group`.
+ ///
+ /// ```text
+ /// pub fn span(&self) -> Span {
+ /// ^^^^^^^
+ /// ```
+ pub fn span(&self) -> Span {
+ Span(self.0.span())
+ }
+
+ /// Returns the span pointing to the opening delimiter of this group.
+ ///
+ /// ```text
+ /// pub fn span_open(&self) -> Span {
+ /// ^
+ /// ```
+ pub fn span_open(&self) -> Span {
+ Span(self.0.span_open())
+ }
+
+ /// Returns the span pointing to the closing delimiter of this group.
+ ///
+ /// ```text
+ /// pub fn span_close(&self) -> Span {
+ /// ^
+ /// ```
+ pub fn span_close(&self) -> Span {
+ Span(self.0.span_close())
+ }
+
+ /// Configures the span for this `Group`'s delimiters, but not its internal
+ /// tokens.
+ ///
+ /// This method will **not** set the span of all the internal tokens spanned
+ /// by this group, but rather it will only set the span of the delimiter
+ /// tokens at the level of the `Group`.
+ pub fn set_span(&mut self, span: Span) {
+ self.0.set_span(span.0);
+ }
+}
+
+/// Prints the group as a string that should be losslessly convertible back
+/// into the same group (modulo spans), except for possibly `TokenTree::Group`s
+/// with `Delimiter::None` delimiters.
+impl fmt::Display for Group {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.to_string())
+ }
+}
+
+impl fmt::Debug for Group {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("Group")
+ .field("delimiter", &self.delimiter())
+ .field("stream", &self.stream())
+ .field("span", &self.span())
+ .finish()
+ }
+}
+
+/// A `Punct` is a single punctuation character such as `+`, `-` or `#`.
+///
+/// Multi-character operators like `+=` are represented as two instances of `Punct` with different
+/// forms of `Spacing` returned.
+#[derive(Clone)]
+pub struct Punct(bridge::client::Punct);
+
+/// Describes whether a `Punct` is followed immediately by another `Punct` ([`Spacing::Joint`]) or
+/// by a different token or whitespace ([`Spacing::Alone`]).
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum Spacing {
+ /// A `Punct` is not immediately followed by another `Punct`.
+ /// E.g. `+` is `Alone` in `+ =`, `+ident` and `+()`.
+ Alone,
+ /// A `Punct` is immediately followed by another `Punct`.
+ /// E.g. `+` is `Joint` in `+=` and `++`.
+ ///
+ /// Additionally, single quote `'` can join with identifiers to form lifetimes: `'ident`.
+ Joint,
+}
+
+impl Punct {
+ /// Creates a new `Punct` from the given character and spacing.
+ /// The `ch` argument must be a valid punctuation character permitted by the language,
+ /// otherwise the function will panic.
+ ///
+ /// The returned `Punct` will have the default span of `Span::call_site()`
+ /// which can be further configured with the `set_span` method below.
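+ ///
+ /// Illustrative sketch (a proc-macro context is assumed, hence `ignore`):
+ ///
+ /// ```ignore
+ /// let plus = Punct::new('+', Spacing::Alone);
+ /// assert_eq!(plus.as_char(), '+');
+ /// ```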
+ pub fn new(ch: char, spacing: Spacing) -> Punct {
+ Punct(bridge::client::Punct::new(ch, spacing))
+ }
+
+ /// Returns the value of this punctuation character as `char`.
+ pub fn as_char(&self) -> char {
+ self.0.as_char()
+ }
+
+ /// Returns the spacing of this punctuation character, indicating whether it's immediately
+ /// followed by another `Punct` in the token stream, so they can potentially be combined into
+ /// a multi-character operator (`Joint`), or it's followed by some other token or whitespace
+ /// (`Alone`) so the operator has certainly ended.
+ pub fn spacing(&self) -> Spacing {
+ self.0.spacing()
+ }
+
+ /// Returns the span for this punctuation character.
+ pub fn span(&self) -> Span {
+ Span(self.0.span())
+ }
+
+ /// Configure the span for this punctuation character.
+ pub fn set_span(&mut self, span: Span) {
+ self.0 = self.0.with_span(span.0);
+ }
+}
+
+/// Prints the punctuation character as a string that should be losslessly convertible
+/// back into the same character.
+impl fmt::Display for Punct {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.to_string())
+ }
+}
+
+impl fmt::Debug for Punct {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("Punct")
+ .field("ch", &self.as_char())
+ .field("spacing", &self.spacing())
+ .field("span", &self.span())
+ .finish()
+ }
+}
+
+impl PartialEq<char> for Punct {
+ fn eq(&self, rhs: &char) -> bool {
+ self.as_char() == *rhs
+ }
+}
+
+impl PartialEq<Punct> for char {
+ fn eq(&self, rhs: &Punct) -> bool {
+ *self == rhs.as_char()
+ }
+}
+
+/// An identifier (`ident`).
+#[derive(Clone)]
+pub struct Ident(bridge::client::Ident);
+
+impl Ident {
+ /// Creates a new `Ident` with the given `string` as well as the specified
+ /// `span`.
+ /// The `string` argument must be a valid identifier permitted by the
+ /// language (including keywords, e.g. `self` or `fn`). Otherwise, the function will panic.
+ ///
+ /// Note that `span`, currently in rustc, configures the hygiene information
+ /// for this identifier.
+ ///
+ /// As of this time `Span::call_site()` explicitly opts-in to "call-site" hygiene
+ /// meaning that identifiers created with this span will be resolved as if they were written
+ /// directly at the location of the macro call, and other code at the macro call site will be
+ /// able to refer to them as well.
+ ///
+ /// Later spans like `Span::def_site()` will allow opting in to "definition-site" hygiene
+ /// meaning that identifiers created with this span will be resolved at the location of the
+ /// macro definition and other code at the macro call site will not be able to refer to them.
+ ///
+ /// Due to the current importance of hygiene this constructor, unlike other
+ /// tokens, requires a `Span` to be specified at construction.
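+ ///
+ /// Illustrative sketch (valid only during macro expansion, hence `ignore`):
+ ///
+ /// ```ignore
+ /// let ident = Ident::new("answer", Span::call_site());
+ /// assert_eq!(ident.to_string(), "answer");
+ /// ```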
+ pub fn new(string: &str, span: Span) -> Ident {
+ Ident(bridge::client::Ident::new(string, span.0, false))
+ }
+
+ /// Same as `Ident::new`, but creates a raw identifier (`r#ident`).
+ /// The `string` argument must be a valid identifier permitted by the language
+ /// (including keywords, e.g. `fn`). Keywords which are usable in path segments
+ /// (e.g. `self`, `super`) are not supported, and will cause a panic.
+ pub fn new_raw(string: &str, span: Span) -> Ident {
+ Ident(bridge::client::Ident::new(string, span.0, true))
+ }
+
+ /// Returns the span of this `Ident`, encompassing the entire string returned
+ /// by [`to_string`](Self::to_string).
+ pub fn span(&self) -> Span {
+ Span(self.0.span())
+ }
+
+ /// Configures the span of this `Ident`, possibly changing its hygiene context.
+ pub fn set_span(&mut self, span: Span) {
+ self.0 = self.0.with_span(span.0);
+ }
+}
+
+/// Prints the identifier as a string that should be losslessly convertible
+/// back into the same identifier.
+impl fmt::Display for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.to_string())
+ }
+}
+
+impl fmt::Debug for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("Ident")
+ .field("ident", &self.to_string())
+ .field("span", &self.span())
+ .finish()
+ }
+}
+
+/// A literal string (`"hello"`), byte string (`b"hello"`),
+/// character (`'a'`), byte character (`b'a'`), an integer or floating point number
+/// with or without a suffix (`1`, `1u8`, `2.3`, `2.3f32`).
+/// Boolean literals like `true` and `false` do not belong here, they are `Ident`s.
+#[derive(Clone)]
+pub struct Literal(bridge::client::Literal);
+
+macro_rules! suffixed_int_literals {
+ ($($name:ident => $kind:ident,)*) => ($(
+ /// Creates a new suffixed integer literal with the specified value.
+ ///
+ /// This function will create an integer like `1u32` where the integer
+ /// value specified is the first part of the token and the integer type
+ /// is appended as the suffix at the end.
+ /// Literals created from negative numbers might not survive round-trips through
+ /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+ ///
+ /// Literals created through this method have the `Span::call_site()`
+ /// span by default, which can be configured with the `set_span` method
+ /// below.
+ pub fn $name(n: $kind) -> Literal {
+ Literal(bridge::client::Literal::typed_integer(&n.to_string(), stringify!($kind)))
+ }
+ )*)
+}
+
+macro_rules! unsuffixed_int_literals {
+ ($($name:ident => $kind:ident,)*) => ($(
+ /// Creates a new unsuffixed integer literal with the specified value.
+ ///
+ /// This function will create an integer like `1` where the integer
+ /// value specified is the first part of the token. No suffix is
+ /// specified on this token, meaning that invocations like
+ /// `Literal::i8_unsuffixed(1)` are equivalent to
+ /// `Literal::u32_unsuffixed(1)`.
+ /// Literals created from negative numbers might not survive round-trips through
+ /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+ ///
+ /// Literals created through this method have the `Span::call_site()`
+ /// span by default, which can be configured with the `set_span` method
+ /// below.
+ pub fn $name(n: $kind) -> Literal {
+ Literal(bridge::client::Literal::integer(&n.to_string()))
+ }
+ )*)
+}
+
+impl Literal {
+ suffixed_int_literals! {
+ u8_suffixed => u8,
+ u16_suffixed => u16,
+ u32_suffixed => u32,
+ u64_suffixed => u64,
+ u128_suffixed => u128,
+ usize_suffixed => usize,
+ i8_suffixed => i8,
+ i16_suffixed => i16,
+ i32_suffixed => i32,
+ i64_suffixed => i64,
+ i128_suffixed => i128,
+ isize_suffixed => isize,
+ }
+
+ unsuffixed_int_literals! {
+ u8_unsuffixed => u8,
+ u16_unsuffixed => u16,
+ u32_unsuffixed => u32,
+ u64_unsuffixed => u64,
+ u128_unsuffixed => u128,
+ usize_unsuffixed => usize,
+ i8_unsuffixed => i8,
+ i16_unsuffixed => i16,
+ i32_unsuffixed => i32,
+ i64_unsuffixed => i64,
+ i128_unsuffixed => i128,
+ isize_unsuffixed => isize,
+ }
+
+ /// Creates a new unsuffixed floating-point literal.
+ ///
+ /// This constructor is similar to those like `Literal::i8_unsuffixed` where
+ /// the float's value is emitted directly into the token but no suffix is
+ /// used, so it may be inferred to be a `f64` later in the compiler.
+ /// Literals created from negative numbers might not survive round-trips through
+ /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+ ///
+ /// # Panics
+ ///
+ /// This function requires that the specified float is finite, for
+ /// example if it is infinity or NaN this function will panic.
+ pub fn f32_unsuffixed(n: f32) -> Literal {
+ if !n.is_finite() {
+ panic!("Invalid float literal {}", n);
+ }
+ let mut repr = n.to_string();
+ if !repr.contains('.') {
+ repr.push_str(".0");
+ }
+ Literal(bridge::client::Literal::float(&repr))
+ }
+
+ /// Creates a new suffixed floating-point literal.
+ ///
+ /// This constructor will create a literal like `1.0f32` where the value
+ /// specified is the preceding part of the token and `f32` is the suffix of
+ /// the token. This token will always be inferred to be an `f32` in the
+ /// compiler.
+ /// Literals created from negative numbers might not survive round-trips through
+ /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+ ///
+ /// # Panics
+ ///
+ /// This function requires that the specified float is finite, for
+ /// example if it is infinity or NaN this function will panic.
+ pub fn f32_suffixed(n: f32) -> Literal {
+ if !n.is_finite() {
+ panic!("Invalid float literal {}", n);
+ }
+ Literal(bridge::client::Literal::f32(&n.to_string()))
+ }
+
+ /// Creates a new unsuffixed floating-point literal.
+ ///
+ /// This constructor is similar to those like `Literal::i8_unsuffixed` where
+ /// the float's value is emitted directly into the token but no suffix is
+ /// used, so it may be inferred to be a `f64` later in the compiler.
+ /// Literals created from negative numbers might not survive round-trips through
+ /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+ ///
+ /// # Panics
+ ///
+ /// This function requires that the specified float is finite, for
+ /// example if it is infinity or NaN this function will panic.
+ pub fn f64_unsuffixed(n: f64) -> Literal {
+ if !n.is_finite() {
+ panic!("Invalid float literal {}", n);
+ }
+ let mut repr = n.to_string();
+ if !repr.contains('.') {
+ repr.push_str(".0");
+ }
+ Literal(bridge::client::Literal::float(&repr))
+ }
+
+ /// Creates a new suffixed floating-point literal.
+ ///
+ /// This constructor will create a literal like `1.0f64` where the value
+ /// specified is the preceding part of the token and `f64` is the suffix of
+ /// the token. This token will always be inferred to be an `f64` in the
+ /// compiler.
+ /// Literals created from negative numbers might not survive round-trips through
+ /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+ ///
+ /// # Panics
+ ///
+ /// This function requires that the specified float is finite, for
+ /// example if it is infinity or NaN this function will panic.
+ pub fn f64_suffixed(n: f64) -> Literal {
+ if !n.is_finite() {
+ panic!("Invalid float literal {}", n);
+ }
+ Literal(bridge::client::Literal::f64(&n.to_string()))
+ }
+
+ /// String literal.
+ pub fn string(string: &str) -> Literal {
+ Literal(bridge::client::Literal::string(string))
+ }
+
+ /// Character literal.
+ pub fn character(ch: char) -> Literal {
+ Literal(bridge::client::Literal::character(ch))
+ }
+
+ /// Byte string literal.
+ pub fn byte_string(bytes: &[u8]) -> Literal {
+ Literal(bridge::client::Literal::byte_string(bytes))
+ }
+
+ /// Returns the span encompassing this literal.
+ pub fn span(&self) -> Span {
+ Span(self.0.span())
+ }
+
+ /// Configures the span associated for this literal.
+ pub fn set_span(&mut self, span: Span) {
+ self.0.set_span(span.0);
+ }
+
+ /// Returns a `Span` that is a subset of `self.span()` containing only the
+ /// source bytes in range `range`. Returns `None` if the would-be trimmed
+ /// span is outside the bounds of `self`.
+ // FIXME(SergioBenitez): check that the byte range starts and ends at a
+ // UTF-8 boundary of the source. Otherwise, it's likely that a panic will
+ // occur elsewhere when the source text is printed.
+ // FIXME(SergioBenitez): there is no way for the user to know what
+ // `self.span()` actually maps to, so this method can currently only be
+ // called blindly. For example, `to_string()` for the character 'c' returns
+ // "'\u{63}'"; there is no way for the user to know whether the source text
+ // was 'c' or whether it was '\u{63}'.
+ pub fn subspan<R: RangeBounds<usize>>(&self, range: R) -> Option<Span> {
+ self.0.subspan(range.start_bound().cloned(), range.end_bound().cloned()).map(Span)
+ }
+}
+
+/// Parse a single literal from its stringified representation.
+///
+/// In order to parse successfully, the input string must not contain anything
+/// but the literal token. Specifically, it must not contain whitespace or
+/// comments in addition to the literal.
+///
+/// The resulting literal token will have a `Span::call_site()` span.
+///
+/// NOTE: some errors may cause panics instead of returning `LexError`. We
+/// reserve the right to change these errors into `LexError`s later.
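+///
+/// Illustrative sketch (a proc-macro context is assumed, hence `ignore`):
+///
+/// ```ignore
+/// let lit: Literal = "42u8".parse().unwrap();
+/// ```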
+impl FromStr for Literal {
+ type Err = LexError;
+
+ fn from_str(src: &str) -> Result<Self, LexError> {
+ match bridge::client::Literal::from_str(src) {
+ Ok(literal) => Ok(Literal(literal)),
+ Err(()) => Err(LexError),
+ }
+ }
+}
+
+/// Prints the literal as a string that should be losslessly convertible
+/// back into the same literal (except for possible rounding for floating point literals).
+impl fmt::Display for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.to_string())
+ }
+}
+
+impl fmt::Debug for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.0.fmt(f)
+ }
+}
+
+/// Tracked access to environment variables.
+pub mod tracked_env {
+ use std::env::{self, VarError};
+ use std::ffi::OsStr;
+
+ /// Retrieve an environment variable and add it to build dependency info.
+ /// The build system executing the compiler will know that the variable was accessed during
+ /// compilation, and will be able to rerun the build when the value of that variable changes.
+ /// Besides the dependency tracking this function should be equivalent to `env::var` from the
+ /// standard library, except that the argument must be UTF-8.
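+ ///
+ /// Hedged sketch (the variable name is only an example):
+ ///
+ /// ```ignore
+ /// let manifest_dir = tracked_env::var("CARGO_MANIFEST_DIR");
+ /// ```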
+ pub fn var<K: AsRef<OsStr> + AsRef<str>>(key: K) -> Result<String, VarError> {
+ let key: &str = key.as_ref();
+ let value = env::var(key);
+ super::bridge::client::FreeFunctions::track_env_var(key, value.as_deref().ok());
+ value
+ }
+}
+
+/// Tracked access to additional files.
+pub mod tracked_path {
+
+ /// Track a file explicitly.
+ ///
+ /// Commonly used for tracking asset preprocessing.
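+ ///
+ /// Hedged sketch (the path is only an example):
+ ///
+ /// ```ignore
+ /// tracked_path::path("assets/template.html");
+ /// ```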
+ pub fn path<P: AsRef<str>>(path: P) {
+ let path: &str = path.as_ref();
+ super::bridge::client::FreeFunctions::track_path(path);
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/quote.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/quote.rs
new file mode 100644
index 000000000..b539ab9c0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/quote.rs
@@ -0,0 +1,140 @@
+//! # Quasiquoter
+//! This file contains the implementation internals of the quasiquoter provided by `quote!`.
+
+//! This quasiquoter uses macros 2.0 hygiene to reliably access
+//! items from `proc_macro`, to build a `proc_macro::TokenStream`.
+
+use super::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
+
+macro_rules! quote_tt {
+ (($($t:tt)*)) => { Group::new(Delimiter::Parenthesis, quote!($($t)*)) };
+ ([$($t:tt)*]) => { Group::new(Delimiter::Bracket, quote!($($t)*)) };
+ ({$($t:tt)*}) => { Group::new(Delimiter::Brace, quote!($($t)*)) };
+ (,) => { Punct::new(',', Spacing::Alone) };
+ (.) => { Punct::new('.', Spacing::Alone) };
+ (:) => { Punct::new(':', Spacing::Alone) };
+ (;) => { Punct::new(';', Spacing::Alone) };
+ (!) => { Punct::new('!', Spacing::Alone) };
+ (<) => { Punct::new('<', Spacing::Alone) };
+ (>) => { Punct::new('>', Spacing::Alone) };
+ (&) => { Punct::new('&', Spacing::Alone) };
+ (=) => { Punct::new('=', Spacing::Alone) };
+ ($i:ident) => { Ident::new(stringify!($i), Span::def_site()) };
+}
+
+macro_rules! quote_ts {
+ ((@ $($t:tt)*)) => { $($t)* };
+ (::) => {
+ [
+ TokenTree::from(Punct::new(':', Spacing::Joint)),
+ TokenTree::from(Punct::new(':', Spacing::Alone)),
+ ].iter()
+ .cloned()
+ .map(|mut x| {
+ x.set_span(Span::def_site());
+ x
+ })
+ .collect::<TokenStream>()
+ };
+ ($t:tt) => { TokenTree::from(quote_tt!($t)) };
+}
+
+/// Simpler version of the real `quote!` macro, implemented solely
+/// through `macro_rules`, for bootstrapping the real implementation
+/// (see the `quote` function), which does not have access to the
+/// real `quote!` macro due to the `proc_macro` crate not being
+/// able to depend on itself.
+///
+/// Note: supported tokens are a subset of the real `quote!`, but
+/// unquoting is different: instead of `$x`, this uses `(@ expr)`.
+macro_rules! quote {
+ () => { TokenStream::new() };
+ ($($t:tt)*) => {
+ [
+ $(TokenStream::from(quote_ts!($t)),)*
+ ].iter().cloned().collect::<TokenStream>()
+ };
+}
+
+/// Quote a `TokenStream` into a `TokenStream`.
+/// This is the actual implementation of the `quote!()` proc macro.
+///
+/// It is loaded by the compiler in `register_builtin_macros`.
+pub fn quote(stream: TokenStream) -> TokenStream {
+ if stream.is_empty() {
+ return quote!(crate::TokenStream::new());
+ }
+ let proc_macro_crate = quote!(crate);
+ let mut after_dollar = false;
+ let tokens = stream
+ .into_iter()
+ .filter_map(|tree| {
+ if after_dollar {
+ after_dollar = false;
+ match tree {
+ TokenTree::Ident(_) => {
+ return Some(quote!(Into::<crate::TokenStream>::into(
+ Clone::clone(&(@ tree))),));
+ }
+ TokenTree::Punct(ref tt) if tt.as_char() == '$' => {}
+ _ => panic!("`$` must be followed by an ident or `$` in `quote!`"),
+ }
+ } else if let TokenTree::Punct(ref tt) = tree {
+ if tt.as_char() == '$' {
+ after_dollar = true;
+ return None;
+ }
+ }
+
+ Some(quote!(crate::TokenStream::from((@ match tree {
+ TokenTree::Punct(tt) => quote!(crate::TokenTree::Punct(crate::Punct::new(
+ (@ TokenTree::from(Literal::character(tt.as_char()))),
+ (@ match tt.spacing() {
+ Spacing::Alone => quote!(crate::Spacing::Alone),
+ Spacing::Joint => quote!(crate::Spacing::Joint),
+ }),
+ ))),
+ TokenTree::Group(tt) => quote!(crate::TokenTree::Group(crate::Group::new(
+ (@ match tt.delimiter() {
+ Delimiter::Parenthesis => quote!(crate::Delimiter::Parenthesis),
+ Delimiter::Brace => quote!(crate::Delimiter::Brace),
+ Delimiter::Bracket => quote!(crate::Delimiter::Bracket),
+ Delimiter::None => quote!(crate::Delimiter::None),
+ }),
+ (@ quote(tt.stream())),
+ ))),
+ TokenTree::Ident(tt) => quote!(crate::TokenTree::Ident(crate::Ident::new(
+ (@ TokenTree::from(Literal::string(&tt.to_string()))),
+ (@ quote_span(proc_macro_crate.clone(), tt.span())),
+ ))),
+ TokenTree::Literal(tt) => quote!(crate::TokenTree::Literal({
+ let mut iter = (@ TokenTree::from(Literal::string(&tt.to_string())))
+ .parse::<crate::TokenStream>()
+ .unwrap()
+ .into_iter();
+ if let (Some(crate::TokenTree::Literal(mut lit)), None) =
+ (iter.next(), iter.next())
+ {
+ lit.set_span((@ quote_span(proc_macro_crate.clone(), tt.span())));
+ lit
+ } else {
+ unreachable!()
+ }
+ }))
+ })),))
+ })
+ .collect::<TokenStream>();
+
+ if after_dollar {
+ panic!("unexpected trailing `$` in `quote!`");
+ }
+
+ quote!([(@ tokens)].iter().cloned().collect::<crate::TokenStream>())
+}
+
+/// Quote a `Span` into a `TokenStream`.
+/// This is needed to implement a custom quoter.
+pub fn quote_span(proc_macro_crate: TokenStream, span: Span) -> TokenStream {
+ let id = span.save_span();
+ quote!((@ proc_macro_crate ) ::Span::recover_proc_macro_span((@ TokenTree::from(Literal::usize_unsuffixed(id)))))
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/ra_server.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/ra_server.rs
new file mode 100644
index 000000000..ebdfca00d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/ra_server.rs
@@ -0,0 +1,819 @@
+//! Rustc proc-macro server implementation with tt
+//!
+//! Based on the idea from <https://github.com/fedochet/rust-proc-macro-expander>.
+//! The lib-proc-macro server backend is `TokenStream`-agnostic, so any
+//! `TokenStream` implementation can be provided.
+//! The original implementation by fedochet uses proc-macro2 as the backend;
+//! we use tt instead for better integration with RA.
+//!
+//! FIXME: No span and source file information is implemented yet
+
+use super::proc_macro::bridge::{self, server};
+
+use std::collections::HashMap;
+use std::hash::Hash;
+use std::iter::FromIterator;
+use std::ops::Bound;
+use std::{ascii, vec::IntoIter};
+
+type Group = tt::Subtree;
+type TokenTree = tt::TokenTree;
+type Punct = tt::Punct;
+type Spacing = tt::Spacing;
+type Literal = tt::Literal;
+type Span = tt::TokenId;
+
+#[derive(Debug, Clone)]
+pub struct TokenStream {
+ pub token_trees: Vec<TokenTree>,
+}
+
+impl TokenStream {
+ pub fn new() -> Self {
+ TokenStream { token_trees: Default::default() }
+ }
+
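+ /// Wraps a `tt::Subtree`: a delimited subtree becomes a single token tree,
+ /// while an undelimited one is flattened into its child token trees.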
+ pub fn with_subtree(subtree: tt::Subtree) -> Self {
+ if subtree.delimiter.is_some() {
+ TokenStream { token_trees: vec![TokenTree::Subtree(subtree)] }
+ } else {
+ TokenStream { token_trees: subtree.token_trees }
+ }
+ }
+
+ pub fn into_subtree(self) -> tt::Subtree {
+ tt::Subtree { delimiter: None, token_trees: self.token_trees }
+ }
+
+ pub fn is_empty(&self) -> bool {
+ self.token_trees.is_empty()
+ }
+}
+
+/// Creates a token stream containing a single token tree.
+impl From<TokenTree> for TokenStream {
+ fn from(tree: TokenTree) -> TokenStream {
+ TokenStream { token_trees: vec![tree] }
+ }
+}
+
+/// Collects a number of token trees into a single stream.
+impl FromIterator<TokenTree> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
+ trees.into_iter().map(TokenStream::from).collect()
+ }
+}
+
+/// A "flattening" operation on token streams, collects token trees
+/// from multiple token streams into a single stream.
+impl FromIterator<TokenStream> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+ let mut builder = TokenStreamBuilder::new();
+ streams.into_iter().for_each(|stream| builder.push(stream));
+ builder.build()
+ }
+}
+
+impl Extend<TokenTree> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, trees: I) {
+ self.extend(trees.into_iter().map(TokenStream::from));
+ }
+}
+
+impl Extend<TokenStream> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+ for item in streams {
+ for tkn in item {
+ match tkn {
+ tt::TokenTree::Subtree(subtree) if subtree.delimiter.is_none() => {
+ self.token_trees.extend(subtree.token_trees);
+ }
+ _ => {
+ self.token_trees.push(tkn);
+ }
+ }
+ }
+ }
+ }
+}
+
+#[derive(Clone)]
+pub struct SourceFile {
+ // FIXME stub
+}
+
+type Level = super::proc_macro::Level;
+type LineColumn = super::proc_macro::LineColumn;
+
+/// A structure representing a diagnostic message and associated children
+/// messages.
+#[derive(Clone, Debug)]
+pub struct Diagnostic {
+ level: Level,
+ message: String,
+ spans: Vec<Span>,
+ children: Vec<Diagnostic>,
+}
+
+impl Diagnostic {
+ /// Creates a new diagnostic with the given `level` and `message`.
+ pub fn new<T: Into<String>>(level: Level, message: T) -> Diagnostic {
+ Diagnostic { level, message: message.into(), spans: vec![], children: vec![] }
+ }
+}
+
+// The rustc server `Ident` has to be `Copy` (see the `type Ident` bounds in
+// the bridge), so we use an interned index as a `Copy` stand-in.
+#[derive(Hash, Eq, PartialEq, Copy, Clone)]
+pub struct IdentId(u32);
+
+#[derive(Clone, Hash, Eq, PartialEq)]
+struct IdentData(tt::Ident);
+
+#[derive(Default)]
+struct IdentInterner {
+ idents: HashMap<IdentData, u32>,
+ ident_data: Vec<IdentData>,
+}
+
+impl IdentInterner {
+ fn intern(&mut self, data: &IdentData) -> u32 {
+ if let Some(index) = self.idents.get(data) {
+ return *index;
+ }
+
+ let index = self.idents.len() as u32;
+ self.ident_data.push(data.clone());
+ self.idents.insert(data.clone(), index);
+ index
+ }
+
+ fn get(&self, index: u32) -> &IdentData {
+ &self.ident_data[index as usize]
+ }
+
+ #[allow(unused)]
+ fn get_mut(&mut self, index: u32) -> &mut IdentData {
+ self.ident_data.get_mut(index as usize).expect("Should be consistent")
+ }
+}
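+
+// Illustrative sketch (not part of the upstream source): the interner hands
+// out stable `u32` indices, so identical ident data always maps to the same
+// index and can be recovered via `get`:
+//
+//     let mut interner = IdentInterner::default();
+//     let data = IdentData(tt::Ident { text: "foo".into(), id: tt::TokenId::unspecified() });
+//     let idx = interner.intern(&data);
+//     assert_eq!(interner.intern(&data), idx); // interning is idempotent
+//     assert_eq!(interner.get(idx).0.text, "foo");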
+
+pub struct TokenStreamBuilder {
+ acc: TokenStream,
+}
+
+/// Public implementation details for the `TokenStream` type, such as iterators.
+pub mod token_stream {
+ use std::str::FromStr;
+
+ use super::{TokenStream, TokenTree};
+
+ /// An iterator over `TokenStream`'s `TokenTree`s.
+    /// The iteration is "shallow", i.e., the iterator doesn't recurse into delimited groups,
+ /// and returns whole groups as token trees.
+ impl IntoIterator for TokenStream {
+ type Item = TokenTree;
+ type IntoIter = super::IntoIter<TokenTree>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ self.token_trees.into_iter()
+ }
+ }
+
+ type LexError = String;
+
+ /// Attempts to break the string into tokens and parse those tokens into a token stream.
+ /// May fail for a number of reasons, for example, if the string contains unbalanced delimiters
+ /// or characters not existing in the language.
+ /// All tokens in the parsed stream get `Span::call_site()` spans.
+ ///
+ /// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
+ /// change these errors into `LexError`s later.
+ impl FromStr for TokenStream {
+ type Err = LexError;
+
+ fn from_str(src: &str) -> Result<TokenStream, LexError> {
+ let (subtree, _token_map) =
+ mbe::parse_to_token_tree(src).ok_or("Failed to parse from mbe")?;
+
+ let subtree = subtree_replace_token_ids_with_unspecified(subtree);
+ Ok(TokenStream::with_subtree(subtree))
+ }
+ }
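+
+    // Illustrative sketch (not part of the upstream source): a minimal
+    // round-trip through the `FromStr` impl above; every resulting token
+    // carries `tt::TokenId::unspecified()` because the token map is discarded.
+    //
+    //     use std::str::FromStr;
+    //     let stream = TokenStream::from_str("fn foo() {}").expect("valid tokens");
+    //     assert!(!stream.is_empty());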
+
+ impl ToString for TokenStream {
+ fn to_string(&self) -> String {
+ tt::pretty(&self.token_trees)
+ }
+ }
+
+ fn subtree_replace_token_ids_with_unspecified(subtree: tt::Subtree) -> tt::Subtree {
+ tt::Subtree {
+ delimiter: subtree
+ .delimiter
+ .map(|d| tt::Delimiter { id: tt::TokenId::unspecified(), ..d }),
+ token_trees: subtree
+ .token_trees
+ .into_iter()
+ .map(token_tree_replace_token_ids_with_unspecified)
+ .collect(),
+ }
+ }
+
+ fn token_tree_replace_token_ids_with_unspecified(tt: tt::TokenTree) -> tt::TokenTree {
+ match tt {
+ tt::TokenTree::Leaf(leaf) => {
+ tt::TokenTree::Leaf(leaf_replace_token_ids_with_unspecified(leaf))
+ }
+ tt::TokenTree::Subtree(subtree) => {
+ tt::TokenTree::Subtree(subtree_replace_token_ids_with_unspecified(subtree))
+ }
+ }
+ }
+
+ fn leaf_replace_token_ids_with_unspecified(leaf: tt::Leaf) -> tt::Leaf {
+ match leaf {
+ tt::Leaf::Literal(lit) => {
+ tt::Leaf::Literal(tt::Literal { id: tt::TokenId::unspecified(), ..lit })
+ }
+ tt::Leaf::Punct(punct) => {
+ tt::Leaf::Punct(tt::Punct { id: tt::TokenId::unspecified(), ..punct })
+ }
+ tt::Leaf::Ident(ident) => {
+ tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), ..ident })
+ }
+ }
+ }
+}
+
+impl TokenStreamBuilder {
+ fn new() -> TokenStreamBuilder {
+ TokenStreamBuilder { acc: TokenStream::new() }
+ }
+
+ fn push(&mut self, stream: TokenStream) {
+ self.acc.extend(stream.into_iter())
+ }
+
+ fn build(self) -> TokenStream {
+ self.acc
+ }
+}
+
+pub struct FreeFunctions;
+
+#[derive(Clone)]
+pub struct TokenStreamIter {
+ trees: IntoIter<TokenTree>,
+}
+
+#[derive(Default)]
+pub struct RustAnalyzer {
+ ident_interner: IdentInterner,
+ // FIXME: store span information here.
+}
+
+impl server::Types for RustAnalyzer {
+ type FreeFunctions = FreeFunctions;
+ type TokenStream = TokenStream;
+ type TokenStreamBuilder = TokenStreamBuilder;
+ type TokenStreamIter = TokenStreamIter;
+ type Group = Group;
+ type Punct = Punct;
+ type Ident = IdentId;
+ type Literal = Literal;
+ type SourceFile = SourceFile;
+ type Diagnostic = Diagnostic;
+ type Span = Span;
+ type MultiSpan = Vec<Span>;
+}
+
+impl server::FreeFunctions for RustAnalyzer {
+ fn track_env_var(&mut self, _var: &str, _value: Option<&str>) {
+ // FIXME: track env var accesses
+ // https://github.com/rust-lang/rust/pull/71858
+ }
+ fn track_path(&mut self, _path: &str) {}
+}
+
+impl server::TokenStream for RustAnalyzer {
+ fn new(&mut self) -> Self::TokenStream {
+ Self::TokenStream::new()
+ }
+
+ fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
+ stream.is_empty()
+ }
+ fn from_str(&mut self, src: &str) -> Self::TokenStream {
+ use std::str::FromStr;
+
+ Self::TokenStream::from_str(src).expect("cannot parse string")
+ }
+ fn to_string(&mut self, stream: &Self::TokenStream) -> String {
+ stream.to_string()
+ }
+ fn from_token_tree(
+ &mut self,
+ tree: bridge::TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>,
+ ) -> Self::TokenStream {
+ match tree {
+ bridge::TokenTree::Group(group) => {
+ let tree = TokenTree::from(group);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+
+ bridge::TokenTree::Ident(IdentId(index)) => {
+ let IdentData(ident) = self.ident_interner.get(index).clone();
+ let ident: tt::Ident = ident;
+ let leaf = tt::Leaf::from(ident);
+ let tree = TokenTree::from(leaf);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+
+ bridge::TokenTree::Literal(literal) => {
+ let leaf = tt::Leaf::from(literal);
+ let tree = TokenTree::from(leaf);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+
+ bridge::TokenTree::Punct(p) => {
+ let leaf = tt::Leaf::from(p);
+ let tree = TokenTree::from(leaf);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+ }
+ }
+
+ fn into_iter(&mut self, stream: Self::TokenStream) -> Self::TokenStreamIter {
+ let trees: Vec<TokenTree> = stream.into_iter().collect();
+ TokenStreamIter { trees: trees.into_iter() }
+ }
+
+ fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
+ Ok(self_.clone())
+ }
+}
+
+impl server::TokenStreamBuilder for RustAnalyzer {
+ fn new(&mut self) -> Self::TokenStreamBuilder {
+ Self::TokenStreamBuilder::new()
+ }
+ fn push(&mut self, builder: &mut Self::TokenStreamBuilder, stream: Self::TokenStream) {
+ builder.push(stream)
+ }
+ fn build(&mut self, builder: Self::TokenStreamBuilder) -> Self::TokenStream {
+ builder.build()
+ }
+}
+
+impl server::TokenStreamIter for RustAnalyzer {
+ fn next(
+ &mut self,
+ iter: &mut Self::TokenStreamIter,
+ ) -> Option<bridge::TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>> {
+ iter.trees.next().map(|tree| match tree {
+ TokenTree::Subtree(group) => bridge::TokenTree::Group(group),
+ TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
+ bridge::TokenTree::Ident(IdentId(self.ident_interner.intern(&IdentData(ident))))
+ }
+ TokenTree::Leaf(tt::Leaf::Literal(literal)) => bridge::TokenTree::Literal(literal),
+ TokenTree::Leaf(tt::Leaf::Punct(punct)) => bridge::TokenTree::Punct(punct),
+ })
+ }
+}
+
+fn delim_to_internal(d: bridge::Delimiter) -> Option<tt::Delimiter> {
+ let kind = match d {
+ bridge::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis,
+ bridge::Delimiter::Brace => tt::DelimiterKind::Brace,
+ bridge::Delimiter::Bracket => tt::DelimiterKind::Bracket,
+ bridge::Delimiter::None => return None,
+ };
+ Some(tt::Delimiter { id: tt::TokenId::unspecified(), kind })
+}
+
+fn delim_to_external(d: Option<tt::Delimiter>) -> bridge::Delimiter {
+ match d.map(|it| it.kind) {
+ Some(tt::DelimiterKind::Parenthesis) => bridge::Delimiter::Parenthesis,
+ Some(tt::DelimiterKind::Brace) => bridge::Delimiter::Brace,
+ Some(tt::DelimiterKind::Bracket) => bridge::Delimiter::Bracket,
+ None => bridge::Delimiter::None,
+ }
+}
+
+fn spacing_to_internal(spacing: bridge::Spacing) -> Spacing {
+ match spacing {
+ bridge::Spacing::Alone => Spacing::Alone,
+ bridge::Spacing::Joint => Spacing::Joint,
+ }
+}
+
+fn spacing_to_external(spacing: Spacing) -> bridge::Spacing {
+ match spacing {
+ Spacing::Alone => bridge::Spacing::Alone,
+ Spacing::Joint => bridge::Spacing::Joint,
+ }
+}
+
+impl server::Group for RustAnalyzer {
+ fn new(&mut self, delimiter: bridge::Delimiter, stream: Self::TokenStream) -> Self::Group {
+ Self::Group { delimiter: delim_to_internal(delimiter), token_trees: stream.token_trees }
+ }
+ fn delimiter(&mut self, group: &Self::Group) -> bridge::Delimiter {
+ delim_to_external(group.delimiter)
+ }
+
+    // NOTE: The returned stream does not include the group's delimiter.
+ fn stream(&mut self, group: &Self::Group) -> Self::TokenStream {
+ TokenStream { token_trees: group.token_trees.clone() }
+ }
+
+ fn span(&mut self, group: &Self::Group) -> Self::Span {
+ group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified)
+ }
+
+ fn set_span(&mut self, group: &mut Self::Group, span: Self::Span) {
+ if let Some(delim) = &mut group.delimiter {
+ delim.id = span;
+ }
+ }
+
+ fn span_open(&mut self, group: &Self::Group) -> Self::Span {
+ // FIXME we only store one `TokenId` for the delimiters
+ group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified)
+ }
+
+ fn span_close(&mut self, group: &Self::Group) -> Self::Span {
+ // FIXME we only store one `TokenId` for the delimiters
+ group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified)
+ }
+}
+
+impl server::Punct for RustAnalyzer {
+ fn new(&mut self, ch: char, spacing: bridge::Spacing) -> Self::Punct {
+ tt::Punct {
+ char: ch,
+ spacing: spacing_to_internal(spacing),
+ id: tt::TokenId::unspecified(),
+ }
+ }
+ fn as_char(&mut self, punct: Self::Punct) -> char {
+ punct.char
+ }
+ fn spacing(&mut self, punct: Self::Punct) -> bridge::Spacing {
+ spacing_to_external(punct.spacing)
+ }
+ fn span(&mut self, punct: Self::Punct) -> Self::Span {
+ punct.id
+ }
+ fn with_span(&mut self, punct: Self::Punct, span: Self::Span) -> Self::Punct {
+ tt::Punct { id: span, ..punct }
+ }
+}
+
+impl server::Ident for RustAnalyzer {
+ fn new(&mut self, string: &str, span: Self::Span, _is_raw: bool) -> Self::Ident {
+ IdentId(self.ident_interner.intern(&IdentData(tt::Ident { text: string.into(), id: span })))
+ }
+
+ fn span(&mut self, ident: Self::Ident) -> Self::Span {
+ self.ident_interner.get(ident.0).0.id
+ }
+ fn with_span(&mut self, ident: Self::Ident, span: Self::Span) -> Self::Ident {
+ let data = self.ident_interner.get(ident.0);
+ let new = IdentData(tt::Ident { id: span, ..data.0.clone() });
+ IdentId(self.ident_interner.intern(&new))
+ }
+}
+
+impl server::Literal for RustAnalyzer {
+ fn debug_kind(&mut self, _literal: &Self::Literal) -> String {
+ // r-a: debug_kind and suffix are unsupported; corresponding client code has been changed to not call these.
+ // They must still be present to be ABI-compatible and work with upstream proc_macro.
+ "".to_owned()
+ }
+ fn from_str(&mut self, s: &str) -> Result<Self::Literal, ()> {
+ Ok(Literal { text: s.into(), id: tt::TokenId::unspecified() })
+ }
+ fn symbol(&mut self, literal: &Self::Literal) -> String {
+ literal.text.to_string()
+ }
+ fn suffix(&mut self, _literal: &Self::Literal) -> Option<String> {
+ None
+ }
+
+ fn to_string(&mut self, literal: &Self::Literal) -> String {
+ literal.to_string()
+ }
+
+ fn integer(&mut self, n: &str) -> Self::Literal {
+ let n = match n.parse::<i128>() {
+ Ok(n) => n.to_string(),
+ Err(_) => n.parse::<u128>().unwrap().to_string(),
+ };
+ Literal { text: n.into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn typed_integer(&mut self, n: &str, kind: &str) -> Self::Literal {
+ macro_rules! def_suffixed_integer {
+ ($kind:ident, $($ty:ty),*) => {
+ match $kind {
+ $(
+ stringify!($ty) => {
+ let n: $ty = n.parse().unwrap();
+ format!(concat!("{}", stringify!($ty)), n)
+ }
+ )*
+ _ => unimplemented!("unknown args for typed_integer: n {}, kind {}", n, $kind),
+ }
+ }
+ }
+
+ let text = def_suffixed_integer! {kind, u8, u16, u32, u64, u128, usize, i8, i16, i32, i64, i128, isize};
+
+ Literal { text: text.into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn float(&mut self, n: &str) -> Self::Literal {
+ let n: f64 = n.parse().unwrap();
+ let mut text = f64::to_string(&n);
+ if !text.contains('.') {
+ text += ".0"
+ }
+ Literal { text: text.into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn f32(&mut self, n: &str) -> Self::Literal {
+ let n: f32 = n.parse().unwrap();
+ let text = format!("{}f32", n);
+ Literal { text: text.into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn f64(&mut self, n: &str) -> Self::Literal {
+ let n: f64 = n.parse().unwrap();
+ let text = format!("{}f64", n);
+ Literal { text: text.into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn string(&mut self, string: &str) -> Self::Literal {
+ let mut escaped = String::new();
+ for ch in string.chars() {
+ escaped.extend(ch.escape_debug());
+ }
+ Literal { text: format!("\"{}\"", escaped).into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn character(&mut self, ch: char) -> Self::Literal {
+ Literal { text: format!("'{}'", ch).into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn byte_string(&mut self, bytes: &[u8]) -> Self::Literal {
+ let string = bytes
+ .iter()
+ .cloned()
+ .flat_map(ascii::escape_default)
+ .map(Into::<char>::into)
+ .collect::<String>();
+
+ Literal { text: format!("b\"{}\"", string).into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn span(&mut self, literal: &Self::Literal) -> Self::Span {
+ literal.id
+ }
+
+ fn set_span(&mut self, literal: &mut Self::Literal, span: Self::Span) {
+ literal.id = span;
+ }
+
+ fn subspan(
+ &mut self,
+ _literal: &Self::Literal,
+ _start: Bound<usize>,
+ _end: Bound<usize>,
+ ) -> Option<Self::Span> {
+ // FIXME handle span
+ None
+ }
+}
+
+impl server::SourceFile for RustAnalyzer {
+ // FIXME these are all stubs
+ fn eq(&mut self, _file1: &Self::SourceFile, _file2: &Self::SourceFile) -> bool {
+ true
+ }
+ fn path(&mut self, _file: &Self::SourceFile) -> String {
+ String::new()
+ }
+ fn is_real(&mut self, _file: &Self::SourceFile) -> bool {
+ true
+ }
+}
+
+impl server::Diagnostic for RustAnalyzer {
+ fn new(&mut self, level: Level, msg: &str, spans: Self::MultiSpan) -> Self::Diagnostic {
+ let mut diag = Diagnostic::new(level, msg);
+ diag.spans = spans;
+ diag
+ }
+
+ fn sub(
+ &mut self,
+ _diag: &mut Self::Diagnostic,
+ _level: Level,
+ _msg: &str,
+ _spans: Self::MultiSpan,
+ ) {
+ // FIXME handle diagnostic
+ }
+
+ fn emit(&mut self, _diag: Self::Diagnostic) {
+ // FIXME handle diagnostic
+ // diag.emit()
+ }
+}
+
+impl server::Span for RustAnalyzer {
+ fn debug(&mut self, span: Self::Span) -> String {
+ format!("{:?}", span.0)
+ }
+ fn def_site(&mut self) -> Self::Span {
+ // MySpan(self.span_interner.intern(&MySpanData(Span::def_site())))
+ // FIXME handle span
+ tt::TokenId::unspecified()
+ }
+ fn call_site(&mut self) -> Self::Span {
+ // MySpan(self.span_interner.intern(&MySpanData(Span::call_site())))
+ // FIXME handle span
+ tt::TokenId::unspecified()
+ }
+ fn source_file(&mut self, _span: Self::Span) -> Self::SourceFile {
+ SourceFile {}
+ }
+ fn save_span(&mut self, _span: Self::Span) -> usize {
+ // FIXME stub
+ 0
+ }
+ fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span {
+ // FIXME stub
+ tt::TokenId::unspecified()
+ }
+    /// Recent feature, not yet part of the stable `proc_macro` API.
+    ///
+    /// See <https://github.com/rust-lang/rust/pull/55780>.
+ fn source_text(&mut self, _span: Self::Span) -> Option<String> {
+ None
+ }
+
+ fn parent(&mut self, _span: Self::Span) -> Option<Self::Span> {
+ // FIXME handle span
+ None
+ }
+ fn source(&mut self, span: Self::Span) -> Self::Span {
+ // FIXME handle span
+ span
+ }
+ fn start(&mut self, _span: Self::Span) -> LineColumn {
+ // FIXME handle span
+ LineColumn { line: 0, column: 0 }
+ }
+ fn end(&mut self, _span: Self::Span) -> LineColumn {
+ // FIXME handle span
+ LineColumn { line: 0, column: 0 }
+ }
+ fn join(&mut self, first: Self::Span, _second: Self::Span) -> Option<Self::Span> {
+ // Just return the first span again, because some macros will unwrap the result.
+ Some(first)
+ }
+ fn resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span {
+ // FIXME handle span
+ tt::TokenId::unspecified()
+ }
+
+ fn mixed_site(&mut self) -> Self::Span {
+ // FIXME handle span
+ tt::TokenId::unspecified()
+ }
+
+ fn after(&mut self, _self_: Self::Span) -> Self::Span {
+ tt::TokenId::unspecified()
+ }
+
+ fn before(&mut self, _self_: Self::Span) -> Self::Span {
+ tt::TokenId::unspecified()
+ }
+}
+
+impl server::MultiSpan for RustAnalyzer {
+ fn new(&mut self) -> Self::MultiSpan {
+ // FIXME handle span
+ vec![]
+ }
+
+ fn push(&mut self, other: &mut Self::MultiSpan, span: Self::Span) {
+        // TODO
+ other.push(span)
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::super::proc_macro::bridge::server::Literal;
+ use super::*;
+
+ #[test]
+ fn test_ra_server_literals() {
+ let mut srv = RustAnalyzer { ident_interner: IdentInterner::default() };
+ assert_eq!(srv.integer("1234").text, "1234");
+
+ assert_eq!(srv.typed_integer("12", "u8").text, "12u8");
+ assert_eq!(srv.typed_integer("255", "u16").text, "255u16");
+ assert_eq!(srv.typed_integer("1234", "u32").text, "1234u32");
+ assert_eq!(srv.typed_integer("15846685", "u64").text, "15846685u64");
+ assert_eq!(srv.typed_integer("15846685258", "u128").text, "15846685258u128");
+ assert_eq!(srv.typed_integer("156788984", "usize").text, "156788984usize");
+ assert_eq!(srv.typed_integer("127", "i8").text, "127i8");
+ assert_eq!(srv.typed_integer("255", "i16").text, "255i16");
+ assert_eq!(srv.typed_integer("1234", "i32").text, "1234i32");
+ assert_eq!(srv.typed_integer("15846685", "i64").text, "15846685i64");
+ assert_eq!(srv.typed_integer("15846685258", "i128").text, "15846685258i128");
+ assert_eq!(srv.float("0").text, "0.0");
+ assert_eq!(srv.float("15684.5867").text, "15684.5867");
+ assert_eq!(srv.f32("15684.58").text, "15684.58f32");
+ assert_eq!(srv.f64("15684.58").text, "15684.58f64");
+
+ assert_eq!(srv.string("hello_world").text, "\"hello_world\"");
+ assert_eq!(srv.character('c').text, "'c'");
+ assert_eq!(srv.byte_string(b"1234586\x88").text, "b\"1234586\\x88\"");
+
+ // u128::max
+ assert_eq!(
+ srv.integer("340282366920938463463374607431768211455").text,
+ "340282366920938463463374607431768211455"
+ );
+ // i128::min
+ assert_eq!(
+ srv.integer("-170141183460469231731687303715884105728").text,
+ "-170141183460469231731687303715884105728"
+ );
+ }
+
+ #[test]
+ fn test_ra_server_to_string() {
+ let s = TokenStream {
+ token_trees: vec![
+ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "struct".into(),
+ id: tt::TokenId::unspecified(),
+ })),
+ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "T".into(),
+ id: tt::TokenId::unspecified(),
+ })),
+ tt::TokenTree::Subtree(tt::Subtree {
+ delimiter: Some(tt::Delimiter {
+ id: tt::TokenId::unspecified(),
+ kind: tt::DelimiterKind::Brace,
+ }),
+ token_trees: vec![],
+ }),
+ ],
+ };
+
+ assert_eq!(s.to_string(), "struct T {}");
+ }
+
+ #[test]
+ fn test_ra_server_from_str() {
+ use std::str::FromStr;
+ let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree {
+ delimiter: Some(tt::Delimiter {
+ id: tt::TokenId::unspecified(),
+ kind: tt::DelimiterKind::Parenthesis,
+ }),
+ token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "a".into(),
+ id: tt::TokenId::unspecified(),
+ }))],
+ });
+
+ let t1 = TokenStream::from_str("(a)").unwrap();
+ assert_eq!(t1.token_trees.len(), 1);
+ assert_eq!(t1.token_trees[0], subtree_paren_a);
+
+ let t2 = TokenStream::from_str("(a);").unwrap();
+ assert_eq!(t2.token_trees.len(), 2);
+ assert_eq!(t2.token_trees[0], subtree_paren_a);
+
+ let underscore = TokenStream::from_str("_").unwrap();
+ assert_eq!(
+ underscore.token_trees[0],
+ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "_".into(),
+ id: tt::TokenId::unspecified(),
+ }))
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/mod.rs
new file mode 100644
index 000000000..76e89e319
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/mod.rs
@@ -0,0 +1,105 @@
+//! Macro ABI for version 1.63 of rustc
+
+#[allow(dead_code)]
+#[doc(hidden)]
+mod proc_macro;
+
+#[allow(dead_code)]
+#[doc(hidden)]
+mod ra_server;
+
+use libloading::Library;
+use proc_macro_api::ProcMacroKind;
+
+use super::PanicMessage;
+
+pub use ra_server::TokenStream;
+
+pub(crate) struct Abi {
+ exported_macros: Vec<proc_macro::bridge::client::ProcMacro>,
+}
+
+impl From<proc_macro::bridge::PanicMessage> for PanicMessage {
+ fn from(p: proc_macro::bridge::PanicMessage) -> Self {
+ Self { message: p.as_str().map(|s| s.to_string()) }
+ }
+}
+
+impl Abi {
+ pub unsafe fn from_lib(lib: &Library, symbol_name: String) -> Result<Abi, libloading::Error> {
+ let macros: libloading::Symbol<'_, &&[proc_macro::bridge::client::ProcMacro]> =
+ lib.get(symbol_name.as_bytes())?;
+ Ok(Self { exported_macros: macros.to_vec() })
+ }
+
+ pub fn expand(
+ &self,
+ macro_name: &str,
+ macro_body: &tt::Subtree,
+ attributes: Option<&tt::Subtree>,
+ ) -> Result<tt::Subtree, PanicMessage> {
+ let parsed_body = TokenStream::with_subtree(macro_body.clone());
+
+ let parsed_attributes =
+ attributes.map_or(TokenStream::new(), |attr| TokenStream::with_subtree(attr.clone()));
+
+ for proc_macro in &self.exported_macros {
+ match proc_macro {
+ proc_macro::bridge::client::ProcMacro::CustomDerive {
+ trait_name, client, ..
+ } if *trait_name == macro_name => {
+ let res = client.run(
+ &proc_macro::bridge::server::SameThread,
+ ra_server::RustAnalyzer::default(),
+ parsed_body,
+ true,
+ );
+ return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
+ }
+ proc_macro::bridge::client::ProcMacro::Bang { name, client }
+ if *name == macro_name =>
+ {
+ let res = client.run(
+ &proc_macro::bridge::server::SameThread,
+ ra_server::RustAnalyzer::default(),
+ parsed_body,
+ true,
+ );
+ return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
+ }
+ proc_macro::bridge::client::ProcMacro::Attr { name, client }
+ if *name == macro_name =>
+ {
+ let res = client.run(
+ &proc_macro::bridge::server::SameThread,
+ ra_server::RustAnalyzer::default(),
+ parsed_attributes,
+ parsed_body,
+ true,
+ );
+ return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
+ }
+ _ => continue,
+ }
+ }
+
+ Err(proc_macro::bridge::PanicMessage::String("Nothing to expand".to_string()).into())
+ }
+
+ pub fn list_macros(&self) -> Vec<(String, ProcMacroKind)> {
+ self.exported_macros
+ .iter()
+ .map(|proc_macro| match proc_macro {
+ proc_macro::bridge::client::ProcMacro::CustomDerive { trait_name, .. } => {
+ (trait_name.to_string(), ProcMacroKind::CustomDerive)
+ }
+ proc_macro::bridge::client::ProcMacro::Bang { name, .. } => {
+ (name.to_string(), ProcMacroKind::FuncLike)
+ }
+ proc_macro::bridge::client::ProcMacro::Attr { name, .. } => {
+ (name.to_string(), ProcMacroKind::Attr)
+ }
+ })
+ .collect()
+ }
+}
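+
+// Illustrative sketch (not part of the upstream source): how a caller might
+// drive this ABI after loading a proc-macro dylib. `lib`, `symbol_name` and
+// `macro_body` are hypothetical; `from_lib` is unsafe because the dylib must
+// actually match the 1.63 ABI expected by this module.
+//
+//     let abi = unsafe { Abi::from_lib(&lib, symbol_name)? };
+//     let names: Vec<String> =
+//         abi.list_macros().into_iter().map(|(name, _kind)| name).collect();
+//     let expanded: tt::Subtree = abi.expand("my_macro", &macro_body, None)?;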
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/buffer.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/buffer.rs
new file mode 100644
index 000000000..48030f8d8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/buffer.rs
@@ -0,0 +1,156 @@
+//! Buffer management for same-process client<->server communication.
+
+use std::io::{self, Write};
+use std::mem;
+use std::ops::{Deref, DerefMut};
+use std::slice;
+
+#[repr(C)]
+pub struct Buffer {
+ data: *mut u8,
+ len: usize,
+ capacity: usize,
+ reserve: extern "C" fn(Buffer, usize) -> Buffer,
+ drop: extern "C" fn(Buffer),
+}
+
+unsafe impl Sync for Buffer {}
+unsafe impl Send for Buffer {}
+
+impl Default for Buffer {
+ #[inline]
+ fn default() -> Self {
+ Self::from(vec![])
+ }
+}
+
+impl Deref for Buffer {
+ type Target = [u8];
+ #[inline]
+ fn deref(&self) -> &[u8] {
+ unsafe { slice::from_raw_parts(self.data as *const u8, self.len) }
+ }
+}
+
+impl DerefMut for Buffer {
+ #[inline]
+ fn deref_mut(&mut self) -> &mut [u8] {
+ unsafe { slice::from_raw_parts_mut(self.data, self.len) }
+ }
+}
+
+impl Buffer {
+ #[inline]
+ pub(super) fn new() -> Self {
+ Self::default()
+ }
+
+ #[inline]
+ pub(super) fn clear(&mut self) {
+ self.len = 0;
+ }
+
+ #[inline]
+ pub(super) fn take(&mut self) -> Self {
+ mem::take(self)
+ }
+
+    // The array method is kept separate from `extend_from_slice` because, for
+    // small arrays, codegen can be more efficient (avoiding a memmove call);
+    // LLVM is currently unable to make that optimization when going through
+    // `extend_from_slice`.
+ #[inline]
+ pub(super) fn extend_from_array<const N: usize>(&mut self, xs: &[u8; N]) {
+ if xs.len() > (self.capacity - self.len) {
+ let b = self.take();
+ *self = (b.reserve)(b, xs.len());
+ }
+ unsafe {
+ xs.as_ptr().copy_to_nonoverlapping(self.data.add(self.len), xs.len());
+ self.len += xs.len();
+ }
+ }
+
+ #[inline]
+ pub(super) fn extend_from_slice(&mut self, xs: &[u8]) {
+ if xs.len() > (self.capacity - self.len) {
+ let b = self.take();
+ *self = (b.reserve)(b, xs.len());
+ }
+ unsafe {
+ xs.as_ptr().copy_to_nonoverlapping(self.data.add(self.len), xs.len());
+ self.len += xs.len();
+ }
+ }
+
+ #[inline]
+ pub(super) fn push(&mut self, v: u8) {
+ // The code here is taken from Vec::push, and we know that reserve()
+ // will panic if we're exceeding isize::MAX bytes and so there's no need
+ // to check for overflow.
+ if self.len == self.capacity {
+ let b = self.take();
+ *self = (b.reserve)(b, 1);
+ }
+ unsafe {
+ *self.data.add(self.len) = v;
+ self.len += 1;
+ }
+ }
+}
+
+impl Write for Buffer {
+ #[inline]
+ fn write(&mut self, xs: &[u8]) -> io::Result<usize> {
+ self.extend_from_slice(xs);
+ Ok(xs.len())
+ }
+
+ #[inline]
+ fn write_all(&mut self, xs: &[u8]) -> io::Result<()> {
+ self.extend_from_slice(xs);
+ Ok(())
+ }
+
+ #[inline]
+ fn flush(&mut self) -> io::Result<()> {
+ Ok(())
+ }
+}
+
+impl Drop for Buffer {
+ #[inline]
+ fn drop(&mut self) {
+ let b = self.take();
+ (b.drop)(b);
+ }
+}
+
+impl From<Vec<u8>> for Buffer {
+ fn from(mut v: Vec<u8>) -> Self {
+ let (data, len, capacity) = (v.as_mut_ptr(), v.len(), v.capacity());
+ mem::forget(v);
+
+ // This utility function is nested in here because it can *only*
+ // be safely called on `Buffer`s created by *this* `proc_macro`.
+ fn to_vec(b: Buffer) -> Vec<u8> {
+ unsafe {
+ let Buffer { data, len, capacity, .. } = b;
+ mem::forget(b);
+ Vec::from_raw_parts(data, len, capacity)
+ }
+ }
+
+ extern "C" fn reserve(b: Buffer, additional: usize) -> Buffer {
+ let mut v = to_vec(b);
+ v.reserve(additional);
+ Buffer::from(v)
+ }
+
+ extern "C" fn drop(b: Buffer) {
+ mem::drop(to_vec(b));
+ }
+
+ Buffer { data, len, capacity, reserve, drop }
+ }
+}
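+
+// Illustrative sketch (not part of the upstream source): a `Buffer` is a
+// `Vec<u8>` whose grow and free operations go through the `reserve`/`drop`
+// function pointers captured above, so the allocation is always managed by
+// the side that created it, even across an ABI boundary:
+//
+//     let mut buf = Buffer::from(vec![1u8, 2, 3]);
+//     buf.push(4);                     // may grow through the captured `reserve`
+//     buf.extend_from_slice(&[5, 6]);
+//     assert_eq!(&buf[..], &[1, 2, 3, 4, 5, 6]);
+//     drop(buf);                       // frees through the captured `drop`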
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/client.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/client.rs
new file mode 100644
index 000000000..102027d14
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/client.rs
@@ -0,0 +1,510 @@
+//! Client-side types.
+
+use super::*;
+
+use std::marker::PhantomData;
+
+macro_rules! define_handles {
+ (
+ 'owned: $($oty:ident,)*
+ 'interned: $($ity:ident,)*
+ ) => {
+ #[repr(C)]
+ #[allow(non_snake_case)]
+ pub struct HandleCounters {
+ $($oty: AtomicUsize,)*
+ $($ity: AtomicUsize,)*
+ }
+
+ impl HandleCounters {
+ // FIXME(eddyb) use a reference to the `static COUNTERS`, instead of
+ // a wrapper `fn` pointer, once `const fn` can reference `static`s.
+ extern "C" fn get() -> &'static Self {
+ static COUNTERS: HandleCounters = HandleCounters {
+ $($oty: AtomicUsize::new(1),)*
+ $($ity: AtomicUsize::new(1),)*
+ };
+ &COUNTERS
+ }
+ }
+
+ // FIXME(eddyb) generate the definition of `HandleStore` in `server.rs`.
+ #[repr(C)]
+ #[allow(non_snake_case)]
+ pub(super) struct HandleStore<S: server::Types> {
+ $($oty: handle::OwnedStore<S::$oty>,)*
+ $($ity: handle::InternedStore<S::$ity>,)*
+ }
+
+ impl<S: server::Types> HandleStore<S> {
+ pub(super) fn new(handle_counters: &'static HandleCounters) -> Self {
+ HandleStore {
+ $($oty: handle::OwnedStore::new(&handle_counters.$oty),)*
+ $($ity: handle::InternedStore::new(&handle_counters.$ity),)*
+ }
+ }
+ }
+
+ $(
+ #[repr(C)]
+ pub(crate) struct $oty {
+ handle: handle::Handle,
+ // Prevent Send and Sync impls. `!Send`/`!Sync` is the usual
+ // way of doing this, but that requires unstable features.
+ // rust-analyzer uses this code and avoids unstable features.
+ _marker: PhantomData<*mut ()>,
+ }
+
+ // Forward `Drop::drop` to the inherent `drop` method.
+ impl Drop for $oty {
+ fn drop(&mut self) {
+ $oty {
+ handle: self.handle,
+ _marker: PhantomData,
+ }.drop();
+ }
+ }
+
+ impl<S> Encode<S> for $oty {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ let handle = self.handle;
+ mem::forget(self);
+ handle.encode(w, s);
+ }
+ }
+
+ impl<S: server::Types> DecodeMut<'_, '_, HandleStore<server::MarkedTypes<S>>>
+ for Marked<S::$oty, $oty>
+ {
+ fn decode(r: &mut Reader<'_>, s: &mut HandleStore<server::MarkedTypes<S>>) -> Self {
+ s.$oty.take(handle::Handle::decode(r, &mut ()))
+ }
+ }
+
+ impl<S> Encode<S> for &$oty {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.handle.encode(w, s);
+ }
+ }
+
+ impl<'s, S: server::Types> Decode<'_, 's, HandleStore<server::MarkedTypes<S>>>
+ for &'s Marked<S::$oty, $oty>
+ {
+ fn decode(r: &mut Reader<'_>, s: &'s HandleStore<server::MarkedTypes<S>>) -> Self {
+ &s.$oty[handle::Handle::decode(r, &mut ())]
+ }
+ }
+
+ impl<S> Encode<S> for &mut $oty {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.handle.encode(w, s);
+ }
+ }
+
+ impl<'s, S: server::Types> DecodeMut<'_, 's, HandleStore<server::MarkedTypes<S>>>
+ for &'s mut Marked<S::$oty, $oty>
+ {
+ fn decode(
+ r: &mut Reader<'_>,
+ s: &'s mut HandleStore<server::MarkedTypes<S>>
+ ) -> Self {
+ &mut s.$oty[handle::Handle::decode(r, &mut ())]
+ }
+ }
+
+ impl<S: server::Types> Encode<HandleStore<server::MarkedTypes<S>>>
+ for Marked<S::$oty, $oty>
+ {
+ fn encode(self, w: &mut Writer, s: &mut HandleStore<server::MarkedTypes<S>>) {
+ s.$oty.alloc(self).encode(w, s);
+ }
+ }
+
+ impl<S> DecodeMut<'_, '_, S> for $oty {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ $oty {
+ handle: handle::Handle::decode(r, s),
+ _marker: PhantomData,
+ }
+ }
+ }
+ )*
+
+ $(
+ #[repr(C)]
+ #[derive(Copy, Clone, PartialEq, Eq, Hash)]
+ pub(crate) struct $ity {
+ handle: handle::Handle,
+ // Prevent Send and Sync impls. `!Send`/`!Sync` is the usual
+ // way of doing this, but that requires unstable features.
+ // rust-analyzer uses this code and avoids unstable features.
+ _marker: PhantomData<*mut ()>,
+ }
+
+ impl<S> Encode<S> for $ity {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.handle.encode(w, s);
+ }
+ }
+
+ impl<S: server::Types> DecodeMut<'_, '_, HandleStore<server::MarkedTypes<S>>>
+ for Marked<S::$ity, $ity>
+ {
+ fn decode(r: &mut Reader<'_>, s: &mut HandleStore<server::MarkedTypes<S>>) -> Self {
+ s.$ity.copy(handle::Handle::decode(r, &mut ()))
+ }
+ }
+
+ impl<S: server::Types> Encode<HandleStore<server::MarkedTypes<S>>>
+ for Marked<S::$ity, $ity>
+ {
+ fn encode(self, w: &mut Writer, s: &mut HandleStore<server::MarkedTypes<S>>) {
+ s.$ity.alloc(self).encode(w, s);
+ }
+ }
+
+ impl<S> DecodeMut<'_, '_, S> for $ity {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ $ity {
+ handle: handle::Handle::decode(r, s),
+ _marker: PhantomData,
+ }
+ }
+ }
+ )*
+ }
+}
+define_handles! {
+ 'owned:
+ FreeFunctions,
+ TokenStream,
+ Group,
+ Literal,
+ SourceFile,
+ MultiSpan,
+ Diagnostic,
+
+ 'interned:
+ Punct,
+ Ident,
+ Span,
+}
+
+// FIXME(eddyb) generate these impls by pattern-matching on the
+// names of methods - also could use the presence of `fn drop`
+// to distinguish between 'owned and 'interned, above.
+// Alternatively, special "modes" could be listed for types in `with_api!`
+// instead of pattern-matching on methods, here and in the server declaration.
+
+impl Clone for TokenStream {
+ fn clone(&self) -> Self {
+ self.clone()
+ }
+}
+
+impl Clone for Group {
+ fn clone(&self) -> Self {
+ self.clone()
+ }
+}
+
+impl Clone for Literal {
+ fn clone(&self) -> Self {
+ self.clone()
+ }
+}
+
+impl fmt::Debug for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("Literal")
+ // format the kind without quotes, as in `kind: Float`
+ .field("kind", &format_args!("{}", &self.debug_kind()))
+ .field("symbol", &self.symbol())
+ // format `Some("...")` on one line even in {:#?} mode
+ .field("suffix", &format_args!("{:?}", &self.suffix()))
+ .field("span", &self.span())
+ .finish()
+ }
+}
+
+impl Clone for SourceFile {
+ fn clone(&self) -> Self {
+ self.clone()
+ }
+}
+
+impl fmt::Debug for Span {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.debug())
+ }
+}
+
+macro_rules! define_client_side {
+ ($($name:ident {
+ $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)*;)*
+ }),* $(,)?) => {
+ $(impl $name {
+ $(pub(crate) fn $method($($arg: $arg_ty),*) $(-> $ret_ty)* {
+ Bridge::with(|bridge| {
+ let mut buf = bridge.cached_buffer.take();
+
+ buf.clear();
+ api_tags::Method::$name(api_tags::$name::$method).encode(&mut buf, &mut ());
+ reverse_encode!(buf; $($arg),*);
+
+ buf = bridge.dispatch.call(buf);
+
+ let r = Result::<_, PanicMessage>::decode(&mut &buf[..], &mut ());
+
+ bridge.cached_buffer = buf;
+
+ r.unwrap_or_else(|e| panic::resume_unwind(e.into()))
+ })
+ })*
+ })*
+ }
+}
+with_api!(self, self, define_client_side);
+
+enum BridgeState<'a> {
+ /// No server is currently connected to this client.
+ NotConnected,
+
+ /// A server is connected and available for requests.
+ Connected(Bridge<'a>),
+
+ /// Access to the bridge is being exclusively acquired
+ /// (e.g., during `BridgeState::with`).
+ InUse,
+}
+
+enum BridgeStateL {}
+
+impl<'a> scoped_cell::ApplyL<'a> for BridgeStateL {
+ type Out = BridgeState<'a>;
+}
+
+thread_local! {
+ static BRIDGE_STATE: scoped_cell::ScopedCell<BridgeStateL> =
+ scoped_cell::ScopedCell::new(BridgeState::NotConnected);
+}
+
+impl BridgeState<'_> {
+ /// Take exclusive control of the thread-local
+ /// `BridgeState`, and pass it to `f`, mutably.
+ /// The state will be restored after `f` exits, even
+ /// by panic, including modifications made to it by `f`.
+ ///
+ /// N.B., while `f` is running, the thread-local state
+ /// is `BridgeState::InUse`.
+ fn with<R>(f: impl FnOnce(&mut BridgeState<'_>) -> R) -> R {
+ BRIDGE_STATE.with(|state| {
+ state.replace(BridgeState::InUse, |mut state| {
+ // FIXME(#52812) pass `f` directly to `replace` when `RefMutL` is gone
+ f(&mut *state)
+ })
+ })
+ }
+}
+
+impl Bridge<'_> {
+ pub(crate) fn is_available() -> bool {
+ BridgeState::with(|state| match state {
+ BridgeState::Connected(_) | BridgeState::InUse => true,
+ BridgeState::NotConnected => false,
+ })
+ }
+
+ fn enter<R>(self, f: impl FnOnce() -> R) -> R {
+ let force_show_panics = self.force_show_panics;
+ // Hide the default panic output within `proc_macro` expansions.
+ // NB. the server can't do this because it may use a different libstd.
+ static HIDE_PANICS_DURING_EXPANSION: Once = Once::new();
+ HIDE_PANICS_DURING_EXPANSION.call_once(|| {
+ let prev = panic::take_hook();
+ panic::set_hook(Box::new(move |info| {
+ let show = BridgeState::with(|state| match state {
+ BridgeState::NotConnected => true,
+ BridgeState::Connected(_) | BridgeState::InUse => force_show_panics,
+ });
+ if show {
+ prev(info)
+ }
+ }));
+ });
+
+ BRIDGE_STATE.with(|state| state.set(BridgeState::Connected(self), f))
+ }
+
+ fn with<R>(f: impl FnOnce(&mut Bridge<'_>) -> R) -> R {
+ BridgeState::with(|state| match state {
+ BridgeState::NotConnected => {
+ panic!("procedural macro API is used outside of a procedural macro");
+ }
+ BridgeState::InUse => {
+ panic!("procedural macro API is used while it's already in use");
+ }
+ BridgeState::Connected(bridge) => f(bridge),
+ })
+ }
+}
+
+/// A client-side RPC entry-point, which may be using a different `proc_macro`
+/// from the one used by the server, but can be invoked compatibly.
+///
+/// Note that the (phantom) `I` ("input") and `O` ("output") type parameters
+/// decorate the `Client<I, O>` with the RPC "interface" of the entry-point, but
+/// do not themselves participate in the ABI at all; they only facilitate type-checking.
+///
+/// E.g. `Client<TokenStream, TokenStream>` is the common proc macro interface,
+/// used for `#[proc_macro] fn foo(input: TokenStream) -> TokenStream`,
+/// indicating that the RPC input and output will be serialized token streams,
+/// and forcing the use of APIs that take/return `S::TokenStream`, server-side.
+#[repr(C)]
+pub struct Client<I, O> {
+ // FIXME(eddyb) use a reference to the `static COUNTERS`, instead of
+ // a wrapper `fn` pointer, once `const fn` can reference `static`s.
+ pub(super) get_handle_counters: extern "C" fn() -> &'static HandleCounters,
+
+ pub(super) run: extern "C" fn(Bridge<'_>) -> Buffer,
+
+ pub(super) _marker: PhantomData<fn(I) -> O>,
+}
+
+impl<I, O> Copy for Client<I, O> {}
+impl<I, O> Clone for Client<I, O> {
+ fn clone(&self) -> Self {
+ *self
+ }
+}
+
+/// Client-side helper for handling client panics, entering the bridge,
+/// deserializing input and serializing output.
+// FIXME(eddyb) maybe replace `Bridge::enter` with this?
+fn run_client<A: for<'a, 's> DecodeMut<'a, 's, ()>, R: Encode<()>>(
+ mut bridge: Bridge<'_>,
+ f: impl FnOnce(A) -> R,
+) -> Buffer {
+ // The initial `cached_buffer` contains the input.
+ let mut buf = bridge.cached_buffer.take();
+
+ panic::catch_unwind(panic::AssertUnwindSafe(|| {
+ bridge.enter(|| {
+ let reader = &mut &buf[..];
+ let input = A::decode(reader, &mut ());
+
+ // Put the `cached_buffer` back in the `Bridge`, for requests.
+ Bridge::with(|bridge| bridge.cached_buffer = buf.take());
+
+ let output = f(input);
+
+ // Take the `cached_buffer` back out, for the output value.
+ buf = Bridge::with(|bridge| bridge.cached_buffer.take());
+
+ // HACK(eddyb) Separate encoding a success value (`Ok(output)`)
+ // from encoding a panic (`Err(e: PanicMessage)`) to avoid
+ // having handles outside the `bridge.enter(|| ...)` scope, and
+ // to catch panics that could happen while encoding the success.
+ //
+ // Note that panics should be impossible beyond this point, but
+ // this is defensively trying to avoid any accidental panicking
+ // reaching the `extern "C"` (which should `abort` but might not
+ // at the moment, so this is also potentially preventing UB).
+ buf.clear();
+ Ok::<_, ()>(output).encode(&mut buf, &mut ());
+ })
+ }))
+ .map_err(PanicMessage::from)
+ .unwrap_or_else(|e| {
+ buf.clear();
+ Err::<(), _>(e).encode(&mut buf, &mut ());
+ });
+ buf
+}
+
+impl Client<super::super::TokenStream, super::super::TokenStream> {
+ pub const fn expand1(
+ f: impl Fn(super::super::TokenStream) -> super::super::TokenStream + Copy,
+ ) -> Self {
+ Client {
+ get_handle_counters: HandleCounters::get,
+ run: super::selfless_reify::reify_to_extern_c_fn_hrt_bridge(move |bridge| {
+ run_client(bridge, |input| f(super::super::TokenStream(input)).0)
+ }),
+ _marker: PhantomData,
+ }
+ }
+}
+
+impl Client<(super::super::TokenStream, super::super::TokenStream), super::super::TokenStream> {
+ pub const fn expand2(
+ f: impl Fn(super::super::TokenStream, super::super::TokenStream) -> super::super::TokenStream
+ + Copy,
+ ) -> Self {
+ Client {
+ get_handle_counters: HandleCounters::get,
+ run: super::selfless_reify::reify_to_extern_c_fn_hrt_bridge(move |bridge| {
+ run_client(bridge, |(input, input2)| {
+ f(super::super::TokenStream(input), super::super::TokenStream(input2)).0
+ })
+ }),
+ _marker: PhantomData,
+ }
+ }
+}
+
+#[repr(C)]
+#[derive(Copy, Clone)]
+pub enum ProcMacro {
+ CustomDerive {
+ trait_name: &'static str,
+ attributes: &'static [&'static str],
+ client: Client<super::super::TokenStream, super::super::TokenStream>,
+ },
+
+ Attr {
+ name: &'static str,
+ client: Client<
+ (super::super::TokenStream, super::super::TokenStream),
+ super::super::TokenStream,
+ >,
+ },
+
+ Bang {
+ name: &'static str,
+ client: Client<super::super::TokenStream, super::super::TokenStream>,
+ },
+}
+
+impl ProcMacro {
+ pub fn name(&self) -> &'static str {
+ match self {
+ ProcMacro::CustomDerive { trait_name, .. } => trait_name,
+ ProcMacro::Attr { name, .. } => name,
+ ProcMacro::Bang { name, .. } => name,
+ }
+ }
+
+ pub const fn custom_derive(
+ trait_name: &'static str,
+ attributes: &'static [&'static str],
+ expand: impl Fn(super::super::TokenStream) -> super::super::TokenStream + Copy,
+ ) -> Self {
+ ProcMacro::CustomDerive { trait_name, attributes, client: Client::expand1(expand) }
+ }
+
+ pub const fn attr(
+ name: &'static str,
+ expand: impl Fn(super::super::TokenStream, super::super::TokenStream) -> super::super::TokenStream
+ + Copy,
+ ) -> Self {
+ ProcMacro::Attr { name, client: Client::expand2(expand) }
+ }
+
+ pub const fn bang(
+ name: &'static str,
+ expand: impl Fn(super::super::TokenStream) -> super::super::TokenStream + Copy,
+ ) -> Self {
+ ProcMacro::Bang { name, client: Client::expand1(expand) }
+ }
+}
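+
+// Illustrative sketch (not part of the upstream source): the kind of static
+// table of `ProcMacro` entries a proc-macro dylib exports, which the loader
+// reads back via `Abi::from_lib`. The expander functions here are hypothetical
+// placeholders:
+//
+//     fn passthrough(input: super::super::TokenStream) -> super::super::TokenStream {
+//         input
+//     }
+//     static MACROS: &[ProcMacro] = &[
+//         ProcMacro::bang("my_bang", passthrough),
+//         ProcMacro::custom_derive("MyDerive", &[], passthrough),
+//     ];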
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/closure.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/closure.rs
new file mode 100644
index 000000000..d371ae3ce
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/closure.rs
@@ -0,0 +1,32 @@
+//! Closure type (equivalent to `&mut dyn FnMut(A) -> R`) that's `repr(C)`.
+
+use std::marker::PhantomData;
+
+#[repr(C)]
+pub struct Closure<'a, A, R> {
+ call: unsafe extern "C" fn(*mut Env, A) -> R,
+ env: *mut Env,
+ // Prevent Send and Sync impls. `!Send`/`!Sync` is the usual way of doing
+ // this, but that requires unstable features. rust-analyzer uses this code
+ // and avoids unstable features.
+ //
+ // The `'a` lifetime parameter represents the lifetime of `Env`.
+ _marker: PhantomData<*mut &'a mut ()>,
+}
+
+struct Env;
+
+impl<'a, A, R, F: FnMut(A) -> R> From<&'a mut F> for Closure<'a, A, R> {
+ fn from(f: &'a mut F) -> Self {
+ unsafe extern "C" fn call<A, R, F: FnMut(A) -> R>(env: *mut Env, arg: A) -> R {
+ (*(env as *mut _ as *mut F))(arg)
+ }
+ Closure { call: call::<A, R, F>, env: f as *mut _ as *mut Env, _marker: PhantomData }
+ }
+}
+
+impl<'a, A, R> Closure<'a, A, R> {
+ pub fn call(&mut self, arg: A) -> R {
+ unsafe { (self.call)(self.env, arg) }
+ }
+}
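+
+// Illustrative sketch (not part of the upstream source): a `Closure` borrows a
+// concrete `FnMut` and exposes it as a C-ABI function pointer plus an opaque
+// environment pointer, so it can cross between differently-compiled copies of
+// this crate:
+//
+//     let mut count = 0u32;
+//     let mut add = |x: u32| { count += x; count };
+//     let mut closure = Closure::from(&mut add);
+//     assert_eq!(closure.call(2), 2);
+//     assert_eq!(closure.call(3), 5);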
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/handle.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/handle.rs
new file mode 100644
index 000000000..c219a9465
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/handle.rs
@@ -0,0 +1,89 @@
+//! Server-side handles and storage for per-handle data.
+
+use std::collections::{BTreeMap, HashMap};
+use std::hash::{BuildHasher, Hash};
+use std::num::NonZeroU32;
+use std::ops::{Index, IndexMut};
+use std::sync::atomic::{AtomicUsize, Ordering};
+
+pub(super) type Handle = NonZeroU32;
+
+/// A store that associates values of type `T` with numeric handles. A value can
+/// be looked up using its handle.
+pub(super) struct OwnedStore<T: 'static> {
+ counter: &'static AtomicUsize,
+ data: BTreeMap<Handle, T>,
+}
+
+impl<T> OwnedStore<T> {
+ pub(super) fn new(counter: &'static AtomicUsize) -> Self {
+ // Ensure the handle counter isn't 0, which would panic later,
+ // when `NonZeroU32::new` (aka `Handle::new`) is called in `alloc`.
+ assert_ne!(counter.load(Ordering::SeqCst), 0);
+
+ OwnedStore { counter, data: BTreeMap::new() }
+ }
+}
+
+impl<T> OwnedStore<T> {
+ pub(super) fn alloc(&mut self, x: T) -> Handle {
+ let counter = self.counter.fetch_add(1, Ordering::SeqCst);
+ let handle = Handle::new(counter as u32).expect("`proc_macro` handle counter overflowed");
+ assert!(self.data.insert(handle, x).is_none());
+ handle
+ }
+
+ pub(super) fn take(&mut self, h: Handle) -> T {
+ self.data.remove(&h).expect("use-after-free in `proc_macro` handle")
+ }
+}
+
+impl<T> Index<Handle> for OwnedStore<T> {
+ type Output = T;
+ fn index(&self, h: Handle) -> &T {
+ self.data.get(&h).expect("use-after-free in `proc_macro` handle")
+ }
+}
+
+impl<T> IndexMut<Handle> for OwnedStore<T> {
+ fn index_mut(&mut self, h: Handle) -> &mut T {
+ self.data.get_mut(&h).expect("use-after-free in `proc_macro` handle")
+ }
+}
+
+// HACK(eddyb) deterministic `std::collections::hash_map::RandomState` replacement
+// that doesn't require adding any dependencies to `proc_macro` (like `rustc-hash`).
+#[derive(Clone)]
+struct NonRandomState;
+
+impl BuildHasher for NonRandomState {
+ type Hasher = std::collections::hash_map::DefaultHasher;
+ #[inline]
+ fn build_hasher(&self) -> Self::Hasher {
+ Self::Hasher::new()
+ }
+}
+
+/// Like `OwnedStore`, but avoids storing any value more than once.
+pub(super) struct InternedStore<T: 'static> {
+ owned: OwnedStore<T>,
+ interner: HashMap<T, Handle, NonRandomState>,
+}
+
+impl<T: Copy + Eq + Hash> InternedStore<T> {
+ pub(super) fn new(counter: &'static AtomicUsize) -> Self {
+ InternedStore {
+ owned: OwnedStore::new(counter),
+ interner: HashMap::with_hasher(NonRandomState),
+ }
+ }
+
+ pub(super) fn alloc(&mut self, x: T) -> Handle {
+ let owned = &mut self.owned;
+ *self.interner.entry(x).or_insert_with(|| owned.alloc(x))
+ }
+
+ pub(super) fn copy(&mut self, h: Handle) -> T {
+ self.owned[h]
+ }
+}
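+
+// Illustrative sketch (not part of the upstream source): `OwnedStore` moves a
+// value behind a `NonZeroU32` handle and hands it back exactly once, while
+// `InternedStore` deduplicates `Copy` values. The shared counter must start
+// above zero:
+//
+//     static COUNTER: AtomicUsize = AtomicUsize::new(1);
+//     let mut owned: OwnedStore<String> = OwnedStore::new(&COUNTER);
+//     let h = owned.alloc("hello".to_string());
+//     assert_eq!(owned[h], "hello");      // borrow by handle
+//     assert_eq!(owned.take(h), "hello"); // consumes the slot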
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/mod.rs
new file mode 100644
index 000000000..4967da493
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/mod.rs
@@ -0,0 +1,451 @@
+//! Internal interface for communicating between a `proc_macro` client
+//! (a proc macro crate) and a `proc_macro` server (a compiler front-end).
+//!
+//! Serialization (with C ABI buffers) and unique integer handles are employed
+//! to allow safely interfacing between two copies of `proc_macro` built
+//! (from the same source) by different compilers with potentially mismatching
+//! Rust ABIs (e.g., stage0/bin/rustc vs stage1/bin/rustc during bootstrap).
+
+#![deny(unsafe_code)]
+
+pub use super::{Delimiter, Level, LineColumn, Spacing};
+use std::fmt;
+use std::hash::Hash;
+use std::marker;
+use std::mem;
+use std::ops::Bound;
+use std::panic;
+use std::sync::atomic::AtomicUsize;
+use std::sync::Once;
+use std::thread;
+
+/// Higher-order macro describing the server RPC API, allowing automatic
+/// generation of type-safe Rust APIs, both client-side and server-side.
+///
+/// `with_api!(MySelf, my_self, my_macro)` expands to:
+/// ```rust,ignore (pseudo-code)
+/// my_macro! {
+/// // ...
+/// Literal {
+/// // ...
+/// fn character(ch: char) -> MySelf::Literal;
+/// // ...
+/// fn span(my_self: &MySelf::Literal) -> MySelf::Span;
+/// fn set_span(my_self: &mut MySelf::Literal, span: MySelf::Span);
+/// },
+/// // ...
+/// }
+/// ```
+///
+/// The first two arguments serve to customize the argument names
+/// and argument/return types, to enable several different use cases:
+///
+/// If `my_self` is just `self`, then each `fn` signature can be used
+/// as-is for a method. If it's anything else (`self_` in practice),
+/// then the signatures don't have a special `self` argument, and
+/// can, therefore, have a different one introduced.
+///
+/// If `MySelf` is just `Self`, then the types are only valid inside
+/// a trait or a trait impl, where the trait has associated types
+/// for each of the API types. If non-associated types are desired,
+/// a module name (`self` in practice) can be used instead of `Self`.
+macro_rules! with_api {
+ ($S:ident, $self:ident, $m:ident) => {
+ $m! {
+ FreeFunctions {
+ fn drop($self: $S::FreeFunctions);
+ fn track_env_var(var: &str, value: Option<&str>);
+ fn track_path(path: &str);
+ },
+ TokenStream {
+ fn drop($self: $S::TokenStream);
+ fn clone($self: &$S::TokenStream) -> $S::TokenStream;
+ fn is_empty($self: &$S::TokenStream) -> bool;
+ fn expand_expr($self: &$S::TokenStream) -> Result<$S::TokenStream, ()>;
+ fn from_str(src: &str) -> $S::TokenStream;
+ fn to_string($self: &$S::TokenStream) -> String;
+ fn from_token_tree(
+ tree: TokenTree<$S::Group, $S::Punct, $S::Ident, $S::Literal>,
+ ) -> $S::TokenStream;
+ fn concat_trees(
+ base: Option<$S::TokenStream>,
+ trees: Vec<TokenTree<$S::Group, $S::Punct, $S::Ident, $S::Literal>>,
+ ) -> $S::TokenStream;
+ fn concat_streams(
+ base: Option<$S::TokenStream>,
+ streams: Vec<$S::TokenStream>,
+ ) -> $S::TokenStream;
+ fn into_trees(
+ $self: $S::TokenStream
+ ) -> Vec<TokenTree<$S::Group, $S::Punct, $S::Ident, $S::Literal>>;
+ },
+ Group {
+ fn drop($self: $S::Group);
+ fn clone($self: &$S::Group) -> $S::Group;
+ fn new(delimiter: Delimiter, stream: Option<$S::TokenStream>) -> $S::Group;
+ fn delimiter($self: &$S::Group) -> Delimiter;
+ fn stream($self: &$S::Group) -> $S::TokenStream;
+ fn span($self: &$S::Group) -> $S::Span;
+ fn span_open($self: &$S::Group) -> $S::Span;
+ fn span_close($self: &$S::Group) -> $S::Span;
+ fn set_span($self: &mut $S::Group, span: $S::Span);
+ },
+ Punct {
+ fn new(ch: char, spacing: Spacing) -> $S::Punct;
+ fn as_char($self: $S::Punct) -> char;
+ fn spacing($self: $S::Punct) -> Spacing;
+ fn span($self: $S::Punct) -> $S::Span;
+ fn with_span($self: $S::Punct, span: $S::Span) -> $S::Punct;
+ },
+ Ident {
+ fn new(string: &str, span: $S::Span, is_raw: bool) -> $S::Ident;
+ fn span($self: $S::Ident) -> $S::Span;
+ fn with_span($self: $S::Ident, span: $S::Span) -> $S::Ident;
+ },
+ Literal {
+ fn drop($self: $S::Literal);
+ fn clone($self: &$S::Literal) -> $S::Literal;
+ fn from_str(s: &str) -> Result<$S::Literal, ()>;
+ fn to_string($self: &$S::Literal) -> String;
+ fn debug_kind($self: &$S::Literal) -> String;
+ fn symbol($self: &$S::Literal) -> String;
+ fn suffix($self: &$S::Literal) -> Option<String>;
+ fn integer(n: &str) -> $S::Literal;
+ fn typed_integer(n: &str, kind: &str) -> $S::Literal;
+ fn float(n: &str) -> $S::Literal;
+ fn f32(n: &str) -> $S::Literal;
+ fn f64(n: &str) -> $S::Literal;
+ fn string(string: &str) -> $S::Literal;
+ fn character(ch: char) -> $S::Literal;
+ fn byte_string(bytes: &[u8]) -> $S::Literal;
+ fn span($self: &$S::Literal) -> $S::Span;
+ fn set_span($self: &mut $S::Literal, span: $S::Span);
+ fn subspan(
+ $self: &$S::Literal,
+ start: Bound<usize>,
+ end: Bound<usize>,
+ ) -> Option<$S::Span>;
+ },
+ SourceFile {
+ fn drop($self: $S::SourceFile);
+ fn clone($self: &$S::SourceFile) -> $S::SourceFile;
+ fn eq($self: &$S::SourceFile, other: &$S::SourceFile) -> bool;
+ fn path($self: &$S::SourceFile) -> String;
+ fn is_real($self: &$S::SourceFile) -> bool;
+ },
+ MultiSpan {
+ fn drop($self: $S::MultiSpan);
+ fn new() -> $S::MultiSpan;
+ fn push($self: &mut $S::MultiSpan, span: $S::Span);
+ },
+ Diagnostic {
+ fn drop($self: $S::Diagnostic);
+ fn new(level: Level, msg: &str, span: $S::MultiSpan) -> $S::Diagnostic;
+ fn sub(
+ $self: &mut $S::Diagnostic,
+ level: Level,
+ msg: &str,
+ span: $S::MultiSpan,
+ );
+ fn emit($self: $S::Diagnostic);
+ },
+ Span {
+ fn debug($self: $S::Span) -> String;
+ fn def_site() -> $S::Span;
+ fn call_site() -> $S::Span;
+ fn mixed_site() -> $S::Span;
+ fn source_file($self: $S::Span) -> $S::SourceFile;
+ fn parent($self: $S::Span) -> Option<$S::Span>;
+ fn source($self: $S::Span) -> $S::Span;
+ fn start($self: $S::Span) -> LineColumn;
+ fn end($self: $S::Span) -> LineColumn;
+ fn before($self: $S::Span) -> $S::Span;
+ fn after($self: $S::Span) -> $S::Span;
+ fn join($self: $S::Span, other: $S::Span) -> Option<$S::Span>;
+ fn resolved_at($self: $S::Span, at: $S::Span) -> $S::Span;
+ fn source_text($self: $S::Span) -> Option<String>;
+ fn save_span($self: $S::Span) -> usize;
+ fn recover_proc_macro_span(id: usize) -> $S::Span;
+ },
+ }
+ };
+}
+
+// FIXME(eddyb) this calls `encode` for each argument, but in reverse,
+// to match the ordering in `reverse_decode`.
+macro_rules! reverse_encode {
+ ($writer:ident;) => {};
+ ($writer:ident; $first:ident $(, $rest:ident)*) => {
+ reverse_encode!($writer; $($rest),*);
+ $first.encode(&mut $writer, &mut ());
+ }
+}
+
+// FIXME(eddyb) this calls `decode` for each argument, but in reverse,
+// to avoid borrow conflicts from borrows started by `&mut` arguments.
+macro_rules! reverse_decode {
+ ($reader:ident, $s:ident;) => {};
+ ($reader:ident, $s:ident; $first:ident: $first_ty:ty $(, $rest:ident: $rest_ty:ty)*) => {
+ reverse_decode!($reader, $s; $($rest: $rest_ty),*);
+ let $first = <$first_ty>::decode(&mut $reader, $s);
+ }
+}
+
+#[allow(unsafe_code)]
+mod buffer;
+#[forbid(unsafe_code)]
+pub mod client;
+#[allow(unsafe_code)]
+mod closure;
+#[forbid(unsafe_code)]
+mod handle;
+#[macro_use]
+#[forbid(unsafe_code)]
+mod rpc;
+#[allow(unsafe_code)]
+mod scoped_cell;
+#[allow(unsafe_code)]
+mod selfless_reify;
+#[forbid(unsafe_code)]
+pub mod server;
+
+use buffer::Buffer;
+pub use rpc::PanicMessage;
+use rpc::{Decode, DecodeMut, Encode, Reader, Writer};
+
+/// An active connection between a server and a client.
+/// The server creates the bridge (`Bridge::run_server` in `server.rs`),
+/// then passes it to the client through the function pointer in the `run`
+/// field of `client::Client`. The client holds its copy of the `Bridge`
+/// in TLS during its execution (`Bridge::{enter, with}` in `client.rs`).
+#[repr(C)]
+pub struct Bridge<'a> {
+ /// Reusable buffer (only `clear`-ed, never shrunk), primarily
+    /// used for making requests, but also for passing input to the client.
+ cached_buffer: Buffer,
+
+ /// Server-side function that the client uses to make requests.
+ dispatch: closure::Closure<'a, Buffer, Buffer>,
+
+    /// If `true`, always invoke the default panic hook.
+ force_show_panics: bool,
+
+ // Prevent Send and Sync impls. `!Send`/`!Sync` is the usual way of doing
+ // this, but that requires unstable features. rust-analyzer uses this code
+ // and avoids unstable features.
+ _marker: marker::PhantomData<*mut ()>,
+}
+
+#[forbid(unsafe_code)]
+#[allow(non_camel_case_types)]
+mod api_tags {
+ use super::rpc::{DecodeMut, Encode, Reader, Writer};
+
+ macro_rules! declare_tags {
+ ($($name:ident {
+ $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)*;)*
+ }),* $(,)?) => {
+ $(
+ pub(super) enum $name {
+ $($method),*
+ }
+ rpc_encode_decode!(enum $name { $($method),* });
+ )*
+
+ pub(super) enum Method {
+ $($name($name)),*
+ }
+ rpc_encode_decode!(enum Method { $($name(m)),* });
+ }
+ }
+ with_api!(self, self, declare_tags);
+}
+
+/// Helper to wrap associated types to allow trait impl dispatch.
+/// That is, normally a pair of impls for `T::Foo` and `T::Bar`
+/// can overlap, but if the impls are, instead, on types like
+/// `Marked<T::Foo, Foo>` and `Marked<T::Bar, Bar>`, they can't.
+trait Mark {
+ type Unmarked;
+ fn mark(unmarked: Self::Unmarked) -> Self;
+}
+
+/// Unwrap types wrapped by `Mark::mark` (see `Mark` for details).
+trait Unmark {
+ type Unmarked;
+ fn unmark(self) -> Self::Unmarked;
+}
+
+#[derive(Copy, Clone, PartialEq, Eq, Hash)]
+struct Marked<T, M> {
+ value: T,
+ _marker: marker::PhantomData<M>,
+}
+
+impl<T, M> Mark for Marked<T, M> {
+ type Unmarked = T;
+ fn mark(unmarked: Self::Unmarked) -> Self {
+ Marked { value: unmarked, _marker: marker::PhantomData }
+ }
+}
+impl<T, M> Unmark for Marked<T, M> {
+ type Unmarked = T;
+ fn unmark(self) -> Self::Unmarked {
+ self.value
+ }
+}
+impl<'a, T, M> Unmark for &'a Marked<T, M> {
+ type Unmarked = &'a T;
+ fn unmark(self) -> Self::Unmarked {
+ &self.value
+ }
+}
+impl<'a, T, M> Unmark for &'a mut Marked<T, M> {
+ type Unmarked = &'a mut T;
+ fn unmark(self) -> Self::Unmarked {
+ &mut self.value
+ }
+}
+
+impl<T: Mark> Mark for Vec<T> {
+ type Unmarked = Vec<T::Unmarked>;
+ fn mark(unmarked: Self::Unmarked) -> Self {
+ // Should be a no-op due to std's in-place collect optimizations.
+ unmarked.into_iter().map(T::mark).collect()
+ }
+}
+impl<T: Unmark> Unmark for Vec<T> {
+ type Unmarked = Vec<T::Unmarked>;
+ fn unmark(self) -> Self::Unmarked {
+ // Should be a no-op due to std's in-place collect optimizations.
+ self.into_iter().map(T::unmark).collect()
+ }
+}
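+
+// Illustrative sketch (not part of the upstream source): `Marked` only tags a
+// value with a zero-sized marker type so that otherwise-overlapping impls on
+// associated types can be told apart; wrapping and unwrapping is free:
+//
+//     struct FooTag;
+//     let marked: Marked<u32, FooTag> = Mark::mark(42);
+//     assert_eq!(marked.unmark(), 42);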
+
+macro_rules! mark_noop {
+ ($($ty:ty),* $(,)?) => {
+ $(
+ impl Mark for $ty {
+ type Unmarked = Self;
+ fn mark(unmarked: Self::Unmarked) -> Self {
+ unmarked
+ }
+ }
+ impl Unmark for $ty {
+ type Unmarked = Self;
+ fn unmark(self) -> Self::Unmarked {
+ self
+ }
+ }
+ )*
+ }
+}
+mark_noop! {
+ (),
+ bool,
+ char,
+ &'_ [u8],
+ &'_ str,
+ String,
+ usize,
+ Delimiter,
+ Level,
+ LineColumn,
+ Spacing,
+}
+
+rpc_encode_decode!(
+ enum Delimiter {
+ Parenthesis,
+ Brace,
+ Bracket,
+ None,
+ }
+);
+rpc_encode_decode!(
+ enum Level {
+ Error,
+ Warning,
+ Note,
+ Help,
+ }
+);
+rpc_encode_decode!(struct LineColumn { line, column });
+rpc_encode_decode!(
+ enum Spacing {
+ Alone,
+ Joint,
+ }
+);
+
+macro_rules! mark_compound {
+ (enum $name:ident <$($T:ident),+> { $($variant:ident $(($field:ident))?),* $(,)? }) => {
+ impl<$($T: Mark),+> Mark for $name <$($T),+> {
+ type Unmarked = $name <$($T::Unmarked),+>;
+ fn mark(unmarked: Self::Unmarked) -> Self {
+ match unmarked {
+ $($name::$variant $(($field))? => {
+ $name::$variant $((Mark::mark($field)))?
+ })*
+ }
+ }
+ }
+
+ impl<$($T: Unmark),+> Unmark for $name <$($T),+> {
+ type Unmarked = $name <$($T::Unmarked),+>;
+ fn unmark(self) -> Self::Unmarked {
+ match self {
+ $($name::$variant $(($field))? => {
+ $name::$variant $((Unmark::unmark($field)))?
+ })*
+ }
+ }
+ }
+ }
+}
+
+macro_rules! compound_traits {
+ ($($t:tt)*) => {
+ rpc_encode_decode!($($t)*);
+ mark_compound!($($t)*);
+ };
+}
+
+compound_traits!(
+ enum Bound<T> {
+ Included(x),
+ Excluded(x),
+ Unbounded,
+ }
+);
+
+compound_traits!(
+ enum Option<T> {
+ Some(t),
+ None,
+ }
+);
+
+compound_traits!(
+ enum Result<T, E> {
+ Ok(t),
+ Err(e),
+ }
+);
+
+#[derive(Clone)]
+pub enum TokenTree<G, P, I, L> {
+ Group(G),
+ Punct(P),
+ Ident(I),
+ Literal(L),
+}
+
+compound_traits!(
+ enum TokenTree<G, P, I, L> {
+ Group(tt),
+ Punct(tt),
+ Ident(tt),
+ Literal(tt),
+ }
+);
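The `Mark`/`Unmark` pair above keeps blanket impls from overlapping when they are keyed on a server's associated types. A minimal standalone sketch of the idea, with hypothetical `FooTag`/`BarTag`/`Describe` names that are not part of this file:

use std::marker::PhantomData;

// Two different "channels" can carry the same concrete type; the zero-sized
// tag in the second type parameter is what keeps the impls from overlapping.
#[allow(dead_code)]
struct Marked<T, M> {
    value: T,
    _marker: PhantomData<M>,
}

struct FooTag;
struct BarTag;

trait Describe {
    fn describe(&self) -> &'static str;
}

impl<T> Describe for Marked<T, FooTag> {
    fn describe(&self) -> &'static str {
        "came in through the Foo channel"
    }
}

impl<T> Describe for Marked<T, BarTag> {
    fn describe(&self) -> &'static str {
        "came in through the Bar channel"
    }
}

fn main() {
    // Both wrap a u32, yet the two impls stay distinct thanks to the tags.
    let foo = Marked::<u32, FooTag> { value: 1, _marker: PhantomData };
    let bar = Marked::<u32, BarTag> { value: 1, _marker: PhantomData };
    assert_ne!(foo.describe(), bar.describe());
}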
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/rpc.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/rpc.rs
new file mode 100644
index 000000000..e9d7a46c0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/rpc.rs
@@ -0,0 +1,304 @@
+//! Serialization for client-server communication.
+
+use std::any::Any;
+use std::char;
+use std::io::Write;
+use std::num::NonZeroU32;
+use std::str;
+
+pub(super) type Writer = super::buffer::Buffer;
+
+pub(super) trait Encode<S>: Sized {
+ fn encode(self, w: &mut Writer, s: &mut S);
+}
+
+pub(super) type Reader<'a> = &'a [u8];
+
+pub(super) trait Decode<'a, 's, S>: Sized {
+ fn decode(r: &mut Reader<'a>, s: &'s S) -> Self;
+}
+
+pub(super) trait DecodeMut<'a, 's, S>: Sized {
+ fn decode(r: &mut Reader<'a>, s: &'s mut S) -> Self;
+}
+
+macro_rules! rpc_encode_decode {
+ (le $ty:ty) => {
+ impl<S> Encode<S> for $ty {
+ fn encode(self, w: &mut Writer, _: &mut S) {
+ w.extend_from_array(&self.to_le_bytes());
+ }
+ }
+
+ impl<S> DecodeMut<'_, '_, S> for $ty {
+ fn decode(r: &mut Reader<'_>, _: &mut S) -> Self {
+ const N: usize = ::std::mem::size_of::<$ty>();
+
+ let mut bytes = [0; N];
+ bytes.copy_from_slice(&r[..N]);
+ *r = &r[N..];
+
+ Self::from_le_bytes(bytes)
+ }
+ }
+ };
+ (struct $name:ident $(<$($T:ident),+>)? { $($field:ident),* $(,)? }) => {
+ impl<S, $($($T: Encode<S>),+)?> Encode<S> for $name $(<$($T),+>)? {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ $(self.$field.encode(w, s);)*
+ }
+ }
+
+ impl<'a, S, $($($T: for<'s> DecodeMut<'a, 's, S>),+)?> DecodeMut<'a, '_, S>
+ for $name $(<$($T),+>)?
+ {
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ $name {
+ $($field: DecodeMut::decode(r, s)),*
+ }
+ }
+ }
+ };
+ (enum $name:ident $(<$($T:ident),+>)? { $($variant:ident $(($field:ident))*),* $(,)? }) => {
+ impl<S, $($($T: Encode<S>),+)?> Encode<S> for $name $(<$($T),+>)? {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ // HACK(eddyb): `Tag` enum duplicated between the
+ // two impls as there's no other place to stash it.
+ #[allow(non_upper_case_globals)]
+ mod tag {
+ #[repr(u8)] enum Tag { $($variant),* }
+
+ $(pub const $variant: u8 = Tag::$variant as u8;)*
+ }
+
+ match self {
+ $($name::$variant $(($field))* => {
+ tag::$variant.encode(w, s);
+ $($field.encode(w, s);)*
+ })*
+ }
+ }
+ }
+
+ impl<'a, S, $($($T: for<'s> DecodeMut<'a, 's, S>),+)?> DecodeMut<'a, '_, S>
+ for $name $(<$($T),+>)?
+ {
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ // HACK(eddyb): `Tag` enum duplicated between the
+ // two impls as there's no other place to stash it.
+ #[allow(non_upper_case_globals)]
+ mod tag {
+ #[repr(u8)] enum Tag { $($variant),* }
+
+ $(pub const $variant: u8 = Tag::$variant as u8;)*
+ }
+
+ match u8::decode(r, s) {
+ $(tag::$variant => {
+ $(let $field = DecodeMut::decode(r, s);)*
+ $name::$variant $(($field))*
+ })*
+ _ => unreachable!(),
+ }
+ }
+ }
+ }
+}
+
+impl<S> Encode<S> for () {
+ fn encode(self, _: &mut Writer, _: &mut S) {}
+}
+
+impl<S> DecodeMut<'_, '_, S> for () {
+ fn decode(_: &mut Reader<'_>, _: &mut S) -> Self {}
+}
+
+impl<S> Encode<S> for u8 {
+ fn encode(self, w: &mut Writer, _: &mut S) {
+ w.push(self);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for u8 {
+ fn decode(r: &mut Reader<'_>, _: &mut S) -> Self {
+ let x = r[0];
+ *r = &r[1..];
+ x
+ }
+}
+
+rpc_encode_decode!(le u32);
+rpc_encode_decode!(le usize);
+
+impl<S> Encode<S> for bool {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ (self as u8).encode(w, s);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for bool {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ match u8::decode(r, s) {
+ 0 => false,
+ 1 => true,
+ _ => unreachable!(),
+ }
+ }
+}
+
+impl<S> Encode<S> for char {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ (self as u32).encode(w, s);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for char {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ char::from_u32(u32::decode(r, s)).unwrap()
+ }
+}
+
+impl<S> Encode<S> for NonZeroU32 {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.get().encode(w, s);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for NonZeroU32 {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ Self::new(u32::decode(r, s)).unwrap()
+ }
+}
+
+impl<S, A: Encode<S>, B: Encode<S>> Encode<S> for (A, B) {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.0.encode(w, s);
+ self.1.encode(w, s);
+ }
+}
+
+impl<'a, S, A: for<'s> DecodeMut<'a, 's, S>, B: for<'s> DecodeMut<'a, 's, S>> DecodeMut<'a, '_, S>
+ for (A, B)
+{
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ (DecodeMut::decode(r, s), DecodeMut::decode(r, s))
+ }
+}
+
+impl<S> Encode<S> for &[u8] {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.len().encode(w, s);
+ w.write_all(self).unwrap();
+ }
+}
+
+impl<'a, S> DecodeMut<'a, '_, S> for &'a [u8] {
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ let len = usize::decode(r, s);
+ let xs = &r[..len];
+ *r = &r[len..];
+ xs
+ }
+}
+
+impl<S> Encode<S> for &str {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.as_bytes().encode(w, s);
+ }
+}
+
+impl<'a, S> DecodeMut<'a, '_, S> for &'a str {
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ str::from_utf8(<&[u8]>::decode(r, s)).unwrap()
+ }
+}
+
+impl<S> Encode<S> for String {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self[..].encode(w, s);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for String {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ <&str>::decode(r, s).to_string()
+ }
+}
+
+impl<S, T: Encode<S>> Encode<S> for Vec<T> {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.len().encode(w, s);
+ for x in self {
+ x.encode(w, s);
+ }
+ }
+}
+
+impl<'a, S, T: for<'s> DecodeMut<'a, 's, S>> DecodeMut<'a, '_, S> for Vec<T> {
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ let len = usize::decode(r, s);
+ let mut vec = Vec::with_capacity(len);
+ for _ in 0..len {
+ vec.push(T::decode(r, s));
+ }
+ vec
+ }
+}
+
+/// Simplified version of panic payloads, ignoring
+/// types other than `&'static str` and `String`.
+pub enum PanicMessage {
+ StaticStr(&'static str),
+ String(String),
+ Unknown,
+}
+
+impl From<Box<dyn Any + Send>> for PanicMessage {
+ fn from(payload: Box<dyn Any + Send + 'static>) -> Self {
+ if let Some(s) = payload.downcast_ref::<&'static str>() {
+ return PanicMessage::StaticStr(s);
+ }
+ if let Ok(s) = payload.downcast::<String>() {
+ return PanicMessage::String(*s);
+ }
+ PanicMessage::Unknown
+ }
+}
+
+impl Into<Box<dyn Any + Send>> for PanicMessage {
+ fn into(self) -> Box<dyn Any + Send> {
+ match self {
+ PanicMessage::StaticStr(s) => Box::new(s),
+ PanicMessage::String(s) => Box::new(s),
+ PanicMessage::Unknown => {
+ struct UnknownPanicMessage;
+ Box::new(UnknownPanicMessage)
+ }
+ }
+ }
+}
+
+impl PanicMessage {
+ pub fn as_str(&self) -> Option<&str> {
+ match self {
+ PanicMessage::StaticStr(s) => Some(s),
+ PanicMessage::String(s) => Some(s),
+ PanicMessage::Unknown => None,
+ }
+ }
+}
+
+impl<S> Encode<S> for PanicMessage {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.as_str().encode(w, s);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for PanicMessage {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ match Option::<String>::decode(r, s) {
+ Some(s) => PanicMessage::String(s),
+ None => PanicMessage::Unknown,
+ }
+ }
+}
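The wire format implemented above is intentionally simple: fixed-width little-endian integers, length-prefixed byte slices and strings, and a one-byte tag in front of enum fields. A self-contained sketch of the same length-prefixed scheme, using hypothetical `encode_str`/`decode_str` helpers rather than the macro itself:

// Length-prefixed encoding in the style of rpc.rs: a little-endian usize
// length, followed by the raw UTF-8 bytes.
fn encode_str(s: &str, out: &mut Vec<u8>) {
    out.extend_from_slice(&s.len().to_le_bytes());
    out.extend_from_slice(s.as_bytes());
}

// Reads one length-prefixed string and advances the reader past it,
// mirroring how the DecodeMut impls above consume their Reader.
fn decode_str<'a>(r: &mut &'a [u8]) -> &'a str {
    const N: usize = std::mem::size_of::<usize>();
    let mut len_bytes = [0u8; N];
    len_bytes.copy_from_slice(&r[..N]);
    *r = &r[N..];
    let len = usize::from_le_bytes(len_bytes);
    let bytes = &r[..len];
    *r = &r[len..];
    std::str::from_utf8(bytes).unwrap()
}

fn main() {
    let mut buf = Vec::new();
    encode_str("proc-macro", &mut buf);
    encode_str("bridge", &mut buf);

    let mut reader: &[u8] = &buf;
    assert_eq!(decode_str(&mut reader), "proc-macro");
    assert_eq!(decode_str(&mut reader), "bridge");
    assert!(reader.is_empty());
}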
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/scoped_cell.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/scoped_cell.rs
new file mode 100644
index 000000000..2cde1f65a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/scoped_cell.rs
@@ -0,0 +1,81 @@
+//! `Cell` variant for (scoped) existential lifetimes.
+
+use std::cell::Cell;
+use std::mem;
+use std::ops::{Deref, DerefMut};
+
+/// Type lambda application, with a lifetime.
+#[allow(unused_lifetimes)]
+pub trait ApplyL<'a> {
+ type Out;
+}
+
+/// Type lambda taking a lifetime, i.e., `Lifetime -> Type`.
+pub trait LambdaL: for<'a> ApplyL<'a> {}
+
+impl<T: for<'a> ApplyL<'a>> LambdaL for T {}
+
+// HACK(eddyb) work around projection limitations with a newtype
+// FIXME(#52812) replace with `&'a mut <T as ApplyL<'b>>::Out`
+pub struct RefMutL<'a, 'b, T: LambdaL>(&'a mut <T as ApplyL<'b>>::Out);
+
+impl<'a, 'b, T: LambdaL> Deref for RefMutL<'a, 'b, T> {
+ type Target = <T as ApplyL<'b>>::Out;
+ fn deref(&self) -> &Self::Target {
+ self.0
+ }
+}
+
+impl<'a, 'b, T: LambdaL> DerefMut for RefMutL<'a, 'b, T> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ self.0
+ }
+}
+
+pub struct ScopedCell<T: LambdaL>(Cell<<T as ApplyL<'static>>::Out>);
+
+impl<T: LambdaL> ScopedCell<T> {
+ pub const fn new(value: <T as ApplyL<'static>>::Out) -> Self {
+ ScopedCell(Cell::new(value))
+ }
+
+ /// Sets the value in `self` to `replacement` while
+ /// running `f`, which gets the old value, mutably.
+ /// The old value will be restored after `f` exits, even
+ /// by panic, including modifications made to it by `f`.
+ pub fn replace<'a, R>(
+ &self,
+ replacement: <T as ApplyL<'a>>::Out,
+ f: impl for<'b, 'c> FnOnce(RefMutL<'b, 'c, T>) -> R,
+ ) -> R {
+ /// Wrapper that ensures that the cell always gets filled
+ /// (with the original state, optionally changed by `f`),
+ /// even if `f` had panicked.
+ struct PutBackOnDrop<'a, T: LambdaL> {
+ cell: &'a ScopedCell<T>,
+ value: Option<<T as ApplyL<'static>>::Out>,
+ }
+
+ impl<'a, T: LambdaL> Drop for PutBackOnDrop<'a, T> {
+ fn drop(&mut self) {
+ self.cell.0.set(self.value.take().unwrap());
+ }
+ }
+
+ let mut put_back_on_drop = PutBackOnDrop {
+ cell: self,
+ value: Some(self.0.replace(unsafe {
+ let erased = mem::transmute_copy(&replacement);
+ mem::forget(replacement);
+ erased
+ })),
+ };
+
+ f(RefMutL(put_back_on_drop.value.as_mut().unwrap()))
+ }
+
+ /// Sets the value in `self` to `value` while running `f`.
+ pub fn set<R>(&self, value: <T as ApplyL<'_>>::Out, f: impl FnOnce() -> R) -> R {
+ self.replace(value, |_| f())
+ }
+}
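`ScopedCell` lets the bridge park a short-lived value in a `'static` thread-local: the lifetime is erased on the way in and the previous contents are restored on the way out, even if the closure panics. A rough standalone sketch of the same put-back-on-drop discipline, using an ordinary thread-local `Cell` and a hypothetical `with_current` helper instead of the lifetime-erased payload:

use std::cell::Cell;

thread_local! {
    static CURRENT: Cell<Option<String>> = Cell::new(None);
}

fn with_current<R>(value: String, f: impl FnOnce() -> R) -> R {
    struct PutBackOnDrop(Option<String>);
    impl Drop for PutBackOnDrop {
        fn drop(&mut self) {
            // Restore whatever was there before, even if `f` panicked.
            CURRENT.with(|c| c.set(self.0.take()));
        }
    }

    // Swap the new value in, remembering the old one until drop.
    let _restore = PutBackOnDrop(CURRENT.with(|c| c.replace(Some(value))));
    f()
}

fn main() {
    let seen = with_current("outer".to_string(), || {
        with_current("inner".to_string(), || CURRENT.with(|c| c.replace(None)))
    });
    // The innermost scope observed its own value; both scopes were restored.
    assert_eq!(seen.as_deref(), Some("inner"));
}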
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/selfless_reify.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/selfless_reify.rs
new file mode 100644
index 000000000..4ee4bb87c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/selfless_reify.rs
@@ -0,0 +1,83 @@
+//! Abstraction for creating `fn` pointers from any callable that *effectively*
+//! has the equivalent of implementing `Default`, even if the compiler neither
+//! provides `Default` nor allows reifying closures (i.e. creating `fn` pointers)
+//! other than those with absolutely no captures.
+//!
+//! More specifically, for a closure-like type to be "effectively `Default`":
+//! * it must be a ZST (zero-sized type): no information contained within, so
+//! that `Default`'s return value (if it were implemented) is unambiguous
+//! * it must be `Copy`: no captured "unique ZST tokens" or any other similar
+//! types that would make duplicating values at will unsound
+//! * combined with the ZST requirement, this confers a kind of "telecopy"
+//! ability: similar to `Copy`, but without keeping the value around, and
+//! instead "reconstructing" it (a noop given it's a ZST) when needed
+//! * it must be *provably* inhabited: no captured uninhabited types or any
+//! other types that cannot be constructed by the user of this abstraction
+//! * the proof is a value of the closure-like type itself, in a sense the
+//! "seed" for the "telecopy" process made possible by ZST + `Copy`
+//! * this requirement is the only reason an abstraction limited to a specific
+//! usecase is required: ZST + `Copy` can be checked with *at worst* a panic
+//! at the "attempted `::default()` call" time, but that doesn't guarantee
+//! that the value can be soundly created, and attempting to use the typical
+//! "proof ZST token" approach leads yet again to having a ZST + `Copy` type
+//! that is not proof of anything without a value (i.e. isomorphic to a
+//! newtype of the type it's trying to prove the inhabitation of)
+//!
+//! A more flexible (and safer) solution to the general problem could exist once
+//! `const`-generic parameters can have type parameters in their types:
+//!
+//! ```rust,ignore (needs future const-generics)
+//! extern "C" fn ffi_wrapper<
+//! A, R,
+//! F: Fn(A) -> R,
+//! const f: F, // <-- this `const`-generic is not yet allowed
+//! >(arg: A) -> R {
+//! f(arg)
+//! }
+//! ```
+
+use std::mem;
+
+// FIXME(eddyb) this could be `trait` impls except for the `const fn` requirement.
+macro_rules! define_reify_functions {
+ ($(
+ fn $name:ident $(<$($param:ident),*>)?
+ for $(extern $abi:tt)? fn($($arg:ident: $arg_ty:ty),*) -> $ret_ty:ty;
+ )+) => {
+ $(pub const fn $name<
+ $($($param,)*)?
+ F: Fn($($arg_ty),*) -> $ret_ty + Copy
+ >(f: F) -> $(extern $abi)? fn($($arg_ty),*) -> $ret_ty {
+ // FIXME(eddyb) describe the `F` type (e.g. via `type_name::<F>`) once panic
+ // formatting becomes possible in `const fn`.
+ assert!(mem::size_of::<F>() == 0, "selfless_reify: closure must be zero-sized");
+
+ $(extern $abi)? fn wrapper<
+ $($($param,)*)?
+ F: Fn($($arg_ty),*) -> $ret_ty + Copy
+ >($($arg: $arg_ty),*) -> $ret_ty {
+ let f = unsafe {
+ // SAFETY: `F` satisfies all criteria for "out of thin air"
+ // reconstructability (see module-level doc comment).
+ mem::MaybeUninit::<F>::uninit().assume_init()
+ };
+ f($($arg),*)
+ }
+ let _f_proof = f;
+ wrapper::<
+ $($($param,)*)?
+ F
+ >
+ })+
+ }
+}
+
+define_reify_functions! {
+ fn _reify_to_extern_c_fn_unary<A, R> for extern "C" fn(arg: A) -> R;
+
+ // HACK(eddyb) this abstraction is used with `for<'a> fn(Bridge<'a>) -> T`
+ // but that doesn't work with just `reify_to_extern_c_fn_unary` because of
+ // the `fn` pointer type being "higher-ranked" (i.e. the `for<'a>` binder).
+ // FIXME(eddyb) try to remove the lifetime from `Bridge`, that'd help.
+ fn reify_to_extern_c_fn_hrt_bridge<R> for extern "C" fn(bridge: super::Bridge<'_>) -> R;
+}
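The reification trick above only needs the closure type to be an inhabited, zero-sized `Copy` type; the wrapper then conjures a fresh value of it and calls it. A simplified sketch under those assumptions, with a hypothetical non-`const` `reify` for a plain `fn(u32) -> u32` rather than the `extern "C"` signatures the bridge needs:

use std::mem;

// `F` must be a zero-sized, Copy closure, so a value of it can be produced
// "out of thin air" inside `wrapper`, which is a real `fn` item and therefore
// coerces to a function pointer.
fn reify<F: Fn(u32) -> u32 + Copy>(f: F) -> fn(u32) -> u32 {
    assert!(mem::size_of::<F>() == 0, "closure must capture nothing");

    fn wrapper<F: Fn(u32) -> u32 + Copy>(x: u32) -> u32 {
        // SAFETY: `F` is an inhabited ZST (a value of it was passed to
        // `reify`), so materializing another value carries no information.
        let f = unsafe { mem::MaybeUninit::<F>::uninit().assume_init() };
        f(x)
    }

    let _proof_of_inhabitation = f;
    wrapper::<F>
}

fn main() {
    let double: fn(u32) -> u32 = reify(|x| x * 2);
    assert_eq!(double(21), 42);
}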
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/server.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/server.rs
new file mode 100644
index 000000000..0fb3c6985
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/server.rs
@@ -0,0 +1,332 @@
+//! Server-side traits.
+
+use super::*;
+
+// FIXME(eddyb) generate the definition of `HandleStore` in `server.rs`.
+use super::client::HandleStore;
+
+pub trait Types {
+ type FreeFunctions: 'static;
+ type TokenStream: 'static + Clone;
+ type Group: 'static + Clone;
+ type Punct: 'static + Copy + Eq + Hash;
+ type Ident: 'static + Copy + Eq + Hash;
+ type Literal: 'static + Clone;
+ type SourceFile: 'static + Clone;
+ type MultiSpan: 'static;
+ type Diagnostic: 'static;
+ type Span: 'static + Copy + Eq + Hash;
+}
+
+/// Declare an associated fn of one of the traits below, adding necessary
+/// default bodies.
+macro_rules! associated_fn {
+ (fn drop(&mut self, $arg:ident: $arg_ty:ty)) =>
+ (fn drop(&mut self, $arg: $arg_ty) { mem::drop($arg) });
+
+ (fn clone(&mut self, $arg:ident: $arg_ty:ty) -> $ret_ty:ty) =>
+ (fn clone(&mut self, $arg: $arg_ty) -> $ret_ty { $arg.clone() });
+
+ ($($item:tt)*) => ($($item)*;)
+}
+
+macro_rules! declare_server_traits {
+ ($($name:ident {
+ $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)?;)*
+ }),* $(,)?) => {
+ $(pub trait $name: Types {
+ $(associated_fn!(fn $method(&mut self, $($arg: $arg_ty),*) $(-> $ret_ty)?);)*
+ })*
+
+ pub trait Server: Types $(+ $name)* {}
+ impl<S: Types $(+ $name)*> Server for S {}
+ }
+}
+with_api!(Self, self_, declare_server_traits);
+
+pub(super) struct MarkedTypes<S: Types>(S);
+
+macro_rules! define_mark_types_impls {
+ ($($name:ident {
+ $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)?;)*
+ }),* $(,)?) => {
+ impl<S: Types> Types for MarkedTypes<S> {
+ $(type $name = Marked<S::$name, client::$name>;)*
+ }
+
+ $(impl<S: $name> $name for MarkedTypes<S> {
+ $(fn $method(&mut self, $($arg: $arg_ty),*) $(-> $ret_ty)? {
+ <_>::mark($name::$method(&mut self.0, $($arg.unmark()),*))
+ })*
+ })*
+ }
+}
+with_api!(Self, self_, define_mark_types_impls);
+
+struct Dispatcher<S: Types> {
+ handle_store: HandleStore<S>,
+ server: S,
+}
+
+macro_rules! define_dispatcher_impl {
+ ($($name:ident {
+ $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)?;)*
+ }),* $(,)?) => {
+ // FIXME(eddyb) `pub` only for `ExecutionStrategy` below.
+ pub trait DispatcherTrait {
+ // HACK(eddyb) these are here to allow `Self::$name` to work below.
+ $(type $name;)*
+ fn dispatch(&mut self, buf: Buffer) -> Buffer;
+ }
+
+ impl<S: Server> DispatcherTrait for Dispatcher<MarkedTypes<S>> {
+ $(type $name = <MarkedTypes<S> as Types>::$name;)*
+ fn dispatch(&mut self, mut buf: Buffer) -> Buffer {
+ let Dispatcher { handle_store, server } = self;
+
+ let mut reader = &buf[..];
+ match api_tags::Method::decode(&mut reader, &mut ()) {
+ $(api_tags::Method::$name(m) => match m {
+ $(api_tags::$name::$method => {
+ let mut call_method = || {
+ reverse_decode!(reader, handle_store; $($arg: $arg_ty),*);
+ $name::$method(server, $($arg),*)
+ };
+ // HACK(eddyb) don't use `panic::catch_unwind` in a panic.
+ // If client and server happen to use the same `libstd`,
+ // `catch_unwind` asserts that the panic counter was 0,
+ // even when the closure passed to it didn't panic.
+ let r = if thread::panicking() {
+ Ok(call_method())
+ } else {
+ panic::catch_unwind(panic::AssertUnwindSafe(call_method))
+ .map_err(PanicMessage::from)
+ };
+
+ buf.clear();
+ r.encode(&mut buf, handle_store);
+ })*
+ }),*
+ }
+ buf
+ }
+ }
+ }
+}
+with_api!(Self, self_, define_dispatcher_impl);
+
+pub trait ExecutionStrategy {
+ fn run_bridge_and_client(
+ &self,
+ dispatcher: &mut impl DispatcherTrait,
+ input: Buffer,
+ run_client: extern "C" fn(Bridge<'_>) -> Buffer,
+ force_show_panics: bool,
+ ) -> Buffer;
+}
+
+pub struct SameThread;
+
+impl ExecutionStrategy for SameThread {
+ fn run_bridge_and_client(
+ &self,
+ dispatcher: &mut impl DispatcherTrait,
+ input: Buffer,
+ run_client: extern "C" fn(Bridge<'_>) -> Buffer,
+ force_show_panics: bool,
+ ) -> Buffer {
+ let mut dispatch = |buf| dispatcher.dispatch(buf);
+
+ run_client(Bridge {
+ cached_buffer: input,
+ dispatch: (&mut dispatch).into(),
+ force_show_panics,
+ _marker: marker::PhantomData,
+ })
+ }
+}
+
+// NOTE(eddyb) Two implementations are provided, the second one is a bit
+// faster but neither is anywhere near as fast as same-thread execution.
+
+pub struct CrossThread1;
+
+impl ExecutionStrategy for CrossThread1 {
+ fn run_bridge_and_client(
+ &self,
+ dispatcher: &mut impl DispatcherTrait,
+ input: Buffer,
+ run_client: extern "C" fn(Bridge<'_>) -> Buffer,
+ force_show_panics: bool,
+ ) -> Buffer {
+ use std::sync::mpsc::channel;
+
+ let (req_tx, req_rx) = channel();
+ let (res_tx, res_rx) = channel();
+
+ let join_handle = thread::spawn(move || {
+ let mut dispatch = |buf| {
+ req_tx.send(buf).unwrap();
+ res_rx.recv().unwrap()
+ };
+
+ run_client(Bridge {
+ cached_buffer: input,
+ dispatch: (&mut dispatch).into(),
+ force_show_panics,
+ _marker: marker::PhantomData,
+ })
+ });
+
+ for b in req_rx {
+ res_tx.send(dispatcher.dispatch(b)).unwrap();
+ }
+
+ join_handle.join().unwrap()
+ }
+}
+
+pub struct CrossThread2;
+
+impl ExecutionStrategy for CrossThread2 {
+ fn run_bridge_and_client(
+ &self,
+ dispatcher: &mut impl DispatcherTrait,
+ input: Buffer,
+ run_client: extern "C" fn(Bridge<'_>) -> Buffer,
+ force_show_panics: bool,
+ ) -> Buffer {
+ use std::sync::{Arc, Mutex};
+
+ enum State<T> {
+ Req(T),
+ Res(T),
+ }
+
+ let mut state = Arc::new(Mutex::new(State::Res(Buffer::new())));
+
+ let server_thread = thread::current();
+ let state2 = state.clone();
+ let join_handle = thread::spawn(move || {
+ let mut dispatch = |b| {
+ *state2.lock().unwrap() = State::Req(b);
+ server_thread.unpark();
+ loop {
+ thread::park();
+ if let State::Res(b) = &mut *state2.lock().unwrap() {
+ break b.take();
+ }
+ }
+ };
+
+ let r = run_client(Bridge {
+ cached_buffer: input,
+ dispatch: (&mut dispatch).into(),
+ force_show_panics,
+ _marker: marker::PhantomData,
+ });
+
+ // Wake up the server so it can exit the dispatch loop.
+ drop(state2);
+ server_thread.unpark();
+
+ r
+ });
+
+ // Check whether `state2` was dropped, to know when to stop.
+ while Arc::get_mut(&mut state).is_none() {
+ thread::park();
+ let mut b = match &mut *state.lock().unwrap() {
+ State::Req(b) => b.take(),
+ _ => continue,
+ };
+ b = dispatcher.dispatch(b.take());
+ *state.lock().unwrap() = State::Res(b);
+ join_handle.thread().unpark();
+ }
+
+ join_handle.join().unwrap()
+ }
+}
+
+fn run_server<
+ S: Server,
+ I: Encode<HandleStore<MarkedTypes<S>>>,
+ O: for<'a, 's> DecodeMut<'a, 's, HandleStore<MarkedTypes<S>>>,
+>(
+ strategy: &impl ExecutionStrategy,
+ handle_counters: &'static client::HandleCounters,
+ server: S,
+ input: I,
+ run_client: extern "C" fn(Bridge<'_>) -> Buffer,
+ force_show_panics: bool,
+) -> Result<O, PanicMessage> {
+ let mut dispatcher =
+ Dispatcher { handle_store: HandleStore::new(handle_counters), server: MarkedTypes(server) };
+
+ let mut buf = Buffer::new();
+ input.encode(&mut buf, &mut dispatcher.handle_store);
+
+ buf = strategy.run_bridge_and_client(&mut dispatcher, buf, run_client, force_show_panics);
+
+ Result::decode(&mut &buf[..], &mut dispatcher.handle_store)
+}
+
+impl client::Client<super::super::TokenStream, super::super::TokenStream> {
+ pub fn run<S>(
+ &self,
+ strategy: &impl ExecutionStrategy,
+ server: S,
+ input: S::TokenStream,
+ force_show_panics: bool,
+ ) -> Result<S::TokenStream, PanicMessage>
+ where
+ S: Server,
+ S::TokenStream: Default,
+ {
+ let client::Client { get_handle_counters, run, _marker } = *self;
+ run_server(
+ strategy,
+ get_handle_counters(),
+ server,
+ <MarkedTypes<S> as Types>::TokenStream::mark(input),
+ run,
+ force_show_panics,
+ )
+ .map(|s| <Option<<MarkedTypes<S> as Types>::TokenStream>>::unmark(s).unwrap_or_default())
+ }
+}
+
+impl
+ client::Client<
+ (super::super::TokenStream, super::super::TokenStream),
+ super::super::TokenStream,
+ >
+{
+ pub fn run<S>(
+ &self,
+ strategy: &impl ExecutionStrategy,
+ server: S,
+ input: S::TokenStream,
+ input2: S::TokenStream,
+ force_show_panics: bool,
+ ) -> Result<S::TokenStream, PanicMessage>
+ where
+ S: Server,
+ S::TokenStream: Default,
+ {
+ let client::Client { get_handle_counters, run, _marker } = *self;
+ run_server(
+ strategy,
+ get_handle_counters(),
+ server,
+ (
+ <MarkedTypes<S> as Types>::TokenStream::mark(input),
+ <MarkedTypes<S> as Types>::TokenStream::mark(input2),
+ ),
+ run,
+ force_show_panics,
+ )
+ .map(|s| <Option<<MarkedTypes<S> as Types>::TokenStream>>::unmark(s).unwrap_or_default())
+ }
+}
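`CrossThread2` replaces channels with a single shared slot plus `park`/`unpark`: the client publishes a request, wakes the server, and parks until the slot holds a response; the server does the mirror image. A cut-down standalone sketch of that handshake for one request, with hypothetical `String` payloads instead of `Buffer`s:

use std::sync::{Arc, Mutex};
use std::thread;

enum State {
    Req(String),
    Res(String),
}

fn main() {
    let state = Arc::new(Mutex::new(State::Res(String::new())));
    let server_thread = thread::current();

    let client_state = Arc::clone(&state);
    let client = thread::spawn(move || {
        // Client side: publish the request, wake the server, then park until
        // a response shows up in the shared slot.
        *client_state.lock().unwrap() = State::Req("ping".to_string());
        server_thread.unpark();
        loop {
            thread::park();
            if let State::Res(r) = &*client_state.lock().unwrap() {
                return r.clone();
            }
        }
    });

    // Server side: park until a request shows up, answer it, wake the client.
    let response = loop {
        thread::park();
        let req = match &*state.lock().unwrap() {
            State::Req(r) => r.clone(),
            State::Res(_) => continue,
        };
        break format!("{req} pong");
    };
    *state.lock().unwrap() = State::Res(response);
    client.thread().unpark();

    assert_eq!(client.join().unwrap(), "ping pong");
}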
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/diagnostic.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/diagnostic.rs
new file mode 100644
index 000000000..3fade2dc4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/diagnostic.rs
@@ -0,0 +1,166 @@
+//! lib-proc-macro diagnostic
+//!
+//! Copy from <https://github.com/rust-lang/rust/blob/e45d9973b2665897a768312e971b82cc62633103/src/libproc_macro/diagnostic.rs>
+//! adapted to remove unstable features
+
+use super::Span;
+
+/// An enum representing a diagnostic level.
+#[derive(Copy, Clone, Debug)]
+#[non_exhaustive]
+pub enum Level {
+ /// An error.
+ Error,
+ /// A warning.
+ Warning,
+ /// A note.
+ Note,
+ /// A help message.
+ Help,
+}
+
+/// Trait implemented by types that can be converted into a set of `Span`s.
+pub trait MultiSpan {
+ /// Converts `self` into a `Vec<Span>`.
+ fn into_spans(self) -> Vec<Span>;
+}
+
+impl MultiSpan for Span {
+ fn into_spans(self) -> Vec<Span> {
+ vec![self]
+ }
+}
+
+impl MultiSpan for Vec<Span> {
+ fn into_spans(self) -> Vec<Span> {
+ self
+ }
+}
+
+impl<'a> MultiSpan for &'a [Span] {
+ fn into_spans(self) -> Vec<Span> {
+ self.to_vec()
+ }
+}
+
+/// A structure representing a diagnostic message and associated children
+/// messages.
+#[derive(Clone, Debug)]
+pub struct Diagnostic {
+ level: Level,
+ message: String,
+ spans: Vec<Span>,
+ children: Vec<Diagnostic>,
+}
+
+macro_rules! diagnostic_child_methods {
+ ($spanned:ident, $regular:ident, $level:expr) => {
+ #[doc = concat!("Adds a new child diagnostic message to `self` with the [`",
+ stringify!($level), "`] level, and the given `spans` and `message`.")]
+ pub fn $spanned<S, T>(mut self, spans: S, message: T) -> Diagnostic
+ where
+ S: MultiSpan,
+ T: Into<String>,
+ {
+ self.children.push(Diagnostic::spanned(spans, $level, message));
+ self
+ }
+
+ #[doc = concat!("Adds a new child diagnostic message to `self` with the [`",
+ stringify!($level), "`] level, and the given `message`.")]
+ pub fn $regular<T: Into<String>>(mut self, message: T) -> Diagnostic {
+ self.children.push(Diagnostic::new($level, message));
+ self
+ }
+ };
+}
+
+/// Iterator over the children diagnostics of a `Diagnostic`.
+#[derive(Debug, Clone)]
+pub struct Children<'a>(std::slice::Iter<'a, Diagnostic>);
+
+impl<'a> Iterator for Children<'a> {
+ type Item = &'a Diagnostic;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.0.next()
+ }
+}
+
+impl Diagnostic {
+ /// Creates a new diagnostic with the given `level` and `message`.
+ pub fn new<T: Into<String>>(level: Level, message: T) -> Diagnostic {
+ Diagnostic { level, message: message.into(), spans: vec![], children: vec![] }
+ }
+
+ /// Creates a new diagnostic with the given `level` and `message` pointing to
+ /// the given set of `spans`.
+ pub fn spanned<S, T>(spans: S, level: Level, message: T) -> Diagnostic
+ where
+ S: MultiSpan,
+ T: Into<String>,
+ {
+ Diagnostic { level, message: message.into(), spans: spans.into_spans(), children: vec![] }
+ }
+
+ diagnostic_child_methods!(span_error, error, Level::Error);
+ diagnostic_child_methods!(span_warning, warning, Level::Warning);
+ diagnostic_child_methods!(span_note, note, Level::Note);
+ diagnostic_child_methods!(span_help, help, Level::Help);
+
+ /// Returns the diagnostic `level` for `self`.
+ pub fn level(&self) -> Level {
+ self.level
+ }
+
+ /// Sets the level in `self` to `level`.
+ pub fn set_level(&mut self, level: Level) {
+ self.level = level;
+ }
+
+ /// Returns the message in `self`.
+ pub fn message(&self) -> &str {
+ &self.message
+ }
+
+ /// Sets the message in `self` to `message`.
+ pub fn set_message<T: Into<String>>(&mut self, message: T) {
+ self.message = message.into();
+ }
+
+ /// Returns the `Span`s in `self`.
+ pub fn spans(&self) -> &[Span] {
+ &self.spans
+ }
+
+ /// Sets the `Span`s in `self` to `spans`.
+ pub fn set_spans<S: MultiSpan>(&mut self, spans: S) {
+ self.spans = spans.into_spans();
+ }
+
+ /// Returns an iterator over the children diagnostics of `self`.
+ pub fn children(&self) -> Children<'_> {
+ Children(self.children.iter())
+ }
+
+ /// Emit the diagnostic.
+ pub fn emit(self) {
+ fn to_internal(spans: Vec<Span>) -> super::bridge::client::MultiSpan {
+ let mut multi_span = super::bridge::client::MultiSpan::new();
+ for span in spans {
+ multi_span.push(span.0);
+ }
+ multi_span
+ }
+
+ let mut diag = super::bridge::client::Diagnostic::new(
+ self.level,
+ &self.message[..],
+ to_internal(self.spans),
+ );
+ for c in self.children {
+ diag.sub(c.level, &c.message[..], to_internal(c.spans));
+ }
+ diag.emit();
+ }
+}
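A short usage sketch of the builder defined above, assuming `Diagnostic`, `Level`, and `Span` are in scope and that the code runs inside an active macro expansion (since `emit` goes through the bridge); the helper name is hypothetical:

// Reports an error on the given span with a note and a help child message.
fn reject_non_struct(span: Span) {
    Diagnostic::spanned(span, Level::Error, "this derive only supports structs")
        .span_note(span, "the macro was applied here")
        .help("remove the derive or apply it to a struct")
        .emit();
}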
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/mod.rs
new file mode 100644
index 000000000..c50a16bf4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/mod.rs
@@ -0,0 +1,1106 @@
+//! A support library for macro authors when defining new macros.
+//!
+//! This library, provided by the standard distribution, provides the types
+//! consumed in the interfaces of procedurally defined macro definitions such as
+//! function-like macros `#[proc_macro]`, macro attributes `#[proc_macro_attribute]` and
+//! custom derive attributes `#[proc_macro_derive]`.
+//!
+//! See [the book] for more.
+//!
+//! [the book]: ../book/ch19-06-macros.html#procedural-macros-for-generating-code-from-attributes
+
+#[doc(hidden)]
+pub mod bridge;
+
+mod diagnostic;
+
+pub use diagnostic::{Diagnostic, Level, MultiSpan};
+
+use std::cmp::Ordering;
+use std::ops::RangeBounds;
+use std::path::PathBuf;
+use std::str::FromStr;
+use std::{error, fmt, iter, mem};
+
+/// Determines whether proc_macro has been made accessible to the currently
+/// running program.
+///
+/// The proc_macro crate is only intended for use inside the implementation of
+/// procedural macros. All the functions in this crate panic if invoked from
+/// outside of a procedural macro, such as from a build script or unit test or
+/// ordinary Rust binary.
+///
+/// With consideration for Rust libraries that are designed to support both
+/// macro and non-macro use cases, `proc_macro::is_available()` provides a
+/// non-panicking way to detect whether the infrastructure required to use the
+/// API of proc_macro is presently available. Returns true if invoked from
+/// inside of a procedural macro, false if invoked from any other binary.
+pub fn is_available() -> bool {
+ bridge::Bridge::is_available()
+}
+
+/// The main type provided by this crate, representing an abstract stream of
+/// tokens, or, more specifically, a sequence of token trees.
+/// The type provides interfaces for iterating over those token trees and, conversely,
+/// collecting a number of token trees into one stream.
+///
+/// This is both the input and output of `#[proc_macro]`, `#[proc_macro_attribute]`
+/// and `#[proc_macro_derive]` definitions.
+#[derive(Clone)]
+pub struct TokenStream(Option<bridge::client::TokenStream>);
+
+/// Error returned from `TokenStream::from_str`.
+#[non_exhaustive]
+#[derive(Debug)]
+pub struct LexError;
+
+impl fmt::Display for LexError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str("cannot parse string into token stream")
+ }
+}
+
+impl error::Error for LexError {}
+
+/// Error returned from `TokenStream::expand_expr`.
+#[non_exhaustive]
+#[derive(Debug)]
+pub struct ExpandError;
+
+impl fmt::Display for ExpandError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str("macro expansion failed")
+ }
+}
+
+impl error::Error for ExpandError {}
+
+impl TokenStream {
+ /// Returns an empty `TokenStream` containing no token trees.
+ pub fn new() -> TokenStream {
+ TokenStream(None)
+ }
+
+ /// Checks if this `TokenStream` is empty.
+ pub fn is_empty(&self) -> bool {
+ self.0.as_ref().map(|h| h.is_empty()).unwrap_or(true)
+ }
+
+ /// Parses this `TokenStream` as an expression and attempts to expand any
+ /// macros within it. Returns the expanded `TokenStream`.
+ ///
+ /// Currently only expressions expanding to literals will succeed, although
+ /// this may be relaxed in the future.
+ ///
+ /// NOTE: In error conditions, `expand_expr` may leave macros unexpanded,
+ /// report an error, failing compilation, and/or return an `Err(..)`. The
+ /// specific behavior for any error condition, and what conditions are
+ /// considered errors, is unspecified and may change in the future.
+ pub fn expand_expr(&self) -> Result<TokenStream, ExpandError> {
+ let stream = self.0.as_ref().ok_or(ExpandError)?;
+ match bridge::client::TokenStream::expand_expr(stream) {
+ Ok(stream) => Ok(TokenStream(Some(stream))),
+ Err(_) => Err(ExpandError),
+ }
+ }
+}
+
+/// Attempts to break the string into tokens and parse those tokens into a token stream.
+/// May fail for a number of reasons, for example, if the string contains unbalanced delimiters
+/// or characters that do not exist in the language.
+/// All tokens in the parsed stream get `Span::call_site()` spans.
+///
+/// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
+/// change these errors into `LexError`s later.
+impl FromStr for TokenStream {
+ type Err = LexError;
+
+ fn from_str(src: &str) -> Result<TokenStream, LexError> {
+ Ok(TokenStream(Some(bridge::client::TokenStream::from_str(src))))
+ }
+}
+
+/// Prints the token stream as a string that is supposed to be losslessly convertible back
+/// into the same token stream (modulo spans), except for possibly `TokenTree::Group`s
+/// with `Delimiter::None` delimiters and negative numeric literals.
+impl fmt::Display for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.to_string())
+ }
+}
+
+/// Prints the token stream in a form convenient for debugging.
+impl fmt::Debug for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str("TokenStream ")?;
+ f.debug_list().entries(self.clone()).finish()
+ }
+}
+
+impl Default for TokenStream {
+ fn default() -> Self {
+ TokenStream::new()
+ }
+}
+
+pub use quote::{quote, quote_span};
+
+fn tree_to_bridge_tree(
+ tree: TokenTree,
+) -> bridge::TokenTree<
+ bridge::client::Group,
+ bridge::client::Punct,
+ bridge::client::Ident,
+ bridge::client::Literal,
+> {
+ match tree {
+ TokenTree::Group(tt) => bridge::TokenTree::Group(tt.0),
+ TokenTree::Punct(tt) => bridge::TokenTree::Punct(tt.0),
+ TokenTree::Ident(tt) => bridge::TokenTree::Ident(tt.0),
+ TokenTree::Literal(tt) => bridge::TokenTree::Literal(tt.0),
+ }
+}
+
+/// Creates a token stream containing a single token tree.
+impl From<TokenTree> for TokenStream {
+ fn from(tree: TokenTree) -> TokenStream {
+ TokenStream(Some(bridge::client::TokenStream::from_token_tree(tree_to_bridge_tree(tree))))
+ }
+}
+
+/// Non-generic helper for implementing `FromIterator<TokenStream>` and
+/// `Extend<TokenStream>` with less monomorphization in calling crates.
+struct ConcatStreamsHelper {
+ streams: Vec<bridge::client::TokenStream>,
+}
+
+impl ConcatStreamsHelper {
+ fn new(capacity: usize) -> Self {
+ ConcatStreamsHelper { streams: Vec::with_capacity(capacity) }
+ }
+
+ fn push(&mut self, stream: TokenStream) {
+ if let Some(stream) = stream.0 {
+ self.streams.push(stream);
+ }
+ }
+
+ fn build(mut self) -> TokenStream {
+ if self.streams.len() <= 1 {
+ TokenStream(self.streams.pop())
+ } else {
+ TokenStream(Some(bridge::client::TokenStream::concat_streams(None, self.streams)))
+ }
+ }
+
+ fn append_to(mut self, stream: &mut TokenStream) {
+ if self.streams.is_empty() {
+ return;
+ }
+ let base = stream.0.take();
+ if base.is_none() && self.streams.len() == 1 {
+ stream.0 = self.streams.pop();
+ } else {
+ stream.0 = Some(bridge::client::TokenStream::concat_streams(base, self.streams));
+ }
+ }
+}
+
+/// Collects a number of token trees into a single stream.
+impl iter::FromIterator<TokenTree> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
+ trees.into_iter().map(TokenStream::from).collect()
+ }
+}
+
+/// A "flattening" operation on token streams, collects token trees
+/// from multiple token streams into a single stream.
+impl iter::FromIterator<TokenStream> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+ let iter = streams.into_iter();
+ let mut builder = ConcatStreamsHelper::new(iter.size_hint().0);
+ iter.for_each(|stream| builder.push(stream));
+ builder.build()
+ }
+}
+
+impl Extend<TokenTree> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, trees: I) {
+ self.extend(trees.into_iter().map(TokenStream::from));
+ }
+}
+
+impl Extend<TokenStream> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+ // FIXME(eddyb) Use an optimized implementation if/when possible.
+ *self = iter::once(mem::replace(self, Self::new())).chain(streams).collect();
+ }
+}
+
+/// Public implementation details for the `TokenStream` type, such as iterators.
+pub mod token_stream {
+ use super::{bridge, Group, Ident, Literal, Punct, TokenStream, TokenTree};
+
+ /// An iterator over `TokenStream`'s `TokenTree`s.
+ /// The iteration is "shallow", i.e., the iterator doesn't recurse into delimited groups,
+ /// and returns whole groups as token trees.
+ #[derive(Clone)]
+ pub struct IntoIter(
+ std::vec::IntoIter<
+ bridge::TokenTree<
+ bridge::client::Group,
+ bridge::client::Punct,
+ bridge::client::Ident,
+ bridge::client::Literal,
+ >,
+ >,
+ );
+
+ impl Iterator for IntoIter {
+ type Item = TokenTree;
+
+ fn next(&mut self) -> Option<TokenTree> {
+ self.0.next().map(|tree| match tree {
+ bridge::TokenTree::Group(tt) => TokenTree::Group(Group(tt)),
+ bridge::TokenTree::Punct(tt) => TokenTree::Punct(Punct(tt)),
+ bridge::TokenTree::Ident(tt) => TokenTree::Ident(Ident(tt)),
+ bridge::TokenTree::Literal(tt) => TokenTree::Literal(Literal(tt)),
+ })
+ }
+ }
+
+ impl IntoIterator for TokenStream {
+ type Item = TokenTree;
+ type IntoIter = IntoIter;
+
+ fn into_iter(self) -> IntoIter {
+ IntoIter(self.0.map(|v| v.into_trees()).unwrap_or_default().into_iter())
+ }
+ }
+}
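The `FromIterator`/`IntoIterator` surface above is what most macro code builds on: token trees collect into a stream, and a stream iterates shallowly back into trees. A hedged sketch with hypothetical helpers, valid only inside an active macro expansion and using `Group`/`Delimiter` defined later in this file:

use std::iter::once;

// Wraps the input stream in parentheses: one Group token tree collected back
// into a TokenStream through the FromIterator<TokenTree> impl above.
fn parenthesize(input: TokenStream) -> TokenStream {
    once(TokenTree::Group(Group::new(Delimiter::Parenthesis, input))).collect()
}

// Counts top-level token trees without recursing into groups, using the
// shallow iteration described above.
fn top_level_len(stream: TokenStream) -> usize {
    stream.into_iter().count()
}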
+
+#[doc(hidden)]
+mod quote;
+
+/// A region of source code, along with macro expansion information.
+#[derive(Copy, Clone)]
+pub struct Span(bridge::client::Span);
+
+macro_rules! diagnostic_method {
+ ($name:ident, $level:expr) => {
+ /// Creates a new `Diagnostic` with the given `message` at the span
+ /// `self`.
+ pub fn $name<T: Into<String>>(self, message: T) -> Diagnostic {
+ Diagnostic::spanned(self, $level, message)
+ }
+ };
+}
+
+impl Span {
+ /// A span that resolves at the macro definition site.
+ pub fn def_site() -> Span {
+ Span(bridge::client::Span::def_site())
+ }
+
+ /// The span of the invocation of the current procedural macro.
+ /// Identifiers created with this span will be resolved as if they were written
+ /// directly at the macro call location (call-site hygiene) and other code
+ /// at the macro call site will be able to refer to them as well.
+ pub fn call_site() -> Span {
+ Span(bridge::client::Span::call_site())
+ }
+
+ /// A span that represents `macro_rules` hygiene, and sometimes resolves at the macro
+ /// definition site (local variables, labels, `$crate`) and sometimes at the macro
+ /// call site (everything else).
+ /// The span location is taken from the call-site.
+ pub fn mixed_site() -> Span {
+ Span(bridge::client::Span::mixed_site())
+ }
+
+ /// The original source file into which this span points.
+ pub fn source_file(&self) -> SourceFile {
+ SourceFile(self.0.source_file())
+ }
+
+ /// The `Span` for the tokens in the previous macro expansion from which
+ /// `self` was generated, if any.
+ pub fn parent(&self) -> Option<Span> {
+ self.0.parent().map(Span)
+ }
+
+ /// The span for the origin source code that `self` was generated from. If
+ /// this `Span` wasn't generated from other macro expansions then the return
+ /// value is the same as `*self`.
+ pub fn source(&self) -> Span {
+ Span(self.0.source())
+ }
+
+ /// Gets the starting line/column in the source file for this span.
+ pub fn start(&self) -> LineColumn {
+ self.0.start().add_1_to_column()
+ }
+
+ /// Gets the ending line/column in the source file for this span.
+ pub fn end(&self) -> LineColumn {
+ self.0.end().add_1_to_column()
+ }
+
+ /// Creates an empty span pointing to directly before this span.
+ pub fn before(&self) -> Span {
+ Span(self.0.before())
+ }
+
+ /// Creates an empty span pointing to directly after this span.
+ pub fn after(&self) -> Span {
+ Span(self.0.after())
+ }
+
+ /// Creates a new span encompassing `self` and `other`.
+ ///
+ /// Returns `None` if `self` and `other` are from different files.
+ pub fn join(&self, other: Span) -> Option<Span> {
+ self.0.join(other.0).map(Span)
+ }
+
+ /// Creates a new span with the same line/column information as `self` but
+ /// that resolves symbols as though it were at `other`.
+ pub fn resolved_at(&self, other: Span) -> Span {
+ Span(self.0.resolved_at(other.0))
+ }
+
+ /// Creates a new span with the same name resolution behavior as `self` but
+ /// with the line/column information of `other`.
+ pub fn located_at(&self, other: Span) -> Span {
+ other.resolved_at(*self)
+ }
+
+ /// Compares two spans to see if they're equal.
+ pub fn eq(&self, other: &Span) -> bool {
+ self.0 == other.0
+ }
+
+ /// Returns the source text behind a span. This preserves the original source
+ /// code, including spaces and comments. It only returns a result if the span
+ /// corresponds to real source code.
+ ///
+ /// Note: The observable result of a macro should only rely on the tokens and
+ /// not on this source text. The result of this function is a best effort to
+ /// be used for diagnostics only.
+ pub fn source_text(&self) -> Option<String> {
+ self.0.source_text()
+ }
+
+ // Used by the implementation of `Span::quote`
+ #[doc(hidden)]
+ pub fn save_span(&self) -> usize {
+ self.0.save_span()
+ }
+
+ // Used by the implementation of `Span::quote`
+ #[doc(hidden)]
+ pub fn recover_proc_macro_span(id: usize) -> Span {
+ Span(bridge::client::Span::recover_proc_macro_span(id))
+ }
+
+ diagnostic_method!(error, Level::Error);
+ diagnostic_method!(warning, Level::Warning);
+ diagnostic_method!(note, Level::Note);
+ diagnostic_method!(help, Level::Help);
+}
+
+/// Prints a span in a form convenient for debugging.
+impl fmt::Debug for Span {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.0.fmt(f)
+ }
+}
+
+/// A line-column pair representing the start or end of a `Span`.
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub struct LineColumn {
+ /// The 1-indexed line in the source file on which the span starts or ends (inclusive).
+ pub line: usize,
+ /// The 1-indexed column (number of bytes in UTF-8 encoding) in the source
+ /// file on which the span starts or ends (inclusive).
+ pub column: usize,
+}
+
+impl LineColumn {
+ fn add_1_to_column(self) -> Self {
+ LineColumn { line: self.line, column: self.column + 1 }
+ }
+}
+
+impl Ord for LineColumn {
+ fn cmp(&self, other: &Self) -> Ordering {
+ self.line.cmp(&other.line).then(self.column.cmp(&other.column))
+ }
+}
+
+impl PartialOrd for LineColumn {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ Some(self.cmp(other))
+ }
+}
+
+/// The source file of a given `Span`.
+#[derive(Clone)]
+pub struct SourceFile(bridge::client::SourceFile);
+
+impl SourceFile {
+ /// Gets the path to this source file.
+ ///
+ /// ### Note
+ /// If the code span associated with this `SourceFile` was generated by an external macro,
+ /// this might not be an actual path on the filesystem. Use [`is_real`] to check.
+ ///
+ /// Also note that even if `is_real` returns `true`, if `--remap-path-prefix` was passed on
+ /// the command line, the path as given might not actually be valid.
+ ///
+ /// [`is_real`]: Self::is_real
+ pub fn path(&self) -> PathBuf {
+ PathBuf::from(self.0.path())
+ }
+
+ /// Returns `true` if this source file is a real source file, and not generated by an external
+ /// macro's expansion.
+ pub fn is_real(&self) -> bool {
+ // This is a hack until intercrate spans are implemented and we can have real source files
+ // for spans generated in external macros.
+ // https://github.com/rust-lang/rust/pull/43604#issuecomment-333334368
+ self.0.is_real()
+ }
+}
+
+impl fmt::Debug for SourceFile {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("SourceFile")
+ .field("path", &self.path())
+ .field("is_real", &self.is_real())
+ .finish()
+ }
+}
+
+impl PartialEq for SourceFile {
+ fn eq(&self, other: &Self) -> bool {
+ self.0.eq(&other.0)
+ }
+}
+
+impl Eq for SourceFile {}
+
+/// A single token or a delimited sequence of token trees (e.g., `[1, (), ..]`).
+#[derive(Clone)]
+pub enum TokenTree {
+ /// A token stream surrounded by bracket delimiters.
+ Group(Group),
+ /// An identifier.
+ Ident(Ident),
+ /// A single punctuation character (`+`, `,`, `$`, etc.).
+ Punct(Punct),
+ /// A literal character (`'a'`), string (`"hello"`), number (`2.3`), etc.
+ Literal(Literal),
+}
+
+impl TokenTree {
+ /// Returns the span of this tree, delegating to the `span` method of
+ /// the contained token or a delimited stream.
+ pub fn span(&self) -> Span {
+ match *self {
+ TokenTree::Group(ref t) => t.span(),
+ TokenTree::Ident(ref t) => t.span(),
+ TokenTree::Punct(ref t) => t.span(),
+ TokenTree::Literal(ref t) => t.span(),
+ }
+ }
+
+ /// Configures the span for *only this token*.
+ ///
+ /// Note that if this token is a `Group` then this method will not configure
+ /// the span of each of the internal tokens, this will simply delegate to
+ /// the `set_span` method of each variant.
+ pub fn set_span(&mut self, span: Span) {
+ match *self {
+ TokenTree::Group(ref mut t) => t.set_span(span),
+ TokenTree::Ident(ref mut t) => t.set_span(span),
+ TokenTree::Punct(ref mut t) => t.set_span(span),
+ TokenTree::Literal(ref mut t) => t.set_span(span),
+ }
+ }
+}
+
+/// Prints the token tree in a form convenient for debugging.
+impl fmt::Debug for TokenTree {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ // Each of these has the name in the struct type in the derived debug,
+ // so don't bother with an extra layer of indirection
+ match *self {
+ TokenTree::Group(ref tt) => tt.fmt(f),
+ TokenTree::Ident(ref tt) => tt.fmt(f),
+ TokenTree::Punct(ref tt) => tt.fmt(f),
+ TokenTree::Literal(ref tt) => tt.fmt(f),
+ }
+ }
+}
+
+impl From<Group> for TokenTree {
+ fn from(g: Group) -> TokenTree {
+ TokenTree::Group(g)
+ }
+}
+
+impl From<Ident> for TokenTree {
+ fn from(g: Ident) -> TokenTree {
+ TokenTree::Ident(g)
+ }
+}
+
+impl From<Punct> for TokenTree {
+ fn from(g: Punct) -> TokenTree {
+ TokenTree::Punct(g)
+ }
+}
+
+impl From<Literal> for TokenTree {
+ fn from(g: Literal) -> TokenTree {
+ TokenTree::Literal(g)
+ }
+}
+
+/// Prints the token tree as a string that is supposed to be losslessly convertible back
+/// into the same token tree (modulo spans), except for possibly `TokenTree::Group`s
+/// with `Delimiter::None` delimiters and negative numeric literals.
+impl fmt::Display for TokenTree {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.to_string())
+ }
+}
+
+/// A delimited token stream.
+///
+/// A `Group` internally contains a `TokenStream` which is surrounded by `Delimiter`s.
+#[derive(Clone)]
+pub struct Group(bridge::client::Group);
+
+/// Describes how a sequence of token trees is delimited.
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum Delimiter {
+ /// `( ... )`
+ Parenthesis,
+ /// `{ ... }`
+ Brace,
+ /// `[ ... ]`
+ Bracket,
+ /// `Ø ... Ø`
+ /// An invisible delimiter that may, for example, appear around tokens coming from a
+ /// "macro variable" `$var`. It is important to preserve operator priorities in cases like
+ /// `$var * 3` where `$var` is `1 + 2`.
+ /// Invisible delimiters might not survive roundtrip of a token stream through a string.
+ None,
+}
+
+impl Group {
+ /// Creates a new `Group` with the given delimiter and token stream.
+ ///
+ /// This constructor will set the span for this group to
+ /// `Span::call_site()`. To change the span you can use the `set_span`
+ /// method below.
+ pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group {
+ Group(bridge::client::Group::new(delimiter, stream.0))
+ }
+
+ /// Returns the delimiter of this `Group`
+ pub fn delimiter(&self) -> Delimiter {
+ self.0.delimiter()
+ }
+
+ /// Returns the `TokenStream` of tokens that are delimited in this `Group`.
+ ///
+ /// Note that the returned token stream does not include the delimiter
+ /// returned above.
+ pub fn stream(&self) -> TokenStream {
+ TokenStream(Some(self.0.stream()))
+ }
+
+ /// Returns the span for the delimiters of this token stream, spanning the
+ /// entire `Group`.
+ ///
+ /// ```text
+ /// pub fn span(&self) -> Span {
+ /// ^^^^^^^
+ /// ```
+ pub fn span(&self) -> Span {
+ Span(self.0.span())
+ }
+
+ /// Returns the span pointing to the opening delimiter of this group.
+ ///
+ /// ```text
+ /// pub fn span_open(&self) -> Span {
+ /// ^
+ /// ```
+ pub fn span_open(&self) -> Span {
+ Span(self.0.span_open())
+ }
+
+ /// Returns the span pointing to the closing delimiter of this group.
+ ///
+ /// ```text
+ /// pub fn span_close(&self) -> Span {
+ /// ^
+ /// ```
+ pub fn span_close(&self) -> Span {
+ Span(self.0.span_close())
+ }
+
+ /// Configures the span for this `Group`'s delimiters, but not its internal
+ /// tokens.
+ ///
+ /// This method will **not** set the span of all the internal tokens spanned
+ /// by this group, but rather it will only set the span of the delimiter
+ /// tokens at the level of the `Group`.
+ pub fn set_span(&mut self, span: Span) {
+ self.0.set_span(span.0);
+ }
+}
+
+/// Prints the group as a string that should be losslessly convertible back
+/// into the same group (modulo spans), except for possibly `TokenTree::Group`s
+/// with `Delimiter::None` delimiters.
+impl fmt::Display for Group {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.to_string())
+ }
+}
+
+impl fmt::Debug for Group {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("Group")
+ .field("delimiter", &self.delimiter())
+ .field("stream", &self.stream())
+ .field("span", &self.span())
+ .finish()
+ }
+}
+
+/// A `Punct` is a single punctuation character such as `+`, `-` or `#`.
+///
+/// Multi-character operators like `+=` are represented as two instances of `Punct` with different
+/// forms of `Spacing` returned.
+#[derive(Clone)]
+pub struct Punct(bridge::client::Punct);
+
+/// Describes whether a `Punct` is followed immediately by another `Punct` ([`Spacing::Joint`]) or
+/// by a different token or whitespace ([`Spacing::Alone`]).
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum Spacing {
+ /// A `Punct` is not immediately followed by another `Punct`.
+ /// E.g. `+` is `Alone` in `+ =`, `+ident` and `+()`.
+ Alone,
+ /// A `Punct` is immediately followed by another `Punct`.
+ /// E.g. `+` is `Joint` in `+=` and `++`.
+ ///
+ /// Additionally, single quote `'` can join with identifiers to form lifetimes: `'ident`.
+ Joint,
+}
+
+impl Punct {
+ /// Creates a new `Punct` from the given character and spacing.
+ /// The `ch` argument must be a valid punctuation character permitted by the language,
+ /// otherwise the function will panic.
+ ///
+ /// The returned `Punct` will have the default span of `Span::call_site()`
+ /// which can be further configured with the `set_span` method below.
+ pub fn new(ch: char, spacing: Spacing) -> Punct {
+ Punct(bridge::client::Punct::new(ch, spacing))
+ }
+
+ /// Returns the value of this punctuation character as `char`.
+ pub fn as_char(&self) -> char {
+ self.0.as_char()
+ }
+
+ /// Returns the spacing of this punctuation character, indicating whether it's immediately
+ /// followed by another `Punct` in the token stream, so they can potentially be combined into
+ /// a multi-character operator (`Joint`), or it's followed by some other token or whitespace
+ /// (`Alone`) so the operator has certainly ended.
+ pub fn spacing(&self) -> Spacing {
+ self.0.spacing()
+ }
+
+ /// Returns the span for this punctuation character.
+ pub fn span(&self) -> Span {
+ Span(self.0.span())
+ }
+
+ /// Configure the span for this punctuation character.
+ pub fn set_span(&mut self, span: Span) {
+ self.0 = self.0.with_span(span.0);
+ }
+}
+
+/// Prints the punctuation character as a string that should be losslessly convertible
+/// back into the same character.
+impl fmt::Display for Punct {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.to_string())
+ }
+}
+
+impl fmt::Debug for Punct {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("Punct")
+ .field("ch", &self.as_char())
+ .field("spacing", &self.spacing())
+ .field("span", &self.span())
+ .finish()
+ }
+}
+
+impl PartialEq<char> for Punct {
+ fn eq(&self, rhs: &char) -> bool {
+ self.as_char() == *rhs
+ }
+}
+
+impl PartialEq<Punct> for char {
+ fn eq(&self, rhs: &Punct) -> bool {
+ *self == rhs.as_char()
+ }
+}
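The `Spacing` rules documented above matter when emitting multi-character operators: `+=` must be a `Joint` `+` followed by an `Alone` `=`, or downstream parsers will see two unrelated operators. A hedged sketch with a hypothetical helper, valid only inside an active macro expansion:

// Emits the two-Punct sequence for `+=`: the `+` is Joint because another
// Punct follows immediately, while the `=` is Alone.
fn plus_eq() -> TokenStream {
    vec![
        TokenTree::Punct(Punct::new('+', Spacing::Joint)),
        TokenTree::Punct(Punct::new('=', Spacing::Alone)),
    ]
    .into_iter()
    .collect()
}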
+
+/// An identifier (`ident`).
+#[derive(Clone)]
+pub struct Ident(bridge::client::Ident);
+
+impl Ident {
+ /// Creates a new `Ident` with the given `string` as well as the specified
+ /// `span`.
+ /// The `string` argument must be a valid identifier permitted by the
+ /// language (including keywords, e.g. `self` or `fn`). Otherwise, the function will panic.
+ ///
+ /// Note that `span`, currently in rustc, configures the hygiene information
+ /// for this identifier.
+ ///
+ /// As of this time `Span::call_site()` explicitly opts-in to "call-site" hygiene
+ /// meaning that identifiers created with this span will be resolved as if they were written
+ /// directly at the location of the macro call, and other code at the macro call site will be
+ /// able to refer to them as well.
+ ///
+ /// Later spans like `Span::def_site()` will allow opting in to "definition-site" hygiene
+ /// meaning that identifiers created with this span will be resolved at the location of the
+ /// macro definition and other code at the macro call site will not be able to refer to them.
+ ///
+ /// Due to the current importance of hygiene this constructor, unlike other
+ /// tokens, requires a `Span` to be specified at construction.
+ pub fn new(string: &str, span: Span) -> Ident {
+ Ident(bridge::client::Ident::new(string, span.0, false))
+ }
+
+ /// Same as `Ident::new`, but creates a raw identifier (`r#ident`).
+ /// The `string` argument must be a valid identifier permitted by the language
+ /// (including keywords, e.g. `fn`). Keywords which are usable in path segments
+ /// (e.g. `self`, `super`) are not supported, and will cause a panic.
+ pub fn new_raw(string: &str, span: Span) -> Ident {
+ Ident(bridge::client::Ident::new(string, span.0, true))
+ }
+
+ /// Returns the span of this `Ident`, encompassing the entire string returned
+ /// by [`to_string`](Self::to_string).
+ pub fn span(&self) -> Span {
+ Span(self.0.span())
+ }
+
+ /// Configures the span of this `Ident`, possibly changing its hygiene context.
+ pub fn set_span(&mut self, span: Span) {
+ self.0 = self.0.with_span(span.0);
+ }
+}
+
+/// Prints the identifier as a string that should be losslessly convertible
+/// back into the same identifier.
+impl fmt::Display for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.to_string())
+ }
+}
+
+impl fmt::Debug for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("Ident")
+ .field("ident", &self.to_string())
+ .field("span", &self.span())
+ .finish()
+ }
+}
+
+/// A literal string (`"hello"`), byte string (`b"hello"`),
+/// character (`'a'`), byte character (`b'a'`), an integer or floating point number
+/// with or without a suffix (`1`, `1u8`, `2.3`, `2.3f32`).
+/// Boolean literals like `true` and `false` do not belong here; they are `Ident`s.
+#[derive(Clone)]
+pub struct Literal(bridge::client::Literal);
+
+macro_rules! suffixed_int_literals {
+ ($($name:ident => $kind:ident,)*) => ($(
+ /// Creates a new suffixed integer literal with the specified value.
+ ///
+ /// This function will create an integer like `1u32` where the integer
+ /// value specified is the first part of the token and the integral type is
+ /// appended as the suffix at the end.
+ /// Literals created from negative numbers might not survive round-trips through
+ /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+ ///
+ /// Literals created through this method have the `Span::call_site()`
+ /// span by default, which can be configured with the `set_span` method
+ /// below.
+ pub fn $name(n: $kind) -> Literal {
+ Literal(bridge::client::Literal::typed_integer(&n.to_string(), stringify!($kind)))
+ }
+ )*)
+}
+
+macro_rules! unsuffixed_int_literals {
+ ($($name:ident => $kind:ident,)*) => ($(
+ /// Creates a new unsuffixed integer literal with the specified value.
+ ///
+ /// This function will create an integer like `1` where the integer
+ /// value specified is the first part of the token. No suffix is
+ /// specified on this token, meaning that invocations like
+ /// `Literal::i8_unsuffixed(1)` are equivalent to
+ /// `Literal::u32_unsuffixed(1)`.
+ /// Literals created from negative numbers might not survive round-trips through
+ /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+ ///
+ /// Literals created through this method have the `Span::call_site()`
+ /// span by default, which can be configured with the `set_span` method
+ /// below.
+ pub fn $name(n: $kind) -> Literal {
+ Literal(bridge::client::Literal::integer(&n.to_string()))
+ }
+ )*)
+}
+
+impl Literal {
+ suffixed_int_literals! {
+ u8_suffixed => u8,
+ u16_suffixed => u16,
+ u32_suffixed => u32,
+ u64_suffixed => u64,
+ u128_suffixed => u128,
+ usize_suffixed => usize,
+ i8_suffixed => i8,
+ i16_suffixed => i16,
+ i32_suffixed => i32,
+ i64_suffixed => i64,
+ i128_suffixed => i128,
+ isize_suffixed => isize,
+ }
+
+ unsuffixed_int_literals! {
+ u8_unsuffixed => u8,
+ u16_unsuffixed => u16,
+ u32_unsuffixed => u32,
+ u64_unsuffixed => u64,
+ u128_unsuffixed => u128,
+ usize_unsuffixed => usize,
+ i8_unsuffixed => i8,
+ i16_unsuffixed => i16,
+ i32_unsuffixed => i32,
+ i64_unsuffixed => i64,
+ i128_unsuffixed => i128,
+ isize_unsuffixed => isize,
+ }
+
+ /// Creates a new unsuffixed floating-point literal.
+ ///
+ /// This constructor is similar to those like `Literal::i8_unsuffixed` where
+ /// the float's value is emitted directly into the token but no suffix is
+ /// used, so it may be inferred to be an `f64` later in the compiler.
+ /// Literals created from negative numbers might not survive round-trips through
+ /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+ ///
+ /// # Panics
+ ///
+ /// This function requires that the specified float is finite, for
+ /// example if it is infinity or NaN this function will panic.
+ pub fn f32_unsuffixed(n: f32) -> Literal {
+ if !n.is_finite() {
+ panic!("Invalid float literal {n}");
+ }
+ let mut repr = n.to_string();
+ if !repr.contains('.') {
+ repr.push_str(".0");
+ }
+ Literal(bridge::client::Literal::float(&repr))
+ }
+
+ /// Creates a new suffixed floating-point literal.
+ ///
+ /// This constructor will create a literal like `1.0f32` where the value
+ /// specified is the preceding part of the token and `f32` is the suffix of
+ /// the token. This token will always be inferred to be an `f32` in the
+ /// compiler.
+ /// Literals created from negative numbers might not survive round-trips through
+ /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+ ///
+ /// # Panics
+ ///
+ /// This function requires that the specified float is finite, for
+ /// example if it is infinity or NaN this function will panic.
+ pub fn f32_suffixed(n: f32) -> Literal {
+ if !n.is_finite() {
+ panic!("Invalid float literal {n}");
+ }
+ Literal(bridge::client::Literal::f32(&n.to_string()))
+ }
+
+ /// Creates a new unsuffixed floating-point literal.
+ ///
+ /// This constructor is similar to those like `Literal::i8_unsuffixed` where
+ /// the float's value is emitted directly into the token but no suffix is
+ /// used, so it may be inferred to be an `f64` later in the compiler.
+ /// Literals created from negative numbers might not survive round-trips through
+ /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+ ///
+ /// # Panics
+ ///
+ /// This function requires that the specified float is finite, for
+ /// example if it is infinity or NaN this function will panic.
+ pub fn f64_unsuffixed(n: f64) -> Literal {
+ if !n.is_finite() {
+ panic!("Invalid float literal {n}");
+ }
+ let mut repr = n.to_string();
+ if !repr.contains('.') {
+ repr.push_str(".0");
+ }
+ Literal(bridge::client::Literal::float(&repr))
+ }
+
+ /// Creates a new suffixed floating-point literal.
+ ///
+ /// This constructor will create a literal like `1.0f64` where the value
+ /// specified is the preceding part of the token and `f64` is the suffix of
+ /// the token. This token will always be inferred to be an `f64` in the
+ /// compiler.
+ /// Literals created from negative numbers might not survive round-trips through
+ /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+ ///
+ /// # Panics
+ ///
+ /// This function requires that the specified float is finite, for
+ /// example if it is infinity or NaN this function will panic.
+ pub fn f64_suffixed(n: f64) -> Literal {
+ if !n.is_finite() {
+ panic!("Invalid float literal {n}");
+ }
+ Literal(bridge::client::Literal::f64(&n.to_string()))
+ }
+
+ /// String literal.
+ pub fn string(string: &str) -> Literal {
+ Literal(bridge::client::Literal::string(string))
+ }
+
+ /// Character literal.
+ pub fn character(ch: char) -> Literal {
+ Literal(bridge::client::Literal::character(ch))
+ }
+
+ /// Byte string literal.
+ pub fn byte_string(bytes: &[u8]) -> Literal {
+ Literal(bridge::client::Literal::byte_string(bytes))
+ }
+
+ /// Returns the span encompassing this literal.
+ pub fn span(&self) -> Span {
+ Span(self.0.span())
+ }
+
+ /// Configures the span associated with this literal.
+ pub fn set_span(&mut self, span: Span) {
+ self.0.set_span(span.0);
+ }
+
+ /// Returns a `Span` that is a subset of `self.span()` containing only the
+ /// source bytes in range `range`. Returns `None` if the would-be trimmed
+ /// span is outside the bounds of `self`.
+ // FIXME(SergioBenitez): check that the byte range starts and ends at a
+ // UTF-8 boundary of the source. otherwise, it's likely that a panic will
+ // occur elsewhere when the source text is printed.
+ // FIXME(SergioBenitez): there is no way for the user to know what
+ // `self.span()` actually maps to, so this method can currently only be
+ // called blindly. For example, `to_string()` for the character 'c' returns
+ // "'\u{63}'"; there is no way for the user to know whether the source text
+ // was 'c' or whether it was '\u{63}'.
+ pub fn subspan<R: RangeBounds<usize>>(&self, range: R) -> Option<Span> {
+ self.0.subspan(range.start_bound().cloned(), range.end_bound().cloned()).map(Span)
+ }
+}
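+
+// Hedged usage sketch: a few of the constructors above, producing the tokens
+// `42u8`, `2.5` and `"hi"` (requires a live bridge when actually executed).
+#[allow(dead_code)]
+fn _literal_usage_sketch() {
+ let a = Literal::u8_suffixed(42);
+ let b = Literal::f64_unsuffixed(2.5);
+ let c = Literal::string("hi");
+ let _ = (a, b, c);
+}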
+
+/// Parse a single literal from its stringified representation.
+///
+/// In order to parse successfully, the input string must not contain anything
+/// but the literal token. Specifically, it must not contain whitespace or
+/// comments in addition to the literal.
+///
+/// The resulting literal token will have a `Span::call_site()` span.
+///
+/// NOTE: some errors may cause panics instead of returning `LexError`. We
+/// reserve the right to change these errors into `LexError`s later.
+impl FromStr for Literal {
+ type Err = LexError;
+
+ fn from_str(src: &str) -> Result<Self, LexError> {
+ match bridge::client::Literal::from_str(src) {
+ Ok(literal) => Ok(Literal(literal)),
+ Err(()) => Err(LexError),
+ }
+ }
+}
+
+/// Prints the literal as a string that should be losslessly convertible
+/// back into the same literal (except for possible rounding for floating point literals).
+impl fmt::Display for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.to_string())
+ }
+}
+
+impl fmt::Debug for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.0.fmt(f)
+ }
+}
+
+/// Tracked access to environment variables.
+pub mod tracked_env {
+ use std::env::{self, VarError};
+ use std::ffi::OsStr;
+
+ /// Retrieve an environment variable and add it to build dependency info.
+ /// The build system executing the compiler will know that the variable was accessed during
+ /// compilation and will be able to rerun the build when the value of that variable changes.
+ /// Besides the dependency tracking, this function should be equivalent to `env::var` from the
+ /// standard library, except that the argument must be UTF-8.
+ pub fn var<K: AsRef<OsStr> + AsRef<str>>(key: K) -> Result<String, VarError> {
+ let key: &str = key.as_ref();
+ let value = env::var(key);
+ super::bridge::client::FreeFunctions::track_env_var(key, value.as_deref().ok());
+ value
+ }
+}
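+
+// Hedged usage sketch ("MY_CODEGEN_CONFIG" is a made-up variable name): read a
+// variable through the tracked wrapper so the build is rerun when it changes.
+#[allow(dead_code)]
+fn _tracked_env_sketch() -> Result<String, std::env::VarError> {
+ tracked_env::var("MY_CODEGEN_CONFIG")
+}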
+
+/// Tracked access to additional files.
+pub mod tracked_path {
+
+ /// Track a file explicitly.
+ ///
+ /// Commonly used for tracking asset preprocessing.
+ pub fn path<P: AsRef<str>>(path: P) {
+ let path: &str = path.as_ref();
+ super::bridge::client::FreeFunctions::track_path(path);
+ }
+}
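+
+// Hedged usage sketch ("templates/page.html" is a made-up asset path): record
+// a file dependency so that edits to the file invalidate the macro's output.
+#[allow(dead_code)]
+fn _tracked_path_sketch() {
+ tracked_path::path("templates/page.html");
+}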
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/quote.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/quote.rs
new file mode 100644
index 000000000..39309faa4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/quote.rs
@@ -0,0 +1,139 @@
+//! # Quasiquoter
+//! This file contains the implementation internals of the quasiquoter provided by `quote!`.
+
+//! This quasiquoter uses macros 2.0 hygiene to reliably access
+//! items from `proc_macro`, to build a `proc_macro::TokenStream`.
+
+use super::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
+
+macro_rules! quote_tt {
+ (($($t:tt)*)) => { Group::new(Delimiter::Parenthesis, quote!($($t)*)) };
+ ([$($t:tt)*]) => { Group::new(Delimiter::Bracket, quote!($($t)*)) };
+ ({$($t:tt)*}) => { Group::new(Delimiter::Brace, quote!($($t)*)) };
+ (,) => { Punct::new(',', Spacing::Alone) };
+ (.) => { Punct::new('.', Spacing::Alone) };
+ (;) => { Punct::new(';', Spacing::Alone) };
+ (!) => { Punct::new('!', Spacing::Alone) };
+ (<) => { Punct::new('<', Spacing::Alone) };
+ (>) => { Punct::new('>', Spacing::Alone) };
+ (&) => { Punct::new('&', Spacing::Alone) };
+ (=) => { Punct::new('=', Spacing::Alone) };
+ ($i:ident) => { Ident::new(stringify!($i), Span::def_site()) };
+}
+
+macro_rules! quote_ts {
+ ((@ $($t:tt)*)) => { $($t)* };
+ (::) => {
+ [
+ TokenTree::from(Punct::new(':', Spacing::Joint)),
+ TokenTree::from(Punct::new(':', Spacing::Alone)),
+ ].iter()
+ .cloned()
+ .map(|mut x| {
+ x.set_span(Span::def_site());
+ x
+ })
+ .collect::<TokenStream>()
+ };
+ ($t:tt) => { TokenTree::from(quote_tt!($t)) };
+}
+
+/// Simpler version of the real `quote!` macro, implemented solely
+/// through `macro_rules`, for bootstrapping the real implementation
+/// (see the `quote` function), which does not have access to the
+/// real `quote!` macro due to the `proc_macro` crate not being
+/// able to depend on itself.
+///
+/// Note: supported tokens are a subset of the real `quote!`, but
+/// unquoting is different: instead of `$x`, this uses `(@ expr)`.
+macro_rules! quote {
+ () => { TokenStream::new() };
+ ($($t:tt)*) => {
+ [
+ $(TokenStream::from(quote_ts!($t)),)*
+ ].iter().cloned().collect::<TokenStream>()
+ };
+}
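+
+// Hedged sketch of the `(@ expr)` unquote form described above: splice an
+// existing `TokenTree` into a freshly built stream (identifiers produced by
+// `quote!` carry `Span::def_site()` hygiene).
+#[allow(dead_code)]
+fn _quote_unquote_sketch(tree: TokenTree) -> TokenStream {
+ quote!(let spliced = (@ tree) ;)
+}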
+
+/// Quote a `TokenStream` into a `TokenStream`.
+/// This is the actual implementation of the `quote!()` proc macro.
+///
+/// It is loaded by the compiler in `register_builtin_macros`.
+pub fn quote(stream: TokenStream) -> TokenStream {
+ if stream.is_empty() {
+ return quote!(super::TokenStream::new());
+ }
+ let proc_macro_crate = quote!(crate);
+ let mut after_dollar = false;
+ let tokens = stream
+ .into_iter()
+ .filter_map(|tree| {
+ if after_dollar {
+ after_dollar = false;
+ match tree {
+ TokenTree::Ident(_) => {
+ return Some(quote!(Into::<super::TokenStream>::into(
+ Clone::clone(&(@ tree))),));
+ }
+ TokenTree::Punct(ref tt) if tt.as_char() == '$' => {}
+ _ => panic!("`$` must be followed by an ident or `$` in `quote!`"),
+ }
+ } else if let TokenTree::Punct(ref tt) = tree {
+ if tt.as_char() == '$' {
+ after_dollar = true;
+ return None;
+ }
+ }
+
+ Some(quote!(super::TokenStream::from((@ match tree {
+ TokenTree::Punct(tt) => quote!(super::TokenTree::Punct(super::Punct::new(
+ (@ TokenTree::from(Literal::character(tt.as_char()))),
+ (@ match tt.spacing() {
+ Spacing::Alone => quote!(super::Spacing::Alone),
+ Spacing::Joint => quote!(super::Spacing::Joint),
+ }),
+ ))),
+ TokenTree::Group(tt) => quote!(super::TokenTree::Group(super::Group::new(
+ (@ match tt.delimiter() {
+ Delimiter::Parenthesis => quote!(super::Delimiter::Parenthesis),
+ Delimiter::Brace => quote!(super::Delimiter::Brace),
+ Delimiter::Bracket => quote!(super::Delimiter::Bracket),
+ Delimiter::None => quote!(super::Delimiter::None),
+ }),
+ (@ quote(tt.stream())),
+ ))),
+ TokenTree::Ident(tt) => quote!(super::TokenTree::Ident(super::Ident::new(
+ (@ TokenTree::from(Literal::string(&tt.to_string()))),
+ (@ quote_span(proc_macro_crate.clone(), tt.span())),
+ ))),
+ TokenTree::Literal(tt) => quote!(super::TokenTree::Literal({
+ let mut iter = (@ TokenTree::from(Literal::string(&tt.to_string())))
+ .parse::<super::TokenStream>()
+ .unwrap()
+ .into_iter();
+ if let (Some(super::TokenTree::Literal(mut lit)), None) =
+ (iter.next(), iter.next())
+ {
+ lit.set_span((@ quote_span(proc_macro_crate.clone(), tt.span())));
+ lit
+ } else {
+ unreachable!()
+ }
+ }))
+ })),))
+ })
+ .collect::<TokenStream>();
+
+ if after_dollar {
+ panic!("unexpected trailing `$` in `quote!`");
+ }
+
+ quote!([(@ tokens)].iter().cloned().collect::<super::TokenStream>())
+}
+
+/// Quote a `Span` into a `TokenStream`.
+/// This is needed to implement a custom quoter.
+pub fn quote_span(proc_macro_crate: TokenStream, span: Span) -> TokenStream {
+ let id = span.save_span();
+ quote!((@ proc_macro_crate ) ::Span::recover_proc_macro_span((@ TokenTree::from(Literal::usize_unsuffixed(id)))))
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/ra_server.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/ra_server.rs
new file mode 100644
index 000000000..05a565fbf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/ra_server.rs
@@ -0,0 +1,834 @@
+//! Rustc proc-macro server implementation with tt
+//!
+//! Based on an idea from <https://github.com/fedochet/rust-proc-macro-expander>.
+//! The lib-proc-macro server backend is `TokenStream`-agnostic, so any
+//! `TokenStream` implementation could be plugged in.
+//! The original implementation from fedochet uses proc-macro2 as its backend;
+//! we use tt instead for better integration with rust-analyzer.
+//!
+//! FIXME: No span and source file information is implemented yet
+
+use super::proc_macro::bridge::{self, server};
+
+use std::collections::HashMap;
+use std::hash::Hash;
+use std::iter::FromIterator;
+use std::ops::Bound;
+use std::{ascii, vec::IntoIter};
+
+type Group = tt::Subtree;
+type TokenTree = tt::TokenTree;
+type Punct = tt::Punct;
+type Spacing = tt::Spacing;
+type Literal = tt::Literal;
+type Span = tt::TokenId;
+
+#[derive(Debug, Default, Clone)]
+pub struct TokenStream {
+ pub token_trees: Vec<TokenTree>,
+}
+
+impl TokenStream {
+ pub fn new() -> Self {
+ TokenStream::default()
+ }
+
+ pub fn with_subtree(subtree: tt::Subtree) -> Self {
+ if subtree.delimiter.is_some() {
+ TokenStream { token_trees: vec![TokenTree::Subtree(subtree)] }
+ } else {
+ TokenStream { token_trees: subtree.token_trees }
+ }
+ }
+
+ pub fn into_subtree(self) -> tt::Subtree {
+ tt::Subtree { delimiter: None, token_trees: self.token_trees }
+ }
+
+ pub fn is_empty(&self) -> bool {
+ self.token_trees.is_empty()
+ }
+}
+
+/// Creates a token stream containing a single token tree.
+impl From<TokenTree> for TokenStream {
+ fn from(tree: TokenTree) -> TokenStream {
+ TokenStream { token_trees: vec![tree] }
+ }
+}
+
+/// Collects a number of token trees into a single stream.
+impl FromIterator<TokenTree> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
+ trees.into_iter().map(TokenStream::from).collect()
+ }
+}
+
+/// A "flattening" operation on token streams, collects token trees
+/// from multiple token streams into a single stream.
+impl FromIterator<TokenStream> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+ let mut builder = TokenStreamBuilder::new();
+ streams.into_iter().for_each(|stream| builder.push(stream));
+ builder.build()
+ }
+}
+
+impl Extend<TokenTree> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, trees: I) {
+ self.extend(trees.into_iter().map(TokenStream::from));
+ }
+}
+
+impl Extend<TokenStream> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+ for item in streams {
+ for tkn in item {
+ match tkn {
+ tt::TokenTree::Subtree(subtree) if subtree.delimiter.is_none() => {
+ self.token_trees.extend(subtree.token_trees);
+ }
+ _ => {
+ self.token_trees.push(tkn);
+ }
+ }
+ }
+ }
+ }
+}
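+
+// Hedged sketch of the flattening behaviour: collecting several streams
+// concatenates their (undelimited) token trees into one flat stream.
+#[allow(dead_code)]
+fn _token_stream_flatten_sketch(a: TokenStream, b: TokenStream) -> TokenStream {
+ vec![a, b].into_iter().collect()
+}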
+
+#[derive(Clone)]
+pub struct SourceFile {
+ // FIXME stub
+}
+
+type Level = super::proc_macro::Level;
+type LineColumn = super::proc_macro::LineColumn;
+
+/// A structure representing a diagnostic message and associated children
+/// messages.
+#[derive(Clone, Debug)]
+pub struct Diagnostic {
+ level: Level,
+ message: String,
+ spans: Vec<Span>,
+ children: Vec<Diagnostic>,
+}
+
+impl Diagnostic {
+ /// Creates a new diagnostic with the given `level` and `message`.
+ pub fn new<T: Into<String>>(level: Level, message: T) -> Diagnostic {
+ Diagnostic { level, message: message.into(), spans: vec![], children: vec![] }
+ }
+}
+
+// The rustc server-side `Ident` has to be `Copy`,
+// so we use an interned stub here to work around that.
+#[derive(Hash, Eq, PartialEq, Copy, Clone)]
+pub struct IdentId(u32);
+
+#[derive(Clone, Hash, Eq, PartialEq)]
+struct IdentData(tt::Ident);
+
+#[derive(Default)]
+struct IdentInterner {
+ idents: HashMap<IdentData, u32>,
+ ident_data: Vec<IdentData>,
+}
+
+impl IdentInterner {
+ fn intern(&mut self, data: &IdentData) -> u32 {
+ if let Some(index) = self.idents.get(data) {
+ return *index;
+ }
+
+ let index = self.idents.len() as u32;
+ self.ident_data.push(data.clone());
+ self.idents.insert(data.clone(), index);
+ index
+ }
+
+ fn get(&self, index: u32) -> &IdentData {
+ &self.ident_data[index as usize]
+ }
+
+ #[allow(unused)]
+ fn get_mut(&mut self, index: u32) -> &mut IdentData {
+ self.ident_data.get_mut(index as usize).expect("Should be consistent")
+ }
+}
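+
+// Hedged sketch: interning the same identifier twice yields the same index,
+// which is what lets the server-side `IdentId` stay `Copy`.
+#[allow(dead_code)]
+fn _ident_interner_sketch() {
+ let mut interner = IdentInterner::default();
+ let data = IdentData(tt::Ident { text: "foo".into(), id: tt::TokenId::unspecified() });
+ let first = interner.intern(&data);
+ let second = interner.intern(&data);
+ assert_eq!(first, second);
+}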
+
+pub struct TokenStreamBuilder {
+ acc: TokenStream,
+}
+
+/// Public implementation details for the `TokenStream` type, such as iterators.
+pub mod token_stream {
+ use std::str::FromStr;
+
+ use super::{TokenStream, TokenTree};
+
+ /// An iterator over `TokenStream`'s `TokenTree`s.
+ /// The iteration is "shallow", i.e., the iterator doesn't recurse into delimited groups,
+ /// and returns whole groups as token trees.
+ impl IntoIterator for TokenStream {
+ type Item = TokenTree;
+ type IntoIter = super::IntoIter<TokenTree>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ self.token_trees.into_iter()
+ }
+ }
+
+ type LexError = String;
+
+ /// Attempts to break the string into tokens and parse those tokens into a token stream.
+ /// May fail for a number of reasons, for example, if the string contains unbalanced delimiters
+ /// or characters not existing in the language.
+ /// All tokens in the parsed stream get `Span::call_site()` spans.
+ ///
+ /// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
+ /// change these errors into `LexError`s later.
+ impl FromStr for TokenStream {
+ type Err = LexError;
+
+ fn from_str(src: &str) -> Result<TokenStream, LexError> {
+ let (subtree, _token_map) =
+ mbe::parse_to_token_tree(src).ok_or("Failed to parse from mbe")?;
+
+ let subtree = subtree_replace_token_ids_with_unspecified(subtree);
+ Ok(TokenStream::with_subtree(subtree))
+ }
+ }
+
+ impl ToString for TokenStream {
+ fn to_string(&self) -> String {
+ tt::pretty(&self.token_trees)
+ }
+ }
+
+ fn subtree_replace_token_ids_with_unspecified(subtree: tt::Subtree) -> tt::Subtree {
+ tt::Subtree {
+ delimiter: subtree
+ .delimiter
+ .map(|d| tt::Delimiter { id: tt::TokenId::unspecified(), ..d }),
+ token_trees: subtree
+ .token_trees
+ .into_iter()
+ .map(token_tree_replace_token_ids_with_unspecified)
+ .collect(),
+ }
+ }
+
+ fn token_tree_replace_token_ids_with_unspecified(tt: tt::TokenTree) -> tt::TokenTree {
+ match tt {
+ tt::TokenTree::Leaf(leaf) => {
+ tt::TokenTree::Leaf(leaf_replace_token_ids_with_unspecified(leaf))
+ }
+ tt::TokenTree::Subtree(subtree) => {
+ tt::TokenTree::Subtree(subtree_replace_token_ids_with_unspecified(subtree))
+ }
+ }
+ }
+
+ fn leaf_replace_token_ids_with_unspecified(leaf: tt::Leaf) -> tt::Leaf {
+ match leaf {
+ tt::Leaf::Literal(lit) => {
+ tt::Leaf::Literal(tt::Literal { id: tt::TokenId::unspecified(), ..lit })
+ }
+ tt::Leaf::Punct(punct) => {
+ tt::Leaf::Punct(tt::Punct { id: tt::TokenId::unspecified(), ..punct })
+ }
+ tt::Leaf::Ident(ident) => {
+ tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), ..ident })
+ }
+ }
+ }
+}
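+
+// Hedged sketch: parsing source text produces a stream whose spans are all
+// `tt::TokenId::unspecified`, as described in the module above.
+#[allow(dead_code)]
+fn _token_stream_from_str_sketch() -> TokenStream {
+ use std::str::FromStr;
+ TokenStream::from_str("struct S;").expect("trivially valid Rust tokens")
+}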
+
+impl TokenStreamBuilder {
+ fn new() -> TokenStreamBuilder {
+ TokenStreamBuilder { acc: TokenStream::new() }
+ }
+
+ fn push(&mut self, stream: TokenStream) {
+ self.acc.extend(stream.into_iter())
+ }
+
+ fn build(self) -> TokenStream {
+ self.acc
+ }
+}
+
+pub struct FreeFunctions;
+
+#[derive(Clone)]
+pub struct TokenStreamIter {
+ trees: IntoIter<TokenTree>,
+}
+
+#[derive(Default)]
+pub struct RustAnalyzer {
+ ident_interner: IdentInterner,
+ // FIXME: store span information here.
+}
+
+impl server::Types for RustAnalyzer {
+ type FreeFunctions = FreeFunctions;
+ type TokenStream = TokenStream;
+ type Group = Group;
+ type Punct = Punct;
+ type Ident = IdentId;
+ type Literal = Literal;
+ type SourceFile = SourceFile;
+ type Diagnostic = Diagnostic;
+ type Span = Span;
+ type MultiSpan = Vec<Span>;
+}
+
+impl server::FreeFunctions for RustAnalyzer {
+ fn track_env_var(&mut self, _var: &str, _value: Option<&str>) {
+ // FIXME: track env var accesses
+ // https://github.com/rust-lang/rust/pull/71858
+ }
+ fn track_path(&mut self, _path: &str) {}
+}
+
+impl server::TokenStream for RustAnalyzer {
+ fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
+ stream.is_empty()
+ }
+ fn from_str(&mut self, src: &str) -> Self::TokenStream {
+ use std::str::FromStr;
+
+ Self::TokenStream::from_str(src).expect("cannot parse string")
+ }
+ fn to_string(&mut self, stream: &Self::TokenStream) -> String {
+ stream.to_string()
+ }
+ fn from_token_tree(
+ &mut self,
+ tree: bridge::TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>,
+ ) -> Self::TokenStream {
+ match tree {
+ bridge::TokenTree::Group(group) => {
+ let tree = TokenTree::from(group);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+
+ bridge::TokenTree::Ident(IdentId(index)) => {
+ let IdentData(ident) = self.ident_interner.get(index).clone();
+ let ident: tt::Ident = ident;
+ let leaf = tt::Leaf::from(ident);
+ let tree = TokenTree::from(leaf);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+
+ bridge::TokenTree::Literal(literal) => {
+ let leaf = tt::Leaf::from(literal);
+ let tree = TokenTree::from(leaf);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+
+ bridge::TokenTree::Punct(p) => {
+ let leaf = tt::Leaf::from(p);
+ let tree = TokenTree::from(leaf);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+ }
+ }
+
+ fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
+ Ok(self_.clone())
+ }
+
+ fn concat_trees(
+ &mut self,
+ base: Option<Self::TokenStream>,
+ trees: Vec<bridge::TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>>,
+ ) -> Self::TokenStream {
+ let mut builder = TokenStreamBuilder::new();
+ if let Some(base) = base {
+ builder.push(base);
+ }
+ for tree in trees {
+ builder.push(self.from_token_tree(tree));
+ }
+ builder.build()
+ }
+
+ fn concat_streams(
+ &mut self,
+ base: Option<Self::TokenStream>,
+ streams: Vec<Self::TokenStream>,
+ ) -> Self::TokenStream {
+ let mut builder = TokenStreamBuilder::new();
+ if let Some(base) = base {
+ builder.push(base);
+ }
+ for stream in streams {
+ builder.push(stream);
+ }
+ builder.build()
+ }
+
+ fn into_trees(
+ &mut self,
+ stream: Self::TokenStream,
+ ) -> Vec<bridge::TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>> {
+ stream
+ .into_iter()
+ .map(|tree| match tree {
+ tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
+ bridge::TokenTree::Ident(IdentId(self.ident_interner.intern(&IdentData(ident))))
+ }
+ tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => bridge::TokenTree::Literal(lit),
+ tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => bridge::TokenTree::Punct(punct),
+ tt::TokenTree::Subtree(subtree) => bridge::TokenTree::Group(subtree),
+ })
+ .collect()
+ }
+}
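+
+// Hedged sketch: driving the server-side `TokenStream` interface directly,
+// written with fully qualified calls so no extra trait imports are needed.
+#[allow(dead_code)]
+fn _server_token_stream_sketch() {
+ let mut ra = RustAnalyzer::default();
+ let stream = <RustAnalyzer as server::TokenStream>::from_str(&mut ra, "foo!()");
+ assert!(!<RustAnalyzer as server::TokenStream>::is_empty(&mut ra, &stream));
+}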
+
+fn delim_to_internal(d: bridge::Delimiter) -> Option<tt::Delimiter> {
+ let kind = match d {
+ bridge::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis,
+ bridge::Delimiter::Brace => tt::DelimiterKind::Brace,
+ bridge::Delimiter::Bracket => tt::DelimiterKind::Bracket,
+ bridge::Delimiter::None => return None,
+ };
+ Some(tt::Delimiter { id: tt::TokenId::unspecified(), kind })
+}
+
+fn delim_to_external(d: Option<tt::Delimiter>) -> bridge::Delimiter {
+ match d.map(|it| it.kind) {
+ Some(tt::DelimiterKind::Parenthesis) => bridge::Delimiter::Parenthesis,
+ Some(tt::DelimiterKind::Brace) => bridge::Delimiter::Brace,
+ Some(tt::DelimiterKind::Bracket) => bridge::Delimiter::Bracket,
+ None => bridge::Delimiter::None,
+ }
+}
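+
+// Hedged sketch: `Delimiter::None` is the one case without a tt counterpart,
+// so it maps to `None`; the other delimiters round-trip through these helpers.
+#[allow(dead_code)]
+fn _delimiter_conversion_sketch() {
+ assert!(delim_to_internal(bridge::Delimiter::None).is_none());
+ let paren = delim_to_internal(bridge::Delimiter::Parenthesis);
+ assert!(matches!(delim_to_external(paren), bridge::Delimiter::Parenthesis));
+}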
+
+fn spacing_to_internal(spacing: bridge::Spacing) -> Spacing {
+ match spacing {
+ bridge::Spacing::Alone => Spacing::Alone,
+ bridge::Spacing::Joint => Spacing::Joint,
+ }
+}
+
+fn spacing_to_external(spacing: Spacing) -> bridge::Spacing {
+ match spacing {
+ Spacing::Alone => bridge::Spacing::Alone,
+ Spacing::Joint => bridge::Spacing::Joint,
+ }
+}
+
+impl server::Group for RustAnalyzer {
+ fn new(
+ &mut self,
+ delimiter: bridge::Delimiter,
+ stream: Option<Self::TokenStream>,
+ ) -> Self::Group {
+ Self::Group {
+ delimiter: delim_to_internal(delimiter),
+ token_trees: stream.unwrap_or_default().token_trees,
+ }
+ }
+ fn delimiter(&mut self, group: &Self::Group) -> bridge::Delimiter {
+ delim_to_external(group.delimiter)
+ }
+
+ // NOTE: the returned stream does not include the group's delimiter.
+ fn stream(&mut self, group: &Self::Group) -> Self::TokenStream {
+ TokenStream { token_trees: group.token_trees.clone() }
+ }
+
+ fn span(&mut self, group: &Self::Group) -> Self::Span {
+ group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified)
+ }
+
+ fn set_span(&mut self, group: &mut Self::Group, span: Self::Span) {
+ if let Some(delim) = &mut group.delimiter {
+ delim.id = span;
+ }
+ }
+
+ fn span_open(&mut self, group: &Self::Group) -> Self::Span {
+ // FIXME we only store one `TokenId` for the delimiters
+ group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified)
+ }
+
+ fn span_close(&mut self, group: &Self::Group) -> Self::Span {
+ // FIXME we only store one `TokenId` for the delimiters
+ group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified)
+ }
+}
+
+impl server::Punct for RustAnalyzer {
+ fn new(&mut self, ch: char, spacing: bridge::Spacing) -> Self::Punct {
+ tt::Punct {
+ char: ch,
+ spacing: spacing_to_internal(spacing),
+ id: tt::TokenId::unspecified(),
+ }
+ }
+ fn as_char(&mut self, punct: Self::Punct) -> char {
+ punct.char
+ }
+ fn spacing(&mut self, punct: Self::Punct) -> bridge::Spacing {
+ spacing_to_external(punct.spacing)
+ }
+ fn span(&mut self, punct: Self::Punct) -> Self::Span {
+ punct.id
+ }
+ fn with_span(&mut self, punct: Self::Punct, span: Self::Span) -> Self::Punct {
+ tt::Punct { id: span, ..punct }
+ }
+}
+
+impl server::Ident for RustAnalyzer {
+ fn new(&mut self, string: &str, span: Self::Span, _is_raw: bool) -> Self::Ident {
+ IdentId(self.ident_interner.intern(&IdentData(tt::Ident { text: string.into(), id: span })))
+ }
+
+ fn span(&mut self, ident: Self::Ident) -> Self::Span {
+ self.ident_interner.get(ident.0).0.id
+ }
+ fn with_span(&mut self, ident: Self::Ident, span: Self::Span) -> Self::Ident {
+ let data = self.ident_interner.get(ident.0);
+ let new = IdentData(tt::Ident { id: span, ..data.0.clone() });
+ IdentId(self.ident_interner.intern(&new))
+ }
+}
+
+impl server::Literal for RustAnalyzer {
+ fn debug_kind(&mut self, _literal: &Self::Literal) -> String {
+ // r-a: debug_kind and suffix are unsupported; corresponding client code has been changed to not call these.
+ // They must still be present to be ABI-compatible and work with upstream proc_macro.
+ "".to_owned()
+ }
+ fn from_str(&mut self, s: &str) -> Result<Self::Literal, ()> {
+ Ok(Literal { text: s.into(), id: tt::TokenId::unspecified() })
+ }
+ fn symbol(&mut self, literal: &Self::Literal) -> String {
+ literal.text.to_string()
+ }
+ fn suffix(&mut self, _literal: &Self::Literal) -> Option<String> {
+ None
+ }
+
+ fn to_string(&mut self, literal: &Self::Literal) -> String {
+ literal.to_string()
+ }
+
+ fn integer(&mut self, n: &str) -> Self::Literal {
+ let n = match n.parse::<i128>() {
+ Ok(n) => n.to_string(),
+ Err(_) => n.parse::<u128>().unwrap().to_string(),
+ };
+ Literal { text: n.into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn typed_integer(&mut self, n: &str, kind: &str) -> Self::Literal {
+ macro_rules! def_suffixed_integer {
+ ($kind:ident, $($ty:ty),*) => {
+ match $kind {
+ $(
+ stringify!($ty) => {
+ let n: $ty = n.parse().unwrap();
+ format!(concat!("{}", stringify!($ty)), n)
+ }
+ )*
+ _ => unimplemented!("unknown args for typed_integer: n {}, kind {}", n, $kind),
+ }
+ }
+ }
+
+ let text = def_suffixed_integer! {kind, u8, u16, u32, u64, u128, usize, i8, i16, i32, i64, i128, isize};
+
+ Literal { text: text.into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn float(&mut self, n: &str) -> Self::Literal {
+ let n: f64 = n.parse().unwrap();
+ let mut text = f64::to_string(&n);
+ if !text.contains('.') {
+ text += ".0"
+ }
+ Literal { text: text.into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn f32(&mut self, n: &str) -> Self::Literal {
+ let n: f32 = n.parse().unwrap();
+ let text = format!("{}f32", n);
+ Literal { text: text.into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn f64(&mut self, n: &str) -> Self::Literal {
+ let n: f64 = n.parse().unwrap();
+ let text = format!("{}f64", n);
+ Literal { text: text.into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn string(&mut self, string: &str) -> Self::Literal {
+ let mut escaped = String::new();
+ for ch in string.chars() {
+ escaped.extend(ch.escape_debug());
+ }
+ Literal { text: format!("\"{}\"", escaped).into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn character(&mut self, ch: char) -> Self::Literal {
+ Literal { text: format!("'{}'", ch).into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn byte_string(&mut self, bytes: &[u8]) -> Self::Literal {
+ let string = bytes
+ .iter()
+ .cloned()
+ .flat_map(ascii::escape_default)
+ .map(Into::<char>::into)
+ .collect::<String>();
+
+ Literal { text: format!("b\"{}\"", string).into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn span(&mut self, literal: &Self::Literal) -> Self::Span {
+ literal.id
+ }
+
+ fn set_span(&mut self, literal: &mut Self::Literal, span: Self::Span) {
+ literal.id = span;
+ }
+
+ fn subspan(
+ &mut self,
+ _literal: &Self::Literal,
+ _start: Bound<usize>,
+ _end: Bound<usize>,
+ ) -> Option<Self::Span> {
+ // FIXME handle span
+ None
+ }
+}
+
+impl server::SourceFile for RustAnalyzer {
+ // FIXME these are all stubs
+ fn eq(&mut self, _file1: &Self::SourceFile, _file2: &Self::SourceFile) -> bool {
+ true
+ }
+ fn path(&mut self, _file: &Self::SourceFile) -> String {
+ String::new()
+ }
+ fn is_real(&mut self, _file: &Self::SourceFile) -> bool {
+ true
+ }
+}
+
+impl server::Diagnostic for RustAnalyzer {
+ fn new(&mut self, level: Level, msg: &str, spans: Self::MultiSpan) -> Self::Diagnostic {
+ let mut diag = Diagnostic::new(level, msg);
+ diag.spans = spans;
+ diag
+ }
+
+ fn sub(
+ &mut self,
+ _diag: &mut Self::Diagnostic,
+ _level: Level,
+ _msg: &str,
+ _spans: Self::MultiSpan,
+ ) {
+ // FIXME handle diagnostic
+ //
+ }
+
+ fn emit(&mut self, _diag: Self::Diagnostic) {
+ // FIXME handle diagnostic
+ // diag.emit()
+ }
+}
+
+impl server::Span for RustAnalyzer {
+ fn debug(&mut self, span: Self::Span) -> String {
+ format!("{:?}", span.0)
+ }
+ fn def_site(&mut self) -> Self::Span {
+ // MySpan(self.span_interner.intern(&MySpanData(Span::def_site())))
+ // FIXME handle span
+ tt::TokenId::unspecified()
+ }
+ fn call_site(&mut self) -> Self::Span {
+ // MySpan(self.span_interner.intern(&MySpanData(Span::call_site())))
+ // FIXME handle span
+ tt::TokenId::unspecified()
+ }
+ fn source_file(&mut self, _span: Self::Span) -> Self::SourceFile {
+ SourceFile {}
+ }
+ fn save_span(&mut self, _span: Self::Span) -> usize {
+ // FIXME stub
+ 0
+ }
+ fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span {
+ // FIXME stub
+ tt::TokenId::unspecified()
+ }
+ /// Recent feature, not yet in `proc_macro`.
+ ///
+ /// See PR:
+ /// <https://github.com/rust-lang/rust/pull/55780>
+ fn source_text(&mut self, _span: Self::Span) -> Option<String> {
+ None
+ }
+
+ fn parent(&mut self, _span: Self::Span) -> Option<Self::Span> {
+ // FIXME handle span
+ None
+ }
+ fn source(&mut self, span: Self::Span) -> Self::Span {
+ // FIXME handle span
+ span
+ }
+ fn start(&mut self, _span: Self::Span) -> LineColumn {
+ // FIXME handle span
+ LineColumn { line: 0, column: 0 }
+ }
+ fn end(&mut self, _span: Self::Span) -> LineColumn {
+ // FIXME handle span
+ LineColumn { line: 0, column: 0 }
+ }
+ fn join(&mut self, first: Self::Span, _second: Self::Span) -> Option<Self::Span> {
+ // Just return the first span again, because some macros will unwrap the result.
+ Some(first)
+ }
+ fn resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span {
+ // FIXME handle span
+ tt::TokenId::unspecified()
+ }
+
+ fn mixed_site(&mut self) -> Self::Span {
+ // FIXME handle span
+ tt::TokenId::unspecified()
+ }
+
+ fn after(&mut self, _self_: Self::Span) -> Self::Span {
+ tt::TokenId::unspecified()
+ }
+
+ fn before(&mut self, _self_: Self::Span) -> Self::Span {
+ tt::TokenId::unspecified()
+ }
+}
+
+impl server::MultiSpan for RustAnalyzer {
+ fn new(&mut self) -> Self::MultiSpan {
+ // FIXME handle span
+ vec![]
+ }
+
+ fn push(&mut self, other: &mut Self::MultiSpan, span: Self::Span) {
+ // TODO
+ other.push(span)
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::super::proc_macro::bridge::server::Literal;
+ use super::*;
+
+ #[test]
+ fn test_ra_server_literals() {
+ let mut srv = RustAnalyzer { ident_interner: IdentInterner::default() };
+ assert_eq!(srv.integer("1234").text, "1234");
+
+ assert_eq!(srv.typed_integer("12", "u8").text, "12u8");
+ assert_eq!(srv.typed_integer("255", "u16").text, "255u16");
+ assert_eq!(srv.typed_integer("1234", "u32").text, "1234u32");
+ assert_eq!(srv.typed_integer("15846685", "u64").text, "15846685u64");
+ assert_eq!(srv.typed_integer("15846685258", "u128").text, "15846685258u128");
+ assert_eq!(srv.typed_integer("156788984", "usize").text, "156788984usize");
+ assert_eq!(srv.typed_integer("127", "i8").text, "127i8");
+ assert_eq!(srv.typed_integer("255", "i16").text, "255i16");
+ assert_eq!(srv.typed_integer("1234", "i32").text, "1234i32");
+ assert_eq!(srv.typed_integer("15846685", "i64").text, "15846685i64");
+ assert_eq!(srv.typed_integer("15846685258", "i128").text, "15846685258i128");
+ assert_eq!(srv.float("0").text, "0.0");
+ assert_eq!(srv.float("15684.5867").text, "15684.5867");
+ assert_eq!(srv.f32("15684.58").text, "15684.58f32");
+ assert_eq!(srv.f64("15684.58").text, "15684.58f64");
+
+ assert_eq!(srv.string("hello_world").text, "\"hello_world\"");
+ assert_eq!(srv.character('c').text, "'c'");
+ assert_eq!(srv.byte_string(b"1234586\x88").text, "b\"1234586\\x88\"");
+
+ // u128::max
+ assert_eq!(
+ srv.integer("340282366920938463463374607431768211455").text,
+ "340282366920938463463374607431768211455"
+ );
+ // i128::min
+ assert_eq!(
+ srv.integer("-170141183460469231731687303715884105728").text,
+ "-170141183460469231731687303715884105728"
+ );
+ }
+
+ #[test]
+ fn test_ra_server_to_string() {
+ let s = TokenStream {
+ token_trees: vec![
+ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "struct".into(),
+ id: tt::TokenId::unspecified(),
+ })),
+ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "T".into(),
+ id: tt::TokenId::unspecified(),
+ })),
+ tt::TokenTree::Subtree(tt::Subtree {
+ delimiter: Some(tt::Delimiter {
+ id: tt::TokenId::unspecified(),
+ kind: tt::DelimiterKind::Brace,
+ }),
+ token_trees: vec![],
+ }),
+ ],
+ };
+
+ assert_eq!(s.to_string(), "struct T {}");
+ }
+
+ #[test]
+ fn test_ra_server_from_str() {
+ use std::str::FromStr;
+ let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree {
+ delimiter: Some(tt::Delimiter {
+ id: tt::TokenId::unspecified(),
+ kind: tt::DelimiterKind::Parenthesis,
+ }),
+ token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "a".into(),
+ id: tt::TokenId::unspecified(),
+ }))],
+ });
+
+ let t1 = TokenStream::from_str("(a)").unwrap();
+ assert_eq!(t1.token_trees.len(), 1);
+ assert_eq!(t1.token_trees[0], subtree_paren_a);
+
+ let t2 = TokenStream::from_str("(a);").unwrap();
+ assert_eq!(t2.token_trees.len(), 2);
+ assert_eq!(t2.token_trees[0], subtree_paren_a);
+
+ let underscore = TokenStream::from_str("_").unwrap();
+ assert_eq!(
+ underscore.token_trees[0],
+ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "_".into(),
+ id: tt::TokenId::unspecified(),
+ }))
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/mod.rs
new file mode 100644
index 000000000..9d56f0eaf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/mod.rs
@@ -0,0 +1,105 @@
+//! Proc macro ABI.
+
+#[allow(dead_code)]
+#[doc(hidden)]
+mod proc_macro;
+
+#[allow(dead_code)]
+#[doc(hidden)]
+mod ra_server;
+
+use libloading::Library;
+use proc_macro_api::ProcMacroKind;
+
+use super::PanicMessage;
+
+pub use ra_server::TokenStream;
+
+pub(crate) struct Abi {
+ exported_macros: Vec<proc_macro::bridge::client::ProcMacro>,
+}
+
+impl From<proc_macro::bridge::PanicMessage> for PanicMessage {
+ fn from(p: proc_macro::bridge::PanicMessage) -> Self {
+ Self { message: p.as_str().map(|s| s.to_string()) }
+ }
+}
+
+impl Abi {
+ pub unsafe fn from_lib(lib: &Library, symbol_name: String) -> Result<Abi, libloading::Error> {
+ let macros: libloading::Symbol<'_, &&[proc_macro::bridge::client::ProcMacro]> =
+ lib.get(symbol_name.as_bytes())?;
+ Ok(Self { exported_macros: macros.to_vec() })
+ }
+
+ pub fn expand(
+ &self,
+ macro_name: &str,
+ macro_body: &tt::Subtree,
+ attributes: Option<&tt::Subtree>,
+ ) -> Result<tt::Subtree, PanicMessage> {
+ let parsed_body = TokenStream::with_subtree(macro_body.clone());
+
+ let parsed_attributes =
+ attributes.map_or(TokenStream::new(), |attr| TokenStream::with_subtree(attr.clone()));
+
+ for proc_macro in &self.exported_macros {
+ match proc_macro {
+ proc_macro::bridge::client::ProcMacro::CustomDerive {
+ trait_name, client, ..
+ } if *trait_name == macro_name => {
+ let res = client.run(
+ &proc_macro::bridge::server::SameThread,
+ ra_server::RustAnalyzer::default(),
+ parsed_body,
+ true,
+ );
+ return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
+ }
+ proc_macro::bridge::client::ProcMacro::Bang { name, client }
+ if *name == macro_name =>
+ {
+ let res = client.run(
+ &proc_macro::bridge::server::SameThread,
+ ra_server::RustAnalyzer::default(),
+ parsed_body,
+ true,
+ );
+ return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
+ }
+ proc_macro::bridge::client::ProcMacro::Attr { name, client }
+ if *name == macro_name =>
+ {
+ let res = client.run(
+ &proc_macro::bridge::server::SameThread,
+ ra_server::RustAnalyzer::default(),
+ parsed_attributes,
+ parsed_body,
+ true,
+ );
+ return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
+ }
+ _ => continue,
+ }
+ }
+
+ Err(proc_macro::bridge::PanicMessage::String("Nothing to expand".to_string()).into())
+ }
+
+ pub fn list_macros(&self) -> Vec<(String, ProcMacroKind)> {
+ self.exported_macros
+ .iter()
+ .map(|proc_macro| match proc_macro {
+ proc_macro::bridge::client::ProcMacro::CustomDerive { trait_name, .. } => {
+ (trait_name.to_string(), ProcMacroKind::CustomDerive)
+ }
+ proc_macro::bridge::client::ProcMacro::Bang { name, .. } => {
+ (name.to_string(), ProcMacroKind::FuncLike)
+ }
+ proc_macro::bridge::client::ProcMacro::Attr { name, .. } => {
+ (name.to_string(), ProcMacroKind::Attr)
+ }
+ })
+ .collect()
+ }
+}
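+
+// Hedged usage sketch (the dylib path and symbol name below are made up): load
+// a compiled proc-macro crate and list the macros it exports. Note that the
+// `Library` must stay loaded for as long as `expand` is called on the `Abi`.
+#[allow(dead_code)]
+unsafe fn _abi_usage_sketch() -> Result<Vec<(String, ProcMacroKind)>, libloading::Error> {
+ let lib = Library::new("/tmp/libmy_macros.so")?;
+ let abi = Abi::from_lib(&lib, "rustc_proc_macro_decls_mymacros".to_string())?;
+ Ok(abi.list_macros())
+}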
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/buffer.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/buffer.rs
new file mode 100644
index 000000000..48030f8d8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/buffer.rs
@@ -0,0 +1,156 @@
+//! Buffer management for same-process client<->server communication.
+
+use std::io::{self, Write};
+use std::mem;
+use std::ops::{Deref, DerefMut};
+use std::slice;
+
+#[repr(C)]
+pub struct Buffer {
+ data: *mut u8,
+ len: usize,
+ capacity: usize,
+ reserve: extern "C" fn(Buffer, usize) -> Buffer,
+ drop: extern "C" fn(Buffer),
+}
+
+unsafe impl Sync for Buffer {}
+unsafe impl Send for Buffer {}
+
+impl Default for Buffer {
+ #[inline]
+ fn default() -> Self {
+ Self::from(vec![])
+ }
+}
+
+impl Deref for Buffer {
+ type Target = [u8];
+ #[inline]
+ fn deref(&self) -> &[u8] {
+ unsafe { slice::from_raw_parts(self.data as *const u8, self.len) }
+ }
+}
+
+impl DerefMut for Buffer {
+ #[inline]
+ fn deref_mut(&mut self) -> &mut [u8] {
+ unsafe { slice::from_raw_parts_mut(self.data, self.len) }
+ }
+}
+
+impl Buffer {
+ #[inline]
+ pub(super) fn new() -> Self {
+ Self::default()
+ }
+
+ #[inline]
+ pub(super) fn clear(&mut self) {
+ self.len = 0;
+ }
+
+ #[inline]
+ pub(super) fn take(&mut self) -> Self {
+ mem::take(self)
+ }
+
+ // We have the array method separate from extending from a slice. This is
+ // because in the case of small arrays, codegen can be more efficient
+ // (avoiding a memmove call). With extend_from_slice, LLVM at least
+ // currently is not able to make that optimization.
+ #[inline]
+ pub(super) fn extend_from_array<const N: usize>(&mut self, xs: &[u8; N]) {
+ if xs.len() > (self.capacity - self.len) {
+ let b = self.take();
+ *self = (b.reserve)(b, xs.len());
+ }
+ unsafe {
+ xs.as_ptr().copy_to_nonoverlapping(self.data.add(self.len), xs.len());
+ self.len += xs.len();
+ }
+ }
+
+ #[inline]
+ pub(super) fn extend_from_slice(&mut self, xs: &[u8]) {
+ if xs.len() > (self.capacity - self.len) {
+ let b = self.take();
+ *self = (b.reserve)(b, xs.len());
+ }
+ unsafe {
+ xs.as_ptr().copy_to_nonoverlapping(self.data.add(self.len), xs.len());
+ self.len += xs.len();
+ }
+ }
+
+ #[inline]
+ pub(super) fn push(&mut self, v: u8) {
+ // The code here is taken from Vec::push, and we know that reserve()
+ // will panic if we're exceeding isize::MAX bytes and so there's no need
+ // to check for overflow.
+ if self.len == self.capacity {
+ let b = self.take();
+ *self = (b.reserve)(b, 1);
+ }
+ unsafe {
+ *self.data.add(self.len) = v;
+ self.len += 1;
+ }
+ }
+}
+
+impl Write for Buffer {
+ #[inline]
+ fn write(&mut self, xs: &[u8]) -> io::Result<usize> {
+ self.extend_from_slice(xs);
+ Ok(xs.len())
+ }
+
+ #[inline]
+ fn write_all(&mut self, xs: &[u8]) -> io::Result<()> {
+ self.extend_from_slice(xs);
+ Ok(())
+ }
+
+ #[inline]
+ fn flush(&mut self) -> io::Result<()> {
+ Ok(())
+ }
+}
+
+impl Drop for Buffer {
+ #[inline]
+ fn drop(&mut self) {
+ let b = self.take();
+ (b.drop)(b);
+ }
+}
+
+impl From<Vec<u8>> for Buffer {
+ fn from(mut v: Vec<u8>) -> Self {
+ let (data, len, capacity) = (v.as_mut_ptr(), v.len(), v.capacity());
+ mem::forget(v);
+
+ // This utility function is nested in here because it can *only*
+ // be safely called on `Buffer`s created by *this* `proc_macro`.
+ fn to_vec(b: Buffer) -> Vec<u8> {
+ unsafe {
+ let Buffer { data, len, capacity, .. } = b;
+ mem::forget(b);
+ Vec::from_raw_parts(data, len, capacity)
+ }
+ }
+
+ extern "C" fn reserve(b: Buffer, additional: usize) -> Buffer {
+ let mut v = to_vec(b);
+ v.reserve(additional);
+ Buffer::from(v)
+ }
+
+ extern "C" fn drop(b: Buffer) {
+ mem::drop(to_vec(b));
+ }
+
+ Buffer { data, len, capacity, reserve, drop }
+ }
+}
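+
+// Hedged usage sketch: a `Buffer` built from a `Vec<u8>` grows through its
+// stored `reserve` callback, so appending keeps working across the C ABI.
+#[allow(dead_code)]
+fn _buffer_usage_sketch() {
+ let mut buf = Buffer::from(vec![1u8, 2, 3]);
+ buf.extend_from_slice(b"abc");
+ assert_eq!(&buf[..], &[1, 2, 3, b'a', b'b', b'c']);
+}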
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/client.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/client.rs
new file mode 100644
index 000000000..22bda8ba5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/client.rs
@@ -0,0 +1,529 @@
+//! Client-side types.
+
+use super::*;
+
+use std::marker::PhantomData;
+
+macro_rules! define_handles {
+ (
+ 'owned: $($oty:ident,)*
+ 'interned: $($ity:ident,)*
+ ) => {
+ #[repr(C)]
+ #[allow(non_snake_case)]
+ pub struct HandleCounters {
+ $($oty: AtomicUsize,)*
+ $($ity: AtomicUsize,)*
+ }
+
+ impl HandleCounters {
+ // FIXME(eddyb) use a reference to the `static COUNTERS`, instead of
+ // a wrapper `fn` pointer, once `const fn` can reference `static`s.
+ extern "C" fn get() -> &'static Self {
+ static COUNTERS: HandleCounters = HandleCounters {
+ $($oty: AtomicUsize::new(1),)*
+ $($ity: AtomicUsize::new(1),)*
+ };
+ &COUNTERS
+ }
+ }
+
+ // FIXME(eddyb) generate the definition of `HandleStore` in `server.rs`.
+ #[repr(C)]
+ #[allow(non_snake_case)]
+ pub(super) struct HandleStore<S: server::Types> {
+ $($oty: handle::OwnedStore<S::$oty>,)*
+ $($ity: handle::InternedStore<S::$ity>,)*
+ }
+
+ impl<S: server::Types> HandleStore<S> {
+ pub(super) fn new(handle_counters: &'static HandleCounters) -> Self {
+ HandleStore {
+ $($oty: handle::OwnedStore::new(&handle_counters.$oty),)*
+ $($ity: handle::InternedStore::new(&handle_counters.$ity),)*
+ }
+ }
+ }
+
+ $(
+ #[repr(C)]
+ pub(crate) struct $oty {
+ handle: handle::Handle,
+ // Prevent Send and Sync impls. `!Send`/`!Sync` is the usual
+ // way of doing this, but that requires unstable features.
+ // rust-analyzer uses this code and avoids unstable features.
+ _marker: PhantomData<*mut ()>,
+ }
+
+ // Forward `Drop::drop` to the inherent `drop` method.
+ impl Drop for $oty {
+ fn drop(&mut self) {
+ $oty {
+ handle: self.handle,
+ _marker: PhantomData,
+ }.drop();
+ }
+ }
+
+ impl<S> Encode<S> for $oty {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ let handle = self.handle;
+ mem::forget(self);
+ handle.encode(w, s);
+ }
+ }
+
+ impl<S: server::Types> DecodeMut<'_, '_, HandleStore<server::MarkedTypes<S>>>
+ for Marked<S::$oty, $oty>
+ {
+ fn decode(r: &mut Reader<'_>, s: &mut HandleStore<server::MarkedTypes<S>>) -> Self {
+ s.$oty.take(handle::Handle::decode(r, &mut ()))
+ }
+ }
+
+ impl<S> Encode<S> for &$oty {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.handle.encode(w, s);
+ }
+ }
+
+ impl<'s, S: server::Types> Decode<'_, 's, HandleStore<server::MarkedTypes<S>>>
+ for &'s Marked<S::$oty, $oty>
+ {
+ fn decode(r: &mut Reader<'_>, s: &'s HandleStore<server::MarkedTypes<S>>) -> Self {
+ &s.$oty[handle::Handle::decode(r, &mut ())]
+ }
+ }
+
+ impl<S> Encode<S> for &mut $oty {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.handle.encode(w, s);
+ }
+ }
+
+ impl<'s, S: server::Types> DecodeMut<'_, 's, HandleStore<server::MarkedTypes<S>>>
+ for &'s mut Marked<S::$oty, $oty>
+ {
+ fn decode(
+ r: &mut Reader<'_>,
+ s: &'s mut HandleStore<server::MarkedTypes<S>>
+ ) -> Self {
+ &mut s.$oty[handle::Handle::decode(r, &mut ())]
+ }
+ }
+
+ impl<S: server::Types> Encode<HandleStore<server::MarkedTypes<S>>>
+ for Marked<S::$oty, $oty>
+ {
+ fn encode(self, w: &mut Writer, s: &mut HandleStore<server::MarkedTypes<S>>) {
+ s.$oty.alloc(self).encode(w, s);
+ }
+ }
+
+ impl<S> DecodeMut<'_, '_, S> for $oty {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ $oty {
+ handle: handle::Handle::decode(r, s),
+ _marker: PhantomData,
+ }
+ }
+ }
+ )*
+
+ $(
+ #[repr(C)]
+ #[derive(Copy, Clone, PartialEq, Eq, Hash)]
+ pub(crate) struct $ity {
+ handle: handle::Handle,
+ // Prevent Send and Sync impls. `!Send`/`!Sync` is the usual
+ // way of doing this, but that requires unstable features.
+ // rust-analyzer uses this code and avoids unstable features.
+ _marker: PhantomData<*mut ()>,
+ }
+
+ impl<S> Encode<S> for $ity {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.handle.encode(w, s);
+ }
+ }
+
+ impl<S: server::Types> DecodeMut<'_, '_, HandleStore<server::MarkedTypes<S>>>
+ for Marked<S::$ity, $ity>
+ {
+ fn decode(r: &mut Reader<'_>, s: &mut HandleStore<server::MarkedTypes<S>>) -> Self {
+ s.$ity.copy(handle::Handle::decode(r, &mut ()))
+ }
+ }
+
+ impl<S: server::Types> Encode<HandleStore<server::MarkedTypes<S>>>
+ for Marked<S::$ity, $ity>
+ {
+ fn encode(self, w: &mut Writer, s: &mut HandleStore<server::MarkedTypes<S>>) {
+ s.$ity.alloc(self).encode(w, s);
+ }
+ }
+
+ impl<S> DecodeMut<'_, '_, S> for $ity {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ $ity {
+ handle: handle::Handle::decode(r, s),
+ _marker: PhantomData,
+ }
+ }
+ }
+ )*
+ }
+}
+define_handles! {
+ 'owned:
+ FreeFunctions,
+ TokenStream,
+ Literal,
+ SourceFile,
+ MultiSpan,
+ Diagnostic,
+
+ 'interned:
+ Ident,
+ Span,
+}
+
+// FIXME(eddyb) generate these impls by pattern-matching on the
+// names of methods - also could use the presence of `fn drop`
+// to distinguish between 'owned and 'interned, above.
+// Alternatively, special "modes" could be listed for types in `with_api`
+// instead of pattern matching on methods, here and in the server declaration.
+
+impl Clone for TokenStream {
+ fn clone(&self) -> Self {
+ self.clone()
+ }
+}
+
+impl Clone for Literal {
+ fn clone(&self) -> Self {
+ self.clone()
+ }
+}
+
+impl fmt::Debug for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("Literal")
+ // format the kind without quotes, as in `kind: Float`
+ .field("kind", &format_args!("{}", &self.debug_kind()))
+ .field("symbol", &self.symbol())
+ // format `Some("...")` on one line even in {:#?} mode
+ .field("suffix", &format_args!("{:?}", &self.suffix()))
+ .field("span", &self.span())
+ .finish()
+ }
+}
+
+impl Clone for SourceFile {
+ fn clone(&self) -> Self {
+ self.clone()
+ }
+}
+
+impl Span {
+ pub(crate) fn def_site() -> Span {
+ Bridge::with(|bridge| bridge.globals.def_site)
+ }
+
+ pub(crate) fn call_site() -> Span {
+ Bridge::with(|bridge| bridge.globals.call_site)
+ }
+
+ pub(crate) fn mixed_site() -> Span {
+ Bridge::with(|bridge| bridge.globals.mixed_site)
+ }
+}
+
+impl fmt::Debug for Span {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.debug())
+ }
+}
+
+macro_rules! define_client_side {
+ ($($name:ident {
+ $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)*;)*
+ }),* $(,)?) => {
+ $(impl $name {
+ $(pub(crate) fn $method($($arg: $arg_ty),*) $(-> $ret_ty)* {
+ Bridge::with(|bridge| {
+ let mut buf = bridge.cached_buffer.take();
+
+ buf.clear();
+ api_tags::Method::$name(api_tags::$name::$method).encode(&mut buf, &mut ());
+ reverse_encode!(buf; $($arg),*);
+
+ buf = bridge.dispatch.call(buf);
+
+ let r = Result::<_, PanicMessage>::decode(&mut &buf[..], &mut ());
+
+ bridge.cached_buffer = buf;
+
+ r.unwrap_or_else(|e| panic::resume_unwind(e.into()))
+ })
+ })*
+ })*
+ }
+}
+with_api!(self, self, define_client_side);
+
+struct Bridge<'a> {
+ /// Reusable buffer (only `clear`-ed, never shrunk), primarily
+ /// used for making requests.
+ cached_buffer: Buffer,
+
+ /// Server-side function that the client uses to make requests.
+ dispatch: closure::Closure<'a, Buffer, Buffer>,
+
+ /// Provided globals for this macro expansion.
+ globals: ExpnGlobals<Span>,
+}
+
+enum BridgeState<'a> {
+ /// No server is currently connected to this client.
+ NotConnected,
+
+ /// A server is connected and available for requests.
+ Connected(Bridge<'a>),
+
+ /// Access to the bridge is being exclusively acquired
+ /// (e.g., during `BridgeState::with`).
+ InUse,
+}
+
+enum BridgeStateL {}
+
+impl<'a> scoped_cell::ApplyL<'a> for BridgeStateL {
+ type Out = BridgeState<'a>;
+}
+
+thread_local! {
+ static BRIDGE_STATE: scoped_cell::ScopedCell<BridgeStateL> =
+ scoped_cell::ScopedCell::new(BridgeState::NotConnected);
+}
+
+impl BridgeState<'_> {
+ /// Take exclusive control of the thread-local
+ /// `BridgeState`, and pass it to `f`, mutably.
+ /// The state will be restored after `f` exits, even
+ /// by panic, including modifications made to it by `f`.
+ ///
+ /// N.B., while `f` is running, the thread-local state
+ /// is `BridgeState::InUse`.
+ fn with<R>(f: impl FnOnce(&mut BridgeState<'_>) -> R) -> R {
+ BRIDGE_STATE.with(|state| {
+ state.replace(BridgeState::InUse, |mut state| {
+ // FIXME(#52812) pass `f` directly to `replace` when `RefMutL` is gone
+ f(&mut *state)
+ })
+ })
+ }
+}
+
+impl Bridge<'_> {
+ fn with<R>(f: impl FnOnce(&mut Bridge<'_>) -> R) -> R {
+ BridgeState::with(|state| match state {
+ BridgeState::NotConnected => {
+ panic!("procedural macro API is used outside of a procedural macro");
+ }
+ BridgeState::InUse => {
+ panic!("procedural macro API is used while it's already in use");
+ }
+ BridgeState::Connected(bridge) => f(bridge),
+ })
+ }
+}
+
+pub(crate) fn is_available() -> bool {
+ BridgeState::with(|state| match state {
+ BridgeState::Connected(_) | BridgeState::InUse => true,
+ BridgeState::NotConnected => false,
+ })
+}
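+
+// Hedged sketch: client code can degrade gracefully when it is not being run
+// from inside a macro expansion.
+#[allow(dead_code)]
+fn _maybe_call_site_sketch() -> Option<Span> {
+ if is_available() { Some(Span::call_site()) } else { None }
+}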
+
+/// A client-side RPC entry-point, which may be using a different `proc_macro`
+/// from the one used by the server, but can be invoked compatibly.
+///
+/// Note that the (phantom) `I` ("input") and `O` ("output") type parameters
+/// decorate the `Client<I, O>` with the RPC "interface" of the entry-point, but
+/// do not themselves participate in ABI, at all, only facilitate type-checking.
+///
+/// E.g. `Client<TokenStream, TokenStream>` is the common proc macro interface,
+/// used for `#[proc_macro] fn foo(input: TokenStream) -> TokenStream`,
+/// indicating that the RPC input and output will be serialized token streams,
+/// and forcing the use of APIs that take/return `S::TokenStream`, server-side.
+#[repr(C)]
+pub struct Client<I, O> {
+ // FIXME(eddyb) use a reference to the `static COUNTERS`, instead of
+ // a wrapper `fn` pointer, once `const fn` can reference `static`s.
+ pub(super) get_handle_counters: extern "C" fn() -> &'static HandleCounters,
+
+ pub(super) run: extern "C" fn(BridgeConfig<'_>) -> Buffer,
+
+ pub(super) _marker: PhantomData<fn(I) -> O>,
+}
+
+impl<I, O> Copy for Client<I, O> {}
+impl<I, O> Clone for Client<I, O> {
+ fn clone(&self) -> Self {
+ *self
+ }
+}
+
+fn maybe_install_panic_hook(force_show_panics: bool) {
+ // Hide the default panic output within `proc_macro` expansions.
+ // NB. the server can't do this because it may use a different libstd.
+ static HIDE_PANICS_DURING_EXPANSION: Once = Once::new();
+ HIDE_PANICS_DURING_EXPANSION.call_once(|| {
+ let prev = panic::take_hook();
+ panic::set_hook(Box::new(move |info| {
+ let show = BridgeState::with(|state| match state {
+ BridgeState::NotConnected => true,
+ BridgeState::Connected(_) | BridgeState::InUse => force_show_panics,
+ });
+ if show {
+ prev(info)
+ }
+ }));
+ });
+}
+
+/// Client-side helper for handling client panics, entering the bridge,
+/// deserializing input and serializing output.
+// FIXME(eddyb) maybe replace `Bridge::enter` with this?
+fn run_client<A: for<'a, 's> DecodeMut<'a, 's, ()>, R: Encode<()>>(
+ config: BridgeConfig<'_>,
+ f: impl FnOnce(A) -> R,
+) -> Buffer {
+ let BridgeConfig { input: mut buf, dispatch, force_show_panics, .. } = config;
+
+ panic::catch_unwind(panic::AssertUnwindSafe(|| {
+ maybe_install_panic_hook(force_show_panics);
+
+ let reader = &mut &buf[..];
+ let (globals, input) = <(ExpnGlobals<Span>, A)>::decode(reader, &mut ());
+
+ // Put the buffer we used for input back in the `Bridge` for requests.
+ let new_state =
+ BridgeState::Connected(Bridge { cached_buffer: buf.take(), dispatch, globals });
+
+ BRIDGE_STATE.with(|state| {
+ state.set(new_state, || {
+ let output = f(input);
+
+ // Take the `cached_buffer` back out, for the output value.
+ buf = Bridge::with(|bridge| bridge.cached_buffer.take());
+
+ // HACK(eddyb) Separate encoding a success value (`Ok(output)`)
+ // from encoding a panic (`Err(e: PanicMessage)`) to avoid
+ // having handles outside the `bridge.enter(|| ...)` scope, and
+ // to catch panics that could happen while encoding the success.
+ //
+ // Note that panics should be impossible beyond this point, but
+ // this is defensively trying to avoid any accidental panicking
+ // reaching the `extern "C"` (which should `abort` but might not
+ // at the moment, so this is also potentially preventing UB).
+ buf.clear();
+ Ok::<_, ()>(output).encode(&mut buf, &mut ());
+ })
+ })
+ }))
+ .map_err(PanicMessage::from)
+ .unwrap_or_else(|e| {
+ buf.clear();
+ Err::<(), _>(e).encode(&mut buf, &mut ());
+ });
+ buf
+}
+
+impl Client<super::super::TokenStream, super::super::TokenStream> {
+ pub const fn expand1(
+ f: impl Fn(super::super::TokenStream) -> super::super::TokenStream + Copy,
+ ) -> Self {
+ Client {
+ get_handle_counters: HandleCounters::get,
+ run: super::selfless_reify::reify_to_extern_c_fn_hrt_bridge(move |bridge| {
+ run_client(bridge, |input| f(super::super::TokenStream(input)).0)
+ }),
+ _marker: PhantomData,
+ }
+ }
+}
+
+impl Client<(super::super::TokenStream, super::super::TokenStream), super::super::TokenStream> {
+ pub const fn expand2(
+ f: impl Fn(super::super::TokenStream, super::super::TokenStream) -> super::super::TokenStream
+ + Copy,
+ ) -> Self {
+ Client {
+ get_handle_counters: HandleCounters::get,
+ run: super::selfless_reify::reify_to_extern_c_fn_hrt_bridge(move |bridge| {
+ run_client(bridge, |(input, input2)| {
+ f(super::super::TokenStream(input), super::super::TokenStream(input2)).0
+ })
+ }),
+ _marker: PhantomData,
+ }
+ }
+}
+
+#[repr(C)]
+#[derive(Copy, Clone)]
+pub enum ProcMacro {
+ CustomDerive {
+ trait_name: &'static str,
+ attributes: &'static [&'static str],
+ client: Client<super::super::TokenStream, super::super::TokenStream>,
+ },
+
+ Attr {
+ name: &'static str,
+ client: Client<
+ (super::super::TokenStream, super::super::TokenStream),
+ super::super::TokenStream,
+ >,
+ },
+
+ Bang {
+ name: &'static str,
+ client: Client<super::super::TokenStream, super::super::TokenStream>,
+ },
+}
+
+impl ProcMacro {
+ pub fn name(&self) -> &'static str {
+ match self {
+ ProcMacro::CustomDerive { trait_name, .. } => trait_name,
+ ProcMacro::Attr { name, .. } => name,
+ ProcMacro::Bang { name, .. } => name,
+ }
+ }
+
+ pub const fn custom_derive(
+ trait_name: &'static str,
+ attributes: &'static [&'static str],
+ expand: impl Fn(super::super::TokenStream) -> super::super::TokenStream + Copy,
+ ) -> Self {
+ ProcMacro::CustomDerive { trait_name, attributes, client: Client::expand1(expand) }
+ }
+
+ pub const fn attr(
+ name: &'static str,
+ expand: impl Fn(super::super::TokenStream, super::super::TokenStream) -> super::super::TokenStream
+ + Copy,
+ ) -> Self {
+ ProcMacro::Attr { name, client: Client::expand2(expand) }
+ }
+
+ pub const fn bang(
+ name: &'static str,
+ expand: impl Fn(super::super::TokenStream) -> super::super::TokenStream + Copy,
+ ) -> Self {
+ ProcMacro::Bang { name, client: Client::expand1(expand) }
+ }
+}
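
The `ProcMacro` entries above are what a macro crate ultimately exports and what the host later dispatches on by name. As a loose, standalone illustration of that registration/dispatch shape (plain `String` expanders stand in for the real `Client`/`TokenStream` bridge types; all names here are made up):

```rust
// Simplified analog of the `ProcMacro` table: each entry pairs a name with an
// expander function, and the host dispatches on the entry kind.
#[derive(Copy, Clone)]
enum MacroKind {
    CustomDerive { trait_name: &'static str, expand: fn(String) -> String },
    Attr { name: &'static str, expand: fn(String, String) -> String },
    Bang { name: &'static str, expand: fn(String) -> String },
}

impl MacroKind {
    fn name(&self) -> &'static str {
        match self {
            MacroKind::CustomDerive { trait_name, .. } => trait_name,
            MacroKind::Attr { name, .. } | MacroKind::Bang { name, .. } => name,
        }
    }
}

fn main() {
    // A macro crate would export a table like this; the host looks entries up by name.
    let macros = [
        MacroKind::Bang { name: "hello", expand: |input: String| format!("\"hello, {}\"", input) },
        MacroKind::Attr { name: "route", expand: |attr: String, item: String| format!("/* {} */ {}", attr, item) },
        MacroKind::CustomDerive { trait_name: "MyTrait", expand: |_item: String| String::from("impl MyTrait for Foo {}") },
    ];

    for m in &macros {
        let output = match *m {
            MacroKind::CustomDerive { expand, .. } => expand(String::from("struct Foo;")),
            MacroKind::Attr { expand, .. } => expand(String::from("get(\"/\")"), String::from("fn index() {}")),
            MacroKind::Bang { expand, .. } => expand(String::from("world")),
        };
        println!("{} expanded to: {}", m.name(), output);
    }
}
```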
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/closure.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/closure.rs
new file mode 100644
index 000000000..d371ae3ce
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/closure.rs
@@ -0,0 +1,32 @@
+//! Closure type (equivalent to `&mut dyn FnMut(A) -> R`) that's `repr(C)`.
+
+use std::marker::PhantomData;
+
+#[repr(C)]
+pub struct Closure<'a, A, R> {
+ call: unsafe extern "C" fn(*mut Env, A) -> R,
+ env: *mut Env,
+ // Prevent Send and Sync impls. `!Send`/`!Sync` is the usual way of doing
+ // this, but that requires unstable features. rust-analyzer uses this code
+ // and avoids unstable features.
+ //
+ // The `'a` lifetime parameter represents the lifetime of `Env`.
+ _marker: PhantomData<*mut &'a mut ()>,
+}
+
+struct Env;
+
+impl<'a, A, R, F: FnMut(A) -> R> From<&'a mut F> for Closure<'a, A, R> {
+ fn from(f: &'a mut F) -> Self {
+ unsafe extern "C" fn call<A, R, F: FnMut(A) -> R>(env: *mut Env, arg: A) -> R {
+ (*(env as *mut _ as *mut F))(arg)
+ }
+ Closure { call: call::<A, R, F>, env: f as *mut _ as *mut Env, _marker: PhantomData }
+ }
+}
+
+impl<'a, A, R> Closure<'a, A, R> {
+ pub fn call(&mut self, arg: A) -> R {
+ unsafe { (self.call)(self.env, arg) }
+ }
+}
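
The `Closure` type above erases `&mut dyn FnMut(A) -> R` into a C-compatible pair of function pointer and environment pointer so that calls can cross the ABI boundary. A minimal standalone re-creation of that erasure pattern, plus a call site (the real type is additionally `repr(C)` and carries a `PhantomData` marker chosen to suppress `Send`/`Sync`):

```rust
use std::marker::PhantomData;

// (fn pointer, env pointer) erasure of a `&mut F` where `F: FnMut(A) -> R`.
struct ErasedClosure<'a, A, R> {
    call: unsafe fn(*mut (), A) -> R,
    env: *mut (),
    _marker: PhantomData<&'a mut ()>,
}

impl<'a, A, R, F: FnMut(A) -> R> From<&'a mut F> for ErasedClosure<'a, A, R> {
    fn from(f: &'a mut F) -> Self {
        // Monomorphized shim that casts the erased pointer back to `F` and calls it.
        unsafe fn call<A, R, F: FnMut(A) -> R>(env: *mut (), arg: A) -> R {
            (*(env as *mut F))(arg)
        }
        ErasedClosure { call: call::<A, R, F>, env: f as *mut F as *mut (), _marker: PhantomData }
    }
}

impl<'a, A, R> ErasedClosure<'a, A, R> {
    fn call(&mut self, arg: A) -> R {
        unsafe { (self.call)(self.env, arg) }
    }
}

fn main() {
    let mut count = 0;
    let mut add = |x: i32| {
        count += x;
        count
    };
    let mut erased: ErasedClosure<'_, i32, i32> = ErasedClosure::from(&mut add);
    assert_eq!(erased.call(2), 2);
    assert_eq!(erased.call(3), 5);
    println!("running total: {}", erased.call(0));
}
```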
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/handle.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/handle.rs
new file mode 100644
index 000000000..c219a9465
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/handle.rs
@@ -0,0 +1,89 @@
+//! Server-side handles and storage for per-handle data.
+
+use std::collections::{BTreeMap, HashMap};
+use std::hash::{BuildHasher, Hash};
+use std::num::NonZeroU32;
+use std::ops::{Index, IndexMut};
+use std::sync::atomic::{AtomicUsize, Ordering};
+
+pub(super) type Handle = NonZeroU32;
+
+/// A store that associates values of type `T` with numeric handles. A value can
+/// be looked up using its handle.
+pub(super) struct OwnedStore<T: 'static> {
+ counter: &'static AtomicUsize,
+ data: BTreeMap<Handle, T>,
+}
+
+impl<T> OwnedStore<T> {
+ pub(super) fn new(counter: &'static AtomicUsize) -> Self {
+ // Ensure the handle counter isn't 0, which would panic later,
+ // when `NonZeroU32::new` (aka `Handle::new`) is called in `alloc`.
+ assert_ne!(counter.load(Ordering::SeqCst), 0);
+
+ OwnedStore { counter, data: BTreeMap::new() }
+ }
+}
+
+impl<T> OwnedStore<T> {
+ pub(super) fn alloc(&mut self, x: T) -> Handle {
+ let counter = self.counter.fetch_add(1, Ordering::SeqCst);
+ let handle = Handle::new(counter as u32).expect("`proc_macro` handle counter overflowed");
+ assert!(self.data.insert(handle, x).is_none());
+ handle
+ }
+
+ pub(super) fn take(&mut self, h: Handle) -> T {
+ self.data.remove(&h).expect("use-after-free in `proc_macro` handle")
+ }
+}
+
+impl<T> Index<Handle> for OwnedStore<T> {
+ type Output = T;
+ fn index(&self, h: Handle) -> &T {
+ self.data.get(&h).expect("use-after-free in `proc_macro` handle")
+ }
+}
+
+impl<T> IndexMut<Handle> for OwnedStore<T> {
+ fn index_mut(&mut self, h: Handle) -> &mut T {
+ self.data.get_mut(&h).expect("use-after-free in `proc_macro` handle")
+ }
+}
+
+// HACK(eddyb) deterministic `std::collections::hash_map::RandomState` replacement
+// that doesn't require adding any dependencies to `proc_macro` (like `rustc-hash`).
+#[derive(Clone)]
+struct NonRandomState;
+
+impl BuildHasher for NonRandomState {
+ type Hasher = std::collections::hash_map::DefaultHasher;
+ #[inline]
+ fn build_hasher(&self) -> Self::Hasher {
+ Self::Hasher::new()
+ }
+}
+
+/// Like `OwnedStore`, but avoids storing any value more than once.
+pub(super) struct InternedStore<T: 'static> {
+ owned: OwnedStore<T>,
+ interner: HashMap<T, Handle, NonRandomState>,
+}
+
+impl<T: Copy + Eq + Hash> InternedStore<T> {
+ pub(super) fn new(counter: &'static AtomicUsize) -> Self {
+ InternedStore {
+ owned: OwnedStore::new(counter),
+ interner: HashMap::with_hasher(NonRandomState),
+ }
+ }
+
+ pub(super) fn alloc(&mut self, x: T) -> Handle {
+ let owned = &mut self.owned;
+ *self.interner.entry(x).or_insert_with(|| owned.alloc(x))
+ }
+
+ pub(super) fn copy(&mut self, h: Handle) -> T {
+ self.owned[h]
+ }
+}
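
Since only integer handles cross the bridge, the server keeps the actual objects in stores like `OwnedStore` above, keyed by a monotonically increasing `NonZeroU32`. A standalone sketch of that allocate/index/take lifecycle (simplified: the counter is a local field here rather than a shared `static AtomicUsize`):

```rust
use std::collections::BTreeMap;
use std::num::NonZeroU32;

// Values live server-side; clients only ever see the numeric handle.
struct Store<T> {
    next: u32,
    data: BTreeMap<NonZeroU32, T>,
}

impl<T> Store<T> {
    fn new() -> Self {
        // Start at 1 so the very first handle is a valid `NonZeroU32`.
        Store { next: 1, data: BTreeMap::new() }
    }

    fn alloc(&mut self, value: T) -> NonZeroU32 {
        let handle = NonZeroU32::new(self.next).expect("handle counter overflowed");
        self.next += 1;
        assert!(self.data.insert(handle, value).is_none());
        handle
    }

    fn get(&self, handle: NonZeroU32) -> &T {
        self.data.get(&handle).expect("use-after-free of handle")
    }

    fn take(&mut self, handle: NonZeroU32) -> T {
        self.data.remove(&handle).expect("use-after-free of handle")
    }
}

fn main() {
    let mut store = Store::new();
    let h = store.alloc(String::from("a server-side TokenStream would live here"));
    // The client sends `h` back over RPC; the server resolves it to the value.
    println!("handle {} -> {:?}", h, store.get(h));
    let owned = store.take(h); // e.g. a `drop` RPC method reclaims the value
    drop(owned);
}
```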
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/mod.rs
new file mode 100644
index 000000000..ffd440793
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/mod.rs
@@ -0,0 +1,493 @@
+//! Internal interface for communicating between a `proc_macro` client
+//! (a proc macro crate) and a `proc_macro` server (a compiler front-end).
+//!
+//! Serialization (with C ABI buffers) and unique integer handles are employed
+//! to allow safely interfacing between two copies of `proc_macro` built
+//! (from the same source) by different compilers with potentially mismatching
+//! Rust ABIs (e.g., stage0/bin/rustc vs stage1/bin/rustc during bootstrap).
+
+#![deny(unsafe_code)]
+
+pub use super::{Delimiter, Level, LineColumn, Spacing};
+use std::fmt;
+use std::hash::Hash;
+use std::marker;
+use std::mem;
+use std::ops::Bound;
+use std::panic;
+use std::sync::atomic::AtomicUsize;
+use std::sync::Once;
+use std::thread;
+
+/// Higher-order macro describing the server RPC API, allowing automatic
+/// generation of type-safe Rust APIs, both client-side and server-side.
+///
+/// `with_api!(MySelf, my_self, my_macro)` expands to:
+/// ```rust,ignore (pseudo-code)
+/// my_macro! {
+/// // ...
+/// Literal {
+/// // ...
+/// fn character(ch: char) -> MySelf::Literal;
+/// // ...
+/// fn span(my_self: &MySelf::Literal) -> MySelf::Span;
+/// fn set_span(my_self: &mut MySelf::Literal, span: MySelf::Span);
+/// },
+/// // ...
+/// }
+/// ```
+///
+/// The first two arguments serve to customize the argument names
+/// and argument/return types, to enable several different use cases:
+///
+/// If `my_self` is just `self`, then each `fn` signature can be used
+/// as-is for a method. If it's anything else (`self_` in practice),
+/// then the signatures don't have a special `self` argument, and
+/// can, therefore, have a different one introduced.
+///
+/// If `MySelf` is just `Self`, then the types are only valid inside
+/// a trait or a trait impl, where the trait has associated types
+/// for each of the API types. If non-associated types are desired,
+/// a module name (`self` in practice) can be used instead of `Self`.
+macro_rules! with_api {
+ ($S:ident, $self:ident, $m:ident) => {
+ $m! {
+ FreeFunctions {
+ fn drop($self: $S::FreeFunctions);
+ fn track_env_var(var: &str, value: Option<&str>);
+ fn track_path(path: &str);
+ },
+ TokenStream {
+ fn drop($self: $S::TokenStream);
+ fn clone($self: &$S::TokenStream) -> $S::TokenStream;
+ fn is_empty($self: &$S::TokenStream) -> bool;
+ fn expand_expr($self: &$S::TokenStream) -> Result<$S::TokenStream, ()>;
+ fn from_str(src: &str) -> $S::TokenStream;
+ fn to_string($self: &$S::TokenStream) -> String;
+ fn from_token_tree(
+ tree: TokenTree<$S::TokenStream, $S::Span, $S::Ident, $S::Literal>,
+ ) -> $S::TokenStream;
+ fn concat_trees(
+ base: Option<$S::TokenStream>,
+ trees: Vec<TokenTree<$S::TokenStream, $S::Span, $S::Ident, $S::Literal>>,
+ ) -> $S::TokenStream;
+ fn concat_streams(
+ base: Option<$S::TokenStream>,
+ streams: Vec<$S::TokenStream>,
+ ) -> $S::TokenStream;
+ fn into_trees(
+ $self: $S::TokenStream
+ ) -> Vec<TokenTree<$S::TokenStream, $S::Span, $S::Ident, $S::Literal>>;
+ },
+ Ident {
+ fn new(string: &str, span: $S::Span, is_raw: bool) -> $S::Ident;
+ fn span($self: $S::Ident) -> $S::Span;
+ fn with_span($self: $S::Ident, span: $S::Span) -> $S::Ident;
+ },
+ Literal {
+ fn drop($self: $S::Literal);
+ fn clone($self: &$S::Literal) -> $S::Literal;
+ fn from_str(s: &str) -> Result<$S::Literal, ()>;
+ fn to_string($self: &$S::Literal) -> String;
+ fn debug_kind($self: &$S::Literal) -> String;
+ fn symbol($self: &$S::Literal) -> String;
+ fn suffix($self: &$S::Literal) -> Option<String>;
+ fn integer(n: &str) -> $S::Literal;
+ fn typed_integer(n: &str, kind: &str) -> $S::Literal;
+ fn float(n: &str) -> $S::Literal;
+ fn f32(n: &str) -> $S::Literal;
+ fn f64(n: &str) -> $S::Literal;
+ fn string(string: &str) -> $S::Literal;
+ fn character(ch: char) -> $S::Literal;
+ fn byte_string(bytes: &[u8]) -> $S::Literal;
+ fn span($self: &$S::Literal) -> $S::Span;
+ fn set_span($self: &mut $S::Literal, span: $S::Span);
+ fn subspan(
+ $self: &$S::Literal,
+ start: Bound<usize>,
+ end: Bound<usize>,
+ ) -> Option<$S::Span>;
+ },
+ SourceFile {
+ fn drop($self: $S::SourceFile);
+ fn clone($self: &$S::SourceFile) -> $S::SourceFile;
+ fn eq($self: &$S::SourceFile, other: &$S::SourceFile) -> bool;
+ fn path($self: &$S::SourceFile) -> String;
+ fn is_real($self: &$S::SourceFile) -> bool;
+ },
+ MultiSpan {
+ fn drop($self: $S::MultiSpan);
+ fn new() -> $S::MultiSpan;
+ fn push($self: &mut $S::MultiSpan, span: $S::Span);
+ },
+ Diagnostic {
+ fn drop($self: $S::Diagnostic);
+ fn new(level: Level, msg: &str, span: $S::MultiSpan) -> $S::Diagnostic;
+ fn sub(
+ $self: &mut $S::Diagnostic,
+ level: Level,
+ msg: &str,
+ span: $S::MultiSpan,
+ );
+ fn emit($self: $S::Diagnostic);
+ },
+ Span {
+ fn debug($self: $S::Span) -> String;
+ fn source_file($self: $S::Span) -> $S::SourceFile;
+ fn parent($self: $S::Span) -> Option<$S::Span>;
+ fn source($self: $S::Span) -> $S::Span;
+ fn start($self: $S::Span) -> LineColumn;
+ fn end($self: $S::Span) -> LineColumn;
+ fn before($self: $S::Span) -> $S::Span;
+ fn after($self: $S::Span) -> $S::Span;
+ fn join($self: $S::Span, other: $S::Span) -> Option<$S::Span>;
+ fn resolved_at($self: $S::Span, at: $S::Span) -> $S::Span;
+ fn source_text($self: $S::Span) -> Option<String>;
+ fn save_span($self: $S::Span) -> usize;
+ fn recover_proc_macro_span(id: usize) -> $S::Span;
+ },
+ }
+ };
+}
+
+// FIXME(eddyb) this calls `encode` for each argument, but in reverse,
+// to match the ordering in `reverse_decode`.
+macro_rules! reverse_encode {
+ ($writer:ident;) => {};
+ ($writer:ident; $first:ident $(, $rest:ident)*) => {
+ reverse_encode!($writer; $($rest),*);
+ $first.encode(&mut $writer, &mut ());
+ }
+}
+
+// FIXME(eddyb) this calls `decode` for each argument, but in reverse,
+// to avoid borrow conflicts from borrows started by `&mut` arguments.
+macro_rules! reverse_decode {
+ ($reader:ident, $s:ident;) => {};
+ ($reader:ident, $s:ident; $first:ident: $first_ty:ty $(, $rest:ident: $rest_ty:ty)*) => {
+ reverse_decode!($reader, $s; $($rest: $rest_ty),*);
+ let $first = <$first_ty>::decode(&mut $reader, $s);
+ }
+}
+
+#[allow(unsafe_code)]
+mod buffer;
+#[forbid(unsafe_code)]
+pub mod client;
+#[allow(unsafe_code)]
+mod closure;
+#[forbid(unsafe_code)]
+mod handle;
+#[macro_use]
+#[forbid(unsafe_code)]
+mod rpc;
+#[allow(unsafe_code)]
+mod scoped_cell;
+#[allow(unsafe_code)]
+mod selfless_reify;
+#[forbid(unsafe_code)]
+pub mod server;
+
+use buffer::Buffer;
+pub use rpc::PanicMessage;
+use rpc::{Decode, DecodeMut, Encode, Reader, Writer};
+
+/// Configuration for establishing an active connection between a server and a
+/// client. The server creates the bridge config (`run_server` in `server.rs`),
+/// then passes it to the client through the function pointer in the `run` field
+/// of `client::Client`. The client constructs a local `Bridge` from the config
+/// in TLS during its execution (`Bridge::{enter, with}` in `client.rs`).
+#[repr(C)]
+pub struct BridgeConfig<'a> {
+ /// Buffer used to pass initial input to the client.
+ input: Buffer,
+
+ /// Server-side function that the client uses to make requests.
+ dispatch: closure::Closure<'a, Buffer, Buffer>,
+
+ /// If `true`, always invoke the default panic hook.
+ force_show_panics: bool,
+
+ // Prevent Send and Sync impls. `!Send`/`!Sync` is the usual way of doing
+ // this, but that requires unstable features. rust-analyzer uses this code
+ // and avoids unstable features.
+ _marker: marker::PhantomData<*mut ()>,
+}
+
+#[forbid(unsafe_code)]
+#[allow(non_camel_case_types)]
+mod api_tags {
+ use super::rpc::{DecodeMut, Encode, Reader, Writer};
+
+ macro_rules! declare_tags {
+ ($($name:ident {
+ $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)*;)*
+ }),* $(,)?) => {
+ $(
+ pub(super) enum $name {
+ $($method),*
+ }
+ rpc_encode_decode!(enum $name { $($method),* });
+ )*
+
+ pub(super) enum Method {
+ $($name($name)),*
+ }
+ rpc_encode_decode!(enum Method { $($name(m)),* });
+ }
+ }
+ with_api!(self, self, declare_tags);
+}
+
+/// Helper to wrap associated types to allow trait impl dispatch.
+/// That is, normally a pair of impls for `T::Foo` and `T::Bar`
+/// can overlap, but if the impls are, instead, on types like
+/// `Marked<T::Foo, Foo>` and `Marked<T::Bar, Bar>`, they can't.
+trait Mark {
+ type Unmarked;
+ fn mark(unmarked: Self::Unmarked) -> Self;
+}
+
+/// Unwrap types wrapped by `Mark::mark` (see `Mark` for details).
+trait Unmark {
+ type Unmarked;
+ fn unmark(self) -> Self::Unmarked;
+}
+
+#[derive(Copy, Clone, PartialEq, Eq, Hash)]
+struct Marked<T, M> {
+ value: T,
+ _marker: marker::PhantomData<M>,
+}
+
+impl<T, M> Mark for Marked<T, M> {
+ type Unmarked = T;
+ fn mark(unmarked: Self::Unmarked) -> Self {
+ Marked { value: unmarked, _marker: marker::PhantomData }
+ }
+}
+impl<T, M> Unmark for Marked<T, M> {
+ type Unmarked = T;
+ fn unmark(self) -> Self::Unmarked {
+ self.value
+ }
+}
+impl<'a, T, M> Unmark for &'a Marked<T, M> {
+ type Unmarked = &'a T;
+ fn unmark(self) -> Self::Unmarked {
+ &self.value
+ }
+}
+impl<'a, T, M> Unmark for &'a mut Marked<T, M> {
+ type Unmarked = &'a mut T;
+ fn unmark(self) -> Self::Unmarked {
+ &mut self.value
+ }
+}
+
+impl<T: Mark> Mark for Vec<T> {
+ type Unmarked = Vec<T::Unmarked>;
+ fn mark(unmarked: Self::Unmarked) -> Self {
+ // Should be a no-op due to std's in-place collect optimizations.
+ unmarked.into_iter().map(T::mark).collect()
+ }
+}
+impl<T: Unmark> Unmark for Vec<T> {
+ type Unmarked = Vec<T::Unmarked>;
+ fn unmark(self) -> Self::Unmarked {
+ // Should be a no-op due to std's in-place collect optimizations.
+ self.into_iter().map(T::unmark).collect()
+ }
+}
+
+macro_rules! mark_noop {
+ ($($ty:ty),* $(,)?) => {
+ $(
+ impl Mark for $ty {
+ type Unmarked = Self;
+ fn mark(unmarked: Self::Unmarked) -> Self {
+ unmarked
+ }
+ }
+ impl Unmark for $ty {
+ type Unmarked = Self;
+ fn unmark(self) -> Self::Unmarked {
+ self
+ }
+ }
+ )*
+ }
+}
+mark_noop! {
+ (),
+ bool,
+ char,
+ &'_ [u8],
+ &'_ str,
+ String,
+ u8,
+ usize,
+ Delimiter,
+ Level,
+ LineColumn,
+ Spacing,
+}
+
+rpc_encode_decode!(
+ enum Delimiter {
+ Parenthesis,
+ Brace,
+ Bracket,
+ None,
+ }
+);
+rpc_encode_decode!(
+ enum Level {
+ Error,
+ Warning,
+ Note,
+ Help,
+ }
+);
+rpc_encode_decode!(struct LineColumn { line, column });
+rpc_encode_decode!(
+ enum Spacing {
+ Alone,
+ Joint,
+ }
+);
+
+macro_rules! mark_compound {
+ (struct $name:ident <$($T:ident),+> { $($field:ident),* $(,)? }) => {
+ impl<$($T: Mark),+> Mark for $name <$($T),+> {
+ type Unmarked = $name <$($T::Unmarked),+>;
+ fn mark(unmarked: Self::Unmarked) -> Self {
+ $name {
+ $($field: Mark::mark(unmarked.$field)),*
+ }
+ }
+ }
+ impl<$($T: Unmark),+> Unmark for $name <$($T),+> {
+ type Unmarked = $name <$($T::Unmarked),+>;
+ fn unmark(self) -> Self::Unmarked {
+ $name {
+ $($field: Unmark::unmark(self.$field)),*
+ }
+ }
+ }
+ };
+ (enum $name:ident <$($T:ident),+> { $($variant:ident $(($field:ident))?),* $(,)? }) => {
+ impl<$($T: Mark),+> Mark for $name <$($T),+> {
+ type Unmarked = $name <$($T::Unmarked),+>;
+ fn mark(unmarked: Self::Unmarked) -> Self {
+ match unmarked {
+ $($name::$variant $(($field))? => {
+ $name::$variant $((Mark::mark($field)))?
+ })*
+ }
+ }
+ }
+ impl<$($T: Unmark),+> Unmark for $name <$($T),+> {
+ type Unmarked = $name <$($T::Unmarked),+>;
+ fn unmark(self) -> Self::Unmarked {
+ match self {
+ $($name::$variant $(($field))? => {
+ $name::$variant $((Unmark::unmark($field)))?
+ })*
+ }
+ }
+ }
+ }
+}
+
+macro_rules! compound_traits {
+ ($($t:tt)*) => {
+ rpc_encode_decode!($($t)*);
+ mark_compound!($($t)*);
+ };
+}
+
+compound_traits!(
+ enum Bound<T> {
+ Included(x),
+ Excluded(x),
+ Unbounded,
+ }
+);
+
+compound_traits!(
+ enum Option<T> {
+ Some(t),
+ None,
+ }
+);
+
+compound_traits!(
+ enum Result<T, E> {
+ Ok(t),
+ Err(e),
+ }
+);
+
+#[derive(Copy, Clone)]
+pub struct DelimSpan<Span> {
+ pub open: Span,
+ pub close: Span,
+ pub entire: Span,
+}
+
+impl<Span: Copy> DelimSpan<Span> {
+ pub fn from_single(span: Span) -> Self {
+ DelimSpan { open: span, close: span, entire: span }
+ }
+}
+
+compound_traits!(struct DelimSpan<Span> { open, close, entire });
+
+#[derive(Clone)]
+pub struct Group<TokenStream, Span> {
+ pub delimiter: Delimiter,
+ pub stream: Option<TokenStream>,
+ pub span: DelimSpan<Span>,
+}
+
+compound_traits!(struct Group<TokenStream, Span> { delimiter, stream, span });
+
+#[derive(Clone)]
+pub struct Punct<Span> {
+ pub ch: u8,
+ pub joint: bool,
+ pub span: Span,
+}
+
+compound_traits!(struct Punct<Span> { ch, joint, span });
+
+#[derive(Clone)]
+pub enum TokenTree<TokenStream, Span, Ident, Literal> {
+ Group(Group<TokenStream, Span>),
+ Punct(Punct<Span>),
+ Ident(Ident),
+ Literal(Literal),
+}
+
+compound_traits!(
+ enum TokenTree<TokenStream, Span, Ident, Literal> {
+ Group(tt),
+ Punct(tt),
+ Ident(tt),
+ Literal(tt),
+ }
+);
+
+/// Globals provided alongside the initial inputs for a macro expansion.
+/// Provides values such as spans which are used frequently to avoid RPC.
+#[derive(Clone)]
+pub struct ExpnGlobals<Span> {
+ pub def_site: Span,
+ pub call_site: Span,
+ pub mixed_site: Span,
+}
+
+compound_traits!(
+ struct ExpnGlobals<Span> { def_site, call_site, mixed_site }
+);
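
The `with_api!` construct above is a higher-order macro: the method list lives in one place and is re-expanded through different callback macros (`declare_tags`, the client/server generators) so tags, client stubs and server traits all stay in sync. A toy standalone version of that pattern, with a made-up two-method API rather than the real one:

```rust
// One macro owns the API description...
macro_rules! with_toy_api {
    ($m:ident) => {
        $m! {
            fn is_empty(stream: &str) -> bool;
            fn to_uppercase(stream: &str) -> String;
        }
    };
}

// ...and each callback macro derives a different artifact from it.
macro_rules! declare_method_tags {
    ($(fn $name:ident($($arg:ident: $arg_ty:ty),*) -> $ret:ty;)*) => {
        #[allow(non_camel_case_types, dead_code)]
        #[derive(Debug)]
        enum Method { $($name),* }
    };
}

macro_rules! declare_trait {
    ($(fn $name:ident($($arg:ident: $arg_ty:ty),*) -> $ret:ty;)*) => {
        trait Api {
            $(fn $name(&mut self, $($arg: $arg_ty),*) -> $ret;)*
        }
    };
}

with_toy_api!(declare_method_tags);
with_toy_api!(declare_trait);

struct Server;

impl Api for Server {
    fn is_empty(&mut self, stream: &str) -> bool {
        stream.is_empty()
    }
    fn to_uppercase(&mut self, stream: &str) -> String {
        stream.to_uppercase()
    }
}

fn main() {
    let mut server = Server;
    println!(
        "{:?} -> {}, {:?} -> {}",
        Method::is_empty,
        server.is_empty(""),
        Method::to_uppercase,
        server.to_uppercase("ident"),
    );
}
```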
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/rpc.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/rpc.rs
new file mode 100644
index 000000000..e9d7a46c0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/rpc.rs
@@ -0,0 +1,304 @@
+//! Serialization for client-server communication.
+
+use std::any::Any;
+use std::char;
+use std::io::Write;
+use std::num::NonZeroU32;
+use std::str;
+
+pub(super) type Writer = super::buffer::Buffer;
+
+pub(super) trait Encode<S>: Sized {
+ fn encode(self, w: &mut Writer, s: &mut S);
+}
+
+pub(super) type Reader<'a> = &'a [u8];
+
+pub(super) trait Decode<'a, 's, S>: Sized {
+ fn decode(r: &mut Reader<'a>, s: &'s S) -> Self;
+}
+
+pub(super) trait DecodeMut<'a, 's, S>: Sized {
+ fn decode(r: &mut Reader<'a>, s: &'s mut S) -> Self;
+}
+
+macro_rules! rpc_encode_decode {
+ (le $ty:ty) => {
+ impl<S> Encode<S> for $ty {
+ fn encode(self, w: &mut Writer, _: &mut S) {
+ w.extend_from_array(&self.to_le_bytes());
+ }
+ }
+
+ impl<S> DecodeMut<'_, '_, S> for $ty {
+ fn decode(r: &mut Reader<'_>, _: &mut S) -> Self {
+ const N: usize = ::std::mem::size_of::<$ty>();
+
+ let mut bytes = [0; N];
+ bytes.copy_from_slice(&r[..N]);
+ *r = &r[N..];
+
+ Self::from_le_bytes(bytes)
+ }
+ }
+ };
+ (struct $name:ident $(<$($T:ident),+>)? { $($field:ident),* $(,)? }) => {
+ impl<S, $($($T: Encode<S>),+)?> Encode<S> for $name $(<$($T),+>)? {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ $(self.$field.encode(w, s);)*
+ }
+ }
+
+ impl<'a, S, $($($T: for<'s> DecodeMut<'a, 's, S>),+)?> DecodeMut<'a, '_, S>
+ for $name $(<$($T),+>)?
+ {
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ $name {
+ $($field: DecodeMut::decode(r, s)),*
+ }
+ }
+ }
+ };
+ (enum $name:ident $(<$($T:ident),+>)? { $($variant:ident $(($field:ident))*),* $(,)? }) => {
+ impl<S, $($($T: Encode<S>),+)?> Encode<S> for $name $(<$($T),+>)? {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ // HACK(eddyb): `Tag` enum duplicated between the
+ // two impls as there's no other place to stash it.
+ #[allow(non_upper_case_globals)]
+ mod tag {
+ #[repr(u8)] enum Tag { $($variant),* }
+
+ $(pub const $variant: u8 = Tag::$variant as u8;)*
+ }
+
+ match self {
+ $($name::$variant $(($field))* => {
+ tag::$variant.encode(w, s);
+ $($field.encode(w, s);)*
+ })*
+ }
+ }
+ }
+
+ impl<'a, S, $($($T: for<'s> DecodeMut<'a, 's, S>),+)?> DecodeMut<'a, '_, S>
+ for $name $(<$($T),+>)?
+ {
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ // HACK(eddyb): `Tag` enum duplicated between the
+ // two impls as there's no other place to stash it.
+ #[allow(non_upper_case_globals)]
+ mod tag {
+ #[repr(u8)] enum Tag { $($variant),* }
+
+ $(pub const $variant: u8 = Tag::$variant as u8;)*
+ }
+
+ match u8::decode(r, s) {
+ $(tag::$variant => {
+ $(let $field = DecodeMut::decode(r, s);)*
+ $name::$variant $(($field))*
+ })*
+ _ => unreachable!(),
+ }
+ }
+ }
+ }
+}
+
+impl<S> Encode<S> for () {
+ fn encode(self, _: &mut Writer, _: &mut S) {}
+}
+
+impl<S> DecodeMut<'_, '_, S> for () {
+ fn decode(_: &mut Reader<'_>, _: &mut S) -> Self {}
+}
+
+impl<S> Encode<S> for u8 {
+ fn encode(self, w: &mut Writer, _: &mut S) {
+ w.push(self);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for u8 {
+ fn decode(r: &mut Reader<'_>, _: &mut S) -> Self {
+ let x = r[0];
+ *r = &r[1..];
+ x
+ }
+}
+
+rpc_encode_decode!(le u32);
+rpc_encode_decode!(le usize);
+
+impl<S> Encode<S> for bool {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ (self as u8).encode(w, s);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for bool {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ match u8::decode(r, s) {
+ 0 => false,
+ 1 => true,
+ _ => unreachable!(),
+ }
+ }
+}
+
+impl<S> Encode<S> for char {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ (self as u32).encode(w, s);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for char {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ char::from_u32(u32::decode(r, s)).unwrap()
+ }
+}
+
+impl<S> Encode<S> for NonZeroU32 {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.get().encode(w, s);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for NonZeroU32 {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ Self::new(u32::decode(r, s)).unwrap()
+ }
+}
+
+impl<S, A: Encode<S>, B: Encode<S>> Encode<S> for (A, B) {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.0.encode(w, s);
+ self.1.encode(w, s);
+ }
+}
+
+impl<'a, S, A: for<'s> DecodeMut<'a, 's, S>, B: for<'s> DecodeMut<'a, 's, S>> DecodeMut<'a, '_, S>
+ for (A, B)
+{
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ (DecodeMut::decode(r, s), DecodeMut::decode(r, s))
+ }
+}
+
+impl<S> Encode<S> for &[u8] {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.len().encode(w, s);
+ w.write_all(self).unwrap();
+ }
+}
+
+impl<'a, S> DecodeMut<'a, '_, S> for &'a [u8] {
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ let len = usize::decode(r, s);
+ let xs = &r[..len];
+ *r = &r[len..];
+ xs
+ }
+}
+
+impl<S> Encode<S> for &str {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.as_bytes().encode(w, s);
+ }
+}
+
+impl<'a, S> DecodeMut<'a, '_, S> for &'a str {
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ str::from_utf8(<&[u8]>::decode(r, s)).unwrap()
+ }
+}
+
+impl<S> Encode<S> for String {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self[..].encode(w, s);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for String {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ <&str>::decode(r, s).to_string()
+ }
+}
+
+impl<S, T: Encode<S>> Encode<S> for Vec<T> {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.len().encode(w, s);
+ for x in self {
+ x.encode(w, s);
+ }
+ }
+}
+
+impl<'a, S, T: for<'s> DecodeMut<'a, 's, S>> DecodeMut<'a, '_, S> for Vec<T> {
+ fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+ let len = usize::decode(r, s);
+ let mut vec = Vec::with_capacity(len);
+ for _ in 0..len {
+ vec.push(T::decode(r, s));
+ }
+ vec
+ }
+}
+
+/// Simplified version of panic payloads, ignoring
+/// types other than `&'static str` and `String`.
+pub enum PanicMessage {
+ StaticStr(&'static str),
+ String(String),
+ Unknown,
+}
+
+impl From<Box<dyn Any + Send>> for PanicMessage {
+ fn from(payload: Box<dyn Any + Send + 'static>) -> Self {
+ if let Some(s) = payload.downcast_ref::<&'static str>() {
+ return PanicMessage::StaticStr(s);
+ }
+ if let Ok(s) = payload.downcast::<String>() {
+ return PanicMessage::String(*s);
+ }
+ PanicMessage::Unknown
+ }
+}
+
+impl Into<Box<dyn Any + Send>> for PanicMessage {
+ fn into(self) -> Box<dyn Any + Send> {
+ match self {
+ PanicMessage::StaticStr(s) => Box::new(s),
+ PanicMessage::String(s) => Box::new(s),
+ PanicMessage::Unknown => {
+ struct UnknownPanicMessage;
+ Box::new(UnknownPanicMessage)
+ }
+ }
+ }
+}
+
+impl PanicMessage {
+ pub fn as_str(&self) -> Option<&str> {
+ match self {
+ PanicMessage::StaticStr(s) => Some(s),
+ PanicMessage::String(s) => Some(s),
+ PanicMessage::Unknown => None,
+ }
+ }
+}
+
+impl<S> Encode<S> for PanicMessage {
+ fn encode(self, w: &mut Writer, s: &mut S) {
+ self.as_str().encode(w, s);
+ }
+}
+
+impl<S> DecodeMut<'_, '_, S> for PanicMessage {
+ fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+ match Option::<String>::decode(r, s) {
+ Some(s) => PanicMessage::String(s),
+ None => PanicMessage::Unknown,
+ }
+ }
+}
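
The `Encode`/`DecodeMut` impls above define the wire format: integers go out as little-endian bytes, slices and strings as a length followed by their raw bytes, and enums as a `u8` tag. A standalone sketch of that format for a `(u32, &str)` pair, with `Vec<u8>` standing in for the bridge's `Buffer` (the real length prefix is a `usize`, narrowed to `u32` here for brevity):

```rust
// Encode: little-endian integers, length-prefixed strings.
fn encode_u32(w: &mut Vec<u8>, x: u32) {
    w.extend_from_slice(&x.to_le_bytes());
}

fn encode_str(w: &mut Vec<u8>, s: &str) {
    encode_u32(w, s.len() as u32);
    w.extend_from_slice(s.as_bytes());
}

// Decode: consume bytes from the front of the slice, advancing the reader.
fn decode_u32(r: &mut &[u8]) -> u32 {
    let mut bytes = [0u8; 4];
    bytes.copy_from_slice(&r[..4]);
    *r = &r[4..];
    u32::from_le_bytes(bytes)
}

fn decode_str<'a>(r: &mut &'a [u8]) -> &'a str {
    let len = decode_u32(r) as usize;
    let (head, rest) = r.split_at(len);
    *r = rest;
    std::str::from_utf8(head).unwrap()
}

fn main() {
    let mut buf = Vec::new();
    encode_u32(&mut buf, 42);
    encode_str(&mut buf, "TokenStream::from_str");

    let mut reader = &buf[..];
    assert_eq!(decode_u32(&mut reader), 42);
    assert_eq!(decode_str(&mut reader), "TokenStream::from_str");
    assert!(reader.is_empty());
    println!("round-tripped {} bytes", buf.len());
}
```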
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/scoped_cell.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/scoped_cell.rs
new file mode 100644
index 000000000..2cde1f65a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/scoped_cell.rs
@@ -0,0 +1,81 @@
+//! `Cell` variant for (scoped) existential lifetimes.
+
+use std::cell::Cell;
+use std::mem;
+use std::ops::{Deref, DerefMut};
+
+/// Type lambda application, with a lifetime.
+#[allow(unused_lifetimes)]
+pub trait ApplyL<'a> {
+ type Out;
+}
+
+/// Type lambda taking a lifetime, i.e., `Lifetime -> Type`.
+pub trait LambdaL: for<'a> ApplyL<'a> {}
+
+impl<T: for<'a> ApplyL<'a>> LambdaL for T {}
+
+// HACK(eddyb) work around projection limitations with a newtype
+// FIXME(#52812) replace with `&'a mut <T as ApplyL<'b>>::Out`
+pub struct RefMutL<'a, 'b, T: LambdaL>(&'a mut <T as ApplyL<'b>>::Out);
+
+impl<'a, 'b, T: LambdaL> Deref for RefMutL<'a, 'b, T> {
+ type Target = <T as ApplyL<'b>>::Out;
+ fn deref(&self) -> &Self::Target {
+ self.0
+ }
+}
+
+impl<'a, 'b, T: LambdaL> DerefMut for RefMutL<'a, 'b, T> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ self.0
+ }
+}
+
+pub struct ScopedCell<T: LambdaL>(Cell<<T as ApplyL<'static>>::Out>);
+
+impl<T: LambdaL> ScopedCell<T> {
+ pub const fn new(value: <T as ApplyL<'static>>::Out) -> Self {
+ ScopedCell(Cell::new(value))
+ }
+
+ /// Sets the value in `self` to `replacement` while
+ /// running `f`, which gets the old value, mutably.
+ /// The old value will be restored after `f` exits, even
+ /// by panic, including modifications made to it by `f`.
+ pub fn replace<'a, R>(
+ &self,
+ replacement: <T as ApplyL<'a>>::Out,
+ f: impl for<'b, 'c> FnOnce(RefMutL<'b, 'c, T>) -> R,
+ ) -> R {
+ /// Wrapper that ensures that the cell always gets filled
+ /// (with the original state, optionally changed by `f`),
+ /// even if `f` had panicked.
+ struct PutBackOnDrop<'a, T: LambdaL> {
+ cell: &'a ScopedCell<T>,
+ value: Option<<T as ApplyL<'static>>::Out>,
+ }
+
+ impl<'a, T: LambdaL> Drop for PutBackOnDrop<'a, T> {
+ fn drop(&mut self) {
+ self.cell.0.set(self.value.take().unwrap());
+ }
+ }
+
+ let mut put_back_on_drop = PutBackOnDrop {
+ cell: self,
+ value: Some(self.0.replace(unsafe {
+ let erased = mem::transmute_copy(&replacement);
+ mem::forget(replacement);
+ erased
+ })),
+ };
+
+ f(RefMutL(put_back_on_drop.value.as_mut().unwrap()))
+ }
+
+ /// Sets the value in `self` to `value` while running `f`.
+ pub fn set<R>(&self, value: <T as ApplyL<'_>>::Out, f: impl FnOnce() -> R) -> R {
+ self.replace(value, |_| f())
+ }
+}
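
`ScopedCell` lets the bridge stash a non-`'static` value in a `static` thread-local for the duration of a call, restoring the previous value on the way out even if the closure panics. A standalone sketch of the restore-on-drop part for a plain `'static` value (the lifetime-erasing `ApplyL`/`RefMutL` machinery above exists only to make the same trick work for borrowed state):

```rust
use std::cell::Cell;

thread_local! {
    static DEPTH: Cell<u32> = Cell::new(0);
}

/// Run `f` with `DEPTH` temporarily set to `value`; the old value is put back
/// when the guard drops, even if `f` panics.
fn with_depth<R>(value: u32, f: impl FnOnce() -> R) -> R {
    struct PutBackOnDrop(u32);
    impl Drop for PutBackOnDrop {
        fn drop(&mut self) {
            DEPTH.with(|d| d.set(self.0));
        }
    }

    let _guard = DEPTH.with(|d| PutBackOnDrop(d.replace(value)));
    f()
}

fn main() {
    assert_eq!(DEPTH.with(|d| d.get()), 0);
    with_depth(1, || {
        assert_eq!(DEPTH.with(|d| d.get()), 1);
        with_depth(2, || assert_eq!(DEPTH.with(|d| d.get()), 2));
        assert_eq!(DEPTH.with(|d| d.get()), 1); // inner value restored
    });
    assert_eq!(DEPTH.with(|d| d.get()), 0); // outer value restored
    println!("scoped replacement restored correctly");
}
```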
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/selfless_reify.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/selfless_reify.rs
new file mode 100644
index 000000000..907ad256e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/selfless_reify.rs
@@ -0,0 +1,84 @@
+//! Abstraction for creating `fn` pointers from any callable that *effectively*
+//! has the equivalent of implementing `Default`, even if the compiler neither
+//! provides `Default` nor allows reifying closures (i.e. creating `fn` pointers)
+//! other than those with absolutely no captures.
+//!
+//! More specifically, for a closure-like type to be "effectively `Default`":
+//! * it must be a ZST (zero-sized type): no information contained within, so
+//! that `Default`'s return value (if it were implemented) is unambiguous
+//! * it must be `Copy`: no captured "unique ZST tokens" or any other similar
+//! types that would make duplicating values at will unsound
+//! * combined with the ZST requirement, this confers a kind of "telecopy"
+//! ability: similar to `Copy`, but without keeping the value around, and
+//! instead "reconstructing" it (a noop given it's a ZST) when needed
+//! * it must be *provably* inhabited: no captured uninhabited types or any
+//! other types that cannot be constructed by the user of this abstraction
+//! * the proof is a value of the closure-like type itself, in a sense the
+//! "seed" for the "telecopy" process made possible by ZST + `Copy`
+//! * this requirement is the only reason an abstraction limited to a specific
+//! usecase is required: ZST + `Copy` can be checked with *at worst* a panic
+//! at the "attempted `::default()` call" time, but that doesn't guarantee
+//! that the value can be soundly created, and attempting to use the typical
+//! "proof ZST token" approach leads yet again to having a ZST + `Copy` type
+//! that is not proof of anything without a value (i.e. isomorphic to a
+//! newtype of the type it's trying to prove the inhabitation of)
+//!
+//! A more flexible (and safer) solution to the general problem could exist once
+//! `const`-generic parameters can have type parameters in their types:
+//!
+//! ```rust,ignore (needs future const-generics)
+//! extern "C" fn ffi_wrapper<
+//! A, R,
+//! F: Fn(A) -> R,
+//! const f: F, // <-- this `const`-generic is not yet allowed
+//! >(arg: A) -> R {
+//! f(arg)
+//! }
+//! ```
+
+use std::mem;
+
+// FIXME(eddyb) this could be `trait` impls except for the `const fn` requirement.
+macro_rules! define_reify_functions {
+ ($(
+ fn $name:ident $(<$($param:ident),*>)?
+ for $(extern $abi:tt)? fn($($arg:ident: $arg_ty:ty),*) -> $ret_ty:ty;
+ )+) => {
+ $(pub const fn $name<
+ $($($param,)*)?
+ F: Fn($($arg_ty),*) -> $ret_ty + Copy
+ >(f: F) -> $(extern $abi)? fn($($arg_ty),*) -> $ret_ty {
+ // FIXME(eddyb) describe the `F` type (e.g. via `type_name::<F>`) once panic
+ // formatting becomes possible in `const fn`.
+ assert!(mem::size_of::<F>() == 0, "selfless_reify: closure must be zero-sized");
+
+ $(extern $abi)? fn wrapper<
+ $($($param,)*)?
+ F: Fn($($arg_ty),*) -> $ret_ty + Copy
+ >($($arg: $arg_ty),*) -> $ret_ty {
+ let f = unsafe {
+ // SAFETY: `F` satisfies all criteria for "out of thin air"
+ // reconstructability (see module-level doc comment).
+ mem::MaybeUninit::<F>::uninit().assume_init()
+ };
+ f($($arg),*)
+ }
+ let _f_proof = f;
+ wrapper::<
+ $($($param,)*)?
+ F
+ >
+ })+
+ }
+}
+
+define_reify_functions! {
+ fn _reify_to_extern_c_fn_unary<A, R> for extern "C" fn(arg: A) -> R;
+
+ // HACK(eddyb) this abstraction is used with `for<'a> fn(BridgeConfig<'a>)
+ // -> T` but that doesn't work with just `reify_to_extern_c_fn_unary`
+ // because of the `fn` pointer type being "higher-ranked" (i.e. the
+ // `for<'a>` binder).
+ // FIXME(eddyb) try to remove the lifetime from `BridgeConfig`, that'd help.
+ fn reify_to_extern_c_fn_hrt_bridge<R> for extern "C" fn(bridge: super::BridgeConfig<'_>) -> R;
+}
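
What the module above buys is a plain `fn` pointer from a closure that does capture something, as long as every capture is a zero-sized `Copy` value (this is how `Client::expand1`/`expand2` turn `move |bridge| run_client(bridge, f)` into an `extern "C" fn`). A cut-down, non-`extern` standalone version of the same trick, with the same ZST assertion guarding the `MaybeUninit` reconstruction; treat it as a sketch of the idea, not a drop-in utility:

```rust
use std::mem;

/// Turn a zero-sized, `Copy` closure into a plain `fn` pointer by
/// reconstructing it "out of thin air" inside a monomorphized wrapper.
const fn reify_unary<A, R, F: Fn(A) -> R + Copy>(f: F) -> fn(A) -> R {
    // Zero size is what makes the reconstruction below unambiguous and sound.
    assert!(mem::size_of::<F>() == 0, "reify_unary: closure must be zero-sized");

    fn wrapper<A, R, F: Fn(A) -> R + Copy>(arg: A) -> R {
        // SAFETY: `F` is a ZST (checked above), so there are no bytes to initialize.
        let f = unsafe { mem::MaybeUninit::<F>::uninit().assume_init() };
        f(arg)
    }

    // Holding a value of `F` is the proof that the type is inhabited.
    let _proof_of_inhabitation = f;
    wrapper::<A, R, F>
}

fn make_doubler<F: Fn(u32) -> u32 + Copy>(f: F) -> fn(u32) -> u32 {
    // `move |x| f(x) * 2` captures `f`, so it cannot coerce to a fn pointer
    // directly -- but because `F` is a ZST, reify_unary can still produce one.
    reify_unary(move |x| f(x) * 2)
}

fn main() {
    let fptr: fn(u32) -> u32 = make_doubler(|x| x + 1);
    assert_eq!(fptr(20), 42);
    println!("{}", fptr(0));
}
```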
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/server.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/server.rs
new file mode 100644
index 000000000..6e7a8d8c1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/server.rs
@@ -0,0 +1,339 @@
+//! Server-side traits.
+
+use super::*;
+
+// FIXME(eddyb) generate the definition of `HandleStore` in `server.rs`.
+use super::client::HandleStore;
+
+pub trait Types {
+ type FreeFunctions: 'static;
+ type TokenStream: 'static + Clone;
+ type Ident: 'static + Copy + Eq + Hash;
+ type Literal: 'static + Clone;
+ type SourceFile: 'static + Clone;
+ type MultiSpan: 'static;
+ type Diagnostic: 'static;
+ type Span: 'static + Copy + Eq + Hash;
+}
+
+/// Declare an associated fn of one of the traits below, adding necessary
+/// default bodies.
+macro_rules! associated_fn {
+ (fn drop(&mut self, $arg:ident: $arg_ty:ty)) =>
+ (fn drop(&mut self, $arg: $arg_ty) { mem::drop($arg) });
+
+ (fn clone(&mut self, $arg:ident: $arg_ty:ty) -> $ret_ty:ty) =>
+ (fn clone(&mut self, $arg: $arg_ty) -> $ret_ty { $arg.clone() });
+
+ ($($item:tt)*) => ($($item)*;)
+}
+
+macro_rules! declare_server_traits {
+ ($($name:ident {
+ $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)?;)*
+ }),* $(,)?) => {
+ $(pub trait $name: Types {
+ $(associated_fn!(fn $method(&mut self, $($arg: $arg_ty),*) $(-> $ret_ty)?);)*
+ })*
+
+ pub trait Server: Types $(+ $name)* {
+ fn globals(&mut self) -> ExpnGlobals<Self::Span>;
+ }
+ }
+}
+with_api!(Self, self_, declare_server_traits);
+
+pub(super) struct MarkedTypes<S: Types>(S);
+
+impl<S: Server> Server for MarkedTypes<S> {
+ fn globals(&mut self) -> ExpnGlobals<Self::Span> {
+ <_>::mark(Server::globals(&mut self.0))
+ }
+}
+
+macro_rules! define_mark_types_impls {
+ ($($name:ident {
+ $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)?;)*
+ }),* $(,)?) => {
+ impl<S: Types> Types for MarkedTypes<S> {
+ $(type $name = Marked<S::$name, client::$name>;)*
+ }
+
+ $(impl<S: $name> $name for MarkedTypes<S> {
+ $(fn $method(&mut self, $($arg: $arg_ty),*) $(-> $ret_ty)? {
+ <_>::mark($name::$method(&mut self.0, $($arg.unmark()),*))
+ })*
+ })*
+ }
+}
+with_api!(Self, self_, define_mark_types_impls);
+
+struct Dispatcher<S: Types> {
+ handle_store: HandleStore<S>,
+ server: S,
+}
+
+macro_rules! define_dispatcher_impl {
+ ($($name:ident {
+ $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)?;)*
+ }),* $(,)?) => {
+ // FIXME(eddyb) `pub` only for `ExecutionStrategy` below.
+ pub trait DispatcherTrait {
+ // HACK(eddyb) these are here to allow `Self::$name` to work below.
+ $(type $name;)*
+ fn dispatch(&mut self, buf: Buffer) -> Buffer;
+ }
+
+ impl<S: Server> DispatcherTrait for Dispatcher<MarkedTypes<S>> {
+ $(type $name = <MarkedTypes<S> as Types>::$name;)*
+ fn dispatch(&mut self, mut buf: Buffer) -> Buffer {
+ let Dispatcher { handle_store, server } = self;
+
+ let mut reader = &buf[..];
+ match api_tags::Method::decode(&mut reader, &mut ()) {
+ $(api_tags::Method::$name(m) => match m {
+ $(api_tags::$name::$method => {
+ let mut call_method = || {
+ reverse_decode!(reader, handle_store; $($arg: $arg_ty),*);
+ $name::$method(server, $($arg),*)
+ };
+ // HACK(eddyb) don't use `panic::catch_unwind` in a panic.
+ // If client and server happen to use the same `libstd`,
+ // `catch_unwind` asserts that the panic counter was 0,
+ // even when the closure passed to it didn't panic.
+ let r = if thread::panicking() {
+ Ok(call_method())
+ } else {
+ panic::catch_unwind(panic::AssertUnwindSafe(call_method))
+ .map_err(PanicMessage::from)
+ };
+
+ buf.clear();
+ r.encode(&mut buf, handle_store);
+ })*
+ }),*
+ }
+ buf
+ }
+ }
+ }
+}
+with_api!(Self, self_, define_dispatcher_impl);
+
+pub trait ExecutionStrategy {
+ fn run_bridge_and_client(
+ &self,
+ dispatcher: &mut impl DispatcherTrait,
+ input: Buffer,
+ run_client: extern "C" fn(BridgeConfig<'_>) -> Buffer,
+ force_show_panics: bool,
+ ) -> Buffer;
+}
+
+pub struct SameThread;
+
+impl ExecutionStrategy for SameThread {
+ fn run_bridge_and_client(
+ &self,
+ dispatcher: &mut impl DispatcherTrait,
+ input: Buffer,
+ run_client: extern "C" fn(BridgeConfig<'_>) -> Buffer,
+ force_show_panics: bool,
+ ) -> Buffer {
+ let mut dispatch = |buf| dispatcher.dispatch(buf);
+
+ run_client(BridgeConfig {
+ input,
+ dispatch: (&mut dispatch).into(),
+ force_show_panics,
+ _marker: marker::PhantomData,
+ })
+ }
+}
+
+// NOTE(eddyb) Two implementations are provided, the second one is a bit
+// faster but neither is anywhere near as fast as same-thread execution.
+
+pub struct CrossThread1;
+
+impl ExecutionStrategy for CrossThread1 {
+ fn run_bridge_and_client(
+ &self,
+ dispatcher: &mut impl DispatcherTrait,
+ input: Buffer,
+ run_client: extern "C" fn(BridgeConfig<'_>) -> Buffer,
+ force_show_panics: bool,
+ ) -> Buffer {
+ use std::sync::mpsc::channel;
+
+ let (req_tx, req_rx) = channel();
+ let (res_tx, res_rx) = channel();
+
+ let join_handle = thread::spawn(move || {
+ let mut dispatch = |buf| {
+ req_tx.send(buf).unwrap();
+ res_rx.recv().unwrap()
+ };
+
+ run_client(BridgeConfig {
+ input,
+ dispatch: (&mut dispatch).into(),
+ force_show_panics,
+ _marker: marker::PhantomData,
+ })
+ });
+
+ for b in req_rx {
+ res_tx.send(dispatcher.dispatch(b)).unwrap();
+ }
+
+ join_handle.join().unwrap()
+ }
+}
+
+pub struct CrossThread2;
+
+impl ExecutionStrategy for CrossThread2 {
+ fn run_bridge_and_client(
+ &self,
+ dispatcher: &mut impl DispatcherTrait,
+ input: Buffer,
+ run_client: extern "C" fn(BridgeConfig<'_>) -> Buffer,
+ force_show_panics: bool,
+ ) -> Buffer {
+ use std::sync::{Arc, Mutex};
+
+ enum State<T> {
+ Req(T),
+ Res(T),
+ }
+
+ let mut state = Arc::new(Mutex::new(State::Res(Buffer::new())));
+
+ let server_thread = thread::current();
+ let state2 = state.clone();
+ let join_handle = thread::spawn(move || {
+ let mut dispatch = |b| {
+ *state2.lock().unwrap() = State::Req(b);
+ server_thread.unpark();
+ loop {
+ thread::park();
+ if let State::Res(b) = &mut *state2.lock().unwrap() {
+ break b.take();
+ }
+ }
+ };
+
+ let r = run_client(BridgeConfig {
+ input,
+ dispatch: (&mut dispatch).into(),
+ force_show_panics,
+ _marker: marker::PhantomData,
+ });
+
+ // Wake up the server so it can exit the dispatch loop.
+ drop(state2);
+ server_thread.unpark();
+
+ r
+ });
+
+ // Check whether `state2` was dropped, to know when to stop.
+ while Arc::get_mut(&mut state).is_none() {
+ thread::park();
+ let mut b = match &mut *state.lock().unwrap() {
+ State::Req(b) => b.take(),
+ _ => continue,
+ };
+ b = dispatcher.dispatch(b.take());
+ *state.lock().unwrap() = State::Res(b);
+ join_handle.thread().unpark();
+ }
+
+ join_handle.join().unwrap()
+ }
+}
+
+fn run_server<
+ S: Server,
+ I: Encode<HandleStore<MarkedTypes<S>>>,
+ O: for<'a, 's> DecodeMut<'a, 's, HandleStore<MarkedTypes<S>>>,
+>(
+ strategy: &impl ExecutionStrategy,
+ handle_counters: &'static client::HandleCounters,
+ server: S,
+ input: I,
+ run_client: extern "C" fn(BridgeConfig<'_>) -> Buffer,
+ force_show_panics: bool,
+) -> Result<O, PanicMessage> {
+ let mut dispatcher =
+ Dispatcher { handle_store: HandleStore::new(handle_counters), server: MarkedTypes(server) };
+
+ let globals = dispatcher.server.globals();
+
+ let mut buf = Buffer::new();
+ (globals, input).encode(&mut buf, &mut dispatcher.handle_store);
+
+ buf = strategy.run_bridge_and_client(&mut dispatcher, buf, run_client, force_show_panics);
+
+ Result::decode(&mut &buf[..], &mut dispatcher.handle_store)
+}
+
+impl client::Client<super::super::TokenStream, super::super::TokenStream> {
+ pub fn run<S>(
+ &self,
+ strategy: &impl ExecutionStrategy,
+ server: S,
+ input: S::TokenStream,
+ force_show_panics: bool,
+ ) -> Result<S::TokenStream, PanicMessage>
+ where
+ S: Server,
+ S::TokenStream: Default,
+ {
+ let client::Client { get_handle_counters, run, _marker } = *self;
+ run_server(
+ strategy,
+ get_handle_counters(),
+ server,
+ <MarkedTypes<S> as Types>::TokenStream::mark(input),
+ run,
+ force_show_panics,
+ )
+ .map(|s| <Option<<MarkedTypes<S> as Types>::TokenStream>>::unmark(s).unwrap_or_default())
+ }
+}
+
+impl
+ client::Client<
+ (super::super::TokenStream, super::super::TokenStream),
+ super::super::TokenStream,
+ >
+{
+ pub fn run<S>(
+ &self,
+ strategy: &impl ExecutionStrategy,
+ server: S,
+ input: S::TokenStream,
+ input2: S::TokenStream,
+ force_show_panics: bool,
+ ) -> Result<S::TokenStream, PanicMessage>
+ where
+ S: Server,
+ S::TokenStream: Default,
+ {
+ let client::Client { get_handle_counters, run, _marker } = *self;
+ run_server(
+ strategy,
+ get_handle_counters(),
+ server,
+ (
+ <MarkedTypes<S> as Types>::TokenStream::mark(input),
+ <MarkedTypes<S> as Types>::TokenStream::mark(input2),
+ ),
+ run,
+ force_show_panics,
+ )
+ .map(|s| <Option<<MarkedTypes<S> as Types>::TokenStream>>::unmark(s).unwrap_or_default())
+ }
+}
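
The `CrossThread1` strategy above runs the client on its own thread and turns every bridge call into a channel round-trip, with the server thread sitting in a small request-service loop until the client hangs up. A standalone sketch of that loop with plain `String` messages in place of serialized `Buffer`s and no real dispatcher:

```rust
use std::sync::mpsc::channel;
use std::thread;

fn main() {
    let (req_tx, req_rx) = channel::<String>();
    let (res_tx, res_rx) = channel::<String>();

    // "Client" thread: each bridge call becomes one request/response round-trip.
    let join_handle = thread::spawn(move || {
        let dispatch = |req: &str| {
            req_tx.send(req.to_string()).unwrap();
            res_rx.recv().unwrap()
        };
        let a = dispatch("TokenStream::is_empty");
        let b = dispatch("Span::call_site");
        format!("client saw: [{}] and [{}]", a, b)
        // `req_tx` is dropped when this thread finishes, ending the loop below.
    });

    // "Server" loop: service requests until the client disconnects.
    for request in req_rx {
        res_tx.send(format!("handled `{}`", request)).unwrap();
    }

    println!("{}", join_handle.join().unwrap());
}
```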
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/diagnostic.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/diagnostic.rs
new file mode 100644
index 000000000..3fade2dc4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/diagnostic.rs
@@ -0,0 +1,166 @@
+//! lib-proc-macro diagnostic
+//!
+//! Copy from <https://github.com/rust-lang/rust/blob/e45d9973b2665897a768312e971b82cc62633103/src/libproc_macro/diagnostic.rs>
+//! augmented with removing unstable features
+
+use super::Span;
+
+/// An enum representing a diagnostic level.
+#[derive(Copy, Clone, Debug)]
+#[non_exhaustive]
+pub enum Level {
+ /// An error.
+ Error,
+ /// A warning.
+ Warning,
+ /// A note.
+ Note,
+ /// A help message.
+ Help,
+}
+
+/// Trait implemented by types that can be converted into a set of `Span`s.
+pub trait MultiSpan {
+ /// Converts `self` into a `Vec<Span>`.
+ fn into_spans(self) -> Vec<Span>;
+}
+
+impl MultiSpan for Span {
+ fn into_spans(self) -> Vec<Span> {
+ vec![self]
+ }
+}
+
+impl MultiSpan for Vec<Span> {
+ fn into_spans(self) -> Vec<Span> {
+ self
+ }
+}
+
+impl<'a> MultiSpan for &'a [Span] {
+ fn into_spans(self) -> Vec<Span> {
+ self.to_vec()
+ }
+}
+
+/// A structure representing a diagnostic message and associated children
+/// messages.
+#[derive(Clone, Debug)]
+pub struct Diagnostic {
+ level: Level,
+ message: String,
+ spans: Vec<Span>,
+ children: Vec<Diagnostic>,
+}
+
+macro_rules! diagnostic_child_methods {
+ ($spanned:ident, $regular:ident, $level:expr) => {
+ #[doc = concat!("Adds a new child diagnostic message to `self` with the [`",
+ stringify!($level), "`] level, and the given `spans` and `message`.")]
+ pub fn $spanned<S, T>(mut self, spans: S, message: T) -> Diagnostic
+ where
+ S: MultiSpan,
+ T: Into<String>,
+ {
+ self.children.push(Diagnostic::spanned(spans, $level, message));
+ self
+ }
+
+ #[doc = concat!("Adds a new child diagnostic message to `self` with the [`",
+ stringify!($level), "`] level, and the given `message`.")]
+ pub fn $regular<T: Into<String>>(mut self, message: T) -> Diagnostic {
+ self.children.push(Diagnostic::new($level, message));
+ self
+ }
+ };
+}
+
+/// Iterator over the children diagnostics of a `Diagnostic`.
+#[derive(Debug, Clone)]
+pub struct Children<'a>(std::slice::Iter<'a, Diagnostic>);
+
+impl<'a> Iterator for Children<'a> {
+ type Item = &'a Diagnostic;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.0.next()
+ }
+}
+
+impl Diagnostic {
+ /// Creates a new diagnostic with the given `level` and `message`.
+ pub fn new<T: Into<String>>(level: Level, message: T) -> Diagnostic {
+ Diagnostic { level, message: message.into(), spans: vec![], children: vec![] }
+ }
+
+ /// Creates a new diagnostic with the given `level` and `message` pointing to
+ /// the given set of `spans`.
+ pub fn spanned<S, T>(spans: S, level: Level, message: T) -> Diagnostic
+ where
+ S: MultiSpan,
+ T: Into<String>,
+ {
+ Diagnostic { level, message: message.into(), spans: spans.into_spans(), children: vec![] }
+ }
+
+ diagnostic_child_methods!(span_error, error, Level::Error);
+ diagnostic_child_methods!(span_warning, warning, Level::Warning);
+ diagnostic_child_methods!(span_note, note, Level::Note);
+ diagnostic_child_methods!(span_help, help, Level::Help);
+
+ /// Returns the diagnostic `level` for `self`.
+ pub fn level(&self) -> Level {
+ self.level
+ }
+
+ /// Sets the level in `self` to `level`.
+ pub fn set_level(&mut self, level: Level) {
+ self.level = level;
+ }
+
+ /// Returns the message in `self`.
+ pub fn message(&self) -> &str {
+ &self.message
+ }
+
+ /// Sets the message in `self` to `message`.
+ pub fn set_message<T: Into<String>>(&mut self, message: T) {
+ self.message = message.into();
+ }
+
+ /// Returns the `Span`s in `self`.
+ pub fn spans(&self) -> &[Span] {
+ &self.spans
+ }
+
+ /// Sets the `Span`s in `self` to `spans`.
+ pub fn set_spans<S: MultiSpan>(&mut self, spans: S) {
+ self.spans = spans.into_spans();
+ }
+
+ /// Returns an iterator over the children diagnostics of `self`.
+ pub fn children(&self) -> Children<'_> {
+ Children(self.children.iter())
+ }
+
+ /// Emit the diagnostic.
+ pub fn emit(self) {
+ fn to_internal(spans: Vec<Span>) -> super::bridge::client::MultiSpan {
+ let mut multi_span = super::bridge::client::MultiSpan::new();
+ for span in spans {
+ multi_span.push(span.0);
+ }
+ multi_span
+ }
+
+ let mut diag = super::bridge::client::Diagnostic::new(
+ self.level,
+ &self.message[..],
+ to_internal(self.spans),
+ );
+ for c in self.children {
+ diag.sub(c.level, &c.message[..], to_internal(c.spans));
+ }
+ diag.emit();
+ }
+}
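
The `diagnostic_child_methods!` macro above stamps out one pair of builder methods per severity level, each pushing a child diagnostic and returning `self` so calls can be chained. A standalone sketch of that generate-builders-by-macro pattern on a simplified struct (no spans; the names are made up):

```rust
#[derive(Debug, Clone, Copy)]
enum Level {
    Warning,
    Note,
    Help,
}

#[derive(Debug)]
struct Diag {
    level: Level,
    message: String,
    children: Vec<Diag>,
}

// Generate one chaining builder method per level, like `diagnostic_child_methods!`.
macro_rules! child_methods {
    ($($method:ident => $level:expr),* $(,)?) => {
        impl Diag {
            $(fn $method<T: Into<String>>(mut self, message: T) -> Diag {
                self.children.push(Diag { level: $level, message: message.into(), children: vec![] });
                self
            })*
        }
    };
}

child_methods!(note => Level::Note, help => Level::Help);

fn main() {
    let diag = Diag { level: Level::Warning, message: "unused macro input".into(), children: vec![] }
        .note("the input token stream was empty")
        .help("remove the attribute or pass some tokens");
    println!("{:?}: {} ({} children)", diag.level, diag.message, diag.children.len());
}
```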
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/mod.rs
new file mode 100644
index 000000000..be62c73ef
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/mod.rs
@@ -0,0 +1,1125 @@
+//! A support library for macro authors when defining new macros.
+//!
+//! This library, provided by the standard distribution, provides the types
+//! consumed in the interfaces of procedurally defined macro definitions such as
+//! function-like macros `#[proc_macro]`, macro attributes `#[proc_macro_attribute]` and
+//! custom derive attributes `#[proc_macro_derive]`.
+//!
+//! See [the book] for more.
+//!
+//! [the book]: ../book/ch19-06-macros.html#procedural-macros-for-generating-code-from-attributes
+
+#[doc(hidden)]
+pub mod bridge;
+
+mod diagnostic;
+
+pub use diagnostic::{Diagnostic, Level, MultiSpan};
+
+use std::cmp::Ordering;
+use std::ops::RangeBounds;
+use std::path::PathBuf;
+use std::str::FromStr;
+use std::{error, fmt, iter, mem};
+
+/// Determines whether proc_macro has been made accessible to the currently
+/// running program.
+///
+/// The proc_macro crate is only intended for use inside the implementation of
+/// procedural macros. All the functions in this crate panic if invoked from
+/// outside of a procedural macro, such as from a build script or unit test or
+/// ordinary Rust binary.
+///
+/// With consideration for Rust libraries that are designed to support both
+/// macro and non-macro use cases, `proc_macro::is_available()` provides a
+/// non-panicking way to detect whether the infrastructure required to use the
+/// API of proc_macro is presently available. Returns true if invoked from
+/// inside of a procedural macro, false if invoked from any other binary.
+pub fn is_available() -> bool {
+ bridge::client::is_available()
+}
+
+/// The main type provided by this crate, representing an abstract stream of
+/// tokens, or, more specifically, a sequence of token trees.
+/// The type provides interfaces for iterating over those token trees and, conversely,
+/// collecting a number of token trees into one stream.
+///
+/// This is both the input and output of `#[proc_macro]`, `#[proc_macro_attribute]`
+/// and `#[proc_macro_derive]` definitions.
+#[derive(Clone)]
+pub struct TokenStream(Option<bridge::client::TokenStream>);
+
+/// Error returned from `TokenStream::from_str`.
+#[non_exhaustive]
+#[derive(Debug)]
+pub struct LexError;
+
+impl fmt::Display for LexError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str("cannot parse string into token stream")
+ }
+}
+
+impl error::Error for LexError {}
+
+/// Error returned from `TokenStream::expand_expr`.
+#[non_exhaustive]
+#[derive(Debug)]
+pub struct ExpandError;
+
+impl fmt::Display for ExpandError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str("macro expansion failed")
+ }
+}
+
+impl error::Error for ExpandError {}
+
+impl TokenStream {
+ /// Returns an empty `TokenStream` containing no token trees.
+ pub fn new() -> TokenStream {
+ TokenStream(None)
+ }
+
+ /// Checks if this `TokenStream` is empty.
+ pub fn is_empty(&self) -> bool {
+ self.0.as_ref().map(|h| h.is_empty()).unwrap_or(true)
+ }
+
+ /// Parses this `TokenStream` as an expression and attempts to expand any
+ /// macros within it. Returns the expanded `TokenStream`.
+ ///
+ /// Currently only expressions expanding to literals will succeed, although
+ /// this may be relaxed in the future.
+ ///
+ /// NOTE: In error conditions, `expand_expr` may leave macros unexpanded,
+ /// report an error, failing compilation, and/or return an `Err(..)`. The
+ /// specific behavior for any error condition, and what conditions are
+ /// considered errors, is unspecified and may change in the future.
+ pub fn expand_expr(&self) -> Result<TokenStream, ExpandError> {
+ let stream = self.0.as_ref().ok_or(ExpandError)?;
+ match bridge::client::TokenStream::expand_expr(stream) {
+ Ok(stream) => Ok(TokenStream(Some(stream))),
+ Err(_) => Err(ExpandError),
+ }
+ }
+}
+
+/// Attempts to break the string into tokens and parse those tokens into a token stream.
+/// May fail for a number of reasons, for example, if the string contains unbalanced delimiters
+/// or characters not existing in the language.
+/// All tokens in the parsed stream get `Span::call_site()` spans.
+///
+/// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
+/// change these errors into `LexError`s later.
+impl FromStr for TokenStream {
+ type Err = LexError;
+
+ fn from_str(src: &str) -> Result<TokenStream, LexError> {
+ Ok(TokenStream(Some(bridge::client::TokenStream::from_str(src))))
+ }
+}
+
+/// Prints the token stream as a string that is supposed to be losslessly convertible back
+/// into the same token stream (modulo spans), except for possibly `TokenTree::Group`s
+/// with `Delimiter::None` delimiters and negative numeric literals.
+impl fmt::Display for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.to_string())
+ }
+}
+
+/// Prints the token stream in a form convenient for debugging.
+impl fmt::Debug for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str("TokenStream ")?;
+ f.debug_list().entries(self.clone()).finish()
+ }
+}
+
+impl Default for TokenStream {
+ fn default() -> Self {
+ TokenStream::new()
+ }
+}
+
+pub use quote::{quote, quote_span};
+
+fn tree_to_bridge_tree(
+ tree: TokenTree,
+) -> bridge::TokenTree<
+ bridge::client::TokenStream,
+ bridge::client::Span,
+ bridge::client::Ident,
+ bridge::client::Literal,
+> {
+ match tree {
+ TokenTree::Group(tt) => bridge::TokenTree::Group(tt.0),
+ TokenTree::Punct(tt) => bridge::TokenTree::Punct(tt.0),
+ TokenTree::Ident(tt) => bridge::TokenTree::Ident(tt.0),
+ TokenTree::Literal(tt) => bridge::TokenTree::Literal(tt.0),
+ }
+}
+
+/// Creates a token stream containing a single token tree.
+impl From<TokenTree> for TokenStream {
+ fn from(tree: TokenTree) -> TokenStream {
+ TokenStream(Some(bridge::client::TokenStream::from_token_tree(tree_to_bridge_tree(tree))))
+ }
+}
+
+/// Non-generic helper for implementing `FromIterator<TokenStream>` and
+/// `Extend<TokenStream>` with less monomorphization in calling crates.
+struct ConcatStreamsHelper {
+ streams: Vec<bridge::client::TokenStream>,
+}
+
+impl ConcatStreamsHelper {
+ fn new(capacity: usize) -> Self {
+ ConcatStreamsHelper { streams: Vec::with_capacity(capacity) }
+ }
+
+ fn push(&mut self, stream: TokenStream) {
+ if let Some(stream) = stream.0 {
+ self.streams.push(stream);
+ }
+ }
+
+ fn build(mut self) -> TokenStream {
+ if self.streams.len() <= 1 {
+ TokenStream(self.streams.pop())
+ } else {
+ TokenStream(Some(bridge::client::TokenStream::concat_streams(None, self.streams)))
+ }
+ }
+
+ fn append_to(mut self, stream: &mut TokenStream) {
+ if self.streams.is_empty() {
+ return;
+ }
+ let base = stream.0.take();
+ if base.is_none() && self.streams.len() == 1 {
+ stream.0 = self.streams.pop();
+ } else {
+ stream.0 = Some(bridge::client::TokenStream::concat_streams(base, self.streams));
+ }
+ }
+}
+
+/// Collects a number of token trees into a single stream.
+impl iter::FromIterator<TokenTree> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
+ trees.into_iter().map(TokenStream::from).collect()
+ }
+}
+
+/// A "flattening" operation on token streams, collects token trees
+/// from multiple token streams into a single stream.
+impl iter::FromIterator<TokenStream> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+ let iter = streams.into_iter();
+ let mut builder = ConcatStreamsHelper::new(iter.size_hint().0);
+ iter.for_each(|stream| builder.push(stream));
+ builder.build()
+ }
+}
+
+impl Extend<TokenTree> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, trees: I) {
+ self.extend(trees.into_iter().map(TokenStream::from));
+ }
+}
+
+impl Extend<TokenStream> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+ // FIXME(eddyb) Use an optimized implementation if/when possible.
+ *self = iter::once(mem::replace(self, Self::new())).chain(streams).collect();
+ }
+}
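The `FromIterator` and `Extend` impls above are what make `collect()` and `extend()` work on token streams. A hedged sketch (hypothetical helper, not part of this diff, assuming a proc-macro context) of building a stream from individual trees and then appending another stream:

```rust
use proc_macro::{Ident, Punct, Spacing, Span, TokenStream, TokenTree};

fn make_macro_call(name: &str) -> TokenStream {
    // Collect individual token trees via `FromIterator<TokenTree>`.
    let mut stream: TokenStream = [
        TokenTree::Ident(Ident::new(name, Span::call_site())),
        TokenTree::Punct(Punct::new('!', Spacing::Alone)),
    ]
    .into_iter()
    .collect();
    // `Extend<TokenStream>` then splices further streams onto the end.
    stream.extend(["()".parse::<TokenStream>().expect("valid tokens")]);
    stream
}
```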
+
+/// Public implementation details for the `TokenStream` type, such as iterators.
+pub mod token_stream {
+ use super::{bridge, Group, Ident, Literal, Punct, TokenStream, TokenTree};
+
+ /// An iterator over `TokenStream`'s `TokenTree`s.
+ /// The iteration is "shallow", i.e., the iterator doesn't recurse into delimited groups,
+ /// and returns whole groups as token trees.
+ #[derive(Clone)]
+ pub struct IntoIter(
+ std::vec::IntoIter<
+ bridge::TokenTree<
+ bridge::client::TokenStream,
+ bridge::client::Span,
+ bridge::client::Ident,
+ bridge::client::Literal,
+ >,
+ >,
+ );
+
+ impl Iterator for IntoIter {
+ type Item = TokenTree;
+
+ fn next(&mut self) -> Option<TokenTree> {
+ self.0.next().map(|tree| match tree {
+ bridge::TokenTree::Group(tt) => TokenTree::Group(Group(tt)),
+ bridge::TokenTree::Punct(tt) => TokenTree::Punct(Punct(tt)),
+ bridge::TokenTree::Ident(tt) => TokenTree::Ident(Ident(tt)),
+ bridge::TokenTree::Literal(tt) => TokenTree::Literal(Literal(tt)),
+ })
+ }
+ }
+
+ impl IntoIterator for TokenStream {
+ type Item = TokenTree;
+ type IntoIter = IntoIter;
+
+ fn into_iter(self) -> IntoIter {
+ IntoIter(self.0.map(|v| v.into_trees()).unwrap_or_default().into_iter())
+ }
+ }
+}
+
+#[doc(hidden)]
+mod quote;
+
+/// A region of source code, along with macro expansion information.
+#[derive(Copy, Clone)]
+pub struct Span(bridge::client::Span);
+
+macro_rules! diagnostic_method {
+ ($name:ident, $level:expr) => {
+ /// Creates a new `Diagnostic` with the given `message` at the span
+ /// `self`.
+ pub fn $name<T: Into<String>>(self, message: T) -> Diagnostic {
+ Diagnostic::spanned(self, $level, message)
+ }
+ };
+}
+
+impl Span {
+ /// A span that resolves at the macro definition site.
+ pub fn def_site() -> Span {
+ Span(bridge::client::Span::def_site())
+ }
+
+ /// The span of the invocation of the current procedural macro.
+ /// Identifiers created with this span will be resolved as if they were written
+ /// directly at the macro call location (call-site hygiene) and other code
+ /// at the macro call site will be able to refer to them as well.
+ pub fn call_site() -> Span {
+ Span(bridge::client::Span::call_site())
+ }
+
+ /// A span that represents `macro_rules` hygiene, and sometimes resolves at the macro
+ /// definition site (local variables, labels, `$crate`) and sometimes at the macro
+ /// call site (everything else).
+ /// The span location is taken from the call-site.
+ pub fn mixed_site() -> Span {
+ Span(bridge::client::Span::mixed_site())
+ }
+
+ /// The original source file into which this span points.
+ pub fn source_file(&self) -> SourceFile {
+ SourceFile(self.0.source_file())
+ }
+
+ /// The `Span` for the tokens in the previous macro expansion from which
+ /// `self` was generated, if any.
+ pub fn parent(&self) -> Option<Span> {
+ self.0.parent().map(Span)
+ }
+
+ /// The span for the original source code that `self` was generated from. If
+ /// this `Span` wasn't generated from other macro expansions then the return
+ /// value is the same as `*self`.
+ pub fn source(&self) -> Span {
+ Span(self.0.source())
+ }
+
+ /// Gets the starting line/column in the source file for this span.
+ pub fn start(&self) -> LineColumn {
+ self.0.start().add_1_to_column()
+ }
+
+ /// Gets the ending line/column in the source file for this span.
+ pub fn end(&self) -> LineColumn {
+ self.0.end().add_1_to_column()
+ }
+
+ /// Creates an empty span pointing to directly before this span.
+ pub fn before(&self) -> Span {
+ Span(self.0.before())
+ }
+
+ /// Creates an empty span pointing to directly after this span.
+ pub fn after(&self) -> Span {
+ Span(self.0.after())
+ }
+
+ /// Creates a new span encompassing `self` and `other`.
+ ///
+ /// Returns `None` if `self` and `other` are from different files.
+ pub fn join(&self, other: Span) -> Option<Span> {
+ self.0.join(other.0).map(Span)
+ }
+
+ /// Creates a new span with the same line/column information as `self` but
+ /// that resolves symbols as though it were at `other`.
+ pub fn resolved_at(&self, other: Span) -> Span {
+ Span(self.0.resolved_at(other.0))
+ }
+
+ /// Creates a new span with the same name resolution behavior as `self` but
+ /// with the line/column information of `other`.
+ pub fn located_at(&self, other: Span) -> Span {
+ other.resolved_at(*self)
+ }
+
+ /// Compares two spans to see if they're equal.
+ pub fn eq(&self, other: &Span) -> bool {
+ self.0 == other.0
+ }
+
+ /// Returns the source text behind a span. This preserves the original source
+ /// code, including spaces and comments. It only returns a result if the span
+ /// corresponds to real source code.
+ ///
+ /// Note: The observable result of a macro should only rely on the tokens and
+ /// not on this source text. The result of this function is a best effort to
+ /// be used for diagnostics only.
+ pub fn source_text(&self) -> Option<String> {
+ self.0.source_text()
+ }
+
+ // Used by the implementation of `Span::quote`
+ #[doc(hidden)]
+ pub fn save_span(&self) -> usize {
+ self.0.save_span()
+ }
+
+ // Used by the implementation of `Span::quote`
+ #[doc(hidden)]
+ pub fn recover_proc_macro_span(id: usize) -> Span {
+ Span(bridge::client::Span::recover_proc_macro_span(id))
+ }
+
+ diagnostic_method!(error, Level::Error);
+ diagnostic_method!(warning, Level::Warning);
+ diagnostic_method!(note, Level::Note);
+ diagnostic_method!(help, Level::Help);
+}
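To make the hygiene distinction above concrete, here is a minimal hypothetical helper (not from this diff, assuming a proc-macro context) choosing between `call_site()` and `mixed_site()` when naming a generated item:

```rust
use proc_macro::{Ident, Span, TokenTree};

fn helper_ident(visible_to_caller: bool) -> TokenTree {
    // `call_site()` lets the caller's code refer to the generated name;
    // `mixed_site()` gives it `macro_rules!`-style hygiene instead.
    let span = if visible_to_caller { Span::call_site() } else { Span::mixed_site() };
    TokenTree::Ident(Ident::new("__helper", span))
}
```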
+
+/// Prints a span in a form convenient for debugging.
+impl fmt::Debug for Span {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.0.fmt(f)
+ }
+}
+
+/// A line-column pair representing the start or end of a `Span`.
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub struct LineColumn {
+ /// The 1-indexed line in the source file on which the span starts or ends (inclusive).
+ pub line: usize,
+ /// The 1-indexed column (number of bytes in UTF-8 encoding) in the source
+ /// file on which the span starts or ends (inclusive).
+ pub column: usize,
+}
+
+impl LineColumn {
+ fn add_1_to_column(self) -> Self {
+ LineColumn { line: self.line, column: self.column + 1 }
+ }
+}
+
+impl Ord for LineColumn {
+ fn cmp(&self, other: &Self) -> Ordering {
+ self.line.cmp(&other.line).then(self.column.cmp(&other.column))
+ }
+}
+
+impl PartialOrd for LineColumn {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ Some(self.cmp(other))
+ }
+}
+
+/// The source file of a given `Span`.
+#[derive(Clone)]
+pub struct SourceFile(bridge::client::SourceFile);
+
+impl SourceFile {
+ /// Gets the path to this source file.
+ ///
+ /// ### Note
+ /// If the code span associated with this `SourceFile` was generated by an external
+ /// macro, this might not be an actual path on the filesystem. Use [`is_real`] to check.
+ ///
+ /// Also note that even if `is_real` returns `true`, if `--remap-path-prefix` was passed on
+ /// the command line, the path as given might not actually be valid.
+ ///
+ /// [`is_real`]: Self::is_real
+ pub fn path(&self) -> PathBuf {
+ PathBuf::from(self.0.path())
+ }
+
+ /// Returns `true` if this source file is a real source file, and not generated by an external
+ /// macro's expansion.
+ pub fn is_real(&self) -> bool {
+ // This is a hack until intercrate spans are implemented and we can have real source files
+ // for spans generated in external macros.
+ // https://github.com/rust-lang/rust/pull/43604#issuecomment-333334368
+ self.0.is_real()
+ }
+}
+
+impl fmt::Debug for SourceFile {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("SourceFile")
+ .field("path", &self.path())
+ .field("is_real", &self.is_real())
+ .finish()
+ }
+}
+
+impl PartialEq for SourceFile {
+ fn eq(&self, other: &Self) -> bool {
+ self.0.eq(&other.0)
+ }
+}
+
+impl Eq for SourceFile {}
+
+/// A single token or a delimited sequence of token trees (e.g., `[1, (), ..]`).
+#[derive(Clone)]
+pub enum TokenTree {
+ /// A token stream surrounded by bracket delimiters.
+ Group(Group),
+ /// An identifier.
+ Ident(Ident),
+ /// A single punctuation character (`+`, `,`, `$`, etc.).
+ Punct(Punct),
+ /// A literal character (`'a'`), string (`"hello"`), number (`2.3`), etc.
+ Literal(Literal),
+}
+
+impl TokenTree {
+ /// Returns the span of this tree, delegating to the `span` method of
+ /// the contained token or a delimited stream.
+ pub fn span(&self) -> Span {
+ match *self {
+ TokenTree::Group(ref t) => t.span(),
+ TokenTree::Ident(ref t) => t.span(),
+ TokenTree::Punct(ref t) => t.span(),
+ TokenTree::Literal(ref t) => t.span(),
+ }
+ }
+
+ /// Configures the span for *only this token*.
+ ///
+ /// Note that if this token is a `Group` then this method will not configure
+ /// the span of each of the internal tokens; it will simply delegate to
+ /// the `set_span` method of each variant.
+ pub fn set_span(&mut self, span: Span) {
+ match *self {
+ TokenTree::Group(ref mut t) => t.set_span(span),
+ TokenTree::Ident(ref mut t) => t.set_span(span),
+ TokenTree::Punct(ref mut t) => t.set_span(span),
+ TokenTree::Literal(ref mut t) => t.set_span(span),
+ }
+ }
+}
+
+/// Prints token tree in a form convenient for debugging.
+impl fmt::Debug for TokenTree {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ // Each of these has the name in the struct type in the derived debug,
+ // so don't bother with an extra layer of indirection
+ match *self {
+ TokenTree::Group(ref tt) => tt.fmt(f),
+ TokenTree::Ident(ref tt) => tt.fmt(f),
+ TokenTree::Punct(ref tt) => tt.fmt(f),
+ TokenTree::Literal(ref tt) => tt.fmt(f),
+ }
+ }
+}
+
+impl From<Group> for TokenTree {
+ fn from(g: Group) -> TokenTree {
+ TokenTree::Group(g)
+ }
+}
+
+impl From<Ident> for TokenTree {
+ fn from(g: Ident) -> TokenTree {
+ TokenTree::Ident(g)
+ }
+}
+
+impl From<Punct> for TokenTree {
+ fn from(g: Punct) -> TokenTree {
+ TokenTree::Punct(g)
+ }
+}
+
+impl From<Literal> for TokenTree {
+ fn from(g: Literal) -> TokenTree {
+ TokenTree::Literal(g)
+ }
+}
+
+/// Prints the token tree as a string that is supposed to be losslessly convertible back
+/// into the same token tree (modulo spans), except for possibly `TokenTree::Group`s
+/// with `Delimiter::None` delimiters and negative numeric literals.
+impl fmt::Display for TokenTree {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.to_string())
+ }
+}
+
+/// A delimited token stream.
+///
+/// A `Group` internally contains a `TokenStream` which is surrounded by `Delimiter`s.
+#[derive(Clone)]
+pub struct Group(bridge::Group<bridge::client::TokenStream, bridge::client::Span>);
+
+/// Describes how a sequence of token trees is delimited.
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum Delimiter {
+ /// `( ... )`
+ Parenthesis,
+ /// `{ ... }`
+ Brace,
+ /// `[ ... ]`
+ Bracket,
+ /// `Ø ... Ø`
+ /// An invisible delimiter, that may, for example, appear around tokens coming from a
+ /// "macro variable" `$var`. It is important to preserve operator priorities in cases like
+ /// `$var * 3` where `$var` is `1 + 2`.
+ /// Invisible delimiters might not survive roundtrip of a token stream through a string.
+ None,
+}
+
+impl Group {
+ /// Creates a new `Group` with the given delimiter and token stream.
+ ///
+ /// This constructor will set the span for this group to
+ /// `Span::call_site()`. To change the span you can use the `set_span`
+ /// method below.
+ pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group {
+ Group(bridge::Group {
+ delimiter,
+ stream: stream.0,
+ span: bridge::DelimSpan::from_single(Span::call_site().0),
+ })
+ }
+
+ /// Returns the delimiter of this `Group`
+ pub fn delimiter(&self) -> Delimiter {
+ self.0.delimiter
+ }
+
+ /// Returns the `TokenStream` of tokens that are delimited in this `Group`.
+ ///
+ /// Note that the returned token stream does not include the delimiter
+ /// returned above.
+ pub fn stream(&self) -> TokenStream {
+ TokenStream(self.0.stream.clone())
+ }
+
+ /// Returns the span for the delimiters of this token stream, spanning the
+ /// entire `Group`.
+ ///
+ /// ```text
+ /// pub fn span(&self) -> Span {
+ /// ^^^^^^^
+ /// ```
+ pub fn span(&self) -> Span {
+ Span(self.0.span.entire)
+ }
+
+ /// Returns the span pointing to the opening delimiter of this group.
+ ///
+ /// ```text
+ /// pub fn span_open(&self) -> Span {
+ /// ^
+ /// ```
+ pub fn span_open(&self) -> Span {
+ Span(self.0.span.open)
+ }
+
+ /// Returns the span pointing to the closing delimiter of this group.
+ ///
+ /// ```text
+ /// pub fn span_close(&self) -> Span {
+ /// ^
+ /// ```
+ pub fn span_close(&self) -> Span {
+ Span(self.0.span.close)
+ }
+
+ /// Configures the span for this `Group`'s delimiters, but not its internal
+ /// tokens.
+ ///
+ /// This method will **not** set the span of all the internal tokens spanned
+ /// by this group, but rather it will only set the span of the delimiter
+ /// tokens at the level of the `Group`.
+ pub fn set_span(&mut self, span: Span) {
+ self.0.span = bridge::DelimSpan::from_single(span.0);
+ }
+}
+
+/// Prints the group as a string that should be losslessly convertible back
+/// into the same group (modulo spans), except for possibly `TokenTree::Group`s
+/// with `Delimiter::None` delimiters.
+impl fmt::Display for Group {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.to_string())
+ }
+}
+
+impl fmt::Debug for Group {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("Group")
+ .field("delimiter", &self.delimiter())
+ .field("stream", &self.stream())
+ .field("span", &self.span())
+ .finish()
+ }
+}
+
+/// A `Punct` is a single punctuation character such as `+`, `-` or `#`.
+///
+/// Multi-character operators like `+=` are represented as two instances of `Punct` with different
+/// forms of `Spacing` returned.
+#[derive(Clone)]
+pub struct Punct(bridge::Punct<bridge::client::Span>);
+
+/// Describes whether a `Punct` is followed immediately by another `Punct` ([`Spacing::Joint`]) or
+/// by a different token or whitespace ([`Spacing::Alone`]).
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum Spacing {
+ /// A `Punct` is not immediately followed by another `Punct`.
+ /// E.g. `+` is `Alone` in `+ =`, `+ident` and `+()`.
+ Alone,
+ /// A `Punct` is immediately followed by another `Punct`.
+ /// E.g. `+` is `Joint` in `+=` and `++`.
+ ///
+ /// Additionally, single quote `'` can join with identifiers to form lifetimes: `'ident`.
+ Joint,
+}
+
+impl Punct {
+ /// Creates a new `Punct` from the given character and spacing.
+ /// The `ch` argument must be a valid punctuation character permitted by the language,
+ /// otherwise the function will panic.
+ ///
+ /// The returned `Punct` will have the default span of `Span::call_site()`
+ /// which can be further configured with the `set_span` method below.
+ pub fn new(ch: char, spacing: Spacing) -> Punct {
+ const LEGAL_CHARS: &[char] = &[
+ '=', '<', '>', '!', '~', '+', '-', '*', '/', '%', '^', '&', '|', '@', '.', ',', ';',
+ ':', '#', '$', '?', '\'',
+ ];
+ if !LEGAL_CHARS.contains(&ch) {
+ panic!("unsupported character `{:?}`", ch);
+ }
+ Punct(bridge::Punct {
+ ch: ch as u8,
+ joint: spacing == Spacing::Joint,
+ span: Span::call_site().0,
+ })
+ }
+
+ /// Returns the value of this punctuation character as `char`.
+ pub fn as_char(&self) -> char {
+ self.0.ch as char
+ }
+
+ /// Returns the spacing of this punctuation character, indicating whether it's immediately
+ /// followed by another `Punct` in the token stream, so they can potentially be combined into
+ /// a multi-character operator (`Joint`), or it's followed by some other token or whitespace
+ /// (`Alone`) so the operator has certainly ended.
+ pub fn spacing(&self) -> Spacing {
+ if self.0.joint {
+ Spacing::Joint
+ } else {
+ Spacing::Alone
+ }
+ }
+
+ /// Returns the span for this punctuation character.
+ pub fn span(&self) -> Span {
+ Span(self.0.span)
+ }
+
+ /// Configure the span for this punctuation character.
+ pub fn set_span(&mut self, span: Span) {
+ self.0.span = span.0;
+ }
+}
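Multi-character operators are built from several `Punct`s, with `Spacing::Joint` on every character except the last. A small hypothetical sketch (not part of this diff, assuming a proc-macro context):

```rust
use proc_macro::{Punct, Spacing, TokenStream, TokenTree};

fn fat_arrow() -> TokenStream {
    // `=` is marked `Joint`, so the compiler re-joins `=` and `>` into `=>`.
    [
        TokenTree::Punct(Punct::new('=', Spacing::Joint)),
        TokenTree::Punct(Punct::new('>', Spacing::Alone)),
    ]
    .into_iter()
    .collect()
}
```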
+
+/// Prints the punctuation character as a string that should be losslessly convertible
+/// back into the same character.
+impl fmt::Display for Punct {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.to_string())
+ }
+}
+
+impl fmt::Debug for Punct {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("Punct")
+ .field("ch", &self.as_char())
+ .field("spacing", &self.spacing())
+ .field("span", &self.span())
+ .finish()
+ }
+}
+
+impl PartialEq<char> for Punct {
+ fn eq(&self, rhs: &char) -> bool {
+ self.as_char() == *rhs
+ }
+}
+
+impl PartialEq<Punct> for char {
+ fn eq(&self, rhs: &Punct) -> bool {
+ *self == rhs.as_char()
+ }
+}
+
+/// An identifier (`ident`).
+#[derive(Clone)]
+pub struct Ident(bridge::client::Ident);
+
+impl Ident {
+ /// Creates a new `Ident` with the given `string` as well as the specified
+ /// `span`.
+ /// The `string` argument must be a valid identifier permitted by the
+ /// language (including keywords, e.g. `self` or `fn`). Otherwise, the function will panic.
+ ///
+ /// Note that `span`, currently in rustc, configures the hygiene information
+ /// for this identifier.
+ ///
+ /// As of this time `Span::call_site()` explicitly opts-in to "call-site" hygiene
+ /// meaning that identifiers created with this span will be resolved as if they were written
+ /// directly at the location of the macro call, and other code at the macro call site will be
+ /// able to refer to them as well.
+ ///
+ /// Later spans like `Span::def_site()` will allow opting in to "definition-site" hygiene,
+ /// meaning that identifiers created with this span will be resolved at the location of the
+ /// macro definition and other code at the macro call site will not be able to refer to them.
+ ///
+ /// Due to the current importance of hygiene this constructor, unlike other
+ /// tokens, requires a `Span` to be specified at construction.
+ pub fn new(string: &str, span: Span) -> Ident {
+ Ident(bridge::client::Ident::new(string, span.0, false))
+ }
+
+ /// Same as `Ident::new`, but creates a raw identifier (`r#ident`).
+ /// The `string` argument must be a valid identifier permitted by the language
+ /// (including keywords, e.g. `fn`). Keywords which are usable in path segments
+ /// (e.g. `self`, `super`) are not supported, and will cause a panic.
+ pub fn new_raw(string: &str, span: Span) -> Ident {
+ Ident(bridge::client::Ident::new(string, span.0, true))
+ }
+
+ /// Returns the span of this `Ident`, encompassing the entire string returned
+ /// by [`to_string`](Self::to_string).
+ pub fn span(&self) -> Span {
+ Span(self.0.span())
+ }
+
+ /// Configures the span of this `Ident`, possibly changing its hygiene context.
+ pub fn set_span(&mut self, span: Span) {
+ self.0 = self.0.with_span(span.0);
+ }
+}
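The difference between `new` and `new_raw` matters when a generated name collides with a keyword. A hypothetical sketch (not from this diff, assuming a proc-macro context):

```rust
use proc_macro::{Ident, Span};

fn field_ident(name: &str) -> Ident {
    if matches!(name, "type" | "match" | "fn") {
        // Emitted as a raw identifier, e.g. `r#type`.
        Ident::new_raw(name, Span::call_site())
    } else {
        Ident::new(name, Span::call_site())
    }
}
```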
+
+/// Prints the identifier as a string that should be losslessly convertible
+/// back into the same identifier.
+impl fmt::Display for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.to_string())
+ }
+}
+
+impl fmt::Debug for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("Ident")
+ .field("ident", &self.to_string())
+ .field("span", &self.span())
+ .finish()
+ }
+}
+
+/// A literal string (`"hello"`), byte string (`b"hello"`),
+/// character (`'a'`), byte character (`b'a'`), an integer or floating point number
+/// with or without a suffix (`1`, `1u8`, `2.3`, `2.3f32`).
+/// Boolean literals like `true` and `false` do not belong here, they are `Ident`s.
+#[derive(Clone)]
+pub struct Literal(bridge::client::Literal);
+
+macro_rules! suffixed_int_literals {
+ ($($name:ident => $kind:ident,)*) => ($(
+ /// Creates a new suffixed integer literal with the specified value.
+ ///
+ /// This function will create an integer like `1u32` where the integer
+ /// value specified is the first part of the token and the integer type
+ /// (here `u32`) is appended as the suffix at the end of the token.
+ /// Literals created from negative numbers might not survive round-trips through
+ /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+ ///
+ /// Literals created through this method have the `Span::call_site()`
+ /// span by default, which can be configured with the `set_span` method
+ /// below.
+ pub fn $name(n: $kind) -> Literal {
+ Literal(bridge::client::Literal::typed_integer(&n.to_string(), stringify!($kind)))
+ }
+ )*)
+}
+
+macro_rules! unsuffixed_int_literals {
+ ($($name:ident => $kind:ident,)*) => ($(
+ /// Creates a new unsuffixed integer literal with the specified value.
+ ///
+ /// This function will create an integer like `1` where the integer
+ /// value specified is the first part of the token. No suffix is
+ /// specified on this token, meaning that invocations like
+ /// `Literal::i8_unsuffixed(1)` are equivalent to
+ /// `Literal::u32_unsuffixed(1)`.
+ /// Literals created from negative numbers might not survive round-trips through
+ /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+ ///
+ /// Literals created through this method have the `Span::call_site()`
+ /// span by default, which can be configured with the `set_span` method
+ /// below.
+ pub fn $name(n: $kind) -> Literal {
+ Literal(bridge::client::Literal::integer(&n.to_string()))
+ }
+ )*)
+}
+
+impl Literal {
+ suffixed_int_literals! {
+ u8_suffixed => u8,
+ u16_suffixed => u16,
+ u32_suffixed => u32,
+ u64_suffixed => u64,
+ u128_suffixed => u128,
+ usize_suffixed => usize,
+ i8_suffixed => i8,
+ i16_suffixed => i16,
+ i32_suffixed => i32,
+ i64_suffixed => i64,
+ i128_suffixed => i128,
+ isize_suffixed => isize,
+ }
+
+ unsuffixed_int_literals! {
+ u8_unsuffixed => u8,
+ u16_unsuffixed => u16,
+ u32_unsuffixed => u32,
+ u64_unsuffixed => u64,
+ u128_unsuffixed => u128,
+ usize_unsuffixed => usize,
+ i8_unsuffixed => i8,
+ i16_unsuffixed => i16,
+ i32_unsuffixed => i32,
+ i64_unsuffixed => i64,
+ i128_unsuffixed => i128,
+ isize_unsuffixed => isize,
+ }
+
+ /// Creates a new unsuffixed floating-point literal.
+ ///
+ /// This constructor is similar to those like `Literal::i8_unsuffixed` where
+ /// the float's value is emitted directly into the token but no suffix is
+ /// used, so it may be inferred to be a `f64` later in the compiler.
+ /// Literals created from negative numbers might not survive round-trips through
+ /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+ ///
+ /// # Panics
+ ///
+ /// This function requires that the specified float is finite, for
+ /// example if it is infinity or NaN this function will panic.
+ pub fn f32_unsuffixed(n: f32) -> Literal {
+ if !n.is_finite() {
+ panic!("Invalid float literal {n}");
+ }
+ let mut repr = n.to_string();
+ if !repr.contains('.') {
+ repr.push_str(".0");
+ }
+ Literal(bridge::client::Literal::float(&repr))
+ }
+
+ /// Creates a new suffixed floating-point literal.
+ ///
+ /// This constructor will create a literal like `1.0f32` where the value
+ /// specified is the preceding part of the token and `f32` is the suffix of
+ /// the token. This token will always be inferred to be an `f32` in the
+ /// compiler.
+ /// Literals created from negative numbers might not survive round-trips through
+ /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+ ///
+ /// # Panics
+ ///
+ /// This function requires that the specified float is finite, for
+ /// example if it is infinity or NaN this function will panic.
+ pub fn f32_suffixed(n: f32) -> Literal {
+ if !n.is_finite() {
+ panic!("Invalid float literal {n}");
+ }
+ Literal(bridge::client::Literal::f32(&n.to_string()))
+ }
+
+ /// Creates a new unsuffixed floating-point literal.
+ ///
+ /// This constructor is similar to those like `Literal::i8_unsuffixed` where
+ /// the float's value is emitted directly into the token but no suffix is
+ /// used, so it may be inferred to be a `f64` later in the compiler.
+ /// Literals created from negative numbers might not survive round-trips through
+ /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+ ///
+ /// # Panics
+ ///
+ /// This function requires that the specified float is finite, for
+ /// example if it is infinity or NaN this function will panic.
+ pub fn f64_unsuffixed(n: f64) -> Literal {
+ if !n.is_finite() {
+ panic!("Invalid float literal {n}");
+ }
+ let mut repr = n.to_string();
+ if !repr.contains('.') {
+ repr.push_str(".0");
+ }
+ Literal(bridge::client::Literal::float(&repr))
+ }
+
+ /// Creates a new suffixed floating-point literal.
+ ///
+ /// This constructor will create a literal like `1.0f64` where the value
+ /// specified is the preceding part of the token and `f64` is the suffix of
+ /// the token. This token will always be inferred to be an `f64` in the
+ /// compiler.
+ /// Literals created from negative numbers might not survive round-trips through
+ /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
+ ///
+ /// # Panics
+ ///
+ /// This function requires that the specified float is finite, for
+ /// example if it is infinity or NaN this function will panic.
+ pub fn f64_suffixed(n: f64) -> Literal {
+ if !n.is_finite() {
+ panic!("Invalid float literal {n}");
+ }
+ Literal(bridge::client::Literal::f64(&n.to_string()))
+ }
+
+ /// String literal.
+ pub fn string(string: &str) -> Literal {
+ Literal(bridge::client::Literal::string(string))
+ }
+
+ /// Character literal.
+ pub fn character(ch: char) -> Literal {
+ Literal(bridge::client::Literal::character(ch))
+ }
+
+ /// Byte string literal.
+ pub fn byte_string(bytes: &[u8]) -> Literal {
+ Literal(bridge::client::Literal::byte_string(bytes))
+ }
+
+ /// Returns the span encompassing this literal.
+ pub fn span(&self) -> Span {
+ Span(self.0.span())
+ }
+
+ /// Configures the span associated for this literal.
+ pub fn set_span(&mut self, span: Span) {
+ self.0.set_span(span.0);
+ }
+
+ /// Returns a `Span` that is a subset of `self.span()` containing only the
+ /// source bytes in range `range`. Returns `None` if the would-be trimmed
+ /// span is outside the bounds of `self`.
+ // FIXME(SergioBenitez): check that the byte range starts and ends at a
+ // UTF-8 boundary of the source. otherwise, it's likely that a panic will
+ // occur elsewhere when the source text is printed.
+ // FIXME(SergioBenitez): there is no way for the user to know what
+ // `self.span()` actually maps to, so this method can currently only be
+ // called blindly. For example, `to_string()` for the character 'c' returns
+ // "'\u{63}'"; there is no way for the user to know whether the source text
+ // was 'c' or whether it was '\u{63}'.
+ pub fn subspan<R: RangeBounds<usize>>(&self, range: R) -> Option<Span> {
+ self.0.subspan(range.start_bound().cloned(), range.end_bound().cloned()).map(Span)
+ }
+}
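A few of the constructors above in use; this is a hypothetical sketch (not part of this diff) assuming a proc-macro context:

```rust
use proc_macro::Literal;

fn sample_literals() -> Vec<Literal> {
    vec![
        Literal::u8_suffixed(7),       // token `7u8`
        Literal::i32_unsuffixed(-1),   // may round-trip as `-` followed by `1`
        Literal::f64_unsuffixed(1.0),  // token `1.0` (a `.0` is added if missing)
        Literal::string("hello"),      // token `"hello"`
        Literal::character('c'),       // token `'c'`
    ]
}
```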
+
+/// Parse a single literal from its stringified representation.
+///
+/// In order to parse successfully, the input string must not contain anything
+/// but the literal token. Specifically, it must not contain whitespace or
+/// comments in addition to the literal.
+///
+/// The resulting literal token will have a `Span::call_site()` span.
+///
+/// NOTE: some errors may cause panics instead of returning `LexError`. We
+/// reserve the right to change these errors into `LexError`s later.
+impl FromStr for Literal {
+ type Err = LexError;
+
+ fn from_str(src: &str) -> Result<Self, LexError> {
+ match bridge::client::Literal::from_str(src) {
+ Ok(literal) => Ok(Literal(literal)),
+ Err(()) => Err(LexError),
+ }
+ }
+}
+
+/// Prints the literal as a string that should be losslessly convertible
+/// back into the same literal (except for possible rounding for floating point literals).
+impl fmt::Display for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(&self.to_string())
+ }
+}
+
+impl fmt::Debug for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.0.fmt(f)
+ }
+}
+
+/// Tracked access to environment variables.
+pub mod tracked_env {
+ use std::env::{self, VarError};
+ use std::ffi::OsStr;
+
+ /// Retrieve an environment variable and add it to build dependency info.
+ /// The build system executing the compiler will know that the variable was accessed during
+ /// compilation, and will be able to rerun the build when the value of that variable changes.
+ /// Apart from the dependency tracking, this function is equivalent to `env::var` from the
+ /// standard library, except that the argument must be valid UTF-8.
+ pub fn var<K: AsRef<OsStr> + AsRef<str>>(key: K) -> Result<String, VarError> {
+ let key: &str = key.as_ref();
+ let value = env::var(key);
+ super::bridge::client::FreeFunctions::track_env_var(key, value.as_deref().ok());
+ value
+ }
+}
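A hedged sketch of how a macro author might use `tracked_env::var`; this is hypothetical, not part of this diff, and the API is unstable in the public `proc_macro` crate at this Rust version, so it needs a nightly feature gate:

```rust
#![feature(proc_macro_tracked_env)]

use proc_macro::{tracked_env, TokenStream};

#[proc_macro]
pub fn build_profile(_input: TokenStream) -> TokenStream {
    // The access is recorded in build-dependency info, so the build system can
    // rerun this macro whenever `PROFILE` changes.
    let profile = tracked_env::var("PROFILE").unwrap_or_else(|_| "unknown".into());
    format!("{profile:?}").parse().expect("a string literal is a valid token")
}
```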
+
+/// Tracked access to additional files.
+pub mod tracked_path {
+
+ /// Track a file explicitly.
+ ///
+ /// Commonly used for tracking asset preprocessing.
+ pub fn path<P: AsRef<str>>(path: P) {
+ let path: &str = path.as_ref();
+ super::bridge::client::FreeFunctions::track_path(path);
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/quote.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/quote.rs
new file mode 100644
index 000000000..39309faa4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/quote.rs
@@ -0,0 +1,139 @@
+//! # Quasiquoter
+//! This file contains the implementation internals of the quasiquoter provided by `quote!`.
+
+//! This quasiquoter uses macros 2.0 hygiene to reliably access
+//! items from `proc_macro`, to build a `proc_macro::TokenStream`.
+
+use super::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
+
+macro_rules! quote_tt {
+ (($($t:tt)*)) => { Group::new(Delimiter::Parenthesis, quote!($($t)*)) };
+ ([$($t:tt)*]) => { Group::new(Delimiter::Bracket, quote!($($t)*)) };
+ ({$($t:tt)*}) => { Group::new(Delimiter::Brace, quote!($($t)*)) };
+ (,) => { Punct::new(',', Spacing::Alone) };
+ (.) => { Punct::new('.', Spacing::Alone) };
+ (;) => { Punct::new(';', Spacing::Alone) };
+ (!) => { Punct::new('!', Spacing::Alone) };
+ (<) => { Punct::new('<', Spacing::Alone) };
+ (>) => { Punct::new('>', Spacing::Alone) };
+ (&) => { Punct::new('&', Spacing::Alone) };
+ (=) => { Punct::new('=', Spacing::Alone) };
+ ($i:ident) => { Ident::new(stringify!($i), Span::def_site()) };
+}
+
+macro_rules! quote_ts {
+ ((@ $($t:tt)*)) => { $($t)* };
+ (::) => {
+ [
+ TokenTree::from(Punct::new(':', Spacing::Joint)),
+ TokenTree::from(Punct::new(':', Spacing::Alone)),
+ ].iter()
+ .cloned()
+ .map(|mut x| {
+ x.set_span(Span::def_site());
+ x
+ })
+ .collect::<TokenStream>()
+ };
+ ($t:tt) => { TokenTree::from(quote_tt!($t)) };
+}
+
+/// Simpler version of the real `quote!` macro, implemented solely
+/// through `macro_rules`, for bootstrapping the real implementation
+/// (see the `quote` function), which does not have access to the
+/// real `quote!` macro due to the `proc_macro` crate not being
+/// able to depend on itself.
+///
+/// Note: supported tokens are a subset of the real `quote!`, but
+/// unquoting is different: instead of `$x`, this uses `(@ expr)`.
+macro_rules! quote {
+ () => { TokenStream::new() };
+ ($($t:tt)*) => {
+ [
+ $(TokenStream::from(quote_ts!($t)),)*
+ ].iter().cloned().collect::<TokenStream>()
+ };
+}
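The `(@ expr)` unquoting convention above can be surprising on first read. The following self-contained `macro_rules!` analogue (not part of this diff, and far simpler than the real quasiquoter) shows the same idea: ordinary tokens are quoted, while `(@ expr)` splices in the value of `expr`.

```rust
macro_rules! mini_quote_one {
    // `(@ expr)` is spliced in as-is...
    ((@ $e:expr)) => { $e };
    // ...while any other single token is "quoted" (here: stringified).
    ($t:tt) => { stringify!($t).to_string() };
}

macro_rules! mini_quote {
    ($($t:tt)*) => { vec![$(mini_quote_one!($t)),*] };
}

fn main() {
    let spliced = "SPLICED".to_string();
    let tokens = mini_quote!(fn foo (@ spliced));
    assert_eq!(tokens, vec!["fn".to_string(), "foo".to_string(), "SPLICED".to_string()]);
}
```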
+
+/// Quote a `TokenStream` into a `TokenStream`.
+/// This is the actual implementation of the `quote!()` proc macro.
+///
+/// It is loaded by the compiler in `register_builtin_macros`.
+pub fn quote(stream: TokenStream) -> TokenStream {
+ if stream.is_empty() {
+ return quote!(super::TokenStream::new());
+ }
+ let proc_macro_crate = quote!(crate);
+ let mut after_dollar = false;
+ let tokens = stream
+ .into_iter()
+ .filter_map(|tree| {
+ if after_dollar {
+ after_dollar = false;
+ match tree {
+ TokenTree::Ident(_) => {
+ return Some(quote!(Into::<super::TokenStream>::into(
+ Clone::clone(&(@ tree))),));
+ }
+ TokenTree::Punct(ref tt) if tt.as_char() == '$' => {}
+ _ => panic!("`$` must be followed by an ident or `$` in `quote!`"),
+ }
+ } else if let TokenTree::Punct(ref tt) = tree {
+ if tt.as_char() == '$' {
+ after_dollar = true;
+ return None;
+ }
+ }
+
+ Some(quote!(super::TokenStream::from((@ match tree {
+ TokenTree::Punct(tt) => quote!(super::TokenTree::Punct(super::Punct::new(
+ (@ TokenTree::from(Literal::character(tt.as_char()))),
+ (@ match tt.spacing() {
+ Spacing::Alone => quote!(super::Spacing::Alone),
+ Spacing::Joint => quote!(super::Spacing::Joint),
+ }),
+ ))),
+ TokenTree::Group(tt) => quote!(super::TokenTree::Group(super::Group::new(
+ (@ match tt.delimiter() {
+ Delimiter::Parenthesis => quote!(super::Delimiter::Parenthesis),
+ Delimiter::Brace => quote!(super::Delimiter::Brace),
+ Delimiter::Bracket => quote!(super::Delimiter::Bracket),
+ Delimiter::None => quote!(super::Delimiter::None),
+ }),
+ (@ quote(tt.stream())),
+ ))),
+ TokenTree::Ident(tt) => quote!(super::TokenTree::Ident(super::Ident::new(
+ (@ TokenTree::from(Literal::string(&tt.to_string()))),
+ (@ quote_span(proc_macro_crate.clone(), tt.span())),
+ ))),
+ TokenTree::Literal(tt) => quote!(super::TokenTree::Literal({
+ let mut iter = (@ TokenTree::from(Literal::string(&tt.to_string())))
+ .parse::<super::TokenStream>()
+ .unwrap()
+ .into_iter();
+ if let (Some(super::TokenTree::Literal(mut lit)), None) =
+ (iter.next(), iter.next())
+ {
+ lit.set_span((@ quote_span(proc_macro_crate.clone(), tt.span())));
+ lit
+ } else {
+ unreachable!()
+ }
+ }))
+ })),))
+ })
+ .collect::<TokenStream>();
+
+ if after_dollar {
+ panic!("unexpected trailing `$` in `quote!`");
+ }
+
+ quote!([(@ tokens)].iter().cloned().collect::<super::TokenStream>())
+}
+
+/// Quote a `Span` into a `TokenStream`.
+/// This is needed to implement a custom quoter.
+pub fn quote_span(proc_macro_crate: TokenStream, span: Span) -> TokenStream {
+ let id = span.save_span();
+ quote!((@ proc_macro_crate ) ::Span::recover_proc_macro_span((@ TokenTree::from(Literal::usize_unsuffixed(id)))))
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/ra_server.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/ra_server.rs
new file mode 100644
index 000000000..7e8e67856
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_64/ra_server.rs
@@ -0,0 +1,792 @@
+//! Rustc proc-macro server implementation with tt
+//!
+//! Based on idea from <https://github.com/fedochet/rust-proc-macro-expander>
+//! The lib-proc-macro server backend is `TokenStream`-agnostic, so we can
+//! plug in any `TokenStream` implementation.
+//! The original idea from fedochet was to use proc-macro2 as the backend;
+//! we use `tt` instead for better integration with rust-analyzer.
+//!
+//! FIXME: No span and source file information is implemented yet
+
+use super::proc_macro::bridge::{self, server};
+
+use std::collections::HashMap;
+use std::hash::Hash;
+use std::iter::FromIterator;
+use std::ops::Bound;
+use std::{ascii, vec::IntoIter};
+
+type Group = tt::Subtree;
+type TokenTree = tt::TokenTree;
+type Punct = tt::Punct;
+type Spacing = tt::Spacing;
+type Literal = tt::Literal;
+type Span = tt::TokenId;
+
+#[derive(Debug, Default, Clone)]
+pub struct TokenStream {
+ pub token_trees: Vec<TokenTree>,
+}
+
+impl TokenStream {
+ pub fn new() -> Self {
+ TokenStream::default()
+ }
+
+ pub fn with_subtree(subtree: tt::Subtree) -> Self {
+ if subtree.delimiter.is_some() {
+ TokenStream { token_trees: vec![TokenTree::Subtree(subtree)] }
+ } else {
+ TokenStream { token_trees: subtree.token_trees }
+ }
+ }
+
+ pub fn into_subtree(self) -> tt::Subtree {
+ tt::Subtree { delimiter: None, token_trees: self.token_trees }
+ }
+
+ pub fn is_empty(&self) -> bool {
+ self.token_trees.is_empty()
+ }
+}
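A hypothetical check (not part of this diff) of the delimiter handling in `with_subtree`: an undelimited subtree is flattened into its children, while a delimited one is kept as a single token tree.

```rust
fn with_subtree_flattening() {
    let undelimited = tt::Subtree { delimiter: None, token_trees: vec![] };
    assert_eq!(TokenStream::with_subtree(undelimited).token_trees.len(), 0);

    let delimited = tt::Subtree {
        delimiter: Some(tt::Delimiter {
            id: tt::TokenId::unspecified(),
            kind: tt::DelimiterKind::Parenthesis,
        }),
        token_trees: vec![],
    };
    // The whole delimited subtree becomes one `TokenTree::Subtree` entry.
    assert_eq!(TokenStream::with_subtree(delimited).token_trees.len(), 1);
}
```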
+
+/// Creates a token stream containing a single token tree.
+impl From<TokenTree> for TokenStream {
+ fn from(tree: TokenTree) -> TokenStream {
+ TokenStream { token_trees: vec![tree] }
+ }
+}
+
+/// Collects a number of token trees into a single stream.
+impl FromIterator<TokenTree> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
+ trees.into_iter().map(TokenStream::from).collect()
+ }
+}
+
+/// A "flattening" operation on token streams, collects token trees
+/// from multiple token streams into a single stream.
+impl FromIterator<TokenStream> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+ let mut builder = TokenStreamBuilder::new();
+ streams.into_iter().for_each(|stream| builder.push(stream));
+ builder.build()
+ }
+}
+
+impl Extend<TokenTree> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, trees: I) {
+ self.extend(trees.into_iter().map(TokenStream::from));
+ }
+}
+
+impl Extend<TokenStream> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+ for item in streams {
+ for tkn in item {
+ match tkn {
+ tt::TokenTree::Subtree(subtree) if subtree.delimiter.is_none() => {
+ self.token_trees.extend(subtree.token_trees);
+ }
+ _ => {
+ self.token_trees.push(tkn);
+ }
+ }
+ }
+ }
+ }
+}
+
+#[derive(Clone)]
+pub struct SourceFile {
+ // FIXME stub
+}
+
+type Level = super::proc_macro::Level;
+type LineColumn = super::proc_macro::LineColumn;
+
+/// A structure representing a diagnostic message and associated children
+/// messages.
+#[derive(Clone, Debug)]
+pub struct Diagnostic {
+ level: Level,
+ message: String,
+ spans: Vec<Span>,
+ children: Vec<Diagnostic>,
+}
+
+impl Diagnostic {
+ /// Creates a new diagnostic with the given `level` and `message`.
+ pub fn new<T: Into<String>>(level: Level, message: T) -> Diagnostic {
+ Diagnostic { level, message: message.into(), spans: vec![], children: vec![] }
+ }
+}
+
+// The rustc server `Ident` has to be `Copy`, so instead of passing ident data
+// around directly we intern it and hand out a small `IdentId` index as a stand-in.
+#[derive(Hash, Eq, PartialEq, Copy, Clone)]
+pub struct IdentId(u32);
+
+#[derive(Clone, Hash, Eq, PartialEq)]
+struct IdentData(tt::Ident);
+
+#[derive(Default)]
+struct IdentInterner {
+ idents: HashMap<IdentData, u32>,
+ ident_data: Vec<IdentData>,
+}
+
+impl IdentInterner {
+ fn intern(&mut self, data: &IdentData) -> u32 {
+ if let Some(index) = self.idents.get(data) {
+ return *index;
+ }
+
+ let index = self.idents.len() as u32;
+ self.ident_data.push(data.clone());
+ self.idents.insert(data.clone(), index);
+ index
+ }
+
+ fn get(&self, index: u32) -> &IdentData {
+ &self.ident_data[index as usize]
+ }
+
+ #[allow(unused)]
+ fn get_mut(&mut self, index: u32) -> &mut IdentData {
+ self.ident_data.get_mut(index as usize).expect("Should be consistent")
+ }
+}
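A hypothetical in-module check (not part of this diff) of the interner's behaviour: interning the same ident data twice returns the same index, which is what lets the `Copy` `IdentId` stand in for the non-`Copy` ident data across the bridge.

```rust
fn interning_is_idempotent() {
    let mut interner = IdentInterner::default();
    let data = IdentData(tt::Ident { text: "foo".into(), id: tt::TokenId::unspecified() });
    let first = interner.intern(&data);
    let second = interner.intern(&data);
    // The second call finds the existing entry instead of pushing a new one.
    assert_eq!(first, second);
    assert_eq!(interner.get(first).0.text.as_str(), "foo");
}
```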
+
+pub struct TokenStreamBuilder {
+ acc: TokenStream,
+}
+
+/// Public implementation details for the `TokenStream` type, such as iterators.
+pub mod token_stream {
+ use std::str::FromStr;
+
+ use super::{TokenStream, TokenTree};
+
+ /// An iterator over `TokenStream`'s `TokenTree`s.
+ /// The iteration is "shallow", i.e., the iterator doesn't recurse into delimited groups,
+ /// and returns whole groups as token trees.
+ impl IntoIterator for TokenStream {
+ type Item = TokenTree;
+ type IntoIter = super::IntoIter<TokenTree>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ self.token_trees.into_iter()
+ }
+ }
+
+ type LexError = String;
+
+ /// Attempts to break the string into tokens and parse those tokens into a token stream.
+ /// May fail for a number of reasons, for example, if the string contains unbalanced delimiters
+ /// or characters not existing in the language.
+ /// All tokens in the parsed stream get `Span::call_site()` spans.
+ ///
+ /// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
+ /// change these errors into `LexError`s later.
+ impl FromStr for TokenStream {
+ type Err = LexError;
+
+ fn from_str(src: &str) -> Result<TokenStream, LexError> {
+ let (subtree, _token_map) =
+ mbe::parse_to_token_tree(src).ok_or("Failed to parse from mbe")?;
+
+ let subtree = subtree_replace_token_ids_with_unspecified(subtree);
+ Ok(TokenStream::with_subtree(subtree))
+ }
+ }
+
+ impl ToString for TokenStream {
+ fn to_string(&self) -> String {
+ tt::pretty(&self.token_trees)
+ }
+ }
+
+ fn subtree_replace_token_ids_with_unspecified(subtree: tt::Subtree) -> tt::Subtree {
+ tt::Subtree {
+ delimiter: subtree
+ .delimiter
+ .map(|d| tt::Delimiter { id: tt::TokenId::unspecified(), ..d }),
+ token_trees: subtree
+ .token_trees
+ .into_iter()
+ .map(token_tree_replace_token_ids_with_unspecified)
+ .collect(),
+ }
+ }
+
+ fn token_tree_replace_token_ids_with_unspecified(tt: tt::TokenTree) -> tt::TokenTree {
+ match tt {
+ tt::TokenTree::Leaf(leaf) => {
+ tt::TokenTree::Leaf(leaf_replace_token_ids_with_unspecified(leaf))
+ }
+ tt::TokenTree::Subtree(subtree) => {
+ tt::TokenTree::Subtree(subtree_replace_token_ids_with_unspecified(subtree))
+ }
+ }
+ }
+
+ fn leaf_replace_token_ids_with_unspecified(leaf: tt::Leaf) -> tt::Leaf {
+ match leaf {
+ tt::Leaf::Literal(lit) => {
+ tt::Leaf::Literal(tt::Literal { id: tt::TokenId::unspecified(), ..lit })
+ }
+ tt::Leaf::Punct(punct) => {
+ tt::Leaf::Punct(tt::Punct { id: tt::TokenId::unspecified(), ..punct })
+ }
+ tt::Leaf::Ident(ident) => {
+ tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), ..ident })
+ }
+ }
+ }
+}
+
+impl TokenStreamBuilder {
+ fn new() -> TokenStreamBuilder {
+ TokenStreamBuilder { acc: TokenStream::new() }
+ }
+
+ fn push(&mut self, stream: TokenStream) {
+ self.acc.extend(stream.into_iter())
+ }
+
+ fn build(self) -> TokenStream {
+ self.acc
+ }
+}
+
+pub struct FreeFunctions;
+
+#[derive(Clone)]
+pub struct TokenStreamIter {
+ trees: IntoIter<TokenTree>,
+}
+
+#[derive(Default)]
+pub struct RustAnalyzer {
+ ident_interner: IdentInterner,
+ // FIXME: store span information here.
+}
+
+impl server::Types for RustAnalyzer {
+ type FreeFunctions = FreeFunctions;
+ type TokenStream = TokenStream;
+ type Ident = IdentId;
+ type Literal = Literal;
+ type SourceFile = SourceFile;
+ type Diagnostic = Diagnostic;
+ type Span = Span;
+ type MultiSpan = Vec<Span>;
+}
+
+impl server::FreeFunctions for RustAnalyzer {
+ fn track_env_var(&mut self, _var: &str, _value: Option<&str>) {
+ // FIXME: track env var accesses
+ // https://github.com/rust-lang/rust/pull/71858
+ }
+ fn track_path(&mut self, _path: &str) {}
+}
+
+impl server::TokenStream for RustAnalyzer {
+ fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
+ stream.is_empty()
+ }
+ fn from_str(&mut self, src: &str) -> Self::TokenStream {
+ use std::str::FromStr;
+
+ Self::TokenStream::from_str(src).expect("cannot parse string")
+ }
+ fn to_string(&mut self, stream: &Self::TokenStream) -> String {
+ stream.to_string()
+ }
+ fn from_token_tree(
+ &mut self,
+ tree: bridge::TokenTree<Self::TokenStream, Self::Span, Self::Ident, Self::Literal>,
+ ) -> Self::TokenStream {
+ match tree {
+ bridge::TokenTree::Group(group) => {
+ let group = Group {
+ delimiter: delim_to_internal(group.delimiter),
+ token_trees: match group.stream {
+ Some(stream) => stream.into_iter().collect(),
+ None => Vec::new(),
+ },
+ };
+ let tree = TokenTree::from(group);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+
+ bridge::TokenTree::Ident(IdentId(index)) => {
+ let IdentData(ident) = self.ident_interner.get(index).clone();
+ let ident: tt::Ident = ident;
+ let leaf = tt::Leaf::from(ident);
+ let tree = TokenTree::from(leaf);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+
+ bridge::TokenTree::Literal(literal) => {
+ let leaf = tt::Leaf::from(literal);
+ let tree = TokenTree::from(leaf);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+
+ bridge::TokenTree::Punct(p) => {
+ let punct = tt::Punct {
+ char: p.ch as char,
+ spacing: if p.joint { Spacing::Joint } else { Spacing::Alone },
+ id: p.span,
+ };
+ let leaf = tt::Leaf::from(punct);
+ let tree = TokenTree::from(leaf);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+ }
+ }
+
+ fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
+ Ok(self_.clone())
+ }
+
+ fn concat_trees(
+ &mut self,
+ base: Option<Self::TokenStream>,
+ trees: Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Ident, Self::Literal>>,
+ ) -> Self::TokenStream {
+ let mut builder = TokenStreamBuilder::new();
+ if let Some(base) = base {
+ builder.push(base);
+ }
+ for tree in trees {
+ builder.push(self.from_token_tree(tree));
+ }
+ builder.build()
+ }
+
+ fn concat_streams(
+ &mut self,
+ base: Option<Self::TokenStream>,
+ streams: Vec<Self::TokenStream>,
+ ) -> Self::TokenStream {
+ let mut builder = TokenStreamBuilder::new();
+ if let Some(base) = base {
+ builder.push(base);
+ }
+ for stream in streams {
+ builder.push(stream);
+ }
+ builder.build()
+ }
+
+ fn into_trees(
+ &mut self,
+ stream: Self::TokenStream,
+ ) -> Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Ident, Self::Literal>> {
+ stream
+ .into_iter()
+ .map(|tree| match tree {
+ tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
+ bridge::TokenTree::Ident(IdentId(self.ident_interner.intern(&IdentData(ident))))
+ }
+ tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => bridge::TokenTree::Literal(lit),
+ tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => {
+ bridge::TokenTree::Punct(bridge::Punct {
+ ch: punct.char as u8,
+ joint: punct.spacing == Spacing::Joint,
+ span: punct.id,
+ })
+ }
+ tt::TokenTree::Subtree(subtree) => bridge::TokenTree::Group(bridge::Group {
+ delimiter: delim_to_external(subtree.delimiter),
+ stream: if subtree.token_trees.is_empty() {
+ None
+ } else {
+ Some(subtree.token_trees.into_iter().collect())
+ },
+ span: bridge::DelimSpan::from_single(
+ subtree.delimiter.map_or(Span::unspecified(), |del| del.id),
+ ),
+ }),
+ })
+ .collect()
+ }
+}
+
+fn delim_to_internal(d: bridge::Delimiter) -> Option<tt::Delimiter> {
+ let kind = match d {
+ bridge::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis,
+ bridge::Delimiter::Brace => tt::DelimiterKind::Brace,
+ bridge::Delimiter::Bracket => tt::DelimiterKind::Bracket,
+ bridge::Delimiter::None => return None,
+ };
+ Some(tt::Delimiter { id: tt::TokenId::unspecified(), kind })
+}
+
+fn delim_to_external(d: Option<tt::Delimiter>) -> bridge::Delimiter {
+ match d.map(|it| it.kind) {
+ Some(tt::DelimiterKind::Parenthesis) => bridge::Delimiter::Parenthesis,
+ Some(tt::DelimiterKind::Brace) => bridge::Delimiter::Brace,
+ Some(tt::DelimiterKind::Bracket) => bridge::Delimiter::Bracket,
+ None => bridge::Delimiter::None,
+ }
+}
+
+fn spacing_to_internal(spacing: bridge::Spacing) -> Spacing {
+ match spacing {
+ bridge::Spacing::Alone => Spacing::Alone,
+ bridge::Spacing::Joint => Spacing::Joint,
+ }
+}
+
+fn spacing_to_external(spacing: Spacing) -> bridge::Spacing {
+ match spacing {
+ Spacing::Alone => bridge::Spacing::Alone,
+ Spacing::Joint => bridge::Spacing::Joint,
+ }
+}
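A hypothetical in-module check (not part of this diff) that the delimiter conversions above round-trip, with `bridge::Delimiter::None` mapping to the absence of a `tt` delimiter:

```rust
fn delimiter_round_trip() {
    let internal = delim_to_internal(bridge::Delimiter::Brace);
    assert!(matches!(delim_to_external(internal), bridge::Delimiter::Brace));
    // `None` on the bridge side means "no delimiter" on the tt side.
    assert!(delim_to_internal(bridge::Delimiter::None).is_none());
}
```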
+
+impl server::Ident for RustAnalyzer {
+ fn new(&mut self, string: &str, span: Self::Span, _is_raw: bool) -> Self::Ident {
+ IdentId(self.ident_interner.intern(&IdentData(tt::Ident { text: string.into(), id: span })))
+ }
+
+ fn span(&mut self, ident: Self::Ident) -> Self::Span {
+ self.ident_interner.get(ident.0).0.id
+ }
+ fn with_span(&mut self, ident: Self::Ident, span: Self::Span) -> Self::Ident {
+ let data = self.ident_interner.get(ident.0);
+ let new = IdentData(tt::Ident { id: span, ..data.0.clone() });
+ IdentId(self.ident_interner.intern(&new))
+ }
+}
+
+impl server::Literal for RustAnalyzer {
+ fn debug_kind(&mut self, _literal: &Self::Literal) -> String {
+ // r-a: debug_kind and suffix are unsupported; corresponding client code has been changed to not call these.
+ // They must still be present to be ABI-compatible and work with upstream proc_macro.
+ "".to_owned()
+ }
+ fn from_str(&mut self, s: &str) -> Result<Self::Literal, ()> {
+ Ok(Literal { text: s.into(), id: tt::TokenId::unspecified() })
+ }
+ fn symbol(&mut self, literal: &Self::Literal) -> String {
+ literal.text.to_string()
+ }
+ fn suffix(&mut self, _literal: &Self::Literal) -> Option<String> {
+ None
+ }
+
+ fn to_string(&mut self, literal: &Self::Literal) -> String {
+ literal.to_string()
+ }
+
+ fn integer(&mut self, n: &str) -> Self::Literal {
+ let n = match n.parse::<i128>() {
+ Ok(n) => n.to_string(),
+ Err(_) => n.parse::<u128>().unwrap().to_string(),
+ };
+ Literal { text: n.into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn typed_integer(&mut self, n: &str, kind: &str) -> Self::Literal {
+ macro_rules! def_suffixed_integer {
+ ($kind:ident, $($ty:ty),*) => {
+ match $kind {
+ $(
+ stringify!($ty) => {
+ let n: $ty = n.parse().unwrap();
+ format!(concat!("{}", stringify!($ty)), n)
+ }
+ )*
+ _ => unimplemented!("unknown args for typed_integer: n {}, kind {}", n, $kind),
+ }
+ }
+ }
+
+ let text = def_suffixed_integer! {kind, u8, u16, u32, u64, u128, usize, i8, i16, i32, i64, i128, isize};
+
+ Literal { text: text.into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn float(&mut self, n: &str) -> Self::Literal {
+ let n: f64 = n.parse().unwrap();
+ let mut text = f64::to_string(&n);
+ if !text.contains('.') {
+ text += ".0"
+ }
+ Literal { text: text.into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn f32(&mut self, n: &str) -> Self::Literal {
+ let n: f32 = n.parse().unwrap();
+ let text = format!("{}f32", n);
+ Literal { text: text.into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn f64(&mut self, n: &str) -> Self::Literal {
+ let n: f64 = n.parse().unwrap();
+ let text = format!("{}f64", n);
+ Literal { text: text.into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn string(&mut self, string: &str) -> Self::Literal {
+ let mut escaped = String::new();
+ for ch in string.chars() {
+ escaped.extend(ch.escape_debug());
+ }
+ Literal { text: format!("\"{}\"", escaped).into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn character(&mut self, ch: char) -> Self::Literal {
+ Literal { text: format!("'{}'", ch).into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn byte_string(&mut self, bytes: &[u8]) -> Self::Literal {
+ let string = bytes
+ .iter()
+ .cloned()
+ .flat_map(ascii::escape_default)
+ .map(Into::<char>::into)
+ .collect::<String>();
+
+ Literal { text: format!("b\"{}\"", string).into(), id: tt::TokenId::unspecified() }
+ }
+
+ fn span(&mut self, literal: &Self::Literal) -> Self::Span {
+ literal.id
+ }
+
+ fn set_span(&mut self, literal: &mut Self::Literal, span: Self::Span) {
+ literal.id = span;
+ }
+
+ fn subspan(
+ &mut self,
+ _literal: &Self::Literal,
+ _start: Bound<usize>,
+ _end: Bound<usize>,
+ ) -> Option<Self::Span> {
+ // FIXME handle span
+ None
+ }
+}
+
+impl server::SourceFile for RustAnalyzer {
+ // FIXME these are all stubs
+ fn eq(&mut self, _file1: &Self::SourceFile, _file2: &Self::SourceFile) -> bool {
+ true
+ }
+ fn path(&mut self, _file: &Self::SourceFile) -> String {
+ String::new()
+ }
+ fn is_real(&mut self, _file: &Self::SourceFile) -> bool {
+ true
+ }
+}
+
+impl server::Diagnostic for RustAnalyzer {
+ fn new(&mut self, level: Level, msg: &str, spans: Self::MultiSpan) -> Self::Diagnostic {
+ let mut diag = Diagnostic::new(level, msg);
+ diag.spans = spans;
+ diag
+ }
+
+ fn sub(
+ &mut self,
+ _diag: &mut Self::Diagnostic,
+ _level: Level,
+ _msg: &str,
+ _spans: Self::MultiSpan,
+ ) {
+ // FIXME handle diagnostic
+ //
+ }
+
+ fn emit(&mut self, _diag: Self::Diagnostic) {
+ // FIXME handle diagnostic
+ // diag.emit()
+ }
+}
+
+impl server::Span for RustAnalyzer {
+ fn debug(&mut self, span: Self::Span) -> String {
+ format!("{:?}", span.0)
+ }
+ fn source_file(&mut self, _span: Self::Span) -> Self::SourceFile {
+ SourceFile {}
+ }
+ fn save_span(&mut self, _span: Self::Span) -> usize {
+ // FIXME stub
+ 0
+ }
+ fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span {
+ // FIXME stub
+ tt::TokenId::unspecified()
+ }
+ /// Recent feature, not yet part of the stable `proc_macro` API.
+ ///
+ /// See PR:
+ /// https://github.com/rust-lang/rust/pull/55780
+ fn source_text(&mut self, _span: Self::Span) -> Option<String> {
+ None
+ }
+
+ fn parent(&mut self, _span: Self::Span) -> Option<Self::Span> {
+ // FIXME handle span
+ None
+ }
+ fn source(&mut self, span: Self::Span) -> Self::Span {
+ // FIXME handle span
+ span
+ }
+ fn start(&mut self, _span: Self::Span) -> LineColumn {
+ // FIXME handle span
+ LineColumn { line: 0, column: 0 }
+ }
+ fn end(&mut self, _span: Self::Span) -> LineColumn {
+ // FIXME handle span
+ LineColumn { line: 0, column: 0 }
+ }
+ fn join(&mut self, first: Self::Span, _second: Self::Span) -> Option<Self::Span> {
+ // Just return the first span again, because some macros will unwrap the result.
+ Some(first)
+ }
+ fn resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span {
+ // FIXME handle span
+ tt::TokenId::unspecified()
+ }
+
+ fn after(&mut self, _self_: Self::Span) -> Self::Span {
+ tt::TokenId::unspecified()
+ }
+
+ fn before(&mut self, _self_: Self::Span) -> Self::Span {
+ tt::TokenId::unspecified()
+ }
+}
+
+impl server::MultiSpan for RustAnalyzer {
+ fn new(&mut self) -> Self::MultiSpan {
+ // FIXME handle span
+ vec![]
+ }
+
+ fn push(&mut self, other: &mut Self::MultiSpan, span: Self::Span) {
+ // TODO
+ other.push(span)
+ }
+}
+
+impl server::Server for RustAnalyzer {
+ fn globals(&mut self) -> bridge::ExpnGlobals<Self::Span> {
+ bridge::ExpnGlobals {
+ def_site: Span::unspecified(),
+ call_site: Span::unspecified(),
+ mixed_site: Span::unspecified(),
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::super::proc_macro::bridge::server::Literal;
+ use super::*;
+
+ #[test]
+ fn test_ra_server_literals() {
+ let mut srv = RustAnalyzer { ident_interner: IdentInterner::default() };
+ assert_eq!(srv.integer("1234").text, "1234");
+
+ assert_eq!(srv.typed_integer("12", "u8").text, "12u8");
+ assert_eq!(srv.typed_integer("255", "u16").text, "255u16");
+ assert_eq!(srv.typed_integer("1234", "u32").text, "1234u32");
+ assert_eq!(srv.typed_integer("15846685", "u64").text, "15846685u64");
+ assert_eq!(srv.typed_integer("15846685258", "u128").text, "15846685258u128");
+ assert_eq!(srv.typed_integer("156788984", "usize").text, "156788984usize");
+ assert_eq!(srv.typed_integer("127", "i8").text, "127i8");
+ assert_eq!(srv.typed_integer("255", "i16").text, "255i16");
+ assert_eq!(srv.typed_integer("1234", "i32").text, "1234i32");
+ assert_eq!(srv.typed_integer("15846685", "i64").text, "15846685i64");
+ assert_eq!(srv.typed_integer("15846685258", "i128").text, "15846685258i128");
+ assert_eq!(srv.float("0").text, "0.0");
+ assert_eq!(srv.float("15684.5867").text, "15684.5867");
+ assert_eq!(srv.f32("15684.58").text, "15684.58f32");
+ assert_eq!(srv.f64("15684.58").text, "15684.58f64");
+
+ assert_eq!(srv.string("hello_world").text, "\"hello_world\"");
+ assert_eq!(srv.character('c').text, "'c'");
+ assert_eq!(srv.byte_string(b"1234586\x88").text, "b\"1234586\\x88\"");
+
+ // u128::max
+ assert_eq!(
+ srv.integer("340282366920938463463374607431768211455").text,
+ "340282366920938463463374607431768211455"
+ );
+ // i128::min
+ assert_eq!(
+ srv.integer("-170141183460469231731687303715884105728").text,
+ "-170141183460469231731687303715884105728"
+ );
+ }
+
+ #[test]
+ fn test_ra_server_to_string() {
+ let s = TokenStream {
+ token_trees: vec![
+ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "struct".into(),
+ id: tt::TokenId::unspecified(),
+ })),
+ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "T".into(),
+ id: tt::TokenId::unspecified(),
+ })),
+ tt::TokenTree::Subtree(tt::Subtree {
+ delimiter: Some(tt::Delimiter {
+ id: tt::TokenId::unspecified(),
+ kind: tt::DelimiterKind::Brace,
+ }),
+ token_trees: vec![],
+ }),
+ ],
+ };
+
+ assert_eq!(s.to_string(), "struct T {}");
+ }
+
+ #[test]
+ fn test_ra_server_from_str() {
+ use std::str::FromStr;
+ let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree {
+ delimiter: Some(tt::Delimiter {
+ id: tt::TokenId::unspecified(),
+ kind: tt::DelimiterKind::Parenthesis,
+ }),
+ token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "a".into(),
+ id: tt::TokenId::unspecified(),
+ }))],
+ });
+
+ let t1 = TokenStream::from_str("(a)").unwrap();
+ assert_eq!(t1.token_trees.len(), 1);
+ assert_eq!(t1.token_trees[0], subtree_paren_a);
+
+ let t2 = TokenStream::from_str("(a);").unwrap();
+ assert_eq!(t2.token_trees.len(), 2);
+ assert_eq!(t2.token_trees[0], subtree_paren_a);
+
+ let underscore = TokenStream::from_str("_").unwrap();
+ assert_eq!(
+ underscore.token_trees[0],
+ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "_".into(),
+ id: tt::TokenId::unspecified(),
+ }))
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/mod.rs
new file mode 100644
index 000000000..44712f419
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/mod.rs
@@ -0,0 +1,102 @@
+//! Proc macro ABI
+
+extern crate proc_macro;
+
+#[allow(dead_code)]
+#[doc(hidden)]
+mod ra_server;
+
+use libloading::Library;
+use proc_macro_api::ProcMacroKind;
+
+use super::PanicMessage;
+
+pub(crate) struct Abi {
+ exported_macros: Vec<proc_macro::bridge::client::ProcMacro>,
+}
+
+impl From<proc_macro::bridge::PanicMessage> for PanicMessage {
+ fn from(p: proc_macro::bridge::PanicMessage) -> Self {
+ Self { message: p.as_str().map(|s| s.to_string()) }
+ }
+}
+
+impl Abi {
+ pub unsafe fn from_lib(lib: &Library, symbol_name: String) -> Result<Abi, libloading::Error> {
+ let macros: libloading::Symbol<'_, &&[proc_macro::bridge::client::ProcMacro]> =
+ lib.get(symbol_name.as_bytes())?;
+ Ok(Self { exported_macros: macros.to_vec() })
+ }
+
+ pub fn expand(
+ &self,
+ macro_name: &str,
+ macro_body: &tt::Subtree,
+ attributes: Option<&tt::Subtree>,
+ ) -> Result<tt::Subtree, PanicMessage> {
+ let parsed_body = ra_server::TokenStream::with_subtree(macro_body.clone());
+
+ let parsed_attributes = attributes.map_or(ra_server::TokenStream::new(), |attr| {
+ ra_server::TokenStream::with_subtree(attr.clone())
+ });
+
+ for proc_macro in &self.exported_macros {
+ match proc_macro {
+ proc_macro::bridge::client::ProcMacro::CustomDerive {
+ trait_name, client, ..
+ } if *trait_name == macro_name => {
+ let res = client.run(
+ &proc_macro::bridge::server::SameThread,
+ ra_server::RustAnalyzer::default(),
+ parsed_body,
+ true,
+ );
+ return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
+ }
+ proc_macro::bridge::client::ProcMacro::Bang { name, client }
+ if *name == macro_name =>
+ {
+ let res = client.run(
+ &proc_macro::bridge::server::SameThread,
+ ra_server::RustAnalyzer::default(),
+ parsed_body,
+ true,
+ );
+ return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
+ }
+ proc_macro::bridge::client::ProcMacro::Attr { name, client }
+ if *name == macro_name =>
+ {
+ let res = client.run(
+ &proc_macro::bridge::server::SameThread,
+ ra_server::RustAnalyzer::default(),
+ parsed_attributes,
+ parsed_body,
+ true,
+ );
+ return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
+ }
+ _ => continue,
+ }
+ }
+
+ Err(proc_macro::bridge::PanicMessage::String("Nothing to expand".to_string()).into())
+ }
+
+ pub fn list_macros(&self) -> Vec<(String, ProcMacroKind)> {
+ self.exported_macros
+ .iter()
+ .map(|proc_macro| match proc_macro {
+ proc_macro::bridge::client::ProcMacro::CustomDerive { trait_name, .. } => {
+ (trait_name.to_string(), ProcMacroKind::CustomDerive)
+ }
+ proc_macro::bridge::client::ProcMacro::Bang { name, .. } => {
+ (name.to_string(), ProcMacroKind::FuncLike)
+ }
+ proc_macro::bridge::client::ProcMacro::Attr { name, .. } => {
+ (name.to_string(), ProcMacroKind::Attr)
+ }
+ })
+ .collect()
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs
new file mode 100644
index 000000000..46882845a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs
@@ -0,0 +1,518 @@
+//! proc-macro server implementation
+//!
+//! Based on idea from <https://github.com/fedochet/rust-proc-macro-expander>
+//! The lib-proc-macro server backend is `TokenStream`-agnostic, such that
+//! we could provide any TokenStream implementation.
+//! The original idea from fedochet was to use proc-macro2 as the backend;
+//! we use tt instead for better integration with RA.
+//!
+//! FIXME: No span and source file information is implemented yet
+
+use super::proc_macro::{
+ self,
+ bridge::{self, server},
+};
+
+mod token_stream;
+pub use token_stream::TokenStream;
+use token_stream::TokenStreamBuilder;
+
+mod symbol;
+pub use symbol::*;
+
+use std::{iter::FromIterator, ops::Bound};
+
+type Group = tt::Subtree;
+type TokenTree = tt::TokenTree;
+type Punct = tt::Punct;
+type Spacing = tt::Spacing;
+type Literal = tt::Literal;
+type Span = tt::TokenId;
+
+#[derive(Clone)]
+pub struct SourceFile {
+ // FIXME stub
+}
+
+type Level = super::proc_macro::Level;
+type LineColumn = super::proc_macro::LineColumn;
+
+/// A structure representing a diagnostic message and associated children
+/// messages.
+#[derive(Clone, Debug)]
+pub struct Diagnostic {
+ level: Level,
+ message: String,
+ spans: Vec<Span>,
+ children: Vec<Diagnostic>,
+}
+
+impl Diagnostic {
+ /// Creates a new diagnostic with the given `level` and `message`.
+ pub fn new<T: Into<String>>(level: Level, message: T) -> Diagnostic {
+ Diagnostic { level, message: message.into(), spans: vec![], children: vec![] }
+ }
+}
+
+pub struct FreeFunctions;
+
+#[derive(Default)]
+pub struct RustAnalyzer {
+ // FIXME: store span information here.
+}
+
+impl server::Types for RustAnalyzer {
+ type FreeFunctions = FreeFunctions;
+ type TokenStream = TokenStream;
+ type SourceFile = SourceFile;
+ type MultiSpan = Vec<Span>;
+ type Diagnostic = Diagnostic;
+ type Span = Span;
+ type Symbol = Symbol;
+}
+
+impl server::FreeFunctions for RustAnalyzer {
+ fn track_env_var(&mut self, _var: &str, _value: Option<&str>) {
+ // FIXME: track env var accesses
+ // https://github.com/rust-lang/rust/pull/71858
+ }
+ fn track_path(&mut self, _path: &str) {}
+
+ fn literal_from_str(
+ &mut self,
+ s: &str,
+ ) -> Result<bridge::Literal<Self::Span, Self::Symbol>, ()> {
+ // FIXME: keep track of LitKind and Suffix
+ Ok(bridge::Literal {
+ kind: bridge::LitKind::Err,
+ symbol: Symbol::intern(s),
+ suffix: None,
+ span: tt::TokenId::unspecified(),
+ })
+ }
+}
+
+impl server::TokenStream for RustAnalyzer {
+ fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
+ stream.is_empty()
+ }
+ fn from_str(&mut self, src: &str) -> Self::TokenStream {
+ use std::str::FromStr;
+
+ Self::TokenStream::from_str(src).expect("cannot parse string")
+ }
+ fn to_string(&mut self, stream: &Self::TokenStream) -> String {
+ stream.to_string()
+ }
+ fn from_token_tree(
+ &mut self,
+ tree: bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>,
+ ) -> Self::TokenStream {
+ match tree {
+ bridge::TokenTree::Group(group) => {
+ let group = Group {
+ delimiter: delim_to_internal(group.delimiter),
+ token_trees: match group.stream {
+ Some(stream) => stream.into_iter().collect(),
+ None => Vec::new(),
+ },
+ };
+ let tree = TokenTree::from(group);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+
+ bridge::TokenTree::Ident(ident) => {
+ // FIXME: handle raw idents
+ let text = ident.sym.text();
+ let ident: tt::Ident = tt::Ident { text, id: ident.span };
+ let leaf = tt::Leaf::from(ident);
+ let tree = TokenTree::from(leaf);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+
+ bridge::TokenTree::Literal(literal) => {
+ let literal = LiteralFormatter(literal);
+ let text = literal
+ .with_stringify_parts(|parts| tt::SmolStr::from_iter(parts.iter().copied()));
+
+ let literal = tt::Literal { text, id: literal.0.span };
+ let leaf = tt::Leaf::from(literal);
+ let tree = TokenTree::from(leaf);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+
+ bridge::TokenTree::Punct(p) => {
+ let punct = tt::Punct {
+ char: p.ch as char,
+ spacing: if p.joint { Spacing::Joint } else { Spacing::Alone },
+ id: p.span,
+ };
+ let leaf = tt::Leaf::from(punct);
+ let tree = TokenTree::from(leaf);
+ Self::TokenStream::from_iter(vec![tree])
+ }
+ }
+ }
+
+ fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
+ Ok(self_.clone())
+ }
+
+ fn concat_trees(
+ &mut self,
+ base: Option<Self::TokenStream>,
+ trees: Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>>,
+ ) -> Self::TokenStream {
+ let mut builder = TokenStreamBuilder::new();
+ if let Some(base) = base {
+ builder.push(base);
+ }
+ for tree in trees {
+ builder.push(self.from_token_tree(tree));
+ }
+ builder.build()
+ }
+
+ fn concat_streams(
+ &mut self,
+ base: Option<Self::TokenStream>,
+ streams: Vec<Self::TokenStream>,
+ ) -> Self::TokenStream {
+ let mut builder = TokenStreamBuilder::new();
+ if let Some(base) = base {
+ builder.push(base);
+ }
+ for stream in streams {
+ builder.push(stream);
+ }
+ builder.build()
+ }
+
+ fn into_trees(
+ &mut self,
+ stream: Self::TokenStream,
+ ) -> Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>> {
+ stream
+ .into_iter()
+ .map(|tree| match tree {
+ tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
+ bridge::TokenTree::Ident(bridge::Ident {
+ sym: Symbol::intern(&ident.text),
+ // FIXME: handle raw idents
+ is_raw: false,
+ span: ident.id,
+ })
+ }
+ tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => {
+ bridge::TokenTree::Literal(bridge::Literal {
+ // FIXME: handle literal kinds
+ kind: bridge::LitKind::Err,
+ symbol: Symbol::intern(&lit.text),
+ // FIXME: handle suffixes
+ suffix: None,
+ span: lit.id,
+ })
+ }
+ tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => {
+ bridge::TokenTree::Punct(bridge::Punct {
+ ch: punct.char as u8,
+ joint: punct.spacing == Spacing::Joint,
+ span: punct.id,
+ })
+ }
+ tt::TokenTree::Subtree(subtree) => bridge::TokenTree::Group(bridge::Group {
+ delimiter: delim_to_external(subtree.delimiter),
+ stream: if subtree.token_trees.is_empty() {
+ None
+ } else {
+ Some(subtree.token_trees.into_iter().collect())
+ },
+ span: bridge::DelimSpan::from_single(
+ subtree.delimiter.map_or(Span::unspecified(), |del| del.id),
+ ),
+ }),
+ })
+ .collect()
+ }
+}
+
+fn delim_to_internal(d: proc_macro::Delimiter) -> Option<tt::Delimiter> {
+ let kind = match d {
+ proc_macro::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis,
+ proc_macro::Delimiter::Brace => tt::DelimiterKind::Brace,
+ proc_macro::Delimiter::Bracket => tt::DelimiterKind::Bracket,
+ proc_macro::Delimiter::None => return None,
+ };
+ Some(tt::Delimiter { id: tt::TokenId::unspecified(), kind })
+}
+
+fn delim_to_external(d: Option<tt::Delimiter>) -> proc_macro::Delimiter {
+ match d.map(|it| it.kind) {
+ Some(tt::DelimiterKind::Parenthesis) => proc_macro::Delimiter::Parenthesis,
+ Some(tt::DelimiterKind::Brace) => proc_macro::Delimiter::Brace,
+ Some(tt::DelimiterKind::Bracket) => proc_macro::Delimiter::Bracket,
+ None => proc_macro::Delimiter::None,
+ }
+}
+
+fn spacing_to_internal(spacing: proc_macro::Spacing) -> Spacing {
+ match spacing {
+ proc_macro::Spacing::Alone => Spacing::Alone,
+ proc_macro::Spacing::Joint => Spacing::Joint,
+ }
+}
+
+fn spacing_to_external(spacing: Spacing) -> proc_macro::Spacing {
+ match spacing {
+ Spacing::Alone => proc_macro::Spacing::Alone,
+ Spacing::Joint => proc_macro::Spacing::Joint,
+ }
+}
+
+impl server::SourceFile for RustAnalyzer {
+ // FIXME these are all stubs
+ fn eq(&mut self, _file1: &Self::SourceFile, _file2: &Self::SourceFile) -> bool {
+ true
+ }
+ fn path(&mut self, _file: &Self::SourceFile) -> String {
+ String::new()
+ }
+ fn is_real(&mut self, _file: &Self::SourceFile) -> bool {
+ true
+ }
+}
+
+impl server::Diagnostic for RustAnalyzer {
+ fn new(&mut self, level: Level, msg: &str, spans: Self::MultiSpan) -> Self::Diagnostic {
+ let mut diag = Diagnostic::new(level, msg);
+ diag.spans = spans;
+ diag
+ }
+
+ fn sub(
+ &mut self,
+ _diag: &mut Self::Diagnostic,
+ _level: Level,
+ _msg: &str,
+ _spans: Self::MultiSpan,
+ ) {
+ // FIXME handle diagnostic
+ //
+ }
+
+ fn emit(&mut self, _diag: Self::Diagnostic) {
+ // FIXME handle diagnostic
+ // diag.emit()
+ }
+}
+
+impl server::Span for RustAnalyzer {
+ fn debug(&mut self, span: Self::Span) -> String {
+ format!("{:?}", span.0)
+ }
+ fn source_file(&mut self, _span: Self::Span) -> Self::SourceFile {
+ SourceFile {}
+ }
+ fn save_span(&mut self, _span: Self::Span) -> usize {
+ // FIXME stub
+ 0
+ }
+ fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span {
+ // FIXME stub
+ tt::TokenId::unspecified()
+ }
+ /// A recent feature, not yet part of the stable proc_macro API.
+ ///
+ /// See PR:
+ /// https://github.com/rust-lang/rust/pull/55780
+ fn source_text(&mut self, _span: Self::Span) -> Option<String> {
+ None
+ }
+
+ fn parent(&mut self, _span: Self::Span) -> Option<Self::Span> {
+ // FIXME handle span
+ None
+ }
+ fn source(&mut self, span: Self::Span) -> Self::Span {
+ // FIXME handle span
+ span
+ }
+ fn start(&mut self, _span: Self::Span) -> LineColumn {
+ // FIXME handle span
+ LineColumn { line: 0, column: 0 }
+ }
+ fn end(&mut self, _span: Self::Span) -> LineColumn {
+ // FIXME handle span
+ LineColumn { line: 0, column: 0 }
+ }
+ fn join(&mut self, first: Self::Span, _second: Self::Span) -> Option<Self::Span> {
+ // Just return the first span again, because some macros will unwrap the result.
+ Some(first)
+ }
+ fn subspan(
+ &mut self,
+ span: Self::Span,
+ _start: Bound<usize>,
+ _end: Bound<usize>,
+ ) -> Option<Self::Span> {
+ // Just return the span again, because some macros will unwrap the result.
+ Some(span)
+ }
+ fn resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span {
+ // FIXME handle span
+ tt::TokenId::unspecified()
+ }
+
+ fn after(&mut self, _self_: Self::Span) -> Self::Span {
+ tt::TokenId::unspecified()
+ }
+
+ fn before(&mut self, _self_: Self::Span) -> Self::Span {
+ tt::TokenId::unspecified()
+ }
+}
+
+impl server::MultiSpan for RustAnalyzer {
+ fn new(&mut self) -> Self::MultiSpan {
+ // FIXME handle span
+ vec![]
+ }
+
+ fn push(&mut self, other: &mut Self::MultiSpan, span: Self::Span) {
+ // TODO
+ other.push(span)
+ }
+}
+
+impl server::Symbol for RustAnalyzer {
+ fn normalize_and_validate_ident(&mut self, string: &str) -> Result<Self::Symbol, ()> {
+ // FIXME: nfc-normalize and validate idents
+ Ok(<Self as server::Server>::intern_symbol(string))
+ }
+}
+
+impl server::Server for RustAnalyzer {
+ fn globals(&mut self) -> bridge::ExpnGlobals<Self::Span> {
+ bridge::ExpnGlobals {
+ def_site: Span::unspecified(),
+ call_site: Span::unspecified(),
+ mixed_site: Span::unspecified(),
+ }
+ }
+
+ fn intern_symbol(ident: &str) -> Self::Symbol {
+ Symbol::intern(&tt::SmolStr::from(ident))
+ }
+
+ fn with_symbol_string(symbol: &Self::Symbol, f: impl FnOnce(&str)) {
+ f(symbol.text().as_str())
+ }
+}
+
+struct LiteralFormatter(bridge::Literal<tt::TokenId, Symbol>);
+
+impl LiteralFormatter {
+ /// Invokes the callback with a `&[&str]` consisting of each part of the
+ /// literal's representation. This is done to allow the `ToString` and
+ /// `Display` implementations to borrow references to symbol values, and
+ /// both be optimized to reduce overhead.
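+ ///
+ /// For example, a raw string literal `r#"x"#` (kind `StrRaw(1)`, symbol `x`,
+ /// no suffix) reaches the callback as `["r", "#", "\"", "x", "\"", "#", ""]`,
+ /// the final element being the (empty) suffix.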
+ fn with_stringify_parts<R>(&self, f: impl FnOnce(&[&str]) -> R) -> R {
+ /// Returns a string containing exactly `num` '#' characters.
+ /// Uses a 256-character source string literal which is always safe to
+ /// index with a `u8` index.
+ fn get_hashes_str(num: u8) -> &'static str {
+ const HASHES: &str = "\
+ ################################################################\
+ ################################################################\
+ ################################################################\
+ ################################################################\
+ ";
+ const _: () = assert!(HASHES.len() == 256);
+ &HASHES[..num as usize]
+ }
+
+ self.with_symbol_and_suffix(|symbol, suffix| match self.0.kind {
+ bridge::LitKind::Byte => f(&["b'", symbol, "'", suffix]),
+ bridge::LitKind::Char => f(&["'", symbol, "'", suffix]),
+ bridge::LitKind::Str => f(&["\"", symbol, "\"", suffix]),
+ bridge::LitKind::StrRaw(n) => {
+ let hashes = get_hashes_str(n);
+ f(&["r", hashes, "\"", symbol, "\"", hashes, suffix])
+ }
+ bridge::LitKind::ByteStr => f(&["b\"", symbol, "\"", suffix]),
+ bridge::LitKind::ByteStrRaw(n) => {
+ let hashes = get_hashes_str(n);
+ f(&["br", hashes, "\"", symbol, "\"", hashes, suffix])
+ }
+ _ => f(&[symbol, suffix]),
+ })
+ }
+
+ fn with_symbol_and_suffix<R>(&self, f: impl FnOnce(&str, &str) -> R) -> R {
+ let symbol = self.0.symbol.text();
+ let suffix = self.0.suffix.map(|s| s.text()).unwrap_or_default();
+ f(symbol.as_str(), suffix.as_str())
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn test_ra_server_to_string() {
+ let s = TokenStream {
+ token_trees: vec![
+ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "struct".into(),
+ id: tt::TokenId::unspecified(),
+ })),
+ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "T".into(),
+ id: tt::TokenId::unspecified(),
+ })),
+ tt::TokenTree::Subtree(tt::Subtree {
+ delimiter: Some(tt::Delimiter {
+ id: tt::TokenId::unspecified(),
+ kind: tt::DelimiterKind::Brace,
+ }),
+ token_trees: vec![],
+ }),
+ ],
+ };
+
+ assert_eq!(s.to_string(), "struct T {}");
+ }
+
+ #[test]
+ fn test_ra_server_from_str() {
+ use std::str::FromStr;
+ let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree {
+ delimiter: Some(tt::Delimiter {
+ id: tt::TokenId::unspecified(),
+ kind: tt::DelimiterKind::Parenthesis,
+ }),
+ token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "a".into(),
+ id: tt::TokenId::unspecified(),
+ }))],
+ });
+
+ let t1 = TokenStream::from_str("(a)").unwrap();
+ assert_eq!(t1.token_trees.len(), 1);
+ assert_eq!(t1.token_trees[0], subtree_paren_a);
+
+ let t2 = TokenStream::from_str("(a);").unwrap();
+ assert_eq!(t2.token_trees.len(), 2);
+ assert_eq!(t2.token_trees[0], subtree_paren_a);
+
+ let underscore = TokenStream::from_str("_").unwrap();
+ assert_eq!(
+ underscore.token_trees[0],
+ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: "_".into(),
+ id: tt::TokenId::unspecified(),
+ }))
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server/symbol.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server/symbol.rs
new file mode 100644
index 000000000..51dfba2ea
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server/symbol.rs
@@ -0,0 +1,46 @@
+//! Symbol interner for proc-macro-srv
+
+use std::{cell::RefCell, collections::HashMap};
+use tt::SmolStr;
+
+thread_local! {
+ static SYMBOL_INTERNER: RefCell<SymbolInterner> = Default::default();
+}
+
+// ID for an interned symbol.
+#[derive(Hash, Eq, PartialEq, Copy, Clone)]
+pub struct Symbol(u32);
+
+impl Symbol {
+ pub fn intern(data: &str) -> Symbol {
+ SYMBOL_INTERNER.with(|i| i.borrow_mut().intern(data))
+ }
+
+ pub fn text(&self) -> SmolStr {
+ SYMBOL_INTERNER.with(|i| i.borrow().get(self).clone())
+ }
+}
+
+#[derive(Default)]
+struct SymbolInterner {
+ idents: HashMap<SmolStr, u32>,
+ ident_data: Vec<SmolStr>,
+}
+
+impl SymbolInterner {
+ fn intern(&mut self, data: &str) -> Symbol {
+ if let Some(index) = self.idents.get(data) {
+ return Symbol(*index);
+ }
+
+ let index = self.idents.len() as u32;
+ let data = SmolStr::from(data);
+ self.ident_data.push(data.clone());
+ self.idents.insert(data, index);
+ Symbol(index)
+ }
+
+ fn get(&self, sym: &Symbol) -> &SmolStr {
+ &self.ident_data[sym.0 as usize]
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server/token_stream.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server/token_stream.rs
new file mode 100644
index 000000000..113bb52c1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server/token_stream.rs
@@ -0,0 +1,179 @@
+//! TokenStream implementation used by sysroot ABI
+
+use tt::TokenTree;
+
+#[derive(Debug, Default, Clone)]
+pub struct TokenStream {
+ pub token_trees: Vec<TokenTree>,
+}
+
+impl TokenStream {
+ pub fn new() -> Self {
+ TokenStream::default()
+ }
+
+ pub fn with_subtree(subtree: tt::Subtree) -> Self {
+ if subtree.delimiter.is_some() {
+ TokenStream { token_trees: vec![TokenTree::Subtree(subtree)] }
+ } else {
+ TokenStream { token_trees: subtree.token_trees }
+ }
+ }
+
+ pub fn into_subtree(self) -> tt::Subtree {
+ tt::Subtree { delimiter: None, token_trees: self.token_trees }
+ }
+
+ pub fn is_empty(&self) -> bool {
+ self.token_trees.is_empty()
+ }
+}
+
+/// Creates a token stream containing a single token tree.
+impl From<TokenTree> for TokenStream {
+ fn from(tree: TokenTree) -> TokenStream {
+ TokenStream { token_trees: vec![tree] }
+ }
+}
+
+/// Collects a number of token trees into a single stream.
+impl FromIterator<TokenTree> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
+ trees.into_iter().map(TokenStream::from).collect()
+ }
+}
+
+/// A "flattening" operation on token streams, collects token trees
+/// from multiple token streams into a single stream.
+impl FromIterator<TokenStream> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+ let mut builder = TokenStreamBuilder::new();
+ streams.into_iter().for_each(|stream| builder.push(stream));
+ builder.build()
+ }
+}
+
+impl Extend<TokenTree> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, trees: I) {
+ self.extend(trees.into_iter().map(TokenStream::from));
+ }
+}
+
+impl Extend<TokenStream> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+ for item in streams {
+ for tkn in item {
+ match tkn {
+ tt::TokenTree::Subtree(subtree) if subtree.delimiter.is_none() => {
+ self.token_trees.extend(subtree.token_trees);
+ }
+ _ => {
+ self.token_trees.push(tkn);
+ }
+ }
+ }
+ }
+ }
+}
+
+pub struct TokenStreamBuilder {
+ acc: TokenStream,
+}
+
+/// Public implementation details for the `TokenStream` type, such as iterators.
+pub mod token_stream {
+ use std::str::FromStr;
+
+ use super::{TokenStream, TokenTree};
+
+ /// An iterator over `TokenStream`'s `TokenTree`s.
+ /// The iteration is "shallow", i.e., the iterator doesn't recurse into delimited groups,
+ /// and returns whole groups as token trees.
+ impl IntoIterator for TokenStream {
+ type Item = TokenTree;
+ type IntoIter = std::vec::IntoIter<TokenTree>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ self.token_trees.into_iter()
+ }
+ }
+
+ type LexError = String;
+
+ /// Attempts to break the string into tokens and parse those tokens into a token stream.
+ /// May fail for a number of reasons, for example, if the string contains unbalanced delimiters
+ /// or characters not existing in the language.
+ /// All tokens in the parsed stream get `Span::call_site()` spans.
+ ///
+ /// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
+ /// change these errors into `LexError`s later.
+ impl FromStr for TokenStream {
+ type Err = LexError;
+
+ fn from_str(src: &str) -> Result<TokenStream, LexError> {
+ let (subtree, _token_map) =
+ mbe::parse_to_token_tree(src).ok_or("Failed to parse from mbe")?;
+
+ let subtree = subtree_replace_token_ids_with_unspecified(subtree);
+ Ok(TokenStream::with_subtree(subtree))
+ }
+ }
+
+ impl ToString for TokenStream {
+ fn to_string(&self) -> String {
+ tt::pretty(&self.token_trees)
+ }
+ }
+
+ fn subtree_replace_token_ids_with_unspecified(subtree: tt::Subtree) -> tt::Subtree {
+ tt::Subtree {
+ delimiter: subtree
+ .delimiter
+ .map(|d| tt::Delimiter { id: tt::TokenId::unspecified(), ..d }),
+ token_trees: subtree
+ .token_trees
+ .into_iter()
+ .map(token_tree_replace_token_ids_with_unspecified)
+ .collect(),
+ }
+ }
+
+ fn token_tree_replace_token_ids_with_unspecified(tt: tt::TokenTree) -> tt::TokenTree {
+ match tt {
+ tt::TokenTree::Leaf(leaf) => {
+ tt::TokenTree::Leaf(leaf_replace_token_ids_with_unspecified(leaf))
+ }
+ tt::TokenTree::Subtree(subtree) => {
+ tt::TokenTree::Subtree(subtree_replace_token_ids_with_unspecified(subtree))
+ }
+ }
+ }
+
+ fn leaf_replace_token_ids_with_unspecified(leaf: tt::Leaf) -> tt::Leaf {
+ match leaf {
+ tt::Leaf::Literal(lit) => {
+ tt::Leaf::Literal(tt::Literal { id: tt::TokenId::unspecified(), ..lit })
+ }
+ tt::Leaf::Punct(punct) => {
+ tt::Leaf::Punct(tt::Punct { id: tt::TokenId::unspecified(), ..punct })
+ }
+ tt::Leaf::Ident(ident) => {
+ tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), ..ident })
+ }
+ }
+ }
+}
+
+impl TokenStreamBuilder {
+ pub(super) fn new() -> TokenStreamBuilder {
+ TokenStreamBuilder { acc: TokenStream::new() }
+ }
+
+ pub(super) fn push(&mut self, stream: TokenStream) {
+ self.acc.extend(stream.into_iter())
+ }
+
+ pub(super) fn build(self) -> TokenStream {
+ self.acc
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/mod.rs
new file mode 100644
index 000000000..bcf3f1184
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/mod.rs
@@ -0,0 +1,155 @@
+//! Procedural macros are implemented by compiling the macro providing crate
+//! to a dynamic library with a particular ABI which the compiler uses to expand
+//! macros. Unfortunately this ABI is not specified and can change from version
+//! to version of the compiler. To support this we copy the ABI from the Rust
+//! compiler into submodules of this module (e.g. `proc_macro_srv::abis::abi_1_47`).
+//!
+//! All of these ABIs are subsumed in the `Abi` enum, which exposes a simple
+//! interface that the rest of rust-analyzer can use to talk to the macro
+//! provider.
+//!
+//! # Adding a new ABI
+//!
+//! To add a new ABI you'll need to copy the source of the target proc_macro
+//! crate from the source tree of the Rust compiler into this directory tree.
+//! Then you'll need to modify it:
+//! - Remove any `feature!` invocations or other things which won't compile on stable
+//! - Change any absolute imports to relative imports within the ABI tree
+//!
+//! Then you'll need to add a branch to the `Abi` enum and an implementation of
+//! `Abi::expand`, `Abi::list_macros` and `Abi::from_lib` for the new ABI. See
+//! `proc_macro_srv/src/abis/abi_1_47/mod.rs` for an example. Finally you'll
+//! need to update the conditionals in `Abi::from_lib` to return your new ABI
+//! for the relevant versions of the Rust compiler.
+//!
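+//! As an illustrative sketch only (the `abi_1_65` module and the `1.65` version
+//! numbers below are hypothetical, not something this crate ships), the wiring
+//! for a new ABI ends up looking roughly like this:
+//!
+//! ```ignore
+//! mod abi_1_65;
+//! pub(crate) use abi_1_65::Abi as Abi_1_65;
+//!
+//! pub(crate) enum Abi {
+//!     // ... existing variants ...
+//!     Abi1_65(Abi_1_65),
+//! }
+//!
+//! // and inside `Abi::from_lib`:
+//! match (info.version.0, info.version.1) {
+//!     // ... existing arms ...
+//!     (1, 65..) => {
+//!         let inner = unsafe { Abi_1_65::from_lib(lib, symbol_name) }?;
+//!         Ok(Abi::Abi1_65(inner))
+//!     }
+//!     _ => Err(LoadProcMacroDylibError::UnsupportedABI),
+//! }
+//! ```
+//!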
+
+mod abi_1_58;
+mod abi_1_63;
+mod abi_1_64;
+#[cfg(feature = "sysroot-abi")]
+mod abi_sysroot;
+
+// see `build.rs`
+include!(concat!(env!("OUT_DIR"), "/rustc_version.rs"));
+
+// Used by `tests/utils.rs`
+#[cfg(test)]
+pub(crate) use abi_1_64::TokenStream as TestTokenStream;
+
+use super::dylib::LoadProcMacroDylibError;
+pub(crate) use abi_1_58::Abi as Abi_1_58;
+pub(crate) use abi_1_63::Abi as Abi_1_63;
+pub(crate) use abi_1_64::Abi as Abi_1_64;
+#[cfg(feature = "sysroot-abi")]
+pub(crate) use abi_sysroot::Abi as Abi_Sysroot;
+use libloading::Library;
+use proc_macro_api::{ProcMacroKind, RustCInfo};
+
+pub struct PanicMessage {
+ message: Option<String>,
+}
+
+impl PanicMessage {
+ pub fn as_str(&self) -> Option<String> {
+ self.message.clone()
+ }
+}
+
+pub(crate) enum Abi {
+ Abi1_58(Abi_1_58),
+ Abi1_63(Abi_1_63),
+ Abi1_64(Abi_1_64),
+ #[cfg(feature = "sysroot-abi")]
+ AbiSysroot(Abi_Sysroot),
+}
+
+impl Abi {
+ /// Load a new ABI.
+ ///
+ /// # Arguments
+ ///
+ /// * `lib` - The dynamic library containing the macro implementations
+ /// * `symbol_name` - The symbol name under which the macros can be found
+ /// * `info` - RustCInfo about the compiler that was used to compile the
+ /// macro crate. This is the information we use to figure out
+ /// which ABI to return
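+ ///
+ /// A hedged usage sketch (mirroring how `dylib.rs` calls this; the variable
+ /// names here are illustrative, not an additional API):
+ ///
+ /// ```ignore
+ /// let symbol_name = find_registrar_symbol(path)?.unwrap();
+ /// let info = proc_macro_api::read_dylib_info(abs_path)?;
+ /// let lib = load_library(path)?;
+ /// let abi = Abi::from_lib(&lib, symbol_name, info)?;
+ /// ```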
+ pub fn from_lib(
+ lib: &Library,
+ symbol_name: String,
+ info: RustCInfo,
+ ) -> Result<Abi, LoadProcMacroDylibError> {
+ // the sysroot ABI relies on `extern proc_macro` with unstable features,
+ // instead of a snapshot of the proc macro bridge's source code. it's only
+ // enabled if we have an exact version match.
+ #[cfg(feature = "sysroot-abi")]
+ {
+ if info.version_string == RUSTC_VERSION_STRING {
+ let inner = unsafe { Abi_Sysroot::from_lib(lib, symbol_name) }?;
+ return Ok(Abi::AbiSysroot(inner));
+ }
+
+ // if we reached this point, versions didn't match. in testing, we
+ // want that to panic - this could mean that the format of `rustc
+ // --version` no longer matches the format of the version string
+ // stored in the `.rustc` section, and we want to catch that in-tree
+ // with `x.py test`
+ #[cfg(test)]
+ {
+ let allow_mismatch = std::env::var("PROC_MACRO_SRV_ALLOW_SYSROOT_MISMATCH");
+ if let Ok("1") = allow_mismatch.as_deref() {
+ // only used by rust-analyzer developers, when working on the
+ // sysroot ABI from the rust-analyzer repository - which should
+ // only happen pre-subtree. this can be removed later.
+ } else {
+ panic!(
+ "sysroot ABI mismatch: dylib rustc version (read from .rustc section): {:?} != proc-macro-srv version (read from 'rustc --version'): {:?}",
+ info.version_string, RUSTC_VERSION_STRING
+ );
+ }
+ }
+ }
+
+ // FIXME: this should use exclusive ranges when they're stable
+ // https://github.com/rust-lang/rust/issues/37854
+ match (info.version.0, info.version.1) {
+ (1, 58..=62) => {
+ let inner = unsafe { Abi_1_58::from_lib(lib, symbol_name) }?;
+ Ok(Abi::Abi1_58(inner))
+ }
+ (1, 63) => {
+ let inner = unsafe { Abi_1_63::from_lib(lib, symbol_name) }?;
+ Ok(Abi::Abi1_63(inner))
+ }
+ (1, 64..) => {
+ let inner = unsafe { Abi_1_64::from_lib(lib, symbol_name) }?;
+ Ok(Abi::Abi1_64(inner))
+ }
+ _ => Err(LoadProcMacroDylibError::UnsupportedABI),
+ }
+ }
+
+ pub fn expand(
+ &self,
+ macro_name: &str,
+ macro_body: &tt::Subtree,
+ attributes: Option<&tt::Subtree>,
+ ) -> Result<tt::Subtree, PanicMessage> {
+ match self {
+ Self::Abi1_58(abi) => abi.expand(macro_name, macro_body, attributes),
+ Self::Abi1_63(abi) => abi.expand(macro_name, macro_body, attributes),
+ Self::Abi1_64(abi) => abi.expand(macro_name, macro_body, attributes),
+ #[cfg(feature = "sysroot-abi")]
+ Self::AbiSysroot(abi) => abi.expand(macro_name, macro_body, attributes),
+ }
+ }
+
+ pub fn list_macros(&self) -> Vec<(String, ProcMacroKind)> {
+ match self {
+ Self::Abi1_58(abi) => abi.list_macros(),
+ Self::Abi1_63(abi) => abi.list_macros(),
+ Self::Abi1_64(abi) => abi.list_macros(),
+ #[cfg(feature = "sysroot-abi")]
+ Self::AbiSysroot(abi) => abi.list_macros(),
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/cli.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/cli.rs
new file mode 100644
index 000000000..f1e131c13
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/cli.rs
@@ -0,0 +1,31 @@
+//! Driver for proc macro server
+use std::io;
+
+use proc_macro_api::msg::{self, Message};
+
+use crate::ProcMacroSrv;
+
+pub fn run() -> io::Result<()> {
+ let mut srv = ProcMacroSrv::default();
+ let mut buf = String::new();
+
+ while let Some(req) = read_request(&mut buf)? {
+ let res = match req {
+ msg::Request::ListMacros { dylib_path } => {
+ msg::Response::ListMacros(srv.list_macros(&dylib_path))
+ }
+ msg::Request::ExpandMacro(task) => msg::Response::ExpandMacro(srv.expand(task)),
+ };
+ write_response(res)?
+ }
+
+ Ok(())
+}
+
+fn read_request(buf: &mut String) -> io::Result<Option<msg::Request>> {
+ msg::Request::read(&mut io::stdin().lock(), buf)
+}
+
+fn write_response(msg: msg::Response) -> io::Result<()> {
+ msg.write(&mut io::stdout().lock())
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs
new file mode 100644
index 000000000..2b6c070fe
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs
@@ -0,0 +1,199 @@
+//! Handles dynamic library loading for proc macro
+
+use std::{
+ convert::TryInto,
+ fmt,
+ fs::File,
+ io,
+ path::{Path, PathBuf},
+};
+
+use libloading::Library;
+use memmap2::Mmap;
+use object::Object;
+use paths::AbsPath;
+use proc_macro_api::{read_dylib_info, ProcMacroKind};
+
+use super::abis::Abi;
+
+const NEW_REGISTRAR_SYMBOL: &str = "_rustc_proc_macro_decls_";
+
+fn invalid_data_err(e: impl Into<Box<dyn std::error::Error + Send + Sync>>) -> io::Error {
+ io::Error::new(io::ErrorKind::InvalidData, e)
+}
+
+fn is_derive_registrar_symbol(symbol: &str) -> bool {
+ symbol.contains(NEW_REGISTRAR_SYMBOL)
+}
+
+fn find_registrar_symbol(file: &Path) -> io::Result<Option<String>> {
+ let file = File::open(file)?;
+ let buffer = unsafe { Mmap::map(&file)? };
+
+ Ok(object::File::parse(&*buffer)
+ .map_err(invalid_data_err)?
+ .exports()
+ .map_err(invalid_data_err)?
+ .into_iter()
+ .map(|export| export.name())
+ .filter_map(|sym| String::from_utf8(sym.into()).ok())
+ .find(|sym| is_derive_registrar_symbol(sym))
+ .map(|sym| {
+ // From MacOS docs:
+ // https://developer.apple.com/library/archive/documentation/System/Conceptual/ManPages_iPhoneOS/man3/dlsym.3.html
+ // Unlike other dyld API's, the symbol name passed to dlsym() must NOT be
+ // prepended with an underscore.
+ if cfg!(target_os = "macos") && sym.starts_with('_') {
+ sym[1..].to_owned()
+ } else {
+ sym
+ }
+ }))
+}
+
+/// Loads a dynamic library in a platform-dependent manner.
+///
+/// On unix, the RTLD_DEEPBIND flag is needed to avoid the problems described
+/// [here](https://github.com/fedochet/rust-proc-macro-panic-inside-panic-expample)
+/// and [here](https://github.com/rust-lang/rust/issues/60593).
+///
+/// An example of using RTLD_DEEPBIND is shown
+/// [here](https://github.com/fedochet/rust-proc-macro-panic-inside-panic-expample/issues/1).
+///
+/// On Windows this behaviour seems to be the default, so we do nothing in that case.
+#[cfg(windows)]
+fn load_library(file: &Path) -> Result<Library, libloading::Error> {
+ unsafe { Library::new(file) }
+}
+
+#[cfg(unix)]
+fn load_library(file: &Path) -> Result<Library, libloading::Error> {
+ use libloading::os::unix::Library as UnixLibrary;
+ use std::os::raw::c_int;
+
+ const RTLD_NOW: c_int = 0x00002;
+ const RTLD_DEEPBIND: c_int = 0x00008;
+
+ unsafe { UnixLibrary::open(Some(file), RTLD_NOW | RTLD_DEEPBIND).map(|lib| lib.into()) }
+}
+
+#[derive(Debug)]
+pub enum LoadProcMacroDylibError {
+ Io(io::Error),
+ LibLoading(libloading::Error),
+ UnsupportedABI,
+}
+
+impl fmt::Display for LoadProcMacroDylibError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ Self::Io(e) => e.fmt(f),
+ Self::UnsupportedABI => write!(f, "unsupported ABI version"),
+ Self::LibLoading(e) => e.fmt(f),
+ }
+ }
+}
+
+impl From<io::Error> for LoadProcMacroDylibError {
+ fn from(e: io::Error) -> Self {
+ LoadProcMacroDylibError::Io(e)
+ }
+}
+
+impl From<libloading::Error> for LoadProcMacroDylibError {
+ fn from(e: libloading::Error) -> Self {
+ LoadProcMacroDylibError::LibLoading(e)
+ }
+}
+
+struct ProcMacroLibraryLibloading {
+ // Hold on to the library so it doesn't unload
+ _lib: Library,
+ abi: Abi,
+}
+
+impl ProcMacroLibraryLibloading {
+ fn open(file: &Path) -> Result<Self, LoadProcMacroDylibError> {
+ let symbol_name = find_registrar_symbol(file)?.ok_or_else(|| {
+ invalid_data_err(format!("Cannot find registrar symbol in file {}", file.display()))
+ })?;
+
+ let abs_file: &AbsPath = file.try_into().map_err(|_| {
+ invalid_data_err(format!("expected an absolute path, got {}", file.display()))
+ })?;
+ let version_info = read_dylib_info(abs_file)?;
+
+ let lib = load_library(file).map_err(invalid_data_err)?;
+ let abi = Abi::from_lib(&lib, symbol_name, version_info)?;
+ Ok(ProcMacroLibraryLibloading { _lib: lib, abi })
+ }
+}
+
+pub struct Expander {
+ inner: ProcMacroLibraryLibloading,
+}
+
+impl Expander {
+ pub fn new(lib: &Path) -> Result<Expander, LoadProcMacroDylibError> {
+ // Some libraries for dynamic loading require a canonicalized path even when it is
+ // already absolute.
+ let lib = lib.canonicalize()?;
+
+ let lib = ensure_file_with_lock_free_access(&lib)?;
+
+ let library = ProcMacroLibraryLibloading::open(lib.as_ref())?;
+
+ Ok(Expander { inner: library })
+ }
+
+ pub fn expand(
+ &self,
+ macro_name: &str,
+ macro_body: &tt::Subtree,
+ attributes: Option<&tt::Subtree>,
+ ) -> Result<tt::Subtree, String> {
+ let result = self.inner.abi.expand(macro_name, macro_body, attributes);
+ result.map_err(|e| e.as_str().unwrap_or_else(|| "<unknown error>".to_string()))
+ }
+
+ pub fn list_macros(&self) -> Vec<(String, ProcMacroKind)> {
+ self.inner.abi.list_macros()
+ }
+}
+
+/// Copy the dylib to a temp directory to prevent file locking on Windows
+#[cfg(windows)]
+fn ensure_file_with_lock_free_access(path: &Path) -> io::Result<PathBuf> {
+ use std::collections::hash_map::RandomState;
+ use std::ffi::OsString;
+ use std::hash::{BuildHasher, Hasher};
+
+ if std::env::var("RA_DONT_COPY_PROC_MACRO_DLL").is_ok() {
+ return Ok(path.to_path_buf());
+ }
+
+ let mut to = std::env::temp_dir();
+
+ let file_name = path.file_name().ok_or_else(|| {
+ io::Error::new(
+ io::ErrorKind::InvalidInput,
+ format!("File path is invalid: {}", path.display()),
+ )
+ })?;
+
+ // Generate a unique number by abusing `HashMap`'s hasher.
+ // Maybe this will also "inspire" a libs team member to finally put `rand` in libstd.
+ let t = RandomState::new().build_hasher().finish();
+
+ let mut unique_name = OsString::from(t.to_string());
+ unique_name.push(file_name);
+
+ to.push(unique_name);
+ std::fs::copy(path, &to).unwrap();
+ Ok(to)
+}
+
+#[cfg(unix)]
+fn ensure_file_with_lock_free_access(path: &Path) -> io::Result<PathBuf> {
+ Ok(path.to_path_buf())
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs
new file mode 100644
index 000000000..4c205b9ca
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs
@@ -0,0 +1,160 @@
+//! RA Proc Macro Server
+//!
+//! This library is able to call compiled Rust custom derive dynamic libraries on arbitrary code.
+//! The general idea here is based on <https://github.com/fedochet/rust-proc-macro-expander>.
+//!
+//! But we adapt it to better fit RA needs:
+//!
+//! * We use `tt` for the proc-macro `TokenStream` server, as it is easier to manipulate and
+//!   interact with from RA than the `proc-macro2` token stream.
+//! * By **copying** the whole rustc `lib_proc_macro` code, we are able to build this with `stable`
+//! rustc rather than `unstable`. (Although in general ABI compatibility is still an issue)…
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![cfg_attr(
+ feature = "sysroot-abi",
+ feature(proc_macro_internals, proc_macro_diagnostic, proc_macro_span)
+)]
+#![allow(unreachable_pub)]
+
+mod dylib;
+mod abis;
+
+use std::{
+ collections::{hash_map::Entry, HashMap},
+ env,
+ ffi::OsString,
+ fs,
+ path::{Path, PathBuf},
+ time::SystemTime,
+};
+
+use proc_macro_api::{
+ msg::{ExpandMacro, FlatTree, PanicMessage},
+ ProcMacroKind,
+};
+
+#[derive(Default)]
+pub(crate) struct ProcMacroSrv {
+ expanders: HashMap<(PathBuf, SystemTime), dylib::Expander>,
+}
+
+const EXPANDER_STACK_SIZE: usize = 8 * 1024 * 1024;
+
+impl ProcMacroSrv {
+ pub fn expand(&mut self, task: ExpandMacro) -> Result<FlatTree, PanicMessage> {
+ let expander = self.expander(task.lib.as_ref()).map_err(|err| {
+ debug_assert!(false, "should list macros before asking to expand");
+ PanicMessage(format!("failed to load macro: {}", err))
+ })?;
+
+ let prev_env = EnvSnapshot::new();
+ for (k, v) in &task.env {
+ env::set_var(k, v);
+ }
+ let prev_working_dir = match task.current_dir {
+ Some(dir) => {
+ let prev_working_dir = std::env::current_dir().ok();
+ if let Err(err) = std::env::set_current_dir(&dir) {
+ eprintln!("Failed to set the current working dir to {}. Error: {:?}", dir, err)
+ }
+ prev_working_dir
+ }
+ None => None,
+ };
+
+ let macro_body = task.macro_body.to_subtree();
+ let attributes = task.attributes.map(|it| it.to_subtree());
+ // FIXME: replace this with std's scoped threads once they stabilize
+ // (then remove dependency on crossbeam)
+ let result = crossbeam::scope(|s| {
+ let res = match s
+ .builder()
+ .stack_size(EXPANDER_STACK_SIZE)
+ .name(task.macro_name.clone())
+ .spawn(|_| {
+ expander
+ .expand(&task.macro_name, &macro_body, attributes.as_ref())
+ .map(|it| FlatTree::new(&it))
+ }) {
+ Ok(handle) => handle.join(),
+ Err(e) => std::panic::resume_unwind(Box::new(e)),
+ };
+
+ match res {
+ Ok(res) => res,
+ Err(e) => std::panic::resume_unwind(e),
+ }
+ });
+ let result = match result {
+ Ok(result) => result,
+ Err(e) => std::panic::resume_unwind(e),
+ };
+
+ prev_env.rollback();
+
+ if let Some(dir) = prev_working_dir {
+ if let Err(err) = std::env::set_current_dir(&dir) {
+ eprintln!(
+ "Failed to set the current working dir to {}. Error: {:?}",
+ dir.display(),
+ err
+ )
+ }
+ }
+
+ result.map_err(PanicMessage)
+ }
+
+ pub(crate) fn list_macros(
+ &mut self,
+ dylib_path: &Path,
+ ) -> Result<Vec<(String, ProcMacroKind)>, String> {
+ let expander = self.expander(dylib_path)?;
+ Ok(expander.list_macros())
+ }
+
+ fn expander(&mut self, path: &Path) -> Result<&dylib::Expander, String> {
+ let time = fs::metadata(path).and_then(|it| it.modified()).map_err(|err| {
+ format!("Failed to get file metadata for {}: {:?}", path.display(), err)
+ })?;
+
+ Ok(match self.expanders.entry((path.to_path_buf(), time)) {
+ Entry::Vacant(v) => v.insert(dylib::Expander::new(path).map_err(|err| {
+ format!("Cannot create expander for {}: {:?}", path.display(), err)
+ })?),
+ Entry::Occupied(e) => e.into_mut(),
+ })
+ }
+}
+
+struct EnvSnapshot {
+ vars: HashMap<OsString, OsString>,
+}
+
+impl EnvSnapshot {
+ fn new() -> EnvSnapshot {
+ EnvSnapshot { vars: env::vars_os().collect() }
+ }
+
+ fn rollback(self) {
+ let mut old_vars = self.vars;
+ for (name, value) in env::vars_os() {
+ let old_value = old_vars.remove(&name);
+ if old_value != Some(value) {
+ match old_value {
+ None => env::remove_var(name),
+ Some(old_value) => env::set_var(name, old_value),
+ }
+ }
+ }
+ for (name, old_value) in old_vars {
+ env::set_var(name, old_value)
+ }
+ }
+}
+
+pub mod cli;
+
+#[cfg(test)]
+mod tests;
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs
new file mode 100644
index 000000000..07222907f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs
@@ -0,0 +1,166 @@
+//! proc-macro tests
+
+#[macro_use]
+mod utils;
+use expect_test::expect;
+use paths::AbsPathBuf;
+use utils::*;
+
+#[test]
+fn test_derive_empty() {
+ assert_expand("DeriveEmpty", r#"struct S;"#, expect![[r#"SUBTREE $"#]]);
+}
+
+#[test]
+fn test_derive_error() {
+ assert_expand(
+ "DeriveError",
+ r#"struct S;"#,
+ expect![[r##"
+ SUBTREE $
+ IDENT compile_error 4294967295
+ PUNCH ! [alone] 4294967295
+ SUBTREE () 4294967295
+ LITERAL "#[derive(DeriveError)] struct S ;" 4294967295
+ PUNCH ; [alone] 4294967295"##]],
+ );
+}
+
+#[test]
+fn test_fn_like_macro_noop() {
+ assert_expand(
+ "fn_like_noop",
+ r#"ident, 0, 1, []"#,
+ expect![[r#"
+ SUBTREE $
+ IDENT ident 4294967295
+ PUNCH , [alone] 4294967295
+ LITERAL 0 4294967295
+ PUNCH , [alone] 4294967295
+ LITERAL 1 4294967295
+ PUNCH , [alone] 4294967295
+ SUBTREE [] 4294967295"#]],
+ );
+}
+
+#[test]
+fn test_fn_like_macro_clone_ident_subtree() {
+ assert_expand(
+ "fn_like_clone_tokens",
+ r#"ident, []"#,
+ expect![[r#"
+ SUBTREE $
+ IDENT ident 4294967295
+ PUNCH , [alone] 4294967295
+ SUBTREE [] 4294967295"#]],
+ );
+}
+
+#[test]
+fn test_fn_like_macro_clone_raw_ident() {
+ assert_expand(
+ "fn_like_clone_tokens",
+ "r#async",
+ expect![[r#"
+ SUBTREE $
+ IDENT async 4294967295"#]],
+ );
+}
+
+#[test]
+fn test_fn_like_mk_literals() {
+ assert_expand(
+ "fn_like_mk_literals",
+ r#""#,
+ expect![[r#"
+ SUBTREE $
+ LITERAL b"byte_string" 4294967295
+ LITERAL 'c' 4294967295
+ LITERAL "string" 4294967295
+ LITERAL 3.14f64 4294967295
+ LITERAL 3.14 4294967295
+ LITERAL 123i64 4294967295
+ LITERAL 123 4294967295"#]],
+ );
+}
+
+#[test]
+fn test_fn_like_mk_idents() {
+ // FIXME: this test is wrong: raw should be 'r#raw' but ABIs 1.64 and below
+ // simply ignore `is_raw` when implementing the `Ident` interface.
+ assert_expand(
+ "fn_like_mk_idents",
+ r#""#,
+ expect![[r#"
+ SUBTREE $
+ IDENT standard 4294967295
+ IDENT raw 4294967295"#]],
+ );
+}
+
+#[test]
+fn test_fn_like_macro_clone_literals() {
+ assert_expand(
+ "fn_like_clone_tokens",
+ r#"1u16, 2_u32, -4i64, 3.14f32, "hello bridge""#,
+ expect![[r#"
+ SUBTREE $
+ LITERAL 1u16 4294967295
+ PUNCH , [alone] 4294967295
+ LITERAL 2_u32 4294967295
+ PUNCH , [alone] 4294967295
+ PUNCH - [alone] 4294967295
+ LITERAL 4i64 4294967295
+ PUNCH , [alone] 4294967295
+ LITERAL 3.14f32 4294967295
+ PUNCH , [alone] 4294967295
+ LITERAL "hello bridge" 4294967295"#]],
+ );
+}
+
+#[test]
+fn test_attr_macro() {
+ // Corresponds to
+ // #[proc_macro_test::attr_error(some arguments)]
+ // mod m {}
+ assert_expand_attr(
+ "attr_error",
+ r#"mod m {}"#,
+ r#"some arguments"#,
+ expect![[r##"
+ SUBTREE $
+ IDENT compile_error 4294967295
+ PUNCH ! [alone] 4294967295
+ SUBTREE () 4294967295
+ LITERAL "#[attr_error(some arguments)] mod m {}" 4294967295
+ PUNCH ; [alone] 4294967295"##]],
+ );
+}
+
+/// Tests that we find and classify all proc macros correctly.
+#[test]
+fn list_test_macros() {
+ let res = list().join("\n");
+
+ expect![[r#"
+ fn_like_noop [FuncLike]
+ fn_like_panic [FuncLike]
+ fn_like_error [FuncLike]
+ fn_like_clone_tokens [FuncLike]
+ fn_like_mk_literals [FuncLike]
+ fn_like_mk_idents [FuncLike]
+ attr_noop [Attr]
+ attr_panic [Attr]
+ attr_error [Attr]
+ DeriveEmpty [CustomDerive]
+ DerivePanic [CustomDerive]
+ DeriveError [CustomDerive]"#]]
+ .assert_eq(&res);
+}
+
+#[test]
+fn test_version_check() {
+ let path = AbsPathBuf::assert(fixtures::proc_macro_test_dylib_path());
+ let info = proc_macro_api::read_dylib_info(&path).unwrap();
+ assert!(info.version.1 >= 50);
+}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs
new file mode 100644
index 000000000..f881fe868
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs
@@ -0,0 +1,47 @@
+//! utils used in proc-macro tests
+
+use crate::dylib;
+use crate::ProcMacroSrv;
+use expect_test::Expect;
+use std::str::FromStr;
+
+pub mod fixtures {
+ pub fn proc_macro_test_dylib_path() -> std::path::PathBuf {
+ proc_macro_test::PROC_MACRO_TEST_LOCATION.into()
+ }
+}
+
+fn parse_string(code: &str) -> Option<crate::abis::TestTokenStream> {
+ // This is a bit strange. We need to parse a string into a token stream in
+ // order to create a tt::Subtree from it in fixtures. `into_subtree` is
+ // implemented by all the ABIs we have so we arbitrarily choose one ABI to
+ // write a `parse_string` function for and use that. The tests don't really
+ // care which ABI we're using as the `into_subtree` function isn't part of
+ // the ABI and shouldn't change between ABI versions.
+ crate::abis::TestTokenStream::from_str(code).ok()
+}
+
+pub fn assert_expand(macro_name: &str, ra_fixture: &str, expect: Expect) {
+ assert_expand_impl(macro_name, ra_fixture, None, expect);
+}
+
+pub fn assert_expand_attr(macro_name: &str, ra_fixture: &str, attr_args: &str, expect: Expect) {
+ assert_expand_impl(macro_name, ra_fixture, Some(attr_args), expect);
+}
+
+fn assert_expand_impl(macro_name: &str, input: &str, attr: Option<&str>, expect: Expect) {
+ let path = fixtures::proc_macro_test_dylib_path();
+ let expander = dylib::Expander::new(&path).unwrap();
+ let fixture = parse_string(input).unwrap();
+ let attr = attr.map(|attr| parse_string(attr).unwrap().into_subtree());
+
+ let res = expander.expand(macro_name, &fixture.into_subtree(), attr.as_ref()).unwrap();
+ expect.assert_eq(&format!("{:?}", res));
+}
+
+pub(crate) fn list() -> Vec<String> {
+ let dylib_path = fixtures::proc_macro_test_dylib_path();
+ let mut srv = ProcMacroSrv::default();
+ let res = srv.list_macros(&dylib_path).unwrap();
+ res.into_iter().map(|(name, kind)| format!("{} [{:?}]", name, kind)).collect()
+}