path: root/rust/macros
author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-11 08:27:49 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-11 08:27:49 +0000
commit     ace9429bb58fd418f0c81d4c2835699bddf6bde6 (patch)
tree       b2d64bc10158fdd5497876388cd68142ca374ed3 /rust/macros
parent     Initial commit. (diff)
download   linux-ace9429bb58fd418f0c81d4c2835699bddf6bde6.tar.xz
           linux-ace9429bb58fd418f0c81d4c2835699bddf6bde6.zip
Adding upstream version 6.6.15. (upstream/6.6.15)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'rust/macros')
-rw-r--r--  rust/macros/concat_idents.rs  |  23
-rw-r--r--  rust/macros/helpers.rs        | 155
-rw-r--r--  rust/macros/lib.rs            | 365
-rw-r--r--  rust/macros/module.rs         | 302
-rw-r--r--  rust/macros/paste.rs          |  96
-rw-r--r--  rust/macros/pin_data.rs       | 127
-rw-r--r--  rust/macros/pinned_drop.rs    |  49
-rw-r--r--  rust/macros/quote.rs          | 157
-rw-r--r--  rust/macros/vtable.rs         |  96
-rw-r--r--  rust/macros/zeroable.rs       |  72
10 files changed, 1442 insertions, 0 deletions
diff --git a/rust/macros/concat_idents.rs b/rust/macros/concat_idents.rs
new file mode 100644
index 0000000000..7e4b450f3a
--- /dev/null
+++ b/rust/macros/concat_idents.rs
@@ -0,0 +1,23 @@
+// SPDX-License-Identifier: GPL-2.0
+
+use proc_macro::{token_stream, Ident, TokenStream, TokenTree};
+
+use crate::helpers::expect_punct;
+
+fn expect_ident(it: &mut token_stream::IntoIter) -> Ident {
+ if let Some(TokenTree::Ident(ident)) = it.next() {
+ ident
+ } else {
+ panic!("Expected Ident")
+ }
+}
+
+pub(crate) fn concat_idents(ts: TokenStream) -> TokenStream {
+ let mut it = ts.into_iter();
+ let a = expect_ident(&mut it);
+ assert_eq!(expect_punct(&mut it), ',');
+ let b = expect_ident(&mut it);
+ assert!(it.next().is_none(), "only two idents can be concatenated");
+ let res = Ident::new(&format!("{a}{b}"), b.span());
+ TokenStream::from_iter([TokenTree::Ident(res)])
+}
diff --git a/rust/macros/helpers.rs b/rust/macros/helpers.rs
new file mode 100644
index 0000000000..afb0f2e3a3
--- /dev/null
+++ b/rust/macros/helpers.rs
@@ -0,0 +1,155 @@
+// SPDX-License-Identifier: GPL-2.0
+
+use proc_macro::{token_stream, Group, Punct, Spacing, TokenStream, TokenTree};
+
+pub(crate) fn try_ident(it: &mut token_stream::IntoIter) -> Option<String> {
+ if let Some(TokenTree::Ident(ident)) = it.next() {
+ Some(ident.to_string())
+ } else {
+ None
+ }
+}
+
+pub(crate) fn try_literal(it: &mut token_stream::IntoIter) -> Option<String> {
+ if let Some(TokenTree::Literal(literal)) = it.next() {
+ Some(literal.to_string())
+ } else {
+ None
+ }
+}
+
+pub(crate) fn try_string(it: &mut token_stream::IntoIter) -> Option<String> {
+ try_literal(it).and_then(|string| {
+ if string.starts_with('\"') && string.ends_with('\"') {
+ let content = &string[1..string.len() - 1];
+ if content.contains('\\') {
+ panic!("Escape sequences in string literals not yet handled");
+ }
+ Some(content.to_string())
+ } else if string.starts_with("r\"") {
+ panic!("Raw string literals are not yet handled");
+ } else {
+ None
+ }
+ })
+}
+
+pub(crate) fn expect_ident(it: &mut token_stream::IntoIter) -> String {
+ try_ident(it).expect("Expected Ident")
+}
+
+pub(crate) fn expect_punct(it: &mut token_stream::IntoIter) -> char {
+ if let TokenTree::Punct(punct) = it.next().expect("Reached end of token stream for Punct") {
+ punct.as_char()
+ } else {
+ panic!("Expected Punct");
+ }
+}
+
+pub(crate) fn expect_string(it: &mut token_stream::IntoIter) -> String {
+ try_string(it).expect("Expected string")
+}
+
+pub(crate) fn expect_string_ascii(it: &mut token_stream::IntoIter) -> String {
+ let string = try_string(it).expect("Expected string");
+ assert!(string.is_ascii(), "Expected ASCII string");
+ string
+}
+
+pub(crate) fn expect_group(it: &mut token_stream::IntoIter) -> Group {
+ if let TokenTree::Group(group) = it.next().expect("Reached end of token stream for Group") {
+ group
+ } else {
+ panic!("Expected Group");
+ }
+}
+
+pub(crate) fn expect_end(it: &mut token_stream::IntoIter) {
+ if it.next().is_some() {
+ panic!("Expected end");
+ }
+}
+
+pub(crate) struct Generics {
+ pub(crate) impl_generics: Vec<TokenTree>,
+ pub(crate) ty_generics: Vec<TokenTree>,
+}
+
+/// Parses the given `TokenStream` into `Generics` and the rest.
+///
+/// The generics are not present in the rest, but a where clause might remain.
+pub(crate) fn parse_generics(input: TokenStream) -> (Generics, Vec<TokenTree>) {
+ // `impl_generics`, the declared generics with their bounds.
+ let mut impl_generics = vec![];
+ // Only the names of the generics, without any bounds.
+ let mut ty_generics = vec![];
+ // Tokens not related to the generics e.g. the `where` token and definition.
+ let mut rest = vec![];
+ // The current level of `<`.
+ let mut nesting = 0;
+ let mut toks = input.into_iter();
+ // If we are at the beginning of a generic parameter.
+ let mut at_start = true;
+ for tt in &mut toks {
+ match tt.clone() {
+ TokenTree::Punct(p) if p.as_char() == '<' => {
+ if nesting >= 1 {
+ // This is inside of the generics and part of some bound.
+ impl_generics.push(tt);
+ }
+ nesting += 1;
+ }
+ TokenTree::Punct(p) if p.as_char() == '>' => {
+ // This is a parsing error, so we just end it here.
+ if nesting == 0 {
+ break;
+ } else {
+ nesting -= 1;
+ if nesting >= 1 {
+ // We are still inside of the generics and part of some bound.
+ impl_generics.push(tt);
+ }
+ if nesting == 0 {
+ break;
+ }
+ }
+ }
+ tt => {
+ if nesting == 1 {
+ // Here depending on the token, it might be a generic variable name.
+ match &tt {
+ // Ignore const.
+ TokenTree::Ident(i) if i.to_string() == "const" => {}
+ TokenTree::Ident(_) if at_start => {
+ ty_generics.push(tt.clone());
+ // We also already push the `,` token, this makes it easier to append
+ // generics.
+ ty_generics.push(TokenTree::Punct(Punct::new(',', Spacing::Alone)));
+ at_start = false;
+ }
+ TokenTree::Punct(p) if p.as_char() == ',' => at_start = true,
+ // Lifetimes begin with `'`.
+ TokenTree::Punct(p) if p.as_char() == '\'' && at_start => {
+ ty_generics.push(tt.clone());
+ }
+ _ => {}
+ }
+ }
+ if nesting >= 1 {
+ impl_generics.push(tt);
+ } else if nesting == 0 {
+ // If we haven't entered the generics yet, we still want to keep these tokens.
+ rest.push(tt);
+ }
+ }
+ }
+ }
+ rest.extend(toks);
+ (
+ Generics {
+ impl_generics,
+ ty_generics,
+ },
+ rest,
+ )
+}
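+
+// A minimal sketch of what `parse_generics` produces (illustrative only, not
+// part of the upstream file). For an input such as
+//
+//     struct Foo<'a, T: Bar<U>, const N: usize> { /* fields */ }
+//
+// `impl_generics` holds the declared parameters with their bounds
+// (`'a, T: Bar<U>, const N: usize`), `ty_generics` holds only the names with
+// trailing commas (`'a, T, N,`), and `rest` holds everything outside the
+// angle brackets (`struct Foo`, the body, and any `where` clause).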
diff --git a/rust/macros/lib.rs b/rust/macros/lib.rs
new file mode 100644
index 0000000000..c42105c2ff
--- /dev/null
+++ b/rust/macros/lib.rs
@@ -0,0 +1,365 @@
+// SPDX-License-Identifier: GPL-2.0
+
+//! Crate for all kernel procedural macros.
+
+#[macro_use]
+mod quote;
+mod concat_idents;
+mod helpers;
+mod module;
+mod paste;
+mod pin_data;
+mod pinned_drop;
+mod vtable;
+mod zeroable;
+
+use proc_macro::TokenStream;
+
+/// Declares a kernel module.
+///
+/// The `type` argument should be a type which implements the [`Module`]
+/// trait. Also accepts various forms of kernel metadata.
+///
+/// C header: [`include/linux/moduleparam.h`](../../../include/linux/moduleparam.h)
+///
+/// [`Module`]: ../kernel/trait.Module.html
+///
+/// # Examples
+///
+/// ```ignore
+/// use kernel::prelude::*;
+///
+/// module!{
+/// type: MyModule,
+/// name: "my_kernel_module",
+/// author: "Rust for Linux Contributors",
+/// description: "My very own kernel module!",
+/// license: "GPL",
+/// params: {
+/// my_i32: i32 {
+/// default: 42,
+/// permissions: 0o000,
+/// description: "Example of i32",
+/// },
+/// writeable_i32: i32 {
+/// default: 42,
+/// permissions: 0o644,
+/// description: "Example of i32",
+/// },
+/// },
+/// }
+///
+/// struct MyModule;
+///
+/// impl kernel::Module for MyModule {
+/// fn init(_module: &'static ThisModule) -> Result<Self> {
+/// // If the parameter is writeable, then the kparam lock must be
+/// // taken to read the parameter:
+/// {
+/// let lock = THIS_MODULE.kernel_param_lock();
+/// pr_info!("i32 param is: {}\n", writeable_i32.read(&lock));
+/// }
+/// // If the parameter is read only, it can be read without locking
+/// // the kernel parameters:
+/// pr_info!("i32 param is: {}\n", my_i32.read());
+/// Ok(Self)
+/// }
+/// }
+/// ```
+///
+/// # Supported argument types
+/// - `type`: type which implements the [`Module`] trait (required).
+/// - `name`: byte array of the name of the kernel module (required).
+/// - `author`: byte array of the author of the kernel module.
+/// - `description`: byte array of the description of the kernel module.
+/// - `license`: byte array of the license of the kernel module (required).
+/// - `alias`: byte array of alias name of the kernel module.
+#[proc_macro]
+pub fn module(ts: TokenStream) -> TokenStream {
+ module::module(ts)
+}
+
+/// Declares or implements a vtable trait.
+///
+/// Linux's use of pure vtables is very close to Rust traits, but they differ
+/// in how unimplemented functions are represented. In Rust, traits can provide
+/// default implementation for all non-required methods (and the default
+/// implementation could just return `Error::EINVAL`); Linux typically uses C
+/// `NULL` pointers to represent these functions.
+///
+/// This attribute is intended to close the gap. Traits can be declared and
+/// implemented with the `#[vtable]` attribute, and a `HAS_*` associated constant
+/// will be generated for each method in the trait, indicating if the implementor
+/// has overridden a method.
+///
+/// This attribute is not needed if all methods are required.
+///
+/// # Examples
+///
+/// ```ignore
+/// use kernel::prelude::*;
+///
+/// // Declares a `#[vtable]` trait
+/// #[vtable]
+/// pub trait Operations: Send + Sync + Sized {
+/// fn foo(&self) -> Result<()> {
+/// Err(EINVAL)
+/// }
+///
+/// fn bar(&self) -> Result<()> {
+/// Err(EINVAL)
+/// }
+/// }
+///
+/// struct Foo;
+///
+/// // Implements the `#[vtable]` trait
+/// #[vtable]
+/// impl Operations for Foo {
+/// fn foo(&self) -> Result<()> {
+/// # Err(EINVAL)
+/// // ...
+/// }
+/// }
+///
+/// assert_eq!(<Foo as Operations>::HAS_FOO, true);
+/// assert_eq!(<Foo as Operations>::HAS_BAR, false);
+/// ```
+#[proc_macro_attribute]
+pub fn vtable(attr: TokenStream, ts: TokenStream) -> TokenStream {
+ vtable::vtable(attr, ts)
+}
+
+/// Concatenate two identifiers.
+///
+/// This is useful in macros that need to declare or reference items with names
+/// starting with a fixed prefix and ending in a user specified name. The resulting
+/// identifier has the span of the second argument.
+///
+/// # Examples
+///
+/// ```ignore
+/// use kernel::macros::concat_idents;
+///
+/// macro_rules! pub_no_prefix {
+/// ($prefix:ident, $($newname:ident),+) => {
+/// $(pub(crate) const $newname: u32 = kernel::macros::concat_idents!($prefix, $newname);)+
+/// };
+/// }
+///
+/// pub_no_prefix!(
+/// binder_driver_return_protocol_,
+/// BR_OK,
+/// BR_ERROR,
+/// BR_TRANSACTION,
+/// BR_REPLY,
+/// BR_DEAD_REPLY,
+/// BR_TRANSACTION_COMPLETE,
+/// BR_INCREFS,
+/// BR_ACQUIRE,
+/// BR_RELEASE,
+/// BR_DECREFS,
+/// BR_NOOP,
+/// BR_SPAWN_LOOPER,
+/// BR_DEAD_BINDER,
+/// BR_CLEAR_DEATH_NOTIFICATION_DONE,
+/// BR_FAILED_REPLY
+/// );
+///
+/// assert_eq!(BR_OK, binder_driver_return_protocol_BR_OK);
+/// ```
+#[proc_macro]
+pub fn concat_idents(ts: TokenStream) -> TokenStream {
+ concat_idents::concat_idents(ts)
+}
+
+/// Used to specify the pinning information of the fields of a struct.
+///
+/// This is somewhat similar in purpose to
+/// [pin-project-lite](https://crates.io/crates/pin-project-lite).
+/// Place this macro on a struct definition and then `#[pin]` in front of the attributes of each
+/// field you want to structurally pin.
+///
+/// This macro enables the use of the [`pin_init!`] macro. When pin-initializing a `struct`,
+/// then `#[pin]` directs the type of initializer that is required.
+///
+/// If your `struct` implements `Drop`, then you need to add `PinnedDrop` as arguments to this
+/// macro, and change your `Drop` implementation to `PinnedDrop` annotated with
+/// `#[`[`macro@pinned_drop`]`]`, since dropping pinned values requires extra care.
+///
+/// # Examples
+///
+/// ```rust,ignore
+/// #[pin_data]
+/// struct DriverData {
+/// #[pin]
+/// queue: Mutex<Vec<Command>>,
+/// buf: Box<[u8; 1024 * 1024]>,
+/// }
+/// ```
+///
+/// ```rust,ignore
+/// #[pin_data(PinnedDrop)]
+/// struct DriverData {
+/// #[pin]
+/// queue: Mutex<Vec<Command>>,
+/// buf: Box<[u8; 1024 * 1024]>,
+/// raw_info: *mut Info,
+/// }
+///
+/// #[pinned_drop]
+/// impl PinnedDrop for DriverData {
+/// fn drop(self: Pin<&mut Self>) {
+/// unsafe { bindings::destroy_info(self.raw_info) };
+/// }
+/// }
+/// ```
+///
+/// [`pin_init!`]: ../kernel/macro.pin_init.html
+// ^ cannot use direct link, since `kernel` is not a dependency of `macros`.
+#[proc_macro_attribute]
+pub fn pin_data(inner: TokenStream, item: TokenStream) -> TokenStream {
+ pin_data::pin_data(inner, item)
+}
+
+/// Used to implement `PinnedDrop` safely.
+///
+/// Only works on structs that are annotated via `#[`[`macro@pin_data`]`]`.
+///
+/// # Examples
+///
+/// ```rust,ignore
+/// #[pin_data(PinnedDrop)]
+/// struct DriverData {
+/// #[pin]
+/// queue: Mutex<Vec<Command>>,
+/// buf: Box<[u8; 1024 * 1024]>,
+/// raw_info: *mut Info,
+/// }
+///
+/// #[pinned_drop]
+/// impl PinnedDrop for DriverData {
+/// fn drop(self: Pin<&mut Self>) {
+/// unsafe { bindings::destroy_info(self.raw_info) };
+/// }
+/// }
+/// ```
+#[proc_macro_attribute]
+pub fn pinned_drop(args: TokenStream, input: TokenStream) -> TokenStream {
+ pinned_drop::pinned_drop(args, input)
+}
+
+/// Paste identifiers together.
+///
+/// Within the `paste!` macro, identifiers inside `[<` and `>]` are concatenated together to form a
+/// single identifier.
+///
+/// This is similar to the [`paste`] crate, but with the pasting feature limited to identifiers
+/// (literals, lifetimes and documentation strings are not supported). There is a difference in
+/// supported modifiers as well.
+///
+/// # Example
+///
+/// ```ignore
+/// use kernel::macros::paste;
+///
+/// macro_rules! pub_no_prefix {
+/// ($prefix:ident, $($newname:ident),+) => {
+/// paste! {
+/// $(pub(crate) const $newname: u32 = [<$prefix $newname>];)+
+/// }
+/// };
+/// }
+///
+/// pub_no_prefix!(
+/// binder_driver_return_protocol_,
+/// BR_OK,
+/// BR_ERROR,
+/// BR_TRANSACTION,
+/// BR_REPLY,
+/// BR_DEAD_REPLY,
+/// BR_TRANSACTION_COMPLETE,
+/// BR_INCREFS,
+/// BR_ACQUIRE,
+/// BR_RELEASE,
+/// BR_DECREFS,
+/// BR_NOOP,
+/// BR_SPAWN_LOOPER,
+/// BR_DEAD_BINDER,
+/// BR_CLEAR_DEATH_NOTIFICATION_DONE,
+/// BR_FAILED_REPLY
+/// );
+///
+/// assert_eq!(BR_OK, binder_driver_return_protocol_BR_OK);
+/// ```
+///
+/// # Modifiers
+///
+/// For each identifier, it is possible to attach one or multiple modifiers to
+/// it.
+///
+/// Currently supported modifiers are:
+/// * `span`: change the span of the concatenated identifier to the span of the specified token. By
+/// default, the span of the `[< >]` group is used.
+/// * `lower`: change the identifier to lower case.
+/// * `upper`: change the identifier to upper case.
+///
+/// ```ignore
+/// use kernel::macros::paste;
+///
+/// macro_rules! pub_no_prefix {
+/// ($prefix:ident, $($newname:ident),+) => {
+/// kernel::macros::paste! {
+/// $(pub(crate) const fn [<$newname:lower:span>]() -> u32 { [<$prefix $newname:span>] })+
+/// }
+/// };
+/// }
+///
+/// pub_no_prefix!(
+/// binder_driver_return_protocol_,
+/// BR_OK,
+/// BR_ERROR,
+/// BR_TRANSACTION,
+/// BR_REPLY,
+/// BR_DEAD_REPLY,
+/// BR_TRANSACTION_COMPLETE,
+/// BR_INCREFS,
+/// BR_ACQUIRE,
+/// BR_RELEASE,
+/// BR_DECREFS,
+/// BR_NOOP,
+/// BR_SPAWN_LOOPER,
+/// BR_DEAD_BINDER,
+/// BR_CLEAR_DEATH_NOTIFICATION_DONE,
+/// BR_FAILED_REPLY
+/// );
+///
+/// assert_eq!(br_ok(), binder_driver_return_protocol_BR_OK);
+/// ```
+///
+/// [`paste`]: https://docs.rs/paste/
+#[proc_macro]
+pub fn paste(input: TokenStream) -> TokenStream {
+ let mut tokens = input.into_iter().collect();
+ paste::expand(&mut tokens);
+ tokens.into_iter().collect()
+}
+
+/// Derives the [`Zeroable`] trait for the given struct.
+///
+/// This can only be used for structs where every field implements the [`Zeroable`] trait.
+///
+/// # Examples
+///
+/// ```rust,ignore
+/// #[derive(Zeroable)]
+/// pub struct DriverData {
+/// id: i64,
+/// buf_ptr: *mut u8,
+/// len: usize,
+/// }
+/// ```
+#[proc_macro_derive(Zeroable)]
+pub fn derive_zeroable(input: TokenStream) -> TokenStream {
+ zeroable::derive(input)
+}
diff --git a/rust/macros/module.rs b/rust/macros/module.rs
new file mode 100644
index 0000000000..d62d8710d7
--- /dev/null
+++ b/rust/macros/module.rs
@@ -0,0 +1,302 @@
+// SPDX-License-Identifier: GPL-2.0
+
+use crate::helpers::*;
+use proc_macro::{token_stream, Delimiter, Literal, TokenStream, TokenTree};
+use std::fmt::Write;
+
+fn expect_string_array(it: &mut token_stream::IntoIter) -> Vec<String> {
+ let group = expect_group(it);
+ assert_eq!(group.delimiter(), Delimiter::Bracket);
+ let mut values = Vec::new();
+ let mut it = group.stream().into_iter();
+
+ while let Some(val) = try_string(&mut it) {
+ assert!(val.is_ascii(), "Expected ASCII string");
+ values.push(val);
+ match it.next() {
+ Some(TokenTree::Punct(punct)) => assert_eq!(punct.as_char(), ','),
+ None => break,
+ _ => panic!("Expected ',' or end of array"),
+ }
+ }
+ values
+}
+
+struct ModInfoBuilder<'a> {
+ module: &'a str,
+ counter: usize,
+ buffer: String,
+}
+
+impl<'a> ModInfoBuilder<'a> {
+ fn new(module: &'a str) -> Self {
+ ModInfoBuilder {
+ module,
+ counter: 0,
+ buffer: String::new(),
+ }
+ }
+
+ fn emit_base(&mut self, field: &str, content: &str, builtin: bool) {
+ let string = if builtin {
+ // Built-in modules prefix their modinfo strings by `module.`.
+ format!(
+ "{module}.{field}={content}\0",
+ module = self.module,
+ field = field,
+ content = content
+ )
+ } else {
+ // Loadable modules' modinfo strings go as-is.
+ format!("{field}={content}\0", field = field, content = content)
+ };
+
+ write!(
+ &mut self.buffer,
+ "
+ {cfg}
+ #[doc(hidden)]
+ #[link_section = \".modinfo\"]
+ #[used]
+ pub static __{module}_{counter}: [u8; {length}] = *{string};
+ ",
+ cfg = if builtin {
+ "#[cfg(not(MODULE))]"
+ } else {
+ "#[cfg(MODULE)]"
+ },
+ module = self.module.to_uppercase(),
+ counter = self.counter,
+ length = string.len(),
+ string = Literal::byte_string(string.as_bytes()),
+ )
+ .unwrap();
+
+ self.counter += 1;
+ }
+
+ fn emit_only_builtin(&mut self, field: &str, content: &str) {
+ self.emit_base(field, content, true)
+ }
+
+ fn emit_only_loadable(&mut self, field: &str, content: &str) {
+ self.emit_base(field, content, false)
+ }
+
+ fn emit(&mut self, field: &str, content: &str) {
+ self.emit_only_builtin(field, content);
+ self.emit_only_loadable(field, content);
+ }
+}
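+
+// Illustrative sketch (not part of the upstream file): assuming a loadable
+// module named "mymod", a call like `builder.emit("author", "Jane Doe")`
+// appends two statics of roughly this shape to the buffer:
+//
+//     #[cfg(MODULE)]
+//     #[doc(hidden)]
+//     #[link_section = ".modinfo"]
+//     #[used]
+//     pub static __MYMOD_1: [u8; 16] = *b"author=Jane Doe\0";
+//
+// plus a `#[cfg(not(MODULE))]` counterpart whose string carries the module
+// prefix ("mymod.author=Jane Doe\0"), so both built-in and loadable builds end
+// up with the expected modinfo entry.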
+
+#[derive(Debug, Default)]
+struct ModuleInfo {
+ type_: String,
+ license: String,
+ name: String,
+ author: Option<String>,
+ description: Option<String>,
+ alias: Option<Vec<String>>,
+}
+
+impl ModuleInfo {
+ fn parse(it: &mut token_stream::IntoIter) -> Self {
+ let mut info = ModuleInfo::default();
+
+ const EXPECTED_KEYS: &[&str] =
+ &["type", "name", "author", "description", "license", "alias"];
+ const REQUIRED_KEYS: &[&str] = &["type", "name", "license"];
+ let mut seen_keys = Vec::new();
+
+ loop {
+ let key = match it.next() {
+ Some(TokenTree::Ident(ident)) => ident.to_string(),
+ Some(_) => panic!("Expected Ident or end"),
+ None => break,
+ };
+
+ if seen_keys.contains(&key) {
+ panic!(
+ "Duplicated key \"{}\". Keys can only be specified once.",
+ key
+ );
+ }
+
+ assert_eq!(expect_punct(it), ':');
+
+ match key.as_str() {
+ "type" => info.type_ = expect_ident(it),
+ "name" => info.name = expect_string_ascii(it),
+ "author" => info.author = Some(expect_string(it)),
+ "description" => info.description = Some(expect_string(it)),
+ "license" => info.license = expect_string_ascii(it),
+ "alias" => info.alias = Some(expect_string_array(it)),
+ _ => panic!(
+ "Unknown key \"{}\". Valid keys are: {:?}.",
+ key, EXPECTED_KEYS
+ ),
+ }
+
+ assert_eq!(expect_punct(it), ',');
+
+ seen_keys.push(key);
+ }
+
+ expect_end(it);
+
+ for key in REQUIRED_KEYS {
+ if !seen_keys.iter().any(|e| e == key) {
+ panic!("Missing required key \"{}\".", key);
+ }
+ }
+
+ let mut ordered_keys: Vec<&str> = Vec::new();
+ for key in EXPECTED_KEYS {
+ if seen_keys.iter().any(|e| e == key) {
+ ordered_keys.push(key);
+ }
+ }
+
+ if seen_keys != ordered_keys {
+ panic!(
+ "Keys are not ordered as expected. Order them like: {:?}.",
+ ordered_keys
+ );
+ }
+
+ info
+ }
+}
+
+pub(crate) fn module(ts: TokenStream) -> TokenStream {
+ let mut it = ts.into_iter();
+
+ let info = ModuleInfo::parse(&mut it);
+
+ let mut modinfo = ModInfoBuilder::new(info.name.as_ref());
+ if let Some(author) = info.author {
+ modinfo.emit("author", &author);
+ }
+ if let Some(description) = info.description {
+ modinfo.emit("description", &description);
+ }
+ modinfo.emit("license", &info.license);
+ if let Some(aliases) = info.alias {
+ for alias in aliases {
+ modinfo.emit("alias", &alias);
+ }
+ }
+
+ // Built-in modules also export the `file` modinfo string.
+ let file =
+ std::env::var("RUST_MODFILE").expect("Unable to fetch RUST_MODFILE environment variable");
+ modinfo.emit_only_builtin("file", &file);
+
+ format!(
+ "
+ /// The module name.
+ ///
+ /// Used by the printing macros, e.g. [`info!`].
+ const __LOG_PREFIX: &[u8] = b\"{name}\\0\";
+
+ /// The \"Rust loadable module\" mark.
+ //
+ // This may be best done another way later on, e.g. as a new modinfo
+ // key or a new section. For the moment, keep it simple.
+ #[cfg(MODULE)]
+ #[doc(hidden)]
+ #[used]
+ static __IS_RUST_MODULE: () = ();
+
+ static mut __MOD: Option<{type_}> = None;
+
+ // SAFETY: `__this_module` is constructed by the kernel at load time and will not be
+ // freed until the module is unloaded.
+ #[cfg(MODULE)]
+ static THIS_MODULE: kernel::ThisModule = unsafe {{
+ kernel::ThisModule::from_ptr(&kernel::bindings::__this_module as *const _ as *mut _)
+ }};
+ #[cfg(not(MODULE))]
+ static THIS_MODULE: kernel::ThisModule = unsafe {{
+ kernel::ThisModule::from_ptr(core::ptr::null_mut())
+ }};
+
+ // Loadable modules need to export the `{{init,cleanup}}_module` identifiers.
+ #[cfg(MODULE)]
+ #[doc(hidden)]
+ #[no_mangle]
+ pub extern \"C\" fn init_module() -> core::ffi::c_int {{
+ __init()
+ }}
+
+ #[cfg(MODULE)]
+ #[doc(hidden)]
+ #[no_mangle]
+ pub extern \"C\" fn cleanup_module() {{
+ __exit()
+ }}
+
+ // Built-in modules are initialized through an initcall pointer
+ // and the identifiers need to be unique.
+ #[cfg(not(MODULE))]
+ #[cfg(not(CONFIG_HAVE_ARCH_PREL32_RELOCATIONS))]
+ #[doc(hidden)]
+ #[link_section = \"{initcall_section}\"]
+ #[used]
+ pub static __{name}_initcall: extern \"C\" fn() -> core::ffi::c_int = __{name}_init;
+
+ #[cfg(not(MODULE))]
+ #[cfg(CONFIG_HAVE_ARCH_PREL32_RELOCATIONS)]
+ core::arch::global_asm!(
+ r#\".section \"{initcall_section}\", \"a\"
+ __{name}_initcall:
+ .long __{name}_init - .
+ .previous
+ \"#
+ );
+
+ #[cfg(not(MODULE))]
+ #[doc(hidden)]
+ #[no_mangle]
+ pub extern \"C\" fn __{name}_init() -> core::ffi::c_int {{
+ __init()
+ }}
+
+ #[cfg(not(MODULE))]
+ #[doc(hidden)]
+ #[no_mangle]
+ pub extern \"C\" fn __{name}_exit() {{
+ __exit()
+ }}
+
+ fn __init() -> core::ffi::c_int {{
+ match <{type_} as kernel::Module>::init(&THIS_MODULE) {{
+ Ok(m) => {{
+ unsafe {{
+ __MOD = Some(m);
+ }}
+ return 0;
+ }}
+ Err(e) => {{
+ return e.to_errno();
+ }}
+ }}
+ }}
+
+ fn __exit() {{
+ unsafe {{
+ // Invokes `drop()` on `__MOD`, which should be used for cleanup.
+ __MOD = None;
+ }}
+ }}
+
+ {modinfo}
+ ",
+ type_ = info.type_,
+ name = info.name,
+ modinfo = modinfo.buffer,
+ initcall_section = ".initcall6.init"
+ )
+ .parse()
+ .expect("Error parsing formatted string into token stream.")
+}
diff --git a/rust/macros/paste.rs b/rust/macros/paste.rs
new file mode 100644
index 0000000000..385a784342
--- /dev/null
+++ b/rust/macros/paste.rs
@@ -0,0 +1,96 @@
+// SPDX-License-Identifier: GPL-2.0
+
+use proc_macro::{Delimiter, Group, Ident, Spacing, Span, TokenTree};
+
+fn concat(tokens: &[TokenTree], group_span: Span) -> TokenTree {
+ let mut tokens = tokens.iter();
+ let mut segments = Vec::new();
+ let mut span = None;
+ loop {
+ match tokens.next() {
+ None => break,
+ Some(TokenTree::Literal(lit)) => segments.push((lit.to_string(), lit.span())),
+ Some(TokenTree::Ident(ident)) => {
+ let mut value = ident.to_string();
+ if value.starts_with("r#") {
+ value.replace_range(0..2, "");
+ }
+ segments.push((value, ident.span()));
+ }
+ Some(TokenTree::Punct(p)) if p.as_char() == ':' => {
+ let Some(TokenTree::Ident(ident)) = tokens.next() else {
+ panic!("expected identifier as modifier");
+ };
+
+ let (mut value, sp) = segments.pop().expect("expected identifier before modifier");
+ match ident.to_string().as_str() {
+ // Set the overall span of concatenated token as current span
+ "span" => {
+ assert!(
+ span.is_none(),
+ "span modifier should only appear at most once"
+ );
+ span = Some(sp);
+ }
+ "lower" => value = value.to_lowercase(),
+ "upper" => value = value.to_uppercase(),
+ v => panic!("unknown modifier `{v}`"),
+ };
+ segments.push((value, sp));
+ }
+ _ => panic!("unexpected token in paste segments"),
+ };
+ }
+
+ let pasted: String = segments.into_iter().map(|x| x.0).collect();
+ TokenTree::Ident(Ident::new(&pasted, span.unwrap_or(group_span)))
+}
+
+pub(crate) fn expand(tokens: &mut Vec<TokenTree>) {
+ for token in tokens.iter_mut() {
+ if let TokenTree::Group(group) = token {
+ let delimiter = group.delimiter();
+ let span = group.span();
+ let mut stream: Vec<_> = group.stream().into_iter().collect();
+ // Find groups that look like `[< A B C D >]`
+ if delimiter == Delimiter::Bracket
+ && stream.len() >= 3
+ && matches!(&stream[0], TokenTree::Punct(p) if p.as_char() == '<')
+ && matches!(&stream[stream.len() - 1], TokenTree::Punct(p) if p.as_char() == '>')
+ {
+ // Replace the group with concatenated token
+ *token = concat(&stream[1..stream.len() - 1], span);
+ } else {
+ // Recursively expand tokens inside the group
+ expand(&mut stream);
+ let mut group = Group::new(delimiter, stream.into_iter().collect());
+ group.set_span(span);
+ *token = TokenTree::Group(group);
+ }
+ }
+ }
+
+ // Path segments cannot contain invisible delimiter groups, so remove them if any.
+ for i in (0..tokens.len().saturating_sub(3)).rev() {
+ // Looking for a double colon
+ if matches!(
+ (&tokens[i + 1], &tokens[i + 2]),
+ (TokenTree::Punct(a), TokenTree::Punct(b))
+ if a.as_char() == ':' && a.spacing() == Spacing::Joint && b.as_char() == ':'
+ ) {
+ match &tokens[i + 3] {
+ TokenTree::Group(group) if group.delimiter() == Delimiter::None => {
+ tokens.splice(i + 3..i + 4, group.stream());
+ }
+ _ => (),
+ }
+
+ match &tokens[i] {
+ TokenTree::Group(group) if group.delimiter() == Delimiter::None => {
+ tokens.splice(i..i + 1, group.stream());
+ }
+ _ => (),
+ }
+ }
+ }
+}
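+
+// Illustrative note (not part of the upstream file): the final loop in `expand`
+// matters when `paste!` is used from another `macro_rules!` macro. A fragment
+// such as
+//
+//     $ty::[<$name _init>]()
+//
+// reaches the proc macro with `$ty` wrapped in an invisible
+// (`Delimiter::None`) group, which is not allowed as a path segment; the loop
+// unwraps such groups on either side of a `::` so the pasted path parses.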
diff --git a/rust/macros/pin_data.rs b/rust/macros/pin_data.rs
new file mode 100644
index 0000000000..6d58cfda98
--- /dev/null
+++ b/rust/macros/pin_data.rs
@@ -0,0 +1,127 @@
+// SPDX-License-Identifier: Apache-2.0 OR MIT
+
+use crate::helpers::{parse_generics, Generics};
+use proc_macro::{Group, Punct, Spacing, TokenStream, TokenTree};
+
+pub(crate) fn pin_data(args: TokenStream, input: TokenStream) -> TokenStream {
+ // This proc-macro only does some pre-parsing and then delegates the actual parsing to
+ // `kernel::__pin_data!`.
+
+ let (
+ Generics {
+ impl_generics,
+ ty_generics,
+ },
+ rest,
+ ) = parse_generics(input);
+ // The struct definition might contain the `Self` type. Since `__pin_data!` will define a new
+ // type with the same generics and bounds, this poses a problem, since `Self` will refer to the
+ // new type as opposed to this struct definition. Therefore we have to replace `Self` with the
+ // concrete name.
+
+ // Errors that occur when replacing `Self` with `struct_name`.
+ let mut errs = TokenStream::new();
+ // The name of the struct with ty_generics.
+ let struct_name = rest
+ .iter()
+ .skip_while(|tt| !matches!(tt, TokenTree::Ident(i) if i.to_string() == "struct"))
+ .nth(1)
+ .and_then(|tt| match tt {
+ TokenTree::Ident(_) => {
+ let tt = tt.clone();
+ let mut res = vec![tt];
+ if !ty_generics.is_empty() {
+ // We add this, so it is maximally compatible with e.g. `Self::CONST` which
+ // will be replaced by `StructName::<$generics>::CONST`.
+ res.push(TokenTree::Punct(Punct::new(':', Spacing::Joint)));
+ res.push(TokenTree::Punct(Punct::new(':', Spacing::Alone)));
+ res.push(TokenTree::Punct(Punct::new('<', Spacing::Alone)));
+ res.extend(ty_generics.iter().cloned());
+ res.push(TokenTree::Punct(Punct::new('>', Spacing::Alone)));
+ }
+ Some(res)
+ }
+ _ => None,
+ })
+ .unwrap_or_else(|| {
+ // If we did not find the name of the struct then we will use `Self` as the replacement
+ // and add a compile error to ensure it does not compile.
+ errs.extend(
+ "::core::compile_error!(\"Could not locate type name.\");"
+ .parse::<TokenStream>()
+ .unwrap(),
+ );
+ "Self".parse::<TokenStream>().unwrap().into_iter().collect()
+ });
+ let impl_generics = impl_generics
+ .into_iter()
+ .flat_map(|tt| replace_self_and_deny_type_defs(&struct_name, tt, &mut errs))
+ .collect::<Vec<_>>();
+ let mut rest = rest
+ .into_iter()
+ .flat_map(|tt| {
+ // We ignore top level `struct` tokens, since they would emit a compile error.
+ if matches!(&tt, TokenTree::Ident(i) if i.to_string() == "struct") {
+ vec![tt]
+ } else {
+ replace_self_and_deny_type_defs(&struct_name, tt, &mut errs)
+ }
+ })
+ .collect::<Vec<_>>();
+ // This should be the body of the struct `{...}`.
+ let last = rest.pop();
+ let mut quoted = quote!(::kernel::__pin_data! {
+ parse_input:
+ @args(#args),
+ @sig(#(#rest)*),
+ @impl_generics(#(#impl_generics)*),
+ @ty_generics(#(#ty_generics)*),
+ @body(#last),
+ });
+ quoted.extend(errs);
+ quoted
+}
+
+/// Replaces `Self` with `struct_name` and errors on `enum`, `trait`, `struct`, `union` and `impl`
+/// keywords.
+///
+/// The error is appended to `errs` to allow normal parsing to continue.
+fn replace_self_and_deny_type_defs(
+ struct_name: &Vec<TokenTree>,
+ tt: TokenTree,
+ errs: &mut TokenStream,
+) -> Vec<TokenTree> {
+ match tt {
+ TokenTree::Ident(ref i)
+ if i.to_string() == "enum"
+ || i.to_string() == "trait"
+ || i.to_string() == "struct"
+ || i.to_string() == "union"
+ || i.to_string() == "impl" =>
+ {
+ errs.extend(
+ format!(
+ "::core::compile_error!(\"Cannot use `{i}` inside of struct definition with \
+ `#[pin_data]`.\");"
+ )
+ .parse::<TokenStream>()
+ .unwrap()
+ .into_iter()
+ .map(|mut tok| {
+ tok.set_span(tt.span());
+ tok
+ }),
+ );
+ vec![tt]
+ }
+ TokenTree::Ident(i) if i.to_string() == "Self" => struct_name.clone(),
+ TokenTree::Literal(_) | TokenTree::Punct(_) | TokenTree::Ident(_) => vec![tt],
+ TokenTree::Group(g) => vec![TokenTree::Group(Group::new(
+ g.delimiter(),
+ g.stream()
+ .into_iter()
+ .flat_map(|tt| replace_self_and_deny_type_defs(struct_name, tt, errs))
+ .collect(),
+ ))],
+ }
+}
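+
+// Illustrative sketch (not part of the upstream file): given
+//
+//     #[pin_data]
+//     struct Buf<T> {
+//         #[pin]
+//         inner: T,
+//         ptr: *mut Self,
+//     }
+//
+// `pin_data` replaces `Self` in the definition with `Buf::<T,>` before passing
+// the pieces (`@sig`, `@impl_generics`, `@ty_generics`, `@body`) on to
+// `kernel::__pin_data!`, since that macro defines an auxiliary type with the
+// same generics for which `Self` would otherwise resolve to the wrong type.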
diff --git a/rust/macros/pinned_drop.rs b/rust/macros/pinned_drop.rs
new file mode 100644
index 0000000000..88fb72b206
--- /dev/null
+++ b/rust/macros/pinned_drop.rs
@@ -0,0 +1,49 @@
+// SPDX-License-Identifier: Apache-2.0 OR MIT
+
+use proc_macro::{TokenStream, TokenTree};
+
+pub(crate) fn pinned_drop(_args: TokenStream, input: TokenStream) -> TokenStream {
+ let mut toks = input.into_iter().collect::<Vec<_>>();
+ assert!(!toks.is_empty());
+ // Ensure that we have an `impl` item.
+ assert!(matches!(&toks[0], TokenTree::Ident(i) if i.to_string() == "impl"));
+ // Ensure that we are implementing `PinnedDrop`.
+ let mut nesting: usize = 0;
+ let mut pinned_drop_idx = None;
+ for (i, tt) in toks.iter().enumerate() {
+ match tt {
+ TokenTree::Punct(p) if p.as_char() == '<' => {
+ nesting += 1;
+ }
+ TokenTree::Punct(p) if p.as_char() == '>' => {
+ nesting = nesting.checked_sub(1).unwrap();
+ continue;
+ }
+ _ => {}
+ }
+ if i >= 1 && nesting == 0 {
+ // Found the end of the generics, this should be `PinnedDrop`.
+ assert!(
+ matches!(tt, TokenTree::Ident(i) if i.to_string() == "PinnedDrop"),
+ "expected 'PinnedDrop', found: '{:?}'",
+ tt
+ );
+ pinned_drop_idx = Some(i);
+ break;
+ }
+ }
+ let idx = pinned_drop_idx
+ .unwrap_or_else(|| panic!("Expected an `impl` block implementing `PinnedDrop`."));
+ // Fully qualify the `PinnedDrop`, so as to avoid any tampering.
+ toks.splice(idx..idx, quote!(::kernel::init::));
+ // Take the `{}` body and call the declarative macro.
+ if let Some(TokenTree::Group(last)) = toks.pop() {
+ let last = last.stream();
+ quote!(::kernel::__pinned_drop! {
+ @impl_sig(#(#toks)*),
+ @impl_body(#last),
+ })
+ } else {
+ TokenStream::from_iter(toks)
+ }
+}
diff --git a/rust/macros/quote.rs b/rust/macros/quote.rs
new file mode 100644
index 0000000000..33a199e4f1
--- /dev/null
+++ b/rust/macros/quote.rs
@@ -0,0 +1,157 @@
+// SPDX-License-Identifier: Apache-2.0 OR MIT
+
+use proc_macro::{TokenStream, TokenTree};
+
+pub(crate) trait ToTokens {
+ fn to_tokens(&self, tokens: &mut TokenStream);
+}
+
+impl<T: ToTokens> ToTokens for Option<T> {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ if let Some(v) = self {
+ v.to_tokens(tokens);
+ }
+ }
+}
+
+impl ToTokens for proc_macro::Group {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.extend([TokenTree::from(self.clone())]);
+ }
+}
+
+impl ToTokens for TokenTree {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.extend([self.clone()]);
+ }
+}
+
+impl ToTokens for TokenStream {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.extend(self.clone());
+ }
+}
+
+/// Converts tokens into [`proc_macro::TokenStream`] and performs variable interpolations with
+/// the given span.
+///
+/// This is similar to the
+/// [`quote_spanned!`](https://docs.rs/quote/latest/quote/macro.quote_spanned.html) macro from the
+/// `quote` crate but provides only just enough functionality needed by the current `macros` crate.
+macro_rules! quote_spanned {
+ ($span:expr => $($tt:tt)*) => {{
+ let mut tokens;
+ #[allow(clippy::vec_init_then_push)]
+ {
+ tokens = ::std::vec::Vec::new();
+ let span = $span;
+ quote_spanned!(@proc tokens span $($tt)*);
+ }
+ ::proc_macro::TokenStream::from_iter(tokens)
+ }};
+ (@proc $v:ident $span:ident) => {};
+ (@proc $v:ident $span:ident #$id:ident $($tt:tt)*) => {
+ let mut ts = ::proc_macro::TokenStream::new();
+ $crate::quote::ToTokens::to_tokens(&$id, &mut ts);
+ $v.extend(ts);
+ quote_spanned!(@proc $v $span $($tt)*);
+ };
+ (@proc $v:ident $span:ident #(#$id:ident)* $($tt:tt)*) => {
+ for token in $id {
+ let mut ts = ::proc_macro::TokenStream::new();
+ $crate::quote::ToTokens::to_tokens(&token, &mut ts);
+ $v.extend(ts);
+ }
+ quote_spanned!(@proc $v $span $($tt)*);
+ };
+ (@proc $v:ident $span:ident ( $($inner:tt)* ) $($tt:tt)*) => {
+ let mut tokens = ::std::vec::Vec::new();
+ quote_spanned!(@proc tokens $span $($inner)*);
+ $v.push(::proc_macro::TokenTree::Group(::proc_macro::Group::new(
+ ::proc_macro::Delimiter::Parenthesis,
+ ::proc_macro::TokenStream::from_iter(tokens)
+ )));
+ quote_spanned!(@proc $v $span $($tt)*);
+ };
+ (@proc $v:ident $span:ident [ $($inner:tt)* ] $($tt:tt)*) => {
+ let mut tokens = ::std::vec::Vec::new();
+ quote_spanned!(@proc tokens $span $($inner)*);
+ $v.push(::proc_macro::TokenTree::Group(::proc_macro::Group::new(
+ ::proc_macro::Delimiter::Bracket,
+ ::proc_macro::TokenStream::from_iter(tokens)
+ )));
+ quote_spanned!(@proc $v $span $($tt)*);
+ };
+ (@proc $v:ident $span:ident { $($inner:tt)* } $($tt:tt)*) => {
+ let mut tokens = ::std::vec::Vec::new();
+ quote_spanned!(@proc tokens $span $($inner)*);
+ $v.push(::proc_macro::TokenTree::Group(::proc_macro::Group::new(
+ ::proc_macro::Delimiter::Brace,
+ ::proc_macro::TokenStream::from_iter(tokens)
+ )));
+ quote_spanned!(@proc $v $span $($tt)*);
+ };
+ (@proc $v:ident $span:ident :: $($tt:tt)*) => {
+ $v.push(::proc_macro::TokenTree::Punct(
+ ::proc_macro::Punct::new(':', ::proc_macro::Spacing::Joint)
+ ));
+ $v.push(::proc_macro::TokenTree::Punct(
+ ::proc_macro::Punct::new(':', ::proc_macro::Spacing::Alone)
+ ));
+ quote_spanned!(@proc $v $span $($tt)*);
+ };
+ (@proc $v:ident $span:ident : $($tt:tt)*) => {
+ $v.push(::proc_macro::TokenTree::Punct(
+ ::proc_macro::Punct::new(':', ::proc_macro::Spacing::Alone)
+ ));
+ quote_spanned!(@proc $v $span $($tt)*);
+ };
+ (@proc $v:ident $span:ident , $($tt:tt)*) => {
+ $v.push(::proc_macro::TokenTree::Punct(
+ ::proc_macro::Punct::new(',', ::proc_macro::Spacing::Alone)
+ ));
+ quote_spanned!(@proc $v $span $($tt)*);
+ };
+ (@proc $v:ident $span:ident @ $($tt:tt)*) => {
+ $v.push(::proc_macro::TokenTree::Punct(
+ ::proc_macro::Punct::new('@', ::proc_macro::Spacing::Alone)
+ ));
+ quote_spanned!(@proc $v $span $($tt)*);
+ };
+ (@proc $v:ident $span:ident ! $($tt:tt)*) => {
+ $v.push(::proc_macro::TokenTree::Punct(
+ ::proc_macro::Punct::new('!', ::proc_macro::Spacing::Alone)
+ ));
+ quote_spanned!(@proc $v $span $($tt)*);
+ };
+ (@proc $v:ident $span:ident ; $($tt:tt)*) => {
+ $v.push(::proc_macro::TokenTree::Punct(
+ ::proc_macro::Punct::new(';', ::proc_macro::Spacing::Alone)
+ ));
+ quote_spanned!(@proc $v $span $($tt)*);
+ };
+ (@proc $v:ident $span:ident + $($tt:tt)*) => {
+ $v.push(::proc_macro::TokenTree::Punct(
+ ::proc_macro::Punct::new('+', ::proc_macro::Spacing::Alone)
+ ));
+ quote_spanned!(@proc $v $span $($tt)*);
+ };
+ (@proc $v:ident $span:ident $id:ident $($tt:tt)*) => {
+ $v.push(::proc_macro::TokenTree::Ident(::proc_macro::Ident::new(stringify!($id), $span)));
+ quote_spanned!(@proc $v $span $($tt)*);
+ };
+}
+
+/// Converts tokens into [`proc_macro::TokenStream`] and performs variable interpolations with
+/// mixed site span ([`Span::mixed_site()`]).
+///
+/// This is similar to the [`quote!`](https://docs.rs/quote/latest/quote/macro.quote.html) macro
+/// from the `quote` crate but provides only just enough functionality needed by the current
+/// `macros` crate.
+///
+/// [`Span::mixed_site()`]: https://doc.rust-lang.org/proc_macro/struct.Span.html#method.mixed_site
+macro_rules! quote {
+ ($($tt:tt)*) => {
+ quote_spanned!(::proc_macro::Span::mixed_site() => $($tt)*)
+ }
+}
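+
+// A minimal usage sketch (illustrative only, not part of the upstream file):
+//
+//     let name = TokenTree::Ident(Ident::new("foo", Span::call_site()));
+//     let fields: Vec<TokenTree> = vec![/* ... */];
+//     let ts: TokenStream = quote! { struct #name { #(#fields)* } };
+//
+// `#name` interpolates a single value through `ToTokens`, while `#(#fields)*`
+// repeats over an iterator, mirroring the small subset of the `quote` crate's
+// syntax that these macros support; everything else (identifiers, the handful
+// of punctuation tokens handled above, and delimited groups) is re-emitted
+// with the requested span.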
diff --git a/rust/macros/vtable.rs b/rust/macros/vtable.rs
new file mode 100644
index 0000000000..ee06044fcd
--- /dev/null
+++ b/rust/macros/vtable.rs
@@ -0,0 +1,96 @@
+// SPDX-License-Identifier: GPL-2.0
+
+use proc_macro::{Delimiter, Group, TokenStream, TokenTree};
+use std::collections::HashSet;
+use std::fmt::Write;
+
+pub(crate) fn vtable(_attr: TokenStream, ts: TokenStream) -> TokenStream {
+ let mut tokens: Vec<_> = ts.into_iter().collect();
+
+ // Scan for the `trait` or `impl` keyword.
+ let is_trait = tokens
+ .iter()
+ .find_map(|token| match token {
+ TokenTree::Ident(ident) => match ident.to_string().as_str() {
+ "trait" => Some(true),
+ "impl" => Some(false),
+ _ => None,
+ },
+ _ => None,
+ })
+ .expect("#[vtable] attribute should only be applied to trait or impl block");
+
+ // Retrieve the main body. The main body should be the last token tree.
+ let body = match tokens.pop() {
+ Some(TokenTree::Group(group)) if group.delimiter() == Delimiter::Brace => group,
+ _ => panic!("cannot locate main body of trait or impl block"),
+ };
+
+ let mut body_it = body.stream().into_iter();
+ let mut functions = Vec::new();
+ let mut consts = HashSet::new();
+ while let Some(token) = body_it.next() {
+ match token {
+ TokenTree::Ident(ident) if ident.to_string() == "fn" => {
+ let fn_name = match body_it.next() {
+ Some(TokenTree::Ident(ident)) => ident.to_string(),
+ // Possibly we've encountered a fn pointer type instead.
+ _ => continue,
+ };
+ functions.push(fn_name);
+ }
+ TokenTree::Ident(ident) if ident.to_string() == "const" => {
+ let const_name = match body_it.next() {
+ Some(TokenTree::Ident(ident)) => ident.to_string(),
+ // Possibly we've encountered an inline const block instead.
+ _ => continue,
+ };
+ consts.insert(const_name);
+ }
+ _ => (),
+ }
+ }
+
+ let mut const_items;
+ if is_trait {
+ const_items = "
+ /// A marker to prevent implementors from forgetting to use the [`#[vtable]`](vtable)
+ /// attribute when implementing this trait.
+ const USE_VTABLE_ATTR: ();
+ "
+ .to_owned();
+
+ for f in functions {
+ let gen_const_name = format!("HAS_{}", f.to_uppercase());
+ // Skip if it's declared already -- this allows user override.
+ if consts.contains(&gen_const_name) {
+ continue;
+ }
+ // We don't know at the implementation site whether a method is required or provided
+ // so we have to generate a const for all methods.
+ write!(
+ const_items,
+ "/// Indicates if the `{f}` method is overridden by the implementor.
+ const {gen_const_name}: bool = false;",
+ )
+ .unwrap();
+ consts.insert(gen_const_name);
+ }
+ } else {
+ const_items = "const USE_VTABLE_ATTR: () = ();".to_owned();
+
+ for f in functions {
+ let gen_const_name = format!("HAS_{}", f.to_uppercase());
+ if consts.contains(&gen_const_name) {
+ continue;
+ }
+ write!(const_items, "const {gen_const_name}: bool = true;").unwrap();
+ }
+ }
+
+ let new_body = vec![const_items.parse().unwrap(), body.stream()]
+ .into_iter()
+ .collect();
+ tokens.push(TokenTree::Group(Group::new(Delimiter::Brace, new_body)));
+ tokens.into_iter().collect()
+}
diff --git a/rust/macros/zeroable.rs b/rust/macros/zeroable.rs
new file mode 100644
index 0000000000..0d605c46ab
--- /dev/null
+++ b/rust/macros/zeroable.rs
@@ -0,0 +1,72 @@
+// SPDX-License-Identifier: GPL-2.0
+
+use crate::helpers::{parse_generics, Generics};
+use proc_macro::{TokenStream, TokenTree};
+
+pub(crate) fn derive(input: TokenStream) -> TokenStream {
+ let (
+ Generics {
+ impl_generics,
+ ty_generics,
+ },
+ mut rest,
+ ) = parse_generics(input);
+ // This should be the body of the struct `{...}`.
+ let last = rest.pop();
+ // Now we insert `Zeroable` as a bound for every generic parameter in `impl_generics`.
+ let mut new_impl_generics = Vec::with_capacity(impl_generics.len());
+ // Are we inside of a generic where we want to add `Zeroable`?
+ let mut in_generic = !impl_generics.is_empty();
+ // Have we already inserted `Zeroable`?
+ let mut inserted = false;
+ // Level of `<>` nestings.
+ let mut nested = 0;
+ for tt in impl_generics {
+ match &tt {
+ // If we find a `,`, then we have finished a generic/constant/lifetime parameter.
+ TokenTree::Punct(p) if nested == 0 && p.as_char() == ',' => {
+ if in_generic && !inserted {
+ new_impl_generics.extend(quote! { : ::kernel::init::Zeroable });
+ }
+ in_generic = true;
+ inserted = false;
+ new_impl_generics.push(tt);
+ }
+ // If we find `'`, then we are entering a lifetime.
+ TokenTree::Punct(p) if nested == 0 && p.as_char() == '\'' => {
+ in_generic = false;
+ new_impl_generics.push(tt);
+ }
+ TokenTree::Punct(p) if nested == 0 && p.as_char() == ':' => {
+ new_impl_generics.push(tt);
+ if in_generic {
+ new_impl_generics.extend(quote! { ::kernel::init::Zeroable + });
+ inserted = true;
+ }
+ }
+ TokenTree::Punct(p) if p.as_char() == '<' => {
+ nested += 1;
+ new_impl_generics.push(tt);
+ }
+ TokenTree::Punct(p) if p.as_char() == '>' => {
+ assert!(nested > 0);
+ nested -= 1;
+ new_impl_generics.push(tt);
+ }
+ _ => new_impl_generics.push(tt),
+ }
+ }
+ assert_eq!(nested, 0);
+ if in_generic && !inserted {
+ new_impl_generics.extend(quote! { : ::kernel::init::Zeroable });
+ }
+ quote! {
+ ::kernel::__derive_zeroable!(
+ parse_input:
+ @sig(#(#rest)*),
+ @impl_generics(#(#new_impl_generics)*),
+ @ty_generics(#(#ty_generics)*),
+ @body(#last),
+ );
+ }
+}