Diffstat (limited to 'rust/macros')
-rw-r--r--  rust/macros/export.rs        29
-rw-r--r--  rust/macros/helpers.rs      153
-rw-r--r--  rust/macros/kunit.rs        161
-rw-r--r--  rust/macros/lib.rs          164
-rw-r--r--  rust/macros/module.rs        16
-rw-r--r--  rust/macros/pin_data.rs     129
-rw-r--r--  rust/macros/pinned_drop.rs   49
-rw-r--r--  rust/macros/quote.rs         28
-rw-r--r--  rust/macros/zeroable.rs      73
9 files changed, 288 insertions, 514 deletions
diff --git a/rust/macros/export.rs b/rust/macros/export.rs new file mode 100644 index 000000000000..a08f6337d5c8 --- /dev/null +++ b/rust/macros/export.rs @@ -0,0 +1,29 @@ +// SPDX-License-Identifier: GPL-2.0 + +use crate::helpers::function_name; +use proc_macro::TokenStream; + +/// Please see [`crate::export`] for documentation. +pub(crate) fn export(_attr: TokenStream, ts: TokenStream) -> TokenStream { + let Some(name) = function_name(ts.clone()) else { + return "::core::compile_error!(\"The #[export] attribute must be used on a function.\");" + .parse::<TokenStream>() + .unwrap(); + }; + + // This verifies that the function has the same signature as the declaration generated by + // bindgen. It makes use of the fact that all branches of an if/else must have the same type. + let signature_check = quote!( + const _: () = { + if true { + ::kernel::bindings::#name + } else { + #name + }; + }; + ); + + let no_mangle = quote!(#[no_mangle]); + + TokenStream::from_iter([signature_check, no_mangle, ts]) +} diff --git a/rust/macros/helpers.rs b/rust/macros/helpers.rs index 563dcd2b7ace..a3ee27e29a6f 100644 --- a/rust/macros/helpers.rs +++ b/rust/macros/helpers.rs @@ -1,6 +1,6 @@ // SPDX-License-Identifier: GPL-2.0 -use proc_macro::{token_stream, Group, TokenStream, TokenTree}; +use proc_macro::{token_stream, Group, Ident, TokenStream, TokenTree}; pub(crate) fn try_ident(it: &mut token_stream::IntoIter) -> Option<String> { if let Some(TokenTree::Ident(ident)) = it.next() { @@ -70,148 +70,19 @@ pub(crate) fn expect_end(it: &mut token_stream::IntoIter) { } } -/// Parsed generics. -/// -/// See the field documentation for an explanation what each of the fields represents. -/// -/// # Examples -/// -/// ```rust,ignore -/// # let input = todo!(); -/// let (Generics { decl_generics, impl_generics, ty_generics }, rest) = parse_generics(input); -/// quote! { -/// struct Foo<$($decl_generics)*> { -/// // ... -/// } -/// -/// impl<$impl_generics> Foo<$ty_generics> { -/// fn foo() { -/// // ... -/// } -/// } -/// } -/// ``` -pub(crate) struct Generics { - /// The generics with bounds and default values (e.g. `T: Clone, const N: usize = 0`). - /// - /// Use this on type definitions e.g. `struct Foo<$decl_generics> ...` (or `union`/`enum`). - pub(crate) decl_generics: Vec<TokenTree>, - /// The generics with bounds (e.g. `T: Clone, const N: usize`). - /// - /// Use this on `impl` blocks e.g. `impl<$impl_generics> Trait for ...`. - pub(crate) impl_generics: Vec<TokenTree>, - /// The generics without bounds and without default values (e.g. `T, N`). - /// - /// Use this when you use the type that is declared with these generics e.g. - /// `Foo<$ty_generics>`. - pub(crate) ty_generics: Vec<TokenTree>, -} - -/// Parses the given `TokenStream` into `Generics` and the rest. -/// -/// The generics are not present in the rest, but a where clause might remain. -pub(crate) fn parse_generics(input: TokenStream) -> (Generics, Vec<TokenTree>) { - // The generics with bounds and default values. - let mut decl_generics = vec![]; - // `impl_generics`, the declared generics with their bounds. - let mut impl_generics = vec![]; - // Only the names of the generics, without any bounds. - let mut ty_generics = vec![]; - // Tokens not related to the generics e.g. the `where` token and definition. - let mut rest = vec![]; - // The current level of `<`. - let mut nesting = 0; - let mut toks = input.into_iter(); - // If we are at the beginning of a generic parameter. 
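For readers unfamiliar with the trick used in `signature_check` above: both arms of an `if`/`else` must unify to a single type, so two function items with differing signatures fail to compile once they are forced into a common function-pointer type. Below is a minimal userspace sketch of the same idea; the `ffi_decls` module and `my_func` are hypothetical stand-ins for the bindgen-generated declaration, not kernel code.

```rust
// Standalone sketch (not kernel code) of the type-equality check emitted by
// `#[export]`. `ffi_decls::my_func` is a hypothetical stand-in for the
// declaration that bindgen would generate from the C header.
mod ffi_decls {
    pub unsafe extern "C" fn my_func(_x: i32) -> i32 {
        unimplemented!()
    }
}

// The Rust definition that is supposed to match the declaration above.
unsafe extern "C" fn my_func(x: i32) -> i32 {
    x + 1
}

// Both branches coerce to the same function-pointer type; if the two
// signatures ever diverge, this constant stops compiling, which is exactly
// what the generated `signature_check` relies on.
const _: () = {
    if true {
        ffi_decls::my_func
    } else {
        my_func
    };
};

fn main() {}
```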
- let mut at_start = true; - let mut skip_until_comma = false; - while let Some(tt) = toks.next() { - if nesting == 1 && matches!(&tt, TokenTree::Punct(p) if p.as_char() == '>') { - // Found the end of the generics. - break; - } else if nesting >= 1 { - decl_generics.push(tt.clone()); - } - match tt.clone() { - TokenTree::Punct(p) if p.as_char() == '<' => { - if nesting >= 1 && !skip_until_comma { - // This is inside of the generics and part of some bound. - impl_generics.push(tt); - } - nesting += 1; - } - TokenTree::Punct(p) if p.as_char() == '>' => { - // This is a parsing error, so we just end it here. - if nesting == 0 { - break; - } else { - nesting -= 1; - if nesting >= 1 && !skip_until_comma { - // We are still inside of the generics and part of some bound. - impl_generics.push(tt); - } - } - } - TokenTree::Punct(p) if skip_until_comma && p.as_char() == ',' => { - if nesting == 1 { - impl_generics.push(tt.clone()); - impl_generics.push(tt); - skip_until_comma = false; - } - } - _ if !skip_until_comma => { - match nesting { - // If we haven't entered the generics yet, we still want to keep these tokens. - 0 => rest.push(tt), - 1 => { - // Here depending on the token, it might be a generic variable name. - match tt.clone() { - TokenTree::Ident(i) if at_start && i.to_string() == "const" => { - let Some(name) = toks.next() else { - // Parsing error. - break; - }; - impl_generics.push(tt); - impl_generics.push(name.clone()); - ty_generics.push(name.clone()); - decl_generics.push(name); - at_start = false; - } - TokenTree::Ident(_) if at_start => { - impl_generics.push(tt.clone()); - ty_generics.push(tt); - at_start = false; - } - TokenTree::Punct(p) if p.as_char() == ',' => { - impl_generics.push(tt.clone()); - ty_generics.push(tt); - at_start = true; - } - // Lifetimes begin with `'`. - TokenTree::Punct(p) if p.as_char() == '\'' && at_start => { - impl_generics.push(tt.clone()); - ty_generics.push(tt); - } - // Generics can have default values, we skip these. - TokenTree::Punct(p) if p.as_char() == '=' => { - skip_until_comma = true; - } - _ => impl_generics.push(tt), - } - } - _ => impl_generics.push(tt), +/// Given a function declaration, finds the name of the function. +pub(crate) fn function_name(input: TokenStream) -> Option<Ident> { + let mut input = input.into_iter(); + while let Some(token) = input.next() { + match token { + TokenTree::Ident(i) if i.to_string() == "fn" => { + if let Some(TokenTree::Ident(i)) = input.next() { + return Some(i); } + return None; } - _ => {} + _ => continue, } } - rest.extend(toks); - ( - Generics { - impl_generics, - decl_generics, - ty_generics, - }, - rest, - ) + None } diff --git a/rust/macros/kunit.rs b/rust/macros/kunit.rs new file mode 100644 index 000000000000..4f553ecf40c0 --- /dev/null +++ b/rust/macros/kunit.rs @@ -0,0 +1,161 @@ +// SPDX-License-Identifier: GPL-2.0 + +//! Procedural macro to run KUnit tests using a user-space like syntax. +//! +//! Copyright (c) 2023 José Expósito <jose.exposito89@gmail.com> + +use proc_macro::{Delimiter, Group, TokenStream, TokenTree}; +use std::fmt::Write; + +pub(crate) fn kunit_tests(attr: TokenStream, ts: TokenStream) -> TokenStream { + let attr = attr.to_string(); + + if attr.is_empty() { + panic!("Missing test name in `#[kunit_tests(test_name)]` macro") + } + + if attr.len() > 255 { + panic!( + "The test suite name `{}` exceeds the maximum length of 255 bytes", + attr + ) + } + + let mut tokens: Vec<_> = ts.into_iter().collect(); + + // Scan for the `mod` keyword. 
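The new `function_name` helper above simply scans the token stream for the `fn` keyword and returns the identifier that follows it. Here is a sketch of the same scan using the external `proc_macro2` crate as a stand-in, since the real `proc_macro` types are only usable inside an actual macro expansion.

```rust
// Userspace sketch of `function_name`, with `proc_macro2` standing in for
// `proc_macro`. The scan returns the identifier that follows the first `fn`.
use proc_macro2::{Ident, TokenStream, TokenTree};

fn function_name(input: TokenStream) -> Option<Ident> {
    let mut input = input.into_iter();
    while let Some(token) = input.next() {
        if let TokenTree::Ident(i) = token {
            if i.to_string() == "fn" {
                if let Some(TokenTree::Ident(name)) = input.next() {
                    return Some(name);
                }
                return None;
            }
        }
    }
    None
}

fn main() {
    let ts: TokenStream = "pub unsafe extern \"C\" fn my_func(x: i32) -> i32 { x + 1 }"
        .parse()
        .unwrap();
    assert_eq!(function_name(ts).unwrap().to_string(), "my_func");
}
```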
+ tokens + .iter() + .find_map(|token| match token { + TokenTree::Ident(ident) => match ident.to_string().as_str() { + "mod" => Some(true), + _ => None, + }, + _ => None, + }) + .expect("`#[kunit_tests(test_name)]` attribute should only be applied to modules"); + + // Retrieve the main body. The main body should be the last token tree. + let body = match tokens.pop() { + Some(TokenTree::Group(group)) if group.delimiter() == Delimiter::Brace => group, + _ => panic!("Cannot locate main body of module"), + }; + + // Get the functions set as tests. Search for `[test]` -> `fn`. + let mut body_it = body.stream().into_iter(); + let mut tests = Vec::new(); + while let Some(token) = body_it.next() { + match token { + TokenTree::Group(ident) if ident.to_string() == "[test]" => match body_it.next() { + Some(TokenTree::Ident(ident)) if ident.to_string() == "fn" => { + let test_name = match body_it.next() { + Some(TokenTree::Ident(ident)) => ident.to_string(), + _ => continue, + }; + tests.push(test_name); + } + _ => continue, + }, + _ => (), + } + } + + // Add `#[cfg(CONFIG_KUNIT)]` before the module declaration. + let config_kunit = "#[cfg(CONFIG_KUNIT)]".to_owned().parse().unwrap(); + tokens.insert( + 0, + TokenTree::Group(Group::new(Delimiter::None, config_kunit)), + ); + + // Generate the test KUnit test suite and a test case for each `#[test]`. + // The code generated for the following test module: + // + // ``` + // #[kunit_tests(kunit_test_suit_name)] + // mod tests { + // #[test] + // fn foo() { + // assert_eq!(1, 1); + // } + // + // #[test] + // fn bar() { + // assert_eq!(2, 2); + // } + // } + // ``` + // + // Looks like: + // + // ``` + // unsafe extern "C" fn kunit_rust_wrapper_foo(_test: *mut kernel::bindings::kunit) { foo(); } + // unsafe extern "C" fn kunit_rust_wrapper_bar(_test: *mut kernel::bindings::kunit) { bar(); } + // + // static mut TEST_CASES: [kernel::bindings::kunit_case; 3] = [ + // kernel::kunit::kunit_case(kernel::c_str!("foo"), kunit_rust_wrapper_foo), + // kernel::kunit::kunit_case(kernel::c_str!("bar"), kunit_rust_wrapper_bar), + // kernel::kunit::kunit_case_null(), + // ]; + // + // kernel::kunit_unsafe_test_suite!(kunit_test_suit_name, TEST_CASES); + // ``` + let mut kunit_macros = "".to_owned(); + let mut test_cases = "".to_owned(); + for test in &tests { + let kunit_wrapper_fn_name = format!("kunit_rust_wrapper_{}", test); + let kunit_wrapper = format!( + "unsafe extern \"C\" fn {}(_test: *mut kernel::bindings::kunit) {{ {}(); }}", + kunit_wrapper_fn_name, test + ); + writeln!(kunit_macros, "{kunit_wrapper}").unwrap(); + writeln!( + test_cases, + " kernel::kunit::kunit_case(kernel::c_str!(\"{}\"), {}),", + test, kunit_wrapper_fn_name + ) + .unwrap(); + } + + writeln!(kunit_macros).unwrap(); + writeln!( + kunit_macros, + "static mut TEST_CASES: [kernel::bindings::kunit_case; {}] = [\n{test_cases} kernel::kunit::kunit_case_null(),\n];", + tests.len() + 1 + ) + .unwrap(); + + writeln!( + kunit_macros, + "kernel::kunit_unsafe_test_suite!({attr}, TEST_CASES);" + ) + .unwrap(); + + // Remove the `#[test]` macros. + // We do this at a token level, in order to preserve span information. 
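The `[test]` detection above looks for a bracketed group whose text is `[test]` and then expects `fn` plus the test name; the attribute tokens themselves are stripped later (as `#` followed by the group) so that span information is preserved. A sketch of the detection step, again with `proc_macro2` as a stand-in for `proc_macro`:

```rust
// Sketch of the `[test]` scan used by `kunit_tests`. An attribute such as
// `#[test]` appears in a token stream as a `#` punct followed by a bracketed
// group whose text is "[test]".
use proc_macro2::{TokenStream, TokenTree};

fn test_fn_names(body: TokenStream) -> Vec<String> {
    let mut it = body.into_iter();
    let mut tests = Vec::new();
    while let Some(token) = it.next() {
        // Match the bracketed group, then expect `fn` and the test name.
        if let TokenTree::Group(g) = &token {
            if g.to_string() == "[test]" {
                if let Some(TokenTree::Ident(i)) = it.next() {
                    if i.to_string() == "fn" {
                        if let Some(TokenTree::Ident(name)) = it.next() {
                            tests.push(name.to_string());
                        }
                    }
                }
            }
        }
    }
    tests
}

fn main() {
    let body: TokenStream = "#[test] fn foo() {} #[test] fn bar() {}".parse().unwrap();
    assert_eq!(test_fn_names(body), ["foo", "bar"]);
}
```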
+ let mut new_body = vec![]; + let mut body_it = body.stream().into_iter(); + + while let Some(token) = body_it.next() { + match token { + TokenTree::Punct(ref c) if c.as_char() == '#' => match body_it.next() { + Some(TokenTree::Group(group)) if group.to_string() == "[test]" => (), + Some(next) => { + new_body.extend([token, next]); + } + _ => { + new_body.push(token); + } + }, + _ => { + new_body.push(token); + } + } + } + + let mut new_body = TokenStream::from_iter(new_body); + new_body.extend::<TokenStream>(kunit_macros.parse().unwrap()); + + tokens.push(TokenTree::Group(Group::new(Delimiter::Brace, new_body))); + + tokens.into_iter().collect() +} diff --git a/rust/macros/lib.rs b/rust/macros/lib.rs index d61bc6a56425..9acaa68c974e 100644 --- a/rust/macros/lib.rs +++ b/rust/macros/lib.rs @@ -9,13 +9,12 @@ #[macro_use] mod quote; mod concat_idents; +mod export; mod helpers; +mod kunit; mod module; mod paste; -mod pin_data; -mod pinned_drop; mod vtable; -mod zeroable; use proc_macro::TokenStream; @@ -36,7 +35,7 @@ use proc_macro::TokenStream; /// module!{ /// type: MyModule, /// name: "my_kernel_module", -/// author: "Rust for Linux Contributors", +/// authors: ["Rust for Linux Contributors"], /// description: "My very own kernel module!", /// license: "GPL", /// alias: ["alternate_module_name"], @@ -69,7 +68,7 @@ use proc_macro::TokenStream; /// module!{ /// type: MyDeviceDriverModule, /// name: "my_device_driver_module", -/// author: "Rust for Linux Contributors", +/// authors: ["Rust for Linux Contributors"], /// description: "My device driver requires firmware", /// license: "GPL", /// firmware: ["my_device_firmware1.bin", "my_device_firmware2.bin"], @@ -88,7 +87,7 @@ use proc_macro::TokenStream; /// # Supported argument types /// - `type`: type which implements the [`Module`] trait (required). /// - `name`: ASCII string literal of the name of the kernel module (required). -/// - `author`: string literal of the author of the kernel module. +/// - `authors`: array of ASCII string literals of the authors of the kernel module. /// - `description`: string literal of the description of the kernel module. /// - `license`: ASCII string literal of the license of the kernel module (required). /// - `alias`: array of ASCII string literals of the alias names of the kernel module. @@ -174,6 +173,29 @@ pub fn vtable(attr: TokenStream, ts: TokenStream) -> TokenStream { vtable::vtable(attr, ts) } +/// Export a function so that C code can call it via a header file. +/// +/// Functions exported using this macro can be called from C code using the declaration in the +/// appropriate header file. It should only be used in cases where C calls the function through a +/// header file; cases where C calls into Rust via a function pointer in a vtable (such as +/// `file_operations`) should not use this macro. +/// +/// This macro has the following effect: +/// +/// * Disables name mangling for this function. +/// * Verifies at compile-time that the function signature matches the declaration in the header +/// file. +/// +/// You must declare the signature of the Rust function in a header file that is included by +/// `rust/bindings/bindings_helper.h`. +/// +/// This macro is *not* the same as the C macros `EXPORT_SYMBOL_*`. All Rust symbols are currently +/// automatically exported with `EXPORT_SYMBOL_GPL`. +#[proc_macro_attribute] +pub fn export(attr: TokenStream, ts: TokenStream) -> TokenStream { + export::export(attr, ts) +} + /// Concatenate two identifiers. 
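To make the `#[export]` documentation above concrete, a hypothetical use might look like the following. The function name, the C declaration and the import path are illustrative assumptions; the attribute only disables name mangling and emits the compile-time signature check.

```rust
// Hypothetical C declaration, added to a header that is included from
// rust/bindings/bindings_helper.h so that bindgen picks it up:
//
//     int rust_add_one(int x);
//
// The matching Rust definition is then marked with `#[export]`. The import
// path below is an assumption for illustration, and the example only builds
// inside a kernel tree.
use kernel::macros::export;

#[export]
pub unsafe extern "C" fn rust_add_one(x: i32) -> i32 {
    x + 1
}
```

C code can then call `rust_add_one()` through the header declaration; as noted in the documentation, the symbol export itself still happens automatically via `EXPORT_SYMBOL_GPL`.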
/// /// This is useful in macros that need to declare or reference items with names @@ -232,106 +254,6 @@ pub fn concat_idents(ts: TokenStream) -> TokenStream { concat_idents::concat_idents(ts) } -/// Used to specify the pinning information of the fields of a struct. -/// -/// This is somewhat similar in purpose as -/// [pin-project-lite](https://crates.io/crates/pin-project-lite). -/// Place this macro on a struct definition and then `#[pin]` in front of the attributes of each -/// field you want to structurally pin. -/// -/// This macro enables the use of the [`pin_init!`] macro. When pin-initializing a `struct`, -/// then `#[pin]` directs the type of initializer that is required. -/// -/// If your `struct` implements `Drop`, then you need to add `PinnedDrop` as arguments to this -/// macro, and change your `Drop` implementation to `PinnedDrop` annotated with -/// `#[`[`macro@pinned_drop`]`]`, since dropping pinned values requires extra care. -/// -/// # Examples -/// -/// ``` -/// # #![feature(lint_reasons)] -/// # use kernel::prelude::*; -/// # use std::{sync::Mutex, process::Command}; -/// # use kernel::macros::pin_data; -/// #[pin_data] -/// struct DriverData { -/// #[pin] -/// queue: Mutex<KVec<Command>>, -/// buf: KBox<[u8; 1024 * 1024]>, -/// } -/// ``` -/// -/// ``` -/// # #![feature(lint_reasons)] -/// # use kernel::prelude::*; -/// # use std::{sync::Mutex, process::Command}; -/// # use core::pin::Pin; -/// # pub struct Info; -/// # mod bindings { -/// # pub unsafe fn destroy_info(_ptr: *mut super::Info) {} -/// # } -/// use kernel::macros::{pin_data, pinned_drop}; -/// -/// #[pin_data(PinnedDrop)] -/// struct DriverData { -/// #[pin] -/// queue: Mutex<KVec<Command>>, -/// buf: KBox<[u8; 1024 * 1024]>, -/// raw_info: *mut Info, -/// } -/// -/// #[pinned_drop] -/// impl PinnedDrop for DriverData { -/// fn drop(self: Pin<&mut Self>) { -/// unsafe { bindings::destroy_info(self.raw_info) }; -/// } -/// } -/// # fn main() {} -/// ``` -/// -/// [`pin_init!`]: ../kernel/macro.pin_init.html -// ^ cannot use direct link, since `kernel` is not a dependency of `macros`. -#[proc_macro_attribute] -pub fn pin_data(inner: TokenStream, item: TokenStream) -> TokenStream { - pin_data::pin_data(inner, item) -} - -/// Used to implement `PinnedDrop` safely. -/// -/// Only works on structs that are annotated via `#[`[`macro@pin_data`]`]`. -/// -/// # Examples -/// -/// ``` -/// # #![feature(lint_reasons)] -/// # use kernel::prelude::*; -/// # use macros::{pin_data, pinned_drop}; -/// # use std::{sync::Mutex, process::Command}; -/// # use core::pin::Pin; -/// # mod bindings { -/// # pub struct Info; -/// # pub unsafe fn destroy_info(_ptr: *mut Info) {} -/// # } -/// #[pin_data(PinnedDrop)] -/// struct DriverData { -/// #[pin] -/// queue: Mutex<KVec<Command>>, -/// buf: KBox<[u8; 1024 * 1024]>, -/// raw_info: *mut bindings::Info, -/// } -/// -/// #[pinned_drop] -/// impl PinnedDrop for DriverData { -/// fn drop(self: Pin<&mut Self>) { -/// unsafe { bindings::destroy_info(self.raw_info) }; -/// } -/// } -/// ``` -#[proc_macro_attribute] -pub fn pinned_drop(args: TokenStream, input: TokenStream) -> TokenStream { - pinned_drop::pinned_drop(args, input) -} - /// Paste identifiers together. /// /// Within the `paste!` macro, identifiers inside `[<` and `>]` are concatenated together to form a @@ -472,23 +394,29 @@ pub fn paste(input: TokenStream) -> TokenStream { tokens.into_iter().collect() } -/// Derives the [`Zeroable`] trait for the given struct. 
+/// Registers a KUnit test suite and its test cases using a user-space like syntax. /// -/// This can only be used for structs where every field implements the [`Zeroable`] trait. +/// This macro should be used on modules. If `CONFIG_KUNIT` (in `.config`) is `n`, the target module +/// is ignored. /// /// # Examples /// -/// ``` -/// use kernel::macros::Zeroable; +/// ```ignore +/// # use macros::kunit_tests; +/// #[kunit_tests(kunit_test_suit_name)] +/// mod tests { +/// #[test] +/// fn foo() { +/// assert_eq!(1, 1); +/// } /// -/// #[derive(Zeroable)] -/// pub struct DriverData { -/// id: i64, -/// buf_ptr: *mut u8, -/// len: usize, +/// #[test] +/// fn bar() { +/// assert_eq!(2, 2); +/// } /// } /// ``` -#[proc_macro_derive(Zeroable)] -pub fn derive_zeroable(input: TokenStream) -> TokenStream { - zeroable::derive(input) +#[proc_macro_attribute] +pub fn kunit_tests(attr: TokenStream, ts: TokenStream) -> TokenStream { + kunit::kunit_tests(attr, ts) } diff --git a/rust/macros/module.rs b/rust/macros/module.rs index cdf94f4982df..a9418fbc9b44 100644 --- a/rust/macros/module.rs +++ b/rust/macros/module.rs @@ -56,7 +56,7 @@ impl<'a> ModInfoBuilder<'a> { " {cfg} #[doc(hidden)] - #[link_section = \".modinfo\"] + #[cfg_attr(not(target_os = \"macos\"), link_section = \".modinfo\")] #[used] pub static __{module}_{counter}: [u8; {length}] = *{string}; ", @@ -95,6 +95,7 @@ struct ModuleInfo { license: String, name: String, author: Option<String>, + authors: Option<Vec<String>>, description: Option<String>, alias: Option<Vec<String>>, firmware: Option<Vec<String>>, @@ -108,6 +109,7 @@ impl ModuleInfo { "type", "name", "author", + "authors", "description", "license", "alias", @@ -136,6 +138,7 @@ impl ModuleInfo { "type" => info.type_ = expect_ident(it), "name" => info.name = expect_string_ascii(it), "author" => info.author = Some(expect_string(it)), + "authors" => info.authors = Some(expect_string_array(it)), "description" => info.description = Some(expect_string(it)), "license" => info.license = expect_string_ascii(it), "alias" => info.alias = Some(expect_string_array(it)), @@ -186,6 +189,11 @@ pub(crate) fn module(ts: TokenStream) -> TokenStream { if let Some(author) = info.author { modinfo.emit("author", &author); } + if let Some(authors) = info.authors { + for author in authors { + modinfo.emit("author", &author); + } + } if let Some(description) = info.description { modinfo.emit("description", &description); } @@ -228,6 +236,10 @@ pub(crate) fn module(ts: TokenStream) -> TokenStream { kernel::ThisModule::from_ptr(core::ptr::null_mut()) }}; + /// The `LocalModule` type is the type of the module created by `module!`, + /// `module_pci_driver!`, `module_platform_driver!`, etc. + type LocalModule = {type_}; + impl kernel::ModuleMetadata for {type_} {{ const NAME: &'static kernel::str::CStr = kernel::c_str!(\"{name}\"); }} @@ -236,7 +248,7 @@ pub(crate) fn module(ts: TokenStream) -> TokenStream { mod __module_init {{ mod __module_init {{ use super::super::{type_}; - use kernel::init::PinInit; + use pin_init::PinInit; /// The \"Rust loadable module\" mark. 
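As an illustration of the plural key handled by `module.rs` above, a module declaration can now list several authors; one `author` modinfo entry is emitted per element, and the singular `author` key remains accepted. A minimal sketch, assuming a kernel tree that provides the `kernel` crate:

```rust
// Minimal module sketch using the plural `authors` key added in this change.
// Only compiles inside a kernel build; names and strings are illustrative.
use kernel::prelude::*;

module! {
    type: MyModule,
    name: "my_kernel_module",
    authors: ["First Author", "Second Author"],
    description: "Example of the plural `authors` key",
    license: "GPL",
}

struct MyModule;

impl kernel::Module for MyModule {
    fn init(_module: &'static ThisModule) -> Result<Self> {
        pr_info!("my_kernel_module loaded\n");
        Ok(MyModule)
    }
}
```

The generated code now also defines a `LocalModule` alias for the module type, which helpers such as `module_pci_driver!` and `module_platform_driver!` can refer to.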
// diff --git a/rust/macros/pin_data.rs b/rust/macros/pin_data.rs deleted file mode 100644 index 1d4a3547c684..000000000000 --- a/rust/macros/pin_data.rs +++ /dev/null @@ -1,129 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 OR MIT - -use crate::helpers::{parse_generics, Generics}; -use proc_macro::{Group, Punct, Spacing, TokenStream, TokenTree}; - -pub(crate) fn pin_data(args: TokenStream, input: TokenStream) -> TokenStream { - // This proc-macro only does some pre-parsing and then delegates the actual parsing to - // `kernel::__pin_data!`. - - let ( - Generics { - impl_generics, - decl_generics, - ty_generics, - }, - rest, - ) = parse_generics(input); - // The struct definition might contain the `Self` type. Since `__pin_data!` will define a new - // type with the same generics and bounds, this poses a problem, since `Self` will refer to the - // new type as opposed to this struct definition. Therefore we have to replace `Self` with the - // concrete name. - - // Errors that occur when replacing `Self` with `struct_name`. - let mut errs = TokenStream::new(); - // The name of the struct with ty_generics. - let struct_name = rest - .iter() - .skip_while(|tt| !matches!(tt, TokenTree::Ident(i) if i.to_string() == "struct")) - .nth(1) - .and_then(|tt| match tt { - TokenTree::Ident(_) => { - let tt = tt.clone(); - let mut res = vec![tt]; - if !ty_generics.is_empty() { - // We add this, so it is maximally compatible with e.g. `Self::CONST` which - // will be replaced by `StructName::<$generics>::CONST`. - res.push(TokenTree::Punct(Punct::new(':', Spacing::Joint))); - res.push(TokenTree::Punct(Punct::new(':', Spacing::Alone))); - res.push(TokenTree::Punct(Punct::new('<', Spacing::Alone))); - res.extend(ty_generics.iter().cloned()); - res.push(TokenTree::Punct(Punct::new('>', Spacing::Alone))); - } - Some(res) - } - _ => None, - }) - .unwrap_or_else(|| { - // If we did not find the name of the struct then we will use `Self` as the replacement - // and add a compile error to ensure it does not compile. - errs.extend( - "::core::compile_error!(\"Could not locate type name.\");" - .parse::<TokenStream>() - .unwrap(), - ); - "Self".parse::<TokenStream>().unwrap().into_iter().collect() - }); - let impl_generics = impl_generics - .into_iter() - .flat_map(|tt| replace_self_and_deny_type_defs(&struct_name, tt, &mut errs)) - .collect::<Vec<_>>(); - let mut rest = rest - .into_iter() - .flat_map(|tt| { - // We ignore top level `struct` tokens, since they would emit a compile error. - if matches!(&tt, TokenTree::Ident(i) if i.to_string() == "struct") { - vec![tt] - } else { - replace_self_and_deny_type_defs(&struct_name, tt, &mut errs) - } - }) - .collect::<Vec<_>>(); - // This should be the body of the struct `{...}`. - let last = rest.pop(); - let mut quoted = quote!(::kernel::__pin_data! { - parse_input: - @args(#args), - @sig(#(#rest)*), - @impl_generics(#(#impl_generics)*), - @ty_generics(#(#ty_generics)*), - @decl_generics(#(#decl_generics)*), - @body(#last), - }); - quoted.extend(errs); - quoted -} - -/// Replaces `Self` with `struct_name` and errors on `enum`, `trait`, `struct` `union` and `impl` -/// keywords. -/// -/// The error is appended to `errs` to allow normal parsing to continue. 
-fn replace_self_and_deny_type_defs( - struct_name: &Vec<TokenTree>, - tt: TokenTree, - errs: &mut TokenStream, -) -> Vec<TokenTree> { - match tt { - TokenTree::Ident(ref i) - if i.to_string() == "enum" - || i.to_string() == "trait" - || i.to_string() == "struct" - || i.to_string() == "union" - || i.to_string() == "impl" => - { - errs.extend( - format!( - "::core::compile_error!(\"Cannot use `{i}` inside of struct definition with \ - `#[pin_data]`.\");" - ) - .parse::<TokenStream>() - .unwrap() - .into_iter() - .map(|mut tok| { - tok.set_span(tt.span()); - tok - }), - ); - vec![tt] - } - TokenTree::Ident(i) if i.to_string() == "Self" => struct_name.clone(), - TokenTree::Literal(_) | TokenTree::Punct(_) | TokenTree::Ident(_) => vec![tt], - TokenTree::Group(g) => vec![TokenTree::Group(Group::new( - g.delimiter(), - g.stream() - .into_iter() - .flat_map(|tt| replace_self_and_deny_type_defs(struct_name, tt, errs)) - .collect(), - ))], - } -} diff --git a/rust/macros/pinned_drop.rs b/rust/macros/pinned_drop.rs deleted file mode 100644 index 88fb72b20660..000000000000 --- a/rust/macros/pinned_drop.rs +++ /dev/null @@ -1,49 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 OR MIT - -use proc_macro::{TokenStream, TokenTree}; - -pub(crate) fn pinned_drop(_args: TokenStream, input: TokenStream) -> TokenStream { - let mut toks = input.into_iter().collect::<Vec<_>>(); - assert!(!toks.is_empty()); - // Ensure that we have an `impl` item. - assert!(matches!(&toks[0], TokenTree::Ident(i) if i.to_string() == "impl")); - // Ensure that we are implementing `PinnedDrop`. - let mut nesting: usize = 0; - let mut pinned_drop_idx = None; - for (i, tt) in toks.iter().enumerate() { - match tt { - TokenTree::Punct(p) if p.as_char() == '<' => { - nesting += 1; - } - TokenTree::Punct(p) if p.as_char() == '>' => { - nesting = nesting.checked_sub(1).unwrap(); - continue; - } - _ => {} - } - if i >= 1 && nesting == 0 { - // Found the end of the generics, this should be `PinnedDrop`. - assert!( - matches!(tt, TokenTree::Ident(i) if i.to_string() == "PinnedDrop"), - "expected 'PinnedDrop', found: '{:?}'", - tt - ); - pinned_drop_idx = Some(i); - break; - } - } - let idx = pinned_drop_idx - .unwrap_or_else(|| panic!("Expected an `impl` block implementing `PinnedDrop`.")); - // Fully qualify the `PinnedDrop`, as to avoid any tampering. - toks.splice(idx..idx, quote!(::kernel::init::)); - // Take the `{}` body and call the declarative macro. - if let Some(TokenTree::Group(last)) = toks.pop() { - let last = last.stream(); - quote!(::kernel::__pinned_drop! { - @impl_sig(#(#toks)*), - @impl_body(#last), - }) - } else { - TokenStream::from_iter(toks) - } -} diff --git a/rust/macros/quote.rs b/rust/macros/quote.rs index 33a199e4f176..92cacc4067c9 100644 --- a/rust/macros/quote.rs +++ b/rust/macros/quote.rs @@ -2,6 +2,7 @@ use proc_macro::{TokenStream, TokenTree}; +#[allow(dead_code)] pub(crate) trait ToTokens { fn to_tokens(&self, tokens: &mut TokenStream); } @@ -20,6 +21,12 @@ impl ToTokens for proc_macro::Group { } } +impl ToTokens for proc_macro::Ident { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.extend([TokenTree::from(self.clone())]); + } +} + impl ToTokens for TokenTree { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.extend([self.clone()]); @@ -40,7 +47,7 @@ impl ToTokens for TokenStream { /// `quote` crate but provides only just enough functionality needed by the current `macros` crate. macro_rules! 
quote_spanned { ($span:expr => $($tt:tt)*) => {{ - let mut tokens; + let mut tokens: ::std::vec::Vec<::proc_macro::TokenTree>; #[allow(clippy::vec_init_then_push)] { tokens = ::std::vec::Vec::new(); @@ -65,7 +72,8 @@ macro_rules! quote_spanned { quote_spanned!(@proc $v $span $($tt)*); }; (@proc $v:ident $span:ident ( $($inner:tt)* ) $($tt:tt)*) => { - let mut tokens = ::std::vec::Vec::new(); + #[allow(unused_mut)] + let mut tokens = ::std::vec::Vec::<::proc_macro::TokenTree>::new(); quote_spanned!(@proc tokens $span $($inner)*); $v.push(::proc_macro::TokenTree::Group(::proc_macro::Group::new( ::proc_macro::Delimiter::Parenthesis, @@ -136,6 +144,22 @@ macro_rules! quote_spanned { )); quote_spanned!(@proc $v $span $($tt)*); }; + (@proc $v:ident $span:ident = $($tt:tt)*) => { + $v.push(::proc_macro::TokenTree::Punct( + ::proc_macro::Punct::new('=', ::proc_macro::Spacing::Alone) + )); + quote_spanned!(@proc $v $span $($tt)*); + }; + (@proc $v:ident $span:ident # $($tt:tt)*) => { + $v.push(::proc_macro::TokenTree::Punct( + ::proc_macro::Punct::new('#', ::proc_macro::Spacing::Alone) + )); + quote_spanned!(@proc $v $span $($tt)*); + }; + (@proc $v:ident $span:ident _ $($tt:tt)*) => { + $v.push(::proc_macro::TokenTree::Ident(::proc_macro::Ident::new("_", $span))); + quote_spanned!(@proc $v $span $($tt)*); + }; (@proc $v:ident $span:ident $id:ident $($tt:tt)*) => { $v.push(::proc_macro::TokenTree::Ident(::proc_macro::Ident::new(stringify!($id), $span))); quote_spanned!(@proc $v $span $($tt)*); diff --git a/rust/macros/zeroable.rs b/rust/macros/zeroable.rs deleted file mode 100644 index cfee2cec18d5..000000000000 --- a/rust/macros/zeroable.rs +++ /dev/null @@ -1,73 +0,0 @@ -// SPDX-License-Identifier: GPL-2.0 - -use crate::helpers::{parse_generics, Generics}; -use proc_macro::{TokenStream, TokenTree}; - -pub(crate) fn derive(input: TokenStream) -> TokenStream { - let ( - Generics { - impl_generics, - decl_generics: _, - ty_generics, - }, - mut rest, - ) = parse_generics(input); - // This should be the body of the struct `{...}`. - let last = rest.pop(); - // Now we insert `Zeroable` as a bound for every generic parameter in `impl_generics`. - let mut new_impl_generics = Vec::with_capacity(impl_generics.len()); - // Are we inside of a generic where we want to add `Zeroable`? - let mut in_generic = !impl_generics.is_empty(); - // Have we already inserted `Zeroable`? - let mut inserted = false; - // Level of `<>` nestings. - let mut nested = 0; - for tt in impl_generics { - match &tt { - // If we find a `,`, then we have finished a generic/constant/lifetime parameter. - TokenTree::Punct(p) if nested == 0 && p.as_char() == ',' => { - if in_generic && !inserted { - new_impl_generics.extend(quote! { : ::kernel::init::Zeroable }); - } - in_generic = true; - inserted = false; - new_impl_generics.push(tt); - } - // If we find `'`, then we are entering a lifetime. - TokenTree::Punct(p) if nested == 0 && p.as_char() == '\'' => { - in_generic = false; - new_impl_generics.push(tt); - } - TokenTree::Punct(p) if nested == 0 && p.as_char() == ':' => { - new_impl_generics.push(tt); - if in_generic { - new_impl_generics.extend(quote! 
{ ::kernel::init::Zeroable + }); - inserted = true; - } - } - TokenTree::Punct(p) if p.as_char() == '<' => { - nested += 1; - new_impl_generics.push(tt); - } - TokenTree::Punct(p) if p.as_char() == '>' => { - assert!(nested > 0); - nested -= 1; - new_impl_generics.push(tt); - } - _ => new_impl_generics.push(tt), - } - } - assert_eq!(nested, 0); - if in_generic && !inserted { - new_impl_generics.extend(quote! { : ::kernel::init::Zeroable }); - } - quote! { - ::kernel::__derive_zeroable!( - parse_input: - @sig(#(#rest)*), - @impl_generics(#(#new_impl_generics)*), - @ty_generics(#(#ty_generics)*), - @body(#last), - ); - } -} |
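The quote.rs additions earlier in this diff (a `ToTokens` impl for `Ident` and new rules for `#`, `=` and `_`) exist so that `export.rs` can interpolate the identifier returned by `function_name` and emit tokens such as `#[no_mangle]` and `const _: () = { ... };`. Below is a userspace sketch of the same interpolation, using the external `quote` and `proc_macro2` crates as stand-ins for the in-tree helper.

```rust
// Sketch of the token stream that export.rs assembles, built here with the
// external `quote`/`proc_macro2` crates rather than the in-tree `quote!`.
use proc_macro2::{Ident, Span, TokenStream};
use quote::quote;

fn main() {
    // The identifier would come from `function_name()` in the real macro.
    let name = Ident::new("rust_add_one", Span::call_site());

    // Interpolating `#name` relies on `Ident: ToTokens`, mirroring the impl
    // added to rust/macros/quote.rs; `#[no_mangle]` and the `=` punct exercise
    // the new literal-token rules.
    let expansion: TokenStream = quote! {
        const _: () = {
            if true {
                ::kernel::bindings::#name
            } else {
                #name
            };
        };
        #[no_mangle]
    };

    println!("{expansion}");
}
```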