executor: fix unsoundness due to impl Trait, improve macro error handling. (#3425)
* executor-macros: don't parse function bodies.
* executor-macros: refactor for better recovery and IDE-friendliness on errors.
* executor-macros: disallow `impl Trait` in task arguments (see the sketch below). Fixes #3420
* Fix example using `impl Trait` in tasks.
parent 3d0c557138
commit 1f58e0efd0
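To make the `impl Trait` change concrete, here is a small before/after contrast. Both signatures are taken from the example fix at the bottom of this diff, and the rationale follows the new error message ("`impl Trait` is not allowed in task arguments. It is syntax sugar for generics, and tasks can't be generic."); this is an illustrative sketch, not a standalone compilable module, and the surrounding comments are mine.

```rust
// Rejected after this change: `impl Trait` is sugar for a generic parameter,
// but a task's future is stored in a single static pool (see TaskPool /
// TaskPoolRef in the macro output below), so the task cannot be generic.
#[embassy_executor::task]
async fn temperature(mut i2c: impl embedded_hal_1::i2c::I2c + 'static) { /* ... */ }

// Accepted: spell out the concrete device type instead, as the updated example does.
#[embassy_executor::task]
async fn temperature(mut i2c: I2cDevice<'static, NoopRawMutex, I2c<'static, Async>>) { /* ... */ }
```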
@@ -13,7 +13,7 @@ categories = [
 ]
 
 [dependencies]
-syn = { version = "2.0.15", features = ["full", "extra-traits"] }
+syn = { version = "2.0.15", features = ["full", "visit"] }
 quote = "1.0.9"
 darling = "0.20.1"
 proc-macro2 = "1.0.29"
@@ -1,28 +1,11 @@
 #![doc = include_str!("../README.md")]
 extern crate proc_macro;
 
-use darling::ast::NestedMeta;
 use proc_macro::TokenStream;
 
 mod macros;
 mod util;
 use macros::*;
-use syn::parse::{Parse, ParseBuffer};
-use syn::punctuated::Punctuated;
-use syn::Token;
-
-struct Args {
-    meta: Vec<NestedMeta>,
-}
-
-impl Parse for Args {
-    fn parse(input: &ParseBuffer) -> syn::Result<Self> {
-        let meta = Punctuated::<NestedMeta, Token![,]>::parse_terminated(input)?;
-        Ok(Args {
-            meta: meta.into_iter().collect(),
-        })
-    }
-}
-
 /// Declares an async task that can be run by `embassy-executor`. The optional `pool_size` parameter can be used to specify how
 /// many concurrent tasks can be spawned (default is 1) for the function.
@@ -56,17 +39,12 @@ impl Parse for Args
 /// ```
 #[proc_macro_attribute]
 pub fn task(args: TokenStream, item: TokenStream) -> TokenStream {
-    let args = syn::parse_macro_input!(args as Args);
-    let f = syn::parse_macro_input!(item as syn::ItemFn);
-
-    task::run(&args.meta, f).unwrap_or_else(|x| x).into()
+    task::run(args.into(), item.into()).into()
 }
 
 #[proc_macro_attribute]
 pub fn main_avr(args: TokenStream, item: TokenStream) -> TokenStream {
-    let args = syn::parse_macro_input!(args as Args);
-    let f = syn::parse_macro_input!(item as syn::ItemFn);
-    main::run(&args.meta, f, main::avr()).unwrap_or_else(|x| x).into()
+    main::run(args.into(), item.into(), &main::ARCH_AVR).into()
 }
 
 /// Creates a new `executor` instance and declares an application entry point for Cortex-M spawning the corresponding function body as an async task.
@@ -89,9 +67,7 @@ pub fn main_avr(args: TokenStream, item: TokenStream) -> TokenStream {
 /// ```
 #[proc_macro_attribute]
 pub fn main_cortex_m(args: TokenStream, item: TokenStream) -> TokenStream {
-    let args = syn::parse_macro_input!(args as Args);
-    let f = syn::parse_macro_input!(item as syn::ItemFn);
-    main::run(&args.meta, f, main::cortex_m()).unwrap_or_else(|x| x).into()
+    main::run(args.into(), item.into(), &main::ARCH_CORTEX_M).into()
 }
 
 /// Creates a new `executor` instance and declares an architecture agnostic application entry point spawning
@@ -116,11 +92,7 @@ pub fn main_cortex_m(args: TokenStream, item: TokenStream) -> TokenStream {
 /// ```
 #[proc_macro_attribute]
 pub fn main_spin(args: TokenStream, item: TokenStream) -> TokenStream {
-    let args = syn::parse_macro_input!(args as Args);
-    let f = syn::parse_macro_input!(item as syn::ItemFn);
-    main::run(&args.meta, f, main::spin(&args.meta))
-        .unwrap_or_else(|x| x)
-        .into()
+    main::run(args.into(), item.into(), &main::ARCH_SPIN).into()
 }
 
 /// Creates a new `executor` instance and declares an application entry point for RISC-V spawning the corresponding function body as an async task.
@@ -153,11 +125,7 @@ pub fn main_spin(args: TokenStream, item: TokenStream) -> TokenStream {
 /// ```
 #[proc_macro_attribute]
 pub fn main_riscv(args: TokenStream, item: TokenStream) -> TokenStream {
-    let args = syn::parse_macro_input!(args as Args);
-    let f = syn::parse_macro_input!(item as syn::ItemFn);
-    main::run(&args.meta, f, main::riscv(&args.meta))
-        .unwrap_or_else(|x| x)
-        .into()
+    main::run(args.into(), item.into(), &main::ARCH_RISCV).into()
 }
 
 /// Creates a new `executor` instance and declares an application entry point for STD spawning the corresponding function body as an async task.
@@ -180,9 +148,7 @@ pub fn main_riscv(args: TokenStream, item: TokenStream) -> TokenStream {
 /// ```
 #[proc_macro_attribute]
 pub fn main_std(args: TokenStream, item: TokenStream) -> TokenStream {
-    let args = syn::parse_macro_input!(args as Args);
-    let f = syn::parse_macro_input!(item as syn::ItemFn);
-    main::run(&args.meta, f, main::std()).unwrap_or_else(|x| x).into()
+    main::run(args.into(), item.into(), &main::ARCH_STD).into()
 }
 
 /// Creates a new `executor` instance and declares an application entry point for WASM spawning the corresponding function body as an async task.
@@ -205,7 +171,5 @@ pub fn main_std(args: TokenStream, item: TokenStream) -> TokenStream {
 /// ```
 #[proc_macro_attribute]
 pub fn main_wasm(args: TokenStream, item: TokenStream) -> TokenStream {
-    let args = syn::parse_macro_input!(args as Args);
-    let f = syn::parse_macro_input!(item as syn::ItemFn);
-    main::run(&args.meta, f, main::wasm()).unwrap_or_else(|x| x).into()
+    main::run(args.into(), item.into(), &main::ARCH_WASM).into()
 }
@@ -1,152 +1,107 @@
+use std::str::FromStr;
+
 use darling::export::NestedMeta;
-use darling::{Error, FromMeta};
+use darling::FromMeta;
 use proc_macro2::TokenStream;
 use quote::quote;
-use syn::{Expr, ReturnType, Type};
+use syn::{ReturnType, Type};
 
-use crate::util::ctxt::Ctxt;
+use crate::util::*;
 
-#[derive(Debug, FromMeta)]
+enum Flavor {
+    Standard,
+    Wasm,
+}
+
+pub(crate) struct Arch {
+    default_entry: Option<&'static str>,
+    flavor: Flavor,
+}
+
+pub static ARCH_AVR: Arch = Arch {
+    default_entry: Some("avr_device::entry"),
+    flavor: Flavor::Standard,
+};
+
+pub static ARCH_RISCV: Arch = Arch {
+    default_entry: Some("riscv_rt::entry"),
+    flavor: Flavor::Standard,
+};
+
+pub static ARCH_CORTEX_M: Arch = Arch {
+    default_entry: Some("cortex_m_rt::entry"),
+    flavor: Flavor::Standard,
+};
+
+pub static ARCH_SPIN: Arch = Arch {
+    default_entry: None,
+    flavor: Flavor::Standard,
+};
+
+pub static ARCH_STD: Arch = Arch {
+    default_entry: None,
+    flavor: Flavor::Standard,
+};
+
+pub static ARCH_WASM: Arch = Arch {
+    default_entry: Some("wasm_bindgen::prelude::wasm_bindgen(start)"),
+    flavor: Flavor::Wasm,
+};
+
+#[derive(Debug, FromMeta, Default)]
 struct Args {
     #[darling(default)]
     entry: Option<String>,
 }
 
-pub fn avr() -> TokenStream {
-    quote! {
-        #[avr_device::entry]
-        fn main() -> ! {
-            let mut executor = ::embassy_executor::Executor::new();
-            let executor = unsafe { __make_static(&mut executor) };
-
-            executor.run(|spawner| {
-                spawner.must_spawn(__embassy_main(spawner));
-            })
-        }
-    }
-}
-
-pub fn riscv(args: &[NestedMeta]) -> TokenStream {
-    let maybe_entry = match Args::from_list(args) {
-        Ok(args) => args.entry,
-        Err(e) => return e.write_errors(),
-    };
-
-    let entry = maybe_entry.unwrap_or("riscv_rt::entry".into());
-    let entry = match Expr::from_string(&entry) {
-        Ok(expr) => expr,
-        Err(e) => return e.write_errors(),
-    };
-
-    quote! {
-        #[#entry]
-        fn main() -> ! {
-            let mut executor = ::embassy_executor::Executor::new();
-            let executor = unsafe { __make_static(&mut executor) };
-            executor.run(|spawner| {
-                spawner.must_spawn(__embassy_main(spawner));
-            })
-        }
-    }
-}
-
-pub fn spin(args: &[NestedMeta]) -> TokenStream {
-    let maybe_entry = match Args::from_list(args) {
-        Ok(args) => args.entry,
-        Err(e) => return e.write_errors(),
-    };
-
-    let entry = match maybe_entry {
-        Some(str) => str,
-        None => return Error::missing_field("entry").write_errors(),
-    };
-    let entry = match Expr::from_string(&entry) {
-        Ok(expr) => expr,
-        Err(e) => return e.write_errors(),
-    };
-
-    quote! {
-        #[#entry]
-        fn main() -> ! {
-            let mut executor = ::embassy_executor::Executor::new();
-            let executor = unsafe { __make_static(&mut executor) };
-            executor.run(|spawner| {
-                spawner.must_spawn(__embassy_main(spawner));
-            })
-        }
-    }
-}
-
-pub fn cortex_m() -> TokenStream {
-    quote! {
-        #[cortex_m_rt::entry]
-        fn main() -> ! {
-            let mut executor = ::embassy_executor::Executor::new();
-            let executor = unsafe { __make_static(&mut executor) };
-            executor.run(|spawner| {
-                spawner.must_spawn(__embassy_main(spawner));
-            })
-        }
-    }
-}
-
-pub fn wasm() -> TokenStream {
-    quote! {
-        #[wasm_bindgen::prelude::wasm_bindgen(start)]
-        pub fn main() -> Result<(), wasm_bindgen::JsValue> {
-            let executor = ::std::boxed::Box::leak(::std::boxed::Box::new(::embassy_executor::Executor::new()));
-
-            executor.start(|spawner| {
-                spawner.must_spawn(__embassy_main(spawner));
-            });
-
-            Ok(())
-        }
-    }
-}
-
-pub fn std() -> TokenStream {
-    quote! {
-        fn main() -> ! {
-            let mut executor = ::embassy_executor::Executor::new();
-            let executor = unsafe { __make_static(&mut executor) };
-
-            executor.run(|spawner| {
-                spawner.must_spawn(__embassy_main(spawner));
-            })
-        }
-    }
-}
-
-pub fn run(args: &[NestedMeta], f: syn::ItemFn, main: TokenStream) -> Result<TokenStream, TokenStream> {
-    #[allow(unused_variables)]
-    let args = Args::from_list(args).map_err(|e| e.write_errors())?;
+pub fn run(args: TokenStream, item: TokenStream, arch: &Arch) -> TokenStream {
+    let mut errors = TokenStream::new();
+
+    // If any of the steps for this macro fail, we still want to expand to an item that is as close
+    // to the expected output as possible. This helps out IDEs such that completions and other
+    // related features keep working.
+    let f: ItemFn = match syn::parse2(item.clone()) {
+        Ok(x) => x,
+        Err(e) => return token_stream_with_error(item, e),
+    };
+
+    let args = match NestedMeta::parse_meta_list(args) {
+        Ok(x) => x,
+        Err(e) => return token_stream_with_error(item, e),
+    };
+
+    let args = match Args::from_list(&args) {
+        Ok(x) => x,
+        Err(e) => {
+            errors.extend(e.write_errors());
+            Args::default()
+        }
+    };
 
     let fargs = f.sig.inputs.clone();
 
-    let ctxt = Ctxt::new();
-
     if f.sig.asyncness.is_none() {
-        ctxt.error_spanned_by(&f.sig, "main function must be async");
+        error(&mut errors, &f.sig, "main function must be async");
     }
     if !f.sig.generics.params.is_empty() {
-        ctxt.error_spanned_by(&f.sig, "main function must not be generic");
+        error(&mut errors, &f.sig, "main function must not be generic");
     }
     if !f.sig.generics.where_clause.is_none() {
-        ctxt.error_spanned_by(&f.sig, "main function must not have `where` clauses");
+        error(&mut errors, &f.sig, "main function must not have `where` clauses");
     }
     if !f.sig.abi.is_none() {
-        ctxt.error_spanned_by(&f.sig, "main function must not have an ABI qualifier");
+        error(&mut errors, &f.sig, "main function must not have an ABI qualifier");
     }
     if !f.sig.variadic.is_none() {
-        ctxt.error_spanned_by(&f.sig, "main function must not be variadic");
+        error(&mut errors, &f.sig, "main function must not be variadic");
     }
     match &f.sig.output {
         ReturnType::Default => {}
         ReturnType::Type(_, ty) => match &**ty {
             Type::Tuple(tuple) if tuple.elems.is_empty() => {}
             Type::Never(_) => {}
-            _ => ctxt.error_spanned_by(
+            _ => error(
+                &mut errors,
                 &f.sig,
                 "main function must either not return a value, return `()` or return `!`",
             ),
@@ -154,26 +109,69 @@ pub fn run(args: &[NestedMeta], f: syn::ItemFn, main: TokenStream) -> Result<Tok
     }
 
     if fargs.len() != 1 {
-        ctxt.error_spanned_by(&f.sig, "main function must have 1 argument: the spawner.");
+        error(&mut errors, &f.sig, "main function must have 1 argument: the spawner.");
     }
 
-    ctxt.check()?;
+    let entry = match args.entry.as_deref().or(arch.default_entry) {
+        None => TokenStream::new(),
+        Some(x) => match TokenStream::from_str(x) {
+            Ok(x) => quote!(#[#x]),
+            Err(e) => {
+                error(&mut errors, &f.sig, e);
+                TokenStream::new()
+            }
+        },
+    };
 
-    let f_body = f.block;
+    let f_body = f.body;
     let out = &f.sig.output;
 
+    let (main_ret, mut main_body) = match arch.flavor {
+        Flavor::Standard => (
+            quote!(!),
+            quote! {
+                unsafe fn __make_static<T>(t: &mut T) -> &'static mut T {
+                    ::core::mem::transmute(t)
+                }
+
+                let mut executor = ::embassy_executor::Executor::new();
+                let executor = unsafe { __make_static(&mut executor) };
+                executor.run(|spawner| {
+                    spawner.must_spawn(__embassy_main(spawner));
+                })
+            },
+        ),
+        Flavor::Wasm => (
+            quote!(Result<(), wasm_bindgen::JsValue>),
+            quote! {
+                let executor = ::std::boxed::Box::leak(::std::boxed::Box::new(::embassy_executor::Executor::new()));
+
+                executor.start(|spawner| {
+                    spawner.must_spawn(__embassy_main(spawner));
+                });
+
+                Ok(())
+            },
+        ),
+    };
+
+    if !errors.is_empty() {
+        main_body = quote! {loop{}};
+    }
+
     let result = quote! {
         #[::embassy_executor::task()]
         async fn __embassy_main(#fargs) #out {
             #f_body
         }
 
-        unsafe fn __make_static<T>(t: &mut T) -> &'static mut T {
-            ::core::mem::transmute(t)
+        #entry
+        fn main() -> #main_ret {
+            #main_body
         }
 
-        #main
+        #errors
     };
 
-    Ok(result)
+    result
 }
@@ -2,47 +2,68 @@ use darling::export::NestedMeta;
 use darling::FromMeta;
 use proc_macro2::{Span, TokenStream};
 use quote::{format_ident, quote};
-use syn::{parse_quote, Expr, ExprLit, ItemFn, Lit, LitInt, ReturnType, Type};
+use syn::visit::Visit;
+use syn::{Expr, ExprLit, Lit, LitInt, ReturnType, Type};
 
-use crate::util::ctxt::Ctxt;
+use crate::util::*;
 
-#[derive(Debug, FromMeta)]
+#[derive(Debug, FromMeta, Default)]
 struct Args {
     #[darling(default)]
     pool_size: Option<syn::Expr>,
 }
 
-pub fn run(args: &[NestedMeta], f: syn::ItemFn) -> Result<TokenStream, TokenStream> {
-    let args = Args::from_list(args).map_err(|e| e.write_errors())?;
+pub fn run(args: TokenStream, item: TokenStream) -> TokenStream {
+    let mut errors = TokenStream::new();
+
+    // If any of the steps for this macro fail, we still want to expand to an item that is as close
+    // to the expected output as possible. This helps out IDEs such that completions and other
+    // related features keep working.
+    let f: ItemFn = match syn::parse2(item.clone()) {
+        Ok(x) => x,
+        Err(e) => return token_stream_with_error(item, e),
+    };
+
+    let args = match NestedMeta::parse_meta_list(args) {
+        Ok(x) => x,
+        Err(e) => return token_stream_with_error(item, e),
+    };
+
+    let args = match Args::from_list(&args) {
+        Ok(x) => x,
+        Err(e) => {
+            errors.extend(e.write_errors());
+            Args::default()
+        }
+    };
 
     let pool_size = args.pool_size.unwrap_or(Expr::Lit(ExprLit {
         attrs: vec![],
         lit: Lit::Int(LitInt::new("1", Span::call_site())),
     }));
 
-    let ctxt = Ctxt::new();
-
     if f.sig.asyncness.is_none() {
-        ctxt.error_spanned_by(&f.sig, "task functions must be async");
+        error(&mut errors, &f.sig, "task functions must be async");
     }
     if !f.sig.generics.params.is_empty() {
-        ctxt.error_spanned_by(&f.sig, "task functions must not be generic");
+        error(&mut errors, &f.sig, "task functions must not be generic");
     }
     if !f.sig.generics.where_clause.is_none() {
-        ctxt.error_spanned_by(&f.sig, "task functions must not have `where` clauses");
+        error(&mut errors, &f.sig, "task functions must not have `where` clauses");
     }
     if !f.sig.abi.is_none() {
-        ctxt.error_spanned_by(&f.sig, "task functions must not have an ABI qualifier");
+        error(&mut errors, &f.sig, "task functions must not have an ABI qualifier");
     }
     if !f.sig.variadic.is_none() {
-        ctxt.error_spanned_by(&f.sig, "task functions must not be variadic");
+        error(&mut errors, &f.sig, "task functions must not be variadic");
     }
     match &f.sig.output {
         ReturnType::Default => {}
         ReturnType::Type(_, ty) => match &**ty {
             Type::Tuple(tuple) if tuple.elems.is_empty() => {}
             Type::Never(_) => {}
-            _ => ctxt.error_spanned_by(
+            _ => error(
+                &mut errors,
                 &f.sig,
                 "task functions must either not return a value, return `()` or return `!`",
             ),
@@ -55,26 +76,31 @@ pub fn run(args: &[NestedMeta], f: syn::ItemFn) -> Result<TokenStream, TokenStre
     for arg in fargs.iter_mut() {
         match arg {
             syn::FnArg::Receiver(_) => {
-                ctxt.error_spanned_by(arg, "task functions must not have receiver arguments");
+                error(&mut errors, arg, "task functions must not have receiver arguments");
             }
-            syn::FnArg::Typed(t) => match t.pat.as_mut() {
-                syn::Pat::Ident(id) => {
-                    id.mutability = None;
-                    args.push((id.clone(), t.attrs.clone()));
-                }
-                _ => {
-                    ctxt.error_spanned_by(arg, "pattern matching in task arguments is not yet supported");
-                }
-            },
+            syn::FnArg::Typed(t) => {
+                check_arg_ty(&mut errors, &t.ty);
+                match t.pat.as_mut() {
+                    syn::Pat::Ident(id) => {
+                        id.mutability = None;
+                        args.push((id.clone(), t.attrs.clone()));
+                    }
+                    _ => {
+                        error(
+                            &mut errors,
+                            arg,
+                            "pattern matching in task arguments is not yet supported",
+                        );
+                    }
+                }
+            }
         }
     }
 
-    ctxt.check()?;
-
     let task_ident = f.sig.ident.clone();
     let task_inner_ident = format_ident!("__{}_task", task_ident);
 
-    let mut task_inner = f;
+    let mut task_inner = f.clone();
     let visibility = task_inner.vis.clone();
     task_inner.vis = syn::Visibility::Inherited;
     task_inner.sig.ident = task_inner_ident.clone();
@@ -91,35 +117,43 @@ pub fn run(args: &[NestedMeta], f: syn::ItemFn) -> Result<TokenStre
     }
 
     #[cfg(feature = "nightly")]
-    let mut task_outer: ItemFn = parse_quote! {
-        #visibility fn #task_ident(#fargs) -> ::embassy_executor::SpawnToken<impl Sized> {
-            trait _EmbassyInternalTaskTrait {
-                type Fut: ::core::future::Future + 'static;
-                fn construct(#fargs) -> Self::Fut;
-            }
-
-            impl _EmbassyInternalTaskTrait for () {
-                type Fut = impl core::future::Future + 'static;
-                fn construct(#fargs) -> Self::Fut {
-                    #task_inner_ident(#(#full_args,)*)
-                }
-            }
-
-            const POOL_SIZE: usize = #pool_size;
-            static POOL: ::embassy_executor::raw::TaskPool<<() as _EmbassyInternalTaskTrait>::Fut, POOL_SIZE> = ::embassy_executor::raw::TaskPool::new();
-            unsafe { POOL._spawn_async_fn(move || <() as _EmbassyInternalTaskTrait>::construct(#(#full_args,)*)) }
-        }
+    let mut task_outer_body = quote! {
+        trait _EmbassyInternalTaskTrait {
+            type Fut: ::core::future::Future + 'static;
+            fn construct(#fargs) -> Self::Fut;
+        }
+
+        impl _EmbassyInternalTaskTrait for () {
+            type Fut = impl core::future::Future + 'static;
+            fn construct(#fargs) -> Self::Fut {
+                #task_inner_ident(#(#full_args,)*)
+            }
+        }
+
+        const POOL_SIZE: usize = #pool_size;
+        static POOL: ::embassy_executor::raw::TaskPool<<() as _EmbassyInternalTaskTrait>::Fut, POOL_SIZE> = ::embassy_executor::raw::TaskPool::new();
+        unsafe { POOL._spawn_async_fn(move || <() as _EmbassyInternalTaskTrait>::construct(#(#full_args,)*)) }
     };
     #[cfg(not(feature = "nightly"))]
-    let mut task_outer: ItemFn = parse_quote! {
-        #visibility fn #task_ident(#fargs) -> ::embassy_executor::SpawnToken<impl Sized> {
-            const POOL_SIZE: usize = #pool_size;
-            static POOL: ::embassy_executor::_export::TaskPoolRef = ::embassy_executor::_export::TaskPoolRef::new();
-            unsafe { POOL.get::<_, POOL_SIZE>()._spawn_async_fn(move || #task_inner_ident(#(#full_args,)*)) }
-        }
+    let mut task_outer_body = quote! {
+        const POOL_SIZE: usize = #pool_size;
+        static POOL: ::embassy_executor::_export::TaskPoolRef = ::embassy_executor::_export::TaskPoolRef::new();
+        unsafe { POOL.get::<_, POOL_SIZE>()._spawn_async_fn(move || #task_inner_ident(#(#full_args,)*)) }
     };
 
-    task_outer.attrs.append(&mut task_inner.attrs.clone());
+    let task_outer_attrs = task_inner.attrs.clone();
 
+    if !errors.is_empty() {
+        task_outer_body = quote! {
+            #![allow(unused_variables, unreachable_code)]
+            let _x: ::embassy_executor::SpawnToken<()> = ::core::todo!();
+            _x
+        };
+    }
+
+    // Copy the generics + where clause to avoid more spurious errors.
+    let generics = &f.sig.generics;
+    let where_clause = &f.sig.generics.where_clause;
+
     let result = quote! {
         // This is the user's task function, renamed.
@@ -129,8 +163,27 @@ pub fn run(args: &[NestedMeta], f: syn::ItemFn) -> Result<TokenStre
         #[doc(hidden)]
         #task_inner
 
-        #task_outer
+        #(#task_outer_attrs)*
+        #visibility fn #task_ident #generics (#fargs) -> ::embassy_executor::SpawnToken<impl Sized> #where_clause{
+            #task_outer_body
+        }
+
+        #errors
     };
 
-    Ok(result)
+    result
+}
+
+fn check_arg_ty(errors: &mut TokenStream, ty: &Type) {
+    struct Visitor<'a> {
+        errors: &'a mut TokenStream,
+    }
+
+    impl<'a, 'ast> Visit<'ast> for Visitor<'a> {
+        fn visit_type_impl_trait(&mut self, i: &'ast syn::TypeImplTrait) {
+            error(self.errors, i, "`impl Trait` is not allowed in task arguments. It is syntax sugar for generics, and tasks can't be generic.");
+        }
+    }
+
+    Visit::visit_type(&mut Visitor { errors }, ty);
 }
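For orientation, here is roughly what the non-nightly branch above expands a minimal task into, assembled from the `quote!` fragments in the diff. `blink` is a made-up task name (the `__blink_task` inner name follows the `format_ident!("__{}_task", ...)` pattern); this is a sketch of the shape of the output, not the literal expansion.

```rust
// Hypothetical user code:
//
//     #[embassy_executor::task]
//     async fn blink() { /* ... */ }
//
// Approximate expansion on the non-nightly path:

async fn __blink_task() { /* original body, passed through as raw tokens */ }

fn blink() -> ::embassy_executor::SpawnToken<impl Sized> {
    const POOL_SIZE: usize = 1; // the `pool_size` argument, default 1
    static POOL: ::embassy_executor::_export::TaskPoolRef = ::embassy_executor::_export::TaskPoolRef::new();
    unsafe { POOL.get::<_, POOL_SIZE>()._spawn_async_fn(move || __blink_task()) }
}
```

Because `POOL` is a `static` holding one concrete future type, the outer function cannot be generic, which is why `impl Trait` arguments (generics sugar) are now rejected. When validation fails, the macro still emits the outer function, with a `todo!()` body, so IDE completion on the task name keeps working.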
embassy-executor-macros/src/util.rs (new file, 74 lines)
@@ -0,0 +1,74 @@
+use std::fmt::Display;
+
+use proc_macro2::{TokenStream, TokenTree};
+use quote::{ToTokens, TokenStreamExt};
+use syn::parse::{Parse, ParseStream};
+use syn::{braced, bracketed, token, AttrStyle, Attribute, Signature, Token, Visibility};
+
+pub fn token_stream_with_error(mut tokens: TokenStream, error: syn::Error) -> TokenStream {
+    tokens.extend(error.into_compile_error());
+    tokens
+}
+
+pub fn error<A: ToTokens, T: Display>(s: &mut TokenStream, obj: A, msg: T) {
+    s.extend(syn::Error::new_spanned(obj.into_token_stream(), msg).into_compile_error())
+}
+
+/// Function signature and body.
+///
+/// Same as `syn`'s `ItemFn` except we keep the body as a TokenStream instead of
+/// parsing it. This makes the macro not error if there's a syntax error in the body,
+/// which helps IDE autocomplete work better.
+#[derive(Debug, Clone)]
+pub struct ItemFn {
+    pub attrs: Vec<Attribute>,
+    pub vis: Visibility,
+    pub sig: Signature,
+    pub brace_token: token::Brace,
+    pub body: TokenStream,
+}
+
+impl Parse for ItemFn {
+    fn parse(input: ParseStream) -> syn::Result<Self> {
+        let mut attrs = input.call(Attribute::parse_outer)?;
+        let vis: Visibility = input.parse()?;
+        let sig: Signature = input.parse()?;
+
+        let content;
+        let brace_token = braced!(content in input);
+        while content.peek(Token![#]) && content.peek2(Token![!]) {
+            let content2;
+            attrs.push(Attribute {
+                pound_token: content.parse()?,
+                style: AttrStyle::Inner(content.parse()?),
+                bracket_token: bracketed!(content2 in content),
+                meta: content2.parse()?,
+            });
+        }
+
+        let mut body = Vec::new();
+        while !content.is_empty() {
+            body.push(content.parse::<TokenTree>()?);
+        }
+        let body = body.into_iter().collect();
+
+        Ok(ItemFn {
+            attrs,
+            vis,
+            sig,
+            brace_token,
+            body,
+        })
+    }
+}
+
+impl ToTokens for ItemFn {
+    fn to_tokens(&self, tokens: &mut TokenStream) {
+        tokens.append_all(self.attrs.iter().filter(|a| matches!(a.style, AttrStyle::Outer)));
+        self.vis.to_tokens(tokens);
+        self.sig.to_tokens(tokens);
+        self.brace_token.surround(tokens, |tokens| {
+            tokens.append_all(self.body.clone());
+        });
+    }
+}
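A small, hypothetical sanity check of the design above (assuming it sits next to this `ItemFn` with `syn` available): because the body is captured as raw token trees rather than parsed statements, a function whose body is not valid Rust still yields a usable signature, which is what lets the macros keep emitting their wrappers for IDE completion.

```rust
#[test]
fn item_fn_keeps_unparsable_body_as_tokens() {
    // Braces balance, but the body is not valid Rust statements.
    let src = "async fn demo(x: u32) { this body is ! not % valid rust }";
    let f: ItemFn = syn::parse_str(src).expect("signature should parse; body is raw tokens");
    assert_eq!(f.sig.ident.to_string(), "demo");
    assert!(f.sig.asyncness.is_some());
}
```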
@@ -1,72 +0,0 @@
-// nifty utility borrowed from serde :)
-// https://github.com/serde-rs/serde/blob/master/serde_derive/src/internals/ctxt.rs
-
-use std::cell::RefCell;
-use std::fmt::Display;
-use std::thread;
-
-use proc_macro2::TokenStream;
-use quote::{quote, ToTokens};
-
-/// A type to collect errors together and format them.
-///
-/// Dropping this object will cause a panic. It must be consumed using `check`.
-///
-/// References can be shared since this type uses run-time exclusive mut checking.
-#[derive(Default)]
-pub struct Ctxt {
-    // The contents will be set to `None` during checking. This is so that checking can be
-    // enforced.
-    errors: RefCell<Option<Vec<syn::Error>>>,
-}
-
-impl Ctxt {
-    /// Create a new context object.
-    ///
-    /// This object contains no errors, but will still trigger a panic if it is not `check`ed.
-    pub fn new() -> Self {
-        Ctxt {
-            errors: RefCell::new(Some(Vec::new())),
-        }
-    }
-
-    /// Add an error to the context object with a tokenenizable object.
-    ///
-    /// The object is used for spanning in error messages.
-    pub fn error_spanned_by<A: ToTokens, T: Display>(&self, obj: A, msg: T) {
-        self.errors
-            .borrow_mut()
-            .as_mut()
-            .unwrap()
-            // Curb monomorphization from generating too many identical methods.
-            .push(syn::Error::new_spanned(obj.into_token_stream(), msg));
-    }
-
-    /// Add one of Syn's parse errors.
-    #[allow(unused)]
-    pub fn syn_error(&self, err: syn::Error) {
-        self.errors.borrow_mut().as_mut().unwrap().push(err);
-    }
-
-    /// Consume this object, producing a formatted error string if there are errors.
-    pub fn check(self) -> Result<(), TokenStream> {
-        let errors = self.errors.borrow_mut().take().unwrap();
-        match errors.len() {
-            0 => Ok(()),
-            _ => Err(to_compile_errors(errors)),
-        }
-    }
-}
-
-fn to_compile_errors(errors: Vec<syn::Error>) -> proc_macro2::TokenStream {
-    let compile_errors = errors.iter().map(syn::Error::to_compile_error);
-    quote!(#(#compile_errors)*)
-}
-
-impl Drop for Ctxt {
-    fn drop(&mut self) {
-        if !thread::panicking() && self.errors.borrow().is_some() {
-            panic!("forgot to check for errors");
-        }
-    }
-}
@@ -1 +0,0 @@
-pub mod ctxt;
@@ -10,8 +10,10 @@ use embassy_stm32::i2c::{self, I2c};
 use embassy_stm32::mode::Async;
 use embassy_stm32::time::Hertz;
 use embassy_stm32::{bind_interrupts, peripherals};
+use embassy_sync::blocking_mutex::raw::NoopRawMutex;
 use embassy_sync::blocking_mutex::NoopMutex;
 use embassy_time::{Duration, Timer};
+use embedded_hal_1::i2c::I2c as _;
 use static_cell::StaticCell;
 use {defmt_rtt as _, panic_probe as _};
 
@@ -31,7 +33,7 @@ bind_interrupts!(struct Irqs {
 });
 
 #[embassy_executor::task]
-async fn temperature(mut i2c: impl embedded_hal_1::i2c::I2c + 'static) {
+async fn temperature(mut i2c: I2cDevice<'static, NoopRawMutex, I2c<'static, Async>>) {
     let mut data = [0u8; 2];
 
     loop {
@@ -48,7 +50,7 @@ async fn temperature(mut i2c: impl embedded_hal_1::i2c::I2c + 'static) {
 }
 
 #[embassy_executor::task]
-async fn humidity(mut i2c: impl embedded_hal_1::i2c::I2c + 'static) {
+async fn humidity(mut i2c: I2cDevice<'static, NoopRawMutex, I2c<'static, Async>>) {
    let mut data = [0u8; 6];
 
     loop {