From 8e6411bd31763409e5db379e97189b935264b30a Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?D=C3=A1niel=20Buga?=
Date: Fri, 30 Aug 2024 16:45:54 +0200
Subject: [PATCH] Random cleanups in non-checked packages (#2034)

* Deduplicate feature check macros

* Re-enable rust-analyzer for most of the workspace

* Cargo fix

* Turn off defmt

* Only build xtask

* Clippy pls

* Fix CI

* Fix paths

* Always create doc directory first

* Revert r-a

* Update esp-hal-procmacros/src/lp_core.rs

Co-authored-by: Dominic Fischer <14130965+Dominaezzz@users.noreply.github.com>

---------

Co-authored-by: Dominic Fischer <14130965+Dominaezzz@users.noreply.github.com>
---
 .github/workflows/hil.yml               |  2 +-
 esp-build/src/lib.rs                    | 67 ++++++++++++++-----------
 esp-hal-procmacros/src/embassy.rs       |  6 +--
 esp-hal-procmacros/src/enum_dispatch.rs | 25 ++++-----
 esp-hal-procmacros/src/interrupt.rs     |  4 +-
 esp-hal-procmacros/src/lib.rs           |  4 +-
 esp-hal-procmacros/src/lp_core.rs       | 48 +++++++++---------
 esp-lp-hal/Cargo.toml                   |  8 +--
 extras/bench-server/src/main.rs         | 36 +++++++++----
 xtask/src/lib.rs                        | 12 ++++-
 xtask/src/main.rs                       | 36 ++++++++-----
 11 files changed, 142 insertions(+), 106 deletions(-)

diff --git a/.github/workflows/hil.yml b/.github/workflows/hil.yml
index 47b9773889a..c30091df688 100644
--- a/.github/workflows/hil.yml
+++ b/.github/workflows/hil.yml
@@ -60,7 +60,7 @@ jobs:
         run: cargo install cross
 
       - name: Build xtasks
-        run: cross build --release --target ${{ matrix.host.rust-target }}
+        run: cross build --release --target ${{ matrix.host.rust-target }} -p xtask
 
       - name: Upload artifact
         uses: actions/upload-artifact@v4
diff --git a/esp-build/src/lib.rs b/esp-build/src/lib.rs
index 3de49b6ac3d..4e97602da0a 100644
--- a/esp-build/src/lib.rs
+++ b/esp-build/src/lib.rs
@@ -5,6 +5,7 @@
 use std::{io::Write as _, process};
 
 use proc_macro::TokenStream;
+use quote::ToTokens;
 use syn::{parse_macro_input, punctuated::Punctuated, LitStr, Token};
 use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor};
 
@@ -58,22 +59,10 @@ pub fn assert_unique_features(input: TokenStream) -> TokenStream {
         .into_iter()
         .collect::<Vec<_>>();
 
-    let pairs = unique_pairs(&features);
-    let unique_cfgs = pairs
-        .iter()
-        .map(|(a, b)| quote::quote! { all(feature = #a, feature = #b) });
-
-    let message = format!(
-        r#"
-ERROR: expected exactly zero or one enabled feature from feature group:
-    {:?}
-"#,
-        features.iter().map(|lit| lit.value()).collect::<Vec<_>>(),
-    );
+    let unique = impl_unique_features(&features, "exactly zero or one");
 
     quote::quote! {
-        #[cfg(any(#(#unique_cfgs),*))]
-        ::esp_build::error! { #message }
+        #unique
     }
     .into()
 }
@@ -91,17 +80,10 @@ pub fn assert_used_features(input: TokenStream) -> TokenStream {
         .into_iter()
         .collect::<Vec<_>>();
 
-    let message = format!(
-        r#"
-ERROR: expected at least one enabled feature from feature group:
-    {:?}
-    "#,
-        features.iter().map(|lit| lit.value()).collect::<Vec<_>>()
-    );
+    let used = impl_used_features(&features, "at least one");
 
     quote::quote! {
-        #[cfg(not(any(#(feature = #features),*)))]
-        ::esp_build::error! { #message }
+        #used
     }
     .into()
 }
@@ -118,29 +100,54 @@ pub fn assert_unique_used_features(input: TokenStream) -> TokenStream {
         .into_iter()
         .collect::<Vec<_>>();
 
-    let pairs = unique_pairs(&features);
+    let unique = impl_unique_features(&features, "exactly one");
+    let used = impl_used_features(&features, "exactly one");
+
+    quote::quote! {
+        #unique
+        #used
+    }
+    .into()
+}
+
+// ----------------------------------------------------------------------------
+// Helper Functions
+
+fn impl_unique_features(features: &[LitStr], expectation: &str) -> impl ToTokens {
+    let pairs = unique_pairs(features);
     let unique_cfgs = pairs
         .iter()
         .map(|(a, b)| quote::quote! { all(feature = #a, feature = #b) });
 
     let message = format!(
         r#"
-ERROR: expected exactly one enabled feature from feature group:
+ERROR: expected {expectation} enabled feature from feature group:
+    {:?}
+"#,
+        features.iter().map(|lit| lit.value()).collect::<Vec<_>>(),
+    );
+
+    quote::quote! {
+        #[cfg(any(#(#unique_cfgs),*))]
+        ::esp_build::error! { #message }
+    }
+}
+
+fn impl_used_features(features: &[LitStr], expectation: &str) -> impl ToTokens {
+    let message = format!(
+        r#"
+ERROR: expected {expectation} enabled feature from feature group:
     {:?}
 "#,
         features.iter().map(|lit| lit.value()).collect::<Vec<_>>()
     );
 
     quote::quote! {
-        #[cfg(any(any(#(#unique_cfgs),*), not(any(#(feature = #features),*))))]
+        #[cfg(not(any(#(feature = #features),*)))]
         ::esp_build::error! { #message }
     }
-    .into()
 }
 
-// ----------------------------------------------------------------------------
-// Helper Functions
-
 // Adapted from:
 // https://github.com/dtolnay/build-alert/blob/49d060e/src/lib.rs#L54-L93
 fn do_alert(color: Color, input: TokenStream) -> TokenStream {
diff --git a/esp-hal-procmacros/src/embassy.rs b/esp-hal-procmacros/src/embassy.rs
index fdd44ff863c..9d1dc5c0144 100644
--- a/esp-hal-procmacros/src/embassy.rs
+++ b/esp-hal-procmacros/src/embassy.rs
@@ -46,13 +46,13 @@ pub(crate) mod main {
         if !f.sig.generics.params.is_empty() {
             ctxt.error_spanned_by(&f.sig, "main function must not be generic");
         }
-        if !f.sig.generics.where_clause.is_none() {
+        if f.sig.generics.where_clause.is_some() {
             ctxt.error_spanned_by(&f.sig, "main function must not have `where` clauses");
         }
-        if !f.sig.abi.is_none() {
+        if f.sig.abi.is_some() {
             ctxt.error_spanned_by(&f.sig, "main function must not have an ABI qualifier");
         }
-        if !f.sig.variadic.is_none() {
+        if f.sig.variadic.is_some() {
             ctxt.error_spanned_by(&f.sig, "main function must not be variadic");
         }
         match &f.sig.output {
diff --git a/esp-hal-procmacros/src/enum_dispatch.rs b/esp-hal-procmacros/src/enum_dispatch.rs
index 2372be88426..3a010bd9849 100644
--- a/esp-hal-procmacros/src/enum_dispatch.rs
+++ b/esp-hal-procmacros/src/enum_dispatch.rs
@@ -30,21 +30,18 @@ impl Parse for MakeGpioEnumDispatchMacro {
         let mut elements = vec![];
 
-        let mut stream = input.parse::<Group>()?.stream().into_iter();
+        let stream = input.parse::<Group>()?.stream().into_iter();
         let mut element_name = String::new();
-        loop {
-            match stream.next() {
-                Some(v) => match v {
-                    TokenTree::Ident(ident) => {
-                        element_name = ident.to_string();
-                    }
-                    TokenTree::Literal(lit) => {
-                        let index = lit.to_string().parse().unwrap();
-                        elements.push((element_name.clone(), index));
-                    }
-                    _ => (),
-                },
-                None => break,
+        for v in stream {
+            match v {
+                TokenTree::Ident(ident) => {
+                    element_name = ident.to_string();
+                }
+                TokenTree::Literal(lit) => {
+                    let index = lit.to_string().parse().unwrap();
+                    elements.push((element_name.clone(), index));
+                }
+                _ => (),
             }
         }
 
diff --git a/esp-hal-procmacros/src/interrupt.rs b/esp-hal-procmacros/src/interrupt.rs
index 385dbb3c06d..f84acbfcbc5 100644
--- a/esp-hal-procmacros/src/interrupt.rs
+++ b/esp-hal-procmacros/src/interrupt.rs
@@ -24,7 +24,7 @@ pub(crate) fn check_attr_whitelist(
 
     'o: for attr in attrs {
         for val in whitelist {
-            if eq(&attr, &val) {
+            if eq(attr, val) {
                 continue 'o;
             }
         }
 
@@ -35,7 +35,7 @@ pub(crate) fn check_attr_whitelist(
             }
         };
 
-        return Err(Error::new(attr.span(), &err_str).to_compile_error().into());
+        return Err(Error::new(attr.span(), err_str).to_compile_error().into());
     }
 
     Ok(())
diff --git a/esp-hal-procmacros/src/lib.rs b/esp-hal-procmacros/src/lib.rs
index 6bec2d198f1..0a1106e4b87 100644
--- a/esp-hal-procmacros/src/lib.rs
+++ b/esp-hal-procmacros/src/lib.rs
@@ -210,7 +210,7 @@ pub fn ram(args: TokenStream, input: TokenStream) -> TokenStream {
 
     let hal = proc_macro2::Ident::new(
         if let Ok(FoundCrate::Name(ref name)) = crate_name("esp-hal") {
-            &name
+            name
         } else {
             "crate"
         },
@@ -278,7 +278,7 @@ pub fn handler(args: TokenStream, input: TokenStream) -> TokenStream {
 
     let root = Ident::new(
         if let Ok(FoundCrate::Name(ref name)) = crate_name("esp-hal") {
-            &name
+            name
         } else {
             "crate"
         },
diff --git a/esp-hal-procmacros/src/lp_core.rs b/esp-hal-procmacros/src/lp_core.rs
index 31baca8eee4..5fa639d7e92 100644
--- a/esp-hal-procmacros/src/lp_core.rs
+++ b/esp-hal-procmacros/src/lp_core.rs
@@ -23,19 +23,19 @@ pub fn entry(args: TokenStream, input: TokenStream) -> TokenStream {
     let mut res = String::from("__ULP_MAGIC_");
     for &a in args {
         let t = &a.ty;
-        let quoted = to_string(&t);
+        let quoted = to_string(t);
         res.push_str(&quoted);
-        res.push_str("$");
+        res.push('$');
     }
     res
 }
 
 pub(crate) fn get_simplename(t: &Type) -> String {
-    String::from(match t {
-        Type::Path(p) => String::from(&p.path.segments.last().unwrap().ident.to_string()),
+    match t {
+        Type::Path(p) => p.path.segments.last().unwrap().ident.to_string(),
         _ => String::new(),
-    })
+    }
 }
 
 pub(crate) fn extract_pin(ty: &Type) -> u8 {
@@ -49,7 +49,7 @@ pub fn entry(args: TokenStream, input: TokenStream) -> TokenStream {
                         res = extract_pin(t);
                     }
                     GenericArgument::Const(c) => {
-                        res = (&quote! { #c }.to_string()).parse().unwrap();
+                        res = quote! { #c }.to_string().parse().unwrap();
                     }
                     _ => (),
                 }
@@ -68,11 +68,11 @@ pub fn entry(args: TokenStream, input: TokenStream) -> TokenStream {
         res.push_str(&segment.ident.to_string());
 
         if let PathArguments::AngleBracketed(g) = &segment.arguments {
-            res.push_str("<");
+            res.push('<');
             let mut pushed = false;
             for arg in &g.args {
                 if pushed {
-                    res.push_str(",");
+                    res.push(',');
                 }
 
                 match arg {
@@ -87,7 +87,7 @@ pub fn entry(args: TokenStream, input: TokenStream) -> TokenStream {
                 _ => (),
             }
         }
-        res.push_str(">");
+        res.push('>');
     }
 }
@@ -219,7 +219,7 @@ pub fn load_lp_code(input: TokenStream) -> TokenStream {
     };
 
     let hal_crate = if let Ok(FoundCrate::Name(ref name)) = hal_crate {
-        let ident = Ident::new(&name, Span::call_site().into());
+        let ident = Ident::new(name, Span::call_site().into());
         quote!( #ident )
     } else {
         quote!(crate)
@@ -261,12 +261,14 @@ pub fn load_lp_code(input: TokenStream) -> TokenStream {
 
     let mut sections: Vec<Section> = sections
         .into_iter()
-        .filter(|section| match section.kind() {
-            SectionKind::Text
-            | SectionKind::ReadOnlyData
-            | SectionKind::Data
-            | SectionKind::UninitializedData => true,
-            _ => false,
+        .filter(|section| {
+            matches!(
+                section.kind(),
+                SectionKind::Text
+                    | SectionKind::ReadOnlyData
+                    | SectionKind::Data
+                    | SectionKind::UninitializedData
+            )
         })
         .collect();
     sections.sort_by(|a, b| a.address().partial_cmp(&b.address()).unwrap());
@@ -280,9 +282,8 @@ pub fn load_lp_code(input: TokenStream) -> TokenStream {
 
     for section in sections {
         if section.address() > last_address {
-            for _ in 0..(section.address() - last_address) {
-                binary.push(0);
-            }
+            let fill = section.address() - last_address;
+            binary.extend(std::iter::repeat(0).take(fill as usize));
         }
 
         binary.extend_from_slice(section.data().unwrap());
@@ -293,21 +294,20 @@ pub fn load_lp_code(input: TokenStream) -> TokenStream {
         .symbols()
         .find(|s| s.name().unwrap().starts_with("__ULP_MAGIC_"));
 
-    if let None = magic_symbol {
+    let magic_symbol = if let Some(magic_symbol) = magic_symbol {
+        magic_symbol.name().unwrap()
+    } else {
         return Error::new(
             Span::call_site().into(),
             "Given file doesn't seem to be an LP/ULP core application.",
         )
        .to_compile_error()
        .into();
-    }
-
-    let magic_symbol = magic_symbol.unwrap().name().unwrap();
+    };
 
     let magic_symbol = magic_symbol.trim_start_matches("__ULP_MAGIC_");
     let args: Vec<proc_macro2::TokenStream> = magic_symbol
         .split("$")
-        .into_iter()
         .map(|t| {
             let t = if t.contains("OutputOpenDrain") {
                 t.replace("OutputOpenDrain", "LowPowerOutputOpenDrain")
diff --git a/esp-lp-hal/Cargo.toml b/esp-lp-hal/Cargo.toml
index f8e893dab6e..72db1c35d42 100644
--- a/esp-lp-hal/Cargo.toml
+++ b/esp-lp-hal/Cargo.toml
@@ -7,10 +7,6 @@ description = "HAL for low-power RISC-V coprocessors found in ESP32 devices"
 repository = "https://github.com/esp-rs/esp-hal"
 license = "MIT OR Apache-2.0"
 
-[lib]
-bench = false
-test = false
-
 keywords = [
   "embedded",
   "embedded-hal",
@@ -24,6 +20,10 @@ categories = [
   "no-std",
 ]
 
+[lib]
+bench = false
+test = false
+
 [dependencies]
 cfg-if = "1.0.0"
 document-features = "0.2.10"
diff --git a/extras/bench-server/src/main.rs b/extras/bench-server/src/main.rs
index 21ab64867c7..7f38acf749d 100644
--- a/extras/bench-server/src/main.rs
+++ b/extras/bench-server/src/main.rs
@@ -1,7 +1,9 @@
-use std::io::{Read, Write};
-use std::net::{TcpListener, TcpStream};
-use std::thread::spawn;
-use std::time::Duration;
+use std::{
+    io::{Read, Write},
+    net::{TcpListener, TcpStream},
+    thread::spawn,
+    time::Duration,
+};
 
 use log::info;
 
@@ -22,8 +24,12 @@ fn tx_listen() {
 }
 
 fn tx_conn(mut socket: TcpStream) {
-    socket.set_read_timeout(Some(Duration::from_secs(30))).unwrap();
-    socket.set_write_timeout(Some(Duration::from_secs(30))).unwrap();
+    socket
+        .set_read_timeout(Some(Duration::from_secs(30)))
+        .unwrap();
+    socket
+        .set_write_timeout(Some(Duration::from_secs(30)))
+        .unwrap();
 
     let buf = [0; 1024];
     loop {
@@ -44,8 +50,12 @@ fn rx_listen() {
 }
 
 fn rx_conn(mut socket: TcpStream) {
-    socket.set_read_timeout(Some(Duration::from_secs(30))).unwrap();
-    socket.set_write_timeout(Some(Duration::from_secs(30))).unwrap();
+    socket
+        .set_read_timeout(Some(Duration::from_secs(30)))
+        .unwrap();
+    socket
+        .set_write_timeout(Some(Duration::from_secs(30)))
+        .unwrap();
 
     let mut buf = [0; 1024];
     loop {
@@ -66,8 +76,12 @@ fn rxtx_listen() {
 }
 
 fn rxtx_conn(mut socket: TcpStream) {
-    socket.set_read_timeout(Some(Duration::from_secs(30))).unwrap();
-    socket.set_write_timeout(Some(Duration::from_secs(30))).unwrap();
+    socket
+        .set_read_timeout(Some(Duration::from_secs(30)))
+        .unwrap();
+    socket
+        .set_write_timeout(Some(Duration::from_secs(30)))
+        .unwrap();
 
     let mut buf = [0; 1024];
     loop {
@@ -84,4 +98,4 @@ fn rxtx_conn(mut socket: TcpStream) {
         }
     }
 }
-}
\ No newline at end of file
+}
diff --git a/xtask/src/lib.rs b/xtask/src/lib.rs
index 9e28b57faab..6eb98fcbd89 100644
--- a/xtask/src/lib.rs
+++ b/xtask/src/lib.rs
@@ -114,7 +114,7 @@ pub fn build_documentation(
     package: Package,
     chip: Chip,
     target: &str,
-) -> Result<()> {
+) -> Result<PathBuf> {
     let package_name = package.to_string();
     let package_path = windows_safe_path(&workspace.join(&package_name));
 
@@ -142,7 +142,15 @@ pub fn build_documentation(
     // Execute `cargo doc` from the package root:
     cargo::run(&args, &package_path)?;
 
-    Ok(())
+    let docs_path = windows_safe_path(
+        &workspace
+            .join(package.to_string())
+            .join("target")
+            .join(target)
+            .join("doc"),
+    );
+
+    Ok(docs_path)
 }
 
 /// Load all examples at the given path, and parse their metadata.
diff --git a/xtask/src/main.rs b/xtask/src/main.rs
index bbec3a47e3d..21436a92b13 100644
--- a/xtask/src/main.rs
+++ b/xtask/src/main.rs
@@ -332,6 +332,9 @@ fn build_documentation(workspace: &Path, args: BuildDocumentationArgs) -> Result<()> {
     let output_path = workspace.join("docs");
     let resources = workspace.join("resources");
 
+    fs::create_dir_all(&output_path)
+        .with_context(|| format!("Failed to create {}", output_path.display()))?;
+
     let mut packages = HashMap::new();
     for package in args.packages {
         packages.insert(
@@ -341,10 +344,12 @@ fn build_documentation(workspace: &Path, args: BuildDocumentationArgs) -> Result<()> {
     }
 
     // Copy any additional assets to the documentation's output path:
-    fs::copy(resources.join("esp-rs.svg"), output_path.join("esp-rs.svg"))?;
+    fs::copy(resources.join("esp-rs.svg"), output_path.join("esp-rs.svg"))
+        .context("Failed to copy esp-rs.svg")?;
 
     // Render the index and write it out to the documentation's output path:
-    let source = fs::read_to_string(resources.join("index.html.jinja"))?;
+    let source = fs::read_to_string(resources.join("index.html.jinja"))
+        .context("Failed to read index.html.jinja")?;
 
     let mut env = minijinja::Environment::new();
     env.add_template("index", &source)?;
 
     let tmpl = env.get_template("index")?;
     let html = tmpl.render(minijinja::context! { packages => packages })?;
 
-    fs::write(output_path.join("index.html"), html)?;
+    fs::write(output_path.join("index.html"), html).context("Failed to write index.html")?;
 
     Ok(())
 }
@@ -377,14 +382,12 @@ fn build_documentation_for_package(
 
     // Build the documentation for the specified package, targeting the
     // specified chip:
-    xtask::build_documentation(workspace, package, *chip, target)?;
-
-    let docs_path = xtask::windows_safe_path(
-        &workspace
-            .join(package.to_string())
-            .join("target")
-            .join(target)
-            .join("doc"),
+    let docs_path = xtask::build_documentation(workspace, package, *chip, target)?;
+
+    ensure!(
+        docs_path.exists(),
+        "Documentation not found at {}",
+        docs_path.display()
     );
 
     let output_path = output_path
@@ -394,8 +397,15 @@ fn build_documentation_for_package(
     let output_path = xtask::windows_safe_path(&output_path);
 
     // Create the output directory, and copy the built documentation into it:
-    fs::create_dir_all(&output_path)?;
-    copy_dir_all(&docs_path, &output_path)?;
+    fs::create_dir_all(&output_path)
+        .with_context(|| format!("Failed to create {}", output_path.display()))?;
+    copy_dir_all(&docs_path, &output_path).with_context(|| {
+        format!(
+            "Failed to copy {} to {}",
+            docs_path.display(),
+            output_path.display()
+        )
+    })?;
 
     // Build the context object required for rendering this particular build's
     // information on the documentation index:
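
Note on the esp-build change: all three assertion macros now delegate to the
shared impl_unique_features/impl_used_features helpers, so existing call
sites keep working unchanged. A minimal usage sketch, assuming a build script
that depends on esp-build (the feature names here are illustrative, not taken
from this patch):

    // build.rs (sketch; "esp32c6"/"esp32s3" are placeholder feature names)
    use esp_build::assert_unique_used_features;

    // Emits a compile error unless exactly one of the listed features is
    // enabled: any enabled pair trips the any(all(...)) guard, and an empty
    // selection trips the not(any(...)) guard.
    assert_unique_used_features!("esp32c6", "esp32s3");

    fn main() {}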
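For the placeholder group above, the two guards generated by the helpers
expand to roughly the following (a sketch reconstructed from the quote!
templates in this diff; string escaping and whitespace compressed):

    #[cfg(any(all(feature = "esp32c6", feature = "esp32s3")))]
    ::esp_build::error! { "
    ERROR: expected exactly one enabled feature from feature group:
        [\"esp32c6\", \"esp32s3\"]
    " }

    #[cfg(not(any(feature = "esp32c6", feature = "esp32s3")))]
    ::esp_build::error! { "
    ERROR: expected exactly one enabled feature from feature group:
        [\"esp32c6\", \"esp32s3\"]
    " }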
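And a note on the lp_core.rs section filter: matches! is a drop-in
replacement for the removed match-returning-bool. A standalone equivalence
sketch using the same object::SectionKind variants (the helper name is ours,
not from the patch):

    use object::SectionKind;

    // Same truth table as the old `match ... => true, _ => false` form:
    fn is_loadable(kind: SectionKind) -> bool {
        matches!(
            kind,
            SectionKind::Text
                | SectionKind::ReadOnlyData
                | SectionKind::Data
                | SectionKind::UninitializedData
        )
    }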