From 0af2dfeae19fa88e4bf1b89bd1c5e91fbf0fa546 Mon Sep 17 00:00:00 2001 From: Markus Mayer Date: Sat, 6 Jul 2024 19:05:29 +0200 Subject: [PATCH 01/10] Add port and baud-rate arguments for Clap --- src/cli.rs | 35 +++++++++++++++++++++++++++++------ src/data_buffer.rs | 4 +++- src/main.rs | 15 ++++++++------- 3 files changed, 40 insertions(+), 14 deletions(-) diff --git a/src/cli.rs b/src/cli.rs index ca83964..2d0df21 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -1,25 +1,48 @@ -use clap::Parser; +use clap::{Parser, Subcommand}; use crate::utils::version; #[derive(Parser, Debug)] #[command(author, version = version(), about)] +#[command(propagate_version = true)] pub struct Cli { #[arg( + global = true, short, long, - value_name = "FLOAT", - help = "Tick rate, i.e. number of ticks per second", - default_value_t = 1.0 + value_name = "PORT", + help = "The port name", + default_value = "/dev/ttyACM0" + )] + pub port: String, + + #[arg( + global = true, + short, + long, + value_name = "BAUD_RATE", + help = "The baud rate", + default_value_t = 1_000_000 )] - pub tick_rate: f64, + pub baud: u32, + #[command(subcommand)] + pub command: Commands, +} + +#[derive(Subcommand, Debug)] +pub enum Commands { + Ui(UiCommand), +} + +#[derive(Parser, Debug)] +pub struct UiCommand { #[arg( short, long, value_name = "FLOAT", help = "Frame rate, i.e. 
number of frames per second", - default_value_t = 60.0 + default_value_t = 30.0 )] pub frame_rate: f64, } diff --git a/src/data_buffer.rs b/src/data_buffer.rs index eed93f4..bcead02 100644 --- a/src/data_buffer.rs +++ b/src/data_buffer.rs @@ -10,6 +10,8 @@ use serial_sensors_proto::{DataFrame, IdentifierCode, SensorData, SensorId}; use crate::fps_counter::FpsCounter; +const BUFFER_CAP: usize = 100; + #[derive(Debug)] pub struct SensorDataBuffer { inner: RwLock, @@ -145,7 +147,7 @@ impl SensorDataBuffer { impl Default for InnerSensorDataBuffer { fn default() -> Self { - let capacity = 100; + let capacity = BUFFER_CAP; Self { sensor_specific: true, maker: String::new(), diff --git a/src/main.rs b/src/main.rs index 667dce0..0b888e8 100644 --- a/src/main.rs +++ b/src/main.rs @@ -12,7 +12,7 @@ use tokio::sync::mpsc::{unbounded_channel, UnboundedReceiver, UnboundedSender}; use tokio_serial::{DataBits, FlowControl, Parity, SerialPortBuilderExt, SerialStream, StopBits}; use crate::app::App; -use crate::cli::Cli; +use crate::cli::{Cli, Commands}; use crate::data_buffer::SensorDataBuffer; use crate::utils::{initialize_logging, initialize_panic_handler}; @@ -26,9 +26,6 @@ mod fps_counter; mod tui; mod utils; -const PORT_NAME: &str = "/dev/ttyACM0"; -const BAUD_RATE: u32 = 1_000_000; - #[tokio::main] async fn main() -> Result<()> { dotenvy::dotenv().ok(); @@ -40,7 +37,7 @@ async fn main() -> Result<()> { let buffer = Arc::new(SensorDataBuffer::default()); // Open the serial port - let port = tokio_serial::new(PORT_NAME, BAUD_RATE) + let port = tokio_serial::new(args.port, args.baud) .data_bits(DataBits::Eight) .flow_control(FlowControl::None) .parity(Parity::None) @@ -60,8 +57,12 @@ async fn main() -> Result<()> { tokio::spawn(handle_data_recv(port, from_device, to_device)); // Run the app. 
- let mut app = App::new(args.frame_rate, buffer)?; - app.run().await?; + match args.command { + Commands::Ui(args) => { + let mut app = App::new(args.frame_rate, buffer)?; + app.run().await?; + } + } Ok(()) } From 6c990d4369e416d622304b315556801762020a83 Mon Sep 17 00:00:00 2001 From: Markus Mayer Date: Sat, 6 Jul 2024 19:21:44 +0200 Subject: [PATCH 02/10] Fix negation with overflow on UI --- src/cli.rs | 16 ++++++++++++++++ src/components/utils.rs | 25 ++++++++++++++++--------- src/main.rs | 37 ++++++++++++++++++++++++++++++------- 3 files changed, 62 insertions(+), 16 deletions(-) diff --git a/src/cli.rs b/src/cli.rs index 2d0df21..2529a7b 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -1,3 +1,5 @@ +use std::path::PathBuf; + use clap::{Parser, Subcommand}; use crate::utils::version; @@ -33,8 +35,10 @@ pub struct Cli { #[derive(Subcommand, Debug)] pub enum Commands { Ui(UiCommand), + Dump(Dump), } +/// Runs a UI to visualize the incoming data stream. #[derive(Parser, Debug)] pub struct UiCommand { #[arg( @@ -46,3 +50,15 @@ pub struct UiCommand { )] pub frame_rate: f64, } + +/// Dumps received data to disk. +#[derive(Parser, Debug)] +pub struct Dump { + #[arg( + short, + long, + value_name = "RAW_FILE", + help = "The file in which to store raw data" + )] + pub raw: Option, +} diff --git a/src/components/utils.rs b/src/components/utils.rs index 05fe557..8543005 100644 --- a/src/components/utils.rs +++ b/src/components/utils.rs @@ -1,7 +1,5 @@ use std::fmt::Display; -use std::ops::Neg; -use num_traits::ConstZero; use ratatui::prelude::*; use serial_sensors_proto::versions::Version1DataFrame; use serial_sensors_proto::{IdentifierCode, ScalarData, SensorData, SensorId, Vector3Data}; @@ -34,14 +32,23 @@ where Span::styled(format!("{:+4.6}", value), highlight.to_style_dim()) } -pub fn highlight_axis_3(x: T, y: T, z: T) -> (Max, Max, Max) -where - T: PartialOrd + ConstZero + Neg, -{ +pub fn highlight_axis_3(x: i16, y: i16, z: i16) -> (Max, Max, Max) { // Fake abs. 
- let (x, x_pos) = if x > T::ZERO { (x, true) } else { (-x, false) }; - let (y, y_pos) = if y > T::ZERO { (y, true) } else { (-y, false) }; - let (z, z_pos) = if z > T::ZERO { (z, true) } else { (-z, false) }; + let (x, x_pos) = if x > 0 { + (x, true) + } else { + (x.saturating_neg(), false) + }; + let (y, y_pos) = if y > 0 { + (y, true) + } else { + (y.saturating_neg(), false) + }; + let (z, z_pos) = if z > 0 { + (z, true) + } else { + (z.saturating_neg(), false) + }; if x > y && x > z { (Max::Positive.flip_if(!x_pos), Max::None, Max::None) diff --git a/src/main.rs b/src/main.rs index 0b888e8..1c477c8 100644 --- a/src/main.rs +++ b/src/main.rs @@ -6,13 +6,14 @@ use std::time::Duration; use clap::Parser; use color_eyre::eyre::Result; pub use ratatui::prelude::*; +use serial_sensors_proto::versions::Version1DataFrame; use serial_sensors_proto::{deserialize, DeserializationError}; use tokio::io::{self, AsyncReadExt, AsyncWriteExt}; use tokio::sync::mpsc::{unbounded_channel, UnboundedReceiver, UnboundedSender}; use tokio_serial::{DataBits, FlowControl, Parity, SerialPortBuilderExt, SerialStream, StopBits}; use crate::app::App; -use crate::cli::{Cli, Commands}; +use crate::cli::{Cli, Commands, Dump}; use crate::data_buffer::SensorDataBuffer; use crate::utils::{initialize_logging, initialize_panic_handler}; @@ -26,7 +27,7 @@ mod fps_counter; mod tui; mod utils; -#[tokio::main] +#[tokio::main(flavor = "multi_thread", worker_threads = 4)] async fn main() -> Result<()> { dotenvy::dotenv().ok(); initialize_logging()?; @@ -47,29 +48,40 @@ async fn main() -> Result<()> { .expect("Failed to open port"); let (from_device, receiver) = unbounded_channel::>(); + let (frames_tx, frames_rx) = unbounded_channel::(); let (_command, to_device) = unbounded_channel::(); // let (decoder_send, decoded_event) = unbounded_channel::(); - // Spawn a decoder thread. 
- tokio::spawn(decoder(receiver, buffer.clone())); - // Spawn a thread for reading data from the serial port tokio::spawn(handle_data_recv(port, from_device, to_device)); // Run the app. match args.command { Commands::Ui(args) => { + // Spawn a decoder thread. + tokio::spawn(decoder(receiver, frames_tx)); + + // Spawn a buffer thread. + tokio::spawn(decoder_to_buffer(frames_rx, buffer.clone())); + let mut app = App::new(args.frame_rate, buffer)?; app.run().await?; } + Commands::Dump(args) => { + dump_data(args, buffer).await?; + } } Ok(()) } +async fn dump_data(_args: Dump, _buffer: Arc) -> Result<()> { + todo!() +} + async fn decoder( mut receiver: UnboundedReceiver>, - data_buffer: Arc, + sender: UnboundedSender, ) -> anyhow::Result<()> { // Main loop for printing input from the serial line. let mut buffer = Vec::with_capacity(1024); @@ -87,7 +99,7 @@ async fn decoder( let first_nonzero = buffer.iter().position(|&x| x != 0).unwrap_or(buffer.len()); buffer.drain(0..first_nonzero); - data_buffer.enqueue(frame.data); + sender.send(frame.data)?; } Err(e) => { match e { @@ -110,6 +122,17 @@ async fn decoder( } } +async fn decoder_to_buffer( + mut receiver: UnboundedReceiver, + data_buffer: Arc, +) -> anyhow::Result<()> { + loop { + if let Some(data) = receiver.recv().await { + data_buffer.enqueue(data); + } + } +} + async fn handle_data_recv( mut port: SerialStream, from_device: UnboundedSender>, From bdd9ea12a6313ad28695a77af5e6a43cea0b007d Mon Sep 17 00:00:00 2001 From: Markus Mayer Date: Sat, 6 Jul 2024 19:35:22 +0200 Subject: [PATCH 03/10] Add raw data dumping --- .gitignore | 3 +++ src/main.rs | 47 +++++++++++++++++++++++++++++++++++++++++++---- 2 files changed, 46 insertions(+), 4 deletions(-) diff --git a/.gitignore b/.gitignore index ea8c4bf..2cff1be 100644 --- a/.gitignore +++ b/.gitignore @@ -1 +1,4 @@ /target +*.log +*.raw +*.gz diff --git a/src/main.rs b/src/main.rs index 1c477c8..0d46731 100644 --- a/src/main.rs +++ b/src/main.rs @@ -8,7 +8,8 @@ use 
color_eyre::eyre::Result; pub use ratatui::prelude::*; use serial_sensors_proto::versions::Version1DataFrame; use serial_sensors_proto::{deserialize, DeserializationError}; -use tokio::io::{self, AsyncReadExt, AsyncWriteExt}; +use tokio::fs::File; +use tokio::io::{self, AsyncReadExt, AsyncWriteExt, BufWriter}; use tokio::sync::mpsc::{unbounded_channel, UnboundedReceiver, UnboundedSender}; use tokio_serial::{DataBits, FlowControl, Parity, SerialPortBuilderExt, SerialStream, StopBits}; @@ -68,15 +69,53 @@ async fn main() -> Result<()> { app.run().await?; } Commands::Dump(args) => { - dump_data(args, buffer).await?; + // Intercept frames when dumping raw data. + let receiver = if let Some(ref path) = args.raw { + let file = match File::create(path).await { + Ok(file) => file, + Err(e) => { + return Err(e.into()); + } + }; + + let (tx, raw_rx) = unbounded_channel(); + tokio::spawn(dump_raw(file, receiver, tx)); + raw_rx + } else { + receiver + }; + + // Spawn a decoder thread. + tokio::spawn(decoder(receiver, frames_tx)); + + // Process frames. 
+ dump_data(args, frames_rx).await?; } } Ok(()) } -async fn dump_data(_args: Dump, _buffer: Arc) -> Result<()> { - todo!() +async fn dump_raw( + file: File, + mut rx: UnboundedReceiver>, + tx: UnboundedSender>, +) -> Result<()> { + let mut buffered_writer = BufWriter::new(file); + loop { + if let Some(data) = rx.recv().await { + buffered_writer.write_all(&data).await?; + tx.send(data)?; + } + } +} + +async fn dump_data(_args: Dump, mut rx: UnboundedReceiver) -> Result<()> { + loop { + if let Some(data) = rx.recv().await { + println!("Data received: {:?}", data); + } + } } async fn decoder( From cab0cc0bece96fa88bde347db2911b8b94daf5c9 Mon Sep 17 00:00:00 2001 From: Markus Mayer Date: Sat, 6 Jul 2024 19:55:58 +0200 Subject: [PATCH 04/10] Add theoretical buffering of gzipped files --- Cargo.lock | 33 +++++++++++++++++++++++++++++++++ Cargo.toml | 1 + src/main.rs | 41 ++++++++++++++++++++++++++++++++++++++--- 3 files changed, 72 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 27218d5..09b512e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -99,6 +99,19 @@ version = "1.0.86" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da" +[[package]] +name = "async-compression" +version = "0.4.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd066d0b4ef8ecb03a55319dc13aa6910616d0f44008a045bb1835af830abff5" +dependencies = [ + "flate2", + "futures-core", + "memchr", + "pin-project-lite", + "tokio", +] + [[package]] name = "async-trait" version = "0.1.80" @@ -391,6 +404,15 @@ dependencies = [ "libc", ] +[[package]] +name = "crc32fast" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3" +dependencies = [ + "cfg-if", +] + [[package]] name = "crossterm" version = "0.27.0" @@ -571,6 +593,16 @@ dependencies = [ "once_cell", ] 
+[[package]] +name = "flate2" +version = "1.0.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f54427cfd1c7829e2a139fcefea601bf088ebca651d2bf53ebc600eac295dae" +dependencies = [ + "crc32fast", + "miniz_oxide", +] + [[package]] name = "fnv" version = "1.0.7" @@ -1344,6 +1376,7 @@ name = "serial-sensors" version = "0.1.0" dependencies = [ "anyhow", + "async-compression", "better-panic", "clap", "color-eyre", diff --git a/Cargo.toml b/Cargo.toml index f6dda4a..cadbbd2 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -40,6 +40,7 @@ serde_json = "1.0.120" json5 = "0.4.1" ttl-queue = "0.2.0" num-traits = "0.2.19" +async-compression = { version = "0.4.11", features = ["gzip", "tokio"] } [patch.crates-io] # serial-sensors-proto = { git = "https://github.com/sunsided/serial-sensors-proto", features = ["std", "alloc", "unsafe", "quaternion", "micromath"] } diff --git a/src/main.rs b/src/main.rs index 0d46731..638f387 100644 --- a/src/main.rs +++ b/src/main.rs @@ -3,6 +3,8 @@ extern crate core; use std::sync::Arc; use std::time::Duration; +use async_compression::tokio::write::GzipEncoder; +use async_compression::Level; use clap::Parser; use color_eyre::eyre::Result; pub use ratatui::prelude::*; @@ -71,6 +73,12 @@ async fn main() -> Result<()> { Commands::Dump(args) => { // Intercept frames when dumping raw data. 
let receiver = if let Some(ref path) = args.raw { + let gzip = path + .extension() + .and_then(|ext| ext.to_str()) + .map(|ext| ext == "gz") + .unwrap_or(false); + let file = match File::create(path).await { Ok(file) => file, Err(e) => { @@ -79,7 +87,11 @@ async fn main() -> Result<()> { }; let (tx, raw_rx) = unbounded_channel(); - tokio::spawn(dump_raw(file, receiver, tx)); + if gzip { + tokio::spawn(dump_raw_gzipped(file, receiver, tx)); + } else { + tokio::spawn(dump_raw(file, receiver, tx)); + } raw_rx } else { receiver @@ -101,15 +113,38 @@ async fn dump_raw( mut rx: UnboundedReceiver>, tx: UnboundedSender>, ) -> Result<()> { - let mut buffered_writer = BufWriter::new(file); + let mut writer = BufWriter::new(file); loop { if let Some(data) = rx.recv().await { - buffered_writer.write_all(&data).await?; + writer.write_all(&data).await?; tx.send(data)?; } } } +async fn dump_raw_gzipped( + file: File, + mut rx: UnboundedReceiver>, + tx: UnboundedSender>, +) -> Result<()> { + let buffered_writer = BufWriter::new(file); + let mut writer = GzipEncoder::with_quality(buffered_writer, Level::Default); + loop { + if let Some(data) = rx.recv().await { + if let Err(e) = writer.write_all(&data).await { + writer.flush().await.ok(); + return Err(e.into()); + } + if let Err(e) = tx.send(data) { + writer.flush().await.ok(); + return Err(e.into()); + } + } + } + + // TODO: Add rendezvous on CTRL-C +} + async fn dump_data(_args: Dump, mut rx: UnboundedReceiver) -> Result<()> { loop { if let Some(data) = rx.recv().await { From 07c94dc3ea9020fa7cd0f7e45f99dae40e68c9f0 Mon Sep 17 00:00:00 2001 From: Markus Mayer Date: Sat, 6 Jul 2024 20:28:31 +0200 Subject: [PATCH 05/10] Add timestamp to CSV output --- src/cli.rs | 8 +++++ src/main.rs | 100 +++++++++++++++++++++++++++++++++++++++++++++++++--- 2 files changed, 103 insertions(+), 5 deletions(-) diff --git a/src/cli.rs b/src/cli.rs index 2529a7b..e58b7aa 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -61,4 +61,12 @@ pub struct Dump { help 
= "The file in which to store raw data" )] pub raw: Option, + + #[arg( + short, + long, + value_name = "DIRECTORY", + help = "The directory in which to store data" + )] + pub dir: PathBuf, } diff --git a/src/main.rs b/src/main.rs index 638f387..84ce7bb 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,7 +1,10 @@ extern crate core; +use std::collections::hash_map::Entry; +use std::collections::HashMap; +use std::path::PathBuf; use std::sync::Arc; -use std::time::Duration; +use std::time::{Duration, SystemTime, UNIX_EPOCH}; use async_compression::tokio::write::GzipEncoder; use async_compression::Level; @@ -9,14 +12,14 @@ use clap::Parser; use color_eyre::eyre::Result; pub use ratatui::prelude::*; use serial_sensors_proto::versions::Version1DataFrame; -use serial_sensors_proto::{deserialize, DeserializationError}; +use serial_sensors_proto::{deserialize, DeserializationError, SensorData, SensorId}; use tokio::fs::File; use tokio::io::{self, AsyncReadExt, AsyncWriteExt, BufWriter}; use tokio::sync::mpsc::{unbounded_channel, UnboundedReceiver, UnboundedSender}; use tokio_serial::{DataBits, FlowControl, Parity, SerialPortBuilderExt, SerialStream, StopBits}; use crate::app::App; -use crate::cli::{Cli, Commands, Dump}; +use crate::cli::{Cli, Commands}; use crate::data_buffer::SensorDataBuffer; use crate::utils::{initialize_logging, initialize_panic_handler}; @@ -101,7 +104,7 @@ async fn main() -> Result<()> { tokio::spawn(decoder(receiver, frames_tx)); // Process frames. 
- dump_data(args, frames_rx).await?; + dump_data(args.dir, frames_rx).await?; } } @@ -145,14 +148,101 @@ async fn dump_raw_gzipped( // TODO: Add rendezvous on CTRL-C } -async fn dump_data(_args: Dump, mut rx: UnboundedReceiver) -> Result<()> { +async fn dump_data(directory: PathBuf, mut rx: UnboundedReceiver) -> Result<()> { + let mut files: HashMap> = HashMap::new(); + loop { + let now = SystemTime::now(); + let since_the_epoch = now.duration_since(UNIX_EPOCH).expect("Time went backwards"); + if let Some(data) = rx.recv().await { println!("Data received: {:?}", data); + let target = SensorId::from(&data); + + match files.entry(target.clone()) { + Entry::Occupied(mut entry) => { + let data = create_data_row(since_the_epoch, target, &data); + entry.get_mut().write_all(&data).await?; + } + Entry::Vacant(entry) => { + let file_name = format!( + "{}-{}x{}.csv", + target.tag(), + target.num_components().unwrap_or(0), + target.value_type() as u8 + ); + println!("New sensor; creating new file: {file_name}"); + let path = directory.join(file_name); + let file = match File::create(path).await { + Ok(file) => file, + Err(e) => { + return Err(e.into()); + } + }; + + let writer = entry.insert(BufWriter::new(file)); + + // Create header row. + let header = create_header_row(&data); + writer.write_all(&header).await?; + + // Create first data row. 
+ let data = create_data_row(since_the_epoch, target, &data); + writer.write_all(&data).await?; + + writer.flush().await?; + } + }; } } } +fn create_header_row(data: &Version1DataFrame) -> Vec { + let mut row = String::from("host_time,sensor_tag,num_components,value_type"); + match data.value { + SensorData::SystemClockFrequency(_) => {} + SensorData::AccelerometerI16(_) => {} + SensorData::MagnetometerI16(_) => {} + SensorData::TemperatureI16(_) => {} + SensorData::GyroscopeI16(_) => {} + SensorData::HeadingI16(_) => {} + SensorData::EulerAnglesF32(_) => {} + SensorData::OrientationQuaternionF32(_) => {} + SensorData::LinearRanges(_) => {} + SensorData::Identification(_) => {} + } + row.push('\n'); + row.as_bytes().into() +} + +fn create_data_row( + since_the_epoch: Duration, + target: SensorId, + data: &Version1DataFrame, +) -> Vec { + let mut row = format!( + "{},{:02X},{},{:02X}", + since_the_epoch.as_secs_f64(), + target.tag(), + target.num_components().unwrap_or(0), + target.value_type() as u8 + ); + match data.value { + SensorData::SystemClockFrequency(_) => {} + SensorData::AccelerometerI16(_) => {} + SensorData::MagnetometerI16(_) => {} + SensorData::TemperatureI16(_) => {} + SensorData::GyroscopeI16(_) => {} + SensorData::HeadingI16(_) => {} + SensorData::EulerAnglesF32(_) => {} + SensorData::OrientationQuaternionF32(_) => {} + SensorData::LinearRanges(_) => {} + SensorData::Identification(_) => {} + } + row.push('\n'); + row.as_bytes().into() +} + async fn decoder( mut receiver: UnboundedReceiver>, sender: UnboundedSender, From e82f496b0aab58b4e4b4460866d16cac606632a5 Mon Sep 17 00:00:00 2001 From: Markus Mayer Date: Sat, 6 Jul 2024 21:50:05 +0200 Subject: [PATCH 06/10] Add CSV row formatting --- src/main.rs | 257 +++++++++++++++++++++++++++++++++++++++++++--------- 1 file changed, 213 insertions(+), 44 deletions(-) diff --git a/src/main.rs b/src/main.rs index 84ce7bb..4545e29 100644 --- a/src/main.rs +++ b/src/main.rs @@ -10,9 +10,14 @@ use 
async_compression::tokio::write::GzipEncoder; use async_compression::Level; use clap::Parser; use color_eyre::eyre::Result; +use num_traits::real::Real; pub use ratatui::prelude::*; +use serial_sensors_proto::types::LinearRangeInfo; use serial_sensors_proto::versions::Version1DataFrame; -use serial_sensors_proto::{deserialize, DeserializationError, SensorData, SensorId}; +use serial_sensors_proto::{ + deserialize, DataFrame, DeserializationError, IdentifierCode, ScalarData, SensorData, SensorId, + ValueType, Vector3Data, Vector4Data, +}; use tokio::fs::File; use tokio::io::{self, AsyncReadExt, AsyncWriteExt, BufWriter}; use tokio::sync::mpsc::{unbounded_channel, UnboundedReceiver, UnboundedSender}; @@ -150,6 +155,7 @@ async fn dump_raw_gzipped( async fn dump_data(directory: PathBuf, mut rx: UnboundedReceiver) -> Result<()> { let mut files: HashMap> = HashMap::new(); + let mut ranges: HashMap = HashMap::new(); loop { let now = SystemTime::now(); @@ -158,18 +164,32 @@ async fn dump_data(directory: PathBuf, mut rx: UnboundedReceiver continue, + Some(data) => data, + }; match files.entry(target.clone()) { Entry::Occupied(mut entry) => { - let data = create_data_row(since_the_epoch, target, &data); - entry.get_mut().write_all(&data).await?; + entry.get_mut().write_all(&data_row).await?; + entry.get_mut().flush().await?; } Entry::Vacant(entry) => { let file_name = format!( - "{}-{}x{}.csv", + "{}-{}-{}-x{}.csv", target.tag(), - target.num_components().unwrap_or(0), - target.value_type() as u8 + sdt.0, + value_type_code(target.value_type()), + target.num_components().unwrap_or(0) ); println!("New sensor; creating new file: {file_name}"); let path = directory.join(file_name); @@ -180,67 +200,137 @@ async fn dump_data(directory: PathBuf, mut rx: UnboundedReceiver Vec { - let mut row = String::from("host_time,sensor_tag,num_components,value_type"); +#[derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)] +struct SensorDataType(&'static str); + +fn map_data(data: 
&SensorData) -> SensorDataType { + match data { + SensorData::SystemClockFrequency(_) => SensorDataType("clock"), + SensorData::AccelerometerI16(_) => SensorDataType("acc"), + SensorData::MagnetometerI16(_) => SensorDataType("mag"), + SensorData::TemperatureI16(_) => SensorDataType("temp"), + SensorData::GyroscopeI16(_) => SensorDataType("gyro"), + SensorData::HeadingI16(_) => SensorDataType("heading"), + SensorData::EulerAnglesF32(_) => SensorDataType("euler"), + SensorData::OrientationQuaternionF32(_) => SensorDataType("quat"), + SensorData::LinearRanges(_) => SensorDataType("lranges"), + SensorData::Identification(_) => SensorDataType("ident"), + } +} + +fn create_header_row(data: &Version1DataFrame) -> Option> { + let mut row = String::from("host_time,device_time,sensor_tag,num_components,value_type"); match data.value { - SensorData::SystemClockFrequency(_) => {} - SensorData::AccelerometerI16(_) => {} - SensorData::MagnetometerI16(_) => {} - SensorData::TemperatureI16(_) => {} - SensorData::GyroscopeI16(_) => {} - SensorData::HeadingI16(_) => {} - SensorData::EulerAnglesF32(_) => {} - SensorData::OrientationQuaternionF32(_) => {} - SensorData::LinearRanges(_) => {} - SensorData::Identification(_) => {} + SensorData::SystemClockFrequency(_) => row.push_str(",freq"), + SensorData::AccelerometerI16(_) => row.push_str(",x,y,z,converted_x,converted_y,converted_z"), + SensorData::MagnetometerI16(_) => row.push_str(",x,y,z,converted_x,converted_y,converted_z"), + SensorData::TemperatureI16(_) => row.push_str(",temp,converted_temp"), + SensorData::GyroscopeI16(_) => row.push_str(",x,y,z,converted_x,converted_y,converted_z"), + SensorData::HeadingI16(_) => row.push_str(",heading,converted_heading"), + SensorData::EulerAnglesF32(_) => row.push_str(",x,y,z,converted_x,converted_y,converted_z"), + SensorData::OrientationQuaternionF32(_) => row.push_str(",a,b,c,d,converted_a,converted_b,converted_c,converted_d"), + SensorData::LinearRanges(_) => 
row.push_str(",resolution_bits,scale_op,scale,scale_raw,scale_decimals,offset,offset_raw,offset_decimals"), + SensorData::Identification(_) => row.push_str(",code,value"), } row.push('\n'); - row.as_bytes().into() + Some(row.as_bytes().into()) } fn create_data_row( since_the_epoch: Duration, - target: SensorId, + target: &SensorId, data: &Version1DataFrame, -) -> Vec { + ranges: Option<&LinearRangeInfo>, +) -> Option> { + let device_time = decode_device_time(data); let mut row = format!( - "{},{:02X},{},{:02X}", + "{},{},{:02X},{},{},", since_the_epoch.as_secs_f64(), + device_time, target.tag(), target.num_components().unwrap_or(0), - target.value_type() as u8 + value_type_code(target.value_type()) ); match data.value { - SensorData::SystemClockFrequency(_) => {} - SensorData::AccelerometerI16(_) => {} - SensorData::MagnetometerI16(_) => {} - SensorData::TemperatureI16(_) => {} - SensorData::GyroscopeI16(_) => {} - SensorData::HeadingI16(_) => {} - SensorData::EulerAnglesF32(_) => {} - SensorData::OrientationQuaternionF32(_) => {} - SensorData::LinearRanges(_) => {} - SensorData::Identification(_) => {} + SensorData::SystemClockFrequency(data) => row.push_str(&format!("{}", data.value)), + SensorData::AccelerometerI16(vec) => { + row.push_str(&format!("{},{},{}", vec.x, vec.y, vec.z)); + csv_convert_push_vec3(&mut row, &vec, &ranges) + } + SensorData::MagnetometerI16(vec) => { + row.push_str(&format!("{},{},{}", vec.x, vec.y, vec.z)); + csv_convert_push_vec3(&mut row, &vec, &ranges) + } + SensorData::TemperatureI16(temp) => { + row.push_str(&format!("{}", temp.value)); + csv_convert_push_scalar(&mut row, &temp, &ranges) + } + SensorData::GyroscopeI16(vec) => { + row.push_str(&format!("{},{},{}", vec.x, vec.y, vec.z)); + csv_convert_push_vec3(&mut row, &vec, &ranges) + } + SensorData::HeadingI16(heading) => { + row.push_str(&format!("{}", heading.value)); + csv_convert_push_scalar(&mut row, &heading, &ranges) + } + SensorData::EulerAnglesF32(vec) => { + 
row.push_str(&format!("{},{},{}", vec.x, vec.y, vec.z)); + csv_convert_push_vec3(&mut row, &vec, &ranges) + } + SensorData::OrientationQuaternionF32(vec) => { + row.push_str(&format!("{},{},{},{}", vec.a, vec.b, vec.c, vec.d)); + csv_convert_push_vec4(&mut row, &vec, &ranges) + } + SensorData::LinearRanges(ref lr) => row.push_str(&format!( + "{},{:02X},{},{},{},{},{},{}", + lr.resolution_bits, + lr.scale_op, + lr.scale as f32 * 10.0.powi(-(lr.scale_decimals as i32)), + lr.scale, + lr.scale_decimals, + lr.offset as f32 * 10.0.powi(-(lr.offset_decimals as i32)), + lr.offset, + lr.offset_decimals + )), + SensorData::Identification(ref ident) => row.push_str(&format!( + "{},{}", + ident_code(ident.code), + std::str::from_utf8(&ident.value).unwrap_or("").trim() + )), } row.push('\n'); - row.as_bytes().into() + Some(row.as_bytes().into()) +} + +fn decode_device_time(data: &Version1DataFrame) -> f32 { + if data.system_secs != u32::MAX { + data.system_secs as f32 + + if data.system_millis != u16::MAX { + data.system_millis as f32 / 1_000.0 + } else { + 0.0 + } + + if data.system_nanos != u16::MAX { + data.system_nanos as f32 / 1_000_000.0 + } else { + 0.0 + } + } else { + 0.0 + } } async fn decoder( @@ -333,3 +423,82 @@ impl Drop for RecvObserver { println!("Receive loop finished"); } } + +fn csv_convert_push_scalar( + string: &mut String, + vec: &ScalarData, + ri: &Option<&LinearRangeInfo>, +) { + if let Some(ri) = ri { + let x = ri.convert(vec.value as f32); + string.push_str(&format!(",{}", x)) + } else { + string.push(',') + } +} + +fn csv_convert_push_vec3( + string: &mut String, + vec: &Vector3Data, + ri: &Option<&LinearRangeInfo>, +) where + T: Into + Copy, +{ + if let Some(ri) = ri { + let x = ri.convert(vec.x.into()); + let y = ri.convert(vec.y.into()); + let z = ri.convert(vec.z.into()); + string.push_str(&format!(",{},{},{}", x, y, z)) + } else { + string.push_str(",,,") + } +} + +fn csv_convert_push_vec4( + string: &mut String, + vec: &Vector4Data, + ri: 
&Option<&LinearRangeInfo>, +) where + T: Into + Copy, +{ + if let Some(ri) = ri { + let a = ri.convert(vec.a.into()); + let b = ri.convert(vec.b.into()); + let c = ri.convert(vec.c.into()); + let d = ri.convert(vec.d.into()); + string.push_str(&format!(",{},{},{},{}", a, b, c, d)) + } else { + string.push_str(",,,,") + } +} + +fn ident_code(code: IdentifierCode) -> &'static str { + match code { + IdentifierCode::Generic => "generic", + IdentifierCode::Maker => "maker", + IdentifierCode::Product => "product", + IdentifierCode::Revision => "revision", + } +} + +fn value_type_code(vt: ValueType) -> &'static str { + match vt { + ValueType::UInt8 => "u8", + ValueType::SInt8 => "i8", + ValueType::UInt16 => "u16", + ValueType::SInt16 => "i16", + ValueType::UInt32 => "u32", + ValueType::SInt32 => "i32", + ValueType::UInt64 => "u64", + ValueType::SInt64 => "i64", + ValueType::UInt128 => "u128", + ValueType::SInt128 => "i128", + ValueType::Float32 => "f32", + ValueType::Float64 => "f64", + ValueType::Q8_8 => "Q8_8", + ValueType::Q16_16 => "Q16_16", + ValueType::Q32_32 => "Q32_32", + ValueType::LinearRange => "lrange", + ValueType::Identifier => "ident", + } +} From 2f1c72345f329935df4f3a5f1aaab312ed6272a4 Mon Sep 17 00:00:00 2001 From: Markus Mayer Date: Sat, 6 Jul 2024 23:09:58 +0200 Subject: [PATCH 07/10] Apply clippy fixes --- src/dumping.rs | 315 +++++++++++++++++++++++++++++++++++++++++++++++++ src/main.rs | 314 +----------------------------------------------- 2 files changed, 320 insertions(+), 309 deletions(-) create mode 100644 src/dumping.rs diff --git a/src/dumping.rs b/src/dumping.rs new file mode 100644 index 0000000..a33060f --- /dev/null +++ b/src/dumping.rs @@ -0,0 +1,315 @@ +use std::collections::hash_map::Entry; +use std::collections::HashMap; +use std::path::PathBuf; +use std::time::{Duration, SystemTime, UNIX_EPOCH}; + +use async_compression::tokio::write::GzipEncoder; +use async_compression::Level; +use serial_sensors_proto::types::LinearRangeInfo; +use 
serial_sensors_proto::versions::Version1DataFrame; +use serial_sensors_proto::{ + DataFrame, IdentifierCode, ScalarData, SensorData, SensorId, ValueType, Vector3Data, + Vector4Data, +}; +use tokio::fs::File; +use tokio::io::{AsyncWriteExt, BufWriter}; +use tokio::sync::mpsc::{UnboundedReceiver, UnboundedSender}; + +pub async fn dump_raw( + file: File, + mut rx: UnboundedReceiver>, + tx: UnboundedSender>, +) -> color_eyre::Result<()> { + let mut writer = BufWriter::new(file); + loop { + if let Some(data) = rx.recv().await { + writer.write_all(&data).await?; + tx.send(data)?; + } + } +} + +pub async fn dump_raw_gzipped( + file: File, + mut rx: UnboundedReceiver>, + tx: UnboundedSender>, +) -> color_eyre::Result<()> { + let buffered_writer = BufWriter::new(file); + let mut writer = GzipEncoder::with_quality(buffered_writer, Level::Default); + loop { + if let Some(data) = rx.recv().await { + if let Err(e) = writer.write_all(&data).await { + writer.flush().await.ok(); + return Err(e.into()); + } + if let Err(e) = tx.send(data) { + writer.flush().await.ok(); + return Err(e.into()); + } + } + } + + // TODO: Add rendezvous on CTRL-C +} + +pub async fn dump_data( + directory: PathBuf, + mut rx: UnboundedReceiver, +) -> color_eyre::Result<()> { + let mut files: HashMap> = HashMap::new(); + let mut ranges: HashMap = HashMap::new(); + + loop { + let now = SystemTime::now(); + let since_the_epoch = now.duration_since(UNIX_EPOCH).expect("Time went backwards"); + + if let Some(data) = rx.recv().await { + println!("Data received: {:?}", data); + let target = SensorId::from(&data); + let sdt = map_data(&data.value); + + let ranges = if let SensorData::LinearRanges(ref info) = data.value { + ranges.insert(data.target(), info.clone()); + ranges.get(&data.target()) + } else { + ranges.get(&target.clone()) + }; + + let data_row = match create_data_row(since_the_epoch, &target, &data, ranges) { + None => continue, + Some(data) => data, + }; + + match files.entry(target.clone()) { + 
Entry::Occupied(mut entry) => { + entry.get_mut().write_all(&data_row).await?; + entry.get_mut().flush().await?; + } + Entry::Vacant(entry) => { + let file_name = format!( + "{}-{}-{}-x{}.csv", + target.tag(), + sdt.0, + value_type_code(target.value_type()), + target.num_components().unwrap_or(0) + ); + println!("New sensor; creating new file: {file_name}"); + let path = directory.join(file_name); + let file = match File::create(path).await { + Ok(file) => file, + Err(e) => { + return Err(e.into()); + } + }; + + // Create header row. + if let Some(header) = create_header_row(&data) { + let writer = entry.insert(BufWriter::new(file)); + writer.write_all(&header).await?; + writer.write_all(&data_row).await?; + writer.flush().await?; + } + } + }; + } + } +} + +#[derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)] +struct SensorDataType(&'static str); + +fn map_data(data: &SensorData) -> SensorDataType { + match data { + SensorData::SystemClockFrequency(_) => SensorDataType("clock"), + SensorData::AccelerometerI16(_) => SensorDataType("acc"), + SensorData::MagnetometerI16(_) => SensorDataType("mag"), + SensorData::TemperatureI16(_) => SensorDataType("temp"), + SensorData::GyroscopeI16(_) => SensorDataType("gyro"), + SensorData::HeadingI16(_) => SensorDataType("heading"), + SensorData::EulerAnglesF32(_) => SensorDataType("euler"), + SensorData::OrientationQuaternionF32(_) => SensorDataType("quat"), + SensorData::LinearRanges(_) => SensorDataType("lranges"), + SensorData::Identification(_) => SensorDataType("ident"), + } +} + +fn create_header_row(data: &Version1DataFrame) -> Option> { + let mut row = String::from("host_time,device_time,sensor_tag,num_components,value_type"); + match data.value { + SensorData::SystemClockFrequency(_) => row.push_str(",freq"), + SensorData::AccelerometerI16(_) => row.push_str(",x,y,z,converted_x,converted_y,converted_z"), + SensorData::MagnetometerI16(_) => row.push_str(",x,y,z,converted_x,converted_y,converted_z"), + 
SensorData::TemperatureI16(_) => row.push_str(",temp,converted_temp"), + SensorData::GyroscopeI16(_) => row.push_str(",x,y,z,converted_x,converted_y,converted_z"), + SensorData::HeadingI16(_) => row.push_str(",heading,converted_heading"), + SensorData::EulerAnglesF32(_) => row.push_str(",x,y,z,converted_x,converted_y,converted_z"), + SensorData::OrientationQuaternionF32(_) => row.push_str(",a,b,c,d,converted_a,converted_b,converted_c,converted_d"), + SensorData::LinearRanges(_) => row.push_str(",resolution_bits,scale_op,scale,scale_raw,scale_decimals,offset,offset_raw,offset_decimals"), + SensorData::Identification(_) => row.push_str(",code,value"), + } + row.push('\n'); + Some(row.as_bytes().into()) +} + +fn create_data_row( + since_the_epoch: Duration, + target: &SensorId, + data: &Version1DataFrame, + ranges: Option<&LinearRangeInfo>, +) -> Option> { + let device_time = decode_device_time(data); + let mut row = format!( + "{},{},{:02X},{},{},", + since_the_epoch.as_secs_f64(), + device_time, + target.tag(), + target.num_components().unwrap_or(0), + value_type_code(target.value_type()) + ); + match data.value { + SensorData::SystemClockFrequency(data) => row.push_str(&format!("{}", data.value)), + SensorData::AccelerometerI16(vec) => { + row.push_str(&format!("{},{},{}", vec.x, vec.y, vec.z)); + csv_convert_push_vec3(&mut row, &vec, &ranges) + } + SensorData::MagnetometerI16(vec) => { + row.push_str(&format!("{},{},{}", vec.x, vec.y, vec.z)); + csv_convert_push_vec3(&mut row, &vec, &ranges) + } + SensorData::TemperatureI16(temp) => { + row.push_str(&format!("{}", temp.value)); + csv_convert_push_scalar(&mut row, &temp, &ranges) + } + SensorData::GyroscopeI16(vec) => { + row.push_str(&format!("{},{},{}", vec.x, vec.y, vec.z)); + csv_convert_push_vec3(&mut row, &vec, &ranges) + } + SensorData::HeadingI16(heading) => { + row.push_str(&format!("{}", heading.value)); + csv_convert_push_scalar(&mut row, &heading, &ranges) + } + SensorData::EulerAnglesF32(vec) => { + 
row.push_str(&format!("{},{},{}", vec.x, vec.y, vec.z)); + csv_convert_push_vec3(&mut row, &vec, &ranges) + } + SensorData::OrientationQuaternionF32(vec) => { + row.push_str(&format!("{},{},{},{}", vec.a, vec.b, vec.c, vec.d)); + csv_convert_push_vec4(&mut row, &vec, &ranges) + } + SensorData::LinearRanges(ref lr) => row.push_str(&format!( + "{},{:02X},{},{},{},{},{},{}", + lr.resolution_bits, + lr.scale_op, + lr.scale as f32 * 10.0_f32.powi(-(lr.scale_decimals as i32)), + lr.scale, + lr.scale_decimals, + lr.offset as f32 * 10.0_f32.powi(-(lr.offset_decimals as i32)), + lr.offset, + lr.offset_decimals + )), + SensorData::Identification(ref ident) => row.push_str(&format!( + "{},{}", + ident_code(ident.code), + std::str::from_utf8(&ident.value).unwrap_or("").trim() + )), + } + row.push('\n'); + Some(row.as_bytes().into()) +} + +fn decode_device_time(data: &Version1DataFrame) -> f32 { + if data.system_secs != u32::MAX { + data.system_secs as f32 + + if data.system_millis != u16::MAX { + data.system_millis as f32 / 1_000.0 + } else { + 0.0 + } + + if data.system_nanos != u16::MAX { + data.system_nanos as f32 / 1_000_000.0 + } else { + 0.0 + } + } else { + 0.0 + } +} + +fn csv_convert_push_scalar( + string: &mut String, + vec: &ScalarData, + ri: &Option<&LinearRangeInfo>, +) { + if let Some(ri) = ri { + let x = ri.convert(vec.value as f32); + string.push_str(&format!(",{}", x)) + } else { + string.push(',') + } +} + +fn csv_convert_push_vec3( + string: &mut String, + vec: &Vector3Data, + ri: &Option<&LinearRangeInfo>, +) where + T: Into + Copy, +{ + if let Some(ri) = ri { + let x = ri.convert(vec.x.into()); + let y = ri.convert(vec.y.into()); + let z = ri.convert(vec.z.into()); + string.push_str(&format!(",{},{},{}", x, y, z)) + } else { + string.push_str(",,,") + } +} + +fn csv_convert_push_vec4( + string: &mut String, + vec: &Vector4Data, + ri: &Option<&LinearRangeInfo>, +) where + T: Into + Copy, +{ + if let Some(ri) = ri { + let a = ri.convert(vec.a.into()); + let 
b = ri.convert(vec.b.into()); + let c = ri.convert(vec.c.into()); + let d = ri.convert(vec.d.into()); + string.push_str(&format!(",{},{},{},{}", a, b, c, d)) + } else { + string.push_str(",,,,") + } +} + +fn ident_code(code: IdentifierCode) -> &'static str { + match code { + IdentifierCode::Generic => "generic", + IdentifierCode::Maker => "maker", + IdentifierCode::Product => "product", + IdentifierCode::Revision => "revision", + } +} + +fn value_type_code(vt: ValueType) -> &'static str { + match vt { + ValueType::UInt8 => "u8", + ValueType::SInt8 => "i8", + ValueType::UInt16 => "u16", + ValueType::SInt16 => "i16", + ValueType::UInt32 => "u32", + ValueType::SInt32 => "i32", + ValueType::UInt64 => "u64", + ValueType::SInt64 => "i64", + ValueType::UInt128 => "u128", + ValueType::SInt128 => "i128", + ValueType::Float32 => "f32", + ValueType::Float64 => "f64", + ValueType::Q8_8 => "Q8_8", + ValueType::Q16_16 => "Q16_16", + ValueType::Q32_32 => "Q32_32", + ValueType::LinearRange => "lrange", + ValueType::Identifier => "ident", + } +} diff --git a/src/main.rs b/src/main.rs index 4545e29..7b6c233 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,31 +1,22 @@ extern crate core; -use std::collections::hash_map::Entry; -use std::collections::HashMap; -use std::path::PathBuf; use std::sync::Arc; -use std::time::{Duration, SystemTime, UNIX_EPOCH}; +use std::time::Duration; -use async_compression::tokio::write::GzipEncoder; -use async_compression::Level; use clap::Parser; use color_eyre::eyre::Result; -use num_traits::real::Real; pub use ratatui::prelude::*; -use serial_sensors_proto::types::LinearRangeInfo; use serial_sensors_proto::versions::Version1DataFrame; -use serial_sensors_proto::{ - deserialize, DataFrame, DeserializationError, IdentifierCode, ScalarData, SensorData, SensorId, - ValueType, Vector3Data, Vector4Data, -}; +use serial_sensors_proto::{deserialize, DeserializationError}; use tokio::fs::File; -use tokio::io::{self, AsyncReadExt, AsyncWriteExt, BufWriter}; +use 
tokio::io::{self, AsyncReadExt, AsyncWriteExt}; use tokio::sync::mpsc::{unbounded_channel, UnboundedReceiver, UnboundedSender}; use tokio_serial::{DataBits, FlowControl, Parity, SerialPortBuilderExt, SerialStream, StopBits}; use crate::app::App; use crate::cli::{Cli, Commands}; use crate::data_buffer::SensorDataBuffer; +use crate::dumping::{dump_data, dump_raw, dump_raw_gzipped}; use crate::utils::{initialize_logging, initialize_panic_handler}; mod action; @@ -34,6 +25,7 @@ mod cli; mod components; mod config; mod data_buffer; +mod dumping; mod fps_counter; mod tui; mod utils; @@ -116,223 +108,6 @@ async fn main() -> Result<()> { Ok(()) } -async fn dump_raw( - file: File, - mut rx: UnboundedReceiver>, - tx: UnboundedSender>, -) -> Result<()> { - let mut writer = BufWriter::new(file); - loop { - if let Some(data) = rx.recv().await { - writer.write_all(&data).await?; - tx.send(data)?; - } - } -} - -async fn dump_raw_gzipped( - file: File, - mut rx: UnboundedReceiver>, - tx: UnboundedSender>, -) -> Result<()> { - let buffered_writer = BufWriter::new(file); - let mut writer = GzipEncoder::with_quality(buffered_writer, Level::Default); - loop { - if let Some(data) = rx.recv().await { - if let Err(e) = writer.write_all(&data).await { - writer.flush().await.ok(); - return Err(e.into()); - } - if let Err(e) = tx.send(data) { - writer.flush().await.ok(); - return Err(e.into()); - } - } - } - - // TODO: Add rendezvous on CTRL-C -} - -async fn dump_data(directory: PathBuf, mut rx: UnboundedReceiver) -> Result<()> { - let mut files: HashMap> = HashMap::new(); - let mut ranges: HashMap = HashMap::new(); - - loop { - let now = SystemTime::now(); - let since_the_epoch = now.duration_since(UNIX_EPOCH).expect("Time went backwards"); - - if let Some(data) = rx.recv().await { - println!("Data received: {:?}", data); - let target = SensorId::from(&data); - let sdt = map_data(&data.value); - - let ranges = if let SensorData::LinearRanges(ref info) = data.value { - 
ranges.insert(data.target(), info.clone()); - ranges.get(&data.target()) - } else { - ranges.get(&target.clone()) - }; - - let data_row = match create_data_row(since_the_epoch, &target, &data, ranges) { - None => continue, - Some(data) => data, - }; - - match files.entry(target.clone()) { - Entry::Occupied(mut entry) => { - entry.get_mut().write_all(&data_row).await?; - entry.get_mut().flush().await?; - } - Entry::Vacant(entry) => { - let file_name = format!( - "{}-{}-{}-x{}.csv", - target.tag(), - sdt.0, - value_type_code(target.value_type()), - target.num_components().unwrap_or(0) - ); - println!("New sensor; creating new file: {file_name}"); - let path = directory.join(file_name); - let file = match File::create(path).await { - Ok(file) => file, - Err(e) => { - return Err(e.into()); - } - }; - - // Create header row. - if let Some(header) = create_header_row(&data) { - let writer = entry.insert(BufWriter::new(file)); - writer.write_all(&header).await?; - writer.write_all(&data_row).await?; - writer.flush().await?; - } - } - }; - } - } -} - -#[derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)] -struct SensorDataType(&'static str); - -fn map_data(data: &SensorData) -> SensorDataType { - match data { - SensorData::SystemClockFrequency(_) => SensorDataType("clock"), - SensorData::AccelerometerI16(_) => SensorDataType("acc"), - SensorData::MagnetometerI16(_) => SensorDataType("mag"), - SensorData::TemperatureI16(_) => SensorDataType("temp"), - SensorData::GyroscopeI16(_) => SensorDataType("gyro"), - SensorData::HeadingI16(_) => SensorDataType("heading"), - SensorData::EulerAnglesF32(_) => SensorDataType("euler"), - SensorData::OrientationQuaternionF32(_) => SensorDataType("quat"), - SensorData::LinearRanges(_) => SensorDataType("lranges"), - SensorData::Identification(_) => SensorDataType("ident"), - } -} - -fn create_header_row(data: &Version1DataFrame) -> Option> { - let mut row = 
String::from("host_time,device_time,sensor_tag,num_components,value_type"); - match data.value { - SensorData::SystemClockFrequency(_) => row.push_str(",freq"), - SensorData::AccelerometerI16(_) => row.push_str(",x,y,z,converted_x,converted_y,converted_z"), - SensorData::MagnetometerI16(_) => row.push_str(",x,y,z,converted_x,converted_y,converted_z"), - SensorData::TemperatureI16(_) => row.push_str(",temp,converted_temp"), - SensorData::GyroscopeI16(_) => row.push_str(",x,y,z,converted_x,converted_y,converted_z"), - SensorData::HeadingI16(_) => row.push_str(",heading,converted_heading"), - SensorData::EulerAnglesF32(_) => row.push_str(",x,y,z,converted_x,converted_y,converted_z"), - SensorData::OrientationQuaternionF32(_) => row.push_str(",a,b,c,d,converted_a,converted_b,converted_c,converted_d"), - SensorData::LinearRanges(_) => row.push_str(",resolution_bits,scale_op,scale,scale_raw,scale_decimals,offset,offset_raw,offset_decimals"), - SensorData::Identification(_) => row.push_str(",code,value"), - } - row.push('\n'); - Some(row.as_bytes().into()) -} - -fn create_data_row( - since_the_epoch: Duration, - target: &SensorId, - data: &Version1DataFrame, - ranges: Option<&LinearRangeInfo>, -) -> Option> { - let device_time = decode_device_time(data); - let mut row = format!( - "{},{},{:02X},{},{},", - since_the_epoch.as_secs_f64(), - device_time, - target.tag(), - target.num_components().unwrap_or(0), - value_type_code(target.value_type()) - ); - match data.value { - SensorData::SystemClockFrequency(data) => row.push_str(&format!("{}", data.value)), - SensorData::AccelerometerI16(vec) => { - row.push_str(&format!("{},{},{}", vec.x, vec.y, vec.z)); - csv_convert_push_vec3(&mut row, &vec, &ranges) - } - SensorData::MagnetometerI16(vec) => { - row.push_str(&format!("{},{},{}", vec.x, vec.y, vec.z)); - csv_convert_push_vec3(&mut row, &vec, &ranges) - } - SensorData::TemperatureI16(temp) => { - row.push_str(&format!("{}", temp.value)); - csv_convert_push_scalar(&mut row, 
&temp, &ranges) - } - SensorData::GyroscopeI16(vec) => { - row.push_str(&format!("{},{},{}", vec.x, vec.y, vec.z)); - csv_convert_push_vec3(&mut row, &vec, &ranges) - } - SensorData::HeadingI16(heading) => { - row.push_str(&format!("{}", heading.value)); - csv_convert_push_scalar(&mut row, &heading, &ranges) - } - SensorData::EulerAnglesF32(vec) => { - row.push_str(&format!("{},{},{}", vec.x, vec.y, vec.z)); - csv_convert_push_vec3(&mut row, &vec, &ranges) - } - SensorData::OrientationQuaternionF32(vec) => { - row.push_str(&format!("{},{},{},{}", vec.a, vec.b, vec.c, vec.d)); - csv_convert_push_vec4(&mut row, &vec, &ranges) - } - SensorData::LinearRanges(ref lr) => row.push_str(&format!( - "{},{:02X},{},{},{},{},{},{}", - lr.resolution_bits, - lr.scale_op, - lr.scale as f32 * 10.0.powi(-(lr.scale_decimals as i32)), - lr.scale, - lr.scale_decimals, - lr.offset as f32 * 10.0.powi(-(lr.offset_decimals as i32)), - lr.offset, - lr.offset_decimals - )), - SensorData::Identification(ref ident) => row.push_str(&format!( - "{},{}", - ident_code(ident.code), - std::str::from_utf8(&ident.value).unwrap_or("").trim() - )), - } - row.push('\n'); - Some(row.as_bytes().into()) -} - -fn decode_device_time(data: &Version1DataFrame) -> f32 { - if data.system_secs != u32::MAX { - data.system_secs as f32 - + if data.system_millis != u16::MAX { - data.system_millis as f32 / 1_000.0 - } else { - 0.0 - } - + if data.system_nanos != u16::MAX { - data.system_nanos as f32 / 1_000_000.0 - } else { - 0.0 - } - } else { - 0.0 - } -} - async fn decoder( mut receiver: UnboundedReceiver>, sender: UnboundedSender, @@ -423,82 +198,3 @@ impl Drop for RecvObserver { println!("Receive loop finished"); } } - -fn csv_convert_push_scalar( - string: &mut String, - vec: &ScalarData, - ri: &Option<&LinearRangeInfo>, -) { - if let Some(ri) = ri { - let x = ri.convert(vec.value as f32); - string.push_str(&format!(",{}", x)) - } else { - string.push(',') - } -} - -fn csv_convert_push_vec3( - string: &mut 
String, - vec: &Vector3Data, - ri: &Option<&LinearRangeInfo>, -) where - T: Into + Copy, -{ - if let Some(ri) = ri { - let x = ri.convert(vec.x.into()); - let y = ri.convert(vec.y.into()); - let z = ri.convert(vec.z.into()); - string.push_str(&format!(",{},{},{}", x, y, z)) - } else { - string.push_str(",,,") - } -} - -fn csv_convert_push_vec4( - string: &mut String, - vec: &Vector4Data, - ri: &Option<&LinearRangeInfo>, -) where - T: Into + Copy, -{ - if let Some(ri) = ri { - let a = ri.convert(vec.a.into()); - let b = ri.convert(vec.b.into()); - let c = ri.convert(vec.c.into()); - let d = ri.convert(vec.d.into()); - string.push_str(&format!(",{},{},{},{}", a, b, c, d)) - } else { - string.push_str(",,,,") - } -} - -fn ident_code(code: IdentifierCode) -> &'static str { - match code { - IdentifierCode::Generic => "generic", - IdentifierCode::Maker => "maker", - IdentifierCode::Product => "product", - IdentifierCode::Revision => "revision", - } -} - -fn value_type_code(vt: ValueType) -> &'static str { - match vt { - ValueType::UInt8 => "u8", - ValueType::SInt8 => "i8", - ValueType::UInt16 => "u16", - ValueType::SInt16 => "i16", - ValueType::UInt32 => "u32", - ValueType::SInt32 => "i32", - ValueType::UInt64 => "u64", - ValueType::SInt64 => "i64", - ValueType::UInt128 => "u128", - ValueType::SInt128 => "i128", - ValueType::Float32 => "f32", - ValueType::Float64 => "f64", - ValueType::Q8_8 => "Q8_8", - ValueType::Q16_16 => "Q16_16", - ValueType::Q32_32 => "Q32_32", - ValueType::LinearRange => "lrange", - ValueType::Identifier => "ident", - } -} From db3f213b1423a40a80fbde662be87e85e3550ce4 Mon Sep 17 00:00:00 2001 From: Markus Mayer Date: Sat, 6 Jul 2024 23:50:59 +0200 Subject: [PATCH 08/10] Add instructions for dump command to README.md --- README.md | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/README.md b/README.md index 1c99325..5654a9b 100644 --- a/README.md +++ b/README.md @@ -13,3 +13,14 @@ I'm currently using it for these projects: At the moment 
it doesn't do much: it simply dumps out the information as it comes. ![A screenshot of the application in action](readme/picture.jpg) + +## Logging data streams to disk + +By using the `dump` subcommand and the `--dir /path/to/log` arguments, data streams can be logged +as CSV. The command creates one file per sensor and information type. + +By using the optional `--raw /path/to/file.bin` argument, the raw data stream can be logged. + +```shell +cargo run -- dump --port /dev/ttyACM0 --raw stream.raw --dir dump/ +``` From 6f324a98177226a63e37ef61a265478d53e6626d Mon Sep 17 00:00:00 2001 From: Markus Mayer Date: Sat, 6 Jul 2024 23:51:49 +0200 Subject: [PATCH 09/10] Add time information to README.md --- README.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 5654a9b..b3aea67 100644 --- a/README.md +++ b/README.md @@ -17,7 +17,9 @@ At the moment it doesn't do much: it simply dumps out the information as it come ## Logging data streams to disk By using the `dump` subcommand and the `--dir /path/to/log` arguments, data streams can be logged -as CSV. The command creates one file per sensor and information type. +as CSV. The command creates one file per sensor and information type. The CSV will contain +both the device-side and host-side timestamps to allow for better inference of sensor data rates +later on. By using the optional `--raw /path/to/file.bin` argument, the raw data stream can be logged. 
From c9b06ba10d3397ea517100b9153b12024c95bd07 Mon Sep 17 00:00:00 2001 From: Markus Mayer Date: Sun, 7 Jul 2024 00:08:44 +0200 Subject: [PATCH 10/10] Add test data from STM32F3 Discovery project --- .gitattributes | 2 ++ .../stm32f3discovery/0-ident-ident-x64.csv | 3 ++ .../stm32f3discovery/1-heading-u8-x1.csv | 3 ++ .../stm32f3discovery/106-gyro-i16-x1.csv | 3 ++ .../stm32f3discovery/106-ident-ident-x64.csv | 3 ++ .../106-lranges-lrange-x1.csv | 3 ++ .../stm32f3discovery/130-ident-ident-x64.csv | 3 ++ .../130-lranges-lrange-x1.csv | 3 ++ .../stm32f3discovery/130-temp-i16-x1.csv | 3 ++ .../stm32f3discovery/206-ident-ident-x64.csv | 3 ++ .../206-lranges-lrange-x1.csv | 3 ++ .../stm32f3discovery/206-temp-i16-x1.csv | 3 ++ .../stm32f3discovery/25-acc-i16-x3.csv | 3 ++ .../stm32f3discovery/25-ident-ident-x64.csv | 3 ++ .../stm32f3discovery/25-lranges-lrange-x1.csv | 3 ++ .../stm32f3discovery/30-ident-ident-x64.csv | 3 ++ .../stm32f3discovery/30-lranges-lrange-x1.csv | 3 ++ .../stm32f3discovery/30-mag-i16-x3.csv | 3 ++ test/2024-07-06/stm32f3discovery/README.md | 32 +++++++++++++++++++ test/2024-07-06/stm32f3discovery/raw.bin | 3 ++ 20 files changed, 88 insertions(+) create mode 100644 .gitattributes create mode 100644 test/2024-07-06/stm32f3discovery/0-ident-ident-x64.csv create mode 100644 test/2024-07-06/stm32f3discovery/1-heading-u8-x1.csv create mode 100644 test/2024-07-06/stm32f3discovery/106-gyro-i16-x1.csv create mode 100644 test/2024-07-06/stm32f3discovery/106-ident-ident-x64.csv create mode 100644 test/2024-07-06/stm32f3discovery/106-lranges-lrange-x1.csv create mode 100644 test/2024-07-06/stm32f3discovery/130-ident-ident-x64.csv create mode 100644 test/2024-07-06/stm32f3discovery/130-lranges-lrange-x1.csv create mode 100644 test/2024-07-06/stm32f3discovery/130-temp-i16-x1.csv create mode 100644 test/2024-07-06/stm32f3discovery/206-ident-ident-x64.csv create mode 100644 test/2024-07-06/stm32f3discovery/206-lranges-lrange-x1.csv create mode 100644 
test/2024-07-06/stm32f3discovery/206-temp-i16-x1.csv create mode 100644 test/2024-07-06/stm32f3discovery/25-acc-i16-x3.csv create mode 100644 test/2024-07-06/stm32f3discovery/25-ident-ident-x64.csv create mode 100644 test/2024-07-06/stm32f3discovery/25-lranges-lrange-x1.csv create mode 100644 test/2024-07-06/stm32f3discovery/30-ident-ident-x64.csv create mode 100644 test/2024-07-06/stm32f3discovery/30-lranges-lrange-x1.csv create mode 100644 test/2024-07-06/stm32f3discovery/30-mag-i16-x3.csv create mode 100644 test/2024-07-06/stm32f3discovery/README.md create mode 100644 test/2024-07-06/stm32f3discovery/raw.bin diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..354b3a9 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,2 @@ +*.csv filter=lfs diff=lfs merge=lfs -text +*.bin filter=lfs diff=lfs merge=lfs -text diff --git a/test/2024-07-06/stm32f3discovery/0-ident-ident-x64.csv b/test/2024-07-06/stm32f3discovery/0-ident-ident-x64.csv new file mode 100644 index 0000000..3fce608 --- /dev/null +++ b/test/2024-07-06/stm32f3discovery/0-ident-ident-x64.csv @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:18e35a897a9837100b51ab38a9152c5c0d497ee0d90c5662649a2e509c7b3e81 +size 1762 diff --git a/test/2024-07-06/stm32f3discovery/1-heading-u8-x1.csv b/test/2024-07-06/stm32f3discovery/1-heading-u8-x1.csv new file mode 100644 index 0000000..a783920 --- /dev/null +++ b/test/2024-07-06/stm32f3discovery/1-heading-u8-x1.csv @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fbbda0b79b501b431e3b05dc29b8b96a2db1eb65d73ed863a46c07fff594dad4 +size 375792 diff --git a/test/2024-07-06/stm32f3discovery/106-gyro-i16-x1.csv b/test/2024-07-06/stm32f3discovery/106-gyro-i16-x1.csv new file mode 100644 index 0000000..5cbba12 --- /dev/null +++ b/test/2024-07-06/stm32f3discovery/106-gyro-i16-x1.csv @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:101ae6afbb8ec272b963c09d70b1196ef6cdf78aea65397c68823c450085d4cb 
+size 3883088 diff --git a/test/2024-07-06/stm32f3discovery/106-ident-ident-x64.csv b/test/2024-07-06/stm32f3discovery/106-ident-ident-x64.csv new file mode 100644 index 0000000..5c2c1f0 --- /dev/null +++ b/test/2024-07-06/stm32f3discovery/106-ident-ident-x64.csv @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:db9ce6908962d1162731fba639dfe6d1bcd7c11d6805cc16e257d7b47a1f9a17 +size 1625 diff --git a/test/2024-07-06/stm32f3discovery/106-lranges-lrange-x1.csv b/test/2024-07-06/stm32f3discovery/106-lranges-lrange-x1.csv new file mode 100644 index 0000000..0b183e4 --- /dev/null +++ b/test/2024-07-06/stm32f3discovery/106-lranges-lrange-x1.csv @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f561ad28bfe0b774c3cc60ccff3c70ef6b65949c43929c8fb0c46776c488fb9c +size 1067 diff --git a/test/2024-07-06/stm32f3discovery/130-ident-ident-x64.csv b/test/2024-07-06/stm32f3discovery/130-ident-ident-x64.csv new file mode 100644 index 0000000..ccf6488 --- /dev/null +++ b/test/2024-07-06/stm32f3discovery/130-ident-ident-x64.csv @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5d49efb05f132e5065df388812f40e4cfe952d0c426dd084414dba5c164b32c8 +size 1674 diff --git a/test/2024-07-06/stm32f3discovery/130-lranges-lrange-x1.csv b/test/2024-07-06/stm32f3discovery/130-lranges-lrange-x1.csv new file mode 100644 index 0000000..67fdced --- /dev/null +++ b/test/2024-07-06/stm32f3discovery/130-lranges-lrange-x1.csv @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fd9de415d41395377fb478cd51f06ac592e54e6335bc742e325ba23b5c5e81bd +size 925 diff --git a/test/2024-07-06/stm32f3discovery/130-temp-i16-x1.csv b/test/2024-07-06/stm32f3discovery/130-temp-i16-x1.csv new file mode 100644 index 0000000..f407cc8 --- /dev/null +++ b/test/2024-07-06/stm32f3discovery/130-temp-i16-x1.csv @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cb4041a51b1e95a468a57a6d65c471746fda0d47613d8ddb03f02a603cd0da49 +size 
5468 diff --git a/test/2024-07-06/stm32f3discovery/206-ident-ident-x64.csv b/test/2024-07-06/stm32f3discovery/206-ident-ident-x64.csv new file mode 100644 index 0000000..4f40346 --- /dev/null +++ b/test/2024-07-06/stm32f3discovery/206-ident-ident-x64.csv @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4ea5c889809e83b80be761af6cd4f5526aca9a87bb684649b8c50338bd21f413 +size 1613 diff --git a/test/2024-07-06/stm32f3discovery/206-lranges-lrange-x1.csv b/test/2024-07-06/stm32f3discovery/206-lranges-lrange-x1.csv new file mode 100644 index 0000000..3d19268 --- /dev/null +++ b/test/2024-07-06/stm32f3discovery/206-lranges-lrange-x1.csv @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:82e7d9d28176c22c3333b61926168460c2e126b95c5f53bcc1ad56a5dfbaa291 +size 909 diff --git a/test/2024-07-06/stm32f3discovery/206-temp-i16-x1.csv b/test/2024-07-06/stm32f3discovery/206-temp-i16-x1.csv new file mode 100644 index 0000000..b0adb8f --- /dev/null +++ b/test/2024-07-06/stm32f3discovery/206-temp-i16-x1.csv @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:aaec74b9c492ad5a9c501eaa6b3a0c8d0f926cb96e7a35b4e344fa552bce3b6d +size 5342 diff --git a/test/2024-07-06/stm32f3discovery/25-acc-i16-x3.csv b/test/2024-07-06/stm32f3discovery/25-acc-i16-x3.csv new file mode 100644 index 0000000..1385278 --- /dev/null +++ b/test/2024-07-06/stm32f3discovery/25-acc-i16-x3.csv @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b6ad19b3e382c4322e7fb334fca7849dae1bf22ebb8426701d9da4c075278517 +size 4040824 diff --git a/test/2024-07-06/stm32f3discovery/25-ident-ident-x64.csv b/test/2024-07-06/stm32f3discovery/25-ident-ident-x64.csv new file mode 100644 index 0000000..1c4774d --- /dev/null +++ b/test/2024-07-06/stm32f3discovery/25-ident-ident-x64.csv @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8fc0c91aad92d5190d238941251383ec276d37a710e06cacd1592c06acca420a +size 1675 diff --git 
a/test/2024-07-06/stm32f3discovery/25-lranges-lrange-x1.csv b/test/2024-07-06/stm32f3discovery/25-lranges-lrange-x1.csv new file mode 100644 index 0000000..cb1ccd9 --- /dev/null +++ b/test/2024-07-06/stm32f3discovery/25-lranges-lrange-x1.csv @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:04a9dca1ff4f227fabb2528052f22b2e493056ab25f4d66d339c8349c3b6332d +size 1002 diff --git a/test/2024-07-06/stm32f3discovery/30-ident-ident-x64.csv b/test/2024-07-06/stm32f3discovery/30-ident-ident-x64.csv new file mode 100644 index 0000000..0e25bdf --- /dev/null +++ b/test/2024-07-06/stm32f3discovery/30-ident-ident-x64.csv @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:91b403c44bc810da733c6c48b536f11aa796f2318d2be74b6c648cd149c314e7 +size 1673 diff --git a/test/2024-07-06/stm32f3discovery/30-lranges-lrange-x1.csv b/test/2024-07-06/stm32f3discovery/30-lranges-lrange-x1.csv new file mode 100644 index 0000000..3b2e364 --- /dev/null +++ b/test/2024-07-06/stm32f3discovery/30-lranges-lrange-x1.csv @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5d5b52ba035a0386b3a14ce59601c811595ce33008d475b954a273283be89d92 +size 975 diff --git a/test/2024-07-06/stm32f3discovery/30-mag-i16-x3.csv b/test/2024-07-06/stm32f3discovery/30-mag-i16-x3.csv new file mode 100644 index 0000000..f91251e --- /dev/null +++ b/test/2024-07-06/stm32f3discovery/30-mag-i16-x3.csv @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3cf7e344f9add1dd37ee216e383c06c5781db9c0a6e3f0a0154d2cf4fb98212b +size 768938 diff --git a/test/2024-07-06/stm32f3discovery/README.md b/test/2024-07-06/stm32f3discovery/README.md new file mode 100644 index 0000000..8715b99 --- /dev/null +++ b/test/2024-07-06/stm32f3discovery/README.md @@ -0,0 +1,32 @@ +# STM32F3 Discovery Test Data + +This directory contains an output stream from the +[`sunsided/stm32f3disco-rust`](https://github.com/sunsided/stm32f3disco-rust) project +using 
[`serial-sensors-proto`](https://crates.io/crates/serial-sensors-proto) version `0.4.0`. + +## CSV files + +The CSV files contain the actual data. The important parts are: + +* `25-acc-i16-x3.csv`, the LSM303DLHC accelerometer +* `30-mag-i16-x3.csv`, the LSM303DLHC magnetometer +* `106-gyro-i16-x1.csv`, the L3GD20 gyroscope +* `130-temp-i16-x1.csv`, the LSM303DLHC magnetometer's temperature reading +* `206-temp-i16-x1.csv`, the L3GD20's temperature reading + +The corresponding `lranges` files contain normalization data and generally resemble +information from the datasheets. As soon as the information was sent by the sensor, +the respective CSV columns were populated. + +Fake sensors: + +* `1-heading-u8-x1.csv`, is a simple heading derived by `atan2` from the magnetometer + +## Raw data + +By virtue of the byte stuffing, each message packet is prefixed with `0x04` and ends with `0x00`, +such that the sequence `0004` resembles a synchronization point between two messages. +Since this data was serialized with `serial-sensors-proto` version `0.4.0`, which uses +the "version 1" data format, the sequence can be extended to `000401`; the `01` here +is already part of the new data frame (indicating protocol version `1`). All data is encoded +in little-endian format. diff --git a/test/2024-07-06/stm32f3discovery/raw.bin b/test/2024-07-06/stm32f3discovery/raw.bin new file mode 100644 index 0000000..8ccbd10 --- /dev/null +++ b/test/2024-07-06/stm32f3discovery/raw.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:26640cdcfaa29fb820f054653d770760955ed6912ae7c86522d2d5c5d71df9ea +size 3415279