Fix Clippy and RERUN_MEMORY_LIMIT env variable default #490

Merged 2 commits on Apr 28, 2024
Changes from all commits
171 changes: 97 additions & 74 deletions tool_nodes/dora-rerun/src/main.rs
@@ -1,16 +1,18 @@
 //! Demonstrates the most barebone usage of the Rerun SDK.

+use std::env::VarError;
+
 use dora_node_api::{
     arrow::array::{Float32Array, StringArray, UInt8Array},
     DoraNode, Event,
 };
-use eyre::{Context, Result};
+use eyre::{eyre, Context, Result};
 use rerun::{
     external::re_types::ArrowBuffer, SpawnOptions, TensorBuffer, TensorData, TensorDimension,
 };

 fn main() -> Result<()> {
-    // `serve()` requires to have a running Tokio runtime in the current context.
+    // rerun `serve()` requires to have a running Tokio runtime in the current context.
     let rt = tokio::runtime::Runtime::new().expect("Failed to create tokio runtime");
     let _guard = rt.enter();

@@ -20,88 +22,109 @@ fn main() -> Result<()> {
     // Limit memory usage
     let mut options = SpawnOptions::default();

-    let memory_limit = std::env::var("RERUN_MEMORY_LIMIT")
-        .context("Could not read image height")?
-        .parse::<String>()
-        .unwrap_or("25%".into());
+    let memory_limit = match std::env::var("RERUN_MEMORY_LIMIT") {
+        Ok(memory_limit) => memory_limit
+            .parse::<String>()
+            .context("Could not parse RERUN_MEMORY_LIMIT value")?,
+        Err(VarError::NotUnicode(_)) => {
+            return Err(eyre!("RERUN_MEMORY_LIMIT env variable is not unicode"));
+        }
+        Err(VarError::NotPresent) => "25%".to_string(),
+    };

     options.memory_limit = memory_limit;

     let rec = rerun::RecordingStreamBuilder::new("dora-rerun")
         .spawn_opts(&options, None)
         .context("Could not spawn rerun visualization")?;

     while let Some(event) = events.recv() {
-        match event {
-            Event::Input {
-                id,
-                data,
-                metadata: _,
-            } => {
-                if id.as_str().contains("image") {
-                    let shape = vec![
-                        TensorDimension {
-                            name: Some("height".into()),
-                            size: std::env::var(format!("{}_HEIGHT", id.as_str().to_uppercase()))
-                                .context("Could not read image height")?
-                                .parse()
-                                .context("Could not parse value of image height env variable")?,
-                        },
-                        TensorDimension {
-                            name: Some("width".into()),
-                            size: std::env::var(format!("{}_WIDTH", id.as_str().to_uppercase()))
-                                .context("Could not read image width")?
-                                .parse()
-                                .context("Could not parse value of image width env variable")?,
-                        },
-                        TensorDimension {
-                            name: Some("depth".into()),
-                            size: std::env::var(format!("{}_DEPTH", id.as_str().to_uppercase()))
-                                .context("Could not read image depth")?
-                                .parse()
-                                .context("Could not parse value of image depth env variable")?,
-                        },
-                    ];
+        if let Event::Input {
+            id,
+            data,
+            metadata: _,
+        } = event
+        {
+            if id.as_str().contains("image") {
+                let shape = vec![
+                    TensorDimension {
+                        name: Some("height".into()),
+                        size: std::env::var(format!("{}_HEIGHT", id.as_str().to_uppercase()))
+                            .context(format!(
+                                "Could not read {}_HEIGHT env variable for parsing the image",
+                                id.as_str().to_uppercase()
+                            ))?
+                            .parse()
+                            .context(format!(
+                                "Could not parse env {}_HEIGHT",
+                                id.as_str().to_uppercase()
+                            ))?,
+                    },
+                    TensorDimension {
+                        name: Some("width".into()),
+                        size: std::env::var(format!("{}_WIDTH", id.as_str().to_uppercase()))
+                            .context(format!(
+                                "Could not read {}_WIDTH env variable for parsing the image",
+                                id.as_str().to_uppercase()
+                            ))?
+                            .parse()
+                            .context(format!(
+                                "Could not parse env {}_WIDTH",
+                                id.as_str().to_uppercase()
+                            ))?,
+                    },
+                    TensorDimension {
+                        name: Some("depth".into()),
+                        size: std::env::var(format!("{}_DEPTH", id.as_str().to_uppercase()))
+                            .context(format!(
+                                "Could not read {}_DEPTH env variable for parsing the image",
+                                id.as_str().to_uppercase()
+                            ))?
+                            .parse()
+                            .context(format!(
+                                "Could not parse env {}_DEPTH",
+                                id.as_str().to_uppercase()
+                            ))?,
+                    },
+                ];

-                    let buffer: UInt8Array = data.to_data().into();
-                    let buffer: &[u8] = buffer.values();
-                    let buffer = TensorBuffer::U8(ArrowBuffer::from(buffer));
-                    let tensordata = TensorData::new(shape.clone(), buffer.into());
-                    let image = rerun::Image::new(tensordata);
+                let buffer: UInt8Array = data.to_data().into();
+                let buffer: &[u8] = buffer.values();
+                let buffer = TensorBuffer::U8(ArrowBuffer::from(buffer));
+                let tensordata = TensorData::new(shape.clone(), buffer);
+                let image = rerun::Image::new(tensordata);

-                    rec.log(id.as_str(), &image)
-                        .context("could not log image")?;
-                } else if id.as_str().contains("textlog") {
-                    let buffer: StringArray = data.to_data().into();
-                    buffer.iter().try_for_each(|string| -> Result<()> {
-                        if let Some(str) = string {
-                            rec.log(id.as_str(), &rerun::TextLog::new(str))
-                                .wrap_err("Could not log text")
-                        } else {
-                            Ok(())
-                        }
-                    })?;
-                } else if id.as_str().contains("boxes2d") {
-                    let buffer: Float32Array = data.to_data().into();
-                    let buffer: &[f32] = buffer.values();
-                    let mut centers = vec![];
-                    let mut sizes = vec![];
-                    let mut classes = vec![];
-                    buffer.chunks(6).for_each(|block| {
-                        if let [x, y, w, h, _conf, cls] = block {
-                            centers.push((*x, *y));
-                            sizes.push((*w, *h));
-                            classes.push(*cls as u16);
-                        }
-                    });
-                    rec.log(
-                        id.as_str(),
-                        &rerun::Boxes2D::from_centers_and_sizes(centers, sizes)
-                            .with_class_ids(classes),
-                    )
-                    .wrap_err("Could not log Boxes2D")?;
-                }
+                rec.log(id.as_str(), &image)
+                    .context("could not log image")?;
+            } else if id.as_str().contains("textlog") {
+                let buffer: StringArray = data.to_data().into();
+                buffer.iter().try_for_each(|string| -> Result<()> {
+                    if let Some(str) = string {
+                        rec.log(id.as_str(), &rerun::TextLog::new(str))
+                            .wrap_err("Could not log text")
+                    } else {
+                        Ok(())
+                    }
+                })?;
+            } else if id.as_str().contains("boxes2d") {
+                let buffer: Float32Array = data.to_data().into();
+                let buffer: &[f32] = buffer.values();
+                let mut centers = vec![];
+                let mut sizes = vec![];
+                let mut classes = vec![];
+                buffer.chunks(6).for_each(|block| {
+                    if let [x, y, w, h, _conf, cls] = block {
+                        centers.push((*x, *y));
+                        sizes.push((*w, *h));
+                        classes.push(*cls as u16);
+                    }
+                });
+                rec.log(
+                    id.as_str(),
+                    &rerun::Boxes2D::from_centers_and_sizes(centers, sizes).with_class_ids(classes),
+                )
+                .wrap_err("Could not log Boxes2D")?;
+            }
             }
-            _ => {}
         }
     }

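A note on the functional change above: in the old code, the `?` after `.context("Could not read image height")?` returned early whenever `RERUN_MEMORY_LIMIT` was unset, so the `unwrap_or("25%".into())` default never applied. The new `match` on `VarError` makes the fallback actually work and turns a non-unicode value into an explicit error. The same pattern can be tried in isolation; the sketch below is illustrative only, using a hypothetical `env_or_default` helper and plain `String` errors instead of `eyre`.

use std::env::VarError;

/// Read an environment variable, falling back to a default when it is unset
/// and reporting an error when it is set but not valid unicode.
fn env_or_default(name: &str, default: &str) -> Result<String, String> {
    match std::env::var(name) {
        Ok(value) => Ok(value),
        Err(VarError::NotPresent) => Ok(default.to_string()),
        Err(VarError::NotUnicode(_)) => Err(format!("{name} env variable is not unicode")),
    }
}

fn main() {
    // With RERUN_MEMORY_LIMIT unset, this prints the "25%" fallback.
    let limit = env_or_default("RERUN_MEMORY_LIMIT", "25%").unwrap();
    println!("memory limit: {limit}");
}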
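The Clippy part of the title matches the remaining edits: the single-arm `match event { Event::Input { .. } => { .. }, _ => {} }` becomes an `if let` (the shape flagged by Clippy's `single_match` lint), and `TensorData::new` now takes `buffer` directly instead of `buffer.into()`. A minimal sketch of the match-to-if-let rewrite, using a hypothetical `Message` enum rather than the dora `Event` type:

enum Message {
    Data(String),
    Shutdown,
}

// Before: a match where only one arm does anything, which Clippy's
// `single_match` lint suggests rewriting.
fn handle_with_match(msg: Message) {
    match msg {
        Message::Data(payload) => println!("got {payload}"),
        _ => {}
    }
}

// After: the equivalent `if let`, mirroring the event-loop rewrite in the diff.
fn handle_with_if_let(msg: Message) {
    if let Message::Data(payload) = msg {
        println!("got {payload}");
    }
}

fn main() {
    handle_with_match(Message::Data("first".to_string()));
    handle_with_if_let(Message::Shutdown); // no output: the non-matching case is ignored
}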