use re_tracing everywhere
teh-cmc committed May 31, 2023
1 parent ac7bf8d commit 93b7031
Showing 124 changed files with 362 additions and 596 deletions.
21 changes: 11 additions & 10 deletions Cargo.lock

Some generated files are not rendered by default.

5 changes: 1 addition & 4 deletions crates/re_arrow_store/Cargo.toml
@@ -35,6 +35,7 @@ core_benchmarks_only = []
re_format.workspace = true
re_log_types.workspace = true
re_log.workspace = true
+re_tracing.workspace = true

# External dependencies:
ahash.workspace = true
@@ -48,10 +49,6 @@ parking_lot.workspace = true
smallvec.workspace = true
thiserror.workspace = true

-# Native dependencies:
-[target.'cfg(not(target_arch = "wasm32"))'.dependencies]
-puffin.workspace = true

# Optional dependencies:
polars-core = { workspace = true, optional = true, features = [
"diagonal_concat",
22 changes: 0 additions & 22 deletions crates/re_arrow_store/src/lib.rs
@@ -56,25 +56,3 @@ pub use re_log_types::{TimeInt, TimeRange, TimeType, Timeline}; // for politenes
pub mod external {
pub use arrow2;
}

-// ---
-
-/// Native-only profiling macro for puffin.
-#[doc(hidden)]
-#[macro_export]
-macro_rules! profile_function {
-    ($($arg: tt)*) => {
-        #[cfg(not(target_arch = "wasm32"))]
-        puffin::profile_function!($($arg)*);
-    };
-}
-
-/// Native-only profiling macro for puffin.
-#[doc(hidden)]
-#[macro_export]
-macro_rules! profile_scope {
-    ($($arg: tt)*) => {
-        #[cfg(not(target_arch = "wasm32"))]
-        puffin::profile_scope!($($arg)*);
-    };
-}
2 changes: 1 addition & 1 deletion crates/re_arrow_store/src/store.rs
@@ -286,7 +286,7 @@ impl DataStore {
///
/// Useful to call after a gc.
pub fn oldest_time_per_timeline(&self) -> BTreeMap<Timeline, TimeInt> {
-crate::profile_function!();
+re_tracing::profile_function!();

let mut oldest_time_per_timeline = BTreeMap::default();

10 changes: 5 additions & 5 deletions crates/re_arrow_store/src/store_arrow.rs
@@ -22,7 +22,7 @@ impl IndexedBucket {
/// - `$cluster_key`
/// - rest of component columns in ascending lexical order
pub fn serialize(&self) -> DataTableResult<(Schema, Chunk<Box<dyn Array>>)> {
-crate::profile_function!();
+re_tracing::profile_function!();

let Self {
timeline,
@@ -63,7 +63,7 @@ impl PersistentIndexedTable {
/// - `$cluster_key`
/// - rest of component columns in ascending lexical order
pub fn serialize(&self) -> DataTableResult<(Schema, Chunk<Box<dyn Array>>)> {
-crate::profile_function!();
+re_tracing::profile_function!();

let Self {
ent_path: _,
@@ -95,7 +95,7 @@ fn serialize(
col_num_instances: &[u32],
table: &IntMap<ComponentName, DataCellColumn>,
) -> DataTableResult<(Schema, Chunk<Box<dyn Array>>)> {
-crate::profile_function!();
+re_tracing::profile_function!();

let mut schema = Schema::default();
let mut columns = Vec::new();
@@ -129,7 +129,7 @@ fn serialize_control_columns(
col_row_id: &[RowId],
col_num_instances: &[u32],
) -> DataTableResult<(Schema, Vec<Box<dyn Array>>)> {
-crate::profile_function!();
+re_tracing::profile_function!();

let mut schema = Schema::default();
let mut columns = Vec::new();
@@ -175,7 +175,7 @@ fn serialize_data_columns(
cluster_key: &ComponentName,
table: &IntMap<ComponentName, DataCellColumn>,
) -> DataTableResult<(Schema, Vec<Box<dyn Array>>)> {
-crate::profile_function!();
+re_tracing::profile_function!();

let mut schema = Schema::default();
let mut columns = Vec::new();
10 changes: 5 additions & 5 deletions crates/re_arrow_store/src/store_dump.rs
@@ -32,7 +32,7 @@ impl DataStore {

fn dump_timeless_tables(&self) -> impl Iterator<Item = DataTable> + '_ {
self.timeless_tables.values().map(|table| {
-crate::profile_scope!("timeless_table");
+re_tracing::profile_scope!("timeless_table");

let PersistentIndexedTable {
ent_path,
@@ -58,10 +58,10 @@

fn dump_temporal_tables(&self) -> impl Iterator<Item = DataTable> + '_ {
self.tables.values().flat_map(|table| {
-crate::profile_scope!("temporal_table");
+re_tracing::profile_scope!("temporal_table");

table.buckets.values().map(move |bucket| {
-crate::profile_scope!("temporal_bucket");
+re_tracing::profile_scope!("temporal_bucket");

bucket.sort_indices_if_needed();

@@ -105,14 +105,14 @@ impl DataStore {
self.tables
.values()
.filter_map(move |table| {
-crate::profile_scope!("temporal_table_filtered");
+re_tracing::profile_scope!("temporal_table_filtered");

if table.timeline != timeline_filter {
return None;
}

Some(table.buckets.values().filter_map(move |bucket| {
-crate::profile_scope!("temporal_bucket_filtered");
+re_tracing::profile_scope!("temporal_bucket_filtered");

bucket.sort_indices_if_needed();

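Every file in this commit follows the same call-site pattern seen above: `re_tracing::profile_function!()` at the top of a function, `re_tracing::profile_scope!("...")` around inner units of work. A minimal usage sketch (hypothetical function, not taken from the diff):

```rust
// Hypothetical example of the call-site pattern used throughout this commit:
// profile the whole function, then each table processed inside it.
fn dump_all_tables(tables: &[Vec<u64>]) -> u64 {
    re_tracing::profile_function!();

    let mut total = 0;
    for table in tables {
        re_tracing::profile_scope!("table");
        total += table.iter().sum::<u64>();
    }
    total
}
```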
8 changes: 4 additions & 4 deletions crates/re_arrow_store/src/store_gc.rs
@@ -64,7 +64,7 @@ impl DataStore {
//
// TODO(#1823): Workload specific optimizations.
pub fn gc(&mut self, target: GarbageCollectionTarget) -> (Vec<RowId>, DataStoreStats) {
-crate::profile_function!();
+re_tracing::profile_function!();

self.gc_id += 1;

@@ -126,7 +126,7 @@ impl DataStore {
///
/// Returns the list of `RowId`s that were purged from the store.
fn gc_drop_at_least_num_bytes(&mut self, mut num_bytes_to_drop: f64) -> Vec<RowId> {
-crate::profile_function!();
+re_tracing::profile_function!();

let mut row_ids = Vec::new();

@@ -165,7 +165,7 @@ impl IndexedTable {
///
/// Returns how many bytes were actually dropped, or zero if the row wasn't found.
fn try_drop_row(&mut self, row_id: RowId, time: i64) -> u64 {
-crate::profile_function!();
+re_tracing::profile_function!();

let table_has_more_than_one_bucket = self.buckets.len() > 1;

@@ -211,7 +211,7 @@ impl IndexedBucketInner {
///
/// Returns how many bytes were actually dropped, or zero if the row wasn't found.
fn try_drop_row(&mut self, row_id: RowId, time: i64) -> u64 {
-crate::profile_function!();
+re_tracing::profile_function!();

self.sort();

10 changes: 5 additions & 5 deletions crates/re_arrow_store/src/store_helpers.rs
@@ -24,7 +24,7 @@ impl DataStore {
where
for<'b> &'b C::ArrayType: IntoIterator,
{
-crate::profile_function!();
+re_tracing::profile_function!();

let (_, cells) = self.latest_at(query, entity_path, C::name(), &[C::name()])?;
let cell = cells.get(0)?.as_ref()?;
@@ -57,7 +57,7 @@ impl DataStore {
where
for<'b> &'b C::ArrayType: IntoIterator,
{
-crate::profile_function!();
+re_tracing::profile_function!();

let mut cur_path = Some(entity_path.clone());
while let Some(path) = cur_path {
@@ -84,7 +84,7 @@ impl DataStore {
where
for<'b> &'b C::ArrayType: IntoIterator,
{
-crate::profile_function!();
+re_tracing::profile_function!();

let query = LatestAtQuery::latest(Timeline::default());
self.query_latest_component(entity_path, &query)
@@ -103,7 +103,7 @@ impl DataStore {
timepoint: &TimePoint,
component: C,
) {
-crate::profile_function!();
+re_tracing::profile_function!();

let mut row = match DataRow::try_from_cells1(
RowId::random(),
@@ -140,7 +140,7 @@ impl DataStore {
timepoint: &TimePoint,
component: ComponentName,
) {
-crate::profile_function!();
+re_tracing::profile_function!();

if let Some(datatype) = self.lookup_datatype(&component) {
let cell = DataCell::from_arrow_empty(component, datatype.clone());
(Remaining changed files not shown.)