Commit

jleibs committed Jul 19, 2023
1 parent a6905db commit f834c02
Showing 6 changed files with 18 additions and 25 deletions.
12 changes: 6 additions & 6 deletions crates/re_arrow_store/src/store_read.rs
@@ -169,9 +169,9 @@ impl DataStore {
/// ) -> anyhow::Result<DataFrame> {
/// let cluster_key = store.cluster_key();
///
-/// let components = [cluster_key, primary.clone()];
+/// let components = &[cluster_key, primary];
/// let (_, cells) = store
-/// .latest_at(&query, ent_path, primary, &components)
+/// .latest_at(&query, ent_path, primary, components)
/// .unwrap_or((RowId::ZERO, [(); 2].map(|_| None)));
///
/// let series: Result<Vec<_>, _> = cells
@@ -372,7 +372,7 @@ impl DataStore
/// std::iter::once(df_latest.map(|df| (Some(latest_time), df)))
/// // ..but only if it's not an empty dataframe.
/// .filter(|df| df.as_ref().map_or(true, |(_, df)| !df.is_empty()))
-/// .chain(store.range(query, ent_path, &components).map(
+/// .chain(store.range(query, ent_path, components).map(
/// move |(time, _, cells)| dataframe_from_cells(cells).map(|df| (time, df))
/// ))
/// }
@@ -404,7 +404,7 @@ impl DataStore
id = self.query_id.load(Ordering::Relaxed),
query = ?query,
entity = %ent_path,
-components = ?components.clone(),
+?components,
"query started..."
);

@@ -535,7 +535,7 @@ impl IndexedTable
timeline.typ().format_range(bucket.inner.read().time_range),
timeline = %timeline.name(),
?time_range,
-components = ?components.clone(),
+?components,
"found bucket in range"
);

@@ -1084,7 +1084,7 @@ impl PersistentIndexedTable
// Early-exit if the table is unaware of any of our components of interest.
if components
.iter()
-.all(|component| self.columns.get(component.as_ref()).is_none())
+.all(|component| self.columns.get(component).is_none())
{
return itertools::Either::Right(std::iter::empty());
}
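The logging change in this file replaces `components = ?components.clone()` with the `?components` field shorthand. A minimal sketch of that shorthand, assuming a `tracing`-style structured-logging macro (suggested by the `?`/`%` sigils on the neighbouring fields; the exact macro rerun uses is not shown here):

```rust
use tracing::trace;

fn log_query(components: &[&str]) {
    // `?components` is shorthand for `components = ?components`: the value is
    // recorded through its `Debug` impl by reference, so no `.clone()` is needed.
    trace!(?components, "query started...");
}

fn main() {
    log_query(&["position", "color"]);
}
```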
8 changes: 4 additions & 4 deletions crates/re_arrow_store/src/store_write.rs
@@ -1,6 +1,6 @@
use arrow2::datatypes::DataType;
use itertools::Itertools as _;
-use nohash_hasher::IntMap;
+use nohash_hasher::{IntMap, IntSet};
use parking_lot::RwLock;
use smallvec::SmallVec;

@@ -279,7 +279,7 @@ impl IndexedTable
) {
re_tracing::profile_function!();

-let components: ahash::HashSet<_> = row.component_names().collect();
+let components: IntSet<_> = row.component_names().collect();

// borrowck workaround
let timeline = self.timeline;
@@ -413,7 +413,7 @@ impl IndexedBucket
time: TimeInt,
generated_cluster_cell: Option<DataCell>,
row: &DataRow,
-components: &ahash::HashSet<ComponentName>,
+components: &IntSet<ComponentName>,
) -> u64 {
re_tracing::profile_function!();

@@ -806,7 +806,7 @@ impl PersistentIndexedTable
columns,
} = self;

-let components: ahash::HashSet<_> = row.component_names().collect();
+let components: IntSet<_> = row.component_names().collect();

// --- update all control columns ---

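This file swaps `ahash::HashSet` for `nohash_hasher::IntSet`, which only compiles if the key type opts into the pass-through hasher. A minimal sketch of that opt-in, using a hypothetical `Id` newtype in place of `ComponentName`:

```rust
use nohash_hasher::{IntSet, IsEnabled};

// Hypothetical stand-in for a key whose hash boils down to a single integer.
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
struct Id(u64);

// Opting in is sound here because the derived `Hash` impl performs exactly one
// `write_u64` call, which `NoHashHasher` forwards as the hash value unchanged.
impl IsEnabled for Id {}

fn main() {
    let mut components: IntSet<Id> = IntSet::default();
    components.insert(Id(42));
    assert!(components.contains(&Id(42)));
}
```

The payoff is skipping the hashing work entirely for keys that already carry an integer-sized or precomputed hash.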
12 changes: 6 additions & 6 deletions crates/re_arrow_store/tests/data_store.rs
@@ -323,7 +323,7 @@ fn latest_at_impl(store: &mut DataStore) {
)
.unwrap();

-let df_expected = joint_df(&store.cluster_key(), rows);
+let df_expected = joint_df(store.cluster_key(), rows);

store.sort_indices_if_needed();
assert_eq!(df_expected, df, "{store}");
@@ -456,9 +456,9 @@ fn range_impl(store: &mut DataStore) {
for (time, rows) in rows_at_times {
if let Some(time) = time {
let dfs = expected_at_times.entry(*time).or_default();
-dfs.push(joint_df(&store.cluster_key(), rows));
+dfs.push(joint_df(store.cluster_key(), rows));
} else {
-expected_timeless.push(joint_df(&store.cluster_key(), rows));
+expected_timeless.push(joint_df(store.cluster_key(), rows));
}
}

@@ -797,11 +797,11 @@ fn range_impl(store: &mut DataStore) {
// --- Common helpers ---

/// Given a list of rows, crafts a `latest_components`-looking dataframe.
-fn joint_df(cluster_key: &ComponentName, rows: &[(ComponentName, &DataRow)]) -> DataFrame {
+fn joint_df(cluster_key: ComponentName, rows: &[(ComponentName, &DataRow)]) -> DataFrame {
let df = rows
.iter()
.map(|(component, row)| {
-let cluster_comp = if let Some(idx) = row.find_cell(cluster_key) {
+let cluster_comp = if let Some(idx) = row.find_cell(&cluster_key) {
Series::try_from((cluster_key.as_ref(), row.cells[idx].to_arrow_monolist()))
.unwrap()
} else {
@@ -830,7 +830,7 @@ fn joint_df(cluster_key: &ComponentName, rows: &[(ComponentName, &DataRow)]) ->
})
.unwrap_or_default();

-let df = polars_util::drop_all_nulls(&df, cluster_key).unwrap();
+let df = polars_util::drop_all_nulls(&df, &cluster_key).unwrap();

df.sort([cluster_key.as_ref()], false).unwrap_or(df)
}
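`joint_df` now takes `cluster_key` by value, and the call sites that still need a reference (`find_cell`, `drop_all_nulls`) borrow it locally. A small sketch of that pattern, assuming the key type is a cheap `Copy` value (as an interned component name would be); the names here are hypothetical:

```rust
// Hypothetical key type standing in for ComponentName.
#[derive(Clone, Copy, PartialEq, Eq)]
struct Key(u64);

fn find(keys: &[Key], key: &Key) -> Option<usize> {
    keys.iter().position(|k| k == key)
}

// Taking `key` by value costs the same as `&Key` for a Copy type, and callers
// drop the `&` at every call site; borrow locally where a reference is needed.
fn lookup(keys: &[Key], key: Key) -> Option<usize> {
    find(keys, &key)
}

fn main() {
    let keys = [Key(1), Key(2), Key(3)];
    assert_eq!(lookup(&keys, Key(2)), Some(1));
}
```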
2 changes: 1 addition & 1 deletion crates/re_arrow_store/tests/dump.rs
@@ -11,7 +11,7 @@ use re_components::datagen::{
build_frame_nr, build_log_time, build_some_colors, build_some_instances, build_some_point2d,
};
use re_log_types::{DataTable, EntityPath, InstanceKey, TableId};
-use re_types::Loggable as U;
+use re_types::Loggable as _;

// --- Dump ---

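The `Loggable as U` import becomes an anonymous `Loggable as _`, which keeps the trait's methods in scope without binding an unused name. A minimal illustration of the same idiom with a standard-library trait:

```rust
// `as _` imports the trait anonymously: its methods become callable, but no
// local name is introduced that could collide with other identifiers.
use std::fmt::Write as _;

fn main() {
    let mut s = String::new();
    // `write!` needs `fmt::Write` in scope to call `write_fmt` on `String`.
    write!(s, "answer = {}", 42).unwrap();
    println!("{s}");
}
```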
7 changes: 0 additions & 7 deletions crates/re_log_types/src/data_cell.rs
@@ -217,9 +217,6 @@ impl DataCell {
C: Component + Clone + 'a,
C: Into<::std::borrow::Cow<'a, C>>,
{
-//let values: Vec<C> = values.into_iter().map(Into::into).collect();
-//Self::from_native(values)
-//let values: Vec<C> = values.into_iter().map(Into::into).collect();
Self::from_native(values.into_iter().map(Into::into))
}

@@ -235,10 +232,6 @@
C: Component + Clone + 'a,
C: Into<::std::borrow::Cow<'a, C>>,
{
-//let values: Vec<_> = values
-//    .into_iter()
-//    .map(|value| value.map(Into::into))
-//    .collect();
Self::from_native_sparse(values.into_iter().map(|value| value.map(Into::into)))
}

2 changes: 1 addition & 1 deletion crates/re_query/src/query.rs
@@ -25,7 +25,7 @@ use crate::{ArchetypeView, ComponentWithInstances, EntityView, QueryError};
/// &store,
/// &query,
/// &ent_path.into(),
-/// &Point2D::name(),
+/// Point2D::name(),
/// )
/// .unwrap();
///
