Fix Build: Use default() to init custom hash based datastructures #1871

Merged 2 commits on Oct 17, 2024
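The pattern throughout this diff: each call site swaps `HashMap::new()` / `HashSet::new()` (previously supplied via the `HashCollectionExt` extension trait, whose import is dropped) for `default()`, which works for any map or set whose hash builder implements `Default`. A minimal sketch of why `new()` is tied to the standard hasher while `default()` is not — the hasher and the `FastMap` alias below are hypothetical stand-ins, not the crate's actual types:

```rust
use std::collections::hash_map::DefaultHasher;
use std::collections::HashMap;
use std::hash::{BuildHasherDefault, Hasher};

// Hypothetical stand-in for a custom hasher; the real crate presumably
// wraps a hashbrown map with its own hash builder.
#[derive(Default)]
struct MyHasher(DefaultHasher);

impl Hasher for MyHasher {
    fn finish(&self) -> u64 {
        self.0.finish()
    }
    fn write(&mut self, bytes: &[u8]) {
        self.0.write(bytes)
    }
}

// A map alias pinned to the custom hasher, analogous in spirit to
// `spacetimedb_data_structures::map::HashMap`.
type FastMap<K, V> = HashMap<K, V, BuildHasherDefault<MyHasher>>;

fn main() {
    // `HashMap::new()` is only provided when the hash builder is the standard
    // `RandomState`, so this line would not compile:
    // let m: FastMap<u32, u32> = FastMap::new();

    // `Default` is implemented for any `S: Default` hash builder,
    // so `default()` works regardless of the hasher:
    let mut m: FastMap<u32, u32> = FastMap::default();
    m.insert(1, 2);
    assert_eq!(m.get(&1), Some(&2));
}
```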
420 changes: 190 additions & 230 deletions Cargo.lock

Large diffs are not rendered by default.

4 changes: 2 additions & 2 deletions crates/client-api/src/routes/prometheus.rs
@@ -2,7 +2,7 @@ use crate::{log_and_500, ControlStateReadAccess}
use axum::extract::State;
use axum::response::IntoResponse;
use serde::{Deserialize, Serialize};
-use spacetimedb_data_structures::map::{HashCollectionExt, HashMap};
+use spacetimedb_data_structures::map::HashMap;

#[derive(Serialize, Deserialize)]
struct SDConfig {
@@ -17,7 +17,7 @@ pub async fn get_sd_config<S: ControlStateReadAccess>(
let nodes = ctx.get_nodes().map_err(log_and_500)?;

let mut targets = Vec::new();
-let labels = HashMap::new();
+let labels = HashMap::default();

for node in nodes {
if let Some(addr) = node.advertise_addr {
14 changes: 7 additions & 7 deletions crates/core/src/db/relational_operators.rs
@@ -1,5 +1,5 @@
use core::marker::PhantomData;
-use spacetimedb_data_structures::map::{HashCollectionExt, HashSet};
+use spacetimedb_data_structures::map::HashSet;
use spacetimedb_sats::ProductValue;

// NOTE
@@ -143,8 +143,8 @@ impl<'a, S: Relation, U: Relation> IntoIterator for Union<'a, S, U> {
type IntoIter = std::vec::IntoIter<ProductValue>;

fn into_iter(self) -> Self::IntoIter {
-let mut set_s: HashSet<ProductValue> = HashSet::new();
-let mut set_u: HashSet<ProductValue> = HashSet::new();
+let mut set_s: HashSet<ProductValue> = HashSet::default();
+let mut set_u: HashSet<ProductValue> = HashSet::default();
for next in self.s {
set_s.insert(next);
}
@@ -166,8 +166,8 @@ impl<'a, S: Relation, U: Relation> IntoIterator for Intersection<'a, S, U> {
type IntoIter = std::vec::IntoIter<ProductValue>;

fn into_iter(self) -> Self::IntoIter {
-let mut set_s: HashSet<ProductValue> = HashSet::new();
-let mut set_u: HashSet<ProductValue> = HashSet::new();
+let mut set_s: HashSet<ProductValue> = HashSet::default();
+let mut set_u: HashSet<ProductValue> = HashSet::default();
for next in self.s {
set_s.insert(next);
}
@@ -192,8 +192,8 @@ impl<'a, S: Relation, U: Relation> IntoIterator for Difference<'a, S, U> {
type IntoIter = std::vec::IntoIter<ProductValue>;

fn into_iter(self) -> Self::IntoIter {
-let mut set_s: HashSet<ProductValue> = HashSet::new();
-let mut set_u: HashSet<ProductValue> = HashSet::new();
+let mut set_s: HashSet<ProductValue> = HashSet::default();
+let mut set_u: HashSet<ProductValue> = HashSet::default();
for next in self.s {
set_s.insert(next);
}
@@ -135,7 +135,7 @@ impl SubscriptionManager {
// Collect the delta tables for each query.
// For selects this is just a single table.
// For joins it's two tables.
-let mut units: HashMap<_, ArrayVec<_, 2>> = HashMap::new();
+let mut units: HashMap<_, ArrayVec<_, 2>> = HashMap::default();
for table @ DatabaseTableUpdate { table_id, .. } in tables {
if let Some(hashes) = self.tables.get(table_id) {
for hash in hashes {
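The comment in the hunk above describes bucketing delta tables by query, with at most two tables per bucket (one for a select, two for a join), which is why the value type is `ArrayVec<_, 2>`. A generic sketch of that grouping pattern, with illustrative integer IDs in place of the real query and table types:

```rust
use arrayvec::ArrayVec;
use std::collections::HashMap;

// Bucket (query id, table id) pairs by query, capping each bucket at two
// tables: a select touches one table, a join touches two.
fn group_by_query(updates: &[(u32, u32)]) -> HashMap<u32, ArrayVec<u32, 2>> {
    let mut units: HashMap<u32, ArrayVec<u32, 2>> = HashMap::default();
    for &(query_id, table_id) in updates {
        // `ArrayVec::push` panics past capacity; real code would guarantee
        // at most two tables per query before getting here.
        units.entry(query_id).or_default().push(table_id);
    }
    units
}

fn main() {
    let units = group_by_query(&[(1, 10), (2, 20), (2, 21)]);
    assert_eq!(units[&1].as_slice(), &[10]);
    assert_eq!(units[&2].as_slice(), &[20, 21]);
}
```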
4 changes: 2 additions & 2 deletions crates/core/src/subscription/subscription.rs
@@ -33,7 +33,7 @@ use anyhow::Context;
use itertools::Either;
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
use spacetimedb_client_api_messages::websocket::{Compression, WebsocketFormat};
-use spacetimedb_data_structures::map::{HashCollectionExt, HashSet};
+use spacetimedb_data_structures::map::HashSet;
use spacetimedb_lib::db::auth::{StAccess, StTableType};
use spacetimedb_lib::db::error::AuthError;
use spacetimedb_lib::identity::AuthCtx;
@@ -403,7 +403,7 @@ impl IncrementalJoin {
if produce_if {
producer().collect()
} else {
-HashSet::new()
+HashSet::default()
}
}

4 changes: 2 additions & 2 deletions crates/lib/src/relation.rs
@@ -3,7 +3,7 @@ use crate::db::error::{RelationError, TypeError};
use core::fmt;
use core::hash::Hash;
use derive_more::From;
-use spacetimedb_data_structures::map::{HashCollectionExt, HashSet};
+use spacetimedb_data_structures::map::HashSet;
use spacetimedb_primitives::{ColId, ColList, ColSet, Constraints, TableId};
use spacetimedb_sats::algebraic_value::AlgebraicValue;
use spacetimedb_sats::satn::Satn;
@@ -237,7 +237,7 @@ pub fn combine_constraints(
*slot = slot.push(constraint);
}

-let mut uniques: HashSet<ColSet> = HashSet::new();
+let mut uniques: HashSet<ColSet> = HashSet::default();
for (col_list, constraint) in &constraints {
if constraint.has_unique() {
uniques.insert(col_list.into());
14 changes: 8 additions & 6 deletions crates/schema/src/type_for_generate.rs
@@ -8,7 +8,7 @@ use petgraph::{
use smallvec::SmallVec;
use spacetimedb_data_structures::{
error_stream::{CollectAllErrors, CombineErrors, ErrorStream},
-map::{HashCollectionExt, HashMap, HashSet},
+map::{HashMap, HashSet},
};
use spacetimedb_lib::{AlgebraicType, ProductTypeElement};
use spacetimedb_sats::{typespace::TypeRefError, AlgebraicTypeRef, ArrayType, SumTypeVariant, Typespace};
@@ -110,11 +110,13 @@ impl TypespaceForGenerate {
) -> TypespaceForGenerateBuilder<'_> {
TypespaceForGenerateBuilder {
typespace,
-result: TypespaceForGenerate { defs: HashMap::new() },
+result: TypespaceForGenerate {
+defs: HashMap::default(),
+},
is_def: is_def.into_iter().collect(),
-uses: HashSet::new(),
-known_uses: HashMap::new(),
-currently_touching: HashSet::new(),
+uses: HashSet::default(),
+known_uses: HashMap::default(),
+currently_touching: HashSet::default(),
}
}

@@ -157,7 +159,7 @@ pub enum AlgebraicTypeDef {

thread_local! {
/// Used to efficiently extract refs from a def.
-static EXTRACT_REFS_BUF: RefCell<HashSet<AlgebraicTypeRef>> = RefCell::new(HashSet::new());
+static EXTRACT_REFS_BUF: RefCell<HashSet<AlgebraicTypeRef>> = RefCell::new(HashSet::default());
}

impl AlgebraicTypeDef {
5 changes: 3 additions & 2 deletions crates/sdk/src/spacetime_module.rs
@@ -10,7 +10,7 @@ use crate::{
};
use anyhow::Context;
use bytes::Bytes;
-use spacetimedb_data_structures::map::{HashCollectionExt, HashMap};
+use spacetimedb_data_structures::map::{DefaultHashBuilder, HashCollectionExt, HashMap};
use spacetimedb_lib::{bsatn, de::DeserializeOwned};
use std::{any::Any, fmt::Debug, hash::Hash};

@@ -196,7 +196,8 @@ impl<Row: DeserializeOwned + Debug> TableUpdate<Row> {
};

// Pre-allocate plenty of space to minimize hash collisions.
-let mut diff: HashMap<Pk, DiffEntry<Row>> = HashMap::with_capacity(raw_updates.num_rows() * 2);
+let mut diff: HashMap<Pk, DiffEntry<Row>> =
+HashMap::<_, _, DefaultHashBuilder>::with_capacity(raw_updates.num_rows() * 2);

// Traverse the `table_update` to construct a diff, merging duplicated `Insert`
// and `Delete` into `Update`.
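For the `with_capacity` call sites (here and in `crates/table/src/table.rs` below) there is no `Default`-based replacement, so the `HashCollectionExt` import stays and the hash builder is pinned with a turbofish, `HashMap::<_, _, DefaultHashBuilder>::with_capacity(..)`, so the call resolves against the intended map type. A sketch of how an extension trait can offer a capacity-aware constructor for any hash builder — the trait and method names here are illustrative, not the crate's actual API:

```rust
use std::collections::hash_map::DefaultHasher;
use std::collections::HashMap;
use std::hash::{BuildHasher, BuildHasherDefault};

// Illustrative extension trait; the real `HashCollectionExt` lives in
// `spacetimedb_data_structures::map` and may differ in shape.
trait WithCapacityExt {
    fn with_capacity_ext(capacity: usize) -> Self;
}

impl<K, V, S: BuildHasher + Default> WithCapacityExt for HashMap<K, V, S> {
    fn with_capacity_ext(capacity: usize) -> Self {
        // `with_capacity_and_hasher` works for any hash builder, unlike the
        // inherent `with_capacity`, which is tied to the standard hasher.
        HashMap::with_capacity_and_hasher(capacity, S::default())
    }
}

fn main() {
    // Spelling out the hash builder in the turbofish mirrors the
    // `HashMap::<_, _, DefaultHashBuilder>::with_capacity(..)` calls above.
    let m = HashMap::<u32, u32, BuildHasherDefault<DefaultHasher>>::with_capacity_ext(64);
    assert!(m.capacity() >= 64);
}
```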
4 changes: 2 additions & 2 deletions crates/table/src/btree_index.rs
@@ -479,7 +479,7 @@ mod test {
use core::ops::Bound::*;
use proptest::prelude::*;
use proptest::{collection::vec, test_runner::TestCaseResult};
-use spacetimedb_data_structures::map::{HashCollectionExt, HashMap};
+use spacetimedb_data_structures::map::HashMap;
use spacetimedb_primitives::ColId;
use spacetimedb_sats::{
product,
@@ -589,7 +589,7 @@
let next = needle + 1;
let range = prev..=next;

-let mut val_to_ptr = HashMap::new();
+let mut val_to_ptr = HashMap::default();

// Insert `prev`, `needle`, and `next`.
for x in range.clone() {
4 changes: 2 additions & 2 deletions crates/table/src/table.rs
@@ -22,7 +22,7 @@ use core::hash::{Hash, Hasher};
use core::ops::RangeBounds;
use core::{fmt, ptr};
use derive_more::{Add, AddAssign, From, Sub};
-use spacetimedb_data_structures::map::{HashCollectionExt, HashMap};
+use spacetimedb_data_structures::map::{DefaultHashBuilder, HashCollectionExt, HashMap};
use spacetimedb_lib::{bsatn::DecodeError, de::DeserializeOwned};
use spacetimedb_primitives::{ColId, ColList, IndexId};
use spacetimedb_sats::{
@@ -1102,7 +1102,7 @@ impl Table {
pages: Pages::default(),
},
schema,
-indexes: HashMap::with_capacity(indexes_capacity),
+indexes: HashMap::<_, _, DefaultHashBuilder>::with_capacity(indexes_capacity),
pointer_map: PointerMap::default(),
squashed_offset,
row_count: 0,
4 changes: 2 additions & 2 deletions crates/testing/src/sdk.rs
@@ -1,7 +1,7 @@
use duct::cmd;
use lazy_static::lazy_static;
use rand::distributions::{Alphanumeric, DistString};
-use spacetimedb_data_structures::map::{HashCollectionExt, HashMap};
+use spacetimedb_data_structures::map::HashMap;
use std::fs::create_dir_all;
use std::sync::Mutex;
use std::thread::JoinHandle;
@@ -119,7 +119,7 @@ macro_rules! memoized {
MEMOIZED
.lock()
.unwrap()
-.get_or_insert_with(HashMap::new)
+.get_or_insert_with(HashMap::default)
.entry($key)
.or_insert_with_key(|$key| -> $value_ty { $body })
.clone()
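In the `memoized!` macro above, the constructor is passed by name rather than called, and `HashMap::default` slots in wherever `HashMap::new` did, since both are plain associated functions usable as `FnOnce() -> _`. A small illustration; the `FastMap` alias is a hypothetical stand-in for the crate's map type:

```rust
use std::collections::hash_map::DefaultHasher;
use std::collections::HashMap;
use std::hash::BuildHasherDefault;

// Hypothetical alias standing in for the crate's custom-hasher map.
type FastMap<K, V> = HashMap<K, V, BuildHasherDefault<DefaultHasher>>;

fn main() {
    let mut slot: Option<FastMap<String, u32>> = None;
    // `FastMap::default` is passed by name, just like `HashMap::default`
    // in the `get_or_insert_with` call above.
    let map = slot.get_or_insert_with(FastMap::default);
    map.insert("key".to_string(), 1);
    assert_eq!(slot.unwrap()["key"], 1);
}
```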
6 changes: 3 additions & 3 deletions crates/vm/src/expr.rs
@@ -6,7 +6,7 @@ use core::slice::from_ref;
use derive_more::From;
use itertools::Itertools;
use smallvec::SmallVec;
-use spacetimedb_data_structures::map::{HashCollectionExt, HashSet, IntMap};
+use spacetimedb_data_structures::map::{HashSet, IntMap};
use spacetimedb_lib::db::auth::{StAccess, StTableType};
use spacetimedb_lib::db::error::{AuthError, RelationError};
use spacetimedb_lib::relation::{ColExpr, DbTable, FieldName, Header};
@@ -1058,7 +1058,7 @@ fn select_best_index<'a>(
.collect::<SmallVec<[_; 1]>>();
indices.sort_unstable_by_key(|cl| Reverse(cl.len()));

-let mut found: IndexColumnOpSink = IndexColumnOpSink::new();
+let mut found: IndexColumnOpSink = IndexColumnOpSink::default();

// Collect fields into a multi-map `(col_id, cmp) -> [col value]`.
// This gives us `log(N)` seek + deletion.
@@ -1868,7 +1868,7 @@ impl QueryExpr {
fn optimize_select(mut q: QueryExpr, op: ColumnOp, tables: &[SourceExpr]) -> QueryExpr {
// Go through each table schema referenced in the query.
// Find the first sargable condition and short-circuit.
-let mut fields_found = HashSet::new();
+let mut fields_found = HashSet::default();
for schema in tables {
for op in select_best_index(&mut fields_found, schema.head(), &op) {
if let IndexColumnOp::Scan(op) = &op {
6 changes: 3 additions & 3 deletions crates/vm/src/rel_ops.rs
@@ -1,7 +1,7 @@
use core::iter;

use crate::relation::RelValue;
-use spacetimedb_data_structures::map::{HashCollectionExt, HashMap};
+use spacetimedb_data_structures::map::HashMap;
use spacetimedb_lib::relation::ColExpr;
use spacetimedb_sats::AlgebraicValue;

@@ -181,7 +181,7 @@ pub struct JoinInner<'a, Lhs, Rhs, KeyLhs, KeyRhs, Pred, Proj> {
impl<'a, Lhs, Rhs, KeyLhs, KeyRhs, Pred, Proj> JoinInner<'a, Lhs, Rhs, KeyLhs, KeyRhs, Pred, Proj> {
pub fn new(lhs: Lhs, rhs: Rhs, key_lhs: KeyLhs, key_rhs: KeyRhs, predicate: Pred, projection: Proj) -> Self {
Self {
-map: HashMap::new(),
+map: HashMap::default(),
lhs,
rhs,
key_lhs,
@@ -206,7 +206,7 @@ where
fn next(&mut self) -> Option<RelValue<'a>> {
// Consume `Rhs`, building a map `KeyRhs => Rhs`.
if !self.filled_rhs {
-self.map = HashMap::new();
+self.map = HashMap::default();
while let Some(row_rhs) = self.rhs.next() {
let key_rhs = (self.key_rhs)(&row_rhs);
self.map.entry(key_rhs).or_default().push(row_rhs);
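`JoinInner` above follows the usual hash-join shape: consume the right-hand side into a map keyed by the join key, then probe that map with each left-hand row. A self-contained sketch of that shape — the function and its row types are illustrative, not the VM's actual implementation:

```rust
use std::collections::HashMap;

// Build a map over the right side keyed by the join key, then probe it
// with each left row, emitting one pair per matching right row.
fn hash_join<L, R, K, FL, FR>(lhs: Vec<L>, rhs: Vec<R>, key_lhs: FL, key_rhs: FR) -> Vec<(L, R)>
where
    K: std::hash::Hash + Eq,
    L: Clone,
    R: Clone,
    FL: Fn(&L) -> K,
    FR: Fn(&R) -> K,
{
    // Build phase: consume `rhs`, building a map `KeyRhs => Vec<Rhs>`.
    let mut map: HashMap<K, Vec<R>> = HashMap::default();
    for row in rhs {
        map.entry(key_rhs(&row)).or_default().push(row);
    }
    // Probe phase: look up each left row's key in the map.
    let mut out = Vec::new();
    for l in lhs {
        if let Some(matches) = map.get(&key_lhs(&l)) {
            for r in matches {
                out.push((l.clone(), r.clone()));
            }
        }
    }
    out
}

fn main() {
    let joined = hash_join(
        vec![(1, "a"), (2, "b")],
        vec![(1, 10), (1, 11), (3, 30)],
        |l| l.0,
        |r| r.0,
    );
    assert_eq!(joined, vec![((1, "a"), (1, 10)), ((1, "a"), (1, 11))]);
}
```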