Commit 0b0849b: Schema updates WIP

Get  compiling, fix birdbrained API

.

WIP
kazimuth committed Sep 11, 2024
1 parent f3e3d92 commit 0b0849b
Showing 18 changed files with 762 additions and 722 deletions.
622 changes: 360 additions & 262 deletions crates/core/src/db/datastore/system_tables.rs

Large diffs are not rendered by default.

7 changes: 5 additions & 2 deletions crates/core/src/db/relational_db.rs
@@ -520,7 +520,7 @@ impl RelationalDB {
         table_id: TableId,
     ) -> Result<bool, DBError> {
         tx.schema_for_table(ctx, table_id)
-            .map(|schema| schema.scheduled.is_some())
+            .map(|schema| schema.schedule.is_some())
     }
 
     pub fn decode_column(
@@ -944,6 +944,8 @@ impl RelationalDB {
         tx.table_row_count(table_id)
     }
 
+    /// Returns the constraints on the input `ColList`.
+    /// Note that this is ORDER-SENSITIVE: the order of the columns in the input `ColList` matters.
     pub fn column_constraints(
         &self,
         tx: &mut MutTx,
@@ -952,7 +954,8 @@
     ) -> Result<Constraints, DBError> {
         let table = self.inner.schema_for_table_mut_tx(tx, table_id)?;
 
-        let unique_index = table.indexes.iter().find(|x| &x.columns == cols).map(|x| x.is_unique);
+        let index = unimplemented!();
+        let unique_constraint = table.constraints.iter().find(|c| c.unique_columns() == Some(cols));
         let mut attr = Constraints::unset();
         if let Some(is_unique) = unique_index {
             attr = attr.push(Constraints::from_is_unique(is_unique));
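The new doc comment on `column_constraints` flags the lookup as order-sensitive. That follows from keying constraints by `ColList` (see the relation.rs change below): an ordered key compares element-wise, so the same columns in a different order form a different key. A minimal standalone sketch, with `Vec<u32>` standing in for `ColList`:

    use std::collections::BTreeMap;

    fn main() {
        // Stand-in for BTreeMap<ColList, Constraints>: the key is an ordered
        // list of column positions, so [0, 1] and [1, 0] are distinct keys.
        let mut constraints: BTreeMap<Vec<u32>, &str> = BTreeMap::new();
        constraints.insert(vec![0, 1], "UNIQUE");

        // A lookup with the same column order succeeds...
        assert_eq!(constraints.get(&vec![0, 1]), Some(&"UNIQUE"));
        // ...but the same columns in reverse order find nothing.
        assert_eq!(constraints.get(&vec![1, 0]), None);
    }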
4 changes: 3 additions & 1 deletion crates/core/src/error.rs
@@ -7,7 +7,7 @@ use hex::FromHexError;
 use spacetimedb_sats::AlgebraicType;
 use spacetimedb_snapshot::SnapshotError;
 use spacetimedb_table::read_column;
-use spacetimedb_table::table::{self, UniqueConstraintViolation};
+use spacetimedb_table::table::{self, ReadViaBsatnError, UniqueConstraintViolation};
 use thiserror::Error;
 
 use crate::client::ClientActorId;
@@ -208,6 +208,8 @@ pub enum DBError {
     #[error(transparent)]
     // Box the inner [`SnapshotError`] to keep Clippy quiet about large `Err` variants.
     Snapshot(#[from] Box<SnapshotError>),
+    #[error("Error reading a value from a table through BSATN: {0}")]
+    ReadViaBsatnError(#[from] ReadViaBsatnError),
     #[error(transparent)]
     Other(#[from] anyhow::Error),
 }
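For context on the new variant: thiserror's `#[from]` attribute derives a `From` impl, so the `?` operator converts the inner error into `DBError` automatically. A minimal sketch of the same pattern, with a hypothetical `DecodeError` standing in for `ReadViaBsatnError`:

    use thiserror::Error;

    // Hypothetical inner error, standing in for ReadViaBsatnError.
    #[derive(Debug, Error)]
    #[error("bad byte at offset {0}")]
    struct DecodeError(usize);

    #[derive(Debug, Error)]
    enum DbError {
        // `#[from]` generates `impl From<DecodeError> for DbError`,
        // so `?` on a Result<_, DecodeError> converts automatically.
        #[error("Error reading a value from a table through BSATN: {0}")]
        Decode(#[from] DecodeError),
    }

    fn read_value() -> Result<u32, DbError> {
        Err(DecodeError(7))? // converted via the derived From impl
    }

    fn main() {
        println!("{}", read_value().unwrap_err());
    }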
1 change: 0 additions & 1 deletion crates/core/src/host/module_host.rs
@@ -198,7 +198,6 @@ impl EventStatus {
 pub struct ModuleFunctionCall {
     pub reducer: String,
     pub reducer_id: ReducerId,
-    pub args: ArgsTuple,
 }
 
 #[derive(Debug, Clone)]
11 changes: 4 additions & 7 deletions crates/core/src/sql/compiler.rs
@@ -135,18 +135,15 @@ fn compile_columns(table: &TableSchema, cols: &[ColId]) -> DbTable {
     let mut columns = Vec::with_capacity(cols.len());
     let cols = cols
         .iter()
+        // TODO: should we error here instead?
+        // When would the user be passing in columns that aren't present?
         .filter_map(|col| table.get_column(col.idx()))
         .map(|col| relation::Column::new(FieldName::new(table.table_id, col.col_pos), col.col_type.clone()));
     columns.extend(cols);
 
-    let header = Arc::new(Header::new(
-        table.table_id,
-        table.table_name.clone(),
-        columns,
-        table.get_constraints(),
-    ));
+    let header = Header::from(table).project_col_list(&columns.iter().map(|x| x.field.col).collect());
 
-    DbTable::new(header, table.table_id, table.table_type, table.table_access)
+    DbTable::new(Arc::new(header), table.table_id, table.table_type, table.table_access)
 }
 
 /// Compiles a `INSERT ...` clause
9 changes: 7 additions & 2 deletions crates/lib/src/db/raw_def/v9.rs
@@ -247,6 +247,7 @@ pub struct RawIndexDefV9 {
 }
 
 /// Data specifying an index algorithm.
+/// New fields MUST be added to the END of this enum, to maintain ABI compatibility.
 #[non_exhaustive]
 #[derive(Debug, Clone, SpacetimeType)]
 #[sats(crate = crate)]
@@ -308,6 +309,8 @@ pub struct RawConstraintDefV9 {
     pub data: RawConstraintDataV9,
 }
 
+/// Raw data attached to a constraint.
+/// New fields MUST be added to the END of this enum, to maintain ABI compatibility.
 #[derive(Debug, Clone, SpacetimeType)]
 #[sats(crate = crate)]
 #[cfg_attr(feature = "test", derive(PartialEq, Eq, PartialOrd, Ord))]
@@ -613,7 +616,8 @@ impl<'a> RawTableDefBuilder<'a> {
     }
 
     /// Generates a [UniqueConstraintDef] using the supplied `columns`.
-    pub fn with_unique_constraint(mut self, columns: ColList, name: Option<RawIdentifier>) -> Self {
+    pub fn with_unique_constraint(mut self, columns: impl Into<ColList>, name: Option<RawIdentifier>) -> Self {
+        let columns = columns.into();
         let name = name.unwrap_or_else(|| self.generate_unique_constraint_name(&columns));
         self.table.constraints.push(RawConstraintDefV9 {
             name,
@@ -633,10 +637,11 @@
     pub fn with_index(
         mut self,
         algorithm: RawIndexAlgorithm,
-        accessor_name: RawIdentifier,
+        accessor_name: impl Into<RawIdentifier>,
         name: Option<RawIdentifier>,
     ) -> Self {
         let name = name.unwrap_or_else(|| self.generate_index_name(&algorithm));
+        let accessor_name = accessor_name.into();
 
         self.table.indexes.push(RawIndexDefV9 {
             name,
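Loosening the builder parameters to `impl Into<...>` is a common ergonomics pattern: callers can pass a `&str` (or anything else convertible) instead of constructing the concrete type first. A standalone sketch of the idea with a hypothetical builder, not the real `RawTableDefBuilder` API:

    // Hypothetical newtype standing in for RawIdentifier.
    #[derive(Debug, Clone)]
    struct Identifier(String);

    impl From<&str> for Identifier {
        fn from(s: &str) -> Self {
            Identifier(s.to_owned())
        }
    }

    #[derive(Default)]
    struct Builder {
        indexes: Vec<Identifier>,
    }

    impl Builder {
        // Taking `impl Into<Identifier>` means callers can pass a &str
        // literal instead of constructing the newtype themselves.
        fn with_index(mut self, accessor_name: impl Into<Identifier>) -> Self {
            self.indexes.push(accessor_name.into());
            self
        }
    }

    fn main() {
        let b = Builder::default().with_index("by_name"); // no Identifier(...) boilerplate
        println!("{:?}", b.indexes);
    }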
42 changes: 34 additions & 8 deletions crates/lib/src/relation.rs
@@ -7,6 +7,7 @@ use spacetimedb_primitives::{ColId, ColList, Constraints, TableId};
 use spacetimedb_sats::algebraic_value::AlgebraicValue;
 use spacetimedb_sats::satn::Satn;
 use spacetimedb_sats::{algebraic_type, AlgebraicType};
+use std::collections::BTreeMap;
 use std::sync::Arc;
 
 #[derive(Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Hash)]
@@ -92,16 +93,26 @@ pub struct Header {
     pub table_id: TableId,
     pub table_name: Box<str>,
     pub fields: Vec<Column>,
-    pub constraints: Vec<(ColList, Constraints)>,
+    pub constraints: BTreeMap<ColList, Constraints>,
 }
 
 impl Header {
+    /// Create a new header.
+    /// Note that equal ColLists with different Constraints will have their constraints unioned.
     pub fn new(
         table_id: TableId,
         table_name: Box<str>,
         fields: Vec<Column>,
-        constraints: Vec<(ColList, Constraints)>,
+        uncompressed_constraints: impl IntoIterator<Item = (ColList, Constraints)>,
     ) -> Self {
+        let mut constraints = BTreeMap::new();
+        for (col_list, constraint) in uncompressed_constraints {
+            constraints
+                .entry(col_list)
+                .or_insert(Constraints::unset())
+                .push(constraint);
+        }
+
         Self {
             table_id,
             table_name,
@@ -140,13 +151,13 @@ impl Header {
     }
 
     /// Copy the [Constraints] that are referenced in the list of `for_columns`
-    fn retain_constraints(&self, for_columns: &ColList) -> Vec<(ColList, Constraints)> {
+    fn retain_constraints(&self, for_columns: &ColList) -> BTreeMap<ColList, Constraints> {
         // Copy the constraints of the selected columns and retain the multi-column ones...
         self.constraints
             .iter()
             // Keep constraints with a col list where at least one col is in `for_columns`.
             .filter(|(cols, _)| cols.iter().any(|c| for_columns.contains(c)))
-            .cloned()
+            .map(|(cols, constraints)| (cols.clone(), constraints.clone()))
             .collect()
     }
 
@@ -158,28 +169,43 @@
 
     /// Project the [ColExpr]s & the [Constraints] that referenced them
     pub fn project(&self, cols: &[ColExpr]) -> Result<Self, RelationError> {
-        let mut p = Vec::with_capacity(cols.len());
+        let mut fields = Vec::with_capacity(cols.len());
         let mut to_keep = ColList::with_capacity(cols.len() as _);
 
         for (pos, col) in cols.iter().enumerate() {
             match col {
                 ColExpr::Col(col) => {
                     to_keep.push(*col);
-                    p.push(self.fields[col.idx()].clone());
+                    fields.push(self.fields[col.idx()].clone());
                 }
                 ColExpr::Value(val) => {
+                    // TODO: why should this field name be relevant?
+                    // We should generate immediate names instead.
                     let field = FieldName::new(self.table_id, pos.into());
                     let ty = val.type_of().ok_or_else(|| {
                         RelationError::TypeInference(field, TypeError::CannotInferType { value: val.clone() })
                     })?;
-                    p.push(Column::new(field, ty));
+                    fields.push(Column::new(field, ty));
                 }
             }
         }
 
         let constraints = self.retain_constraints(&to_keep);
 
-        Ok(Self::new(self.table_id, self.table_name.clone(), p, constraints))
+        Ok(Self::new(self.table_id, self.table_name.clone(), fields, constraints))
     }
 
+    /// Project the ourself onto the `ColList`, keeping constraints that reference the columns in the ColList.
+    /// Does not change `ColIDs`.
+    pub fn project_col_list(&self, cols: &ColList) -> Self {
+        let mut fields = Vec::with_capacity(cols.len() as usize);
+
+        for col in cols.iter() {
+            fields.push(self.fields[col.idx()].clone());
+        }
+
+        let constraints = self.retain_constraints(cols);
+        Self::new(self.table_id, self.table_name.clone(), fields, constraints)
+    }
+
     /// Adds the fields & [Constraints] from `right` to this [`Header`],
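Switching `Header::constraints` from `Vec<(ColList, Constraints)>` to `BTreeMap<ColList, Constraints>` makes the documented unioning behavior explicit: equal `ColList` keys collapse into one entry whose constraints are combined. A standalone sketch of that fold, with a `u8` bitflag standing in for `Constraints` and `Vec<u16>` for `ColList`:

    use std::collections::BTreeMap;

    const UNIQUE: u8 = 0b01; // stand-in constraint bits
    const INDEXED: u8 = 0b10;

    fn main() {
        let uncompressed = vec![
            (vec![0u16], UNIQUE),
            (vec![0u16], INDEXED), // same col list, different constraint
            (vec![1u16], INDEXED),
        ];

        // Equal col lists get their constraints unioned, as in Header::new.
        let mut constraints: BTreeMap<Vec<u16>, u8> = BTreeMap::new();
        for (col_list, constraint) in uncompressed {
            *constraints.entry(col_list).or_insert(0) |= constraint;
        }

        assert_eq!(constraints[&vec![0u16]], UNIQUE | INDEXED);
        assert_eq!(constraints[&vec![1u16]], INDEXED);
    }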
5 changes: 5 additions & 0 deletions crates/primitives/src/attr.rs
@@ -197,6 +197,11 @@ impl Constraints {
         Self::new(ColumnAttribute::PRIMARY_KEY_IDENTITY)
     }
 
+    /// Creates a new `Constraints` instance with [ColumnAttribute::AUTO_INC] set.
+    pub const fn auto_inc() -> Self {
+        Self::new(ColumnAttribute::AUTO_INC)
+    }
+
     /// Adds a constraint to the existing constraints.
     ///
     /// # Example
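Making the constructor a `const fn` means the value can be built in const contexts (statics, consts, array initializers). A standalone sketch of the pattern with hypothetical `Attr` and `Constraints` types, mirroring but not reproducing the real ones:

    // Hypothetical bitflag attribute type mirroring the ColumnAttribute pattern.
    #[derive(Clone, Copy, Debug, PartialEq, Eq)]
    struct Attr(u8);

    impl Attr {
        const AUTO_INC: Attr = Attr(0b001);
    }

    #[derive(Clone, Copy, Debug, PartialEq, Eq)]
    struct Constraints(Attr);

    impl Constraints {
        // `const fn` constructors like auto_inc() are callable at compile time.
        const fn new(attr: Attr) -> Self {
            Constraints(attr)
        }
        const fn auto_inc() -> Self {
            Self::new(Attr::AUTO_INC)
        }
    }

    // Usable in a const precisely because the constructor is `const`.
    const DEFAULT_SEQ: Constraints = Constraints::auto_inc();

    fn main() {
        assert_eq!(DEFAULT_SEQ, Constraints::auto_inc());
    }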
1 change: 1 addition & 0 deletions crates/primitives/src/ids.rs
@@ -53,3 +53,4 @@ system_id!(SequenceId, u32);
 system_id!(IndexId, u32);
 system_id!(ConstraintId, u32);
 system_id!(ColId, u16);
+system_id!(ScheduleId, u32);
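The diff only shows the `system_id!` invocation; presumably it generates an integer newtype. A hedged sketch of what such a macro might expand to (hypothetical; the real macro likely derives more traits and conversions):

    // Hypothetical reconstruction of a system_id!-style macro: an integer
    // newtype with a From conversion from the raw integer type.
    macro_rules! system_id {
        ($name:ident, $ty:ty) => {
            #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
            pub struct $name(pub $ty);

            impl From<$ty> for $name {
                fn from(raw: $ty) -> Self {
                    $name(raw)
                }
            }
        };
    }

    system_id!(ScheduleId, u32);

    fn main() {
        let id: ScheduleId = 7u32.into();
        assert_eq!(id, ScheduleId(7));
    }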
2 changes: 1 addition & 1 deletion crates/primitives/src/lib.rs
@@ -7,7 +7,7 @@ mod ids;
 
 pub use attr::{AttributeKind, ColumnAttribute, ConstraintKind, Constraints};
 pub use col_list::ColList;
-pub use ids::{ColId, ConstraintId, IndexId, SequenceId, TableId};
+pub use ids::{ColId, ConstraintId, IndexId, ScheduleId, SequenceId, TableId};
 
 /// The minimum size of a chunk yielded by a wasm abi RowIter.
 pub const ROW_ITER_CHUNK_SIZE: usize = 32 * 1024;
3 changes: 3 additions & 0 deletions crates/schema/Cargo.toml
@@ -6,6 +6,9 @@ license-file = "LICENSE"
 description = "Schema library for SpacetimeDB"
 rust-version.workspace = true
 
+[features]
+test = []
+
 [dependencies]
 spacetimedb-lib.workspace = true
 spacetimedb-primitives.workspace = true
15 changes: 14 additions & 1 deletion crates/schema/src/def.rs
@@ -20,6 +20,7 @@ use std::hash::Hash;
 
 use crate::error::{IdentifierError, ValidationErrors};
 use crate::identifier::Identifier;
+use crate::schema::TableSchema;
 use crate::type_for_generate::{AlgebraicTypeUse, ProductTypeDef, TypespaceForGenerate};
 use hashbrown::Equivalent;
 use itertools::Itertools;
@@ -32,7 +33,7 @@ use spacetimedb_lib::db::raw_def::v9::{
     RawTypeDefV9, RawUniqueConstraintDataV9, TableAccess, TableType,
 };
 use spacetimedb_lib::{ProductType, RawModuleDef};
-use spacetimedb_primitives::{ColId, ColList};
+use spacetimedb_primitives::{ColId, ColList, TableId};
 use spacetimedb_sats::AlgebraicType;
 use spacetimedb_sats::{AlgebraicTypeRef, Typespace};
 
@@ -166,6 +167,18 @@ impl ModuleDef {
         self.tables.get(name)
     }
 
+    /// Convenience method to look up a table and convert it to a `TableSchema`.
+    /// All indexes, constraints, etc inside the table will have ID 0!
+    pub fn table_schema<K: ?Sized + Hash + Equivalent<Identifier>>(
+        &self,
+        name: &K,
+        id: TableId,
+    ) -> Option<TableSchema> {
+        // If the string IS a valid identifier, we can just look it up.
+        let table_def = self.tables.get(name)?;
+        Some(TableSchema::from_module_def(self, table_def, id))
+    }
+
     /// Lookup a type's name from its `AlgebraicTypeRef`.
     pub fn type_name_from_ref(&self, r: AlgebraicTypeRef) -> Option<&RefPointee> {
         self.refmap.get(&r)
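The `K: ?Sized + Hash + Equivalent<Identifier>` bound on `table_schema` lets callers look up tables by `&str` without allocating an `Identifier` first. hashbrown's `Equivalent` trait generalizes std's `Borrow`; a standalone sketch of the same lookup trick using std's `HashMap` and `Borrow`:

    use std::borrow::Borrow;
    use std::collections::HashMap;

    // Hypothetical identifier newtype; the real code uses hashbrown's
    // Equivalent trait, but std's Borrow shows the same lookup shape.
    #[derive(Debug, Clone, PartialEq, Eq, Hash)]
    struct Identifier(String);

    impl Borrow<str> for Identifier {
        fn borrow(&self) -> &str {
            &self.0
        }
    }

    fn main() {
        let mut tables: HashMap<Identifier, u32> = HashMap::new();
        tables.insert(Identifier("players".to_owned()), 42);

        // The borrowed-key bound lets callers pass &str without
        // allocating an Identifier for the lookup.
        assert_eq!(tables.get("players"), Some(&42));
    }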