diff --git a/Cargo.lock b/Cargo.lock
index 6929b28..7e08104 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -485,7 +485,7 @@ checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
[[package]]
name = "avail-core"
-version = "0.6.1"
+version = "0.6.2"
dependencies = [
"avail-core",
"binary-merkle-tree",
@@ -2124,7 +2124,7 @@ dependencies = [
[[package]]
name = "kate"
-version = "0.9.0"
+version = "0.9.1"
dependencies = [
"avail-core",
"criterion",
@@ -2154,7 +2154,7 @@ dependencies = [
[[package]]
name = "kate-recovery"
-version = "0.9.3"
+version = "0.10.0"
dependencies = [
"avail-core",
"derive_more",
diff --git a/core/Cargo.toml b/core/Cargo.toml
index c46cf34..8e11492 100644
--- a/core/Cargo.toml
+++ b/core/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "avail-core"
-version = "0.6.1"
+version = "0.6.2"
authors = []
edition = "2021"
license = "Apache-2.0"
diff --git a/core/src/asdr.rs b/core/src/asdr.rs
index 8c1bc2b..7343e83 100644
--- a/core/src/asdr.rs
+++ b/core/src/asdr.rs
@@ -55,7 +55,7 @@ use sp_std::alloc::format;
/// the decoding fails.
pub const EXTRINSIC_FORMAT_VERSION: u8 = 4;
-/// The `SingaturePayload` of `UncheckedExtrinsic`.
+/// The `SignaturePayload` of `UncheckedExtrinsic`.
type SignaturePayload<Address, Signature, Extra>
= (Address, Signature, Extra);
/// An extrinsic right from the external world. This is unchecked and so can contain a signature.
@@ -735,13 +735,12 @@ mod tests {
use core::mem::transmute;
let op = unsigned_to_opaque();
- let new_op = unsafe {
+ unsafe {
// Using `transmute` because `OpaqueExtrinsic.0` is not public.
let mut raw = transmute::<OpaqueExtrinsic, Vec<u8>>(op);
raw.pop();
transmute::<Vec<u8>, OpaqueExtrinsic>(raw)
- };
- new_op
+ }
}
#[test_case( unsigned_to_opaque() => true ; "Unsigned Ex")]
diff --git a/core/src/constants.rs b/core/src/constants.rs
index 640c88d..d1a11af 100644
--- a/core/src/constants.rs
+++ b/core/src/constants.rs
@@ -1,6 +1,6 @@
use core::num::NonZeroU32;
use sp_arithmetic::Perbill;
-use static_assertions::const_assert;
+use static_assertions::{const_assert, const_assert_eq};
/// We allow `Normal` extrinsics to fill up the block up to 85%, the rest can be used
/// by Operational extrinsics.
@@ -14,7 +14,6 @@ const_assert!(BLOCK_CHUNK_SIZE.get() > 0);
pub const BLOCK_CHUNK_SIZE: NonZeroU32 = unsafe { NonZeroU32::new_unchecked(32) };
/// Money matters.
-// TODO: evaluate whether we should consider moving this into avail
pub mod currency {
pub type Balance = u128;
@@ -22,8 +21,7 @@ pub mod currency {
/// `AVAIL` has 18 decimal positions.
pub const AVAIL: Balance = 1_000_000_000_000_000_000;
- /// Cents of AVAIL has 16 decimal positions (100 Cents = $1)
- /// 1 Cents = `10_000_000_000_000_000`
+ /// Cents of AVAIL has 16 decimal positions (100 Cents = 1)
pub const CENTS: Balance = AVAIL / 100;
/// Millicent of AVAIL has 13 decimal positions( 1000 mCents = 1 cent).
@@ -41,3 +39,14 @@ pub mod currency {
/// `PICO_AVAIL` has 6 decimal positions
pub const PICO_AVAIL: Balance = NANO_AVAIL / 1_000;
}
+
+pub mod kate {
+ use super::*;
+ pub const EXTENSION_FACTOR: u32 = 2;
+ pub const COMMITMENT_SIZE: usize = 48;
+ pub const DATA_CHUNK_SIZE: usize = 31;
+ pub const CHUNK_SIZE: usize = 32;
+
+ const_assert_eq!(DATA_CHUNK_SIZE, CHUNK_SIZE - 1);
+ const_assert_eq!(CHUNK_SIZE, BLOCK_CHUNK_SIZE.get() as usize);
+}
diff --git a/core/src/data_lookup/mod.rs b/core/src/data_lookup/mod.rs
index ccfdcc1..b637730 100644
--- a/core/src/data_lookup/mod.rs
+++ b/core/src/data_lookup/mod.rs
@@ -159,7 +159,7 @@ impl TryFrom<CompactDataLookup> for DataLookup {
let last_range = offset..compacted.size;
if !last_range.is_empty() {
- index.push((prev_id, offset..compacted.size));
+ index.push((prev_id, last_range));
}
let lookup = DataLookup { index };
@@ -204,7 +204,7 @@ mod test {
#[test_case( vec![(0, 15), (1, 20), (2, 150)] => Ok(vec![(0,0..15),(1, 15..35), (2, 35..185)]); "Valid case")]
#[test_case( vec![(0, usize::MAX)] => Err(Error::OffsetOverflows); "Offset overflows at zero")]
#[test_case( vec![(0, (u32::MAX -1) as usize), (1, 2)] => Err(Error::OffsetOverflows); "Offset overflows at non zero")]
- #[test_case( vec![(1, 10), (0, 2)] => Err(Error::DataNotSorted); "Unsortend data")]
+ #[test_case( vec![(1, 10), (0, 2)] => Err(Error::DataNotSorted); "Unsorted data")]
#[test_case( vec![] => Ok(vec![]); "Empty data")]
fn from_id_and_len(
id_len_data: Vec<(u32, usize)>,
diff --git a/core/src/data_proof.rs b/core/src/data_proof.rs
index 599eaea..2584130 100644
--- a/core/src/data_proof.rs
+++ b/core/src/data_proof.rs
@@ -12,7 +12,7 @@ use serde::{Deserialize, Serialize};
#[cfg(feature = "runtime")]
use sp_io::hashing::keccak_256;
-/// Max data supported on bidge (Ethereum calldata limits)
+/// Max data supported on bridge (Ethereum calldata limits)
pub const BOUNDED_DATA_MAX_LENGTH: u32 = 102_400;
/// Maximum size of data allowed in the bridge
diff --git a/core/src/lib.rs b/core/src/lib.rs
index a3e876b..b84f6b9 100644
--- a/core/src/lib.rs
+++ b/core/src/lib.rs
@@ -184,7 +184,7 @@ macro_rules! keccak256_concat_update {
}};
}
-/// Calculates the Kecck 256 of arguments with NO extra allocations to join inputs.
+/// Calculates the Keccak 256 of arguments with NO extra allocations to join inputs.
#[macro_export]
macro_rules! keccak256_concat{
($($arg:tt)*) => {{
diff --git a/kate/Cargo.toml b/kate/Cargo.toml
index bce2a25..487731f 100644
--- a/kate/Cargo.toml
+++ b/kate/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "kate"
-version = "0.9.0"
+version = "0.9.1"
authors = ["Denis Ermolin "]
edition = "2021"
license = "Apache-2.0"
diff --git a/kate/recovery/Cargo.toml b/kate/recovery/Cargo.toml
index e794c0a..9e885d5 100644
--- a/kate/recovery/Cargo.toml
+++ b/kate/recovery/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "kate-recovery"
-version = "0.9.3"
+version = "0.10.0"
authors = ["Denis Ermolin "]
edition = "2018"
license = "Apache-2.0"
diff --git a/kate/recovery/src/com.rs b/kate/recovery/src/com.rs
index 656ead8..8d3fd1a 100644
--- a/kate/recovery/src/com.rs
+++ b/kate/recovery/src/com.rs
@@ -1,17 +1,18 @@
use crate::matrix;
+#[cfg(feature = "std")]
+use crate::{data, sparse_slice_read::SparseSliceRead};
use core::{num::TryFromIntError, ops::Range};
+#[cfg(feature = "std")]
+use avail_core::{
+ constants::kate::{CHUNK_SIZE, DATA_CHUNK_SIZE},
+ ensure,
+};
use avail_core::{data_lookup::Error as DataLookupError, AppId, DataLookup};
use sp_std::prelude::*;
use thiserror_no_std::Error;
-#[cfg(feature = "std")]
-use crate::data;
-#[cfg(feature = "std")]
-use crate::{config, sparse_slice_read::SparseSliceRead};
-#[cfg(feature = "std")]
-use avail_core::ensure;
#[cfg(feature = "std")]
use codec::{Decode, IoReader};
#[cfg(feature = "std")]
@@ -177,9 +178,9 @@ pub fn reconstruct_app_extrinsics(
app_id: AppId,
) -> Result<AppData, ReconstructionError> {
let data = reconstruct_available(dimensions, cells)?;
- const_assert!(config::CHUNK_SIZE as u64 <= u32::MAX as u64);
+ const_assert!(CHUNK_SIZE as u64 <= u32::MAX as u64);
let range = index
- .projected_range_of(app_id, config::CHUNK_SIZE as u32)
+ .projected_range_of(app_id, CHUNK_SIZE as u32)
.ok_or(ReconstructionError::MissingId(app_id))?;
Ok(unflatten_padded_data(vec![(app_id, range)], data)?
@@ -203,8 +204,8 @@ pub fn reconstruct_extrinsics(
) -> Result<Vec<(AppId, AppData)>, ReconstructionError> {
let data = reconstruct_available(dimensions, cells)?;
- const_assert!(config::CHUNK_SIZE as u64 <= u32::MAX as u64);
- let ranges = lookup.projected_ranges(config::CHUNK_SIZE as u32)?;
+ const_assert!(CHUNK_SIZE as u64 <= u32::MAX as u64);
+ let ranges = lookup.projected_ranges(CHUNK_SIZE as u32)?;
unflatten_padded_data(ranges, data).map_err(ReconstructionError::DataDecodingError)
}
@@ -218,7 +219,7 @@ pub fn reconstruct_extrinsics(
pub fn reconstruct_columns(
dimensions: matrix::Dimensions,
cells: &[data::Cell],
-) -> Result<HashMap<u16, Vec<[u8; config::CHUNK_SIZE]>>, ReconstructionError> {
+) -> Result<HashMap<u16, Vec<[u8; CHUNK_SIZE]>>, ReconstructionError> {
let cells: Vec = cells.iter().cloned().map(Into::into).collect::>();
let columns = map_cells(dimensions, cells)?;
@@ -235,7 +236,7 @@ pub fn reconstruct_columns(
let column = reconstruct_column(dimensions.extended_rows(), &cells)?
.iter()
.map(BlsScalar::to_bytes)
- .collect::<Vec<[u8; config::CHUNK_SIZE]>>();
+ .collect::<Vec<[u8; CHUNK_SIZE]>>();
Ok((col, column))
})
@@ -266,7 +267,7 @@ fn reconstruct_available(
})
.collect::>, ReconstructionError>>()?;
- let mut result: Vec<u8> = Vec::with_capacity(scalars.len() * config::CHUNK_SIZE);
+ let mut result: Vec<u8> = Vec::with_capacity(scalars.len() * CHUNK_SIZE);
for (row, col) in dimensions.iter_data() {
let bytes = scalars
@@ -275,7 +276,7 @@ fn reconstruct_available(
.map(Option::as_ref)
.unwrap_or(None)
.map(BlsScalar::to_bytes)
- .unwrap_or_else(|| [0; config::CHUNK_SIZE]);
+ .unwrap_or_else(|| [0; CHUNK_SIZE]);
result.extend(bytes);
}
Ok(result)
@@ -318,14 +319,14 @@ pub fn decode_app_extrinsics(
.and_then(|column| column.get(&row_number))
.filter(|cell| !cell.data.is_empty())
{
- None => app_data.extend(vec![0; config::CHUNK_SIZE]),
+ None => app_data.extend(vec![0; CHUNK_SIZE]),
Some(cell) => app_data.extend(cell.data),
}
}
- const_assert!((config::CHUNK_SIZE as u64) <= (u32::MAX as u64));
+ const_assert!((CHUNK_SIZE as u64) <= (u32::MAX as u64));
let ranges = index
- .projected_range_of(app_id, config::CHUNK_SIZE as u32)
+ .projected_range_of(app_id, CHUNK_SIZE as u32)
.map(|range| vec![(app_id, range)])
.unwrap_or_default();
@@ -363,20 +364,17 @@ pub fn unflatten_padded_data(
ranges: Vec<(AppId, AppDataRange)>,
data: Vec<u8>,
) -> Result<Vec<(AppId, AppData)>, UnflattenError> {
- ensure!(
- data.len() % config::CHUNK_SIZE == 0,
- UnflattenError::InvalidLen
- );
+ ensure!(data.len() % CHUNK_SIZE == 0, UnflattenError::InvalidLen);
fn extract_encoded_extrinsic(range_data: &[u8]) -> SparseSliceRead {
- const_assert_ne!(config::CHUNK_SIZE, 0);
- const_assert_ne!(config::DATA_CHUNK_SIZE, 0);
+ const_assert_ne!(CHUNK_SIZE, 0);
+ const_assert_ne!(DATA_CHUNK_SIZE, 0);
// INTERNAL: Chunk into 32 bytes (CHUNK_SIZE), then remove padding (0..30 bytes).
SparseSliceRead::from_iter(
range_data
- .chunks_exact(config::CHUNK_SIZE)
- .map(|chunk| &chunk[0..config::DATA_CHUNK_SIZE]),
+ .chunks_exact(CHUNK_SIZE)
+ .map(|chunk| &chunk[0..DATA_CHUNK_SIZE]),
)
}
@@ -577,7 +575,6 @@ pub fn reconstruct_column(
let mut subset: Vec