Skip to content
This repository has been archived by the owner on Nov 15, 2023. It is now read-only.

Add tests and modify as_vec implementation #3715

Merged
merged 4 commits into from
Sep 6, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

38 changes: 25 additions & 13 deletions erasure-coding/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -298,10 +298,10 @@ where

/// Verify a merkle branch, yielding the chunk hash meant to be present at that
/// index.
pub fn branch_hash(root: &H256, branch_nodes: &[Vec<u8>], index: usize) -> Result<H256, Error> {
pub fn branch_hash(root: &H256, branch_nodes: &Proof, index: usize) -> Result<H256, Error> {
let mut trie_storage: MemoryDB<Blake2Hasher> = MemoryDB::default();
for node in branch_nodes.iter() {
(&mut trie_storage as &mut trie::HashDB<_>).insert(EMPTY_PREFIX, node.as_slice());
(&mut trie_storage as &mut trie::HashDB<_>).insert(EMPTY_PREFIX, node);
}

let trie = TrieDB::new(&trie_storage, &root).map_err(|_| Error::InvalidBranchProof)?;
Expand Down Expand Up @@ -372,6 +372,10 @@ mod tests {
use super::*;
use polkadot_primitives::v0::{AvailableData, BlockData, PoVBlock};

// In order to adequately compute the number of entries in the Merkle
// trie, we must account for the fixed 16-ary trie structure.
const KEY_INDEX_NIBBLE_SIZE: usize = 4;

#[test]
fn field_order_is_right_size() {
assert_eq!(MAX_VALIDATORS, 65536);
Expand Down Expand Up @@ -404,28 +408,36 @@ mod tests {
assert_eq!(reconstructed, Err(Error::NotEnoughValidators));
}

#[test]
fn construct_valid_branches() {
let pov_block = PoVBlock { block_data: BlockData(vec![2; 256]) };
fn generate_trie_and_generate_proofs(magnitude: u32) {
let n_validators = 2_u32.pow(magnitude) as usize;
let pov_block =
PoVBlock { block_data: BlockData(vec![2; n_validators / KEY_INDEX_NIBBLE_SIZE]) };

let available_data = AvailableData { pov_block, omitted_validation: Default::default() };

let chunks = obtain_chunks(10, &available_data).unwrap();
let chunks = obtain_chunks(magnitude as usize, &available_data).unwrap();

assert_eq!(chunks.len(), 10);
assert_eq!(chunks.len() as u32, magnitude);

let branches = branches(chunks.as_ref());
let root = branches.root();

let proofs: Vec<_> = branches.map(|(proof, _)| proof).collect();
assert_eq!(proofs.len() as u32, magnitude);
for (i, proof) in proofs.into_iter().enumerate() {
let encode = Encode::encode(&proof);
let decode = Decode::decode(&mut &encode[..]).unwrap();
assert_eq!(proof, decode);
assert_eq!(encode, Encode::encode(&decode));

assert_eq!(proofs.len(), 10);
assert_eq!(branch_hash(&root, &proof, i).unwrap(), BlakeTwo256::hash(&chunks[i]));
}
}

for (i, proof) in proofs.into_iter().enumerate() {
assert_eq!(
branch_hash(&root, &proof.as_vec(), i).unwrap(),
BlakeTwo256::hash(&chunks[i])
);
#[test]
fn roundtrip_proof_encoding() {
	// Exercise proof generation, SCALE encode/decode roundtripping, and
	// branch-hash verification across a range of trie magnitudes.
	(2u32..16).for_each(generate_trie_and_generate_proofs);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -363,7 +363,7 @@ impl RunningTask {

fn validate_chunk(&self, validator: &AuthorityDiscoveryId, chunk: &ErasureChunk) -> bool {
let anticipated_hash =
match branch_hash(&self.erasure_root, &chunk.proof_as_vec(), chunk.index.0 as usize) {
match branch_hash(&self.erasure_root, chunk.proof(), chunk.index.0 as usize) {
Ok(hash) => hash,
Err(e) => {
tracing::warn!(
Expand Down
8 changes: 3 additions & 5 deletions node/network/availability-recovery/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -363,11 +363,9 @@ impl RequestChunksPhase {

let validator_index = chunk.index;

if let Ok(anticipated_hash) = branch_hash(
&params.erasure_root,
&chunk.proof_as_vec(),
chunk.index.0 as usize,
) {
if let Ok(anticipated_hash) =
branch_hash(&params.erasure_root, chunk.proof(), chunk.index.0 as usize)
{
let erasure_chunk_hash = BlakeTwo256::hash(&chunk.chunk);

if erasure_chunk_hash != anticipated_hash {
Expand Down
3 changes: 3 additions & 0 deletions node/primitives/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -23,3 +23,6 @@ serde = { version = "1.0.130", features = ["derive"] }

[target.'cfg(not(target_os = "unknown"))'.dependencies]
zstd = "0.6.0"

[dev-dependencies]
polkadot-erasure-coding = { path = "../../erasure-coding" }
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

is that still needed?

10 changes: 5 additions & 5 deletions node/primitives/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -301,8 +301,8 @@ pub struct Proof(BoundedVec<BoundedVec<u8, 1, MERKLE_NODE_MAX_SIZE>, 1, MERKLE_P

impl Proof {
/// Returns an iterator over the proof's merkle nodes as byte slices.
pub fn as_vec(&self) -> Vec<Vec<u8>> {
self.0.as_vec().iter().map(|v| v.as_vec().clone()).collect()
pub fn iter(&self) -> impl Iterator<Item = &[u8]> {
self.0.iter().map(|v| v.as_slice())
}

/// Construct an invalid dummy proof
Expand Down Expand Up @@ -365,7 +365,7 @@ impl Encode for Proof {
}

fn using_encoded<R, F: FnOnce(&[u8]) -> R>(&self, f: F) -> R {
let temp = self.as_vec();
let temp = self.0.iter().map(|v| v.as_vec()).collect::<Vec<_>>();
temp.using_encoded(f)
}
}
Expand Down Expand Up @@ -404,8 +404,8 @@ pub struct ErasureChunk {

impl ErasureChunk {
/// Returns a reference to the chunk's merkle proof.
pub fn proof_as_vec(&self) -> Vec<Vec<u8>> {
self.proof.as_vec()
pub fn proof(&self) -> &Proof {
&self.proof
}
}

Expand Down