Skip to content
This repository has been archived by the owner on Nov 15, 2023. It is now read-only.

Commit

Permalink
Add tests and modify as_vec implementation
Browse files Browse the repository at this point in the history
  • Loading branch information
Lldenaurois committed Aug 24, 2021
1 parent d4576bc commit e13242b
Show file tree
Hide file tree
Showing 7 changed files with 60 additions and 17 deletions.
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

11 changes: 4 additions & 7 deletions erasure-coding/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -298,10 +298,10 @@ where

/// Verify a merkle branch, yielding the chunk hash meant to be present at that
/// index.
pub fn branch_hash(root: &H256, branch_nodes: &[Vec<u8>], index: usize) -> Result<H256, Error> {
pub fn branch_hash(root: &H256, branch_nodes: &Proof, index: usize) -> Result<H256, Error> {
let mut trie_storage: MemoryDB<Blake2Hasher> = MemoryDB::default();
for node in branch_nodes.iter() {
(&mut trie_storage as &mut trie::HashDB<_>).insert(EMPTY_PREFIX, node.as_slice());
for node in branch_nodes.as_vec().iter() {
(&mut trie_storage as &mut trie::HashDB<_>).insert(EMPTY_PREFIX, node);
}

let trie = TrieDB::new(&trie_storage, &root).map_err(|_| Error::InvalidBranchProof)?;
Expand Down Expand Up @@ -422,10 +422,7 @@ mod tests {
assert_eq!(proofs.len(), 10);

for (i, proof) in proofs.into_iter().enumerate() {
assert_eq!(
branch_hash(&root, &proof.as_vec(), i).unwrap(),
BlakeTwo256::hash(&chunks[i])
);
assert_eq!(branch_hash(&root, &proof, i).unwrap(), BlakeTwo256::hash(&chunks[i]));
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -363,7 +363,7 @@ impl RunningTask {

fn validate_chunk(&self, validator: &AuthorityDiscoveryId, chunk: &ErasureChunk) -> bool {
let anticipated_hash =
match branch_hash(&self.erasure_root, &chunk.proof_as_vec(), chunk.index.0 as usize) {
match branch_hash(&self.erasure_root, chunk.proof(), chunk.index.0 as usize) {
Ok(hash) => hash,
Err(e) => {
tracing::warn!(
Expand Down
8 changes: 3 additions & 5 deletions node/network/availability-recovery/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -301,11 +301,9 @@ impl RequestChunksPhase {

let validator_index = chunk.index;

if let Ok(anticipated_hash) = branch_hash(
&params.erasure_root,
&chunk.proof_as_vec(),
chunk.index.0 as usize,
) {
if let Ok(anticipated_hash) =
branch_hash(&params.erasure_root, chunk.proof(), chunk.index.0 as usize)
{
let erasure_chunk_hash = BlakeTwo256::hash(&chunk.chunk);

if erasure_chunk_hash != anticipated_hash {
Expand Down
3 changes: 3 additions & 0 deletions node/primitives/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -26,3 +26,6 @@ serde = { version = "1.0.123", features = ["derive"] }

[target.'cfg(not(target_os = "unknown"))'.dependencies]
zstd = "0.6.0"

[dev-dependencies]
polkadot-erasure-coding = { path = "../../erasure-coding" }
11 changes: 7 additions & 4 deletions node/primitives/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,9 @@ pub use polkadot_parachain::primitives::BlockData;

pub mod approval;

#[cfg(test)]
mod tests;

/// Disputes related types.
pub mod disputes;
pub use disputes::{
Expand Down Expand Up @@ -299,8 +302,8 @@ pub struct Proof(BoundedVec<BoundedVec<u8, 1, MERKLE_NODE_MAX_SIZE>, 1, MERKLE_P

impl Proof {
/// This function allows to convert back to the standard nested Vec format
pub fn as_vec(&self) -> Vec<Vec<u8>> {
self.0.as_vec().iter().map(|v| v.as_vec().clone()).collect()
pub fn as_vec(&self) -> Vec<&[u8]> {
self.0.iter().map(|v| v.as_slice()).collect::<Vec<&[u8]>>()
}
}

Expand Down Expand Up @@ -395,8 +398,8 @@ pub struct ErasureChunk {

impl ErasureChunk {
/// Access the chunk's bounded Merkle proof by reference.
pub fn proof_as_vec(&self) -> Vec<Vec<u8>> {
self.proof.as_vec()
pub fn proof(&self) -> &Proof {
&self.proof
}
}

Expand Down
41 changes: 41 additions & 0 deletions node/primitives/src/tests.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
use crate::{BlakeTwo256, BlockData, Proof};
use parity_scale_codec::{Decode, Encode};
use polkadot_erasure_coding::{branch_hash, branches, obtain_chunks_v0};
use polkadot_primitives::v0::{AvailableData, HashT, PoVBlock};

/// In order to adequately compute the number of entries in the Merkle
/// trie, we must account for the fixed 16-ary trie structure: each key
/// nibble selects one of 16 children, so keys are consumed 4 bits at a time.
const KEY_INDEX_NIBBLE_SIZE: usize = 4;

fn generate_trie_and_generate_proofs(magnitude: u32) {
let n_validators = 2_u32.pow(magnitude) as usize;
let pov_block =
PoVBlock { block_data: BlockData(vec![2; n_validators / KEY_INDEX_NIBBLE_SIZE]) };

let available_data = AvailableData { pov_block, omitted_validation: Default::default() };

let chunks = obtain_chunks_v0(magnitude as usize, &available_data).unwrap();

assert_eq!(chunks.len() as u32, magnitude);

let branches = branches(chunks.as_ref());
let root = branches.root();

let proofs: Vec<_> = branches.map(|(proof, _)| proof).collect();
assert_eq!(proofs.len() as u32, magnitude);
for (i, proof) in proofs.into_iter().enumerate() {
let encode = Encode::encode(&proof);
let decode = Decode::decode(&mut &encode[..]).unwrap();
assert_eq!(proof, decode);
assert_eq!(encode, Encode::encode(&decode));

assert_eq!(branch_hash(&root, &proof, i).unwrap(), BlakeTwo256::hash(&chunks[i]));
}
}

/// Round-trip SCALE encoding of Merkle branch proofs across a range of
/// validator-set magnitudes (exponents 2 through 15).
#[test]
fn roundtrip_proof_encoding() {
	(2u32..16).for_each(generate_trie_and_generate_proofs);
}

0 comments on commit e13242b

Please sign in to comment.