From 190a70398e6f92956e80c18d124b071355a8a712 Mon Sep 17 00:00:00 2001
From: NikVolf
Date: Sat, 9 Nov 2019 14:50:21 +0100
Subject: [PATCH] randomized tests

---
 Cargo.lock                          |  13 +
 core/client/db/Cargo.toml           |   1 +
 core/client/db/src/storage_cache.rs | 581 +++++++++++++++++++++++++++-
 3 files changed, 582 insertions(+), 13 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 8c215ddd12611..0a3acce76260f 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -3456,6 +3456,17 @@ name = "quick-error"
 version = "1.2.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
+[[package]]
+name = "quickcheck"
+version = "0.8.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "env_logger 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "quickcheck"
 version = "0.9.0"
@@ -5320,6 +5331,7 @@ dependencies = [
  "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
  "parity-scale-codec 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "parking_lot 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quickcheck 0.8.5 (registry+https://github.com/rust-lang/crates.io-index)",
  "sr-primitives 2.0.0",
  "substrate-client 2.0.0",
  "substrate-consensus-common 2.0.0",
@@ -7695,6 +7707,7 @@ dependencies = [
 "checksum pwasm-utils 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1d473123ba135028544926f7aa6f34058d8bc6f120c4fcd3777f84af724280b3"
 "checksum quick-error 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "5fb6ccf8db7bbcb9c2eae558db5ab4f3da1c2a87e4e597ed394726bc8ea6ca1d"
 "checksum quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9274b940887ce9addde99c4eee6b5c44cc494b182b97e73dc8ffdcb3397fd3f0"
+"checksum quickcheck 0.8.5 (registry+https://github.com/rust-lang/crates.io-index)" = "9c35d9c36a562f37eca96e79f66d5fd56eefbc22560dacc4a864cabd2d277456"
 "checksum quickcheck 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d5ca504a2fdaa08d3517f442fbbba91ac24d1ec4c51ea68688a038765e3b2662"
 "checksum quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)" = "6ce23b6b870e8f94f81fb0a363d65d86675884b34a09043c81e5562f11c1f8e1"
 "checksum quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "053a8c8bcc71fcce321828dc897a98ab9760bef03a4fc36693c231e5b3216cfe"
diff --git a/core/client/db/Cargo.toml b/core/client/db/Cargo.toml
index 7d88c39d7fd7e..a1d9f2395c536 100644
--- a/core/client/db/Cargo.toml
+++ b/core/client/db/Cargo.toml
@@ -28,6 +28,7 @@ header_metadata = { package = "substrate-header-metadata", path = "../header-met
 substrate-keyring = { path = "../../keyring" }
 test-client = { package = "substrate-test-runtime-client", path = "../../test-runtime/client" }
 env_logger = "0.7.0"
+quickcheck = "0.8"
 
 [features]
 default = []
diff --git a/core/client/db/src/storage_cache.rs b/core/client/db/src/storage_cache.rs
index e1ad6f493aa7b..38b379753403d 100644
--- a/core/client/db/src/storage_cache.rs
+++ b/core/client/db/src/storage_cache.rs
@@ -27,6 +27,7 @@ use state_machine::{backend::Backend as StateBackend, TrieBackend};
 use log::trace;
 use super::{StorageCollection, ChildStorageCollection};
 use std::hash::Hash as StdHash;
+
 const STATE_CACHE_BLOCKS: usize = 12;
 
 type StorageKey = Vec<u8>;
@@ -154,6 +155,7 @@ impl<B: BlockT, H: Hasher> Cache<B, H> {
     }
 
     /// Synchronize the shared cache with the best block state.
+    ///
     /// This function updates the shared cache by removing entries
     /// that are invalidated by chain reorganization. It should be called
     /// externally when chain reorg happens without importing a new block.
@@ -164,7 +166,7 @@ impl<B: BlockT, H: Hasher> Cache<B, H> {
         let mut clear = false;
         for block in enacted {
             clear = clear || {
-                if let Some(ref mut m) = self.modifications.iter_mut().find(|m| &m.hash == block) {
+                if let Some(m) = self.modifications.iter_mut().find(|m| &m.hash == block) {
                     trace!("Reverting enacted block {:?}", block);
                     m.is_canon = true;
                     for a in &m.storage {
@@ -184,7 +186,7 @@
 
         for block in retracted {
             clear = clear || {
-                if let Some(ref mut m) = self.modifications.iter_mut().find(|m| &m.hash == block) {
+                if let Some(m) = self.modifications.iter_mut().find(|m| &m.hash == block) {
                     trace!("Retracting block {:?}", block);
                     m.is_canon = false;
                     for a in &m.storage {
@@ -252,11 +254,17 @@ struct BlockChanges<B: Header> {
 
 /// Cached values specific to a state.
 struct LocalCache<H: Hasher> {
-    /// Storage cache. `None` indicates that key is known to be missing.
+    /// Storage cache.
+    ///
+    /// `None` indicates that key is known to be missing.
     storage: HashMap<StorageKey, Option<StorageValue>>,
-    /// Storage hashes cache. `None` indicates that key is known to be missing.
+    /// Storage hashes cache.
+    ///
+    /// `None` indicates that key is known to be missing.
     hashes: HashMap<StorageKey, Option<H::Out>>,
-    /// Child storage cache. `None` indicates that key is known to be missing.
+    /// Child storage cache.
+    ///
+    /// `None` indicates that key is known to be missing.
     child_storage: HashMap<ChildStorageKey, Option<StorageValue>>,
 }
 
@@ -271,9 +279,11 @@ pub struct CacheChanges<H: Hasher, B: BlockT> {
     pub parent_hash: Option<B::Hash>,
 }
 
-/// State abstraction.
+/// State cache abstraction.
+///
 /// Manages shared global state cache which reflects the canonical
 /// state as it is on the disk.
+///
 /// A instance of `CachingState` may be created as canonical or not.
 /// For canonical instances local cache is accumulated and applied
 /// in `sync_cache` along with the change overlay.
@@ -294,6 +304,7 @@ impl<H: Hasher, S: StateBackend<H>, B: BlockT> std::fmt::Debug for CachingState<
 impl<H: Hasher, B: BlockT> CacheChanges<H, B> {
     /// Propagate local cache into the shared cache and synchronize
     /// the shared cache with the best block state.
+    ///
     /// This function updates the shared cache by removing entries
     /// that are invalidated by chain reorganization. `sync_cache`
     /// should be called after the block has been committed and the
@@ -381,7 +392,7 @@ impl<H: Hasher, B: BlockT> CacheChanges<H, B> {
         };
         let insert_at = cache.modifications.iter()
             .enumerate()
-            .find(|&(_, m)| m.number < *number)
+            .find(|(_, m)| m.number < *number)
             .map(|(i, _)| i);
         trace!("Inserting modifications at {:?}", insert_at);
         if let Some(insert_at) = insert_at {
@@ -534,6 +545,10 @@ impl<H: Hasher, S: StateBackend<H>, B: BlockT> StateBackend<H> for CachingState<
         self.state.exists_child_storage(storage_key, key)
     }
 
+    fn for_keys_in_child_storage<F: FnMut(&[u8])>(&self, storage_key: &[u8], f: F) {
+        self.state.for_keys_in_child_storage(storage_key, f)
+    }
+
     fn for_keys_with_prefix<F: FnMut(&[u8])>(&self, prefix: &[u8], f: F) {
         self.state.for_keys_with_prefix(prefix, f)
     }
@@ -542,10 +557,6 @@ impl<H: Hasher, S: StateBackend<H>, B: BlockT> StateBackend<H> for CachingState<
         self.state.for_key_values_with_prefix(prefix, f)
     }
 
-    fn for_keys_in_child_storage<F: FnMut(&[u8])>(&self, storage_key: &[u8], f: F) {
-        self.state.for_keys_in_child_storage(storage_key, f)
-    }
-
     fn for_child_keys_with_prefix<F: FnMut(&[u8])>(&self, storage_key: &[u8], prefix: &[u8], f: F) {
         self.state.for_child_keys_with_prefix(storage_key, prefix, f)
     }
@@ -591,6 +602,7 @@ mod tests {
     use primitives::Blake2Hasher;
 
     type Block = RawBlock<ExtrinsicWrapper<u32>>;
+
     #[test]
     fn smoke() {
         //init_log();
@@ -642,8 +654,8 @@ mod tests {
         // blocks [ 3b(c) 3a 2a 2b(c) 1b 1a 0 ]
         let mut s = CachingState::new(InMemory::<Blake2Hasher>::default(), shared.clone(), Some(h2b.clone()));
         s.cache.sync_cache(
-            &[h1b.clone(), h2b.clone(), h3b.clone()],
-            &[h1a.clone(), h2a.clone(), h3a.clone()],
+            &[h1b, h2b, h3b],
+            &[h1a, h2a, h3a],
             vec![],
             vec![],
             Some(h3b.clone()),
@@ -654,6 +666,62 @@
         assert!(s.storage(&key).unwrap().is_none());
     }
 
+    #[test]
+    fn simple_fork() {
+        //init_log();
+        let root_parent = H256::random();
+        let key = H256::random()[..].to_vec();
+        let h1 = H256::random();
+        let h2a = H256::random();
+        let h2b = H256::random();
+
+        let shared = new_shared_cache::<Block, Blake2Hasher>(256*1024, (0,1));
+
+        let mut s = CachingState::new(InMemory::<Blake2Hasher>::default(), shared.clone(), Some(root_parent.clone()));
+        s.cache.sync_cache(&[], &[], vec![(key.clone(), Some(vec![2]))], vec![], Some(h1.clone()), Some(1), || true);
+
+        let mut s = CachingState::new(InMemory::<Blake2Hasher>::default(), shared.clone(), Some(h1.clone()));
+        s.cache.sync_cache(&[], &[], vec![(key.clone(), Some(vec![2]))], vec![], Some(h2a.clone()), Some(2), || true);
+
+        let mut s = CachingState::new(InMemory::<Blake2Hasher>::default(), shared.clone(), Some(h1.clone()));
+        s.cache.sync_cache(&[], &[], vec![(key.clone(), Some(vec![3]))], vec![], Some(h2b.clone()), Some(2), || false);
+
+        let s = CachingState::new(InMemory::<Blake2Hasher>::default(), shared.clone(), Some(h2a.clone()));
+        assert_eq!(s.storage(&key).unwrap().unwrap(), vec![2]);
+    }
+
+    #[test]
+    fn double_fork() {
+        //init_log();
+        let root_parent = H256::random();
+        let key = H256::random()[..].to_vec();
+        let h1 = H256::random();
+        let h2a = H256::random();
+        let h2b = H256::random();
+        let h3a = H256::random();
+        let h3b = H256::random();
+
+        let shared = new_shared_cache::<Block, Blake2Hasher>(256*1024, (0,1));
+
+        let mut s = CachingState::new(InMemory::<Blake2Hasher>::default(), shared.clone(), Some(root_parent.clone()));
+        s.cache.sync_cache(&[], &[], vec![], vec![], Some(h1.clone()), Some(1), || true);
+
+        let mut s = CachingState::new(InMemory::<Blake2Hasher>::default(), shared.clone(), Some(h1.clone()));
+        s.cache.sync_cache(&[], &[], vec![], vec![], Some(h2a.clone()), Some(2), || true);
+
+        let mut s = CachingState::new(InMemory::<Blake2Hasher>::default(), shared.clone(), Some(h2a.clone()));
+        s.cache.sync_cache(&[], &[], vec![(key.clone(), Some(vec![2]))], vec![], Some(h3a.clone()), Some(3), || true);
+
+        let mut s = CachingState::new(InMemory::<Blake2Hasher>::default(), shared.clone(), Some(h1.clone()));
+        s.cache.sync_cache(&[], &[], vec![], vec![], Some(h2b.clone()), Some(2), || false);
+
+        let mut s = CachingState::new(InMemory::<Blake2Hasher>::default(), shared.clone(), Some(h2b.clone()));
+        s.cache.sync_cache(&[], &[], vec![(key.clone(), Some(vec![3]))], vec![], Some(h3b.clone()), Some(3), || false);
+
+        let s = CachingState::new(InMemory::<Blake2Hasher>::default(), shared.clone(), Some(h3a.clone()));
+        assert_eq!(s.storage(&key).unwrap().unwrap(), vec![2]);
+    }
+
     #[test]
     fn should_track_used_size_correctly() {
         let root_parent = H256::random();
@@ -765,3 +833,490 @@ mod tests {
         assert_eq!(s.storage(&key).unwrap(), None);
     }
 }
+
+#[cfg(test)]
+mod qc {
+    use std::collections::{HashMap, hash_map::Entry};
+
+    use quickcheck::{quickcheck, TestResult, Arbitrary};
+
+    use super::*;
+    use sr_primitives::testing::{H256, Block as RawBlock, ExtrinsicWrapper};
+    use state_machine::backend::InMemory;
+    use primitives::Blake2Hasher;
+
+    type Block = RawBlock<ExtrinsicWrapper<u32>>;
+
+    type KeySet = Vec<(Vec<u8>, Option<Vec<u8>>)>;
+
+    type KeyMap = HashMap<Vec<u8>, Option<Vec<u8>>>;
+
+    #[derive(Debug, Clone)]
+    struct Node {
+        hash: H256,
+        parent: H256,
+        state: KeyMap,
+        changes: KeySet,
+    }
+
+    impl Node {
+        fn new_next(&self, hash: H256, changes: KeySet) -> Self {
+            let mut state = self.state.clone();
+
+            for (k, v) in self.state.iter() { state.insert(k.clone(), v.clone()); }
+            for (k, v) in changes.clone().into_iter() { state.insert(k, v); }
+
+            Self {
+                hash,
+                parent: self.hash,
+                changes,
+                state,
+            }
+        }
+
+        fn new(hash: H256, parent: H256, changes: KeySet) -> Self {
+            let mut state = KeyMap::new();
+
+            for (k, v) in changes.clone().into_iter() { state.insert(k, v); }
+
+            Self {
+                hash,
+                parent,
+                state,
+                changes,
+            }
+        }
+
+        fn purge(&mut self, other_changes: &KeySet) {
+            for (k, _) in other_changes.iter() {
+                self.state.remove(k);
+            }
+        }
+    }
+
+    #[derive(Debug, Clone)]
+    enum Action {
+        Next { hash: H256, changes: KeySet },
+        Fork { depth: usize, hash: H256, changes: KeySet },
+        Reorg { depth: usize, hash: H256 },
+    }
+
+    impl Arbitrary for Action {
+        fn arbitrary<G: quickcheck::Gen>(gen: &mut G) -> Self {
+            let path = gen.next_u32() as u8;
+            let mut buf = [0u8; 32];
+
+            match path {
+                0..=175 => {
+                    gen.fill_bytes(&mut buf[..]);
+                    Action::Next {
+                        hash: H256::from(&buf),
+                        changes: {
+                            let mut set = Vec::new();
+                            for _ in 0..gen.next_u32()/(64*256*256*256) {
+                                set.push((vec![gen.next_u32() as u8], Some(vec![gen.next_u32() as u8])));
+                            }
+                            set
+                        }
+                    }
+                },
+                175..=220 => {
+                    gen.fill_bytes(&mut buf[..]);
+                    Action::Fork {
+                        hash: H256::from(&buf),
+                        depth: ((gen.next_u32() as u8) / 64) as usize,
+                        changes: {
+                            let mut set = Vec::new();
+                            for _ in 0..gen.next_u32()/(64*256*256*256) {
+                                set.push((vec![gen.next_u32() as u8], Some(vec![gen.next_u32() as u8])));
+                            }
+                            set
+                        }
+                    }
+                }
+                _ => {
+                    gen.fill_bytes(&mut buf[..]);
+                    Action::Reorg {
+                        hash: H256::from(&buf),
+                        depth: ((gen.next_u32() as u8) / 64) as usize,
+                    }
+                }
+            }
+        }
+    }
+
+    struct Mutator {
+        shared: SharedCache<Block, Blake2Hasher>,
+        canon: Vec<Node>,
+        forks: HashMap<H256, Vec<Node>>,
+    }
+
+    impl Mutator {
+        fn new_empty() -> Self {
+            let shared = new_shared_cache::<Block, Blake2Hasher>(256*1024, (0,1));
+
+            Self {
+                shared,
+                canon: vec![],
+                forks: HashMap::new(),
+            }
+        }
+
+        fn head_state(&self, hash: H256) -> CachingState<Blake2Hasher, InMemory<Blake2Hasher>, Block> {
+            CachingState::new(
+                InMemory::<Blake2Hasher>::default(),
+                self.shared.clone(),
+                Some(hash)
+            )
+        }
+
+        fn canon_head_state(&self) -> CachingState<Blake2Hasher, InMemory<Blake2Hasher>, Block> {
+            self.head_state(self.canon.last().expect("Expected to be one commit").hash)
+        }
+
+        fn mutate_static(&mut self, action: Action) -> CachingState<Blake2Hasher, InMemory<Blake2Hasher>, Block> {
+            self.mutate(action).expect("Expected to provide only valid actions to the mutate_static")
+        }
+
+        fn canon_len(&self) -> usize {
+            return self.canon.len();
+        }
+
+        fn head_storage_ref(&self) -> &KeyMap {
+            &self.canon.last().expect("Expected to be one commit").state
+        }
+
+        fn key_permutations() -> Vec<Vec<u8>> {
+            (0u8..255).map(|x| vec![x]).collect()
+        }
+
+        fn mutate(&mut self, action: Action) -> Result<CachingState<Blake2Hasher, InMemory<Blake2Hasher>, Block>, ()> {
+            let state = match action {
+                Action::Fork { depth, hash, changes } => {
+                    let pos = self.canon.len() as isize - depth as isize;
+                    if pos < 0 || self.canon.len() == 0 || pos >= (self.canon.len()-1) as isize
+                    // no fork on top also, thus len-1
+                    {
+                        return Err(());
+                    }
+
+                    let pos = pos as usize;
+
+                    let fork_at = self.canon[pos].hash;
+
+                    let (total_h, parent) = match self.forks.entry(fork_at) {
+                        Entry::Occupied(occupied) => {
+                            let chain = occupied.into_mut();
+                            let parent = chain.last().expect("No empty forks are ever created").clone();
+                            let mut node = parent.new_next(hash, changes.clone());
+
+                            for earlier in chain.iter() {
+                                node.purge(&earlier.changes.clone());
+                            }
+
+                            chain.push(node);
+
+                            (pos + chain.len(), parent.hash)
+                        },
+                        Entry::Vacant(vacant) => {
+                            let canon_parent = &self.canon[pos];
+                            vacant.insert(vec![canon_parent.new_next(hash, changes.clone())]);
+
+                            (pos + 1, fork_at)
+                        }
+                    };
+
+                    let mut state = CachingState::new(
+                        InMemory::<Blake2Hasher>::default(),
+                        self.shared.clone(),
+                        Some(parent)
+                    );
+
+                    state.cache.sync_cache(
+                        &[],
+                        &[],
+                        changes,
+                        vec![],
+                        Some(hash),
+                        Some(total_h as u64),
+                        || false,
+                    );
+
+                    state
+                },
+                Action::Next { hash, changes } => {
+                    let (next, parent_hash) = match self.canon.last() {
+                        None => {
+                            let parent_hash = H256::from(&[0u8; 32]);
+                            (Node::new(hash, parent_hash, changes.clone()), parent_hash)
+                        },
+                        Some(parent) => {
+                            (parent.new_next(hash, changes.clone()), parent.hash)
+                        }
+                    };
+
+                    // delete cache entries for earlier
+                    for node in self.canon.iter_mut() {
+                        node.purge(&next.changes);
+                        if let Some(fork) = self.forks.get_mut(&node.hash) {
+                            for node in fork.iter_mut() {
+                                node.purge(&next.changes);
+                            }
+                        }
+                    }
+
+                    let mut state = CachingState::new(
+                        InMemory::<Blake2Hasher>::default(),
+                        self.shared.clone(),
+                        Some(parent_hash)
+                    );
+
+                    state.cache.sync_cache(
+                        &[],
+                        &[],
+                        next.changes.clone(),
+                        vec![],
+                        Some(hash),
+                        Some(self.canon.len() as u64 + 1),
+                        || true,
+                    );
+
+                    self.canon.push(next);
+
+                    state
+                },
+                Action::Reorg { depth, hash } => {
+                    let pos = self.canon.len() as isize - depth as isize;
+                    if pos < 0 || pos+1 >= self.canon.len() as isize { return Err(()); }
+                    let fork_at = self.canon[pos as usize].hash;
+                    let pos = pos as usize;
+
+                    match self.forks.get_mut(&fork_at) {
+                        Some(chain) => {
+                            let mut new_fork = self.canon.drain(pos+1..).collect::<Vec<Node>>();
+
+                            let retracted: Vec<H256> = new_fork.iter().map(|node| node.hash).collect();
+                            let enacted: Vec<H256> = chain.iter().map(|node| node.hash).collect();
+
+                            std::mem::swap(chain, &mut new_fork);
+
+                            let mut node = new_fork.last().map(
+                                |node| node.new_next(hash, vec![])
+                            ).expect("No empty fork ever created!");
+
+                            for invalidators in chain.iter().chain(new_fork.iter()) {
+                                node.purge(&invalidators.changes);
+                            }
+
+                            self.canon.extend(new_fork.into_iter());
+
+                            self.canon.push(node);
+
+                            let mut state = CachingState::new(
+                                InMemory::<Blake2Hasher>::default(),
+                                self.shared.clone(),
+                                Some(fork_at)
+                            );
+
+                            let height = pos as u64 + enacted.len() as u64 + 2;
+                            state.cache.sync_cache(
+                                &enacted[..],
+                                &retracted[..],
+                                vec![],
+                                vec![],
+                                Some(hash),
+                                Some(height),
+                                || true,
+                            );
+
+                            state
+                        }
+                        None => {
+                            return Err(()); // no reorg without a fork atm!
+                        },
+                    }
+                }
+            };
+
+            Ok(state)
+        }
+    }
+
+    #[test]
+    fn smoke() {
+        let key = H256::random()[..].to_vec();
+        let h0 = H256::random();
+        let h1a = H256::random();
+        let h1b = H256::random();
+        let h2a = H256::random();
+        let h2b = H256::random();
+        let h3a = H256::random();
+        let h3b = H256::random();
+
+        let mut mutator = Mutator::new_empty();
+        mutator.mutate_static(Action::Next { hash: h0, changes: vec![(key.clone(), Some(vec![2]))] });
+        mutator.mutate_static(Action::Next { hash: h1a, changes: vec![] });
+        mutator.mutate_static(Action::Fork { depth: 1, hash: h1b, changes: vec![(key.clone(), Some(vec![3]))] });
+        mutator.mutate_static(Action::Fork { depth: 1, hash: h2b, changes: vec![(key.clone(), Some(vec![4]))] });
+        mutator.mutate_static(Action::Next { hash: h2a, changes: vec![(key.clone(), Some(vec![5]))] });
+        mutator.mutate_static(Action::Next { hash: h3a, changes: vec![] });
+
+        assert_eq!(mutator.head_state(h3a).storage(&key).unwrap().expect("there should be a value"), vec![5]);
+        assert!(mutator.head_state(h1a).storage(&key).unwrap().is_none());
+        assert!(mutator.head_state(h2b).storage(&key).unwrap().is_none());
+        assert!(mutator.head_state(h1b).storage(&key).unwrap().is_none());
+
+        mutator.mutate_static(Action::Reorg { depth: 3, hash: h3b });
+        assert!(mutator.head_state(h3a).storage(&key).unwrap().is_none());
+    }
+
+    fn is_head_match(mutator: &Mutator) -> bool {
+        let head_state = mutator.canon_head_state();
+
+        for key in Mutator::key_permutations() {
+            match (head_state.storage(&key).unwrap(), mutator.head_storage_ref().get(&key)) {
+                (Some(x), Some(y)) => {
+                    if Some(&x) != y.as_ref() {
+                        eprintln!("{:?} != {:?}", x, y);
+                        return false;
+                    }
+                },
+                (None, Some(y)) => {
+                    eprintln!(" != {:?}", y);
+                    return false;
+                },
+                (Some(x), None) => {
+                    eprintln!("{:?} != ", x);
+                    return false;
+                },
+                _ => continue,
+            }
+        }
+        true
+    }
+
+    fn is_canon_match(mutator: &Mutator) -> bool {
+        for node in mutator.canon.iter() {
+            let head_state = mutator.head_state(node.hash);
+            for key in Mutator::key_permutations() {
+                match (head_state.storage(&key).unwrap(), node.state.get(&key)) {
+                    (Some(x), Some(y)) => {
+                        if Some(&x) != y.as_ref() {
+                            eprintln!("at [{}]: {:?} != {:?}", node.hash, x, y);
+                            return false;
+                        }
+                    },
+                    (None, Some(y)) => {
+                        eprintln!("at [{}]: != {:?}", node.hash, y);
+                        return false;
+                    },
+                    (Some(x), None) => {
+                        eprintln!("at [{}]: {:?} != ", node.hash, x);
+                        return false;
+                    },
+                    _ => continue,
+                }
+            }
+        }
+        true
+    }
+
+    #[test]
+    fn reorg() {
+        let key = H256::random()[..].to_vec();
+        let h0 = H256::random();
+        let h1 = H256::random();
+        let h2 = H256::random();
+        let h1b = H256::random();
+        let h2b = H256::random();
+
+        let mut mutator = Mutator::new_empty();
+        mutator.mutate_static(Action::Next { hash: h0, changes: vec![] });
+        mutator.mutate_static(Action::Next { hash: h1, changes: vec![] });
+        mutator.mutate_static(Action::Next { hash: h2, changes: vec![(key.clone(), Some(vec![2]))] });
+        mutator.mutate_static(Action::Fork { depth: 2, hash: h1b, changes: vec![(key.clone(), Some(vec![3]))] });
+        mutator.mutate_static(Action::Reorg { depth: 2, hash: h2b });
+
+        assert!(is_head_match(&mutator))
+    }
+
+    fn key(k: u8) -> Vec<u8> { vec![k] }
+    fn val(v: u8) -> Option<Vec<u8>> { Some(vec![v]) }
+    fn keyval(k: u8, v: u8) -> KeySet { vec![(key(k), val(v))] }
+
+    #[test]
+    fn reorg2() {
+        let h0 = H256::random();
+        let h1a = H256::random();
+        let h1b = H256::random();
+        let h2b = H256::random();
+        let h2a = H256::random();
+
+        let mut mutator = Mutator::new_empty();
+        mutator.mutate_static(Action::Next { hash: h0, changes: keyval(1, 1) });
+        mutator.mutate_static(Action::Fork { depth: 1, hash: h1b, changes: keyval(2, 2) });
+
+        mutator.mutate_static(Action::Next { hash: h1a, changes: keyval(3, 3) });
+        mutator.mutate_static(Action::Next { hash: h2a, changes: keyval(4, 4) });
+        mutator.mutate_static(Action::Reorg { depth: 3, hash: h2b });
+
+        assert!(is_head_match(&mutator))
+    }
+
+    #[test]
+    fn fork2() {
+        let h1 = H256::random();
+        let h2a = H256::random();
+        let h2b = H256::random();
+        let h3a = H256::random();
+        let h3b = H256::random();
+
+        let mut mutator = Mutator::new_empty();
+        mutator.mutate_static(Action::Next { hash: h1, changes: vec![] });
+        mutator.mutate_static(Action::Next { hash: h2a, changes: vec![] });
+        mutator.mutate_static(Action::Next { hash: h3a, changes: keyval(1, 1) });
+
+        mutator.mutate_static(Action::Fork { depth: 2, hash: h2b, changes: vec![] });
+        mutator.mutate_static(Action::Fork { depth: 2, hash: h3b, changes: keyval(1, 2) });
+
+        assert!(is_head_match(&mutator))
+    }
+
+    quickcheck! {
+        fn head_complete(actions: Vec<Action>) -> TestResult {
+            let mut mutator = Mutator::new_empty();
+
+            for action in actions.into_iter() {
+                if let Err(_) = mutator.mutate(action) {
+                    return TestResult::discard();
+                }
+            }
+
+            if mutator.canon_len() == 0 {
+                return TestResult::discard();
+            }
+
+            TestResult::from_bool(is_head_match(&mutator))
+        }
+
+        fn canon_complete(actions: Vec<Action>) -> TestResult {
+            if actions.len() > 9 {
+                return TestResult::discard();
+            }
+
+            let mut mutator = Mutator::new_empty();
+
+            for action in actions.into_iter() {
+                if let Err(_) = mutator.mutate(action) {
+                    return TestResult::discard();
+                }
+            }
+
+            if mutator.canon_len() == 0 {
+                return TestResult::discard();
+            }
+
+            TestResult::from_bool(is_canon_match(&mutator))
+        }
+    }
+}
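
A note on the `Arbitrary` distribution in the patch: `path` is a `u8`, so `Next` is drawn for 0..=175 (roughly 69% of actions), `Fork` for the remainder of 175..=220 (roughly 18%, since 175 is already claimed by the first arm), and `Reorg` for the rest (roughly 13%). The changeset length `gen.next_u32()/(64*256*256*256)` divides a `u32` by 2^30, giving 0 to 3 single-byte key/value pairs per action, and `depth` is `u8 / 64`, also 0 to 3. A deterministic companion test along these lines could sit next to `reorg2`/`fork2` in the same `qc` module; the sketch below is illustrative only (not part of the commit) and assumes the `Mutator`, `Action`, `keyval` and `is_head_match` items exactly as introduced by this patch, including the rule that forking directly on top of the head is rejected.

    #[test]
    fn replay_short_reorg() {
        // Two canonical blocks, a fork anchored two blocks below the head
        // (i.e. at the first block), then a reorg onto that fork; afterwards
        // the cached head must agree with the in-memory model.
        let h0 = H256::random();
        let h1 = H256::random();
        let h1b = H256::random();
        let h2b = H256::random();

        let mut mutator = Mutator::new_empty();
        mutator.mutate_static(Action::Next { hash: h0, changes: keyval(1, 1) });
        mutator.mutate_static(Action::Next { hash: h1, changes: keyval(2, 2) });
        // depth 2 forks below the current head; depth 1 (the head itself) would be rejected.
        mutator.mutate_static(Action::Fork { depth: 2, hash: h1b, changes: keyval(2, 3) });
        mutator.mutate_static(Action::Reorg { depth: 2, hash: h2b });

        assert!(is_head_match(&mutator));
    }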
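The two `quickcheck!` properties run with the crate's default number of generated cases under a plain `cargo test` in `core/client/db` (the package name there is `substrate-client-db`, if memory serves; check its `Cargo.toml`). The quickcheck crate also reads the `QUICKCHECK_TESTS` environment variable, so longer randomized runs can be requested without code changes when hunting for rare reorg interleavings.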