fix: better error handling #1751

Draft · wants to merge 1 commit into base: master
Changes from all commits
162 changes: 32 additions & 130 deletions storage-proofs-porep/src/stacked/vanilla/proof.rs
@@ -3,7 +3,7 @@ use std::io::{BufReader, BufWriter};
 use std::marker::PhantomData;
 use std::panic::panic_any;
 use std::path::{Path, PathBuf};
-use std::sync::{Arc, Mutex};
+use std::sync::Mutex;
 
 use anyhow::{ensure, Context};
 use bincode::deserialize;
@@ -56,17 +56,6 @@ use crate::{
 
 pub const TOTAL_PARENTS: usize = 37;
 
-struct InvalidEncodingProofCoordinate {
-    failure_detected: bool,
-    layer: usize,
-    challenge_index: usize,
-}
-
-struct InvalidChallengeCoordinate {
-    failure_detected: bool,
-    challenge_index: usize,
-}
-
 lazy_static! {
     /// Ensure that only one `TreeBuilder` or `ColumnTreeBuilder` uses the GPU at a time.
     /// Curently, this is accomplished by only instantiating at most one at a time.
@@ -275,22 +264,6 @@ impl<'a, Tree: 'static + MerkleTreeTrait, G: 'static + Hasher> StackedDrg<'a, Tr
                 .collect()
         };
 
-        // Error propagation mechanism for scoped parallel verification.
-        let invalid_encoding_proof = Arc::new(Mutex::new(InvalidEncodingProofCoordinate {
-            failure_detected: false,
-            layer: 0,
-            challenge_index: 0,
-        }));
-        let invalid_comm_d = Arc::new(Mutex::new(InvalidChallengeCoordinate {
-            failure_detected: false,
-            challenge_index: 0,
-        }));
-        let invalid_comm_r = Arc::new(Mutex::new(InvalidChallengeCoordinate {
-            failure_detected: false,
-            challenge_index: 0,
-        }));
-
-        THREAD_POOL.scoped(|scope| {
         // Stacked commitment specifics
         challenges
             .into_par_iter()
@@ -306,19 +279,11 @@ impl<'a, Tree: 'static + MerkleTreeTrait, G: 'static + Hasher> StackedDrg<'a, Tr
                         .expect("failed to get tree_d")
                         .gen_proof(challenge)?;
 
-                    let challenge_inner = challenge;
-                    let comm_d_proof_inner = comm_d_proof.clone();
-                    let invalid_comm_d_inner = Arc::clone(&invalid_comm_d);
-                    scope.execute(move || {
-                        if !comm_d_proof_inner.validate(challenge_inner) {
-                            let mut invalid = invalid_comm_d_inner.lock().expect("failed to get lock on invalid_comm_d_inner");
-                            *invalid = InvalidChallengeCoordinate {
-                                failure_detected: true,
-                                challenge_index,
-                            };
-                            error!("Invalid comm_d detected at challenge index {}", challenge_index);
-                        }
-                    });
+                    assert!(
+                        comm_d_proof.validate(challenge),
+                        "Invalid comm_d detected at challenge_index {}",
+                        challenge_index
+                    );
 
                     // Stacked replica column openings
                     let rcp = {
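
Note: the hunk above replaces the scoped-pool pattern (spawn a task per check, record failures through a shared `Arc<Mutex<_>>` flag, then `ensure!` on the flag afterwards) with a plain `assert!` inside the existing rayon parallel iterator, relying on rayon to propagate a worker panic to the caller. A minimal sketch of that shape, assuming only the `rayon` crate; `validate` and `check_all` are hypothetical stand-ins, not this crate's API:

// Minimal sketch, not this crate's code: a failed `assert!` panics inside a rayon
// worker, and rayon propagates that panic to the caller of the parallel iterator,
// so no shared failure flag is needed.
use rayon::prelude::*;

// Hypothetical stand-in for `comm_d_proof.validate(challenge)`.
fn validate(challenge: usize) -> bool {
    challenge % 2 == 0
}

fn check_all(challenges: &[usize]) {
    challenges
        .par_iter()
        .enumerate()
        .for_each(|(challenge_index, &challenge)| {
            assert!(
                validate(challenge),
                "Invalid comm_d detected at challenge_index {}",
                challenge_index
            );
        });
}

fn main() {
    check_all(&[0, 2, 4]); // all pass; returns normally
    // check_all(&[0, 3]); // would panic in a worker and unwind into main
}
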
@@ -357,23 +322,14 @@ impl<'a, Tree: 'static + MerkleTreeTrait, G: 'static + Hasher> StackedDrg<'a, Tr
 
                     // Final replica layer openings
                     trace!("final replica layer openings");
-                    let comm_r_last_proof = t_aux.tree_r_last.gen_cached_proof(
-                        challenge,
-                        Some(t_aux.tree_r_last_config_rows_to_discard),
-                    )?;
-
-                    let comm_r_last_proof_inner = comm_r_last_proof.clone();
-                    let invalid_comm_r_inner = Arc::clone(&invalid_comm_r);
-                    scope.execute(move || {
-                        if !comm_r_last_proof_inner.validate(challenge) {
-                            let mut invalid = invalid_comm_r_inner.lock().expect("failed to get lock on invalid_comm_r_inner");
-                            *invalid = InvalidChallengeCoordinate {
-                                failure_detected: true,
-                                challenge_index: challenge,
-                            };
-                            error!("Invalid comm_r detected at challenge index {}", challenge);
-                        }
-                    });
+                    let comm_r_last_proof = t_aux
+                        .tree_r_last
+                        .gen_cached_proof(challenge, Some(t_aux.tree_r_last_config_rows_to_discard))?;
+                    debug_assert!(
+                        comm_r_last_proof.validate(challenge),
+                        "Invalid comm_r detected at challenge index {}",
+                        challenge
+                    );
 
                     // Labeling Proofs Layer 1..l
                     let mut labeling_proofs = Vec::with_capacity(num_layers);
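
Note: unlike the comm_d check, the comm_r check above uses `debug_assert!`, which the standard library compiles out when debug assertions are disabled (the default for release builds), so this particular check only runs in debug builds. A tiny illustration of the difference, in plain Rust unrelated to this crate's types:

fn main() {
    // Always evaluated, in every build profile.
    assert!(2 + 2 == 4, "checked in debug and release builds");

    // Only evaluated when cfg(debug_assertions) is enabled (e.g. `cargo build`);
    // compiled out of default `--release` builds.
    debug_assert!(2 + 2 == 4, "checked in debug builds only");
}
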
@@ -423,23 +379,14 @@ impl<'a, Tree: 'static + MerkleTreeTrait, G: 'static + Hasher> StackedDrg<'a, Tr
 
                     {
                         let labeled_node = *rcp.c_x.get_node_at_layer(layer)?;
-                        let replica_id = &pub_inputs.replica_id;
-                        let proof_inner = proof.clone();
-                        let invalid_encoding_proof_inner = Arc::clone(&invalid_encoding_proof);
-                        scope.execute(move || {
-                            if !proof_inner.verify(replica_id, &labeled_node) {
-                                let mut invalid = invalid_encoding_proof_inner.lock().expect("failed to get lock on invalid_encoding_proof_inner");
-                                *invalid = InvalidEncodingProofCoordinate {
-                                    failure_detected: true,
-                                    layer,
-                                    challenge_index,
-                                };
-                                error!("Invalid encoding proof generated at layer {}, challenge index {}", layer, challenge_index);
-                            } else {
-                                trace!("Valid encoding proof generated at layer {}", layer);
-                            }
-                        });
-                    }
+                        assert!(
+                            proof.verify(&pub_inputs.replica_id, &labeled_node),
+                            "Invalid encoding proof generated at layer {}, challenge index {}",
+                            layer,
+                            challenge_index
+                        );
+                        trace!("Valid encoding proof generated at layer {}", layer);
+                    }
 
                     labeling_proofs.push(proof);
 
@@ -450,17 +397,6 @@ impl<'a, Tree: 'static + MerkleTreeTrait, G: 'static + Hasher> StackedDrg<'a, Tr
                             parents_data_full,
                         ));
                     }
-
-                    // Check if a proof was detected as invalid
-                    let invalid_comm_d_coordinate = invalid_comm_d.lock().expect("failed to get lock on invalid_comm_d");
-                    ensure!(!invalid_comm_d_coordinate.failure_detected, "Invalid comm_d detected at challenge_index {}",
-                        invalid_comm_d_coordinate.challenge_index);
-                    let invalid_comm_r_coordinate = invalid_comm_r.lock().expect("failed to get lock on invalid_comm_r");
-                    ensure!(!invalid_comm_r_coordinate.failure_detected, "Invalid comm_r detected at challenge_index {}",
-                        invalid_comm_r_coordinate.challenge_index);
-                    let invalid_encoding_proof_coordinate = invalid_encoding_proof.lock().expect("failed to get lock on invalid_encoding_proof");
-                    ensure!(!invalid_encoding_proof_coordinate.failure_detected, "Invalid encoding proof generated at layer {}, challenge_index {}",
-                        invalid_encoding_proof_coordinate.layer, invalid_encoding_proof_coordinate.challenge_index);
                 }
 
                 Ok(Proof {
@@ -472,7 +408,6 @@ impl<'a, Tree: 'static + MerkleTreeTrait, G: 'static + Hasher> StackedDrg<'a, Tr
                 })
             })
             .collect()
-        })
     }
 
     fn write_synth_proofs(
@@ -490,11 +425,6 @@ impl<'a, Tree: 'static + MerkleTreeTrait, G: 'static + Hasher> StackedDrg<'a, Tr
             "comm_r must be set prior to generating synthetic challenges",
         );
 
-        let invalid_synth_porep_proof = Arc::new(Mutex::new(InvalidChallengeCoordinate {
-            failure_detected: false,
-            challenge_index: 0,
-        }));
-
         // Verify synth proofs prior to writing because `ProofScheme`'s verification API is not
         // amenable to prover-only verification (i.e. the API uses public values, whereas synthetic
         // proofs are known only to the prover).
@@ -510,49 +440,21 @@ impl<'a, Tree: 'static + MerkleTreeTrait, G: 'static + Hasher> StackedDrg<'a, Tr
             .map(|tau| tau.comm_r.into())
             .expect("unwrapping should not fail");
         let synth_challenges = SynthChallengeGenerator::default(graph.size(), &replica_id, &comm_r);
-        ensure!(
-            synth_proofs.len() == synth_challenges.num_synth_challenges,
-            "Mismatched synth porep proofs for the required challenge set"
-        );
-
-        THREAD_POOL.scoped(|scope| {
-            for (challenge, proof) in synth_challenges.zip(synth_proofs) {
-                let proof_inner = proof.clone();
-                let challenge_inner = challenge;
-                let pub_params_inner = pub_params.clone();
-                let pub_inputs_inner = pub_inputs.clone();
-                let invalid_synth_porep_proof_inner = Arc::clone(&invalid_synth_porep_proof);
-                scope.execute(move || {
-                    if !proof_inner.verify(
-                        &pub_params_inner,
-                        &pub_inputs_inner,
-                        challenge_inner,
-                        graph,
-                    ) {
-                        let mut invalid = invalid_synth_porep_proof_inner
-                            .lock()
-                            .expect("failed to get lock on invalid_synth_porep_proof_inner");
-                        *invalid = InvalidChallengeCoordinate {
-                            failure_detected: true,
-                            challenge_index: challenge_inner,
-                        };
-                        error!(
-                            "Invalid synth porep proof generated at challenge index {}",
-                            challenge_inner
-                        );
-                    }
-                });
-            }
-        });
-
-        let invalid_synth_porep_proof_coordinate = invalid_synth_porep_proof
-            .lock()
-            .expect("failed to get lock on invalid_synth_porep_proof");
-        ensure!(
-            !invalid_synth_porep_proof_coordinate.failure_detected,
-            "Invalid synth_porep proof generated at challenge_index {}",
-            invalid_synth_porep_proof_coordinate.challenge_index
-        );
+        // Create a vector, so that we can iterate with a parallel iterator.
+        let challenges = synth_challenges
+            .take(synth_proofs.len())
+            .collect::<Vec<_>>();
+
+        challenges
+            .into_par_iter()
+            .zip_eq(synth_proofs)
+            .for_each(|(challenge, proof)| {
+                assert!(
+                    proof.verify(&pub_params, pub_inputs, challenge, graph),
+                    "Invalid synth_porep proof generated at challenge_index {}",
+                    challenge
+                );
+            });
         info!("writing synth-porep vanilla proofs to file: {:?}", path);
         let file = File::create(&path).map(BufWriter::new).with_context(|| {
             format!(
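
Note: the synth-proof hunk collects the challenge generator into a `Vec` so it can be paired with the proofs via rayon's `zip_eq`, which panics when the two sides end up with different lengths, and then verifies each pair in parallel with an `assert!`. A minimal sketch of the pattern, assuming the `rayon` crate; the string "proofs" and the `is_empty` check are hypothetical stand-ins for this crate's synthetic proof verification:

// Minimal sketch, not this crate's code: pair a collected Vec of challenges with a
// parallel iterator over proofs using `zip_eq`, asserting on each pair.
use rayon::prelude::*;

fn main() {
    let challenges: Vec<usize> = (0..4).collect();
    let proofs: Vec<&str> = vec!["p0", "p1", "p2", "p3"];

    challenges
        .into_par_iter()
        .zip_eq(proofs.par_iter()) // panics if the lengths ever differ
        .for_each(|(challenge, proof)| {
            // Stand-in for `proof.verify(&pub_params, pub_inputs, challenge, graph)`.
            assert!(
                !proof.is_empty(),
                "Invalid synth_porep proof generated at challenge_index {}",
                challenge
            );
        });
}
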
Expand Down Expand Up @@ -832,7 +734,7 @@ impl<'a, Tree: 'static + MerkleTreeTrait, G: 'static + Hasher> StackedDrg<'a, Tr
TreeArity: PoseidonArity, TreeArity: PoseidonArity,
{ {
use std::cmp::min; use std::cmp::min;
use std::sync::{mpsc::sync_channel as channel, RwLock}; use std::sync::{mpsc::sync_channel as channel, Arc, RwLock};


use fr32::fr_into_bytes; use fr32::fr_into_bytes;
use generic_array::GenericArray; use generic_array::GenericArray;
Expand Down