From fb46ef8e1d6baa4092013eb9787f15775eb433d4 Mon Sep 17 00:00:00 2001 From: Adam Gibson Date: Tue, 16 Jul 2024 16:58:49 -0600 Subject: [PATCH] Migrate all functions to rpc calls To simplify the user interface and make further development easier, all functions provided by the application will be possible as RPC calls. In this commit, only the proving and the verifying are done in this way. Additionally, to streamline the process, the convertkeys function is removed as it is only needed for proving and verifying and therefore can be done on startup of the RPC server, removing any delays during the proving step. What remains to be done is: add newkeys to the RPC interface, and, importantly, introduce security in the form of encryption of the file holding the private key which is required for proving. --- Cargo.toml | 4 +- src/autct.rs | 396 +++++------------------------------------------ src/config.rs | 4 +- src/lib.rs | 289 +++++++++++++++++++++++++++++++--- src/rpcclient.rs | 47 +++++- src/rpcserver.rs | 46 ++++-- src/utils.rs | 193 ++++++++++++++++++++--- 7 files changed, 557 insertions(+), 422 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 23500bd..e1ece42 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -14,8 +14,8 @@ edition = "2021" [dependencies] rand = { version = "0.8", default-features = false } -bulletproofs = { git = "https://github.com/AdamISZ/curve-trees", rev = "d76bd2837d4556b1d451055e4c381e5cb5abd00c" } -relations = { git = "https://github.com/AdamISZ/curve-trees", rev = "d76bd2837d4556b1d451055e4c381e5cb5abd00c" } +bulletproofs = { git = "https://github.com/AdamISZ/curve-trees", rev = "7268c22944595e2cfe52de623d33ece6f4882e5f" } +relations = { git = "https://github.com/AdamISZ/curve-trees", rev = "7268c22944595e2cfe52de623d33ece6f4882e5f" } ark-ff = { version = "0.4.0"} ark-ec = { version = "0.4.0"} ark-serialize = { version = "0.4.0" } diff --git a/src/autct.rs b/src/autct.rs index 01cd56a..389d6fd 100644 --- a/src/autct.rs +++ b/src/autct.rs @@ -3,235 +3,38 @@ extern crate rand; extern crate alloc; extern crate ark_secp256k1; -use base64::prelude::*; -use autct::utils::*; +use autct::rpcclient; +use autct::rpcserver; use autct::config::AutctConfig; -use autct::peddleq::PedDleqProof; -mod rpcclient; -mod rpcserver; use bitcoin::{Address, PrivateKey, XOnlyPublicKey}; -use bitcoin::key::{Secp256k1, TapTweak, UntweakedKeypair}; +use bitcoin::key::Secp256k1; -use bulletproofs::r1cs::R1CSProof; -use bulletproofs::r1cs::Prover; -use alloc::vec::Vec; -use ark_ec::{AffineRepr, short_weierstrass::SWCurveConfig, CurveGroup}; -use ark_ff::{PrimeField, Zero, One}; -use ark_serialize::{ - CanonicalSerialize, Compress}; -use relations::curve_tree::{SelRerandParameters, SelectAndRerandomizePath}; -use std::ops::{Mul, Add}; -use merlin::Transcript; -use ark_ec::short_weierstrass::Affine; -use ark_secp256k1::{Config as SecpConfig, Fq as SecpBase}; -use ark_secq256k1::Config as SecqConfig; +use std::error::Error; -use std::time::Instant; - -// this function returns the curve tree for the set of points -// read from disk (currently pubkey file location is passed as an argument), and -// then returns a tree, along with two bulletproofs for secp and secq, -// and the "merkle proof" of (blinded) commitments to the root. -// For the details on this proof, see "Select-and-Rerandomize" in the paper. 
-pub fn get_curve_tree_with_proof< - F: PrimeField, - P0: SWCurveConfig + Copy, - P1: SWCurveConfig + Copy, ->( - depth: usize, - generators_length_log_2: usize, - pubkey_file_path: &str, - our_pubkey: Affine, -) -> (R1CSProof>, R1CSProof>, - SelectAndRerandomizePath, - P0::ScalarField, - Affine, Affine, bool) { - let mut rng = rand::thread_rng(); - let generators_length = 1 << generators_length_log_2; - - let sr_params = - SelRerandParameters::::new(generators_length, generators_length, &mut rng); - - let p0_transcript = Transcript::new(b"select_and_rerandomize"); - let mut p0_prover: Prover<_, Affine> = - Prover::new(&sr_params.even_parameters.pc_gens, p0_transcript); - - let p1_transcript = Transcript::new(b"select_and_rerandomize"); - let mut p1_prover: Prover<_, Affine> = - Prover::new(&sr_params.odd_parameters.pc_gens, p1_transcript); - - // these are called 'leaf commitments' and not 'leaves', but it's just - // to emphasize that we are not committing to scalars, but using points (i.e. pubkeys) - // as the commitments (i.e. pedersen commitments with zero randomness) at - // the leaf level. - let mut privkey_parity_flip: bool = false; - let leaf_commitments = get_leaf_commitments::( - &(pubkey_file_path.to_string() + ".p")); - // derive the index where our pubkey is in the list. - // but: since it will have been permissible-ized, we need to rederive the permissible - // version here, purely for searching: - - // as well as the randomness in the blinded commitment, we also need to use the same - // blinding base: - let b_blinding = sr_params.even_parameters.pc_gens.B_blinding; - let mut our_pubkey_permiss1: Affine = our_pubkey; - while !sr_params.even_parameters.uh.is_permissible(our_pubkey_permiss1) { - our_pubkey_permiss1 = (our_pubkey_permiss1 + b_blinding).into(); - } - let mut our_pubkey_permiss2: Affine = -our_pubkey; - while !sr_params.even_parameters.uh.is_permissible(our_pubkey_permiss2) { - our_pubkey_permiss2 = (our_pubkey_permiss2 + b_blinding).into(); - } - let mut key_index: i32; // we're guaranteed to overwrite or panic but the compiler insists. - // the reason for 2 rounds of search is that BIP340 can output a different parity - // compared to ark-ec 's compression algo. - key_index = match leaf_commitments.iter().position(|&x| x == our_pubkey_permiss1) { - None => -1, - Some(ks) => ks.try_into().unwrap() - }; - if key_index == -1 { - key_index = match leaf_commitments.iter().position(|&x| x == our_pubkey_permiss2) { - None => panic!("provided pubkey not found in the set"), - Some(ks) => { - privkey_parity_flip = true; - ks.try_into().unwrap() - } - } - }; - - // Now we know we have a key that's in the set, we can construct the curve - // tree from the set, and then the proof using its private key: - let beforect = Instant::now(); - let (curve_tree, _) = get_curve_tree::( - leaf_commitments.clone(), depth, &sr_params); - println!("Elapsed time for curve tree construction: {:.2?}", beforect.elapsed()); - assert_eq!(curve_tree.height(), depth); - - let (path_commitments, rand_scalar) = - curve_tree.select_and_rerandomize_prover_gadget( - key_index.try_into().unwrap(), - &mut p0_prover, - &mut p1_prover, - &sr_params, - &mut rng, - ); - // The randomness for the PedDLEQ proof will have to be the randomness - // used in the curve tree randomization, *plus* the randomness that was used - // to convert P to a permissible point, upon initial insertion into the tree. 
- let mut r_offset: P0::ScalarField = P0::ScalarField::zero(); - let lcindex: usize = key_index.try_into().unwrap(); - let mut p_prime: Affine = leaf_commitments[lcindex]; - // TODO: this is basically repeating what's already done in - // sr_params creation, but I don't know how else to extract the number - // of H bumps that were done (and we need to, see previous comment). - while !sr_params.even_parameters.uh.is_permissible(p_prime) { - p_prime = (p_prime + b_blinding).into(); - r_offset += P0::ScalarField::one(); - } - // print the root of the curve tree. - // TODO: how to allow the return value to be either - // Affine or Affine? And as a consequence, - // to let the code be correct for any depth. - // And/or, is there not - // a simpler way to extract the root of the tree - // (which should be just .parent_commitment, but all methods - // to extract this value seem to be private) - let newpath = curve_tree.select_and_rerandomize_verification_commitments( - path_commitments.clone()); - let root_is_odd = newpath.even_commitments.len() == newpath.odd_commitments.len(); - println!("Root is odd? {}", root_is_odd); - let root: Affine; - if !root_is_odd { - root = *newpath.even_commitments.first().unwrap(); - } - else { - // derp, see above TODO - panic!("Wrong root parity, should be even"); - } - let p0_proof = p0_prover - .prove(&sr_params.even_parameters.bp_gens) - .unwrap(); - let p1_proof = p1_prover - .prove(&sr_params.odd_parameters.bp_gens) - .unwrap(); - let returned_rand = rand_scalar + r_offset; - (p0_proof, p1_proof, path_commitments, - returned_rand, b_blinding, root, privkey_parity_flip) -} #[tokio::main] -async fn main() -> Result<(), CustomError>{ - - if let Ok(autctcfg) = AutctConfig::build(){ - // TODO maybe remove this code duplication? - // The problem is that `L`, the branching factor of tree, - // *must* be a const generic, as I understand it, - // and we must return a tree with a specific value of that - // const usize integer. I'm not sure if we can do that - // with a macro, or if it's worth it to remove this - // minor duplication. +async fn main() -> Result<(), Box>{ + let autctcfg = AutctConfig::build()?; match autctcfg.clone().mode.unwrap().as_str() { - "prove" => {return run_prover(autctcfg) + "prove" => {return request_prove(autctcfg).await }, - "request" => {return run_request(autctcfg).await}, + "verify" => {return request_verify(autctcfg).await}, "serve" => {return rpcserver::do_serve(autctcfg).await }, - "newkeys" => {return run_create_keys(autctcfg).await}, - "convertkeys" => {return run_convert_keys::(autctcfg)}, - _ => {println!("Invalid mode, must be 'prove', 'serve', 'newkeys', 'convertkeys' or 'request'")}, + "newkeys" => {return create_keys(autctcfg).await}, + _ => {return Err("Invalid mode, must be 'prove', 'serve', 'newkeys' or 'verify'".into())}, - }} - else { - return Err(CustomError{}); } - Ok(()) } -// Takes as input a hex list of actual BIP340 pubkeys -// that should come from the utxo set; -// converts each point into a permissible point -// and then writes these points in binary format into -// a new file with same name as keyset with .p appended. 
-pub fn run_convert_keys<F: PrimeField,
-P0: SWCurveConfig<BaseField = F> + Copy,
-P1: SWCurveConfig<BaseField = F> + Copy,>(autctcfg: AutctConfig) -> Result<(), CustomError>{
-    let (cls, mut kss) = autctcfg.clone()
-    .get_context_labels_and_keysets().unwrap();
-    if kss.len() != 1 || cls.len() != 1 {
-        return Err(CustomError{});
-    }
-    let keyset = kss.pop().unwrap();
-    let raw_pubkeys = get_pubkey_leaves_hex::<F, P0>(&keyset);
-    let mut rng = rand::thread_rng();
-    let generators_length = 1 << autctcfg.generators_length_log_2.unwrap();
-
-    let sr_params =
-        SelRerandParameters::<P0, P1>::new(generators_length,
-            generators_length, &mut rng);
-    let (permissible_points, _pr)
-        = create_permissible_points_and_randomnesses(
-        &raw_pubkeys, &sr_params);
-    // take vec permissible points and write it in binary as n*33 bytes
-    let mut buf: Vec<u8> = Vec::with_capacity(permissible_points.len()*33);
-    let _: Vec<_> = permissible_points
-        .iter()
-        .map(|pt: &Affine<P0>| {
-            pt.serialize_compressed(&mut buf).expect("Failed to serialize point")
-        }).collect();
-    let output_file = keyset.clone() + ".p";
-    write_file_string(&output_file, buf);
-    Ok(())
-}
-
-async fn run_create_keys(autctcfg: AutctConfig) ->Result<(), CustomError> {
+async fn create_keys(autctcfg: AutctConfig) ->Result<(), Box<dyn Error>> {
     let nw = match autctcfg.bc_network.unwrap().as_str() {
         "mainnet" => bitcoin::Network::Bitcoin,
         "signet" => bitcoin::Network::Signet,
         "regtest" => bitcoin::Network::Regtest,
-        _ => panic!("Invalid bitcoin network string in config."),
+        _ => return Err("Invalid bitcoin network string in config.".into()),
     };
 
     // This uses the `rand-std` feature in the rust-bitcoin crate to generate
@@ -251,8 +54,8 @@ async fn run_create_keys(autctcfg: AutctConfig) ->Result<(), CustomError> {
     println!("The WIF string above can be imported into e.g. Sparrow, Core to sweep or access the funds in it.");
     Ok(())
 }
-async fn run_request(autctcfg: AutctConfig) -> Result<(), CustomError> {
-    let res = rpcclient::do_request(autctcfg).await;
+async fn request_verify(autctcfg: AutctConfig) -> Result<(), Box<dyn Error>> {
+    let res = rpcclient::verify(autctcfg).await;
     match res {
         Ok(rest) => {
             // codes defined in lib.rs
@@ -267,160 +70,33 @@ async fn run_request(autctcfg: AutctConfig) -> Result<(), CustomError> {
                 _ => println!("Unrecognized error code from server?"),
             }
         },
-        Err(_) => return Err(CustomError{}),
+        Err(_) => return Err("Verification request processing failed.".into()),
     };
     Ok(())
 }
 
-pub fn run_prover(autctcfg: AutctConfig) -> Result<(), CustomError>{
-    type F = ::ScalarField;
-    let nw = match autctcfg.bc_network.clone().unwrap().as_str() {
-        "mainnet" => bitcoin::Network::Bitcoin,
-        "signet" => bitcoin::Network::Signet,
-        "regtest" => bitcoin::Network::Regtest,
-        _ => panic!("Invalid bitcoin network string in config."),
+async fn request_prove(autctcfg: AutctConfig) -> Result<(), Box<dyn Error>>{
+    let res = rpcclient::prove(autctcfg).await;
+    match res {
+        Ok(rest) => {
+            // codes defined in lib.rs
+            // TODO: create some callback structure to receive the resource
+            match rest.accepted {
+                // deliberately verbose message here to help testers understand:
+                0 => println!("Proof generated successfully."),
+                -1 => println!("Undefined failure in proving."),
+                -2 => println!("Proving request rejected, must be only one context:keyset provided."),
+                -3 => println!("Proving request rejected, provided context label is not served."),
+                -4 => println!("Proving request rejected, provided keyset is not served."),
+                -5 => println!("Proving request rejected, wrong bitcoin network."),
+                -6 => println!("Proving request rejected, could not read private key 
from file."), + -7 => println!("Proving request rejected, invalid private key format (must be WIF or hex)."), + -8 => println!("Proving request rejected, provided key is not in the keyset"), + _ => println!("Unrecognized error code from server?"), + } + }, + Err(_) => return Err("Proving request processing failed.".into()), }; - let secp = Secp256k1::new(); // is the context global? need to check - // read privkey from file; we prioritize WIF format for compatibility - // with external wallets, but if that fails, we attempt to read it - // as raw hex: - let privkey_file_str = autctcfg.privkey_file_str.clone().unwrap(); - let privwif:String = read_file_string(&privkey_file_str) - .expect("Failed to read the private key from the file"); - - // Because sparrow (and, kinda, Core) expect usage of non-raw p2tr, - // it means we're forced to use the default tweaking algo here, even - // though it majorly screws up the flow (as we want to use ark's Affine<> - // objects for the curve points here). - // 1. First convert the hex equivalent of the WIF into a byte slice ( - // note that this is a big endian encoding still). - // 2. Then call PrivateKey.from_slice to deserialize into a PrivateKey. - // 3. use the privkey.public_key(&secp).inner.tap_tweak function - // to get a tweaked pubkey. - // 3. Then serialize that as a string, deserialize it back out to Affine - - let privkeyres1 = PrivateKey::from_wif(privwif.as_str()); - let privkey: PrivateKey; - if privkeyres1.is_err(){ - let privkeyres2 = PrivateKey::from_slice( - &hex::decode(privwif).unwrap(), - nw); - if privkeyres2.is_err(){ - panic!("Failed to read the private key as either WIF or hex format!"); - } - privkey = privkeyres2.unwrap(); - } - else { - privkey = privkeyres1.unwrap(); - } - let untweaked_key_pair: UntweakedKeypair = UntweakedKeypair::from_secret_key( - &secp, &privkey.inner); - let tweaked_key_pair = untweaked_key_pair.tap_tweak(&secp, None); - let privkey_bytes = tweaked_key_pair.to_inner().secret_bytes(); - let privhex = hex::encode(&privkey_bytes); - - let mut x = decode_hex_le_to_F::(&privhex); - let G = SecpConfig::GENERATOR; - let mut P = G.mul(x).into_affine(); - print_affine_compressed(P, "our pubkey"); - let (mut cls, mut kss) = autctcfg.clone() - .get_context_labels_and_keysets().unwrap(); - if kss.len() != 1 || cls.len() != 1 { - //return Err("You may only specify one context_label:keyset in the proof request".into()) - return Err(CustomError{}); - } - let keyset = kss.pop().unwrap(); - let context_label = cls.pop().unwrap(); - let gctwptime = Instant::now(); - let (p0proof, - p1proof, - path, - r, - H, - root, - privkey_parity_flip) = get_curve_tree_with_proof::< - SecpBase, - SecpConfig, - SecqConfig>( - autctcfg.depth.unwrap().try_into().unwrap(), - autctcfg.generators_length_log_2.unwrap().try_into().unwrap(), - &keyset, P); - println!("Elapsed time for get curve tree with proof: {:.2?}", gctwptime.elapsed()); - // if we could only find our pubkey in the list by flipping - // the sign of our private key (this is because the BIP340 compression - // logic is different from that in ark-ec; a TODO is to remove this - // confusion by having the BIP340 logic in this code): - if privkey_parity_flip { - x = -x; - P = -P; - } - print_affine_compressed(P, "P after flipping"); - // next steps create the Pedersen DLEQ proof for this key: - // - let J = get_generators::(context_label.as_bytes()); - print_affine_compressed(J, "J"); - // blinding factor for Pedersen - // the Pedersen commitment D is xG + rH - let rH = 
H.mul(r).into_affine(); - let D = P.add(rH).into_affine(); - // the key image (E) is xJ - let E = J.mul(x).into_affine(); - let mut transcript = Transcript::new(APP_DOMAIN_LABEL); - let proof = PedDleqProof::create( - &mut transcript, - &D, - &E, - &x, - &r, - &G, - &H, - &J, - None, - None, - context_label.as_bytes(), - autctcfg.user_string.as_ref().unwrap().as_bytes() - ); - let mut buf = Vec::with_capacity(proof.serialized_size(Compress::Yes)); - proof.serialize_compressed(&mut buf).unwrap(); - - let mut verifier = Transcript::new(APP_DOMAIN_LABEL); - assert!(proof - .verify( - &mut verifier, - &D, - &E, - &G, - &H, - &J, - context_label.as_bytes(), - autctcfg.user_string.unwrap().as_bytes() - ) - .is_ok()); - print_affine_compressed(D, "D"); - print_affine_compressed(E, "E"); - let total_size = - 33 + 33 + // D and E points (compressed) - proof.serialized_size(Compress::Yes) + - p0proof.serialized_size(Compress::Yes) + - p1proof.serialized_size(Compress::Yes) + - path.serialized_size(Compress::Yes); - let mut buf2 = Vec::with_capacity(total_size); - D.serialize_compressed(&mut buf2).unwrap(); - E.serialize_compressed(&mut buf2).unwrap(); - proof.serialize_with_mode(&mut buf2, Compress::Yes).unwrap(); - p0proof.serialize_compressed(&mut buf2).unwrap(); - p1proof.serialize_compressed(&mut buf2).unwrap(); - path.serialize_compressed(&mut buf2).unwrap(); - root.serialize_compressed(&mut buf2).unwrap(); - if autctcfg.base64_proof.unwrap() { - let encoded = BASE64_STANDARD.encode(buf2); - println!("Proof generated successfully:\n{}", encoded); - return Ok(()); - } - write_file_string(&autctcfg.proof_file_str.clone().unwrap(), buf2); - println!("Proof generated successfully and written to {}. Size was {}", - &autctcfg.proof_file_str.unwrap(), total_size); - print_affine_compressed(root, "root"); Ok(()) } diff --git a/src/config.rs b/src/config.rs index ea3f7f3..bc70a5b 100644 --- a/src/config.rs +++ b/src/config.rs @@ -5,7 +5,7 @@ use std::path::PathBuf; use clap::{Parser, CommandFactory, Command}; // This handles config items with syntax: "a:b, c:d,.." 
-fn get_params_from_config_string(params: String) -> Result<(Vec, Vec), Box> { +pub fn get_params_from_config_string(params: String) -> Result<(Vec, Vec), Box> { let pairs: Vec = params.split(",").map(|s| s.to_string()).collect(); let mut kss: Vec = vec![]; let mut cls: Vec = vec![]; @@ -46,7 +46,7 @@ https://stackoverflow.com/a/75981247 #[clap(version, about="Anonymous Usage Tokens from Curve Trees")] pub struct AutctConfig { /// `mode` is one of: "newkeys", "prove", - /// "serve", "convertkeys" or "request" + /// "serve" or "verify" #[arg(short('M'), long, required=false)] #[clap(verbatim_doc_comment)] pub mode: Option, diff --git a/src/lib.rs b/src/lib.rs index a1e765a..b975a94 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -5,7 +5,8 @@ pub mod utils; pub mod autctverifier; pub mod config; pub mod keyimagestore; - +pub mod rpcclient; +pub mod rpcserver; extern crate rand; extern crate alloc; extern crate ark_secp256k1; @@ -28,12 +29,270 @@ use std::io::Cursor; use std::time::Instant; use toy_rpc::macros::export_impl; +//use autct::get_curve_tree_with_proof; +use base64::prelude::*; + +use bitcoin::key::{Secp256k1, TapTweak, UntweakedKeypair}; + +use alloc::vec::Vec; +use ark_ec::{AffineRepr, short_weierstrass::SWCurveConfig, CurveGroup}; +use ark_secp256k1::Fq as SecpBase; +use std::ops::{Mul, Add}; +use bitcoin::PrivateKey; + pub mod rpc { + use crate::config::get_params_from_config_string; + use super::*; use std::sync::{Arc, Mutex}; use relations::curve_tree::{CurveTree, SelRerandParameters}; + #[derive(Clone)] + pub struct RPCProverVerifierArgs { + pub keyset_file_locs: Vec, + pub context_labels: Vec, + pub sr_params: SelRerandParameters, + pub curve_trees: Vec>, + pub G: Affine, + pub H: Affine, + pub Js: Vec>, + pub ks: Vec>>>>, + } + + #[derive(Serialize, Deserialize)] + pub struct RPCProverRequest { + pub keyset: String, + pub depth: i32, + pub generators_length_log_2: u8, + pub user_label: String, + pub key_credential: String, + pub bc_network: String, // this is needed for parsing private keys + } + + #[derive(Debug, Serialize, Deserialize)] + pub struct RPCProverResponse { + pub keyset: Option, + pub user_label: Option, + pub context_label: Option, + pub proof: Option, + pub key_image: Option, + pub accepted: i32, + } + + pub struct RPCProver{ + pub prover_verifier_args: RPCProverVerifierArgs, + } + + #[export_impl] + impl RPCProver { + #[export_method] + pub async fn prove(&self, args: RPCProverRequest) -> Result{ + let pva = &self.prover_verifier_args; + let mut resp: RPCProverResponse = RPCProverResponse{ + keyset: None, // note that this needs to be parsed out + user_label: Some(args.user_label.clone()), + context_label: None, // as above for keyset + proof: None, + key_image: None, + accepted: -1 + }; + // parse out the single (context label, keyset pair) provided + // by the caller's request, and then check that they are included + // in the list supported by this server. 
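+            // (For illustration only: a typical request would set e.g.
+            // args.keyset = "my-context-label:testdata/keyset.txt", i.e. exactly one
+            // context_label:keyset_file pair in the "a:b, c:d,.." syntax handled by config.rs.)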
+ let (mut cls, mut kss) = get_params_from_config_string(args.keyset).unwrap(); + if kss.len() != 1 || cls.len() != 1 { + resp.accepted = -2; + return Ok(resp); + } + let keyset = kss.pop().unwrap(); + let context_label = cls.pop().unwrap(); + if !(pva.context_labels.contains(&context_label)) { + resp.accepted = -3; + return Ok(resp); + } + if !(pva.keyset_file_locs.contains(&keyset)){ + resp.accepted = -4; + return Ok(resp); + } + type F = ::ScalarField; + let nw = match args.bc_network.clone().as_str() { + "mainnet" => bitcoin::Network::Bitcoin, + "signet" => bitcoin::Network::Signet, + "regtest" => bitcoin::Network::Regtest, + _ => {resp.accepted = -5; + return Ok(resp);}, + }; + let secp = Secp256k1::new(); + // read privkey from file; we prioritize WIF format for compatibility + // with external wallets, but if that fails, we attempt to read it + // as raw hex: + let privkey_file_str = args.key_credential.clone(); + let privwifres = read_file_string(&privkey_file_str); + if privwifres.is_err(){ + resp.accepted = -6; + return Ok(resp); + } + let privwif = privwifres.unwrap(); + + // Because sparrow (and, kinda, Core) expect usage of non-raw p2tr, + // it means we're forced to use the default tweaking algo here, even + // though it majorly screws up the flow (as we want to use ark's Affine<> + // objects for the curve points here). + // 1. First convert the hex equivalent of the WIF into a byte slice ( + // note that this is a big endian encoding still). + // 2. Then call PrivateKey.from_slice to deserialize into a PrivateKey. + // 3. use the privkey.public_key(&secp).inner.tap_tweak function + // to get a tweaked pubkey. + // 3. Then serialize that as a string, deserialize it back out to Affine + + let privkeyres1 = PrivateKey::from_wif(privwif.as_str()); + let privkey: PrivateKey; + if privkeyres1.is_err(){ + let privkeyres2 = PrivateKey::from_slice( + &hex::decode(privwif).unwrap(), + nw); + if privkeyres2.is_err(){ + //panic!("Failed to read the private key as either WIF or hex format!"); + resp.accepted = -7; + return Ok(resp); + } + privkey = privkeyres2.unwrap(); + } + else { + privkey = privkeyres1.unwrap(); + } + let untweaked_key_pair: UntweakedKeypair = UntweakedKeypair::from_secret_key( + &secp, &privkey.inner); + let tweaked_key_pair = untweaked_key_pair.tap_tweak(&secp, None); + let privkey_bytes = tweaked_key_pair.to_inner().secret_bytes(); + let privhex = hex::encode(&privkey_bytes); + + let mut x = decode_hex_le_to_F::(&privhex); + let G = SecpConfig::GENERATOR; + let mut P = G.mul(x).into_affine(); + print_affine_compressed(P, "request pubkey"); + let gctwptime = Instant::now(); + let (p0proof, + p1proof, + path, + r, + H, + root, + privkey_parity_flip) = match get_curve_tree_with_proof::< + SecpBase, + SecpConfig, + SecqConfig>( + args.depth.try_into().unwrap(), + args.generators_length_log_2.try_into().unwrap(), + &keyset, P) { + Err(_) => {resp.accepted = -8; + return Ok(resp);} + Ok((p0proof, + p1proof, + path, + r, + H, + root, + privkey_parity_flip)) => (p0proof, + p1proof, + path, + r, + H, + root, + privkey_parity_flip), + }; + + println!("Elapsed time for get curve tree with proof: {:.2?}", gctwptime.elapsed()); + // if we could only find our pubkey in the list by flipping + // the sign of our private key (this is because the BIP340 compression + // logic is different from that in ark-ec; a TODO is to remove this + // confusion by having the BIP340 logic in this code): + if privkey_parity_flip { + x = -x; + P = -P; + } + print_affine_compressed(P, "P after 
flipping"); + // next steps create the Pedersen DLEQ proof for this key: + // + let J = get_generators::(context_label.as_bytes()); + print_affine_compressed(J, "J"); + // blinding factor for Pedersen + // the Pedersen commitment D is xG + rH + let rH = H.mul(r).into_affine(); + let D = P.add(rH).into_affine(); + // the key image (E) is xJ + let E = J.mul(x).into_affine(); + let mut transcript = Transcript::new(APP_DOMAIN_LABEL); + let proof = PedDleqProof::create( + &mut transcript, + &D, + &E, + &x, + &r, + &G, + &H, + &J, + None, + None, + context_label.as_bytes(), + args.user_label.as_bytes() + ); + let mut buf = Vec::with_capacity(proof.serialized_size(Compress::Yes)); + proof.serialize_compressed(&mut buf).unwrap(); + + let mut verifier = Transcript::new(APP_DOMAIN_LABEL); + assert!(proof + .verify( + &mut verifier, + &D, + &E, + &G, + &H, + &J, + context_label.as_bytes(), + args.user_label.as_bytes() + ) + .is_ok()); + print_affine_compressed(D, "D"); + print_affine_compressed(E, "E"); + let total_size = + 33 + 33 + // D and E points (compressed) + proof.serialized_size(Compress::Yes) + + p0proof.serialized_size(Compress::Yes) + + p1proof.serialized_size(Compress::Yes) + + path.serialized_size(Compress::Yes); + let mut buf2 = Vec::with_capacity(total_size); + D.serialize_compressed(&mut buf2).unwrap(); + E.serialize_compressed(&mut buf2).unwrap(); + proof.serialize_with_mode(&mut buf2, Compress::Yes).unwrap(); + p0proof.serialize_compressed(&mut buf2).unwrap(); + p1proof.serialize_compressed(&mut buf2).unwrap(); + path.serialize_compressed(&mut buf2).unwrap(); + root.serialize_compressed(&mut buf2).unwrap(); + // base64 output as an option no longer makes sense: + //if autctcfg.base64_proof.unwrap() { + let encoded = BASE64_STANDARD.encode(buf2); + // println!("Proof generated successfully:\n{}", encoded); + // return Ok(()); + //write_file_string(&autctcfg.proof_file_str.clone().unwrap(), buf2); + //println!("Proof generated successfully and written to {}. Size was {}", + //&autctcfg.proof_file_str.unwrap(), total_size); + print_affine_compressed(root, "root"); + let mut e = Vec::new(); + E.serialize_compressed(&mut e).expect("Failed to serialize point"); + let resp: RPCProverResponse = RPCProverResponse{ + keyset: Some(keyset), + user_label: Some(args.user_label), + context_label: Some(context_label), + proof: Some(encoded), + key_image: Some(hex::encode(&e)), + accepted: 0, + }; + Ok(resp) + } + } + #[derive(Serialize, Deserialize)] pub struct RPCProofVerifyRequest { pub keyset: String, @@ -66,19 +325,11 @@ pub mod rpc { // should be able to decide the business logic of that based on the // context label given in the Request object. 
pub struct RPCProofVerifier{ - pub keyset_file_locs: Vec, - pub context_labels: Vec, - pub sr_params: SelRerandParameters, - pub curve_trees: Vec>, - pub G: Affine, - pub H: Affine, - pub Js: Vec>, - pub ks: Vec>>>>, + pub prover_verifier_args: RPCProverVerifierArgs, } #[export_impl] impl RPCProofVerifier { - // currently the only request in the API: // the two arguments are: // a String containing the name of the file containing the pubkeys // a bytestring (Vec) containing the serialized proof of ownership @@ -109,7 +360,7 @@ pub mod rpc { key_image: None, }; - if !(self.context_labels.contains(&verif_request.context_label)) { + if !(self.prover_verifier_args.context_labels.contains(&verif_request.context_label)) { resp.accepted = -4; return Ok(resp); } @@ -120,7 +371,7 @@ pub mod rpc { // (TODO can fold in above existence check to this call; // but for now, this is guaranteed to succeed because of // that check.) - let idx = self.context_labels.iter().position( + let idx = self.prover_verifier_args.context_labels.iter().position( |x| x == &verif_request.context_label).unwrap(); let mut cursor = Cursor::new(verif_request.proof); // deserialize the components of the PedDLEQ proof first and verify it: @@ -136,9 +387,9 @@ pub mod rpc { &mut transcript, &D, &E, - &self.G, - &self.H, - &self.Js[idx], + &self.prover_verifier_args.G, + &self.prover_verifier_args.H, + &self.prover_verifier_args.Js[idx], verif_request.context_label.as_bytes(), verif_request.user_label.as_bytes() ); @@ -153,7 +404,7 @@ pub mod rpc { return Ok(resp); } // check early if the now-verified key image (E) is a reuse-attempt: - if self.ks[idx].lock().unwrap().is_key_in_store(E) { + if self.prover_verifier_args.ks[idx].lock().unwrap().is_key_in_store(E) { println!("Reuse of key image disallowed: "); print_affine_compressed(E, "Key image value"); resp.accepted = -3; @@ -161,7 +412,7 @@ pub mod rpc { return Ok(resp); } // if it isn't, then it counts as used now: - self.ks[idx].lock().unwrap().add_key(E).expect("Failed to add keyimage to store."); + self.prover_verifier_args.ks[idx].lock().unwrap().add_key(E).expect("Failed to add keyimage to store."); // Next, we deserialize and validate the curve tree proof. // TODO replace these `expect()` calls; we need to return // an 'invalid proof format' error if they send us junk, not crash! 
@@ -180,8 +431,8 @@ pub mod rpc { &mut cursor).expect("Failed to deserialize root"); let timer1 = Instant::now(); let claimed_D = verify_curve_tree_proof( - path.clone(), &self.sr_params, - &self.curve_trees[idx], + path.clone(), &self.prover_verifier_args.sr_params, + &self.prover_verifier_args.curve_trees[idx], p0proof, p1proof, prover_root); let claimed_D_result = match claimed_D { Ok(x) => x, diff --git a/src/rpcclient.rs b/src/rpcclient.rs index 0022904..7ad333d 100644 --- a/src/rpcclient.rs +++ b/src/rpcclient.rs @@ -1,13 +1,15 @@ -use autct::utils::APP_DOMAIN_LABEL; + use toy_rpc::Client; -use autct::config::AutctConfig; +use crate::config::AutctConfig; +use crate::utils::*; + // import everything including the client stub generated by the macro -use autct::rpc::*; +use crate::rpc::*; use std::error::Error; use std::fs; -pub async fn do_request(autctcfg: AutctConfig) -> Result>{ +pub async fn verify(autctcfg: AutctConfig) -> Result>{ let rpc_port = autctcfg.rpc_port; let host: &str= &autctcfg.rpc_host.clone().unwrap(); let port_str: &str = &rpc_port.unwrap().to_string(); @@ -38,3 +40,40 @@ pub async fn do_request(autctcfg: AutctConfig) -> Result Result>{ + let rpc_port = autctcfg.rpc_port; + let host: &str= &autctcfg.rpc_host.clone().unwrap(); + let port_str: &str = &rpc_port.unwrap().to_string(); + let addr: String = format!("{}:{}", host, port_str); + + // request must specify *only one* context label, keyset. + // This will be checked by the server but we can check it here also. + let (cls, kss) = autctcfg.clone() + .get_context_labels_and_keysets().unwrap(); + if kss.len() != 1 || cls.len() != 1 { + return Err("You may only specify one context_label:keyset in the request".into()) + } + let req: RPCProverRequest = RPCProverRequest { + keyset: autctcfg.keysets.unwrap(), + depth: autctcfg.depth.unwrap(), + generators_length_log_2: autctcfg.generators_length_log_2.unwrap(), + user_label: autctcfg.user_string.unwrap(), + key_credential: autctcfg.privkey_file_str.unwrap(), + bc_network: autctcfg.bc_network.unwrap() + }; + let mut client = Client::dial(&addr).await.unwrap(); + // we set a very generous timeout for proving requests, though they should + // usually be in the sub 15s area. 
+ client.set_default_timeout(std::time::Duration::from_secs(120)); + let result = client + .r_p_c_prover().prove(req) + .await; + match result { + Ok(x) => return Ok(x), + Err(x) => { + println!("Error in rpc client prove call: {}", &x); + return Err(x.into()); + } + } +} + diff --git a/src/rpcserver.rs b/src/rpcserver.rs index 9ca6fa0..111dac5 100644 --- a/src/rpcserver.rs +++ b/src/rpcserver.rs @@ -2,24 +2,25 @@ use ark_serialize::{ CanonicalDeserialize, Compress, Validate}; -use autct::utils::{get_curve_tree, get_leaf_commitments, APP_DOMAIN_LABEL}; +use crate::rpc::RPCProverVerifierArgs; +use crate::utils::{get_curve_tree, get_leaf_commitments, convert_keys, APP_DOMAIN_LABEL}; use tokio::{task, net::TcpListener}; use std::fs; +use std::error::Error; use std::io::Cursor; use std::sync::{Arc, Mutex}; use toy_rpc::Server; use std::iter::zip; -use autct::{rpc::RPCProofVerifier, utils}; -use autct::config::AutctConfig; -use autct::keyimagestore::{KeyImageStore, create_new_store}; +use crate::{rpc::RPCProofVerifier, rpc::RPCProver, utils}; +use crate::config::AutctConfig; +use crate::keyimagestore::{KeyImageStore, create_new_store}; use relations::curve_tree::{SelRerandParameters, CurveTree}; use ark_secp256k1::{Config as SecpConfig, Fq as SecpBase}; use ark_secq256k1::Config as SecqConfig; use ark_ec::short_weierstrass::{Affine, SWCurveConfig}; -use utils::CustomError; -pub async fn do_serve(autctcfg: AutctConfig) -> Result<(), CustomError>{ +pub async fn do_serve(autctcfg: AutctConfig) -> Result<(), Box>{ let (context_labels, keyset_file_locs) = autctcfg .clone().get_context_labels_and_keysets().unwrap(); let rpc_port = autctcfg.rpc_port.unwrap(); @@ -35,7 +36,16 @@ pub async fn do_serve(autctcfg: AutctConfig) -> Result<(), CustomError>{ let mut Js: Vec> = vec![]; let mut kss: Vec>>>> = vec![]; for (fl, cl) in zip(keyset_file_locs.iter(), context_labels.iter()) { + // this part is what consumes time, so we do it upfront on startup of the rpc server, + // for every keyset that we are serving. 
+ // also note that for now there are no errors returned by convert_keys hence unwrap() + // TODO add info to interface so user knows why the startup is hanging + convert_keys::(fl.to_string(), autctcfg.generators_length_log_2.unwrap()).unwrap(); let leaf_commitments = get_leaf_commitments(&(fl.to_string() + ".p")); + + // Actually creating the curve tree is much less time consuming (a few seconds for most trees) let (curve_tree2, _) = get_curve_tree:: ( leaf_commitments, @@ -70,19 +80,27 @@ pub async fn do_serve(autctcfg: AutctConfig) -> Result<(), CustomError>{ let G = SecpConfig::GENERATOR; //let J = utils::get_generators(autctcfg.context_label.as_ref().unwrap().as_bytes()); let H = sr_params.even_parameters.pc_gens.B_blinding.clone(); + let prover_verifier_args = RPCProverVerifierArgs { + sr_params, + keyset_file_locs, + context_labels, + curve_trees, + G, + H, + Js, + ks: kss + }; let verifier_service = Arc::new( RPCProofVerifier{ - sr_params, - keyset_file_locs, - context_labels, - curve_trees, - G, - H, - Js, - ks: kss} + prover_verifier_args: prover_verifier_args.clone()} + ); + let prover_service = Arc::new( + RPCProver{ + prover_verifier_args} ); let server = Server::builder() .register(verifier_service) // register service + .register(prover_service) .build(); let listener = TcpListener::bind(&addr).await.unwrap(); diff --git a/src/utils.rs b/src/utils.rs index d1444a4..39e2c89 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -4,18 +4,20 @@ extern crate alloc; extern crate ark_secp256k1; use ark_ff::Field; -use ark_ff::PrimeField; +use ark_ff::{PrimeField, Zero, One}; use ark_ec::AffineRepr; use ark_ec::short_weierstrass::SWCurveConfig; use ark_ec::short_weierstrass::Affine; use relations::curve_tree::CurveTree; -use std::io::Error; +use std::error::Error; use std::fs; -use std::fmt; use std::path::PathBuf; use std::time::Instant; use ark_serialize::{CanonicalSerialize, CanonicalDeserialize}; -use relations::curve_tree::SelRerandParameters; +use relations::curve_tree::{SelRerandParameters, SelectAndRerandomizePath}; +use bulletproofs::r1cs::R1CSProof; +use bulletproofs::r1cs::Prover; +use merlin::Transcript; // all transcripts created in this project should be // initialized with this name: @@ -29,20 +31,6 @@ pub const CONTEXT_LABEL: &[u8] = b"default-app-context-label"; // primarily for testing pub const USER_STRING: &[u8] = b"name-goes-here"; -// TODO this customerror class is not developed; -// I just needed an error class that can be handled -// by pymethod as part of Result, because Result> -// apparently just doesn't work with pyo3 (?) 
-#[derive(Debug)] -pub struct CustomError; - -impl std::error::Error for CustomError {} - -impl fmt::Display for CustomError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "Oh no!") - } -} // Given a hex string of big-endian encoding, @@ -102,7 +90,7 @@ pub fn field_as_bytes(field: &F) -> Vec { bytes } -pub fn read_file_string(filepath: &str) -> Result> { +pub fn read_file_string(filepath: &str) -> Result> { let resp = match fs::read_to_string(filepath) { Ok(data) => data, Err(e) => {return Err(e.into());} @@ -114,7 +102,7 @@ pub fn write_file_string(filepath: &str, mut buf: Vec) -> () { fs::write(filepath, &mut buf).expect("Failed to write to file"); } -pub fn write_file_string2(loc: PathBuf, mut buf: Vec) ->Result<(), Error> { +pub fn write_file_string2(loc: PathBuf, mut buf: Vec) ->Result<(), std::io::Error> { fs::write(loc, &mut buf) } @@ -223,4 +211,167 @@ P1: SWCurveConfig + Co let curve_tree = CurveTree::::from_set( &leaf_commitments, sr_params, Some(depth)); (curve_tree, sr_params.even_parameters.pc_gens.B_blinding) -} \ No newline at end of file +} + +// this function returns the curve tree for the set of points +// read from disk (currently pubkey file location is passed as an argument), and +// then returns a tree, along with two bulletproofs for secp and secq, +// and the "merkle proof" of (blinded) commitments to the root. +// For the details on this proof, see "Select-and-Rerandomize" in the paper. +pub fn get_curve_tree_with_proof< + F: PrimeField, + P0: SWCurveConfig + Copy, + P1: SWCurveConfig + Copy, +>( + depth: usize, + generators_length_log_2: usize, + pubkey_file_path: &str, + our_pubkey: Affine, +) -> Result<(R1CSProof>, R1CSProof>, + SelectAndRerandomizePath, + P0::ScalarField, + Affine, Affine, bool), Box> { + let mut rng = rand::thread_rng(); + let generators_length = 1 << generators_length_log_2; + + let sr_params = + SelRerandParameters::::new(generators_length, generators_length, &mut rng); + + let p0_transcript = Transcript::new(b"select_and_rerandomize"); + let mut p0_prover: Prover<_, Affine> = + Prover::new(&sr_params.even_parameters.pc_gens, p0_transcript); + + let p1_transcript = Transcript::new(b"select_and_rerandomize"); + let mut p1_prover: Prover<_, Affine> = + Prover::new(&sr_params.odd_parameters.pc_gens, p1_transcript); + + // these are called 'leaf commitments' and not 'leaves', but it's just + // to emphasize that we are not committing to scalars, but using points (i.e. pubkeys) + // as the commitments (i.e. pedersen commitments with zero randomness) at + // the leaf level. + let mut privkey_parity_flip: bool = false; + let leaf_commitments = get_leaf_commitments::( + &(pubkey_file_path.to_string() + ".p")); + // derive the index where our pubkey is in the list. 
+ // but: since it will have been permissible-ized, we need to rederive the permissible + // version here, purely for searching: + + // as well as the randomness in the blinded commitment, we also need to use the same + // blinding base: + let b_blinding = sr_params.even_parameters.pc_gens.B_blinding; + let mut our_pubkey_permiss1: Affine = our_pubkey; + while !sr_params.even_parameters.uh.is_permissible(our_pubkey_permiss1) { + our_pubkey_permiss1 = (our_pubkey_permiss1 + b_blinding).into(); + } + let mut our_pubkey_permiss2: Affine = -our_pubkey; + while !sr_params.even_parameters.uh.is_permissible(our_pubkey_permiss2) { + our_pubkey_permiss2 = (our_pubkey_permiss2 + b_blinding).into(); + } + let mut key_index: i32; // we're guaranteed to overwrite or panic but the compiler insists. + // the reason for 2 rounds of search is that BIP340 can output a different parity + // compared to ark-ec 's compression algo. + key_index = match leaf_commitments.iter().position(|&x| x == our_pubkey_permiss1) { + None => -1, + Some(ks) => ks.try_into().unwrap() + }; + if key_index == -1 { + key_index = match leaf_commitments.iter().position(|&x| x == our_pubkey_permiss2) { + None => {return Err("provided pubkey not found in the set".into());}, + Some(ks) => { + privkey_parity_flip = true; + ks.try_into().unwrap() + } + } + }; + + // Now we know we have a key that's in the set, we can construct the curve + // tree from the set, and then the proof using its private key: + let beforect = Instant::now(); + let (curve_tree, _) = get_curve_tree::( + leaf_commitments.clone(), depth, &sr_params); + println!("Elapsed time for curve tree construction: {:.2?}", beforect.elapsed()); + assert_eq!(curve_tree.height(), depth); + + let (path_commitments, rand_scalar) = + curve_tree.select_and_rerandomize_prover_gadget( + key_index.try_into().unwrap(), + &mut p0_prover, + &mut p1_prover, + &sr_params, + &mut rng, + ); + // The randomness for the PedDLEQ proof will have to be the randomness + // used in the curve tree randomization, *plus* the randomness that was used + // to convert P to a permissible point, upon initial insertion into the tree. + let mut r_offset: P0::ScalarField = P0::ScalarField::zero(); + let lcindex: usize = key_index.try_into().unwrap(); + let mut p_prime: Affine = leaf_commitments[lcindex]; + // TODO: this is basically repeating what's already done in + // sr_params creation, but I don't know how else to extract the number + // of H bumps that were done (and we need to, see previous comment). + while !sr_params.even_parameters.uh.is_permissible(p_prime) { + p_prime = (p_prime + b_blinding).into(); + r_offset += P0::ScalarField::one(); + } + // print the root of the curve tree. + // TODO: how to allow the return value to be either + // Affine or Affine? And as a consequence, + // to let the code be correct for any depth. + // And/or, is there not + // a simpler way to extract the root of the tree + // (which should be just .parent_commitment, but all methods + // to extract this value seem to be private) + let newpath = curve_tree.select_and_rerandomize_verification_commitments( + path_commitments.clone()); + let root_is_odd = newpath.even_commitments.len() == newpath.odd_commitments.len(); + println!("Root is odd? 
{}", root_is_odd); + let root: Affine; + if !root_is_odd { + root = *newpath.even_commitments.first().unwrap(); + } + else { + // derp, see above TODO + panic!("Wrong root parity, should be even"); + } + let p0_proof = p0_prover + .prove(&sr_params.even_parameters.bp_gens) + .unwrap(); + let p1_proof = p1_prover + .prove(&sr_params.odd_parameters.bp_gens) + .unwrap(); + let returned_rand = rand_scalar + r_offset; + Ok((p0_proof, p1_proof, path_commitments, + returned_rand, b_blinding, root, privkey_parity_flip)) +} + +// Takes as input a hex list of actual BIP340 pubkeys +// that should come from the utxo set; +// converts each point into a permissible point +// and then writes these points in binary format into +// a new file with same name as keyset with .p appended. +// TODO return an error if this can't be done. +pub fn convert_keys + Copy, +P1: SWCurveConfig + Copy,>(keyset: String, generators_length_log_2: u8) -> Result<(), Box>{ + let raw_pubkeys = get_pubkey_leaves_hex::(&keyset); + let mut rng = rand::thread_rng(); + let generators_length = 1 << generators_length_log_2; + + let sr_params = + SelRerandParameters::::new(generators_length, + generators_length, &mut rng); + let (permissible_points, _pr) + = create_permissible_points_and_randomnesses( + &raw_pubkeys, &sr_params); + // take vec permissible points and write it in binary as n*33 bytes + let mut buf: Vec = Vec::with_capacity(permissible_points.len()*33); + let _: Vec<_> = permissible_points + .iter() + .map(|pt: &Affine| { + pt.serialize_compressed(&mut buf).expect("Failed to serialize point") + }).collect(); + let output_file = keyset.clone() + ".p"; + write_file_string(&output_file, buf); + Ok(()) +}