From ab61e3ab70aa0f7a037e0ad4a430975f50266097 Mon Sep 17 00:00:00 2001 From: jfecher Date: Tue, 1 Aug 2023 09:52:19 -0500 Subject: [PATCH 01/19] fix: Implement `.len()` in Acir-Gen (#2077) * Start experiment to merge array and slice types * Finish merger of slices and arrays * Implement missing try_bind function * Add missed case for NotConstant * Fix some tests * Fix poseidon test * Fix evaluation of slice length * Fix tests * Fix 2070 --- crates/nargo_cli/tests/test_data/array_len/Prover.toml | 1 + crates/nargo_cli/tests/test_data/array_len/src/main.nr | 7 ++++++- .../src/ssa_refactor/acir_gen/acir_ir/acir_variable.rs | 5 +++++ crates/noirc_evaluator/src/ssa_refactor/acir_gen/mod.rs | 8 ++++++++ 4 files changed, 20 insertions(+), 1 deletion(-) diff --git a/crates/nargo_cli/tests/test_data/array_len/Prover.toml b/crates/nargo_cli/tests/test_data/array_len/Prover.toml index 3c3295e6848..a5ffe607b73 100644 --- a/crates/nargo_cli/tests/test_data/array_len/Prover.toml +++ b/crates/nargo_cli/tests/test_data/array_len/Prover.toml @@ -1,2 +1,3 @@ len3 = [1, 2, 3] len4 = [1, 2, 3, 4] +x = 123 diff --git a/crates/nargo_cli/tests/test_data/array_len/src/main.nr b/crates/nargo_cli/tests/test_data/array_len/src/main.nr index 2c3cc0aee60..65c2295cefb 100644 --- a/crates/nargo_cli/tests/test_data/array_len/src/main.nr +++ b/crates/nargo_cli/tests/test_data/array_len/src/main.nr @@ -12,7 +12,7 @@ fn nested_call(b: [Field; N]) -> Field { len_plus_1(b) } -fn main(len3: [u8; 3], len4: [Field; 4]) { +fn main(x: Field, len3: [u8; 3], len4: [Field; 4]) { assert(len_plus_1(len3) == 4); assert(len_plus_1(len4) == 5); assert(add_lens(len3, len4) == 7); @@ -20,4 +20,9 @@ fn main(len3: [u8; 3], len4: [Field; 4]) { // std::array::len returns a comptime value assert(len4[len3.len()] == 4); + + // Regression for #1023, ensure .len still works after calling to_le_bytes on a witness. + // This was needed because normally .len is evaluated before acir-gen where to_le_bytes + // on a witness is only evaluated during/after acir-gen. 
+ assert(x.to_le_bytes(8).len() != 0); } diff --git a/crates/noirc_evaluator/src/ssa_refactor/acir_gen/acir_ir/acir_variable.rs b/crates/noirc_evaluator/src/ssa_refactor/acir_gen/acir_ir/acir_variable.rs index 6d8178b6a2c..25d92ed8b85 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/acir_gen/acir_ir/acir_variable.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/acir_gen/acir_ir/acir_variable.rs @@ -55,6 +55,11 @@ impl AcirType { } } + /// Returns a field type + pub(crate) fn field() -> Self { + AcirType::NumericType(NumericType::NativeField) + } + /// Returns a boolean type fn boolean() -> Self { AcirType::NumericType(NumericType::Unsigned { bit_size: 1 }) diff --git a/crates/noirc_evaluator/src/ssa_refactor/acir_gen/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/acir_gen/mod.rs index 1fce4cd76ad..da8409431ce 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/acir_gen/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/acir_gen/mod.rs @@ -996,6 +996,14 @@ impl Context { Ok(Self::convert_vars_to_values(out_vars, dfg, result_ids)) } + Intrinsic::ArrayLen => { + let len = match self.convert_value(arguments[0], dfg) { + AcirValue::Var(_, _) => unreachable!("Non-array passed to array.len() method"), + AcirValue::Array(values) => (values.len() as u128).into(), + AcirValue::DynamicArray(array) => (array.len as u128).into(), + }; + Ok(vec![AcirValue::Var(self.acir_context.add_constant(len), AcirType::field())]) + } _ => todo!("expected a black box function"), } } From e85e4850546552b7240466031e770c2667280444 Mon Sep 17 00:00:00 2001 From: jfecher Date: Tue, 1 Aug 2023 09:54:22 -0500 Subject: [PATCH 02/19] fix: Mutating a variable no longer mutates its copy (#2057) * Fix 2054 * Rename function --- .../tests/test_data/references/src/main.nr | 10 ++++++ .../src/ssa_refactor/ssa_gen/mod.rs | 31 ++++++++++++++++--- 2 files changed, 37 insertions(+), 4 deletions(-) diff --git a/crates/nargo_cli/tests/test_data/references/src/main.nr b/crates/nargo_cli/tests/test_data/references/src/main.nr index b112875b9ff..f70293cb5a6 100644 --- a/crates/nargo_cli/tests/test_data/references/src/main.nr +++ b/crates/nargo_cli/tests/test_data/references/src/main.nr @@ -32,6 +32,7 @@ fn main(mut x: Field) { assert(*c.bar.array == [3, 4]); regression_1887(); + regression_2054(); } fn add1(x: &mut Field) { @@ -77,3 +78,12 @@ impl Bar { self.x = 32; } } + +// Ensure that mutating a variable does not also mutate its copy +fn regression_2054() { + let mut x = 2; + let z = x; + + x += 1; + assert(z == 2); +} diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs index 710450eb1e6..d6169dfd218 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs @@ -89,8 +89,13 @@ impl<'a> FunctionContext<'a> { self.codegen_expression(expr).into_leaf().eval(self) } - /// Codegen for identifiers - fn codegen_ident(&mut self, ident: &ast::Ident) -> Values { + /// Codegen a reference to an ident. + /// The only difference between this and codegen_ident is that if the variable is mutable + /// as in `let mut var = ...;` the `Value::Mutable` will be returned directly instead of + /// being automatically loaded from. This is needed when taking the reference of a variable + /// to reassign to it. Note that mutable references `let x = &mut ...;` do not require this + /// since they are not automatically loaded from and must be explicitly dereferenced. 
+ fn codegen_ident_reference(&mut self, ident: &ast::Ident) -> Values { match &ident.definition { ast::Definition::Local(id) => self.lookup(*id), ast::Definition::Function(id) => self.get_or_queue_function(*id), @@ -104,6 +109,11 @@ impl<'a> FunctionContext<'a> { } } + /// Codegen an identifier, automatically loading its value if it is mutable. + fn codegen_ident(&mut self, ident: &ast::Ident) -> Values { + self.codegen_ident_reference(ident).map(|value| value.eval(self).into()) + } + fn codegen_literal(&mut self, literal: &ast::Literal) -> Values { match literal { ast::Literal::Array(array) => { @@ -159,20 +169,21 @@ impl<'a> FunctionContext<'a> { } fn codegen_unary(&mut self, unary: &ast::Unary) -> Values { - let rhs = self.codegen_expression(&unary.rhs); match unary.operator { noirc_frontend::UnaryOp::Not => { + let rhs = self.codegen_expression(&unary.rhs); let rhs = rhs.into_leaf().eval(self); self.builder.insert_not(rhs).into() } noirc_frontend::UnaryOp::Minus => { + let rhs = self.codegen_expression(&unary.rhs); let rhs = rhs.into_leaf().eval(self); let typ = self.builder.type_of_value(rhs); let zero = self.builder.numeric_constant(0u128, typ); self.builder.insert_binary(zero, BinaryOp::Sub, rhs).into() } noirc_frontend::UnaryOp::MutableReference => { - rhs.map(|rhs| { + self.codegen_reference(&unary.rhs).map(|rhs| { match rhs { value::Value::Normal(value) => { let alloc = self.builder.insert_allocate(); @@ -186,11 +197,23 @@ impl<'a> FunctionContext<'a> { }) } noirc_frontend::UnaryOp::Dereference { .. } => { + let rhs = self.codegen_expression(&unary.rhs); self.dereference(&rhs, &unary.result_type) } } } + fn codegen_reference(&mut self, expr: &Expression) -> Values { + match expr { + Expression::Ident(ident) => self.codegen_ident_reference(ident), + Expression::ExtractTupleField(tuple, index) => { + let tuple = self.codegen_reference(tuple); + Self::get_field(tuple, *index) + } + other => self.codegen_expression(other), + } + } + fn codegen_binary(&mut self, binary: &ast::Binary) -> Values { let lhs = self.codegen_non_tuple_expression(&binary.lhs); let rhs = self.codegen_non_tuple_expression(&binary.rhs); From 5cb816664e03992a766ba9dcb2650e9596fbb291 Mon Sep 17 00:00:00 2001 From: Maxim Vezenov Date: Tue, 1 Aug 2023 19:10:18 +0100 Subject: [PATCH 03/19] feat(acir_gen): RecursiveAggregation opcode and updates to black box func call generation (#2097) * update black box opcodes to accept multiple variables inputs and variable outputs, add RecursiveAggregation opcode * remove old method and comment * remove config change * remove NotImplemented InternalError --- crates/noirc_evaluator/src/errors.rs | 3 - .../acir_gen/acir_ir/acir_variable.rs | 15 +- .../acir_gen/acir_ir/generated_acir.rs | 173 +++++++++++------- .../src/ssa_refactor/acir_gen/mod.rs | 6 +- noir_stdlib/src/lib.nr | 4 +- 5 files changed, 122 insertions(+), 79 deletions(-) diff --git a/crates/noirc_evaluator/src/errors.rs b/crates/noirc_evaluator/src/errors.rs index 6d53668d7cb..27a87ccce36 100644 --- a/crates/noirc_evaluator/src/errors.rs +++ b/crates/noirc_evaluator/src/errors.rs @@ -44,8 +44,6 @@ pub enum InternalError { MissingArg { name: String, arg: String, location: Option }, #[error("ICE: {name:?} should be a constant")] NotAConstant { name: String, location: Option }, - #[error("{name:?} is not implemented yet")] - NotImplemented { name: String, location: Option }, #[error("ICE: Undeclared AcirVar")] UndeclaredAcirVar { location: Option }, #[error("ICE: Expected {expected:?}, found {found:?}")] @@ -61,7 +59,6 @@ 
impl From<InternalError> for FileDiagnostic {
             | InternalError::General { location, .. }
             | InternalError::MissingArg { location, .. }
             | InternalError::NotAConstant { location, .. }
-            | InternalError::NotImplemented { location, .. }
             | InternalError::UndeclaredAcirVar { location }
             | InternalError::UnExpected { location, .. } => {
                 let file_id = location.map(|loc| loc.file).unwrap();
diff --git a/crates/noirc_evaluator/src/ssa_refactor/acir_gen/acir_ir/acir_variable.rs b/crates/noirc_evaluator/src/ssa_refactor/acir_gen/acir_ir/acir_variable.rs
index 25d92ed8b85..9177dc9ae6c 100644
--- a/crates/noirc_evaluator/src/ssa_refactor/acir_gen/acir_ir/acir_variable.rs
+++ b/crates/noirc_evaluator/src/ssa_refactor/acir_gen/acir_ir/acir_variable.rs
@@ -265,7 +265,7 @@ impl AcirContext {
         typ: AcirType,
     ) -> Result<AcirVar, RuntimeError> {
         let inputs = vec![AcirValue::Var(lhs, typ.clone()), AcirValue::Var(rhs, typ)];
-        let outputs = self.black_box_function(BlackBoxFunc::XOR, inputs)?;
+        let outputs = self.black_box_function(BlackBoxFunc::XOR, inputs, 1)?;
         Ok(outputs[0])
     }
@@ -277,7 +277,7 @@ impl AcirContext {
         typ: AcirType,
     ) -> Result<AcirVar, RuntimeError> {
         let inputs = vec![AcirValue::Var(lhs, typ.clone()), AcirValue::Var(rhs, typ)];
-        let outputs = self.black_box_function(BlackBoxFunc::AND, inputs)?;
+        let outputs = self.black_box_function(BlackBoxFunc::AND, inputs, 1)?;
         Ok(outputs[0])
     }
@@ -304,7 +304,7 @@ impl AcirContext {
             let a = self.sub_var(max, lhs)?;
             let b = self.sub_var(max, rhs)?;
             let inputs = vec![AcirValue::Var(a, typ.clone()), AcirValue::Var(b, typ)];
-            let outputs = self.black_box_function(BlackBoxFunc::AND, inputs)?;
+            let outputs = self.black_box_function(BlackBoxFunc::AND, inputs, 1)?;
             self.sub_var(max, outputs[0])
         }
     }
@@ -682,6 +682,7 @@ impl AcirContext {
         &mut self,
         name: BlackBoxFunc,
         mut inputs: Vec<AcirValue>,
+        output_count: usize,
     ) -> Result<Vec<AcirVar>, RuntimeError> {
         // Separate out any arguments that should be constants
         let constants = match name {
@@ -717,7 +718,7 @@ impl AcirContext {
         let inputs = self.prepare_inputs_for_black_box_func_call(inputs)?;
         // Call Black box with `FunctionInput`
-        let outputs = self.acir_ir.call_black_box(name, inputs, constants)?;
+        let outputs = self.acir_ir.call_black_box(name, &inputs, constants, output_count)?;
         // Convert `Witness` values which are now constrained to be the output of the
         // black box function call into `AcirVar`s.
@@ -733,9 +734,10 @@ impl AcirContext {
     fn prepare_inputs_for_black_box_func_call(
         &mut self,
         inputs: Vec<AcirValue>,
-    ) -> Result<Vec<FunctionInput>, RuntimeError> {
+    ) -> Result<Vec<Vec<FunctionInput>>, RuntimeError> {
         let mut witnesses = Vec::new();
         for input in inputs {
+            let mut single_val_witnesses = Vec::new();
             for (input, typ) in input.flatten() {
                 let var_data = &self.vars[&input];
@@ -745,8 +747,9 @@ impl AcirContext {
                 let expr = var_data.to_expression();
                 let witness = self.acir_ir.get_or_create_witness(&expr);
                 let num_bits = typ.bit_size();
-                witnesses.push(FunctionInput { witness, num_bits });
+                single_val_witnesses.push(FunctionInput { witness, num_bits });
             }
+            witnesses.push(single_val_witnesses);
         }
         Ok(witnesses)
     }
diff --git a/crates/noirc_evaluator/src/ssa_refactor/acir_gen/acir_ir/generated_acir.rs b/crates/noirc_evaluator/src/ssa_refactor/acir_gen/acir_ir/generated_acir.rs
index c368a042dc9..738387fbaab 100644
--- a/crates/noirc_evaluator/src/ssa_refactor/acir_gen/acir_ir/generated_acir.rs
+++ b/crates/noirc_evaluator/src/ssa_refactor/acir_gen/acir_ir/generated_acir.rs
@@ -122,12 +122,14 @@ impl GeneratedAcir {
     pub(crate) fn call_black_box(
         &mut self,
         func_name: BlackBoxFunc,
-        mut inputs: Vec<FunctionInput>,
+        inputs: &[Vec<FunctionInput>],
         constants: Vec<FieldElement>,
+        output_count: usize,
     ) -> Result<Vec<Witness>, InternalError> {
-        intrinsics_check_inputs(func_name, &inputs)?;
+        let input_count = inputs.iter().fold(0usize, |sum, val| sum + val.len());
+        intrinsics_check_inputs(func_name, input_count);
+        intrinsics_check_outputs(func_name, output_count);
-        let output_count = black_box_expected_output_size(func_name)?;
         let outputs = vecmap(0..output_count, |_| self.next_witness_index());
         // clone is needed since outputs is moved when used in blackbox function.
@@ -135,57 +137,60 @@ impl GeneratedAcir {
         let black_box_func_call = match func_name {
             BlackBoxFunc::AND => {
-                BlackBoxFuncCall::AND { lhs: inputs[0], rhs: inputs[1], output: outputs[0] }
+                BlackBoxFuncCall::AND { lhs: inputs[0][0], rhs: inputs[1][0], output: outputs[0] }
             }
             BlackBoxFunc::XOR => {
-                BlackBoxFuncCall::XOR { lhs: inputs[0], rhs: inputs[1], output: outputs[0] }
+                BlackBoxFuncCall::XOR { lhs: inputs[0][0], rhs: inputs[1][0], output: outputs[0] }
             }
-            BlackBoxFunc::RANGE => BlackBoxFuncCall::RANGE { input: inputs[0] },
-            BlackBoxFunc::SHA256 => BlackBoxFuncCall::SHA256 { inputs, outputs },
-            BlackBoxFunc::Blake2s => BlackBoxFuncCall::Blake2s { inputs, outputs },
-            BlackBoxFunc::HashToField128Security => {
-                BlackBoxFuncCall::HashToField128Security { inputs, output: outputs[0] }
+            BlackBoxFunc::RANGE => BlackBoxFuncCall::RANGE { input: inputs[0][0] },
+            BlackBoxFunc::SHA256 => BlackBoxFuncCall::SHA256 { inputs: inputs[0].clone(), outputs },
+            BlackBoxFunc::Blake2s => {
+                BlackBoxFuncCall::Blake2s { inputs: inputs[0].clone(), outputs }
             }
+            BlackBoxFunc::HashToField128Security => BlackBoxFuncCall::HashToField128Security {
+                inputs: inputs[0].clone(),
+                output: outputs[0],
+            },
             BlackBoxFunc::SchnorrVerify => BlackBoxFuncCall::SchnorrVerify {
-                public_key_x: inputs[0],
-                public_key_y: inputs[1],
+                public_key_x: inputs[0][0],
+                public_key_y: inputs[1][0],
                 // Schnorr signature is an r & s, 32 bytes each
-                signature: inputs[2..66].to_vec(),
-                message: inputs[66..].to_vec(),
+                signature: inputs[2].clone(),
+                message: inputs[3].clone(),
                 output: outputs[0],
             },
             BlackBoxFunc::Pedersen => BlackBoxFuncCall::Pedersen {
-                inputs,
+                inputs: inputs[0].clone(),
                 outputs: (outputs[0], outputs[1]),
                 domain_separator: constants[0].to_u128() as u32,
             },
             BlackBoxFunc::EcdsaSecp256k1 => BlackBoxFuncCall::EcdsaSecp256k1 {
                 // 32 bytes for each public key co-ordinate
-                public_key_x: inputs[0..32].to_vec(),
-                public_key_y: inputs[32..64].to_vec(),
+                public_key_x: inputs[0].clone(),
+                public_key_y: inputs[1].clone(),
                 // (r,s) are both 32 bytes each, so signature
                 // takes up 64 bytes
-                signature: inputs[64..128].to_vec(),
-                hashed_message: inputs[128..].to_vec(),
+                signature: inputs[2].clone(),
+                hashed_message: inputs[3].clone(),
                 output: outputs[0],
             },
             BlackBoxFunc::EcdsaSecp256r1 => BlackBoxFuncCall::EcdsaSecp256r1 {
                 // 32 bytes for each public key co-ordinate
-                public_key_x: inputs[0..32].to_vec(),
-                public_key_y: inputs[32..64].to_vec(),
+                public_key_x: inputs[0].clone(),
+                public_key_y: inputs[1].clone(),
                 // (r,s) are both 32 bytes each, so signature
                 // takes up 64 bytes
-                signature: inputs[64..128].to_vec(),
-                hashed_message: inputs[128..].to_vec(),
+                signature: inputs[2].clone(),
+                hashed_message: inputs[3].clone(),
                 output: outputs[0],
             },
             BlackBoxFunc::FixedBaseScalarMul => BlackBoxFuncCall::FixedBaseScalarMul {
-                input: inputs[0],
+                input: inputs[0][0],
                 outputs: (outputs[0], outputs[1]),
             },
             BlackBoxFunc::Keccak256 => {
-                let var_message_size = match inputs.pop() {
-                    Some(var_message_size) => var_message_size,
+                let var_message_size = match inputs.to_vec().pop() {
+                    Some(var_message_size) => var_message_size[0],
                     None => {
                         return Err(InternalError::MissingArg {
                             name: "".to_string(),
@@ -194,14 +199,31 @@ impl GeneratedAcir {
                         });
                     }
                 };
-                BlackBoxFuncCall::Keccak256VariableLength { inputs, var_message_size, outputs }
+                BlackBoxFuncCall::Keccak256VariableLength {
+                    inputs: inputs[0].clone(),
+                    var_message_size,
+                    outputs,
+                }
             }
-            // TODO(#1570): Generate ACIR for recursive aggregation
             BlackBoxFunc::RecursiveAggregation => {
-                return Err(InternalError::NotImplemented {
-                    name: "recursive aggregation".to_string(),
-                    location: None,
-                })
+                let has_previous_aggregation = self.opcodes.iter().any(|op| {
+                    matches!(
+                        op,
+                        AcirOpcode::BlackBoxFuncCall(BlackBoxFuncCall::RecursiveAggregation { .. })
+                    )
+                });
+
+                let input_aggregation_object =
+                    if !has_previous_aggregation { None } else { Some(inputs[4].clone()) };
+
+                BlackBoxFuncCall::RecursiveAggregation {
+                    verification_key: inputs[0].clone(),
+                    proof: inputs[1].clone(),
+                    public_inputs: inputs[2].clone(),
+                    key_hash: inputs[3][0],
+                    input_aggregation_object,
+                    output_aggregation_object: outputs,
+                }
             }
         };
@@ -819,68 +841,60 @@ impl GeneratedAcir {
 /// This function will return the number of inputs that a blackbox function
 /// expects. Returning `None` if there is no expectation.
-fn black_box_func_expected_input_size(name: BlackBoxFunc) -> Result<Option<usize>, InternalError> {
+fn black_box_func_expected_input_size(name: BlackBoxFunc) -> Option<usize> {
     match name {
         // Bitwise opcodes will take in 2 parameters
-        BlackBoxFunc::AND | BlackBoxFunc::XOR => Ok(Some(2)),
+        BlackBoxFunc::AND | BlackBoxFunc::XOR => Some(2),
         // All of the hash/cipher methods will take in a
         // variable number of inputs.
         BlackBoxFunc::Keccak256
         | BlackBoxFunc::SHA256
         | BlackBoxFunc::Blake2s
         | BlackBoxFunc::Pedersen
-        | BlackBoxFunc::HashToField128Security => Ok(None),
+        | BlackBoxFunc::HashToField128Security => None,
         // Can only apply a range constraint to one
        // witness at a time.
-        BlackBoxFunc::RANGE => Ok(Some(1)),
+        BlackBoxFunc::RANGE => Some(1),
        // Signature verification algorithms will take in a variable
        // number of inputs, since the message/hashed-message can vary in size.
        BlackBoxFunc::SchnorrVerify
        | BlackBoxFunc::EcdsaSecp256k1
-        | BlackBoxFunc::EcdsaSecp256r1 => Ok(None),
+        | BlackBoxFunc::EcdsaSecp256r1 => None,
        // Inputs for fixed based scalar multiplication
        // is just a scalar
-        BlackBoxFunc::FixedBaseScalarMul => Ok(Some(1)),
-        // TODO(#1570): Generate ACIR for recursive aggregation
-        // RecursiveAggregation has variable inputs and we could return `None` here,
-        // but as it is not fully implemented we return an ICE error for now
-        BlackBoxFunc::RecursiveAggregation => Err(InternalError::NotImplemented {
-            name: "recursive aggregation".to_string(),
-            location: None,
-        }),
+        BlackBoxFunc::FixedBaseScalarMul => Some(1),
+        // Recursive aggregation has a variable number of inputs
+        BlackBoxFunc::RecursiveAggregation => None,
     }
 }
 /// This function will return the number of outputs that a blackbox function
 /// expects. Returning `None` if there is no expectation.
-fn black_box_expected_output_size(name: BlackBoxFunc) -> Result<usize, InternalError> {
+fn black_box_expected_output_size(name: BlackBoxFunc) -> Option<usize> {
     match name {
        // Bitwise opcodes will return 1 parameter which is the output
        // or the operation.
-        BlackBoxFunc::AND | BlackBoxFunc::XOR => Ok(1),
+        BlackBoxFunc::AND | BlackBoxFunc::XOR => Some(1),
        // 32 byte hash algorithms
-        BlackBoxFunc::Keccak256 | BlackBoxFunc::SHA256 | BlackBoxFunc::Blake2s => Ok(32),
+        BlackBoxFunc::Keccak256 | BlackBoxFunc::SHA256 | BlackBoxFunc::Blake2s => Some(32),
        // Hash to field returns a field element
-        BlackBoxFunc::HashToField128Security => Ok(1),
+        BlackBoxFunc::HashToField128Security => Some(1),
        // Pedersen returns a point
-        BlackBoxFunc::Pedersen => Ok(2),
+        BlackBoxFunc::Pedersen => Some(2),
        // Can only apply a range constraint to one
        // witness at a time.
-        BlackBoxFunc::RANGE => Ok(0),
+        BlackBoxFunc::RANGE => Some(0),
        // Signature verification algorithms will return a boolean
        BlackBoxFunc::SchnorrVerify
        | BlackBoxFunc::EcdsaSecp256k1
-        | BlackBoxFunc::EcdsaSecp256r1 => Ok(1),
+        | BlackBoxFunc::EcdsaSecp256r1 => Some(1),
        // Output of fixed based scalar mul over the embedded curve
        // will be 2 field elements representing the point.
-        BlackBoxFunc::FixedBaseScalarMul => Ok(2),
-        // TODO(#1570): Generate ACIR for recursive aggregation
-        BlackBoxFunc::RecursiveAggregation => Err(InternalError::NotImplemented {
-            name: "recursive aggregation".to_string(),
-            location: None,
-        }),
+        BlackBoxFunc::FixedBaseScalarMul => Some(2),
+        // Recursive aggregation has a variable number of outputs
+        BlackBoxFunc::RecursiveAggregation => None,
     }
 }
@@ -899,16 +913,41 @@ fn black_box_expected_output_size(name: BlackBoxFunc) -> Result<usize, InternalError> {
 /// #[foreign(sha256)]
 /// fn sha256<N>(_input : [u8; N]) -> [u8; 32] {}
 /// ``
-fn intrinsics_check_inputs(
-    name: BlackBoxFunc,
-    inputs: &[FunctionInput],
-) -> Result<(), InternalError> {
-    let expected_num_inputs = match black_box_func_expected_input_size(name)? {
+fn intrinsics_check_inputs(name: BlackBoxFunc, input_count: usize) {
+    let expected_num_inputs = match black_box_func_expected_input_size(name) {
+        Some(expected_num_inputs) => expected_num_inputs,
+        None => return,
+    };
+
+    assert_eq!(expected_num_inputs,input_count,"Tried to call black box function {name} with {input_count} inputs, but this function's definition requires {expected_num_inputs} inputs");
+}
+
+/// Checks that the number of outputs being used to call the blackbox function
+/// is correct according to the function definition.
+///
+/// Some functions expect a variable number of outputs and in such a case,
+/// this method will do nothing.
An example of this is recursive aggregation. +/// In that case, this function will not check anything. +/// +/// Since we expect black box functions to be called behind a Noir shim function, +/// we trigger a compiler error if the inputs do not match. +/// +/// An example of Noir shim function is the following: +/// `` +/// #[foreign(sha256)] +/// fn verify_proof( +/// _verification_key : [Field], +/// _proof : [Field], +/// _public_inputs : [Field], +/// _key_hash : Field, +/// _input_aggregation_object : [Field; N] +/// ) -> [Field; N] {} +/// `` +fn intrinsics_check_outputs(name: BlackBoxFunc, output_count: usize) { + let expected_num_outputs = match black_box_expected_output_size(name) { Some(expected_num_inputs) => expected_num_inputs, - None => return Ok(()), + None => return, }; - let got_num_inputs = inputs.len(); - assert_eq!(expected_num_inputs,inputs.len(),"Tried to call black box function {name} with {got_num_inputs} inputs, but this function's definition requires {expected_num_inputs} inputs"); - Ok(()) + assert_eq!(expected_num_outputs,output_count,"Tried to call black box function {name} with {output_count} inputs, but this function's definition requires {expected_num_outputs} inputs"); } diff --git a/crates/noirc_evaluator/src/ssa_refactor/acir_gen/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/acir_gen/mod.rs index da8409431ce..5253cb71875 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/acir_gen/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/acir_gen/mod.rs @@ -943,7 +943,11 @@ impl Context { Intrinsic::BlackBox(black_box) => { let inputs = vecmap(arguments, |arg| self.convert_value(*arg, dfg)); - let vars = self.acir_context.black_box_function(black_box, inputs)?; + let output_count = result_ids.iter().fold(0usize, |sum, result_id| { + sum + dfg.try_get_array_length(*result_id).unwrap_or(1) + }); + + let vars = self.acir_context.black_box_function(black_box, inputs, output_count)?; Ok(Self::convert_vars_to_values(vars, dfg, result_ids)) } diff --git a/noir_stdlib/src/lib.nr b/noir_stdlib/src/lib.nr index f6c01ecdfaa..e654a20b1d8 100644 --- a/noir_stdlib/src/lib.nr +++ b/noir_stdlib/src/lib.nr @@ -18,11 +18,11 @@ mod compat; // Oracle calls are required to be wrapped in an unconstrained function // Thus, the only argument to the `println` oracle is expected to always be an ident #[oracle(println)] -unconstrained fn println_oracle(_input: T) {} +unconstrained fn println_oracle(_input: T) {} unconstrained fn println(input: T) { println_oracle(input); } #[foreign(recursive_aggregation)] -fn verify_proof(_verification_key : [Field], _proof : [Field], _public_inputs : [Field], _key_hash : Field, _input_aggregation_object : [Field]) -> [Field] {} +fn verify_proof(_verification_key : [Field], _proof : [Field], _public_inputs : [Field], _key_hash : Field, _input_aggregation_object : [Field; N]) -> [Field; N] {} From 3c827217900d19a710ee8a49d782ed3d43a6336c Mon Sep 17 00:00:00 2001 From: Maxim Vezenov Date: Tue, 1 Aug 2023 19:54:33 +0100 Subject: [PATCH 04/19] feat: Format strings for prints (#1952) * initial stdlib methods to start refactoring logign * foreign call enum * working println and println_format w/ brillig oracles * fix up brillig_oracle test * uncomment regression test for slice return from foreign calls in brillig * cargo clippy * got structs serialized correctly without aos_to_soa * remove dbg * working println_format * cargo clippy * rename enable_slices to experimental_ssa * remove dbg and fix format_field_string * initial work towards FmtStr literal * 
working format strins with one unified println method, still have some cleanup to-do, use Display/Debug for pretty printing * remove old comment * moved resolution of string to fmt string only when passing literals to functions * delete temp intrinsic for println new * remove unnecessary subtype * remove debugging code w/ def id as part of mono pass Ident * cleanup formatting stuff * cargo clippy * resolver test for fmt string * remove TODO comment * cargo clippy * pr comments * expose full fmtstr type to the user * add back fmt string resolver test * don't allow comparison of format strings * use JsonType Display trait * add issue for printing func params * remove Token::F variant * remove old append_abi_arg func * add comments to append_abi-arg * fix: format printing function parameters, store exprs rather than idents as part of HirLiteral::FmtStr * remove ve old comment about not being able to use witness values in fmt strings * push fix for asfs{x}{x} case and more specific regex for idents * Update crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs Co-authored-by: jfecher * remove is_match check * breakout literal fmt str case in resolver to its own func * update resolve_fmt_strings test * switch to_owned placement in resolve_fmt_str_literal * Update crates/noirc_frontend/src/ast/mod.rs Co-authored-by: jfecher * fix find_numeric_generics_in_type * add case of fmt string in if statement * add contains_numeric_typevar cases for string and fmtstring * add unify and subtype checks and fix resolver fmt string test * working generic fmtstr types * separate fmtstr codegen into variables * Update crates/noirc_frontend/src/parser/parser.rs * Update crates/noirc_abi/src/input_parser/json.rs Co-authored-by: jfecher * Update crates/noirc_frontend/src/ast/mod.rs Co-authored-by: jfecher * Update crates/noirc_frontend/src/monomorphization/mod.rs Co-authored-by: jfecher * Update crates/noirc_frontend/src/monomorphization/mod.rs Co-authored-by: jfecher * Update crates/noirc_frontend/src/monomorphization/mod.rs Co-authored-by: jfecher * Update crates/noirc_frontend/src/parser/parser.rs Co-authored-by: jfecher * keep the size of fmtrstr type as mandatory * print original fmt string in monomorphization printer * print literal update for fmtstr * add parens to f-string literal printer --------- Co-authored-by: jfecher --- Cargo.lock | 2 + crates/nargo/Cargo.toml | 3 +- crates/nargo/src/ops/foreign_calls.rs | 89 +++++++++++++++---- .../tests/test_data/debug_logs/src/main.nr | 48 +++++++++- .../src/ssa_refactor/ssa_gen/context.rs | 12 +++ .../src/ssa_refactor/ssa_gen/mod.rs | 12 +++ crates/noirc_frontend/Cargo.toml | 1 + crates/noirc_frontend/src/ast/expression.rs | 6 ++ crates/noirc_frontend/src/ast/mod.rs | 6 +- .../src/hir/resolution/errors.rs | 7 ++ .../src/hir/resolution/resolver.rs | 86 +++++++++++++++++- .../noirc_frontend/src/hir/type_check/expr.rs | 5 ++ crates/noirc_frontend/src/hir_def/expr.rs | 1 + crates/noirc_frontend/src/hir_def/types.rs | 43 ++++++++- crates/noirc_frontend/src/lexer/lexer.rs | 17 +++- crates/noirc_frontend/src/lexer/token.rs | 7 +- .../src/monomorphization/ast.rs | 7 +- .../src/monomorphization/mod.rs | 75 ++++++++++++++-- .../src/monomorphization/printer.rs | 5 ++ crates/noirc_frontend/src/node_interner.rs | 9 +- crates/noirc_frontend/src/parser/parser.rs | 15 ++++ 21 files changed, 414 insertions(+), 42 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4e1510c8df9..1b7a70b2063 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1982,6 +1982,7 @@ dependencies = [ 
"noirc_abi", "noirc_driver", "noirc_errors", + "regex", "rustc_version", "serde", "serde_json", @@ -2128,6 +2129,7 @@ dependencies = [ "iter-extended", "noirc_abi", "noirc_errors", + "regex", "rustc-hash", "serde", "serde_json", diff --git a/crates/nargo/Cargo.toml b/crates/nargo/Cargo.toml index 6c053cba931..afbafdff931 100644 --- a/crates/nargo/Cargo.toml +++ b/crates/nargo/Cargo.toml @@ -20,4 +20,5 @@ serde.workspace = true serde_json.workspace = true thiserror.workspace = true noirc_errors.workspace = true -base64.workspace = true \ No newline at end of file +base64.workspace = true +regex = "1.9.1" diff --git a/crates/nargo/src/ops/foreign_calls.rs b/crates/nargo/src/ops/foreign_calls.rs index 4bbd4eb58bc..2abc62b1032 100644 --- a/crates/nargo/src/ops/foreign_calls.rs +++ b/crates/nargo/src/ops/foreign_calls.rs @@ -4,6 +4,7 @@ use acvm::{ }; use iter_extended::vecmap; use noirc_abi::{decode_string_value, input_parser::InputValueDisplay, AbiType}; +use regex::{Captures, Regex}; use crate::errors::ForeignCallError; @@ -63,31 +64,89 @@ impl ForeignCall { } fn execute_println(foreign_call_inputs: &[Vec]) -> Result<(), ForeignCallError> { - let (abi_type, input_values) = fetch_abi_type(foreign_call_inputs)?; + let (is_fmt_str, foreign_call_inputs) = + foreign_call_inputs.split_last().ok_or(ForeignCallError::MissingForeignCallInputs)?; - // We must use a flat map here as each value in a struct will be in a separate input value - let mut input_values_as_fields = - input_values.iter().flat_map(|values| values.iter().map(|value| value.to_field())); - - let input_value_display = - InputValueDisplay::try_from_fields(&mut input_values_as_fields, abi_type)?; - - println!("{input_value_display}"); + let output_string = if is_fmt_str[0].to_field().is_one() { + convert_fmt_string_inputs(foreign_call_inputs)? + } else { + convert_string_inputs(foreign_call_inputs)? 
+ }; + println!("{output_string}"); Ok(()) } } -/// Fetch the abi type from the foreign call input -/// The remaining input values should hold the values to be printed -fn fetch_abi_type( - foreign_call_inputs: &[Vec], -) -> Result<(AbiType, &[Vec]), ForeignCallError> { +fn convert_string_inputs(foreign_call_inputs: &[Vec]) -> Result { + // Fetch the abi type from the foreign call input + // The remaining input values should hold what is to be printed let (abi_type_as_values, input_values) = foreign_call_inputs.split_last().ok_or(ForeignCallError::MissingForeignCallInputs)?; + let abi_type = fetch_abi_type(abi_type_as_values)?; + + // We must use a flat map here as each value in a struct will be in a separate input value + let mut input_values_as_fields = + input_values.iter().flat_map(|values| vecmap(values, |value| value.to_field())); + + let input_value_display = + InputValueDisplay::try_from_fields(&mut input_values_as_fields, abi_type)?; + + Ok(input_value_display.to_string()) +} + +fn convert_fmt_string_inputs( + foreign_call_inputs: &[Vec], +) -> Result { + let (message_as_values, input_and_abi_values) = + foreign_call_inputs.split_first().ok_or(ForeignCallError::MissingForeignCallInputs)?; + + let message_as_fields = vecmap(message_as_values, |value| value.to_field()); + let message_as_string = decode_string_value(&message_as_fields); + + let (num_values, input_and_abi_values) = + input_and_abi_values.split_first().ok_or(ForeignCallError::MissingForeignCallInputs)?; + + let mut output_strings = Vec::new(); + let num_values = num_values[0].to_field().to_u128() as usize; + + let mut abi_types = Vec::new(); + for abi_values in input_and_abi_values.iter().skip(input_and_abi_values.len() - num_values) { + let abi_type = fetch_abi_type(abi_values)?; + abi_types.push(abi_type); + } + + for i in 0..num_values { + let abi_type = &abi_types[i]; + let type_size = abi_type.field_count() as usize; + + let mut input_values_as_fields = input_and_abi_values[i..(i + type_size)] + .iter() + .flat_map(|values| vecmap(values, |value| value.to_field())); + + let input_value_display = + InputValueDisplay::try_from_fields(&mut input_values_as_fields, abi_type.clone())?; + + output_strings.push(input_value_display.to_string()); + } + + let mut output_strings_iter = output_strings.into_iter(); + let re = Regex::new(r"\{([a-zA-Z0-9_]+)\}") + .expect("ICE: an invalid regex pattern was used for checking format strings"); + + let formatted_str = re.replace_all(&message_as_string, |_: &Captures| { + output_strings_iter + .next() + .expect("ICE: there are more regex matches than fields supplied to the format string") + }); + + Ok(formatted_str.into_owned()) +} + +fn fetch_abi_type(abi_type_as_values: &[Value]) -> Result { let abi_type_as_fields = vecmap(abi_type_as_values, |value| value.to_field()); let abi_type_as_string = decode_string_value(&abi_type_as_fields); let abi_type: AbiType = serde_json::from_str(&abi_type_as_string) .map_err(|err| ForeignCallError::InputParserError(err.into()))?; - Ok((abi_type, input_values)) + Ok(abi_type) } diff --git a/crates/nargo_cli/tests/test_data/debug_logs/src/main.nr b/crates/nargo_cli/tests/test_data/debug_logs/src/main.nr index 29386feb98c..c8d37a938c7 100644 --- a/crates/nargo_cli/tests/test_data/debug_logs/src/main.nr +++ b/crates/nargo_cli/tests/test_data/debug_logs/src/main.nr @@ -1,14 +1,56 @@ use dep::std; fn main(x : Field, y : pub Field) { + let string = "i: {i}, j: {j}"; + std::println(string); + + // A `fmtstr` lets you easily perform string interpolation. 
+ let fmt_str: fmtstr<14, (Field, Field)> = f"i: {x}, j: {y}"; + let fmt_str = string_identity(fmt_str); + std::println(fmt_str); + + let fmt_str_no_type = f"i: {x}, j: {y}"; + std::println(fmt_str_no_type); + + let fmt_str_generic = string_with_generics(fmt_str_no_type); + std::println(fmt_str_generic); + + let s = myStruct { y: x, x: y }; + std::println(s); + + std::println(f"randomstring{x}{x}"); + + let fmt_str = string_with_partial_generics(f"i: {x}, s: {s}"); + std::println(fmt_str); - std::println("*** println ***"); std::println(x); std::println([x, y]); - let s = myStruct { y: x, x: y }; let foo = fooStruct { my_struct: s, foo: 15 }; - std::println(foo); + std::println(f"s: {s}, foo: {foo}"); + + std::println(f"x: 0, y: 1"); + + let s_2 = myStruct { x: 20, y: 30 }; + std::println(f"s1: {s}, s2: {s_2}"); + + let bar = fooStruct { my_struct: s_2, foo: 20 }; + std::println(f"foo1: {foo}, foo2: {bar}"); + + let struct_string = if x != 5 { f"{foo}" } else { f"{bar}" }; + std::println(struct_string); +} + +fn string_identity(string: fmtstr<14, (Field, Field)>) -> fmtstr<14, (Field, Field)> { + string +} + +fn string_with_generics(string: fmtstr) -> fmtstr { + string +} + +fn string_with_partial_generics(string: fmtstr) -> fmtstr { + string } struct myStruct { diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs index 769ee6aa09f..c485200a53e 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs @@ -177,6 +177,15 @@ impl<'a> FunctionContext<'a> { ast::Type::MutableReference(element) => { Self::map_type_helper(element, &mut |_| f(Type::Reference)) } + ast::Type::FmtString(len, fields) => { + // A format string is represented by multiple values + // The message string, the number of fields to be formatted, and + // then the encapsulated fields themselves + let final_fmt_str_fields = + vec![ast::Type::String(*len), ast::Type::Field, *fields.clone()]; + let fmt_str_tuple = ast::Type::Tuple(final_fmt_str_fields); + Self::map_type_helper(&fmt_str_tuple, f) + } other => Tree::Leaf(f(Self::convert_non_tuple_type(other))), } } @@ -204,6 +213,9 @@ impl<'a> FunctionContext<'a> { ast::Type::Integer(Signedness::Unsigned, bits) => Type::unsigned(*bits), ast::Type::Bool => Type::unsigned(1), ast::Type::String(len) => Type::Array(Rc::new(vec![Type::char()]), *len as usize), + ast::Type::FmtString(_, _) => { + panic!("convert_non_tuple_type called on a fmt string: {typ}") + } ast::Type::Unit => panic!("convert_non_tuple_type called on a unit type"), ast::Type::Tuple(_) => panic!("convert_non_tuple_type called on a tuple: {typ}"), ast::Type::Function(_, _) => Type::Function, diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs index d6169dfd218..0c0dd35211b 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs @@ -135,6 +135,18 @@ impl<'a> FunctionContext<'a> { let typ = Self::convert_non_tuple_type(&ast::Type::String(elements.len() as u64)); self.codegen_array(elements, typ) } + ast::Literal::FmtStr(string, number_of_fields, fields) => { + // A caller needs multiple pieces of information to make use of a format string + // The message string, the number of fields to be formatted, and the fields themselves + let string = Expression::Literal(ast::Literal::Str(string.clone())); + let number_of_fields = 
Expression::Literal(ast::Literal::Integer( + (*number_of_fields as u128).into(), + ast::Type::Field, + )); + let fields = *fields.clone(); + let fmt_str_tuple = &[string, number_of_fields, fields]; + self.codegen_tuple(fmt_str_tuple) + } } } diff --git a/crates/noirc_frontend/Cargo.toml b/crates/noirc_frontend/Cargo.toml index a9a62673af6..1f902d2d399 100644 --- a/crates/noirc_frontend/Cargo.toml +++ b/crates/noirc_frontend/Cargo.toml @@ -20,6 +20,7 @@ serde.workspace = true serde_json.workspace = true rustc-hash = "1.1.0" small-ord-set = "0.1.3" +regex = "1.9.1" [dev-dependencies] strum = "0.24" diff --git a/crates/noirc_frontend/src/ast/expression.rs b/crates/noirc_frontend/src/ast/expression.rs index 1f1d226310f..b1829e8c1ee 100644 --- a/crates/noirc_frontend/src/ast/expression.rs +++ b/crates/noirc_frontend/src/ast/expression.rs @@ -72,6 +72,10 @@ impl ExpressionKind { ExpressionKind::Literal(Literal::Str(contents)) } + pub fn format_string(contents: String) -> ExpressionKind { + ExpressionKind::Literal(Literal::FmtStr(contents)) + } + pub fn constructor((type_name, fields): (Path, Vec<(Ident, Expression)>)) -> ExpressionKind { ExpressionKind::Constructor(Box::new(ConstructorExpression { type_name, fields })) } @@ -298,6 +302,7 @@ pub enum Literal { Bool(bool), Integer(FieldElement), Str(String), + FmtStr(String), Unit, } @@ -473,6 +478,7 @@ impl Display for Literal { Literal::Bool(boolean) => write!(f, "{}", if *boolean { "true" } else { "false" }), Literal::Integer(integer) => write!(f, "{}", integer.to_u128()), Literal::Str(string) => write!(f, "\"{string}\""), + Literal::FmtStr(string) => write!(f, "f\"{string}\""), Literal::Unit => write!(f, "()"), } } diff --git a/crates/noirc_frontend/src/ast/mod.rs b/crates/noirc_frontend/src/ast/mod.rs index ed73cce486a..b52c3e685d3 100644 --- a/crates/noirc_frontend/src/ast/mod.rs +++ b/crates/noirc_frontend/src/ast/mod.rs @@ -36,6 +36,7 @@ pub enum UnresolvedType { Bool(CompTime), Expression(UnresolvedTypeExpression), String(Option), + FormatString(UnresolvedTypeExpression, Box), Unit, /// A Named UnresolvedType can be a struct type or a type variable @@ -102,9 +103,10 @@ impl std::fmt::Display for UnresolvedType { Expression(expression) => expression.fmt(f), Bool(is_const) => write!(f, "{is_const}bool"), String(len) => match len { - None => write!(f, "str[]"), - Some(len) => write!(f, "str[{len}]"), + None => write!(f, "str<_>"), + Some(len) => write!(f, "str<{len}>"), }, + FormatString(len, elements) => write!(f, "fmt<{len}, {elements}"), Function(args, ret) => { let args = vecmap(args, ToString::to_string); write!(f, "fn({}) -> {ret}", args.join(", ")) diff --git a/crates/noirc_frontend/src/hir/resolution/errors.rs b/crates/noirc_frontend/src/hir/resolution/errors.rs index 82688928575..e9cf8f31393 100644 --- a/crates/noirc_frontend/src/hir/resolution/errors.rs +++ b/crates/noirc_frontend/src/hir/resolution/errors.rs @@ -74,6 +74,8 @@ pub enum ResolverError { MutableReferenceToArrayElement { span: Span }, #[error("Function is not defined in a contract yet sets is_internal")] ContractFunctionInternalInNormalFunction { span: Span }, + #[error("Numeric constants should be printed without formatting braces")] + NumericConstantInFormatString { name: String, span: Span }, } impl ResolverError { @@ -283,6 +285,11 @@ impl From for Diagnostic { "Non-contract functions cannot be 'internal'".into(), span, ), + ResolverError::NumericConstantInFormatString { name, span } => Diagnostic::simple_error( + format!("cannot find `{name}` in this scope "), + 
"Numeric constants should be printed without formatting braces".to_string(), + span, + ), } } } diff --git a/crates/noirc_frontend/src/hir/resolution/resolver.rs b/crates/noirc_frontend/src/hir/resolution/resolver.rs index 29b3cc485d5..fe19cb633e4 100644 --- a/crates/noirc_frontend/src/hir/resolution/resolver.rs +++ b/crates/noirc_frontend/src/hir/resolution/resolver.rs @@ -18,6 +18,7 @@ use crate::hir_def::expr::{ HirMethodCallExpression, HirPrefixExpression, }; use crate::token::Attribute; +use regex::Regex; use std::collections::{HashMap, HashSet}; use std::rc::Rc; @@ -347,6 +348,11 @@ impl<'a> Resolver<'a> { let resolved_size = self.resolve_array_size(size, new_variables); Type::String(Box::new(resolved_size)) } + UnresolvedType::FormatString(size, fields) => { + let resolved_size = self.convert_expression_type(size); + let fields = self.resolve_type_inner(*fields, new_variables); + Type::FmtString(Box::new(resolved_size), Box::new(fields)) + } UnresolvedType::Unit => Type::Unit, UnresolvedType::Unspecified => Type::Error, UnresolvedType::Error => Type::Error, @@ -775,7 +781,6 @@ impl<'a> Resolver<'a> { Type::FieldElement(_) | Type::Integer(_, _, _) | Type::Bool(_) - | Type::String(_) | Type::Unit | Type::Error | Type::TypeVariable(_, _) @@ -784,10 +789,11 @@ impl<'a> Resolver<'a> { | Type::NotConstant | Type::Forall(_, _) => (), - Type::Array(length, _) => { + Type::Array(length, element_type) => { if let Type::NamedGeneric(type_variable, name) = length.as_ref() { found.insert(name.to_string(), type_variable.clone()); } + Self::find_numeric_generics_in_type(element_type, found); } Type::Tuple(fields) => { @@ -813,6 +819,17 @@ impl<'a> Resolver<'a> { } } Type::MutableReference(element) => Self::find_numeric_generics_in_type(element, found), + Type::String(length) => { + if let Type::NamedGeneric(type_variable, name) = length.as_ref() { + found.insert(name.to_string(), type_variable.clone()); + } + } + Type::FmtString(length, fields) => { + if let Type::NamedGeneric(type_variable, name) = length.as_ref() { + found.insert(name.to_string(), type_variable.clone()); + } + Self::find_numeric_generics_in_type(fields, found); + } } } @@ -904,6 +921,7 @@ impl<'a> Resolver<'a> { } Literal::Integer(integer) => HirLiteral::Integer(integer), Literal::Str(str) => HirLiteral::Str(str), + Literal::FmtStr(str) => self.resolve_fmt_str_literal(str, expr.span), Literal::Unit => HirLiteral::Unit, }), ExpressionKind::Variable(path) => { @@ -939,6 +957,7 @@ impl<'a> Resolver<'a> { ExpressionKind::Call(call_expr) => { // Get the span and name of path for error reporting let func = self.resolve_expression(*call_expr.func); + let arguments = vecmap(call_expr.arguments, |arg| self.resolve_expression(arg)); let location = Location::new(expr.span, self.file); HirExpression::Call(HirCallExpression { func, arguments, location }) @@ -1288,6 +1307,36 @@ impl<'a> Resolver<'a> { let module_id = self.path_resolver.module_id(); module_id.module(self.def_maps).is_contract } + + fn resolve_fmt_str_literal(&mut self, str: String, call_expr_span: Span) -> HirLiteral { + let re = Regex::new(r"\{([a-zA-Z0-9_]+)\}") + .expect("ICE: an invalid regex pattern was used for checking format strings"); + let mut fmt_str_idents = Vec::new(); + for field in re.find_iter(&str) { + let matched_str = field.as_str(); + let ident_name = &matched_str[1..(matched_str.len() - 1)]; + + let scope_tree = self.scopes.current_scope_tree(); + let variable = scope_tree.find(ident_name); + if let Some((old_value, _)) = variable { + 
old_value.num_times_used += 1; + let expr_id = self.interner.push_expr(HirExpression::Ident(old_value.ident)); + self.interner.push_expr_location(expr_id, call_expr_span, self.file); + fmt_str_idents.push(expr_id); + } else if ident_name.parse::().is_ok() { + self.errors.push(ResolverError::NumericConstantInFormatString { + name: ident_name.to_owned(), + span: call_expr_span, + }); + } else { + self.errors.push(ResolverError::VariableNotDeclared { + name: ident_name.to_owned(), + span: call_expr_span, + }); + } + } + HirLiteral::FmtStr(str, fmt_str_idents) + } } /// Gives an error if a user tries to create a mutable reference @@ -1572,6 +1621,39 @@ mod test { assert!(errors.is_empty()); } + #[test] + fn resolve_fmt_strings() { + let src = r#" + fn main() { + let string = f"this is i: {i}"; + println(string); + + println(f"I want to print {0}"); + + let new_val = 10; + println(f"randomstring{new_val}{new_val}"); + } + fn println(x : T) -> T { + x + } + "#; + + let errors = resolve_src_code(src, vec!["main", "println"]); + assert!(errors.len() == 2, "Expected 2 errors, got: {:?}", errors); + + for err in errors { + match &err { + ResolverError::VariableNotDeclared { name, .. } => { + assert_eq!(name, "i"); + } + ResolverError::NumericConstantInFormatString { name, .. } => { + assert_eq!(name, "0"); + } + _ => unimplemented!(), + }; + } + } + fn path_unresolved_error(err: ResolverError, expected_unresolved_path: &str) { match err { ResolverError::PathResolutionError(PathResolutionError::Unresolved(name)) => { diff --git a/crates/noirc_frontend/src/hir/type_check/expr.rs b/crates/noirc_frontend/src/hir/type_check/expr.rs index b19833fb311..12c11bf20e1 100644 --- a/crates/noirc_frontend/src/hir/type_check/expr.rs +++ b/crates/noirc_frontend/src/hir/type_check/expr.rs @@ -111,6 +111,11 @@ impl<'interner> TypeChecker<'interner> { let len = Type::Constant(string.len() as u64); Type::String(Box::new(len)) } + HirLiteral::FmtStr(string, idents) => { + let len = Type::Constant(string.len() as u64); + let types = vecmap(&idents, |elem| self.check_expression(elem)); + Type::FmtString(Box::new(len), Box::new(Type::Tuple(types))) + } HirLiteral::Unit => Type::Unit, } } diff --git a/crates/noirc_frontend/src/hir_def/expr.rs b/crates/noirc_frontend/src/hir_def/expr.rs index 63b7e421dc3..5db9751591a 100644 --- a/crates/noirc_frontend/src/hir_def/expr.rs +++ b/crates/noirc_frontend/src/hir_def/expr.rs @@ -80,6 +80,7 @@ pub enum HirLiteral { Bool(bool), Integer(FieldElement), Str(String), + FmtStr(String, Vec), Unit, } diff --git a/crates/noirc_frontend/src/hir_def/types.rs b/crates/noirc_frontend/src/hir_def/types.rs index 6e1113345a8..4b4318f79d6 100644 --- a/crates/noirc_frontend/src/hir_def/types.rs +++ b/crates/noirc_frontend/src/hir_def/types.rs @@ -39,6 +39,10 @@ pub enum Type { /// is either a type variable of some kind or a Type::Constant. String(Box), + /// FmtString(N, Vec) is an array of characters of length N that contains + /// a list of fields specified inside the string by the following regular expression r"\{([\S]+)\}" + FmtString(Box, Box), + /// The unit type `()`. 
Unit, @@ -608,7 +612,6 @@ impl Type { Type::FieldElement(_) | Type::Integer(_, _, _) | Type::Bool(_) - | Type::String(_) | Type::Unit | Type::Error | Type::TypeVariable(_, _) @@ -638,6 +641,11 @@ impl Type { }) } Type::MutableReference(element) => element.contains_numeric_typevar(target_id), + Type::String(length) => named_generic_id_matches_target(length), + Type::FmtString(length, elements) => { + elements.contains_numeric_typevar(target_id) + || named_generic_id_matches_target(length) + } } } @@ -704,6 +712,9 @@ impl std::fmt::Display for Type { } Type::Bool(comp_time) => write!(f, "{comp_time}bool"), Type::String(len) => write!(f, "str<{len}>"), + Type::FmtString(len, elements) => { + write!(f, "fmtstr<{len}, {elements}>") + } Type::Unit => write!(f, "()"), Type::Error => write!(f, "error"), Type::NamedGeneric(binding, name) => match &*binding.borrow() { @@ -1057,6 +1068,13 @@ impl Type { elem_a.try_unify(elem_b, span) } + (String(len_a), String(len_b)) => len_a.try_unify(len_b, span), + + (FmtString(len_a, elements_a), FmtString(len_b, elements_b)) => { + len_a.try_unify(len_b, span)?; + elements_a.try_unify(elements_b, span) + } + (Tuple(elements_a), Tuple(elements_b)) => { if elements_a.len() != elements_b.len() { Err(SpanKind::None) @@ -1258,6 +1276,13 @@ impl Type { elem_a.is_subtype_of(elem_b, span) } + (String(len_a), String(len_b)) => len_a.is_subtype_of(len_b, span), + + (FmtString(len_a, elements_a), FmtString(len_b, elements_b)) => { + len_a.is_subtype_of(len_b, span)?; + elements_a.is_subtype_of(elements_b, span) + } + (Tuple(elements_a), Tuple(elements_b)) => { if elements_a.len() != elements_b.len() { Err(SpanKind::None) @@ -1396,6 +1421,7 @@ impl Type { .expect("Cannot have variable sized strings as a parameter to main"); AbiType::String { length: size } } + Type::FmtString(_, _) => unreachable!("format strings cannot be used in the abi"), Type::Error => unreachable!(), Type::Unit => unreachable!(), Type::Constant(_) => unreachable!(), @@ -1497,6 +1523,11 @@ impl Type { let size = Box::new(size.substitute(type_bindings)); Type::String(size) } + Type::FmtString(size, fields) => { + let size = Box::new(size.substitute(type_bindings)); + let fields = Box::new(fields.substitute(type_bindings)); + Type::FmtString(size, fields) + } Type::NamedGeneric(binding, _) | Type::TypeVariable(binding, _) => { substitute_binding(binding) } @@ -1543,6 +1574,11 @@ impl Type { match self { Type::Array(len, elem) => len.occurs(target_id) || elem.occurs(target_id), Type::String(len) => len.occurs(target_id), + Type::FmtString(len, fields) => { + let len_occurs = len.occurs(target_id); + let field_occurs = fields.occurs(target_id); + len_occurs || field_occurs + } Type::Struct(_, generic_args) => generic_args.iter().any(|arg| arg.occurs(target_id)), Type::Tuple(fields) => fields.iter().any(|field| field.occurs(target_id)), Type::NamedGeneric(binding, _) | Type::TypeVariable(binding, _) => { @@ -1582,6 +1618,11 @@ impl Type { Array(Box::new(size.follow_bindings()), Box::new(elem.follow_bindings())) } String(size) => String(Box::new(size.follow_bindings())), + FmtString(size, args) => { + let size = Box::new(size.follow_bindings()); + let args = Box::new(args.follow_bindings()); + FmtString(size, args) + } Struct(def, args) => { let args = vecmap(args, |arg| arg.follow_bindings()); Struct(def.clone(), args) diff --git a/crates/noirc_frontend/src/lexer/lexer.rs b/crates/noirc_frontend/src/lexer/lexer.rs index e376d85ddf0..8a98d5bfa3c 100644 --- a/crates/noirc_frontend/src/lexer/lexer.rs +++ 
b/crates/noirc_frontend/src/lexer/lexer.rs @@ -102,7 +102,8 @@ impl<'a> Lexer<'a> { Some('}') => self.single_char_token(Token::RightBrace), Some('[') => self.single_char_token(Token::LeftBracket), Some(']') => self.single_char_token(Token::RightBracket), - Some('"') => Ok(self.eat_string_literal()), + Some('"') => Ok(self.eat_string_literal(false)), + Some('f') => self.eat_format_string_or_alpha_numeric(), Some('#') => self.eat_attribute(), Some(ch) if ch.is_ascii_alphanumeric() || ch == '_' => self.eat_alpha_numeric(ch), Some(ch) => { @@ -307,13 +308,23 @@ impl<'a> Lexer<'a> { Ok(integer_token.into_span(start, end)) } - fn eat_string_literal(&mut self) -> SpannedToken { + fn eat_string_literal(&mut self, is_format_string: bool) -> SpannedToken { let (str_literal, start_span, end_span) = self.eat_while(None, |ch| ch != '"'); - let str_literal_token = Token::Str(str_literal); + let str_literal_token = + if is_format_string { Token::FmtStr(str_literal) } else { Token::Str(str_literal) }; self.next_char(); // Advance past the closing quote str_literal_token.into_span(start_span, end_span) } + fn eat_format_string_or_alpha_numeric(&mut self) -> SpannedTokenResult { + if self.peek_char_is('"') { + self.next_char(); + Ok(self.eat_string_literal(true)) + } else { + self.eat_alpha_numeric('f') + } + } + fn parse_comment(&mut self) -> SpannedTokenResult { let _ = self.eat_while(None, |ch| ch != '\n'); self.next_token() diff --git a/crates/noirc_frontend/src/lexer/token.rs b/crates/noirc_frontend/src/lexer/token.rs index b39d1640c57..3ef1d2a5dde 100644 --- a/crates/noirc_frontend/src/lexer/token.rs +++ b/crates/noirc_frontend/src/lexer/token.rs @@ -15,6 +15,7 @@ pub enum Token { Int(FieldElement), Bool(bool), Str(String), + FmtStr(String), Keyword(Keyword), IntType(IntType), Attribute(Attribute), @@ -145,6 +146,7 @@ impl fmt::Display for Token { Token::Int(n) => write!(f, "{}", n.to_u128()), Token::Bool(b) => write!(f, "{b}"), Token::Str(ref b) => write!(f, "{b}"), + Token::FmtStr(ref b) => write!(f, "f{b}"), Token::Keyword(k) => write!(f, "{k}"), Token::Attribute(ref a) => write!(f, "{a}"), Token::IntType(ref i) => write!(f, "{i}"), @@ -212,7 +214,7 @@ impl Token { pub fn kind(&self) -> TokenKind { match *self { Token::Ident(_) => TokenKind::Ident, - Token::Int(_) | Token::Bool(_) | Token::Str(_) => TokenKind::Literal, + Token::Int(_) | Token::Bool(_) | Token::Str(_) | Token::FmtStr(_) => TokenKind::Literal, Token::Keyword(_) => TokenKind::Keyword, Token::Attribute(_) => TokenKind::Attribute, ref tok => TokenKind::Token(tok.clone()), @@ -451,6 +453,7 @@ pub enum Keyword { Field, Fn, For, + FormatString, Global, If, Impl, @@ -489,6 +492,7 @@ impl fmt::Display for Keyword { Keyword::Field => write!(f, "Field"), Keyword::Fn => write!(f, "fn"), Keyword::For => write!(f, "for"), + Keyword::FormatString => write!(f, "fmtstr"), Keyword::Global => write!(f, "global"), Keyword::If => write!(f, "if"), Keyword::Impl => write!(f, "impl"), @@ -530,6 +534,7 @@ impl Keyword { "Field" => Keyword::Field, "fn" => Keyword::Fn, "for" => Keyword::For, + "fmtstr" => Keyword::FormatString, "global" => Keyword::Global, "if" => Keyword::If, "impl" => Keyword::Impl, diff --git a/crates/noirc_frontend/src/monomorphization/ast.rs b/crates/noirc_frontend/src/monomorphization/ast.rs index 488d05c6509..7ad05f09231 100644 --- a/crates/noirc_frontend/src/monomorphization/ast.rs +++ b/crates/noirc_frontend/src/monomorphization/ast.rs @@ -83,6 +83,7 @@ pub enum Literal { Integer(FieldElement, Type), Bool(bool), Str(String), + 
FmtStr(String, u64, Box), } #[derive(Debug, Clone)] @@ -207,6 +208,7 @@ pub enum Type { Integer(Signedness, /*bits:*/ u32), // u32 = Integer(unsigned, 32) Bool, String(/*len:*/ u64), // String(4) = str[4] + FmtString(/*len:*/ u64, Box), Unit, Tuple(Vec), Slice(Box), @@ -313,7 +315,10 @@ impl std::fmt::Display for Type { Signedness::Signed => write!(f, "i{bits}"), }, Type::Bool => write!(f, "bool"), - Type::String(len) => write!(f, "str[{len}]"), + Type::String(len) => write!(f, "str<{len}>"), + Type::FmtString(len, elements) => { + write!(f, "fmtstr<{len}, {elements}>") + } Type::Unit => write!(f, "()"), Type::Tuple(elements) => { let elements = vecmap(elements, ToString::to_string); diff --git a/crates/noirc_frontend/src/monomorphization/mod.rs b/crates/noirc_frontend/src/monomorphization/mod.rs index bb0228091da..963d16a311c 100644 --- a/crates/noirc_frontend/src/monomorphization/mod.rs +++ b/crates/noirc_frontend/src/monomorphization/mod.rs @@ -22,7 +22,7 @@ use crate::{ }, node_interner::{self, DefinitionKind, NodeInterner, StmtId}, token::Attribute, - ContractFunctionType, FunctionKind, TypeBinding, TypeBindings, TypeVariableKind, + ContractFunctionType, FunctionKind, Type, TypeBinding, TypeBindings, TypeVariableKind, }; use self::ast::{Definition, FuncId, Function, LocalId, Program}; @@ -261,6 +261,14 @@ impl<'interner> Monomorphizer<'interner> { match self.interner.expression(&expr) { HirExpression::Ident(ident) => self.ident(ident, expr), HirExpression::Literal(HirLiteral::Str(contents)) => Literal(Str(contents)), + HirExpression::Literal(HirLiteral::FmtStr(contents, idents)) => { + let fields = vecmap(idents, |ident| self.expr(ident)); + Literal(FmtStr( + contents, + fields.len() as u64, + Box::new(ast::Expression::Tuple(fields)), + )) + } HirExpression::Literal(HirLiteral::Bool(value)) => Literal(Bool(value)), HirExpression::Literal(HirLiteral::Integer(value)) => { let typ = Self::convert_type(&self.interner.id_type(expr)); @@ -587,6 +595,11 @@ impl<'interner> Monomorphizer<'interner> { HirType::Integer(_, sign, bits) => ast::Type::Integer(*sign, *bits), HirType::Bool(_) => ast::Type::Bool, HirType::String(size) => ast::Type::String(size.evaluate_to_u64().unwrap_or(0)), + HirType::FmtString(size, fields) => { + let size = size.evaluate_to_u64().unwrap_or(0); + let fields = Box::new(Self::convert_type(fields.as_ref())); + ast::Type::FmtString(size, fields) + } HirType::Unit => ast::Type::Unit, HirType::Array(length, element) => { @@ -704,18 +717,50 @@ impl<'interner> Monomorphizer<'interner> { /// of field elements to/from JSON. The type metadata attached in this method /// is the serialized `AbiType` for the argument passed to the function. /// The caller that is running a Noir program should then deserialize the `AbiType`, - /// and accurately decode the list of field elements passed to the foreign call. - fn append_abi_arg(&self, hir_argument: &HirExpression, arguments: &mut Vec) { + /// and accurately decode the list of field elements passed to the foreign call. + fn append_abi_arg( + &mut self, + hir_argument: &HirExpression, + arguments: &mut Vec, + ) { match hir_argument { HirExpression::Ident(ident) => { let typ = self.interner.id_type(ident.id); - let typ = typ.follow_bindings(); - - let abi_type = typ.as_abi_type(); - let abi_as_string = - serde_json::to_string(&abi_type).expect("ICE: expected Abi type to serialize"); + let typ: Type = typ.follow_bindings(); + match &typ { + // A format string has many different possible types that need to be handled. 
+ // Loop over each element in the format string to fetch each type's relevant metadata + Type::FmtString(_, elements) => { + match elements.as_ref() { + Type::Tuple(element_types) => { + for typ in element_types { + let abi_type = typ.as_abi_type(); + let abi_as_string = serde_json::to_string(&abi_type) + .expect("ICE: expected Abi type to serialize"); + + arguments.push(ast::Expression::Literal(ast::Literal::Str( + abi_as_string, + ))); + } + } + _ => unreachable!( + "ICE: format string type should be a tuple but got a {elements}" + ), + } - arguments.push(ast::Expression::Literal(ast::Literal::Str(abi_as_string))); + // The caller needs information as to whether it is handling a format string or a single type + arguments.push(ast::Expression::Literal(ast::Literal::Bool(true))); + } + _ => { + let abi_type = typ.as_abi_type(); + let abi_as_string = serde_json::to_string(&abi_type) + .expect("ICE: expected Abi type to serialize"); + + arguments.push(ast::Expression::Literal(ast::Literal::Str(abi_as_string))); + // The caller needs information as to whether it is handling a format string or a single type + arguments.push(ast::Expression::Literal(ast::Literal::Bool(false))); + } + } } _ => unreachable!("logging expr {:?} is not supported", arguments[0]), } @@ -922,6 +967,18 @@ impl<'interner> Monomorphizer<'interner> { ast::Type::String(length) => { ast::Expression::Literal(ast::Literal::Str("\0".repeat(*length as usize))) } + ast::Type::FmtString(length, fields) => { + let zeroed_tuple = self.zeroed_value_of_type(fields); + let fields_len = match &zeroed_tuple { + ast::Expression::Tuple(fields) => fields.len() as u64, + _ => unreachable!("ICE: format string fields should be structured in a tuple, but got a {zeroed_tuple}"), + }; + ast::Expression::Literal(ast::Literal::FmtStr( + "\0".repeat(*length as usize), + fields_len, + Box::new(zeroed_tuple), + )) + } ast::Type::Tuple(fields) => { ast::Expression::Tuple(vecmap(fields, |field| self.zeroed_value_of_type(field))) } diff --git a/crates/noirc_frontend/src/monomorphization/printer.rs b/crates/noirc_frontend/src/monomorphization/printer.rs index 929a14e07da..ff2b7d0d256 100644 --- a/crates/noirc_frontend/src/monomorphization/printer.rs +++ b/crates/noirc_frontend/src/monomorphization/printer.rs @@ -96,6 +96,11 @@ impl AstPrinter { super::ast::Literal::Integer(x, _) => x.fmt(f), super::ast::Literal::Bool(x) => x.fmt(f), super::ast::Literal::Str(s) => s.fmt(f), + super::ast::Literal::FmtStr(s, _, _) => { + write!(f, "f\"")?; + s.fmt(f)?; + write!(f, "\"") + } } } diff --git a/crates/noirc_frontend/src/node_interner.rs b/crates/noirc_frontend/src/node_interner.rs index f01c5f22a50..062e9daf2d6 100644 --- a/crates/noirc_frontend/src/node_interner.rs +++ b/crates/noirc_frontend/src/node_interner.rs @@ -213,11 +213,11 @@ impl DefinitionKind { matches!(self, DefinitionKind::Global(..)) } - pub fn get_rhs(self) -> Option { + pub fn get_rhs(&self) -> Option { match self { DefinitionKind::Function(_) => None, - DefinitionKind::Global(id) => Some(id), - DefinitionKind::Local(id) => id, + DefinitionKind::Global(id) => Some(*id), + DefinitionKind::Local(id) => *id, DefinitionKind::GenericType(_) => None, } } @@ -637,6 +637,7 @@ fn get_type_method_key(typ: &Type) -> Option { | Type::Constant(_) | Type::Error | Type::NotConstant - | Type::Struct(_, _) => None, + | Type::Struct(_, _) + | Type::FmtString(_, _) => None, } } diff --git a/crates/noirc_frontend/src/parser/parser.rs b/crates/noirc_frontend/src/parser/parser.rs index c6d84416975..65446e5d6c6 
100644 --- a/crates/noirc_frontend/src/parser/parser.rs +++ b/crates/noirc_frontend/src/parser/parser.rs @@ -795,6 +795,7 @@ fn parse_type_inner( int_type(), bool_type(), string_type(), + format_string_type(recursive_type_parser.clone()), named_type(recursive_type_parser.clone()), array_type(recursive_type_parser.clone()), recursive_type_parser.clone().delimited_by(just(Token::LeftParen), just(Token::RightParen)), @@ -841,6 +842,19 @@ fn string_type() -> impl NoirParser { .map(UnresolvedType::String) } +fn format_string_type( + type_parser: impl NoirParser, +) -> impl NoirParser { + keyword(Keyword::FormatString) + .ignore_then( + type_expression() + .then_ignore(just(Token::Comma)) + .then(type_parser) + .delimited_by(just(Token::Less), just(Token::Greater)), + ) + .map(|(size, fields)| UnresolvedType::FormatString(size, Box::new(fields))) +} + fn int_type() -> impl NoirParser { maybe_comp_time() .then(filter_map(|span, token: Token| match token { @@ -1366,6 +1380,7 @@ fn literal() -> impl NoirParser { Token::Int(x) => ExpressionKind::integer(x), Token::Bool(b) => ExpressionKind::boolean(b), Token::Str(s) => ExpressionKind::string(s), + Token::FmtStr(s) => ExpressionKind::format_string(s), unexpected => unreachable!("Non-literal {} parsed as a literal", unexpected), }) } From 920a900818b31285c9bf2f5dd5b84c2799610a7c Mon Sep 17 00:00:00 2001 From: jfecher Date: Tue, 1 Aug 2023 14:55:21 -0500 Subject: [PATCH 05/19] feat: Add `Option` to noir stdlib (#1781) * Add Option * Fix path * Add option test * Move test * Add docs and filter, flatten methods * Fix stdlib --- .../tests/test_data/option/Nargo.toml | 6 + .../tests/test_data/option/src/main.nr | 53 ++++++ .../src/ssa_refactor/acir_gen/mod.rs | 2 +- noir_stdlib/src/lib.nr | 1 + noir_stdlib/src/option.nr | 157 ++++++++++++++++++ 5 files changed, 218 insertions(+), 1 deletion(-) create mode 100644 crates/nargo_cli/tests/test_data/option/Nargo.toml create mode 100644 crates/nargo_cli/tests/test_data/option/src/main.nr create mode 100644 noir_stdlib/src/option.nr diff --git a/crates/nargo_cli/tests/test_data/option/Nargo.toml b/crates/nargo_cli/tests/test_data/option/Nargo.toml new file mode 100644 index 00000000000..2248e9c06dd --- /dev/null +++ b/crates/nargo_cli/tests/test_data/option/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "option" +authors = [""] +compiler_version = "0.7.0" + +[dependencies] diff --git a/crates/nargo_cli/tests/test_data/option/src/main.nr b/crates/nargo_cli/tests/test_data/option/src/main.nr new file mode 100644 index 00000000000..0a41b9a629c --- /dev/null +++ b/crates/nargo_cli/tests/test_data/option/src/main.nr @@ -0,0 +1,53 @@ +use dep::std::option::Option; + +fn main() { + let none = Option::none(); + let some = Option::some(3); + + assert(none.is_none()); + assert(some.is_some()); + + assert(some.unwrap() == 3); + + assert(none.unwrap_or(2) == 2); + assert(some.unwrap_or(2) == 3); + + assert(none.unwrap_or_else(|| 5) == 5); + assert(some.unwrap_or_else(|| 5) == 3); + + assert(none.map(|x| x * 2).is_none()); + assert(some.map(|x| x * 2).unwrap() == 6); + + assert(none.map_or(0, |x| x * 2) == 0); + assert(some.map_or(0, |x| x * 2) == 6); + + assert(none.map_or_else(|| 0, |x| x * 2) == 0); + assert(some.map_or_else(|| 0, |x| x * 2) == 6); + + assert(none.and(none).is_none()); + assert(none.and(some).is_none()); + assert(some.and(none).is_none()); + assert(some.and(some).is_some()); + + let add1_u64 = |value: Field| Option::some(value as u64 + 1); + + assert(none.and_then(|_value| Option::none()).is_none()); + 
assert(none.and_then(add1_u64).is_none()); + assert(some.and_then(|_value| Option::none()).is_none()); + assert(some.and_then(add1_u64).unwrap() == 4); + + assert(none.or(none).is_none()); + assert(none.or(some).is_some()); + assert(some.or(none).is_some()); + assert(some.or(some).is_some()); + + assert(none.or_else(|| Option::none()).is_none()); + assert(none.or_else(|| Option::some(5)).is_some()); + assert(some.or_else(|| Option::none()).is_some()); + assert(some.or_else(|| Option::some(5)).is_some()); + + assert(none.xor(none).is_none()); + assert(none.xor(some).is_some()); + assert(some.xor(none).is_some()); + assert(some.xor(some).is_none()); +} diff --git a/crates/noirc_evaluator/src/ssa_refactor/acir_gen/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/acir_gen/mod.rs index 5253cb71875..4a7d2e46775 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/acir_gen/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/acir_gen/mod.rs @@ -1002,7 +1002,7 @@ impl Context { } Intrinsic::ArrayLen => { let len = match self.convert_value(arguments[0], dfg) { - AcirValue::Var(_, _) => unreachable!("Non-array passed to array.len() method"), + AcirValue::Var(_, _) => unreachable!("Non-array passed to array.len() method"), AcirValue::Array(values) => (values.len() as u128).into(), AcirValue::DynamicArray(array) => (array.len as u128).into(), }; diff --git a/noir_stdlib/src/lib.nr b/noir_stdlib/src/lib.nr index e654a20b1d8..9c0dcc6b269 100644 --- a/noir_stdlib/src/lib.nr +++ b/noir_stdlib/src/lib.nr @@ -14,6 +14,7 @@ mod ec; mod unsafe; mod collections; mod compat; +mod option; // Oracle calls are required to be wrapped in an unconstrained function // Thus, the only argument to the `println` oracle is expected to always be an ident diff --git a/noir_stdlib/src/option.nr b/noir_stdlib/src/option.nr new file mode 100644 index 00000000000..5cc4dfae887 --- /dev/null +++ b/noir_stdlib/src/option.nr @@ -0,0 +1,157 @@ +struct Option { + _is_some: bool, + value: T, +} + +impl Option { + /// Constructs a None value + fn none() -> Self { + Self { _is_some: false, value: crate::unsafe::zeroed() } + } + + /// Constructs a Some wrapper around the given value + fn some(value: T) -> Self { + Self { _is_some: true, value } + } + + /// True if this Option is None + fn is_none(self) -> bool { + !self._is_some + } + + /// True if this Option is Some + fn is_some(self) -> bool { + self._is_some + } + + /// Asserts `self.is_some()` and returns the wrapped value. + fn unwrap(self) -> T { + assert(self._is_some); + self.value + } + + /// Returns the wrapped value if `self.is_some()`. Otherwise, returns the given default value. + fn unwrap_or(self, default: T) -> T { + if self._is_some { + self.value + } else { + default + } + } + + /// Returns the wrapped value if `self.is_some()`. Otherwise, calls the given function to return + /// a default value. + fn unwrap_or_else(self, default: fn() -> T) -> T { + if self._is_some { + self.value + } else { + default() + } + } + + /// If self is `Some(x)`, this returns `Some(f(x))`. Otherwise, this returns `None`. + fn map(self, f: fn(T) -> U) -> Option { + if self._is_some { + Option::some(f(self.value)) + } else { + Option::none() + } + } + + /// If self is `Some(x)`, this returns `f(x)`. Otherwise, this returns the given default value. + fn map_or(self, default: U, f: fn(T) -> U) -> U { + if self._is_some { + f(self.value) + } else { + default + } + } + + /// If self is `Some(x)`, this returns `f(x)`. Otherwise, this returns `default()`. 
+ fn map_or_else(self, default: fn() -> U, f: fn(T) -> U) -> U { + if self._is_some { + f(self.value) + } else { + default() + } + } + + /// Returns None if self is None. Otherwise, this returns `other`. + fn and(self, other: Self) -> Self { + if self.is_none() { + Option::none() + } else { + other + } + } + + /// If self is None, this returns None. Otherwise, this calls the given function + /// with the Some value contained within self, and returns the result of that call. + /// + /// In some languages this function is called `flat_map` or `bind`. + fn and_then(self, f: fn(T) -> Option) -> Option { + if self._is_some { + f(self.value) + } else { + Option::none() + } + } + + /// If self is Some, return self. Otherwise, return `other`. + fn or(self, other: Self) -> Self { + if self._is_some { + self + } else { + other + } + } + + /// If self is Some, return self. Otherwise, return `default()`. + fn or_else(self, default: fn() -> Self) -> Self { + if self._is_some { + self + } else { + default() + } + } + + // If only one of the two Options is Some, return that option. + // Otherwise, if both options are Some or both are None, None is returned. + fn xor(self, other: Self) -> Self { + if self._is_some { + if other._is_some { + Option::none() + } else { + self + } + } else if other._is_some { + other + } else { + Option::none() + } + } + + /// Returns `Some(x)` if self is `Some(x)` and `predicate(x)` is true. + /// Otherwise, this returns `None` + fn filter(self, predicate: fn(T) -> bool) -> Self { + if self._is_some { + if predicate(self.value) { + self + } else { + Option::none() + } + } else { + Option::none() + } + } + + /// Flattens an Option> into a Option. + /// This returns None if the outer Option is None. Otherwise, this returns the inner Option. + fn flatten(option: Option>) -> Option { + if option._is_some { + option.value + } else { + Option::none() + } + } +} From ce94cb4f9f9fccf504de9d0b12b8760fc8fab75c Mon Sep 17 00:00:00 2001 From: jfecher Date: Tue, 1 Aug 2023 15:12:03 -0500 Subject: [PATCH 06/19] feat: Implement type aliases (#2112) * . * . * . * . * stash * . * . * . * remove tyalias as an hir type * namings * . * clippy * move to collector * working? * working? * move test to new_ssa * resolve type alias name in module * . * comments * review * move test to test_data folder * type aliases cannot be used in type namespace * more efficient? * remove comment * use interner for id * . 
* Rework def_interner storage of aliases * Update crates/noirc_frontend/src/ast/type_alias.rs Co-authored-by: Maxim Vezenov * Update crates/noirc_frontend/src/ast/type_alias.rs Co-authored-by: Maxim Vezenov * Update crates/noirc_frontend/src/ast/type_alias.rs Co-authored-by: Maxim Vezenov * Update crates/noirc_frontend/src/hir/def_collector/dc_mod.rs Co-authored-by: Maxim Vezenov * Update crates/noirc_frontend/src/hir/resolution/resolver.rs Co-authored-by: Maxim Vezenov * typ -> type --------- Co-authored-by: ethan-000 Co-authored-by: Ethan-000 Co-authored-by: Maxim Vezenov --- .../tests/test_data/type_aliases/Nargo.toml | 6 ++ .../tests/test_data/type_aliases/Prover.toml | 1 + .../tests/test_data/type_aliases/src/main.nr | 31 ++++++++ crates/noirc_frontend/src/ast/mod.rs | 2 + crates/noirc_frontend/src/ast/type_alias.rs | 31 ++++++++ .../src/hir/def_collector/dc_crate.rs | 38 +++++++++- .../src/hir/def_collector/dc_mod.rs | 40 ++++++++++- .../src/hir/def_map/item_scope.rs | 1 + .../src/hir/def_map/module_data.rs | 10 ++- .../src/hir/def_map/module_def.rs | 31 +++++++- .../src/hir/resolution/import.rs | 1 + .../src/hir/resolution/resolver.rs | 70 +++++++++++++++---- crates/noirc_frontend/src/hir_def/types.rs | 68 +++++++++++++++++- crates/noirc_frontend/src/node_interner.rs | 49 ++++++++++++- crates/noirc_frontend/src/parser/mod.rs | 15 +++- crates/noirc_frontend/src/parser/parser.rs | 28 +++++++- 16 files changed, 393 insertions(+), 29 deletions(-) create mode 100644 crates/nargo_cli/tests/test_data/type_aliases/Nargo.toml create mode 100644 crates/nargo_cli/tests/test_data/type_aliases/Prover.toml create mode 100644 crates/nargo_cli/tests/test_data/type_aliases/src/main.nr create mode 100644 crates/noirc_frontend/src/ast/type_alias.rs diff --git a/crates/nargo_cli/tests/test_data/type_aliases/Nargo.toml b/crates/nargo_cli/tests/test_data/type_aliases/Nargo.toml new file mode 100644 index 00000000000..a797cb0bbe2 --- /dev/null +++ b/crates/nargo_cli/tests/test_data/type_aliases/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "type_aliases" +authors = [""] +compiler_version = "0.1" + +[dependencies] diff --git a/crates/nargo_cli/tests/test_data/type_aliases/Prover.toml b/crates/nargo_cli/tests/test_data/type_aliases/Prover.toml new file mode 100644 index 00000000000..771df41899d --- /dev/null +++ b/crates/nargo_cli/tests/test_data/type_aliases/Prover.toml @@ -0,0 +1 @@ +x = [2, 3] diff --git a/crates/nargo_cli/tests/test_data/type_aliases/src/main.nr b/crates/nargo_cli/tests/test_data/type_aliases/src/main.nr new file mode 100644 index 00000000000..6cfafc91b7d --- /dev/null +++ b/crates/nargo_cli/tests/test_data/type_aliases/src/main.nr @@ -0,0 +1,31 @@ +use dep::std; + +type Foo = [T; 2]; + +type Bar = Field; + +type Three = Two; +type Two = One; +type One = (A, B); + +struct MyStruct { + foo: Bar, +} + +fn main(x : [Field; 2]) { + let a: Foo = [1, 2]; + assert(a[0] != x[0]); + + let b: Bar = 2; + assert(x[0] == b); + + let c: u8 = 1; + let d: u32 = 2; + let e: Three = (c, d); + assert(e.0 == 1); + + let s = MyStruct { + foo: 10 + }; + assert(s.foo == 10); +} diff --git a/crates/noirc_frontend/src/ast/mod.rs b/crates/noirc_frontend/src/ast/mod.rs index b52c3e685d3..6aa373c66a9 100644 --- a/crates/noirc_frontend/src/ast/mod.rs +++ b/crates/noirc_frontend/src/ast/mod.rs @@ -9,6 +9,7 @@ mod function; mod statement; mod structure; mod traits; +mod type_alias; pub use expression::*; pub use function::*; @@ -17,6 +18,7 @@ use noirc_errors::Span; pub use statement::*; pub use structure::*; pub use 
traits::*; +pub use type_alias::*; use crate::{ parser::{ParserError, ParserErrorReason}, diff --git a/crates/noirc_frontend/src/ast/type_alias.rs b/crates/noirc_frontend/src/ast/type_alias.rs new file mode 100644 index 00000000000..76a1e5a7e30 --- /dev/null +++ b/crates/noirc_frontend/src/ast/type_alias.rs @@ -0,0 +1,31 @@ +use crate::{Ident, UnresolvedGenerics, UnresolvedType}; +use iter_extended::vecmap; +use noirc_errors::Span; +use std::fmt::Display; + +/// Ast node for type aliases +#[derive(Clone, Debug)] +pub struct NoirTypeAlias { + pub name: Ident, + pub generics: UnresolvedGenerics, + pub typ: UnresolvedType, + pub span: Span, +} + +impl NoirTypeAlias { + pub fn new( + name: Ident, + generics: UnresolvedGenerics, + typ: UnresolvedType, + span: Span, + ) -> NoirTypeAlias { + NoirTypeAlias { name, generics, typ, span } + } +} + +impl Display for NoirTypeAlias { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let generics = vecmap(&self.generics, |generic| generic.to_string()); + write!(f, "type {}<{}> = {}", self.name, generics.join(", "), self.typ) + } +} diff --git a/crates/noirc_frontend/src/hir/def_collector/dc_crate.rs b/crates/noirc_frontend/src/hir/def_collector/dc_crate.rs index 3f30a4990e4..e974961a405 100644 --- a/crates/noirc_frontend/src/hir/def_collector/dc_crate.rs +++ b/crates/noirc_frontend/src/hir/def_collector/dc_crate.rs @@ -10,10 +10,10 @@ use crate::hir::resolution::{ }; use crate::hir::type_check::{type_check_func, TypeChecker}; use crate::hir::Context; -use crate::node_interner::{FuncId, NodeInterner, StmtId, StructId}; +use crate::node_interner::{FuncId, NodeInterner, StmtId, StructId, TypeAliasId}; use crate::{ - ExpressionKind, Generics, Ident, LetStatement, NoirFunction, NoirStruct, ParsedModule, Shared, - Type, TypeBinding, UnresolvedGenerics, UnresolvedType, + ExpressionKind, Generics, Ident, LetStatement, NoirFunction, NoirStruct, NoirTypeAlias, + ParsedModule, Shared, Type, TypeBinding, UnresolvedGenerics, UnresolvedType, }; use fm::FileId; use iter_extended::vecmap; @@ -40,6 +40,13 @@ pub struct UnresolvedStruct { pub struct_def: NoirStruct, } +#[derive(Clone)] +pub struct UnresolvedTypeAlias { + pub file_id: FileId, + pub module_id: LocalModuleId, + pub type_alias_def: NoirTypeAlias, +} + #[derive(Clone)] pub struct UnresolvedGlobal { pub file_id: FileId, @@ -54,6 +61,7 @@ pub struct DefCollector { pub(crate) collected_imports: Vec, pub(crate) collected_functions: Vec, pub(crate) collected_types: HashMap, + pub(crate) collected_type_aliases: HashMap, pub(crate) collected_globals: Vec, pub(crate) collected_impls: ImplMap, } @@ -71,6 +79,7 @@ impl DefCollector { collected_imports: vec![], collected_functions: vec![], collected_types: HashMap::new(), + collected_type_aliases: HashMap::new(), collected_impls: HashMap::new(), collected_globals: vec![], } @@ -157,6 +166,8 @@ impl DefCollector { let mut file_global_ids = resolve_globals(context, integer_globals, crate_id, errors); + resolve_type_aliases(context, def_collector.collected_type_aliases, crate_id, errors); + // Must resolve structs before we resolve globals. 
resolve_structs(context, def_collector.collected_types, crate_id, errors); @@ -358,6 +369,27 @@ fn resolve_struct_fields( (generics, fields) } +fn resolve_type_aliases( + context: &mut Context, + type_aliases: HashMap, + crate_id: CrateId, + all_errors: &mut Vec, +) { + for (type_id, unresolved_typ) in type_aliases { + let path_resolver = StandardPathResolver::new(ModuleId { + local_id: unresolved_typ.module_id, + krate: crate_id, + }); + let file = unresolved_typ.file_id; + let (typ, generics, errors) = + Resolver::new(&mut context.def_interner, &path_resolver, &context.def_maps, file) + .resolve_type_aliases(unresolved_typ.type_alias_def); + extend_errors(all_errors, file, errors); + + context.def_interner.set_type_alias(type_id, typ, generics); + } +} + fn resolve_impls( interner: &mut NodeInterner, crate_id: CrateId, diff --git a/crates/noirc_frontend/src/hir/def_collector/dc_mod.rs b/crates/noirc_frontend/src/hir/def_collector/dc_mod.rs index 2e478b6c040..37c017ecb96 100644 --- a/crates/noirc_frontend/src/hir/def_collector/dc_mod.rs +++ b/crates/noirc_frontend/src/hir/def_collector/dc_mod.rs @@ -3,11 +3,12 @@ use noirc_errors::FileDiagnostic; use crate::{ graph::CrateId, hir::def_collector::dc_crate::UnresolvedStruct, node_interner::StructId, - parser::SubModule, Ident, LetStatement, NoirFunction, NoirStruct, ParsedModule, TypeImpl, + parser::SubModule, Ident, LetStatement, NoirFunction, NoirStruct, NoirTypeAlias, ParsedModule, + TypeImpl, }; use super::{ - dc_crate::{DefCollector, UnresolvedFunctions, UnresolvedGlobal}, + dc_crate::{DefCollector, UnresolvedFunctions, UnresolvedGlobal, UnresolvedTypeAlias}, errors::DefCollectorErrorKind, }; use crate::hir::def_map::{parse_file, LocalModuleId, ModuleData, ModuleId, ModuleOrigin}; @@ -55,6 +56,8 @@ pub fn collect_defs( collector.collect_structs(ast.types, crate_id, errors); + collector.collect_type_aliases(context, ast.type_aliases, errors); + collector.collect_functions(context, ast.functions, errors); collector.collect_impls(context, ast.impls); @@ -183,6 +186,39 @@ impl<'a> ModCollector<'a> { } } + /// Collect any type aliases definitions declared within the ast. + /// Returns a vector of errors if any type aliases were already defined. 
+ fn collect_type_aliases( + &mut self, + context: &mut Context, + type_aliases: Vec, + errors: &mut Vec, + ) { + for type_alias in type_aliases { + let name = type_alias.name.clone(); + + // And store the TypeId -> TypeAlias mapping somewhere it is reachable + let unresolved = UnresolvedTypeAlias { + file_id: self.file_id, + module_id: self.module_id, + type_alias_def: type_alias, + }; + + let type_alias_id = context.def_interner.push_type_alias(&unresolved); + + // Add the type alias to scope so its path can be looked up later + let result = self.def_collector.def_map.modules[self.module_id.0] + .declare_type_alias(name, type_alias_id); + + if let Err((first_def, second_def)) = result { + let err = DefCollectorErrorKind::DuplicateFunction { first_def, second_def }; + errors.push(err.into_file_diagnostic(self.file_id)); + } + + self.def_collector.collected_type_aliases.insert(type_alias_id, unresolved); + } + } + fn collect_submodules( &mut self, context: &mut Context, diff --git a/crates/noirc_frontend/src/hir/def_map/item_scope.rs b/crates/noirc_frontend/src/hir/def_map/item_scope.rs index 52201f7ade3..760088a3b7e 100644 --- a/crates/noirc_frontend/src/hir/def_map/item_scope.rs +++ b/crates/noirc_frontend/src/hir/def_map/item_scope.rs @@ -48,6 +48,7 @@ impl ItemScope { ModuleDefId::ModuleId(_) => add_item(&mut self.types), ModuleDefId::FunctionId(_) => add_item(&mut self.values), ModuleDefId::TypeId(_) => add_item(&mut self.types), + ModuleDefId::TypeAliasId(_) => add_item(&mut self.types), ModuleDefId::GlobalId(_) => add_item(&mut self.values), } } diff --git a/crates/noirc_frontend/src/hir/def_map/module_data.rs b/crates/noirc_frontend/src/hir/def_map/module_data.rs index 20906885ad9..5b93d04fea7 100644 --- a/crates/noirc_frontend/src/hir/def_map/module_data.rs +++ b/crates/noirc_frontend/src/hir/def_map/module_data.rs @@ -3,7 +3,7 @@ use std::collections::HashMap; use fm::FileId; use crate::{ - node_interner::{FuncId, StmtId, StructId}, + node_interner::{FuncId, StmtId, StructId, TypeAliasId}, Ident, }; @@ -65,6 +65,14 @@ impl ModuleData { self.declare(name, ModuleDefId::TypeId(id)) } + pub fn declare_type_alias( + &mut self, + name: Ident, + id: TypeAliasId, + ) -> Result<(), (Ident, Ident)> { + self.declare(name, id.into()) + } + pub fn declare_child_module( &mut self, name: Ident, diff --git a/crates/noirc_frontend/src/hir/def_map/module_def.rs b/crates/noirc_frontend/src/hir/def_map/module_def.rs index 399ee15700c..b64ced78772 100644 --- a/crates/noirc_frontend/src/hir/def_map/module_def.rs +++ b/crates/noirc_frontend/src/hir/def_map/module_def.rs @@ -1,4 +1,4 @@ -use crate::node_interner::{FuncId, StmtId, StructId}; +use crate::node_interner::{FuncId, StmtId, StructId, TypeAliasId}; use super::ModuleId; @@ -8,6 +8,7 @@ pub enum ModuleDefId { ModuleId(ModuleId), FunctionId(FuncId), TypeId(StructId), + TypeAliasId(TypeAliasId), GlobalId(StmtId), } @@ -26,6 +27,13 @@ impl ModuleDefId { } } + pub fn as_type_alias(&self) -> Option { + match self { + ModuleDefId::TypeAliasId(type_alias_id) => Some(*type_alias_id), + _ => None, + } + } + pub fn as_global(&self) -> Option { match self { ModuleDefId::GlobalId(stmt_id) => Some(*stmt_id), @@ -39,6 +47,7 @@ impl ModuleDefId { match self { ModuleDefId::FunctionId(_) => "function", ModuleDefId::TypeId(_) => "type", + ModuleDefId::TypeAliasId(_) => "type alias", ModuleDefId::ModuleId(_) => "module", ModuleDefId::GlobalId(_) => "global", } @@ -57,6 +66,12 @@ impl From for ModuleDefId { } } +impl From for ModuleDefId { + fn from(fid: 
TypeAliasId) -> Self { + ModuleDefId::TypeAliasId(fid) + } +} + impl From for ModuleDefId { fn from(stmt_id: StmtId) -> Self { ModuleDefId::GlobalId(stmt_id) @@ -97,6 +112,20 @@ impl TryFromModuleDefId for StructId { } } +impl TryFromModuleDefId for TypeAliasId { + fn try_from(id: ModuleDefId) -> Option { + id.as_type_alias() + } + + fn dummy_id() -> Self { + TypeAliasId::dummy_id() + } + + fn description() -> String { + "type alias".to_string() + } +} + impl TryFromModuleDefId for StmtId { fn try_from(id: ModuleDefId) -> Option { id.as_global() diff --git a/crates/noirc_frontend/src/hir/resolution/import.rs b/crates/noirc_frontend/src/hir/resolution/import.rs index 0bc7e065adb..9a6ef9b1b8b 100644 --- a/crates/noirc_frontend/src/hir/resolution/import.rs +++ b/crates/noirc_frontend/src/hir/resolution/import.rs @@ -152,6 +152,7 @@ fn resolve_name_in_module( ModuleDefId::FunctionId(_) => panic!("functions cannot be in the type namespace"), // TODO: If impls are ever implemented, types can be used in a path ModuleDefId::TypeId(id) => id.0, + ModuleDefId::TypeAliasId(_) => panic!("type aliases cannot be used in type namespace"), ModuleDefId::GlobalId(_) => panic!("globals cannot be in the type namespace"), }; diff --git a/crates/noirc_frontend/src/hir/resolution/resolver.rs b/crates/noirc_frontend/src/hir/resolution/resolver.rs index fe19cb633e4..8b4f97dbd8e 100644 --- a/crates/noirc_frontend/src/hir/resolution/resolver.rs +++ b/crates/noirc_frontend/src/hir/resolution/resolver.rs @@ -34,9 +34,9 @@ use crate::{ Statement, }; use crate::{ - ArrayLiteral, ContractFunctionType, Generics, LValue, NoirStruct, Path, Pattern, Shared, - StructType, Type, TypeBinding, TypeVariable, UnaryOp, UnresolvedGenerics, UnresolvedType, - UnresolvedTypeExpression, ERROR_IDENT, + ArrayLiteral, ContractFunctionType, Generics, LValue, NoirStruct, NoirTypeAlias, Path, Pattern, + Shared, StructType, Type, TypeAliasType, TypeBinding, TypeVariable, UnaryOp, + UnresolvedGenerics, UnresolvedType, UnresolvedTypeExpression, ERROR_IDENT, }; use fm::FileId; use iter_extended::vecmap; @@ -403,22 +403,27 @@ impl<'a> Resolver<'a> { } let span = path.span(); + let mut args = vecmap(args, |arg| self.resolve_type_inner(arg, new_variables)); + + if let Some(type_alias_type) = self.lookup_type_alias(path.clone()) { + let expected_generic_count = type_alias_type.generics.len(); + let type_alias_string = type_alias_type.to_string(); + let id = type_alias_type.id; + + self.verify_generics_count(expected_generic_count, &mut args, span, || { + type_alias_string + }); + + return self.interner.get_type_alias(id).get_type(&args); + } + match self.lookup_struct_or_error(path) { Some(struct_type) => { - let mut args = vecmap(args, |arg| self.resolve_type_inner(arg, new_variables)); let expected_generic_count = struct_type.borrow().generics.len(); - if args.len() != expected_generic_count { - self.push_err(ResolverError::IncorrectGenericCount { - span, - struct_type: struct_type.borrow().to_string(), - actual: args.len(), - expected: expected_generic_count, - }); - - // Fix the generic count so we can continue typechecking - args.resize_with(expected_generic_count, || Type::Error); - } + self.verify_generics_count(expected_generic_count, &mut args, span, || { + struct_type.borrow().to_string() + }); Type::Struct(struct_type, args) } @@ -426,6 +431,26 @@ impl<'a> Resolver<'a> { } } + fn verify_generics_count( + &mut self, + expected_count: usize, + args: &mut Vec, + span: Span, + type_name: impl FnOnce() -> String, + ) { + if args.len() != 
expected_count { + self.errors.push(ResolverError::IncorrectGenericCount { + span, + struct_type: type_name(), + actual: args.len(), + expected: expected_count, + }); + + // Fix the generic count so we can continue typechecking + args.resize_with(expected_count, || Type::Error); + } + } + fn lookup_generic_or_global_type(&mut self, path: &Path) -> Option { if path.segments.len() == 1 { let name = &path.last_segment().0.contents; @@ -517,6 +542,17 @@ impl<'a> Resolver<'a> { self.resolve_type_inner(typ, &mut vec![]) } + pub fn resolve_type_aliases( + mut self, + unresolved: NoirTypeAlias, + ) -> (Type, Generics, Vec) { + let generics = self.add_generics(&unresolved.generics); + self.resolve_local_globals(); + let typ = self.resolve_type(unresolved.typ); + + (typ, generics, self.errors) + } + pub fn take_errors(self) -> Vec { self.errors } @@ -1253,6 +1289,10 @@ impl<'a> Resolver<'a> { } } + fn lookup_type_alias(&mut self, path: Path) -> Option<&TypeAliasType> { + self.lookup(path).ok().map(|id| self.interner.get_type_alias(id)) + } + fn resolve_path(&mut self, path: Path) -> Result { self.path_resolver.resolve(self.def_maps, path).map_err(ResolverError::PathResolutionError) } diff --git a/crates/noirc_frontend/src/hir_def/types.rs b/crates/noirc_frontend/src/hir_def/types.rs index 4b4318f79d6..df4c2f6c229 100644 --- a/crates/noirc_frontend/src/hir_def/types.rs +++ b/crates/noirc_frontend/src/hir_def/types.rs @@ -7,7 +7,7 @@ use std::{ use crate::{ hir::type_check::TypeCheckError, - node_interner::{ExprId, NodeInterner}, + node_interner::{ExprId, NodeInterner, TypeAliasId}, }; use iter_extended::vecmap; use noirc_abi::AbiType; @@ -226,6 +226,72 @@ impl std::fmt::Display for StructType { } } +/// Wrap around an unsolved type +#[derive(Debug, Clone, Eq)] +pub struct TypeAliasType { + pub name: Ident, + pub id: TypeAliasId, + pub typ: Type, + pub generics: Generics, + pub span: Span, +} + +impl std::hash::Hash for TypeAliasType { + fn hash(&self, state: &mut H) { + self.id.hash(state); + } +} + +impl PartialEq for TypeAliasType { + fn eq(&self, other: &Self) -> bool { + self.id == other.id + } +} + +impl std::fmt::Display for TypeAliasType { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.name)?; + + if !self.generics.is_empty() { + let generics = vecmap(&self.generics, |(_, binding)| binding.borrow().to_string()); + write!(f, "{}", generics.join(", "))?; + } + + Ok(()) + } +} + +impl TypeAliasType { + pub fn new( + id: TypeAliasId, + name: Ident, + span: Span, + typ: Type, + generics: Generics, + ) -> TypeAliasType { + TypeAliasType { id, typ, name, span, generics } + } + + pub fn set_type_and_generics(&mut self, new_typ: Type, new_generics: Generics) { + assert_eq!(self.typ, Type::Error); + self.typ = new_typ; + self.generics = new_generics; + } + + pub fn get_type(&self, generic_args: &[Type]) -> Type { + assert_eq!(self.generics.len(), generic_args.len()); + + let substitutions = self + .generics + .iter() + .zip(generic_args) + .map(|((old_id, old_var), new)| (*old_id, (old_var.clone(), new.clone()))) + .collect(); + + self.typ.substitute(&substitutions) + } +} + /// A shared, mutable reference to some T. /// Wrapper is required for Hash impl of RefCell. 
#[derive(Debug, Eq, PartialOrd, Ord)] diff --git a/crates/noirc_frontend/src/node_interner.rs b/crates/noirc_frontend/src/node_interner.rs index 062e9daf2d6..f5fea5c1ea7 100644 --- a/crates/noirc_frontend/src/node_interner.rs +++ b/crates/noirc_frontend/src/node_interner.rs @@ -7,7 +7,7 @@ use noirc_errors::{Location, Span, Spanned}; use crate::ast::Ident; use crate::graph::CrateId; -use crate::hir::def_collector::dc_crate::UnresolvedStruct; +use crate::hir::def_collector::dc_crate::{UnresolvedStruct, UnresolvedTypeAlias}; use crate::hir::def_map::{LocalModuleId, ModuleId}; use crate::hir::StorageSlot; use crate::hir_def::stmt::HirLetStatement; @@ -17,7 +17,10 @@ use crate::hir_def::{ function::{FuncMeta, HirFunction}, stmt::HirStatement, }; -use crate::{Shared, TypeBinding, TypeBindings, TypeVariable, TypeVariableId, TypeVariableKind}; +use crate::{ + Generics, Shared, TypeAliasType, TypeBinding, TypeBindings, TypeVariable, TypeVariableId, + TypeVariableKind, +}; /// The node interner is the central storage location of all nodes in Noir's Hir (the /// various node types can be found in hir_def). The interner is also used to collect @@ -52,6 +55,12 @@ pub struct NodeInterner { // methods from impls to the type. structs: HashMap>, + // Type Aliases map. + // + // Map type aliases to the actual type. + // When resolving types, check against this map to see if a type alias is defined. + type_aliases: Vec, + /// Map from ExprId (referring to a Function/Method call) to its corresponding TypeBindings, /// filled out during type checking from instantiated variables. Used during monomorphization /// to map call site types back onto function parameter types, and undo this binding as needed. @@ -132,6 +141,15 @@ impl StructId { } } +#[derive(Debug, Eq, PartialEq, Hash, Copy, Clone)] +pub struct TypeAliasId(pub usize); + +impl TypeAliasId { + pub fn dummy_id() -> TypeAliasId { + TypeAliasId(std::usize::MAX) + } +} + macro_rules! 
into_index { ($id_type:ty) => { impl From<$id_type> for Index { @@ -243,6 +261,7 @@ impl Default for NodeInterner { definitions: vec![], id_to_type: HashMap::new(), structs: HashMap::new(), + type_aliases: Vec::new(), instantiation_bindings: HashMap::new(), field_indices: HashMap::new(), next_type_variable_id: 0, @@ -305,11 +324,33 @@ impl NodeInterner { ); } + pub fn push_type_alias(&mut self, typ: &UnresolvedTypeAlias) -> TypeAliasId { + let type_id = TypeAliasId(self.type_aliases.len()); + + self.type_aliases.push(TypeAliasType::new( + type_id, + typ.type_alias_def.name.clone(), + typ.type_alias_def.span, + Type::Error, + vecmap(&typ.type_alias_def.generics, |_| { + let id = TypeVariableId(0); + (id, Shared::new(TypeBinding::Unbound(id))) + }), + )); + + type_id + } + pub fn update_struct(&mut self, type_id: StructId, f: impl FnOnce(&mut StructType)) { let mut value = self.structs.get_mut(&type_id).unwrap().borrow_mut(); f(&mut value); } + pub fn set_type_alias(&mut self, type_id: TypeAliasId, typ: Type, generics: Generics) { + let type_alias_type = &mut self.type_aliases[type_id.0]; + type_alias_type.set_type_and_generics(typ, generics); + } + /// Returns the interned statement corresponding to `stmt_id` pub fn update_statement(&mut self, stmt_id: &StmtId, f: impl FnOnce(&mut HirStatement)) { let def = @@ -506,6 +547,10 @@ impl NodeInterner { self.structs[&id].clone() } + pub fn get_type_alias(&self, id: TypeAliasId) -> &TypeAliasType { + &self.type_aliases[id.0] + } + pub fn get_global(&self, stmt_id: &StmtId) -> Option { self.globals.get(stmt_id).cloned() } diff --git a/crates/noirc_frontend/src/parser/mod.rs b/crates/noirc_frontend/src/parser/mod.rs index 9cf9f1e9869..ad519836b39 100644 --- a/crates/noirc_frontend/src/parser/mod.rs +++ b/crates/noirc_frontend/src/parser/mod.rs @@ -17,8 +17,8 @@ use crate::token::{Keyword, Token}; use crate::{ast::ImportStatement, Expression, NoirStruct}; use crate::{ BlockExpression, ExpressionKind, ForExpression, Ident, IndexExpression, LetStatement, - MethodCallExpression, NoirFunction, NoirTrait, Path, PathKind, Pattern, Recoverable, Statement, - TraitImpl, TypeImpl, UnresolvedType, UseTree, + MethodCallExpression, NoirFunction, NoirTrait, NoirTypeAlias, Path, PathKind, Pattern, + Recoverable, Statement, TraitImpl, TypeImpl, UnresolvedType, UseTree, }; use acvm::FieldElement; @@ -43,6 +43,7 @@ pub(crate) enum TopLevelStatement { Trait(NoirTrait), TraitImpl(TraitImpl), Impl(TypeImpl), + TypeAlias(NoirTypeAlias), SubModule(SubModule), Global(LetStatement), Error, @@ -225,6 +226,7 @@ pub struct ParsedModule { pub traits: Vec, pub trait_impls: Vec, pub impls: Vec, + pub type_aliases: Vec, pub globals: Vec, /// Module declarations like `mod foo;` @@ -264,6 +266,10 @@ impl ParsedModule { self.impls.push(r#impl); } + fn push_type_alias(&mut self, type_alias: NoirTypeAlias) { + self.type_aliases.push(type_alias); + } + fn push_import(&mut self, import_stmt: UseTree) { self.imports.extend(import_stmt.desugar(None)); } @@ -463,6 +469,7 @@ impl std::fmt::Display for TopLevelStatement { TopLevelStatement::TraitImpl(i) => i.fmt(f), TopLevelStatement::Struct(s) => s.fmt(f), TopLevelStatement::Impl(i) => i.fmt(f), + TopLevelStatement::TypeAlias(t) => t.fmt(f), TopLevelStatement::SubModule(s) => s.fmt(f), TopLevelStatement::Global(c) => c.fmt(f), TopLevelStatement::Error => write!(f, "error"), @@ -496,6 +503,10 @@ impl std::fmt::Display for ParsedModule { write!(f, "{impl_}")?; } + for type_alias in &self.type_aliases { + write!(f, "{type_alias}")?; + } + for 
submodule in &self.submodules { write!(f, "{submodule}")?; } diff --git a/crates/noirc_frontend/src/parser/parser.rs b/crates/noirc_frontend/src/parser/parser.rs index 65446e5d6c6..6445205eae6 100644 --- a/crates/noirc_frontend/src/parser/parser.rs +++ b/crates/noirc_frontend/src/parser/parser.rs @@ -36,8 +36,8 @@ use crate::token::{Attribute, Keyword, Token, TokenKind}; use crate::{ BinaryOp, BinaryOpKind, BlockExpression, CompTime, ConstrainStatement, FunctionDefinition, Ident, IfExpression, InfixExpression, LValue, Lambda, Literal, NoirFunction, NoirStruct, - NoirTrait, Path, PathKind, Pattern, Recoverable, TraitConstraint, TraitImpl, TraitImplItem, - TraitItem, TypeImpl, UnaryOp, UnresolvedTypeExpression, UseTree, UseTreeKind, + NoirTrait, NoirTypeAlias, Path, PathKind, Pattern, Recoverable, TraitConstraint, TraitImpl, + TraitImplItem, TraitItem, TypeImpl, UnaryOp, UnresolvedTypeExpression, UseTree, UseTreeKind, }; use chumsky::prelude::*; @@ -82,6 +82,7 @@ fn module() -> impl NoirParser { TopLevelStatement::Trait(t) => program.push_trait(t), TopLevelStatement::TraitImpl(t) => program.push_trait_impl(t), TopLevelStatement::Impl(i) => program.push_impl(i), + TopLevelStatement::TypeAlias(t) => program.push_type_alias(t), TopLevelStatement::SubModule(s) => program.push_submodule(s), TopLevelStatement::Global(c) => program.push_global(c), TopLevelStatement::Error => (), @@ -108,6 +109,7 @@ fn top_level_statement( trait_definition(), trait_implementation(), implementation(), + type_alias_definition().then_ignore(force(just(Token::Semicolon))), submodule(module_parser.clone()), contract(module_parser), module_declaration().then_ignore(force(just(Token::Semicolon))), @@ -236,6 +238,19 @@ fn struct_definition() -> impl NoirParser { ) } +fn type_alias_definition() -> impl NoirParser { + use self::Keyword::Type; + + let p = ignore_then_commit(keyword(Type), ident()); + let p = then_commit(p, generics()); + let p = then_commit_ignore(p, just(Token::Assign)); + let p = then_commit(p, parse_type()); + + p.map_with_span(|((name, generics), typ), span| { + TopLevelStatement::TypeAlias(NoirTypeAlias { name, generics, typ, span }) + }) +} + fn lambda_return_type() -> impl NoirParser { just(Token::Arrow) .ignore_then(parse_type()) @@ -1917,6 +1932,15 @@ mod test { parse_all_failing(struct_definition(), failing); } + #[test] + fn parse_type_aliases() { + let cases = vec!["type foo = u8", "type bar = String", "type baz = Vec"]; + parse_all(type_alias_definition(), cases); + + let failing = vec!["type = u8", "type foo", "type foo = 1"]; + parse_all_failing(type_alias_definition(), failing); + } + #[test] fn parse_member_access() { let cases = vec!["a.b", "a + b.c", "foo.bar as i32"]; From 3a423686ee657db9cc3cbc6376fe0f7b4316ccc4 Mon Sep 17 00:00:00 2001 From: Maxim Vezenov Date: Tue, 1 Aug 2023 21:36:39 +0100 Subject: [PATCH 07/19] chore: Make a more clear error for slices passed to std::println (#2113) * chore: make a more clear error for slices passed to std::println * fix up err message --- .../src/monomorphization/mod.rs | 42 ++++++++++--------- 1 file changed, 22 insertions(+), 20 deletions(-) diff --git a/crates/noirc_frontend/src/monomorphization/mod.rs b/crates/noirc_frontend/src/monomorphization/mod.rs index 963d16a311c..dbe2ee080bf 100644 --- a/crates/noirc_frontend/src/monomorphization/mod.rs +++ b/crates/noirc_frontend/src/monomorphization/mod.rs @@ -727,45 +727,47 @@ impl<'interner> Monomorphizer<'interner> { HirExpression::Ident(ident) => { let typ = self.interner.id_type(ident.id); let typ: 
Type = typ.follow_bindings(); - match &typ { + let is_fmt_str = match typ { // A format string has many different possible types that need to be handled. // Loop over each element in the format string to fetch each type's relevant metadata Type::FmtString(_, elements) => { - match elements.as_ref() { + match *elements { Type::Tuple(element_types) => { for typ in element_types { - let abi_type = typ.as_abi_type(); - let abi_as_string = serde_json::to_string(&abi_type) - .expect("ICE: expected Abi type to serialize"); - - arguments.push(ast::Expression::Literal(ast::Literal::Str( - abi_as_string, - ))); + Self::append_abi_arg_inner(&typ, arguments); } } _ => unreachable!( "ICE: format string type should be a tuple but got a {elements}" ), } - - // The caller needs information as to whether it is handling a format string or a single type - arguments.push(ast::Expression::Literal(ast::Literal::Bool(true))); + true } _ => { - let abi_type = typ.as_abi_type(); - let abi_as_string = serde_json::to_string(&abi_type) - .expect("ICE: expected Abi type to serialize"); - - arguments.push(ast::Expression::Literal(ast::Literal::Str(abi_as_string))); - // The caller needs information as to whether it is handling a format string or a single type - arguments.push(ast::Expression::Literal(ast::Literal::Bool(false))); + Self::append_abi_arg_inner(&typ, arguments); + false } - } + }; + // The caller needs information as to whether it is handling a format string or a single type + arguments.push(ast::Expression::Literal(ast::Literal::Bool(is_fmt_str))); } _ => unreachable!("logging expr {:?} is not supported", arguments[0]), } } + fn append_abi_arg_inner(typ: &Type, arguments: &mut Vec) { + if let HirType::Array(size, _) = typ { + if let HirType::NotConstant = **size { + unreachable!("println does not support slices. Convert the slice to an array before passing it to println"); + } + } + let abi_type = typ.as_abi_type(); + let abi_as_string = + serde_json::to_string(&abi_type).expect("ICE: expected Abi type to serialize"); + + arguments.push(ast::Expression::Literal(ast::Literal::Str(abi_as_string))); + } + /// Try to evaluate certain builtin functions (currently only 'array_len' and field modulus methods) /// at their call site. /// NOTE: Evaluating at the call site means we cannot track aliased functions. 
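A minimal Noir sketch (not part of this patch series) of the caller-side pattern the println changes above are aiming at. It assumes `std::println` accepts fixed-size arrays and the new `f"..."` format strings; the helper name `print_first_three`, the slice parameter, and the hard-coded length 3 are illustrative assumptions only.

use dep::std;

// Hypothetical helper, assuming `xs` holds at least three elements.
fn print_first_three(xs: [Field]) {
    // println rejects slices (their length is not a compile-time constant),
    // so copy the values of interest into a fixed-size array first,
    // as the clearer error message added above advises.
    let mut firsts = [0, 0, 0];
    for i in 0..3 {
        firsts[i] = xs[i];
    }
    std::println(firsts);

    // A format string lowers to a FmtStr literal plus a tuple of the captured
    // identifiers; append_abi_arg then serializes one ABI type per captured value.
    let count = firsts.len();
    std::println(f"printed {count} values");
}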
From 940b189d4fd47dad8cc9f2650162da9e99c5024c Mon Sep 17 00:00:00 2001 From: Blaine Bublitz Date: Tue, 1 Aug 2023 14:51:22 -0700 Subject: [PATCH 08/19] feat!: Support workspaces and package selection on every nargo command (#1992) * feat!: Support workspaces and package selection on every nargo command * add package name to contract directory * print package name at the beginning of any stdout messages * Remove circuit_name from compile command and use package name * remove resolve_workspace_in_directory * avoid resolving dependencies as a Workspace struct by always requiring it to be a Package * chore: ensure workspace packages are distinct * Update crates/nargo_cli/src/git.rs * remove proof name argument and use package name, remove stdout printing of proof * fix tests * rename functions to be more descriptive * add issue number to todo --------- Co-authored-by: Tom French Co-authored-by: Tom French <15848336+TomAFrench@users.noreply.github.com> --- Cargo.lock | 2 +- crates/nargo/Cargo.toml | 2 +- crates/{nargo_cli => nargo}/src/constants.rs | 18 +- crates/nargo/src/lib.rs | 4 +- crates/nargo/src/manifest/errors.rs | 26 -- crates/nargo/src/manifest/mod.rs | 147 --------- crates/nargo/src/package.rs | 33 ++ crates/nargo/src/workspace.rs | 74 +++++ crates/nargo_cli/build.rs | 4 +- crates/nargo_cli/src/cli/check_cmd.rs | 86 +++--- .../nargo_cli/src/cli/codegen_verifier_cmd.rs | 97 +++--- crates/nargo_cli/src/cli/compile_cmd.rs | 141 +++++---- crates/nargo_cli/src/cli/execute_cmd.rs | 53 ++-- crates/nargo_cli/src/cli/fs/inputs.rs | 2 +- crates/nargo_cli/src/cli/fs/program.rs | 6 +- crates/nargo_cli/src/cli/fs/proof.rs | 4 +- crates/nargo_cli/src/cli/fs/witness.rs | 3 +- crates/nargo_cli/src/cli/info_cmd.rs | 38 ++- crates/nargo_cli/src/cli/init_cmd.rs | 6 +- crates/nargo_cli/src/cli/prove_cmd.rs | 149 ++++----- crates/nargo_cli/src/cli/test_cmd.rs | 58 ++-- crates/nargo_cli/src/cli/verify_cmd.rs | 106 ++++--- crates/nargo_cli/src/errors.rs | 56 +++- crates/nargo_cli/src/git.rs | 11 +- crates/nargo_cli/src/lib.rs | 60 +++- crates/nargo_cli/src/manifest.rs | 289 +++++++++++++++++- crates/nargo_cli/src/resolver.rs | 265 ---------------- crates/nargo_cli/tests/codegen-verifier.rs | 6 +- crates/nargo_cli/tests/hello_world.rs | 9 +- crates/nargo_cli/tests/test_data/config.toml | 2 +- .../test_data/workspace/crates/a/Prover.toml | 2 + .../test_data/workspace/crates/a/src/main.nr | 10 +- .../test_data/workspace/crates/b/Prover.toml | 2 + .../test_data/workspace/crates/b/src/main.nr | 8 - .../workspace_default_member/a/Prover.toml | 2 + .../workspace_default_member/a/src/main.nr | 10 +- .../workspace_default_member/b/Nargo.toml | 6 + .../workspace_default_member/b/Prover.toml | 3 + .../workspace_default_member/b/src/main.nr | 3 + .../tests/test_data/workspace_fail/Nargo.toml | 2 + .../workspace_fail/crates/a/Nargo.toml | 6 + .../workspace_fail/crates/a/Prover.toml | 3 + .../workspace_fail/crates/a/src/main.nr | 3 + .../workspace_fail/crates/b/Nargo.toml | 6 + .../workspace_fail/crates/b/Prover.toml | 2 + .../workspace_fail/crates/b/src/main.nr | 3 + .../workspace_missing_toml/Nargo.toml | 2 + .../crates/a/Prover.toml | 2 + .../crates/a/src/main.nr | 3 + .../crates/b/Nargo.toml | 6 + .../crates/b/Prover.toml | 2 + .../crates/b/src/main.nr | 3 + crates/noirc_driver/src/lib.rs | 20 +- crates/noirc_frontend/src/graph/mod.rs | 16 +- crates/noirc_frontend/src/hir/mod.rs | 18 +- 55 files changed, 1004 insertions(+), 896 deletions(-) rename crates/{nargo_cli => nargo}/src/constants.rs (55%) delete mode 
100644 crates/nargo/src/manifest/errors.rs delete mode 100644 crates/nargo/src/manifest/mod.rs create mode 100644 crates/nargo/src/package.rs delete mode 100644 crates/nargo_cli/src/resolver.rs create mode 100644 crates/nargo_cli/tests/test_data/workspace/crates/a/Prover.toml create mode 100644 crates/nargo_cli/tests/test_data/workspace/crates/b/Prover.toml create mode 100644 crates/nargo_cli/tests/test_data/workspace_default_member/a/Prover.toml create mode 100644 crates/nargo_cli/tests/test_data/workspace_default_member/b/Nargo.toml create mode 100644 crates/nargo_cli/tests/test_data/workspace_default_member/b/Prover.toml create mode 100644 crates/nargo_cli/tests/test_data/workspace_default_member/b/src/main.nr create mode 100644 crates/nargo_cli/tests/test_data/workspace_fail/Nargo.toml create mode 100644 crates/nargo_cli/tests/test_data/workspace_fail/crates/a/Nargo.toml create mode 100644 crates/nargo_cli/tests/test_data/workspace_fail/crates/a/Prover.toml create mode 100644 crates/nargo_cli/tests/test_data/workspace_fail/crates/a/src/main.nr create mode 100644 crates/nargo_cli/tests/test_data/workspace_fail/crates/b/Nargo.toml create mode 100644 crates/nargo_cli/tests/test_data/workspace_fail/crates/b/Prover.toml create mode 100644 crates/nargo_cli/tests/test_data/workspace_fail/crates/b/src/main.nr create mode 100644 crates/nargo_cli/tests/test_data/workspace_missing_toml/Nargo.toml create mode 100644 crates/nargo_cli/tests/test_data/workspace_missing_toml/crates/a/Prover.toml create mode 100644 crates/nargo_cli/tests/test_data/workspace_missing_toml/crates/a/src/main.nr create mode 100644 crates/nargo_cli/tests/test_data/workspace_missing_toml/crates/b/Nargo.toml create mode 100644 crates/nargo_cli/tests/test_data/workspace_missing_toml/crates/b/Prover.toml create mode 100644 crates/nargo_cli/tests/test_data/workspace_missing_toml/crates/b/src/main.nr diff --git a/Cargo.lock b/Cargo.lock index 1b7a70b2063..f513136caf3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1982,12 +1982,12 @@ dependencies = [ "noirc_abi", "noirc_driver", "noirc_errors", + "noirc_frontend", "regex", "rustc_version", "serde", "serde_json", "thiserror", - "toml", ] [[package]] diff --git a/crates/nargo/Cargo.toml b/crates/nargo/Cargo.toml index afbafdff931..3039268281c 100644 --- a/crates/nargo/Cargo.toml +++ b/crates/nargo/Cargo.toml @@ -14,8 +14,8 @@ rustc_version = "0.4.0" acvm.workspace = true noirc_abi.workspace = true noirc_driver.workspace = true +noirc_frontend.workspace = true iter-extended.workspace = true -toml.workspace = true serde.workspace = true serde_json.workspace = true thiserror.workspace = true diff --git a/crates/nargo_cli/src/constants.rs b/crates/nargo/src/constants.rs similarity index 55% rename from crates/nargo_cli/src/constants.rs rename to crates/nargo/src/constants.rs index d3e6b7f28e1..5e448277694 100644 --- a/crates/nargo_cli/src/constants.rs +++ b/crates/nargo/src/constants.rs @@ -1,23 +1,23 @@ // Directories /// The directory for the `nargo contract` command output -pub(crate) const CONTRACT_DIR: &str = "contract"; +pub const CONTRACT_DIR: &str = "contract"; /// The directory to store serialized circuit proofs. -pub(crate) const PROOFS_DIR: &str = "proofs"; +pub const PROOFS_DIR: &str = "proofs"; /// The directory to store Noir source files -pub(crate) const SRC_DIR: &str = "src"; +pub const SRC_DIR: &str = "src"; /// The directory to store circuits' serialized ACIR representations. 
-pub(crate) const TARGET_DIR: &str = "target"; +pub const TARGET_DIR: &str = "target"; // Files /// The file from which Nargo pulls prover inputs -pub(crate) const PROVER_INPUT_FILE: &str = "Prover"; +pub const PROVER_INPUT_FILE: &str = "Prover"; /// The file from which Nargo pulls verifier inputs -pub(crate) const VERIFIER_INPUT_FILE: &str = "Verifier"; +pub const VERIFIER_INPUT_FILE: &str = "Verifier"; /// The package definition file for a Noir project. -pub(crate) const PKG_FILE: &str = "Nargo.toml"; +pub const PKG_FILE: &str = "Nargo.toml"; // Extensions /// The extension for files containing circuit proofs. -pub(crate) const PROOF_EXT: &str = "proof"; +pub const PROOF_EXT: &str = "proof"; /// The extension for files containing proof witnesses. -pub(crate) const WITNESS_EXT: &str = "tr"; +pub const WITNESS_EXT: &str = "tr"; diff --git a/crates/nargo/src/lib.rs b/crates/nargo/src/lib.rs index 24605de7849..fda02cf98c2 100644 --- a/crates/nargo/src/lib.rs +++ b/crates/nargo/src/lib.rs @@ -8,8 +8,10 @@ //! Noir Package Manager abbreviated is npm, which is already taken. pub mod artifacts; +pub mod constants; mod errors; -pub mod manifest; pub mod ops; +pub mod package; +pub mod workspace; pub use self::errors::NargoError; diff --git a/crates/nargo/src/manifest/errors.rs b/crates/nargo/src/manifest/errors.rs deleted file mode 100644 index 250211de6fb..00000000000 --- a/crates/nargo/src/manifest/errors.rs +++ /dev/null @@ -1,26 +0,0 @@ -use std::path::PathBuf; -use thiserror::Error; - -/// Errors covering situations where a package is either missing or malformed. -#[derive(Debug, Error)] -pub enum InvalidPackageError { - /// Package doesn't have a manifest file - #[error("cannot find a Nargo.toml in {}", .0.display())] - MissingManifestFile(PathBuf), - - /// Package manifest is unreadable. - #[error("Nargo.toml is badly formed, could not parse.\n\n {0}")] - MalformedManifestFile(#[from] toml::de::Error), - - /// Package does not contain Noir source files. - #[error("cannot find src directory in path {}", .0.display())] - NoSourceDir(PathBuf), - - /// Package has neither of `main.nr` and `lib.nr`. - #[error("package must contain either a `lib.nr`(Library) or a `main.nr`(Binary).")] - ContainsZeroCrates, - - /// Package has both a `main.nr` (for binaries) and `lib.nr` (for libraries) - #[error("package cannot contain both a `lib.nr` and a `main.nr`")] - ContainsMultipleCrates, -} diff --git a/crates/nargo/src/manifest/mod.rs b/crates/nargo/src/manifest/mod.rs deleted file mode 100644 index f5a247cf72a..00000000000 --- a/crates/nargo/src/manifest/mod.rs +++ /dev/null @@ -1,147 +0,0 @@ -use serde::Deserialize; -use std::{collections::BTreeMap, path::PathBuf}; - -mod errors; -pub use self::errors::InvalidPackageError; - -#[derive(Debug, Deserialize, Clone)] -pub struct PackageManifest { - pub package: PackageMetadata, - pub dependencies: BTreeMap, -} - -/// Contains all the information about a package, as loaded from a `Nargo.toml`. -/// Represents a manifest, which can be either a package manifest or a workspace manifest. -#[derive(Debug, Deserialize, Clone)] -#[serde(untagged)] -pub enum Manifest { - /// Represents a package manifest. - Package(PackageManifest), - /// Represents a workspace manifest. 
- Workspace(Workspace), -} - -impl Manifest { - pub fn from_toml_str(toml_as_string: &str) -> Result { - let manifest = toml::from_str(toml_as_string)?; - Ok(manifest) - } - - pub fn to_package(self) -> Option { - match self { - Self::Package(v) => Some(v), - _ => None, - } - } -} - -impl PackageManifest { - /// Returns whether the package has a local dependency. - // Local paths are usually relative and are discouraged when sharing libraries - // It is better to separate these into different packages. - pub fn has_local_dependency(&self) -> bool { - self.dependencies.values().any(|dep| matches!(dep, Dependency::Path { .. })) - } -} - -/// Configuration of a workspace in a manifest. -/// Indicates that `[workspace]` was present and the members were specified as well. -#[derive(Debug, Deserialize, Clone)] -pub struct Workspace { - #[serde(rename = "workspace")] - pub config: WorkspaceConfig, -} - -#[derive(Default, Debug, Deserialize, Clone)] -#[serde(rename_all = "kebab-case")] -pub struct WorkspaceConfig { - /// List of members in this workspace. - pub members: Vec, - /// Specifies the default crate to interact with in the context (similarly to how we have nargo as the default crate in this repository). - pub default_member: Option, -} - -#[allow(dead_code)] -#[derive(Default, Debug, Deserialize, Clone)] -pub struct PackageMetadata { - #[serde(default = "panic_missing_name")] - pub name: String, - description: Option, - authors: Vec, - // If not compiler version is supplied, the latest is used - // For now, we state that all packages must be compiled under the same - // compiler version. - // We also state that ACIR and the compiler will upgrade in lockstep. - // so you will not need to supply an ACIR and compiler version - compiler_version: Option, - backend: Option, - license: Option, -} - -// TODO: Remove this after a couple of breaking releases (added in 0.10.0) -fn panic_missing_name() -> String { - panic!( - r#" - -Failed to parse `Nargo.toml`. - -`Nargo.toml` now requires a "name" field for Noir packages. - -```toml -[package] -name = "package_name" -``` - -Modify your `Nargo.toml` similarly to above and rerun the command. 
- -"# - ) -} - -#[derive(Debug, Deserialize, Clone)] -#[serde(untagged)] -/// Enum representing the different types of ways to -/// supply a source for the dependency -pub enum Dependency { - Github { git: String, tag: String }, - Path { path: String }, -} - -#[test] -fn parse_standard_toml() { - let src = r#" - - [package] - name = "test" - authors = ["kev", "foo"] - compiler_version = "0.1" - - [dependencies] - rand = { tag = "next", git = "https://github.com/rust-lang-nursery/rand"} - cool = { tag = "next", git = "https://github.com/rust-lang-nursery/rand"} - hello = {path = "./noir_driver"} - "#; - - assert!(Manifest::from_toml_str(src).is_ok()); -} - -#[test] -fn parse_workspace_toml() { - let src = r#" - [workspace] - members = ["a", "b"] - "#; - - assert!(Manifest::from_toml_str(src).is_ok()); -} - -#[test] -fn parse_workspace_default_member_toml() { - let src = r#" - [workspace] - members = ["a", "b"] - default-member = "a" - "#; - - assert!(Manifest::from_toml_str(src).is_ok()); -} diff --git a/crates/nargo/src/package.rs b/crates/nargo/src/package.rs new file mode 100644 index 00000000000..20c662b69f4 --- /dev/null +++ b/crates/nargo/src/package.rs @@ -0,0 +1,33 @@ +use std::{collections::BTreeMap, path::PathBuf}; + +use noirc_frontend::graph::{CrateName, CrateType}; + +use crate::constants::{PROVER_INPUT_FILE, VERIFIER_INPUT_FILE}; + +#[derive(Clone)] +pub enum Dependency { + Local { package: Package }, + Remote { package: Package }, +} + +#[derive(Clone)] +pub struct Package { + pub root_dir: PathBuf, + pub crate_type: CrateType, + pub entry_path: PathBuf, + pub name: CrateName, + pub dependencies: BTreeMap, +} + +impl Package { + pub fn prover_input_path(&self) -> PathBuf { + // TODO: This should be configurable, such as if we are looking for .json or .toml or custom paths + // For now it is hard-coded to be toml. + self.root_dir.join(format!("{PROVER_INPUT_FILE}.toml")) + } + pub fn verifier_input_path(&self) -> PathBuf { + // TODO: This should be configurable, such as if we are looking for .json or .toml or custom paths + // For now it is hard-coded to be toml. + self.root_dir.join(format!("{VERIFIER_INPUT_FILE}.toml")) + } +} diff --git a/crates/nargo/src/workspace.rs b/crates/nargo/src/workspace.rs index 0954b4eb143..5df13350683 100644 --- a/crates/nargo/src/workspace.rs +++ b/crates/nargo/src/workspace.rs @@ -2,3 +2,77 @@ // Then we use workspace to allow more than one. In the future, do not allow there to be // both a binary and a library. 
// - library will be default + +use std::{ + iter::{once, Once}, + path::PathBuf, + slice, +}; + +use crate::{ + constants::{CONTRACT_DIR, PROOFS_DIR, TARGET_DIR}, + package::Package, +}; + +#[derive(Clone)] +pub struct Workspace { + pub root_dir: PathBuf, + pub members: Vec, + // If `Some()`, the `selected_package_index` is used to select the only `Package` when iterating a Workspace + pub selected_package_index: Option, +} + +impl Workspace { + pub fn package_build_path(&self, package: &Package) -> PathBuf { + let name: String = package.name.clone().into(); + self.target_directory_path().join(name) + } + + pub fn contracts_directory_path(&self, package: &Package) -> PathBuf { + let name: String = package.name.clone().into(); + self.root_dir.join(CONTRACT_DIR).join(name) + } + + pub fn proofs_directory_path(&self) -> PathBuf { + self.root_dir.join(PROOFS_DIR) + } + + pub fn target_directory_path(&self) -> PathBuf { + self.root_dir.join(TARGET_DIR) + } +} + +pub enum IntoIter<'a, T> { + Only(Once<&'a T>), + All(slice::Iter<'a, T>), +} + +impl<'a> IntoIterator for &'a Workspace { + type Item = &'a Package; + type IntoIter = IntoIter<'a, Package>; + + fn into_iter(self) -> Self::IntoIter { + if let Some(index) = self.selected_package_index { + // Precondition: The selected_package_index was verified to be in-bounds before constructing workspace + let member = self + .members + .get(index) + .expect("Workspace constructed with invalid selected_package_index"); + + IntoIter::Only(once(member)) + } else { + IntoIter::All(self.members.iter()) + } + } +} + +impl<'a> Iterator for IntoIter<'a, Package> { + type Item = &'a Package; + + fn next(&mut self) -> Option { + match self { + Self::Only(iter) => iter.next(), + Self::All(iter) => iter.next(), + } + } +} diff --git a/crates/nargo_cli/build.rs b/crates/nargo_cli/build.rs index d889ba6856c..f3493148a7f 100644 --- a/crates/nargo_cli/build.rs +++ b/crates/nargo_cli/build.rs @@ -84,7 +84,6 @@ fn generate_tests(test_file: &mut File) { if config_data["exclude"].contains(&test_name) { "#[ignore]" } else { "" }; let should_fail = config_data["fail"].contains(&test_name); - let is_workspace = test_dir.to_str().map_or(false, |s| s.contains("workspace")); write!( test_file, @@ -96,8 +95,7 @@ fn execute_{test_sub_dir}_{test_name}() {{ let mut cmd = Command::cargo_bin("nargo").unwrap(); cmd.arg("--program-dir").arg(test_program_dir); - cmd.arg(if {is_workspace} {{ "test" }} else {{ "execute" }}); - + cmd.arg("execute"); if {should_fail} {{ cmd.assert().failure(); diff --git a/crates/nargo_cli/src/cli/check_cmd.rs b/crates/nargo_cli/src/cli/check_cmd.rs index 9a0a2f77e7c..8f2e23ed750 100644 --- a/crates/nargo_cli/src/cli/check_cmd.rs +++ b/crates/nargo_cli/src/cli/check_cmd.rs @@ -1,53 +1,58 @@ -use crate::{errors::CliError, resolver::resolve_root_manifest}; +use crate::{ + errors::CliError, find_package_manifest, manifest::resolve_workspace_from_toml, prepare_package, +}; use acvm::Backend; use clap::Args; use iter_extended::btree_map; +use nargo::package::Package; use noirc_abi::{AbiParameter, AbiType, MAIN_RETURN_NAME}; use noirc_driver::{check_crate, compute_function_signature, CompileOptions}; use noirc_errors::reporter::ReportedErrors; -use noirc_frontend::{graph::CrateId, hir::Context}; -use std::path::{Path, PathBuf}; +use noirc_frontend::{ + graph::{CrateId, CrateName}, + hir::Context, +}; use super::fs::write_to_file; use super::NargoConfig; -use crate::constants::{PROVER_INPUT_FILE, VERIFIER_INPUT_FILE}; /// Checks the constraint system for errors 
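The `check` command below is the first consumer of the `&Workspace` iterator added in `workspace.rs` above. A minimal sketch of what that loop yields, assuming only the `Workspace` and `Package` types from this patch (the helper function itself is illustrative):

```rust
use nargo::workspace::Workspace;

// Illustrative helper: with `selected_package_index: Some(i)` the loop visits only
// `members[i]` (e.g. the member picked by `--package` or `default-member`); with
// `None` it visits every member in declaration order.
fn list_members(workspace: &Workspace) {
    for package in workspace {
        println!("[{}] would be processed", package.name);
    }
}
```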
#[derive(Debug, Clone, Args)] pub(crate) struct CheckCommand { + /// The name of the package to check + #[clap(long)] + package: Option, + #[clap(flatten)] compile_options: CompileOptions, } pub(crate) fn run( - backend: &B, + _backend: &B, args: CheckCommand, config: NargoConfig, ) -> Result<(), CliError> { - check_from_path(backend, &config.program_dir, &args.compile_options)?; - println!("Constraint system successfully built!"); + let toml_path = find_package_manifest(&config.program_dir)?; + let workspace = resolve_workspace_from_toml(&toml_path, args.package)?; + + for package in &workspace { + check_package(package, &args.compile_options)?; + println!("[{}] Constraint system successfully built!", package.name); + } Ok(()) } -fn check_from_path( - // Backend isn't used but keeping it in the signature allows for better type inference - // TODO: This function doesn't need to exist but requires a little more refactoring - _backend: &B, - program_dir: &Path, +fn check_package( + package: &Package, compile_options: &CompileOptions, -) -> Result<(), CliError> { - let (mut context, crate_id) = resolve_root_manifest(program_dir, None)?; +) -> Result<(), ReportedErrors> { + let (mut context, crate_id) = prepare_package(package); check_crate_and_report_errors(&mut context, crate_id, compile_options.deny_warnings)?; // XXX: We can have a --overwrite flag to determine if you want to overwrite the Prover/Verifier.toml files if let Some((parameters, return_type)) = compute_function_signature(&context, &crate_id) { - // XXX: The root config should return an enum to determine if we are looking for .json or .toml - // For now it is hard-coded to be toml. - // - // Check for input.toml and verifier.toml - let path_to_root = PathBuf::from(program_dir); - let path_to_prover_input = path_to_root.join(format!("{PROVER_INPUT_FILE}.toml")); - let path_to_verifier_input = path_to_root.join(format!("{VERIFIER_INPUT_FILE}.toml")); + let path_to_prover_input = package.prover_input_path(); + let path_to_verifier_input = package.verifier_input_path(); // If they are not available, then create them and populate them based on the ABI if !path_to_prover_input.exists() { @@ -108,6 +113,8 @@ mod tests { use noirc_abi::{AbiParameter, AbiType, AbiVisibility, Sign}; use noirc_driver::CompileOptions; + use crate::{find_package_manifest, manifest::resolve_workspace_from_toml}; + use super::create_input_toml_template; const TEST_DATA_DIR: &str = "tests/target_tests_data"; @@ -157,16 +164,15 @@ d2 = ["", "", ""] let pass_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join(format!("{TEST_DATA_DIR}/pass")); - let backend = crate::backends::ConcreteBackend::default(); let config = CompileOptions::default(); let paths = std::fs::read_dir(pass_dir).unwrap(); for path in paths.flatten() { let path = path.path(); - assert!( - super::check_from_path(&backend, &path, &config).is_ok(), - "path: {}", - path.display() - ); + let toml_path = find_package_manifest(&path).unwrap(); + let workspace = resolve_workspace_from_toml(&toml_path, None).unwrap(); + for package in &workspace { + assert!(super::check_package(package, &config).is_ok(), "path: {}", path.display()); + } } } @@ -176,16 +182,19 @@ d2 = ["", "", ""] let fail_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join(format!("{TEST_DATA_DIR}/fail")); - let backend = crate::backends::ConcreteBackend::default(); let config = CompileOptions::default(); let paths = std::fs::read_dir(fail_dir).unwrap(); for path in paths.flatten() { let path = path.path(); - assert!( - 
super::check_from_path(&backend, &path, &config).is_err(), - "path: {}", - path.display() - ); + let toml_path = find_package_manifest(&path).unwrap(); + let workspace = resolve_workspace_from_toml(&toml_path, None).unwrap(); + for package in &workspace { + assert!( + super::check_package(package, &config).is_err(), + "path: {}", + path.display() + ); + } } } @@ -194,17 +203,16 @@ d2 = ["", "", ""] let pass_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")) .join(format!("{TEST_DATA_DIR}/pass_dev_mode")); - let backend = crate::backends::ConcreteBackend::default(); let config = CompileOptions { deny_warnings: false, ..Default::default() }; let paths = std::fs::read_dir(pass_dir).unwrap(); for path in paths.flatten() { let path = path.path(); - assert!( - super::check_from_path(&backend, &path, &config).is_ok(), - "path: {}", - path.display() - ); + let toml_path = find_package_manifest(&path).unwrap(); + let workspace = resolve_workspace_from_toml(&toml_path, None).unwrap(); + for package in &workspace { + assert!(super::check_package(package, &config).is_ok(), "path: {}", path.display()); + } } } } diff --git a/crates/nargo_cli/src/cli/codegen_verifier_cmd.rs b/crates/nargo_cli/src/cli/codegen_verifier_cmd.rs index cedf558bcb8..0c01f8d5dc8 100644 --- a/crates/nargo_cli/src/cli/codegen_verifier_cmd.rs +++ b/crates/nargo_cli/src/cli/codegen_verifier_cmd.rs @@ -1,3 +1,5 @@ +use std::path::PathBuf; + use super::fs::{ common_reference_string::{ read_cached_common_reference_string, update_common_reference_string, @@ -8,20 +10,23 @@ use super::fs::{ write_to_file, }; use super::NargoConfig; -use crate::{ - cli::compile_cmd::compile_circuit, constants::CONTRACT_DIR, constants::TARGET_DIR, - errors::CliError, -}; +use crate::{cli::compile_cmd::compile_circuit, errors::CliError}; +use crate::{find_package_manifest, manifest::resolve_workspace_from_toml, prepare_package}; use acvm::Backend; use clap::Args; -use nargo::ops::{codegen_verifier, preprocess_program}; +use nargo::{ + ops::{codegen_verifier, preprocess_program}, + package::Package, +}; use noirc_driver::CompileOptions; +use noirc_frontend::graph::CrateName; /// Generates a Solidity verifier smart contract for the program #[derive(Debug, Clone, Args)] pub(crate) struct CodegenVerifierCommand { - /// The name of the circuit build files (ACIR, proving and verification keys) - circuit_name: Option, + /// The name of the package to codegen + #[clap(long)] + package: Option, #[clap(flatten)] compile_options: CompileOptions, @@ -32,34 +37,52 @@ pub(crate) fn run( args: CodegenVerifierCommand, config: NargoConfig, ) -> Result<(), CliError> { - // TODO(#1201): Should this be a utility function? 
- let circuit_build_path = args - .circuit_name - .map(|circuit_name| config.program_dir.join(TARGET_DIR).join(circuit_name)); + let toml_path = find_package_manifest(&config.program_dir)?; + let workspace = resolve_workspace_from_toml(&toml_path, args.package)?; - let common_reference_string = read_cached_common_reference_string(); + for package in &workspace { + let circuit_build_path = workspace.package_build_path(package); + + let smart_contract_string = smart_contract_for_package( + backend, + package, + circuit_build_path, + &args.compile_options, + )?; + + let contract_dir = workspace.contracts_directory_path(package); + create_named_dir(&contract_dir, "contract"); + let contract_path = contract_dir.join("plonk_vk").with_extension("sol"); + + let path = write_to_file(smart_contract_string.as_bytes(), &contract_path); + println!("[{}] Contract successfully created and located at {path}", package.name); + } - let (common_reference_string, preprocessed_program) = match circuit_build_path { - Some(circuit_build_path) => { - let program = read_program_from_file(circuit_build_path)?; - let common_reference_string = update_common_reference_string( - backend, - &common_reference_string, - &program.bytecode, - ) - .map_err(CliError::CommonReferenceStringError)?; - (common_reference_string, program) - } - None => { - let (program, _) = - compile_circuit(backend, None, config.program_dir.as_ref(), &args.compile_options)?; - let common_reference_string = - update_common_reference_string(backend, &common_reference_string, &program.circuit) - .map_err(CliError::CommonReferenceStringError)?; - let (program, _) = preprocess_program(backend, true, &common_reference_string, program) - .map_err(CliError::ProofSystemCompilerError)?; - (common_reference_string, program) - } + Ok(()) +} + +fn smart_contract_for_package( + backend: &B, + package: &Package, + circuit_build_path: PathBuf, + compile_options: &CompileOptions, +) -> Result> { + let common_reference_string = read_cached_common_reference_string(); + let (common_reference_string, preprocessed_program) = if circuit_build_path.exists() { + let program = read_program_from_file(circuit_build_path)?; + let common_reference_string = + update_common_reference_string(backend, &common_reference_string, &program.bytecode) + .map_err(CliError::CommonReferenceStringError)?; + (common_reference_string, program) + } else { + let (mut context, crate_id) = prepare_package(package); + let program = compile_circuit(backend, &mut context, crate_id, compile_options)?; + let common_reference_string = + update_common_reference_string(backend, &common_reference_string, &program.circuit) + .map_err(CliError::CommonReferenceStringError)?; + let (program, _) = preprocess_program(backend, true, &common_reference_string, program) + .map_err(CliError::ProofSystemCompilerError)?; + (common_reference_string, program) }; let verification_key = preprocessed_program @@ -75,11 +98,5 @@ pub(crate) fn run( write_cached_common_reference_string(&common_reference_string); - let contract_dir = config.program_dir.join(CONTRACT_DIR); - create_named_dir(&contract_dir, "contract"); - let contract_path = contract_dir.join("plonk_vk").with_extension("sol"); - - let path = write_to_file(smart_contract_string.as_bytes(), &contract_path); - println!("Contract successfully created and located at {path}"); - Ok(()) + Ok(smart_contract_string) } diff --git a/crates/nargo_cli/src/cli/compile_cmd.rs b/crates/nargo_cli/src/cli/compile_cmd.rs index fbaecb606a1..2d59667e7ff 100644 --- 
a/crates/nargo_cli/src/cli/compile_cmd.rs +++ b/crates/nargo_cli/src/cli/compile_cmd.rs @@ -7,14 +7,16 @@ use noirc_driver::{ compile_contracts, compile_main, CompileOptions, CompiledProgram, ErrorsAndWarnings, Warnings, }; use noirc_errors::reporter::ReportedErrors; +use noirc_frontend::graph::{CrateId, CrateName}; use noirc_frontend::hir::Context; -use std::path::Path; use clap::Args; use nargo::ops::{preprocess_contract_function, preprocess_program}; -use crate::{constants::TARGET_DIR, errors::CliError, resolver::resolve_root_manifest}; +use crate::errors::CliError; +use crate::manifest::resolve_workspace_from_toml; +use crate::{find_package_manifest, prepare_package}; use super::fs::{ common_reference_string::{ @@ -31,9 +33,6 @@ const BACKEND_IDENTIFIER: &str = "acvm-backend-barretenberg"; /// Compile the program and its secret execution trace into ACIR format #[derive(Debug, Clone, Args)] pub(crate) struct CompileCommand { - /// The name of the ACIR file - circuit_name: String, - /// Include Proving and Verification keys in the build artifacts. #[arg(long)] include_keys: bool, @@ -42,6 +41,10 @@ pub(crate) struct CompileCommand { #[arg(short, long)] contracts: bool, + /// The name of the package to compile + #[clap(long)] + package: Option, + #[clap(flatten)] compile_options: CompileOptions, } @@ -51,66 +54,72 @@ pub(crate) fn run( args: CompileCommand, config: NargoConfig, ) -> Result<(), CliError> { - let circuit_dir = config.program_dir.join(TARGET_DIR); + let toml_path = find_package_manifest(&config.program_dir)?; + let workspace = resolve_workspace_from_toml(&toml_path, args.package)?; + let circuit_dir = workspace.target_directory_path(); let mut common_reference_string = read_cached_common_reference_string(); // If contracts is set we're compiling every function in a 'contract' rather than just 'main'. if args.contracts { - let (mut context, crate_id) = resolve_root_manifest(&config.program_dir, None)?; - - let result = compile_contracts(&mut context, crate_id, &args.compile_options); - let contracts = report_errors(result, &context, args.compile_options.deny_warnings)?; - - // TODO(#1389): I wonder if it is incorrect for nargo-core to know anything about contracts. - // As can be seen here, It seems like a leaky abstraction where ContractFunctions (essentially CompiledPrograms) - // are compiled via nargo-core and then the PreprocessedContract is constructed here. - // This is due to EACH function needing it's own CRS, PKey, and VKey from the backend. - let preprocessed_contracts: Result, CliError> = - try_vecmap(contracts, |contract| { - let preprocessed_contract_functions = - try_vecmap(contract.functions, |mut func| { - func.bytecode = optimize_circuit(backend, func.bytecode)?.0; - common_reference_string = update_common_reference_string( - backend, - &common_reference_string, - &func.bytecode, - ) - .map_err(CliError::CommonReferenceStringError)?; - - preprocess_contract_function( - backend, - args.include_keys, - &common_reference_string, - func, - ) - .map_err(CliError::ProofSystemCompilerError) - })?; - - Ok(PreprocessedContract { - name: contract.name, - backend: String::from(BACKEND_IDENTIFIER), - functions: preprocessed_contract_functions, - }) - }); - for contract in preprocessed_contracts? 
{ - save_contract_to_file( - &contract, - &format!("{}-{}", &args.circuit_name, contract.name), - &circuit_dir, - ); + for package in &workspace { + let (mut context, crate_id) = prepare_package(package); + let result = compile_contracts(&mut context, crate_id, &args.compile_options); + let contracts = report_errors(result, &context, args.compile_options.deny_warnings)?; + + // TODO(#1389): I wonder if it is incorrect for nargo-core to know anything about contracts. + // As can be seen here, It seems like a leaky abstraction where ContractFunctions (essentially CompiledPrograms) + // are compiled via nargo-core and then the PreprocessedContract is constructed here. + // This is due to EACH function needing it's own CRS, PKey, and VKey from the backend. + let preprocessed_contracts: Result, CliError> = + try_vecmap(contracts, |contract| { + let preprocessed_contract_functions = + try_vecmap(contract.functions, |mut func| { + func.bytecode = optimize_circuit(backend, func.bytecode)?.0; + common_reference_string = update_common_reference_string( + backend, + &common_reference_string, + &func.bytecode, + ) + .map_err(CliError::CommonReferenceStringError)?; + + preprocess_contract_function( + backend, + args.include_keys, + &common_reference_string, + func, + ) + .map_err(CliError::ProofSystemCompilerError) + })?; + + Ok(PreprocessedContract { + name: contract.name, + backend: String::from(BACKEND_IDENTIFIER), + functions: preprocessed_contract_functions, + }) + }); + for contract in preprocessed_contracts? { + save_contract_to_file( + &contract, + &format!("{}-{}", package.name, contract.name), + &circuit_dir, + ); + } } } else { - let (program, _) = - compile_circuit(backend, None, &config.program_dir, &args.compile_options)?; - common_reference_string = - update_common_reference_string(backend, &common_reference_string, &program.circuit) - .map_err(CliError::CommonReferenceStringError)?; - - let (preprocessed_program, _) = - preprocess_program(backend, args.include_keys, &common_reference_string, program) - .map_err(CliError::ProofSystemCompilerError)?; - save_program_to_file(&preprocessed_program, &args.circuit_name, circuit_dir); + for package in &workspace { + let (mut context, crate_id) = prepare_package(package); + let program = compile_circuit(backend, &mut context, crate_id, &args.compile_options)?; + + common_reference_string = + update_common_reference_string(backend, &common_reference_string, &program.circuit) + .map_err(CliError::CommonReferenceStringError)?; + + let (preprocessed_program, _) = + preprocess_program(backend, args.include_keys, &common_reference_string, program) + .map_err(CliError::ProofSystemCompilerError)?; + save_program_to_file(&preprocessed_program, &package.name, &circuit_dir); + } } write_cached_common_reference_string(&common_reference_string); @@ -120,18 +129,18 @@ pub(crate) fn run( pub(crate) fn compile_circuit( backend: &B, - package: Option, - program_dir: &Path, + context: &mut Context, + crate_id: CrateId, compile_options: &CompileOptions, -) -> Result<(CompiledProgram, Context), CliError> { - let (mut context, crate_id) = resolve_root_manifest(program_dir, package)?; - let result = compile_main(&mut context, crate_id, compile_options); - let mut program = report_errors(result, &context, compile_options.deny_warnings)?; - +) -> Result { + let result = compile_main(context, crate_id, compile_options); + let mut program = report_errors(result, context, compile_options.deny_warnings)?; // Apply backend specific optimizations. 
let (optimized_circuit, opcode_labels) = optimize_circuit(backend, program.circuit) .expect("Backend does not support an opcode that is in the IR"); + // TODO(#2110): Why does this set `program.circuit` to `optimized_circuit` instead of the function taking ownership + // and requiring we use `optimized_circuit` everywhere after program.circuit = optimized_circuit; let opcode_ids = vecmap(opcode_labels, |label| match label { OpcodeLabel::Unresolved => { @@ -141,7 +150,7 @@ pub(crate) fn compile_circuit( }); program.debug.update_acir(opcode_ids); - Ok((program, context)) + Ok(program) } pub(super) fn optimize_circuit( diff --git a/crates/nargo_cli/src/cli/execute_cmd.rs b/crates/nargo_cli/src/cli/execute_cmd.rs index eaaea6d4ab3..ca5c18585ab 100644 --- a/crates/nargo_cli/src/cli/execute_cmd.rs +++ b/crates/nargo_cli/src/cli/execute_cmd.rs @@ -1,23 +1,23 @@ -use std::path::Path; - use acvm::acir::circuit::OpcodeLabel; use acvm::acir::{circuit::Circuit, native_types::WitnessMap}; use acvm::Backend; use clap::Args; +use nargo::constants::PROVER_INPUT_FILE; +use nargo::package::Package; use nargo::NargoError; use noirc_abi::input_parser::{Format, InputValue}; use noirc_abi::{Abi, InputMap}; use noirc_driver::{CompileOptions, CompiledProgram}; use noirc_errors::{debug_info::DebugInfo, CustomDiagnostic}; +use noirc_frontend::graph::CrateName; use noirc_frontend::hir::Context; +use super::compile_cmd::compile_circuit; use super::fs::{inputs::read_inputs_from_file, witness::save_witness_to_dir}; use super::NargoConfig; -use crate::{ - cli::compile_cmd::compile_circuit, - constants::{PROVER_INPUT_FILE, TARGET_DIR}, - errors::CliError, -}; +use crate::errors::CliError; +use crate::manifest::resolve_workspace_from_toml; +use crate::{find_package_manifest, prepare_package}; /// Executes a circuit to calculate its return value #[derive(Debug, Clone, Args)] @@ -29,6 +29,10 @@ pub(crate) struct ExecuteCommand { #[clap(long, short, default_value = PROVER_INPUT_FILE)] prover_name: String, + /// The name of the package to execute + #[clap(long)] + package: Option, + #[clap(flatten)] compile_options: CompileOptions, } @@ -38,35 +42,40 @@ pub(crate) fn run( args: ExecuteCommand, config: NargoConfig, ) -> Result<(), CliError> { - let (return_value, solved_witness) = - execute_with_path(backend, &config.program_dir, args.prover_name, &args.compile_options)?; + let toml_path = find_package_manifest(&config.program_dir)?; + let workspace = resolve_workspace_from_toml(&toml_path, args.package)?; + let witness_dir = &workspace.target_directory_path(); - println!("Circuit witness successfully solved"); - if let Some(return_value) = return_value { - println!("Circuit output: {return_value:?}"); - } - if let Some(witness_name) = args.witness_name { - let witness_dir = config.program_dir.join(TARGET_DIR); + for package in &workspace { + let (return_value, solved_witness) = + execute_package(backend, package, &args.prover_name, &args.compile_options)?; - let witness_path = save_witness_to_dir(solved_witness, &witness_name, witness_dir)?; + println!("[{}] Circuit witness successfully solved", package.name); + if let Some(return_value) = return_value { + println!("[{}] Circuit output: {return_value:?}", package.name); + } + if let Some(witness_name) = &args.witness_name { + let witness_path = save_witness_to_dir(solved_witness, witness_name, witness_dir)?; - println!("Witness saved to {}", witness_path.display()); + println!("[{}] Witness saved to {}", package.name, witness_path.display()); + } } Ok(()) } -fn 
execute_with_path( +fn execute_package( backend: &B, - program_dir: &Path, - prover_name: String, + package: &Package, + prover_name: &str, compile_options: &CompileOptions, ) -> Result<(Option, WitnessMap), CliError> { - let (compiled_program, context) = compile_circuit(backend, None, program_dir, compile_options)?; + let (mut context, crate_id) = prepare_package(package); + let compiled_program = compile_circuit(backend, &mut context, crate_id, compile_options)?; let CompiledProgram { abi, circuit, debug } = compiled_program; // Parse the initial witness values from Prover.toml let (inputs_map, _) = - read_inputs_from_file(program_dir, prover_name.as_str(), Format::Toml, &abi)?; + read_inputs_from_file(&package.root_dir, prover_name, Format::Toml, &abi)?; let solved_witness = execute_program(backend, circuit, &abi, &inputs_map, Some((debug, context)))?; diff --git a/crates/nargo_cli/src/cli/fs/inputs.rs b/crates/nargo_cli/src/cli/fs/inputs.rs index bd55e4b0abd..fd2afdefa12 100644 --- a/crates/nargo_cli/src/cli/fs/inputs.rs +++ b/crates/nargo_cli/src/cli/fs/inputs.rs @@ -70,6 +70,7 @@ mod tests { use std::{collections::BTreeMap, vec}; use acvm::FieldElement; + use nargo::constants::VERIFIER_INPUT_FILE; use noirc_abi::{ input_parser::{Format, InputValue}, Abi, AbiParameter, AbiType, AbiVisibility, @@ -77,7 +78,6 @@ mod tests { use tempdir::TempDir; use super::{read_inputs_from_file, write_inputs_to_file}; - use crate::constants::VERIFIER_INPUT_FILE; #[test] fn write_and_read_recovers_inputs_and_return_value() { diff --git a/crates/nargo_cli/src/cli/fs/program.rs b/crates/nargo_cli/src/cli/fs/program.rs index 871a6023837..311923a6686 100644 --- a/crates/nargo_cli/src/cli/fs/program.rs +++ b/crates/nargo_cli/src/cli/fs/program.rs @@ -1,6 +1,7 @@ use std::path::{Path, PathBuf}; use nargo::artifacts::{contract::PreprocessedContract, program::PreprocessedProgram}; +use noirc_frontend::graph::CrateName; use crate::errors::FilesystemError; @@ -8,10 +9,11 @@ use super::{create_named_dir, write_to_file}; pub(crate) fn save_program_to_file>( compiled_program: &PreprocessedProgram, - circuit_name: &str, + crate_name: &CrateName, circuit_dir: P, ) -> PathBuf { - save_build_artifact_to_file(compiled_program, circuit_name, circuit_dir) + let circuit_name: String = crate_name.into(); + save_build_artifact_to_file(compiled_program, &circuit_name, circuit_dir) } pub(crate) fn save_contract_to_file>( compiled_contract: &PreprocessedContract, diff --git a/crates/nargo_cli/src/cli/fs/proof.rs b/crates/nargo_cli/src/cli/fs/proof.rs index 3a54aa908f8..d2b3050708b 100644 --- a/crates/nargo_cli/src/cli/fs/proof.rs +++ b/crates/nargo_cli/src/cli/fs/proof.rs @@ -1,6 +1,8 @@ use std::path::{Path, PathBuf}; -use crate::{constants::PROOF_EXT, errors::FilesystemError}; +use nargo::constants::PROOF_EXT; + +use crate::errors::FilesystemError; use super::{create_named_dir, write_to_file}; diff --git a/crates/nargo_cli/src/cli/fs/witness.rs b/crates/nargo_cli/src/cli/fs/witness.rs index 7ecafb1615b..edfb1aa63d6 100644 --- a/crates/nargo_cli/src/cli/fs/witness.rs +++ b/crates/nargo_cli/src/cli/fs/witness.rs @@ -1,9 +1,10 @@ use std::path::{Path, PathBuf}; use acvm::acir::native_types::WitnessMap; +use nargo::constants::WITNESS_EXT; use super::{create_named_dir, write_to_file}; -use crate::{constants::WITNESS_EXT, errors::FilesystemError}; +use crate::errors::FilesystemError; pub(crate) fn save_witness_to_dir>( witnesses: WitnessMap, diff --git a/crates/nargo_cli/src/cli/info_cmd.rs b/crates/nargo_cli/src/cli/info_cmd.rs 
index 7ad0a2caf8c..12a70f7b13e 100644 --- a/crates/nargo_cli/src/cli/info_cmd.rs +++ b/crates/nargo_cli/src/cli/info_cmd.rs @@ -1,19 +1,26 @@ use acvm::Backend; use clap::Args; +use nargo::package::Package; use noirc_driver::CompileOptions; -use std::path::Path; +use noirc_frontend::graph::CrateName; -use crate::cli::compile_cmd::compile_circuit; -use crate::errors::CliError; +use crate::{ + cli::compile_cmd::compile_circuit, errors::CliError, find_package_manifest, + manifest::resolve_workspace_from_toml, prepare_package, +}; use super::NargoConfig; -/// Provides detailed informaton on a circuit +/// Provides detailed information on a circuit /// Current information provided: /// 1. The number of ACIR opcodes /// 2. Counts the final number gates in the circuit used by a backend #[derive(Debug, Clone, Args)] pub(crate) struct InfoCommand { + /// The name of the package to detail + #[clap(long)] + package: Option, + #[clap(flatten)] compile_options: CompileOptions, } @@ -23,20 +30,29 @@ pub(crate) fn run( args: InfoCommand, config: NargoConfig, ) -> Result<(), CliError> { - count_opcodes_and_gates_with_path(backend, config.program_dir, &args.compile_options) + let toml_path = find_package_manifest(&config.program_dir)?; + let workspace = resolve_workspace_from_toml(&toml_path, args.package)?; + + for package in &workspace { + count_opcodes_and_gates_in_package(backend, package, &args.compile_options)?; + } + + Ok(()) } -fn count_opcodes_and_gates_with_path>( +fn count_opcodes_and_gates_in_package( backend: &B, - program_dir: P, + package: &Package, compile_options: &CompileOptions, ) -> Result<(), CliError> { - let (compiled_program, _) = - compile_circuit(backend, None, program_dir.as_ref(), compile_options)?; + let (mut context, crate_id) = prepare_package(package); + let compiled_program = compile_circuit(backend, &mut context, crate_id, compile_options)?; + let num_opcodes = compiled_program.circuit.opcodes.len(); println!( - "Total ACIR opcodes generated for language {:?}: {}", + "[{}] Total ACIR opcodes generated for language {:?}: {}", + package.name, backend.np_language(), num_opcodes ); @@ -44,7 +60,7 @@ fn count_opcodes_and_gates_with_path>( let exact_circuit_size = backend .get_exact_circuit_size(&compiled_program.circuit) .map_err(CliError::ProofSystemCompilerError)?; - println!("Backend circuit size: {exact_circuit_size}"); + println!("[{}] Backend circuit size: {exact_circuit_size}", package.name); Ok(()) } diff --git a/crates/nargo_cli/src/cli/init_cmd.rs b/crates/nargo_cli/src/cli/init_cmd.rs index 77613611343..576690b7fab 100644 --- a/crates/nargo_cli/src/cli/init_cmd.rs +++ b/crates/nargo_cli/src/cli/init_cmd.rs @@ -1,12 +1,10 @@ -use crate::{ - constants::{PKG_FILE, SRC_DIR}, - errors::CliError, -}; +use crate::errors::CliError; use super::fs::{create_named_dir, write_to_file}; use super::{NargoConfig, CARGO_PKG_VERSION}; use acvm::Backend; use clap::Args; +use nargo::constants::{PKG_FILE, SRC_DIR}; use std::path::PathBuf; /// Create a Noir project in the current directory. 
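Every reworked command now follows the same shape: find the manifest, resolve it into a `Workspace` (optionally narrowed by `--package`), then prepare and process each member. A rough sketch of that shared flow, assuming the crate-internal helpers added in this patch (`find_package_manifest`, `resolve_workspace_from_toml`, `prepare_package`) and using the `check` logic as a stand-in for each command's real per-package body; the function name is illustrative:

```rust
fn run_for_each_package(
    config: &NargoConfig,
    package: Option<CrateName>,
    deny_warnings: bool,
) -> Result<(), CliError> {
    // Walk up from the program directory to the nearest Nargo.toml.
    let toml_path = find_package_manifest(&config.program_dir)?;
    // Either a single-package workspace or the members listed under [workspace].
    let workspace = resolve_workspace_from_toml(&toml_path, package)?;

    for package in &workspace {
        // One fresh compilation context (file manager + crate graph) per member.
        let (mut context, crate_id) = prepare_package(package);
        // Command-specific work goes here; `check` is used as the example.
        check_crate_and_report_errors(&mut context, crate_id, deny_warnings)?;
        println!("[{}] Constraint system successfully built!", package.name);
    }
    Ok(())
}
```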
diff --git a/crates/nargo_cli/src/cli/prove_cmd.rs b/crates/nargo_cli/src/cli/prove_cmd.rs index 92e9599cd8b..cdf83f9759b 100644 --- a/crates/nargo_cli/src/cli/prove_cmd.rs +++ b/crates/nargo_cli/src/cli/prove_cmd.rs @@ -3,38 +3,31 @@ use std::path::{Path, PathBuf}; use acvm::Backend; use clap::Args; use nargo::artifacts::program::PreprocessedProgram; +use nargo::constants::{PROVER_INPUT_FILE, VERIFIER_INPUT_FILE}; use nargo::ops::{preprocess_program, prove_execution, verify_proof}; +use nargo::package::Package; use noirc_abi::input_parser::Format; use noirc_driver::CompileOptions; +use noirc_frontend::graph::CrateName; -use super::NargoConfig; -use super::{ - compile_cmd::compile_circuit, - fs::{ - common_reference_string::{ - read_cached_common_reference_string, update_common_reference_string, - write_cached_common_reference_string, - }, - inputs::{read_inputs_from_file, write_inputs_to_file}, - program::read_program_from_file, - proof::save_proof_to_dir, +use super::compile_cmd::compile_circuit; +use super::fs::{ + common_reference_string::{ + read_cached_common_reference_string, update_common_reference_string, + write_cached_common_reference_string, }, + inputs::{read_inputs_from_file, write_inputs_to_file}, + program::read_program_from_file, + proof::save_proof_to_dir, }; -use crate::{ - cli::execute_cmd::execute_program, - constants::{PROOFS_DIR, PROVER_INPUT_FILE, TARGET_DIR, VERIFIER_INPUT_FILE}, - errors::CliError, -}; +use super::NargoConfig; +use crate::manifest::resolve_workspace_from_toml; +use crate::{cli::execute_cmd::execute_program, errors::CliError}; +use crate::{find_package_manifest, prepare_package}; /// Create proof for this program. The proof is returned as a hex encoded string. #[derive(Debug, Clone, Args)] pub(crate) struct ProveCommand { - /// The name of the proof - proof_name: Option, - - /// The name of the circuit build files (ACIR, proving and verification keys) - circuit_name: Option, - /// The name of the toml file which contains the inputs for the prover #[clap(long, short, default_value = PROVER_INPUT_FILE)] prover_name: String, @@ -47,11 +40,12 @@ pub(crate) struct ProveCommand { #[arg(long)] verify: bool, + /// The name of the package to prove + #[clap(long)] + package: Option, + #[clap(flatten)] compile_options: CompileOptions, - - #[clap(long)] - package: Option, } pub(crate) fn run( @@ -59,65 +53,57 @@ pub(crate) fn run( args: ProveCommand, config: NargoConfig, ) -> Result<(), CliError> { - let proof_dir = config.program_dir.join(PROOFS_DIR); - - let circuit_build_path = args - .circuit_name - .map(|circuit_name| config.program_dir.join(TARGET_DIR).join(circuit_name)); - - prove_with_path( - backend, - args.proof_name, - args.prover_name, - args.verifier_name, - args.package, - config.program_dir, - proof_dir, - circuit_build_path, - args.verify, - &args.compile_options, - )?; + let toml_path = find_package_manifest(&config.program_dir)?; + let workspace = resolve_workspace_from_toml(&toml_path, args.package)?; + let proof_dir = workspace.proofs_directory_path(); + + for package in &workspace { + let circuit_build_path = workspace.package_build_path(package); + + prove_package( + backend, + package, + &args.prover_name, + &args.verifier_name, + &proof_dir, + circuit_build_path, + args.verify, + &args.compile_options, + )?; + } Ok(()) } #[allow(clippy::too_many_arguments)] -pub(crate) fn prove_with_path>( +pub(crate) fn prove_package( backend: &B, - proof_name: Option, - prover_name: String, - verifier_name: String, - package: Option, - program_dir: 
P, - proof_dir: P, - circuit_build_path: Option, + package: &Package, + prover_name: &str, + verifier_name: &str, + proof_dir: &Path, + circuit_build_path: PathBuf, check_proof: bool, compile_options: &CompileOptions, -) -> Result, CliError> { +) -> Result<(), CliError> { let common_reference_string = read_cached_common_reference_string(); - let (common_reference_string, preprocessed_program, debug_data) = match circuit_build_path { - Some(circuit_build_path) => { - let program = read_program_from_file(circuit_build_path)?; - let common_reference_string = update_common_reference_string( - backend, - &common_reference_string, - &program.bytecode, - ) - .map_err(CliError::CommonReferenceStringError)?; - (common_reference_string, program, None) - } - None => { - let (program, context) = - compile_circuit(backend, package, program_dir.as_ref(), compile_options)?; - let common_reference_string = - update_common_reference_string(backend, &common_reference_string, &program.circuit) - .map_err(CliError::CommonReferenceStringError)?; - let (program, debug) = - preprocess_program(backend, true, &common_reference_string, program) - .map_err(CliError::ProofSystemCompilerError)?; - (common_reference_string, program, Some((debug, context))) - } + let (common_reference_string, preprocessed_program, debug_data) = if circuit_build_path.exists() + { + let program = read_program_from_file(circuit_build_path)?; + let common_reference_string = + update_common_reference_string(backend, &common_reference_string, &program.bytecode) + .map_err(CliError::CommonReferenceStringError)?; + (common_reference_string, program, None) + } else { + let (mut context, crate_id) = prepare_package(package); + let program = compile_circuit(backend, &mut context, crate_id, compile_options)?; + let common_reference_string = + update_common_reference_string(backend, &common_reference_string, &program.circuit) + .map_err(CliError::CommonReferenceStringError)?; + let (program, debug) = preprocess_program(backend, true, &common_reference_string, program) + .map_err(CliError::ProofSystemCompilerError)?; + (common_reference_string, program, Some((debug, context))) }; write_cached_common_reference_string(&common_reference_string); @@ -127,7 +113,7 @@ pub(crate) fn prove_with_path>( // Parse the initial witness values from Prover.toml let (inputs_map, _) = - read_inputs_from_file(&program_dir, prover_name.as_str(), Format::Toml, &abi)?; + read_inputs_from_file(&package.root_dir, prover_name, Format::Toml, &abi)?; let solved_witness = execute_program(backend, bytecode.clone(), &abi, &inputs_map, debug_data)?; @@ -139,8 +125,8 @@ pub(crate) fn prove_with_path>( &public_inputs, &return_value, &public_abi, - &program_dir, - verifier_name.as_str(), + &package.root_dir, + verifier_name, Format::Toml, )?; @@ -170,12 +156,7 @@ pub(crate) fn prove_with_path>( } } - let proof_path = if let Some(proof_name) = proof_name { - Some(save_proof_to_dir(&proof, &proof_name, proof_dir)?) 
- } else { - println!("{}", hex::encode(&proof)); - None - }; + save_proof_to_dir(&proof, &String::from(&package.name), proof_dir)?; - Ok(proof_path) + Ok(()) } diff --git a/crates/nargo_cli/src/cli/test_cmd.rs b/crates/nargo_cli/src/cli/test_cmd.rs index c1aa359e724..7eb1c9bff74 100644 --- a/crates/nargo_cli/src/cli/test_cmd.rs +++ b/crates/nargo_cli/src/cli/test_cmd.rs @@ -1,15 +1,15 @@ -use std::{io::Write, path::Path}; +use std::io::Write; use acvm::{acir::native_types::WitnessMap, Backend}; use clap::Args; -use nargo::ops::execute_circuit; +use nargo::{ops::execute_circuit, package::Package}; use noirc_driver::{compile_no_check, CompileOptions}; -use noirc_frontend::{hir::Context, node_interner::FuncId}; +use noirc_frontend::{graph::CrateName, hir::Context, node_interner::FuncId}; use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor}; use crate::{ - cli::check_cmd::check_crate_and_report_errors, errors::CliError, - resolver::resolve_root_manifest, + cli::check_cmd::check_crate_and_report_errors, errors::CliError, find_package_manifest, + manifest::resolve_workspace_from_toml, prepare_package, }; use super::{compile_cmd::optimize_circuit, NargoConfig}; @@ -24,6 +24,10 @@ pub(crate) struct TestCommand { #[arg(long)] show_output: bool, + /// The name of the package to test + #[clap(long)] + package: Option, + #[clap(flatten)] compile_options: CompileOptions, } @@ -35,56 +39,62 @@ pub(crate) fn run( ) -> Result<(), CliError> { let test_name: String = args.test_name.unwrap_or_else(|| "".to_owned()); - run_tests(backend, &config.program_dir, &test_name, args.show_output, &args.compile_options) + let toml_path = find_package_manifest(&config.program_dir)?; + let workspace = resolve_workspace_from_toml(&toml_path, args.package)?; + + for package in &workspace { + run_tests(backend, package, &test_name, args.show_output, &args.compile_options)?; + } + + Ok(()) } fn run_tests( backend: &B, - program_dir: &Path, + package: &Package, test_name: &str, show_output: bool, compile_options: &CompileOptions, ) -> Result<(), CliError> { - let (mut context, crate_id) = resolve_root_manifest(program_dir, None)?; + let (mut context, crate_id) = prepare_package(package); check_crate_and_report_errors(&mut context, crate_id, compile_options.deny_warnings)?; - let test_functions = match context.crate_graph.crate_type(crate_id) { - noirc_frontend::graph::CrateType::Workspace => { - context.get_all_test_functions_in_workspace_matching(test_name) - } - _ => context.get_all_test_functions_in_crate_matching(&crate_id, test_name), - }; + let test_functions = context.get_all_test_functions_in_crate_matching(&crate_id, test_name); - println!("Running {} test functions...", test_functions.len()); + println!("[{}] Running {} test functions", package.name, test_functions.len()); let mut failing = 0; let writer = StandardStream::stderr(ColorChoice::Always); let mut writer = writer.lock(); for (test_name, test_function) in test_functions { - writeln!(writer, "Testing {test_name}...").expect("Failed to write to stdout"); - writer.flush().ok(); + write!(writer, "[{}] Testing {test_name}... 
", package.name) + .expect("Failed to write to stdout"); + writer.flush().expect("Failed to flush writer"); match run_test(backend, &test_name, test_function, &context, show_output, compile_options) { Ok(_) => { - writer.set_color(ColorSpec::new().set_fg(Some(Color::Green))).ok(); - writeln!(writer, "ok").ok(); + writer + .set_color(ColorSpec::new().set_fg(Some(Color::Green))) + .expect("Failed to set color"); + writeln!(writer, "ok").expect("Failed to write to stdout"); } // Assume an error was already printed to stdout Err(_) => failing += 1, } - writer.reset().ok(); + writer.reset().expect("Failed to reset writer"); } if failing == 0 { - writer.set_color(ColorSpec::new().set_fg(Some(Color::Green))).unwrap(); - writeln!(writer, "All tests passed").ok(); + write!(writer, "[{}] ", package.name).expect("Failed to write to stdout"); + writer.set_color(ColorSpec::new().set_fg(Some(Color::Green))).expect("Failed to set color"); + writeln!(writer, "All tests passed").expect("Failed to write to stdout"); } else { let plural = if failing == 1 { "" } else { "s" }; - return Err(CliError::Generic(format!("{failing} test{plural} failed"))); + return Err(CliError::Generic(format!("[{}] {failing} test{plural} failed", package.name))); } - writer.reset().ok(); + writer.reset().expect("Failed to reset writer"); Ok(()) } diff --git a/crates/nargo_cli/src/cli/verify_cmd.rs b/crates/nargo_cli/src/cli/verify_cmd.rs index f9068c66c9c..78b23a0612d 100644 --- a/crates/nargo_cli/src/cli/verify_cmd.rs +++ b/crates/nargo_cli/src/cli/verify_cmd.rs @@ -9,32 +9,31 @@ use super::fs::{ program::read_program_from_file, }; use super::NargoConfig; -use crate::{ - constants::{PROOFS_DIR, PROOF_EXT, TARGET_DIR, VERIFIER_INPUT_FILE}, - errors::CliError, -}; +use crate::errors::CliError; +use crate::manifest::resolve_workspace_from_toml; +use crate::{find_package_manifest, prepare_package}; use acvm::Backend; use clap::Args; -use nargo::artifacts::program::PreprocessedProgram; +use nargo::constants::{PROOF_EXT, VERIFIER_INPUT_FILE}; use nargo::ops::{preprocess_program, verify_proof}; +use nargo::{artifacts::program::PreprocessedProgram, package::Package}; use noirc_abi::input_parser::Format; use noirc_driver::CompileOptions; +use noirc_frontend::graph::CrateName; use std::path::{Path, PathBuf}; /// Given a proof and a program, verify whether the proof is valid #[derive(Debug, Clone, Args)] pub(crate) struct VerifyCommand { - /// The proof to verify - proof: String, - - /// The name of the circuit build files (ACIR, proving and verification keys) - circuit_name: Option, - /// The name of the toml file which contains the inputs for the verifier #[clap(long, short, default_value = VERIFIER_INPUT_FILE)] verifier_name: String, + /// The name of the package verify + #[clap(long)] + package: Option, + #[clap(flatten)] compile_options: CompileOptions, } @@ -44,54 +43,53 @@ pub(crate) fn run( args: VerifyCommand, config: NargoConfig, ) -> Result<(), CliError> { - let proof_path = - config.program_dir.join(PROOFS_DIR).join(&args.proof).with_extension(PROOF_EXT); - - let circuit_build_path = args - .circuit_name - .map(|circuit_name| config.program_dir.join(TARGET_DIR).join(circuit_name)); + let toml_path = find_package_manifest(&config.program_dir)?; + let workspace = resolve_workspace_from_toml(&toml_path, args.package)?; + let proofs_dir = workspace.proofs_directory_path(); + + for package in &workspace { + let circuit_build_path = workspace.package_build_path(package); + + let proof_path = 
proofs_dir.join(String::from(&package.name)).with_extension(PROOF_EXT); + + verify_package( + backend, + package, + &proof_path, + circuit_build_path, + &args.verifier_name, + &args.compile_options, + )?; + } - verify_with_path( - backend, - &config.program_dir, - proof_path, - circuit_build_path.as_ref(), - args.verifier_name, - &args.compile_options, - ) + Ok(()) } -fn verify_with_path>( +fn verify_package( backend: &B, - program_dir: P, - proof_path: PathBuf, - circuit_build_path: Option
<PathBuf>
, - verifier_name: String, + package: &Package, + proof_path: &Path, + circuit_build_path: PathBuf, + verifier_name: &str, compile_options: &CompileOptions, ) -> Result<(), CliError> { let common_reference_string = read_cached_common_reference_string(); - let (common_reference_string, preprocessed_program) = match circuit_build_path { - Some(circuit_build_path) => { - let program = read_program_from_file(circuit_build_path)?; - let common_reference_string = update_common_reference_string( - backend, - &common_reference_string, - &program.bytecode, - ) - .map_err(CliError::CommonReferenceStringError)?; - (common_reference_string, program) - } - None => { - let (program, _) = - compile_circuit(backend, None, program_dir.as_ref(), compile_options)?; - let common_reference_string = - update_common_reference_string(backend, &common_reference_string, &program.circuit) - .map_err(CliError::CommonReferenceStringError)?; - let (program, _) = preprocess_program(backend, true, &common_reference_string, program) - .map_err(CliError::ProofSystemCompilerError)?; - (common_reference_string, program) - } + let (common_reference_string, preprocessed_program) = if circuit_build_path.exists() { + let program = read_program_from_file(circuit_build_path)?; + let common_reference_string = + update_common_reference_string(backend, &common_reference_string, &program.bytecode) + .map_err(CliError::CommonReferenceStringError)?; + (common_reference_string, program) + } else { + let (mut context, crate_id) = prepare_package(package); + let program = compile_circuit(backend, &mut context, crate_id, compile_options)?; + let common_reference_string = + update_common_reference_string(backend, &common_reference_string, &program.circuit) + .map_err(CliError::CommonReferenceStringError)?; + let (program, _) = preprocess_program(backend, true, &common_reference_string, program) + .map_err(CliError::ProofSystemCompilerError)?; + (common_reference_string, program) }; write_cached_common_reference_string(&common_reference_string); @@ -101,10 +99,10 @@ fn verify_with_path>( // Load public inputs (if any) from `verifier_name`. 
let public_abi = abi.public_abi(); let (public_inputs_map, return_value) = - read_inputs_from_file(program_dir, verifier_name.as_str(), Format::Toml, &public_abi)?; + read_inputs_from_file(&package.root_dir, verifier_name, Format::Toml, &public_abi)?; let public_inputs = public_abi.encode(&public_inputs_map, return_value)?; - let proof = load_hex_data(&proof_path)?; + let proof = load_hex_data(proof_path)?; let verification_key = verification_key .expect("Verification key should exist as `true` is passed to `preprocess_program`"); @@ -121,6 +119,6 @@ fn verify_with_path>( if valid_proof { Ok(()) } else { - Err(CliError::InvalidProof(proof_path)) + Err(CliError::InvalidProof(proof_path.to_path_buf())) } } diff --git a/crates/nargo_cli/src/errors.rs b/crates/nargo_cli/src/errors.rs index f9220d55b1c..00a84ff2964 100644 --- a/crates/nargo_cli/src/errors.rs +++ b/crates/nargo_cli/src/errors.rs @@ -9,8 +9,6 @@ use noirc_errors::reporter::ReportedErrors; use std::path::PathBuf; use thiserror::Error; -use crate::resolver::DependencyResolutionError; - #[derive(Debug, Error)] pub(crate) enum FilesystemError { #[error("Error: {} is not a valid path\nRun either `nargo compile` to generate missing build artifacts or `nargo prove` to construct a proof", .0.display())] @@ -41,9 +39,6 @@ pub(crate) enum CliError { #[error("Failed to verify proof {}", .0.display())] InvalidProof(PathBuf), - #[error(transparent)] - ResolutionError(#[from] DependencyResolutionError), - /// Errors encountered while compiling the noir program. /// These errors are already written to stderr. #[error("Aborting due to {} previous error{}", .0.error_count, if .0.error_count == 1 { "" } else { "s" })] @@ -64,6 +59,10 @@ pub(crate) enum CliError { #[error(transparent)] NargoError(#[from] NargoError), + /// Error from Manifest + #[error(transparent)] + ManifestError(#[from] ManifestError), + /// Backend error caused by a function on the SmartContract trait #[error(transparent)] SmartContractError(::Error), // Unfortunately, Rust won't let us `impl From` over an Associated Type on a generic @@ -82,3 +81,50 @@ impl From for CliError { Self::ReportedErrors(errors) } } + +/// Errors covering situations where a package is either missing or malformed. +#[derive(Debug, Error)] +pub(crate) enum ManifestError { + /// Package doesn't have a manifest file + #[error("cannot find a Nargo.toml in {}", .0.display())] + MissingFile(PathBuf), + + #[error("Cannot read file {0}. Does it exist?")] + ReadFailed(PathBuf), + + #[error("Nargo.toml is missing a parent directory")] + MissingParent, + + /// Package manifest is unreadable. + #[error("Nargo.toml is badly formed, could not parse.\n\n {0}")] + MalformedFile(#[from] toml::de::Error), + + #[error("Unxpected workspace definition found in {0}")] + UnexpectedWorkspace(PathBuf), + + /// Package does not contain Noir source files. + #[error("cannot find src directory in path {0}")] + NoSourceDir(PathBuf), + + /// Package has neither of `main.nr` and `lib.nr`. + #[error("package must contain either a `lib.nr`(Library) or a `main.nr`(Binary).")] + ContainsZeroCrates, + + /// Package has both a `main.nr` (for binaries) and `lib.nr` (for libraries) + #[error("package cannot contain both a `lib.nr` and a `main.nr`")] + ContainsMultipleCrates, + + /// Invalid character `-` in package name + #[error("invalid character `-` in package name")] + InvalidPackageName, + + /// Encountered error while downloading git repository. 
+ #[error("{0}")] + GitError(String), + + #[error("Selected package ({0}) was not found")] + MissingSelectedPackage(String), + + #[error("Default package was not found. Does {0} exist in your workspace?")] + MissingDefaultPackage(PathBuf), +} diff --git a/crates/nargo_cli/src/git.rs b/crates/nargo_cli/src/git.rs index 7f103e21b38..850657a8af1 100644 --- a/crates/nargo_cli/src/git.rs +++ b/crates/nargo_cli/src/git.rs @@ -1,7 +1,16 @@ use std::path::PathBuf; +/// Creates a unique folder name for a GitHub repo +/// by using its URL and tag +fn resolve_folder_name(base: &url::Url, tag: &str) -> String { + let mut folder_name = base.domain().unwrap().to_owned(); + folder_name.push_str(base.path()); + folder_name.push_str(tag); + folder_name +} + pub(crate) fn git_dep_location(base: &url::Url, tag: &str) -> PathBuf { - let folder_name = super::resolver::resolve_folder_name(base, tag); + let folder_name = resolve_folder_name(base, tag); super::nargo_crates().join(folder_name) } diff --git a/crates/nargo_cli/src/lib.rs b/crates/nargo_cli/src/lib.rs index 9426decf194..b456d31c0ca 100644 --- a/crates/nargo_cli/src/lib.rs +++ b/crates/nargo_cli/src/lib.rs @@ -7,21 +7,26 @@ //! This name was used because it sounds like `cargo` and //! Noir Package Manager abbreviated is npm, which is already taken. -use noirc_frontend::graph::CrateType; +use fm::FileManager; +use nargo::package::{Dependency, Package}; +use noirc_driver::{add_dep, create_local_crate, create_non_local_crate}; +use noirc_frontend::{ + graph::{CrateGraph, CrateId, CrateName, CrateType}, + hir::Context, +}; use std::{ + collections::BTreeMap, fs::ReadDir, path::{Path, PathBuf}, }; +use errors::ManifestError; + mod backends; pub mod cli; -mod constants; mod errors; mod git; mod manifest; -mod resolver; - -use nargo::manifest::InvalidPackageError; fn nargo_crates() -> PathBuf { dirs::home_dir().unwrap().join("nargo") @@ -30,7 +35,7 @@ fn nargo_crates() -> PathBuf { /// Returns the path of the root directory of the package containing `current_path`. /// /// Returns a `CliError` if no parent directories of `current_path` contain a manifest file. -fn find_package_root(current_path: &Path) -> Result { +fn find_package_root(current_path: &Path) -> Result { let manifest_path = find_package_manifest(current_path)?; let package_root = @@ -42,27 +47,27 @@ fn find_package_root(current_path: &Path) -> Result Result { +fn find_package_manifest(current_path: &Path) -> Result { current_path .ancestors() .find_map(|dir| find_file(dir, "Nargo", "toml")) - .ok_or_else(|| InvalidPackageError::MissingManifestFile(current_path.to_path_buf())) + .ok_or_else(|| ManifestError::MissingFile(current_path.to_path_buf())) } -fn lib_or_bin(current_path: impl AsRef) -> Result<(PathBuf, CrateType), InvalidPackageError> { - let current_path = current_path.as_ref(); +fn lib_or_bin(root_dir: impl AsRef) -> Result<(PathBuf, CrateType), ManifestError> { + let current_path = root_dir.as_ref(); // A library has a lib.nr and a binary has a main.nr // You cannot have both. 
let src_path = find_dir(current_path, "src") - .ok_or_else(|| InvalidPackageError::NoSourceDir(current_path.to_path_buf()))?; + .ok_or_else(|| ManifestError::NoSourceDir(current_path.to_path_buf()))?; let lib_nr_path = find_file(&src_path, "lib", "nr"); let bin_nr_path = find_file(&src_path, "main", "nr"); match (lib_nr_path, bin_nr_path) { - (Some(_), Some(_)) => Err(InvalidPackageError::ContainsMultipleCrates), + (Some(_), Some(_)) => Err(ManifestError::ContainsMultipleCrates), (None, Some(path)) => Ok((path, CrateType::Binary)), (Some(path), None) => Ok((path, CrateType::Library)), - (None, None) => Err(InvalidPackageError::ContainsZeroCrates), + (None, None) => Err(ManifestError::ContainsZeroCrates), } } @@ -93,3 +98,32 @@ fn find_artifact(entries: ReadDir, artifact_name: &str) -> Option { fn list_files_and_folders_in>(path: P) -> Option { std::fs::read_dir(path).ok() } + +fn prepare_dependencies( + context: &mut Context, + parent_crate: CrateId, + dependencies: BTreeMap, +) { + for (dep_name, dep) in dependencies.into_iter() { + match dep { + Dependency::Remote { package } | Dependency::Local { package } => { + let crate_id = + create_non_local_crate(context, &package.entry_path, package.crate_type); + add_dep(context, parent_crate, crate_id, dep_name); + prepare_dependencies(context, crate_id, package.dependencies.to_owned()); + } + } + } +} + +fn prepare_package(package: &Package) -> (Context, CrateId) { + let fm = FileManager::new(&package.root_dir); + let graph = CrateGraph::default(); + let mut context = Context::new(fm, graph); + + let crate_id = create_local_crate(&mut context, &package.entry_path, package.crate_type); + + prepare_dependencies(&mut context, crate_id, package.dependencies.to_owned()); + + (context, crate_id) +} diff --git a/crates/nargo_cli/src/manifest.rs b/crates/nargo_cli/src/manifest.rs index 2660fd8c1cb..e1da57c0c2b 100644 --- a/crates/nargo_cli/src/manifest.rs +++ b/crates/nargo_cli/src/manifest.rs @@ -1,13 +1,284 @@ -use std::path::Path; +use std::{ + collections::BTreeMap, + path::{Path, PathBuf}, +}; -use nargo::manifest::{InvalidPackageError, Manifest}; +use nargo::{ + package::{Dependency, Package}, + workspace::Workspace, +}; +use noirc_frontend::graph::CrateName; +use serde::Deserialize; -/// Parses a Nargo.toml file from it's path -/// The path to the toml file must be present. -/// Calling this function without this guarantee is an ICE. -pub(crate) fn parse>(path_to_toml: P) -> Result { - let toml_as_string = - std::fs::read_to_string(&path_to_toml).expect("ice: path given for toml file is invalid"); +use crate::{errors::ManifestError, git::clone_git_repo}; - Manifest::from_toml_str(&toml_as_string) +#[derive(Debug, Deserialize, Clone)] +struct PackageConfig { + package: PackageMetadata, + dependencies: BTreeMap, +} + +impl PackageConfig { + fn resolve_to_package(&self, root_dir: &Path) -> Result { + let name = self.package.name.parse().map_err(|_| ManifestError::InvalidPackageName)?; + + let mut dependencies: BTreeMap = BTreeMap::new(); + for (name, dep_config) in self.dependencies.iter() { + let name = name.parse().map_err(|_| ManifestError::InvalidPackageName)?; + let resolved_dep = dep_config.resolve_to_dependency(root_dir)?; + + dependencies.insert(name, resolved_dep); + } + + let (entry_path, crate_type) = crate::lib_or_bin(root_dir)?; + + Ok(Package { root_dir: root_dir.to_path_buf(), entry_path, crate_type, name, dependencies }) + } +} + +/// Contains all the information about a package, as loaded from a `Nargo.toml`. 
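For reference, the manifest shape that `PackageConfig` above deserializes, written in the style of the parse tests at the bottom of this file; the package name, path and git URL are example values only:

```rust
#[test]
fn parse_package_config_with_dependencies() {
    let src = r#"
        [package]
        name = "example"
        compiler_version = "0.1"

        [dependencies]
        local_dep = { path = "../local_dep" }
        remote_dep = { tag = "v0.1.0", git = "https://github.com/example/remote_dep" }
    "#;

    // Deserialize straight into `PackageConfig` (the same shape the untagged
    // `Config` enum flattens for `[package]` manifests).
    let config: PackageConfig = toml::from_str(src).expect("example manifest should parse");
    assert_eq!(config.dependencies.len(), 2);
}
```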
+#[derive(Debug, Deserialize, Clone)] +#[serde(untagged)] +enum Config { + /// Represents a `Nargo.toml` with package fields. + Package { + #[serde(flatten)] + package_config: PackageConfig, + }, + /// Represents a `Nargo.toml` with workspace fields. + Workspace { + #[serde(alias = "workspace")] + workspace_config: WorkspaceConfig, + }, +} + +impl TryFrom for Config { + type Error = toml::de::Error; + + fn try_from(toml: String) -> Result { + toml::from_str(&toml) + } +} + +impl TryFrom<&str> for Config { + type Error = toml::de::Error; + + fn try_from(toml: &str) -> Result { + toml::from_str(toml) + } +} + +/// Tracks the root_dir of a `Nargo.toml` and the contents inside the file. +struct NargoToml { + root_dir: PathBuf, + config: Config, +} + +#[derive(Default, Debug, Deserialize, Clone)] +#[serde(rename_all = "kebab-case")] +struct WorkspaceConfig { + /// List of members in this workspace. + members: Vec, + /// Specifies the default crate to interact with in the context (similarly to how we have nargo as the default crate in this repository). + default_member: Option, +} + +#[allow(dead_code)] +#[derive(Default, Debug, Deserialize, Clone)] +struct PackageMetadata { + #[serde(default = "panic_missing_name")] + name: String, + description: Option, + authors: Option>, + // If not compiler version is supplied, the latest is used + // For now, we state that all packages must be compiled under the same + // compiler version. + // We also state that ACIR and the compiler will upgrade in lockstep. + // so you will not need to supply an ACIR and compiler version + compiler_version: Option, + backend: Option, + license: Option, +} + +// TODO: Remove this after a couple of breaking releases (added in 0.10.0) +fn panic_missing_name() -> String { + panic!( + r#" + +Failed to parse `Nargo.toml`. + +`Nargo.toml` now requires a "name" field for Noir packages. + +```toml +[package] +name = "package_name" +``` + +Modify your `Nargo.toml` similarly to above and rerun the command. 
+ +"# + ) +} + +#[derive(Debug, Deserialize, Clone)] +#[serde(untagged)] +/// Enum representing the different types of ways to +/// supply a source for the dependency +enum DependencyConfig { + Github { git: String, tag: String }, + Path { path: String }, +} + +impl DependencyConfig { + fn resolve_to_dependency(&self, pkg_root: &Path) -> Result { + match self { + Self::Github { git, tag } => { + let dir_path = clone_git_repo(git, tag).map_err(ManifestError::GitError)?; + let toml_path = dir_path.join("Nargo.toml"); + let package = resolve_package_from_toml(&toml_path)?; + Ok(Dependency::Remote { package }) + } + Self::Path { path } => { + let dir_path = pkg_root.join(path); + let toml_path = dir_path.join("Nargo.toml"); + let package = resolve_package_from_toml(&toml_path)?; + Ok(Dependency::Local { package }) + } + } + } +} + +fn toml_to_workspace( + nargo_toml: NargoToml, + selected_package: Option, +) -> Result { + let workspace = match nargo_toml.config { + Config::Package { package_config } => { + let member = package_config.resolve_to_package(&nargo_toml.root_dir)?; + if selected_package.is_none() || Some(&member.name) == selected_package.as_ref() { + Workspace { + root_dir: nargo_toml.root_dir, + selected_package_index: Some(0), + members: vec![member], + } + } else { + return Err(ManifestError::MissingSelectedPackage(member.name.into())); + } + } + Config::Workspace { workspace_config } => { + let mut members = Vec::new(); + let mut selected_package_index = None; + for (index, member_path) in workspace_config.members.into_iter().enumerate() { + let package_root_dir = nargo_toml.root_dir.join(&member_path); + let package_toml_path = package_root_dir.join("Nargo.toml"); + let member = resolve_package_from_toml(&package_toml_path)?; + + match selected_package.as_ref() { + Some(selected_name) => { + if &member.name == selected_name { + selected_package_index = Some(index); + } + } + None => { + if Some(&member_path) == workspace_config.default_member.as_ref() { + selected_package_index = Some(index); + } + } + } + + members.push(member); + } + + // If the selected_package_index is still `None` but we have see a default_member or selected package, + // we want to present an error to users + if selected_package_index.is_none() { + if let Some(selected_name) = selected_package { + return Err(ManifestError::MissingSelectedPackage(selected_name.into())); + } + if let Some(default_path) = workspace_config.default_member { + return Err(ManifestError::MissingDefaultPackage(default_path)); + } + } + + Workspace { root_dir: nargo_toml.root_dir, members, selected_package_index } + } + }; + + Ok(workspace) +} + +fn read_toml(toml_path: &Path) -> Result { + let toml_as_string = std::fs::read_to_string(toml_path) + .map_err(|_| ManifestError::ReadFailed(toml_path.to_path_buf()))?; + let root_dir = toml_path.parent().ok_or(ManifestError::MissingParent)?; + let nargo_toml = + NargoToml { root_dir: root_dir.to_path_buf(), config: toml_as_string.try_into()? }; + + Ok(nargo_toml) +} + +/// Resolves a Nargo.toml file into a `Package` struct as defined by our `nargo` core. +fn resolve_package_from_toml(toml_path: &Path) -> Result { + let nargo_toml = read_toml(toml_path)?; + + match nargo_toml.config { + Config::Package { package_config } => { + package_config.resolve_to_package(&nargo_toml.root_dir) + } + Config::Workspace { .. } => { + Err(ManifestError::UnexpectedWorkspace(toml_path.to_path_buf())) + } + } +} + +/// Resolves a Nargo.toml file into a `Workspace` struct as defined by our `nargo` core. 
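The member-selection rules in `toml_to_workspace` above boil down to: an explicitly selected package wins, otherwise the workspace's `default-member` is used, and a name that matches no member is an error. A stand-alone model of just that rule (a hypothetical helper, simplified to plain strings rather than `CrateName`s and member paths):

```rust
fn select_member(
    members: &[&str],
    selected: Option<&str>,
    default_member: Option<&str>,
) -> Result<Option<usize>, String> {
    match selected.or(default_member) {
        // No preference given: the caller can fall back to "operate on every member".
        None => Ok(None),
        Some(name) => members
            .iter()
            .position(|member| *member == name)
            .map(Some)
            .ok_or_else(|| format!("package `{name}` is not a member of this workspace")),
    }
}

fn main() {
    assert_eq!(select_member(&["a", "b"], None, Some("a")), Ok(Some(0)));
    assert_eq!(select_member(&["a", "b"], Some("b"), None), Ok(Some(1)));
    assert!(select_member(&["a", "b"], Some("c"), None).is_err());
}
```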
+pub(crate) fn resolve_workspace_from_toml( + toml_path: &Path, + selected_package: Option, +) -> Result { + let nargo_toml = read_toml(toml_path)?; + + toml_to_workspace(nargo_toml, selected_package) +} + +#[test] +fn parse_standard_toml() { + let src = r#" + + [package] + name = "test" + authors = ["kev", "foo"] + compiler_version = "0.1" + + [dependencies] + rand = { tag = "next", git = "https://github.com/rust-lang-nursery/rand"} + cool = { tag = "next", git = "https://github.com/rust-lang-nursery/rand"} + hello = {path = "./noir_driver"} + "#; + + assert!(Config::try_from(String::from(src)).is_ok()); + assert!(Config::try_from(src).is_ok()); +} + +#[test] +fn parse_workspace_toml() { + let src = r#" + [workspace] + members = ["a", "b"] + "#; + + assert!(Config::try_from(String::from(src)).is_ok()); + assert!(Config::try_from(src).is_ok()); +} + +#[test] +fn parse_workspace_default_member_toml() { + let src = r#" + [workspace] + members = ["a", "b"] + default-member = "a" + "#; + + assert!(Config::try_from(String::from(src)).is_ok()); + assert!(Config::try_from(src).is_ok()); } diff --git a/crates/nargo_cli/src/resolver.rs b/crates/nargo_cli/src/resolver.rs deleted file mode 100644 index 5c4e8225ee4..00000000000 --- a/crates/nargo_cli/src/resolver.rs +++ /dev/null @@ -1,265 +0,0 @@ -use std::{ - collections::HashMap, - path::{Path, PathBuf}, -}; - -use fm::FileManager; -use nargo::manifest::{Dependency, Manifest, PackageManifest, WorkspaceConfig}; -use noirc_driver::{add_dep, create_local_crate, create_non_local_crate}; -use noirc_frontend::{ - graph::{CrateGraph, CrateId, CrateName, CrateType}, - hir::Context, -}; -use thiserror::Error; - -use crate::{git::clone_git_repo, InvalidPackageError}; - -/// Creates a unique folder name for a GitHub repo -/// by using it's URL and tag -pub(crate) fn resolve_folder_name(base: &url::Url, tag: &str) -> String { - let mut folder_name = base.domain().unwrap().to_owned(); - folder_name.push_str(base.path()); - folder_name.push_str(tag); - folder_name -} - -/// Errors covering situations where a crate's dependency tree cannot be resolved. -#[derive(Debug, Error)] -pub(crate) enum DependencyResolutionError { - /// Encountered error while downloading git repository. - #[error("{0}")] - GitError(String), - - /// Attempted to depend on a binary crate. - #[error("dependency {dep_pkg_name} is a binary package and so it cannot be depended upon.")] - BinaryDependency { dep_pkg_name: String }, - - /// Attempted to depend on remote crate which has a local dependency. - /// We have no guarantees that this local dependency will be available so must error. 
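For reference, the `resolve_folder_name` helper being deleted above derives a cache directory name by concatenating the repository's domain, path, and tag. A hedged, stand-alone illustration of that naming scheme, assuming the `url` crate (this mirrors the removed code's behaviour; it is not a new API):

```rust
fn resolve_folder_name(base: &url::Url, tag: &str) -> String {
    let mut folder_name = base.domain().expect("git dependency URLs have a domain").to_owned();
    folder_name.push_str(base.path());
    folder_name.push_str(tag);
    folder_name
}

fn main() {
    let base = url::Url::parse("https://github.com/noir-lang/noir").unwrap();
    // "github.com" + "/noir-lang/noir" + "v0.1.0"
    assert_eq!(resolve_folder_name(&base, "v0.1.0"), "github.com/noir-lang/noirv0.1.0");
}
```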
- #[error("remote(git) dependency has a local dependency.\ndependency located at {}", dependency_path.display())] - RemoteDepWithLocalDep { dependency_path: PathBuf }, - - /// Dependency is not a valid crate - #[error(transparent)] - MalformedDependency(#[from] InvalidPackageError), - - /// Workspace does not contain packages - #[error("manifest path `{}` contains no packages", path.display())] - EmptyWorkspace { path: PathBuf }, - - /// Use workspace as a dependency is not currently supported - #[error("use workspace as a dependency is not currently supported")] - WorkspaceDependency, - - /// Multiple workspace roots found in the same workspace - #[error("multiple workspace roots found in the same workspace:\n{}\n{}", root.display(), member.display())] - MultipleWorkspace { root: PathBuf, member: PathBuf }, - - /// Invalid character `-` in package name - #[error("invalid character `-` in package name")] - InvalidPackageName, - - #[error("package specification `{0}` did not match any packages")] - PackageNotFound(String), - - #[error("two packages named `{0}` in this workspace")] - PackageCollision(String), -} - -#[derive(Debug, Clone)] -struct CachedDep { - entry_path: PathBuf, - crate_type: CrateType, - manifest: PackageManifest, - // Whether the dependency came from - // a remote dependency - remote: bool, -} - -/// Resolves a toml file by either downloading the necessary git repo -/// or it uses the repo on the cache. -/// Downloading will be recursive, so if a package contains packages -/// We need to download those too - -/// Returns the Driver and the backend to use -/// Note that the backend is ignored in the dependencies. -/// Since Noir is backend agnostic, this is okay to do. -/// XXX: Need to handle when a local package changes! -pub(crate) fn resolve_root_manifest( - dir_path: &std::path::Path, - package: Option, -) -> Result<(Context, CrateId), DependencyResolutionError> { - let fm = FileManager::new(dir_path); - let graph = CrateGraph::default(); - let mut context = Context::new(fm, graph); - - let manifest_path = super::find_package_manifest(dir_path)?; - let manifest = super::manifest::parse(&manifest_path)?; - - let crate_id = match manifest { - Manifest::Package(package) => { - let (entry_path, crate_type) = super::lib_or_bin(dir_path)?; - - let crate_id = create_local_crate(&mut context, &entry_path, crate_type); - let pkg_root = manifest_path.parent().expect("Every manifest path has a parent."); - - resolve_package_manifest(&mut context, crate_id, package, pkg_root)?; - - crate_id - } - Manifest::Workspace(workspace) => resolve_workspace_manifest( - &mut context, - package, - manifest_path, - dir_path, - workspace.config, - )?, - }; - - Ok((context, crate_id)) -} - -// Resolves a config file by recursively resolving the dependencies in the config -// Need to solve the case of a project trying to use itself as a dep -// -// We do not need to add stdlib, as it's implicitly -// imported. However, it may be helpful to have the stdlib imported by the -// package manager. 
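The comment above calls out the still-unsolved case of a project using itself as a dependency. A toy model (hypothetical names, not Nargo code) of the usual guard for that: track the chain of packages currently being resolved and refuse to re-enter one of them.

```rust
use std::collections::BTreeMap;

#[derive(Debug)]
struct Package {
    name: String,
    dependencies: BTreeMap<String, Package>,
}

fn resolve(package: &Package, ancestors: &mut Vec<String>) -> Result<(), String> {
    if ancestors.contains(&package.name) {
        return Err(format!("`{}` (transitively) depends on itself", package.name));
    }
    ancestors.push(package.name.clone());
    for dependency in package.dependencies.values() {
        resolve(dependency, ancestors)?;
    }
    ancestors.pop();
    Ok(())
}

fn main() {
    let dep = Package { name: "dep".into(), dependencies: BTreeMap::new() };
    let app = Package {
        name: "app".into(),
        dependencies: BTreeMap::from([("dep".to_string(), dep)]),
    };
    assert!(resolve(&app, &mut Vec::new()).is_ok());
}
```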
-fn resolve_package_manifest( - context: &mut Context, - parent_crate: CrateId, - manifest: PackageManifest, - pkg_root: &Path, -) -> Result<(), DependencyResolutionError> { - let mut cached_packages: HashMap = HashMap::new(); - - // First download and add these top level dependencies crates to the Driver - for (dep_pkg_name, pkg_src) in manifest.dependencies.iter() { - let (dir_path, dep_meta) = cache_dep(pkg_src, pkg_root)?; - - let (entry_path, crate_type) = (&dep_meta.entry_path, &dep_meta.crate_type); - - if crate_type == &CrateType::Binary { - return Err(DependencyResolutionError::BinaryDependency { - dep_pkg_name: dep_pkg_name.to_string(), - }); - } - - let crate_id = create_non_local_crate(context, entry_path, *crate_type); - add_dep(context, parent_crate, crate_id, dep_pkg_name); - - cached_packages.insert(dir_path, (crate_id, dep_meta)); - } - - // Resolve all transitive dependencies - for (dependency_path, (crate_id, dep_meta)) in cached_packages { - if dep_meta.remote && dep_meta.manifest.has_local_dependency() { - return Err(DependencyResolutionError::RemoteDepWithLocalDep { dependency_path }); - } - // TODO: Why did it create a new resolver? - resolve_package_manifest(context, crate_id, dep_meta.manifest, &dependency_path)?; - } - Ok(()) -} - -fn resolve_workspace_manifest( - context: &mut Context, - mut local_package: Option, - manifest_path: PathBuf, - dir_path: &Path, - workspace: WorkspaceConfig, -) -> Result { - let members = workspace.members; - let mut packages = HashMap::new(); - - if members.is_empty() { - return Err(DependencyResolutionError::EmptyWorkspace { path: manifest_path }); - } - - for member in &members { - let member_path: PathBuf = dir_path.join(member); - let member_member_path = super::find_package_manifest(&member_path)?; - let member_manifest = super::manifest::parse(&member_member_path)?; - - match member_manifest { - Manifest::Package(inner) => { - let name: CrateName = inner - .package - .name - .parse() - .map_err(|_name| DependencyResolutionError::InvalidPackageName)?; - - if packages.insert(name.clone(), member_path).is_some() { - return Err(DependencyResolutionError::PackageCollision(name.into())); - } - - if local_package.is_none() && workspace.default_member.as_ref() == Some(member) { - local_package = Some(name.into()); - } - } - Manifest::Workspace(_) => { - return Err(DependencyResolutionError::MultipleWorkspace { - root: manifest_path, - member: member_member_path, - }) - } - } - } - - let local_package = match local_package { - Some(local_package) => { - local_package.parse().map_err(|_| DependencyResolutionError::InvalidPackageName)? 
- } - None => packages.keys().last().expect("non-empty packages").clone(), - }; - - let local_crate = packages - .remove(&local_package) - .ok_or_else(|| DependencyResolutionError::PackageNotFound(local_package.into()))?; - - let (entry_path, _crate_type) = super::lib_or_bin(local_crate)?; - let crate_id = create_local_crate(context, &entry_path, CrateType::Workspace); - - for (_, package_path) in packages.drain() { - let (entry_path, crate_type) = super::lib_or_bin(package_path)?; - create_non_local_crate(context, &entry_path, crate_type); - } - - Ok(crate_id) -} - -/// If the dependency is remote, download the dependency -/// and return the directory path along with the metadata -/// Needed to fill the CachedDep struct -/// -/// If it's a local path, the same applies, however it will not -/// be downloaded -fn cache_dep( - dep: &Dependency, - pkg_root: &Path, -) -> Result<(PathBuf, CachedDep), DependencyResolutionError> { - fn retrieve_meta( - dir_path: &Path, - remote: bool, - ) -> Result { - let (entry_path, crate_type) = super::lib_or_bin(dir_path)?; - let manifest_path = super::find_package_manifest(dir_path)?; - let manifest = super::manifest::parse(manifest_path)? - .to_package() - .ok_or(DependencyResolutionError::WorkspaceDependency)?; - Ok(CachedDep { entry_path, crate_type, manifest, remote }) - } - - match dep { - Dependency::Github { git, tag } => { - let dir_path = clone_git_repo(git, tag).map_err(DependencyResolutionError::GitError)?; - let meta = retrieve_meta(&dir_path, true)?; - Ok((dir_path, meta)) - } - Dependency::Path { path } => { - let dir_path = pkg_root.join(path); - let meta = retrieve_meta(&dir_path, false)?; - Ok((dir_path, meta)) - } - } -} diff --git a/crates/nargo_cli/tests/codegen-verifier.rs b/crates/nargo_cli/tests/codegen-verifier.rs index 3e4dc1dc745..f991f72b108 100644 --- a/crates/nargo_cli/tests/codegen-verifier.rs +++ b/crates/nargo_cli/tests/codegen-verifier.rs @@ -29,5 +29,9 @@ fn simple_verifier_codegen() { .success() .stdout(predicate::str::contains("Contract successfully created and located at")); - project_dir.child("contract").child("plonk_vk.sol").assert(predicate::path::is_file()); + project_dir + .child("contract") + .child("hello_world") + .child("plonk_vk.sol") + .assert(predicate::path::is_file()); } diff --git a/crates/nargo_cli/tests/hello_world.rs b/crates/nargo_cli/tests/hello_world.rs index 121f09f0ece..bc7022d1567 100644 --- a/crates/nargo_cli/tests/hello_world.rs +++ b/crates/nargo_cli/tests/hello_world.rs @@ -36,21 +36,20 @@ fn hello_world_example() { project_dir.child("Prover.toml").assert(predicate::path::is_file()); project_dir.child("Verifier.toml").assert(predicate::path::is_file()); - // `nargo prove p` - let proof_name = "p"; + // `nargo prove` project_dir.child("Prover.toml").write_str("x = 1\ny = 2").unwrap(); let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.arg("prove").arg(proof_name); + cmd.arg("prove"); cmd.assert().success(); project_dir .child("proofs") - .child(format!("{proof_name}.proof")) + .child(format!("{project_name}.proof")) .assert(predicate::path::is_file()); // `nargo verify p` let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.arg("verify").arg(proof_name); + cmd.arg("verify"); cmd.assert().success(); } diff --git a/crates/nargo_cli/tests/test_data/config.toml b/crates/nargo_cli/tests/test_data/config.toml index 88776ed03d2..6fe6c7897e1 100644 --- a/crates/nargo_cli/tests/test_data/config.toml +++ b/crates/nargo_cli/tests/test_data/config.toml @@ -2,4 +2,4 @@ exclude = [] # List of 
tests (as their directory name) expecting to fail: if the test pass, we report an error. -fail = ["brillig_assert_fail", "dep_impl_primitive"] +fail = ["brillig_assert_fail", "dep_impl_primitive", "workspace_fail", "workspace_missing_toml"] diff --git a/crates/nargo_cli/tests/test_data/workspace/crates/a/Prover.toml b/crates/nargo_cli/tests/test_data/workspace/crates/a/Prover.toml new file mode 100644 index 00000000000..465ef562de4 --- /dev/null +++ b/crates/nargo_cli/tests/test_data/workspace/crates/a/Prover.toml @@ -0,0 +1,2 @@ +x = "1" +y = "1" diff --git a/crates/nargo_cli/tests/test_data/workspace/crates/a/src/main.nr b/crates/nargo_cli/tests/test_data/workspace/crates/a/src/main.nr index 81847a9031d..550e5034a7b 100644 --- a/crates/nargo_cli/tests/test_data/workspace/crates/a/src/main.nr +++ b/crates/nargo_cli/tests/test_data/workspace/crates/a/src/main.nr @@ -1,11 +1,3 @@ fn main(x : Field, y : pub Field) { - assert(x != y); -} - -#[test] -fn a() { - main(1, 2); - - // Uncomment to make test fail - // main(1, 1); + assert(x == y); } diff --git a/crates/nargo_cli/tests/test_data/workspace/crates/b/Prover.toml b/crates/nargo_cli/tests/test_data/workspace/crates/b/Prover.toml new file mode 100644 index 00000000000..a0397e89477 --- /dev/null +++ b/crates/nargo_cli/tests/test_data/workspace/crates/b/Prover.toml @@ -0,0 +1,2 @@ +x = "1" +y = "0" diff --git a/crates/nargo_cli/tests/test_data/workspace/crates/b/src/main.nr b/crates/nargo_cli/tests/test_data/workspace/crates/b/src/main.nr index 512f99feeca..6e170de75fc 100644 --- a/crates/nargo_cli/tests/test_data/workspace/crates/b/src/main.nr +++ b/crates/nargo_cli/tests/test_data/workspace/crates/b/src/main.nr @@ -1,11 +1,3 @@ fn main(x : Field, y : pub Field) { assert(x != y); } - -#[test] -fn b() { - main(1, 2); - - // Uncomment to make test fail - // main(1, 1); -} diff --git a/crates/nargo_cli/tests/test_data/workspace_default_member/a/Prover.toml b/crates/nargo_cli/tests/test_data/workspace_default_member/a/Prover.toml new file mode 100644 index 00000000000..465ef562de4 --- /dev/null +++ b/crates/nargo_cli/tests/test_data/workspace_default_member/a/Prover.toml @@ -0,0 +1,2 @@ +x = "1" +y = "1" diff --git a/crates/nargo_cli/tests/test_data/workspace_default_member/a/src/main.nr b/crates/nargo_cli/tests/test_data/workspace_default_member/a/src/main.nr index 206dc46d57a..550e5034a7b 100644 --- a/crates/nargo_cli/tests/test_data/workspace_default_member/a/src/main.nr +++ b/crates/nargo_cli/tests/test_data/workspace_default_member/a/src/main.nr @@ -1,11 +1,3 @@ fn main(x : Field, y : pub Field) { - assert(x != y); -} - -#[test] -fn test_main() { - main(1, 2); - - // Uncomment to make test fail - // main(1, 1); + assert(x == y); } diff --git a/crates/nargo_cli/tests/test_data/workspace_default_member/b/Nargo.toml b/crates/nargo_cli/tests/test_data/workspace_default_member/b/Nargo.toml new file mode 100644 index 00000000000..85c6119c62c --- /dev/null +++ b/crates/nargo_cli/tests/test_data/workspace_default_member/b/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "b" +authors = [""] +compiler_version = "0.8.0" + +[dependencies] diff --git a/crates/nargo_cli/tests/test_data/workspace_default_member/b/Prover.toml b/crates/nargo_cli/tests/test_data/workspace_default_member/b/Prover.toml new file mode 100644 index 00000000000..83fcd8678e7 --- /dev/null +++ b/crates/nargo_cli/tests/test_data/workspace_default_member/b/Prover.toml @@ -0,0 +1,3 @@ +# Deliberately setting these to fail to prove this is NOT executed since a default is specified +x = 
"1" +y = "1" diff --git a/crates/nargo_cli/tests/test_data/workspace_default_member/b/src/main.nr b/crates/nargo_cli/tests/test_data/workspace_default_member/b/src/main.nr new file mode 100644 index 00000000000..6e170de75fc --- /dev/null +++ b/crates/nargo_cli/tests/test_data/workspace_default_member/b/src/main.nr @@ -0,0 +1,3 @@ +fn main(x : Field, y : pub Field) { + assert(x != y); +} diff --git a/crates/nargo_cli/tests/test_data/workspace_fail/Nargo.toml b/crates/nargo_cli/tests/test_data/workspace_fail/Nargo.toml new file mode 100644 index 00000000000..36db098686f --- /dev/null +++ b/crates/nargo_cli/tests/test_data/workspace_fail/Nargo.toml @@ -0,0 +1,2 @@ +[workspace] +members = ["crates/a", "crates/b"] diff --git a/crates/nargo_cli/tests/test_data/workspace_fail/crates/a/Nargo.toml b/crates/nargo_cli/tests/test_data/workspace_fail/crates/a/Nargo.toml new file mode 100644 index 00000000000..5ff1a743e3d --- /dev/null +++ b/crates/nargo_cli/tests/test_data/workspace_fail/crates/a/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "a" +authors = [""] +compiler_version = "0.8.0" + +[dependencies] \ No newline at end of file diff --git a/crates/nargo_cli/tests/test_data/workspace_fail/crates/a/Prover.toml b/crates/nargo_cli/tests/test_data/workspace_fail/crates/a/Prover.toml new file mode 100644 index 00000000000..b76c88bf536 --- /dev/null +++ b/crates/nargo_cli/tests/test_data/workspace_fail/crates/a/Prover.toml @@ -0,0 +1,3 @@ +# Deliberately setting these to fail to prove this is being executed +x = "1" +y = "2" diff --git a/crates/nargo_cli/tests/test_data/workspace_fail/crates/a/src/main.nr b/crates/nargo_cli/tests/test_data/workspace_fail/crates/a/src/main.nr new file mode 100644 index 00000000000..550e5034a7b --- /dev/null +++ b/crates/nargo_cli/tests/test_data/workspace_fail/crates/a/src/main.nr @@ -0,0 +1,3 @@ +fn main(x : Field, y : pub Field) { + assert(x == y); +} diff --git a/crates/nargo_cli/tests/test_data/workspace_fail/crates/b/Nargo.toml b/crates/nargo_cli/tests/test_data/workspace_fail/crates/b/Nargo.toml new file mode 100644 index 00000000000..8ae69a781eb --- /dev/null +++ b/crates/nargo_cli/tests/test_data/workspace_fail/crates/b/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "b" +authors = [""] +compiler_version = "0.8.0" + +[dependencies] \ No newline at end of file diff --git a/crates/nargo_cli/tests/test_data/workspace_fail/crates/b/Prover.toml b/crates/nargo_cli/tests/test_data/workspace_fail/crates/b/Prover.toml new file mode 100644 index 00000000000..a0397e89477 --- /dev/null +++ b/crates/nargo_cli/tests/test_data/workspace_fail/crates/b/Prover.toml @@ -0,0 +1,2 @@ +x = "1" +y = "0" diff --git a/crates/nargo_cli/tests/test_data/workspace_fail/crates/b/src/main.nr b/crates/nargo_cli/tests/test_data/workspace_fail/crates/b/src/main.nr new file mode 100644 index 00000000000..6e170de75fc --- /dev/null +++ b/crates/nargo_cli/tests/test_data/workspace_fail/crates/b/src/main.nr @@ -0,0 +1,3 @@ +fn main(x : Field, y : pub Field) { + assert(x != y); +} diff --git a/crates/nargo_cli/tests/test_data/workspace_missing_toml/Nargo.toml b/crates/nargo_cli/tests/test_data/workspace_missing_toml/Nargo.toml new file mode 100644 index 00000000000..36db098686f --- /dev/null +++ b/crates/nargo_cli/tests/test_data/workspace_missing_toml/Nargo.toml @@ -0,0 +1,2 @@ +[workspace] +members = ["crates/a", "crates/b"] diff --git a/crates/nargo_cli/tests/test_data/workspace_missing_toml/crates/a/Prover.toml b/crates/nargo_cli/tests/test_data/workspace_missing_toml/crates/a/Prover.toml new file 
mode 100644 index 00000000000..465ef562de4 --- /dev/null +++ b/crates/nargo_cli/tests/test_data/workspace_missing_toml/crates/a/Prover.toml @@ -0,0 +1,2 @@ +x = "1" +y = "1" diff --git a/crates/nargo_cli/tests/test_data/workspace_missing_toml/crates/a/src/main.nr b/crates/nargo_cli/tests/test_data/workspace_missing_toml/crates/a/src/main.nr new file mode 100644 index 00000000000..550e5034a7b --- /dev/null +++ b/crates/nargo_cli/tests/test_data/workspace_missing_toml/crates/a/src/main.nr @@ -0,0 +1,3 @@ +fn main(x : Field, y : pub Field) { + assert(x == y); +} diff --git a/crates/nargo_cli/tests/test_data/workspace_missing_toml/crates/b/Nargo.toml b/crates/nargo_cli/tests/test_data/workspace_missing_toml/crates/b/Nargo.toml new file mode 100644 index 00000000000..8ae69a781eb --- /dev/null +++ b/crates/nargo_cli/tests/test_data/workspace_missing_toml/crates/b/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "b" +authors = [""] +compiler_version = "0.8.0" + +[dependencies] \ No newline at end of file diff --git a/crates/nargo_cli/tests/test_data/workspace_missing_toml/crates/b/Prover.toml b/crates/nargo_cli/tests/test_data/workspace_missing_toml/crates/b/Prover.toml new file mode 100644 index 00000000000..a0397e89477 --- /dev/null +++ b/crates/nargo_cli/tests/test_data/workspace_missing_toml/crates/b/Prover.toml @@ -0,0 +1,2 @@ +x = "1" +y = "0" diff --git a/crates/nargo_cli/tests/test_data/workspace_missing_toml/crates/b/src/main.nr b/crates/nargo_cli/tests/test_data/workspace_missing_toml/crates/b/src/main.nr new file mode 100644 index 00000000000..6e170de75fc --- /dev/null +++ b/crates/nargo_cli/tests/test_data/workspace_missing_toml/crates/b/src/main.nr @@ -0,0 +1,3 @@ +fn main(x : Field, y : pub Field) { + assert(x != y); +} diff --git a/crates/noirc_driver/src/lib.rs b/crates/noirc_driver/src/lib.rs index f2537bb88fe..c0957313f69 100644 --- a/crates/noirc_driver/src/lib.rs +++ b/crates/noirc_driver/src/lib.rs @@ -87,10 +87,12 @@ pub fn create_non_local_crate( } /// Adds a edge in the crate graph for two crates -pub fn add_dep(context: &mut Context, this_crate: CrateId, depends_on: CrateId, crate_name: &str) { - let crate_name = - crate_name.parse().expect("crate name contains blacklisted characters, please remove"); - +pub fn add_dep( + context: &mut Context, + this_crate: CrateId, + depends_on: CrateId, + crate_name: CrateName, +) { // Cannot depend on a binary if context.crate_graph.crate_type(depends_on) == CrateType::Binary { panic!("crates cannot depend on binaries. 
{crate_name:?} is a binary crate") @@ -142,15 +144,7 @@ pub fn check_crate( propagate_dep(context, std_crate, &std_crate_name.parse().unwrap()); let mut errors = vec![]; - match context.crate_graph.crate_type(crate_id) { - CrateType::Workspace => { - let keys: Vec<_> = context.crate_graph.iter_keys().collect(); // avoid borrow checker - for crate_id in keys { - CrateDefMap::collect_defs(crate_id, context, &mut errors); - } - } - _ => CrateDefMap::collect_defs(crate_id, context, &mut errors), - } + CrateDefMap::collect_defs(crate_id, context, &mut errors); if has_errors(&errors, deny_warnings) { Err(errors) diff --git a/crates/noirc_frontend/src/graph/mod.rs b/crates/noirc_frontend/src/graph/mod.rs index 7ebfbae4817..af9216071e6 100644 --- a/crates/noirc_frontend/src/graph/mod.rs +++ b/crates/noirc_frontend/src/graph/mod.rs @@ -4,7 +4,7 @@ // This version is also simpler due to not having macro_defs or proc_macros // XXX: Edition may be reintroduced or some sort of versioning -use std::str::FromStr; +use std::{fmt::Display, str::FromStr}; use fm::FileId; use rustc_hash::{FxHashMap, FxHashSet}; @@ -26,14 +26,25 @@ impl CrateId { } } -#[derive(Debug, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash, Ord, PartialOrd)] pub struct CrateName(SmolStr); +impl Display for CrateName { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.0.fmt(f) + } +} + impl From for String { fn from(crate_name: CrateName) -> Self { crate_name.0.into() } } +impl From<&CrateName> for String { + fn from(crate_name: &CrateName) -> Self { + crate_name.0.clone().into() + } +} /// Creates a new CrateName rejecting any crate name that /// has a character on the blacklist. @@ -66,7 +77,6 @@ pub const CHARACTER_BLACK_LIST: [char; 1] = ['-']; pub enum CrateType { Library, Binary, - Workspace, } #[derive(Debug, Clone, PartialEq, Eq)] diff --git a/crates/noirc_frontend/src/hir/mod.rs b/crates/noirc_frontend/src/hir/mod.rs index 5937f57a8c7..d6f98e112af 100644 --- a/crates/noirc_frontend/src/hir/mod.rs +++ b/crates/noirc_frontend/src/hir/mod.rs @@ -69,10 +69,7 @@ impl Context { // Check the crate type // We don't panic here to allow users to `evaluate` libraries which will do nothing - if matches!( - self.crate_graph[*crate_id].crate_type, - CrateType::Binary | CrateType::Workspace - ) { + if matches!(self.crate_graph[*crate_id].crate_type, CrateType::Binary) { // All Binaries should have a main function local_crate.main_function() } else { @@ -112,19 +109,6 @@ impl Context { .collect() } - pub fn get_all_test_functions_in_workspace_matching( - &self, - pattern: &str, - ) -> Vec<(String, FuncId)> { - let mut tests = Vec::new(); - - for crate_id in self.crate_graph.iter_keys() { - tests.extend(self.get_all_test_functions_in_crate_matching(&crate_id, pattern)); - } - - tests - } - /// Return a Vec of all `contract` declarations in the source code and the functions they contain pub fn get_all_contracts(&self, crate_id: &CrateId) -> Vec { self.def_map(crate_id) From 39610af5b3cc8de7e3aa963a2cbff3083179cbf4 Mon Sep 17 00:00:00 2001 From: Blaine Bublitz Date: Wed, 2 Aug 2023 01:28:09 -0700 Subject: [PATCH 09/19] chore(noirc_driver): Unify crate preparation (#2119) --- crates/lsp/src/lib.rs | 6 +++--- crates/lsp/src/lib_hacky.rs | 7 +++---- crates/nargo_cli/src/cli/mod.rs | 4 ++-- crates/nargo_cli/src/lib.rs | 7 +++---- crates/noirc_driver/src/lib.rs | 29 +++-------------------------- crates/wasm/src/compile.rs | 8 ++++---- 6 files changed, 18 insertions(+), 43 deletions(-) diff --git 
a/crates/lsp/src/lib.rs b/crates/lsp/src/lib.rs index bd4112218e4..1c02c802808 100644 --- a/crates/lsp/src/lib.rs +++ b/crates/lsp/src/lib.rs @@ -22,7 +22,7 @@ use lsp_types::{ InitializeParams, InitializeResult, InitializedParams, Position, PublishDiagnosticsParams, Range, ServerCapabilities, TextDocumentSyncOptions, }; -use noirc_driver::{check_crate, create_local_crate}; +use noirc_driver::{check_crate, prepare_crate}; use noirc_errors::{DiagnosticKind, FileDiagnostic}; use noirc_frontend::{ graph::{CrateGraph, CrateType}, @@ -190,7 +190,7 @@ fn on_code_lens_request( } }; - let crate_id = create_local_crate(&mut context, file_path, CrateType::Binary); + let crate_id = prepare_crate(&mut context, file_path, CrateType::Binary); // We ignore the warnings and errors produced by compilation for producing codelenses // because we can still get the test functions even if compilation fails @@ -283,7 +283,7 @@ fn on_did_save_text_document( } }; - let crate_id = create_local_crate(&mut context, file_path, CrateType::Binary); + let crate_id = prepare_crate(&mut context, file_path, CrateType::Binary); let mut diagnostics = Vec::new(); diff --git a/crates/lsp/src/lib_hacky.rs b/crates/lsp/src/lib_hacky.rs index 72a2625fcac..13bb2b82847 100644 --- a/crates/lsp/src/lib_hacky.rs +++ b/crates/lsp/src/lib_hacky.rs @@ -19,7 +19,7 @@ use lsp_types::{ InitializedParams, Position, PublishDiagnosticsParams, Range, ServerCapabilities, TextDocumentSyncOptions, }; -use noirc_driver::{check_crate, create_local_crate, create_non_local_crate, propagate_dep}; +use noirc_driver::{check_crate, prepare_crate, propagate_dep}; use noirc_errors::{DiagnosticKind, FileDiagnostic}; use noirc_frontend::{ graph::{CrateGraph, CrateId, CrateType}, @@ -286,7 +286,7 @@ fn create_context_at_path( } let nargo_toml_path = find_nearest_parent_file(&file_path, &["Nargo.toml"]); - let current_crate_id = create_local_crate(&mut context, &file_path, CrateType::Binary); + let current_crate_id = prepare_crate(&mut context, &file_path, CrateType::Binary); // TODO(AD): undo hacky dependency resolution if let Some(nargo_toml_path) = nargo_toml_path { @@ -297,8 +297,7 @@ fn create_context_at_path( .parent() .unwrap() // TODO .join(PathBuf::from(&dependency_path).join("src").join("lib.nr")); - let library_crate = - create_non_local_crate(&mut context, &path_to_lib, CrateType::Library); + let library_crate = prepare_crate(&mut context, &path_to_lib, CrateType::Library); propagate_dep(&mut context, library_crate, &crate_name.parse().unwrap()); } } diff --git a/crates/nargo_cli/src/cli/mod.rs b/crates/nargo_cli/src/cli/mod.rs index 8ce66db1b7b..9d494b21e6a 100644 --- a/crates/nargo_cli/src/cli/mod.rs +++ b/crates/nargo_cli/src/cli/mod.rs @@ -92,7 +92,7 @@ pub fn start_cli() -> eyre::Result<()> { #[cfg(test)] mod tests { use fm::FileManager; - use noirc_driver::{check_crate, create_local_crate}; + use noirc_driver::{check_crate, prepare_crate}; use noirc_errors::reporter; use noirc_frontend::{ graph::{CrateGraph, CrateType}, @@ -110,7 +110,7 @@ mod tests { let fm = FileManager::new(root_dir); let graph = CrateGraph::default(); let mut context = Context::new(fm, graph); - let crate_id = create_local_crate(&mut context, root_file, CrateType::Binary); + let crate_id = prepare_crate(&mut context, root_file, CrateType::Binary); let result = check_crate(&mut context, crate_id, false); let success = result.is_ok(); diff --git a/crates/nargo_cli/src/lib.rs b/crates/nargo_cli/src/lib.rs index b456d31c0ca..05753f7f3d8 100644 --- a/crates/nargo_cli/src/lib.rs 
+++ b/crates/nargo_cli/src/lib.rs @@ -9,7 +9,7 @@ use fm::FileManager; use nargo::package::{Dependency, Package}; -use noirc_driver::{add_dep, create_local_crate, create_non_local_crate}; +use noirc_driver::{add_dep, prepare_crate}; use noirc_frontend::{ graph::{CrateGraph, CrateId, CrateName, CrateType}, hir::Context, @@ -107,8 +107,7 @@ fn prepare_dependencies( for (dep_name, dep) in dependencies.into_iter() { match dep { Dependency::Remote { package } | Dependency::Local { package } => { - let crate_id = - create_non_local_crate(context, &package.entry_path, package.crate_type); + let crate_id = prepare_crate(context, &package.entry_path, package.crate_type); add_dep(context, parent_crate, crate_id, dep_name); prepare_dependencies(context, crate_id, package.dependencies.to_owned()); } @@ -121,7 +120,7 @@ fn prepare_package(package: &Package) -> (Context, CrateId) { let graph = CrateGraph::default(); let mut context = Context::new(fm, graph); - let crate_id = create_local_crate(&mut context, &package.entry_path, package.crate_type); + let crate_id = prepare_crate(&mut context, &package.entry_path, package.crate_type); prepare_dependencies(&mut context, crate_id, package.dependencies.to_owned()); diff --git a/crates/noirc_driver/src/lib.rs b/crates/noirc_driver/src/lib.rs index c0957313f69..4d1b7fe2675 100644 --- a/crates/noirc_driver/src/lib.rs +++ b/crates/noirc_driver/src/lib.rs @@ -52,40 +52,17 @@ pub fn compile_file( context: &mut Context, root_file: &Path, ) -> Result<(CompiledProgram, Warnings), ErrorsAndWarnings> { - let crate_id = create_local_crate(context, root_file, CrateType::Binary); + let crate_id = prepare_crate(context, root_file, CrateType::Binary); compile_main(context, crate_id, &CompileOptions::default()) } -/// Adds the File with the local crate root to the file system -/// and adds the local crate to the graph -/// XXX: This may pose a problem with workspaces, where you can change the local crate and where -/// we have multiple binaries in one workspace -/// A Fix would be for the driver instance to store the local crate id. -// Granted that this is the only place which relies on the local crate being first -pub fn create_local_crate( - context: &mut Context, - file_name: &Path, - crate_type: CrateType, -) -> CrateId { +/// Adds the file from the file system at `Path` to the crate graph +pub fn prepare_crate(context: &mut Context, file_name: &Path, crate_type: CrateType) -> CrateId { let root_file_id = context.file_manager.add_file(file_name).unwrap(); context.crate_graph.add_crate_root(crate_type, root_file_id) } -/// Creates a Non Local Crate. A Non Local Crate is any crate which is the not the crate that -/// the compiler is compiling. 
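With both helpers collapsed into `prepare_crate` above, every crate, whether the local binary or one of its dependencies, is registered through the same call. A minimal usage sketch lifted from the patterns elsewhere in this series; the paths are placeholders and must exist on disk, since `add_file` is unwrapped inside `prepare_crate`.

```rust
use std::path::Path;

use fm::FileManager;
use noirc_driver::prepare_crate;
use noirc_frontend::{
    graph::{CrateGraph, CrateType},
    hir::Context,
};

fn main() {
    let fm = FileManager::new(Path::new("."));
    let graph = CrateGraph::default();
    let mut context = Context::new(fm, graph);

    // The local crate and a dependency now go through the same entry point.
    let _binary = prepare_crate(&mut context, Path::new("src/main.nr"), CrateType::Binary);
    let _library = prepare_crate(&mut context, Path::new("dep/src/lib.nr"), CrateType::Library);
}
```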
-pub fn create_non_local_crate( - context: &mut Context, - file_name: &Path, - crate_type: CrateType, -) -> CrateId { - let root_file_id = context.file_manager.add_file(file_name).unwrap(); - - // You can add any crate type to the crate graph - // but you cannot depend on Binaries - context.crate_graph.add_crate_root(crate_type, root_file_id) -} - /// Adds a edge in the crate graph for two crates pub fn add_dep( context: &mut Context, diff --git a/crates/wasm/src/compile.rs b/crates/wasm/src/compile.rs index c940f0ce246..15d8d5107ea 100644 --- a/crates/wasm/src/compile.rs +++ b/crates/wasm/src/compile.rs @@ -3,8 +3,8 @@ use fm::FileManager; use gloo_utils::format::JsValueSerdeExt; use log::debug; use noirc_driver::{ - check_crate, compile_contracts, compile_no_check, create_local_crate, create_non_local_crate, - propagate_dep, CompileOptions, CompiledContract, + check_crate, compile_contracts, compile_no_check, prepare_crate, propagate_dep, CompileOptions, + CompiledContract, }; use noirc_frontend::{ graph::{CrateGraph, CrateType}, @@ -63,7 +63,7 @@ impl Default for WASMCompileOptions { fn add_noir_lib(context: &mut Context, crate_name: &str) { let path_to_lib = Path::new(&crate_name).join("lib.nr"); - let library_crate = create_non_local_crate(context, &path_to_lib, CrateType::Library); + let library_crate = prepare_crate(context, &path_to_lib, CrateType::Library); propagate_dep(context, library_crate, &crate_name.parse().unwrap()); } @@ -87,7 +87,7 @@ pub fn compile(args: JsValue) -> JsValue { let mut context = Context::new(fm, graph); let path = Path::new(&options.entry_point); - let crate_id = create_local_crate(&mut context, path, CrateType::Binary); + let crate_id = prepare_crate(&mut context, path, CrateType::Binary); for dependency in options.optional_dependencies_set { add_noir_lib(&mut context, dependency.as_str()); From f7742ab026092f129bd4ec4f122bcd3249100529 Mon Sep 17 00:00:00 2001 From: jfecher Date: Wed, 2 Aug 2023 03:59:08 -0500 Subject: [PATCH 10/19] fix: flattening pass no longer overwrites previously mapped condition values (#2117) * Fix flattening pass overwriting previously mapped values * chore: add backticks to variable names in comment --------- Co-authored-by: Tom French <15848336+TomAFrench@users.noreply.github.com> --- .../test_data/regression_2099/Nargo.toml | 6 +++ .../test_data/regression_2099/src/main.nr | 37 +++++++++++++++++++ .../src/ssa_refactor/ir/function_inserter.rs | 1 - .../src/ssa_refactor/opt/flatten_cfg.rs | 5 ++- 4 files changed, 47 insertions(+), 2 deletions(-) create mode 100644 crates/nargo_cli/tests/test_data/regression_2099/Nargo.toml create mode 100644 crates/nargo_cli/tests/test_data/regression_2099/src/main.nr diff --git a/crates/nargo_cli/tests/test_data/regression_2099/Nargo.toml b/crates/nargo_cli/tests/test_data/regression_2099/Nargo.toml new file mode 100644 index 00000000000..ca96e7164a5 --- /dev/null +++ b/crates/nargo_cli/tests/test_data/regression_2099/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "regression_2099" +authors = [""] +compiler_version = "0.9.0" + +[dependencies] \ No newline at end of file diff --git a/crates/nargo_cli/tests/test_data/regression_2099/src/main.nr b/crates/nargo_cli/tests/test_data/regression_2099/src/main.nr new file mode 100644 index 00000000000..b96e664dedf --- /dev/null +++ b/crates/nargo_cli/tests/test_data/regression_2099/src/main.nr @@ -0,0 +1,37 @@ +use dep::std::ec::tecurve::affine::Curve as AffineCurve; +use dep::std::ec::tecurve::affine::Point as Gaffine; +use 
dep::std::ec::tecurve::curvegroup::Curve; +use dep::std::ec::tecurve::curvegroup::Point as G; + +use dep::std::ec::swcurve::affine::Point as SWGaffine; +use dep::std::ec::swcurve::curvegroup::Point as SWG; + +use dep::std::ec::montcurve::affine::Point as MGaffine; +use dep::std::ec::montcurve::curvegroup::Point as MG; + +fn main() { + // Define Baby Jubjub (ERC-2494) parameters in affine representation + let bjj_affine = AffineCurve::new(168700, 168696, Gaffine::new(995203441582195749578291179787384436505546430278305826713579947235728471134,5472060717959818805561601436314318772137091100104008585924551046643952123905)); + + // Test addition + let p1_affine = Gaffine::new(17777552123799933955779906779655732241715742912184938656739573121738514868268, 2626589144620713026669568689430873010625803728049924121243784502389097019475); + let p2_affine = Gaffine::new(16540640123574156134436876038791482806971768689494387082833631921987005038935, 20819045374670962167435360035096875258406992893633759881276124905556507972311); + let _p3_affine = bjj_affine.add(p1_affine, p2_affine); + + // Test SWCurve equivalents of the above + // First the affine representation + let bjj_swcurve_affine = bjj_affine.into_swcurve(); + + let p1_swcurve_affine = bjj_affine.map_into_swcurve(p1_affine); + let p2_swcurve_affine = bjj_affine.map_into_swcurve(p2_affine); + + let _p3_swcurve_affine_from_add = bjj_swcurve_affine.add( + p1_swcurve_affine, + p2_swcurve_affine + ); + + // Check that these points are on the curve + assert( + bjj_swcurve_affine.contains(p1_swcurve_affine) + ); +} diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/function_inserter.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/function_inserter.rs index 38dcfbbb168..15c755f40c2 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/function_inserter.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/function_inserter.rs @@ -124,7 +124,6 @@ impl<'f> FunctionInserter<'f> { let old_parameters = self.function.dfg.block_parameters(block); for (param, new_param) in old_parameters.iter().zip(new_values) { - // Don't overwrite any existing entries to avoid overwriting the induction variable self.values.entry(*param).or_insert(*new_param); } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/opt/flatten_cfg.rs b/crates/noirc_evaluator/src/ssa_refactor/opt/flatten_cfg.rs index 4ff857f942f..fdc4be085d7 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/opt/flatten_cfg.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/opt/flatten_cfg.rs @@ -274,7 +274,10 @@ impl<'f> Context<'f> { // end, in addition to resetting the value of old_condition since it is set to // known to be true/false within the then/else branch respectively. self.insert_current_side_effects_enabled(); - self.inserter.map_value(old_condition, old_condition); + + // We must map back to `then_condition` here. Mapping `old_condition` to itself would + // lose any previous mappings. + self.inserter.map_value(old_condition, then_condition); // While there is a condition on the stack we don't compile outside the condition // until it is popped. 
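To make the one-line fix above concrete, here is a toy stand-in for the inserter's value map, using plain integers in place of SSA `ValueId`s (illustrative only): re-mapping a value to itself clobbers whatever it previously resolved to, while mapping it to `then_condition` keeps the substitution chain intact.

```rust
use std::collections::HashMap;

fn main() {
    let mut values: HashMap<u32, u32> = HashMap::new();
    let (old_condition, then_condition) = (1, 2);

    // An earlier step already recorded that `old_condition` resolves to `then_condition`.
    values.insert(old_condition, then_condition);

    // Buggy reset: mapping the value back to itself forgets that substitution...
    values.insert(old_condition, old_condition);
    assert_eq!(values[&old_condition], old_condition);

    // ...while the fixed reset keeps resolving to the flattened condition.
    values.insert(old_condition, then_condition);
    assert_eq!(values[&old_condition], then_condition);
}
```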
This ensures we inline the full then and else branches From 50b2816099a021e4b8cb44a9017fb849abf014e6 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Wed, 2 Aug 2023 14:20:28 +0100 Subject: [PATCH 11/19] feat: Add additional `BinaryOp` simplifications (#2124) feat: add additional `BinaryOp` simplifictions --- .../src/ssa_refactor/ir/instruction.rs | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs index b7a3ea02ae9..6d74a99e002 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs @@ -733,6 +733,9 @@ impl Binary { let zero = dfg.make_constant(FieldElement::zero(), operand_type); return SimplifyResult::SimplifiedTo(zero); } + if dfg.resolve(self.lhs) == dfg.resolve(self.rhs) { + return SimplifyResult::SimplifiedTo(self.lhs); + } } BinaryOp::Or => { if lhs_is_zero { @@ -741,8 +744,17 @@ impl Binary { if rhs_is_zero { return SimplifyResult::SimplifiedTo(self.lhs); } + if dfg.resolve(self.lhs) == dfg.resolve(self.rhs) { + return SimplifyResult::SimplifiedTo(self.lhs); + } } BinaryOp::Xor => { + if lhs_is_zero { + return SimplifyResult::SimplifiedTo(self.rhs); + } + if rhs_is_zero { + return SimplifyResult::SimplifiedTo(self.lhs); + } if dfg.resolve(self.lhs) == dfg.resolve(self.rhs) { let zero = dfg.make_constant(FieldElement::zero(), Type::bool()); return SimplifyResult::SimplifiedTo(zero); From b0fbc536dc432ba8d3ab6c12462758b11c2c21c4 Mon Sep 17 00:00:00 2001 From: guipublic <47281315+guipublic@users.noreply.github.com> Date: Wed, 2 Aug 2023 15:52:35 +0200 Subject: [PATCH 12/19] feat: Add support for bitshifts by distances known at runtime (#2072) * remove shr and shl from ssa instruction * move bit_shift_runtime test to test_data * code review, fix typo * Forbid signed integers for bit shift and fix brillig failing test * Check for signeness also during the delayed checks * Add missing method * Code review * Code review --- .../test_data/bit_shifts_runtime/Nargo.toml | 6 ++ .../test_data/bit_shifts_runtime/Prover.toml | 2 + .../test_data/bit_shifts_runtime/src/main.nr | 9 +++ .../src/brillig/brillig_gen/brillig_block.rs | 24 +++--- .../src/brillig/brillig_gen/brillig_fn.rs | 7 -- .../noirc_evaluator/src/brillig/brillig_ir.rs | 12 +++ .../src/brillig/brillig_ir/debug_show.rs | 5 +- .../src/ssa_refactor/acir_gen/mod.rs | 7 -- .../src/ssa_refactor/ir/instruction.rs | 20 ----- .../src/ssa_refactor/ssa_gen/context.rs | 77 +++++++++++++++---- crates/noirc_frontend/src/ast/expression.rs | 4 + .../noirc_frontend/src/hir/type_check/expr.rs | 20 ++++- crates/noirc_frontend/src/hir_def/expr.rs | 4 + crates/noirc_frontend/src/hir_def/types.rs | 4 + 14 files changed, 132 insertions(+), 69 deletions(-) create mode 100644 crates/nargo_cli/tests/test_data/bit_shifts_runtime/Nargo.toml create mode 100644 crates/nargo_cli/tests/test_data/bit_shifts_runtime/Prover.toml create mode 100644 crates/nargo_cli/tests/test_data/bit_shifts_runtime/src/main.nr diff --git a/crates/nargo_cli/tests/test_data/bit_shifts_runtime/Nargo.toml b/crates/nargo_cli/tests/test_data/bit_shifts_runtime/Nargo.toml new file mode 100644 index 00000000000..661f4f937d5 --- /dev/null +++ b/crates/nargo_cli/tests/test_data/bit_shifts_runtime/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "bit_shifts_runtime" +authors = [""] +compiler_version = "0.1" + +[dependencies] diff --git 
a/crates/nargo_cli/tests/test_data/bit_shifts_runtime/Prover.toml b/crates/nargo_cli/tests/test_data/bit_shifts_runtime/Prover.toml new file mode 100644 index 00000000000..98d8630792e --- /dev/null +++ b/crates/nargo_cli/tests/test_data/bit_shifts_runtime/Prover.toml @@ -0,0 +1,2 @@ +x = 64 +y = 1 \ No newline at end of file diff --git a/crates/nargo_cli/tests/test_data/bit_shifts_runtime/src/main.nr b/crates/nargo_cli/tests/test_data/bit_shifts_runtime/src/main.nr new file mode 100644 index 00000000000..271a1ecb880 --- /dev/null +++ b/crates/nargo_cli/tests/test_data/bit_shifts_runtime/src/main.nr @@ -0,0 +1,9 @@ +fn main(x: u64, y: u64) { + // runtime shifts on comptime values + assert(64 << y == 128); + assert(64 >> y == 32); + + // runtime shifts on runtime values + assert(x << y == 128); + assert(x >> y == 32); +} \ No newline at end of file diff --git a/crates/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs b/crates/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs index c7779533a8a..a9bbe189e57 100644 --- a/crates/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs +++ b/crates/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs @@ -336,10 +336,10 @@ impl<'block> BrilligBlock<'block> { dfg.instruction_results(instruction_id)[0], dfg, ); - + let heap_vec = self.brillig_context.extract_heap_vector(target_slice); self.brillig_context.radix_instruction( source, - self.function_context.extract_heap_vector(target_slice), + heap_vec, radix, limb_count, matches!(endianness, Endian::Big), @@ -355,10 +355,10 @@ impl<'block> BrilligBlock<'block> { ); let radix = self.brillig_context.make_constant(2_usize.into()); - + let heap_vec = self.brillig_context.extract_heap_vector(target_slice); self.brillig_context.radix_instruction( source, - self.function_context.extract_heap_vector(target_slice), + heap_vec, radix, limb_count, matches!(endianness, Endian::Big), @@ -589,7 +589,7 @@ impl<'block> BrilligBlock<'block> { dfg.instruction_results(instruction_id)[0], dfg, ); - let target_vector = self.function_context.extract_heap_vector(target_variable); + let target_vector = self.brillig_context.extract_heap_vector(target_variable); let item_value = self.convert_ssa_register_value(arguments[1], dfg); slice_push_back_operation( self.brillig_context, @@ -604,7 +604,7 @@ impl<'block> BrilligBlock<'block> { dfg.instruction_results(instruction_id)[0], dfg, ); - let target_vector = self.function_context.extract_heap_vector(target_variable); + let target_vector = self.brillig_context.extract_heap_vector(target_variable); let item_value = self.convert_ssa_register_value(arguments[1], dfg); slice_push_front_operation( self.brillig_context, @@ -618,7 +618,7 @@ impl<'block> BrilligBlock<'block> { let target_variable = self.function_context.create_variable(self.brillig_context, results[0], dfg); - let target_vector = self.function_context.extract_heap_vector(target_variable); + let target_vector = self.brillig_context.extract_heap_vector(target_variable); let pop_item = self.function_context.create_register_variable( self.brillig_context, @@ -643,7 +643,7 @@ impl<'block> BrilligBlock<'block> { ); let target_variable = self.function_context.create_variable(self.brillig_context, results[1], dfg); - let target_vector = self.function_context.extract_heap_vector(target_variable); + let target_vector = self.brillig_context.extract_heap_vector(target_variable); slice_pop_front_operation( self.brillig_context, @@ -659,7 +659,7 @@ impl<'block> BrilligBlock<'block> { let target_variable = 
self.function_context.create_variable(self.brillig_context, results[0], dfg); - let target_vector = self.function_context.extract_heap_vector(target_variable); + let target_vector = self.brillig_context.extract_heap_vector(target_variable); slice_insert_operation( self.brillig_context, target_vector, @@ -674,7 +674,7 @@ impl<'block> BrilligBlock<'block> { let target_variable = self.function_context.create_variable(self.brillig_context, results[0], dfg); - let target_vector = self.function_context.extract_heap_vector(target_variable); + let target_vector = self.brillig_context.extract_heap_vector(target_variable); let removed_item_register = self.function_context.create_register_variable( self.brillig_context, @@ -877,7 +877,7 @@ impl<'block> BrilligBlock<'block> { Type::Slice(_) => { let variable = self.function_context.create_variable(self.brillig_context, result, dfg); - let vector = self.function_context.extract_heap_vector(variable); + let vector = self.brillig_context.extract_heap_vector(variable); // Set the pointer to the current stack frame // The stack pointer will then be update by the caller of this method @@ -981,8 +981,6 @@ pub(crate) fn convert_ssa_binary_op_to_brillig_binary_op( BinaryOp::And => BinaryIntOp::And, BinaryOp::Or => BinaryIntOp::Or, BinaryOp::Xor => BinaryIntOp::Xor, - BinaryOp::Shl => BinaryIntOp::Shl, - BinaryOp::Shr => BinaryIntOp::Shr, }; BrilligBinaryOp::Integer { op: operation, bit_size } diff --git a/crates/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs b/crates/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs index 1a751d28b23..210d6da7be6 100644 --- a/crates/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs +++ b/crates/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs @@ -115,13 +115,6 @@ impl FunctionContext { } } - pub(crate) fn extract_heap_vector(&self, variable: RegisterOrMemory) -> HeapVector { - match variable { - RegisterOrMemory::HeapVector(vector) => vector, - _ => unreachable!("ICE: Expected vector, got {variable:?}"), - } - } - /// Collects the registers that a given variable is stored in. 
pub(crate) fn extract_registers(&self, variable: RegisterOrMemory) -> Vec { match variable { diff --git a/crates/noirc_evaluator/src/brillig/brillig_ir.rs b/crates/noirc_evaluator/src/brillig/brillig_ir.rs index ac0103dd9ed..4471d507579 100644 --- a/crates/noirc_evaluator/src/brillig/brillig_ir.rs +++ b/crates/noirc_evaluator/src/brillig/brillig_ir.rs @@ -951,6 +951,18 @@ impl BrilligContext { self.deallocate_register(end_value_register); self.deallocate_register(index_at_end_of_array); } + + pub(crate) fn extract_heap_vector(&mut self, variable: RegisterOrMemory) -> HeapVector { + match variable { + RegisterOrMemory::HeapVector(vector) => vector, + RegisterOrMemory::HeapArray(array) => { + let size = self.allocate_register(); + self.const_instruction(size, array.size.into()); + HeapVector { pointer: array.pointer, size } + } + _ => unreachable!("ICE: Expected vector, got {variable:?}"), + } + } } /// Type to encapsulate the binary operation types in Brillig diff --git a/crates/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs b/crates/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs index 75716e49177..2bb753de760 100644 --- a/crates/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs +++ b/crates/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs @@ -73,8 +73,9 @@ impl DebugToString for BinaryIntOp { BinaryIntOp::And => "&&".into(), BinaryIntOp::Or => "||".into(), BinaryIntOp::Xor => "^".into(), - BinaryIntOp::Shl => "<<".into(), - BinaryIntOp::Shr => ">>".into(), + BinaryIntOp::Shl | BinaryIntOp::Shr => { + unreachable!("bit shift should have been replaced") + } } } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/acir_gen/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/acir_gen/mod.rs index 4a7d2e46775..f00f15d8f05 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/acir_gen/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/acir_gen/mod.rs @@ -796,13 +796,6 @@ impl Context { bit_count, self.current_side_effects_enabled_var, ), - BinaryOp::Shl => self.acir_context.shift_left_var(lhs, rhs, binary_type), - BinaryOp::Shr => self.acir_context.shift_right_var( - lhs, - rhs, - binary_type, - self.current_side_effects_enabled_var, - ), BinaryOp::Xor => self.acir_context.xor_var(lhs, rhs, binary_type), BinaryOp::And => self.acir_context.and_var(lhs, rhs, binary_type), BinaryOp::Or => self.acir_context.or_var(lhs, rhs, binary_type), diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs index 6d74a99e002..a56b12ab875 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs @@ -760,16 +760,6 @@ impl Binary { return SimplifyResult::SimplifiedTo(zero); } } - BinaryOp::Shl => { - if rhs_is_zero { - return SimplifyResult::SimplifiedTo(self.lhs); - } - } - BinaryOp::Shr => { - if rhs_is_zero { - return SimplifyResult::SimplifiedTo(self.lhs); - } - } } SimplifyResult::None } @@ -825,8 +815,6 @@ impl BinaryOp { BinaryOp::And => None, BinaryOp::Or => None, BinaryOp::Xor => None, - BinaryOp::Shl => None, - BinaryOp::Shr => None, } } @@ -840,8 +828,6 @@ impl BinaryOp { BinaryOp::And => |x, y| Some(x & y), BinaryOp::Or => |x, y| Some(x | y), BinaryOp::Xor => |x, y| Some(x ^ y), - BinaryOp::Shl => |x, y| x.checked_shl(y.try_into().ok()?), - BinaryOp::Shr => |x, y| Some(x >> y), BinaryOp::Eq => |x, y| Some((x == y) as u128), BinaryOp::Lt => |x, y| Some((x < y) as u128), } @@ -882,10 +868,6 @@ pub(crate) enum BinaryOp { Or, /// Bitwise xor 
(^) Xor, - /// Shift lhs left by rhs bits (<<) - Shl, - /// Shift lhs right by rhs bits (>>) - Shr, } impl std::fmt::Display for BinaryOp { @@ -901,8 +883,6 @@ impl std::fmt::Display for BinaryOp { BinaryOp::And => write!(f, "and"), BinaryOp::Or => write!(f, "or"), BinaryOp::Xor => write!(f, "xor"), - BinaryOp::Shl => write!(f, "shl"), - BinaryOp::Shr => write!(f, "shr"), } } } diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs index c485200a53e..a526d93f85b 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs @@ -7,12 +7,12 @@ use iter_extended::vecmap; use noirc_errors::Location; use noirc_frontend::monomorphization::ast::{self, LocalId, Parameters}; use noirc_frontend::monomorphization::ast::{FuncId, Program}; -use noirc_frontend::Signedness; +use noirc_frontend::{BinaryOpKind, Signedness}; use crate::ssa_refactor::ir::dfg::DataFlowGraph; use crate::ssa_refactor::ir::function::FunctionId as IrFunctionId; use crate::ssa_refactor::ir::function::{Function, RuntimeType}; -use crate::ssa_refactor::ir::instruction::BinaryOp; +use crate::ssa_refactor::ir::instruction::{BinaryOp, Endian, Intrinsic}; use crate::ssa_refactor::ir::map::AtomicCounter; use crate::ssa_refactor::ir::types::{NumericType, Type}; use crate::ssa_refactor::ir::value::ValueId; @@ -236,6 +236,46 @@ impl<'a> FunctionContext<'a> { Values::empty() } + /// Insert ssa instructions which computes lhs << rhs by doing lhs*2^rhs + fn insert_shift_left(&mut self, lhs: ValueId, rhs: ValueId) -> ValueId { + let base = self.builder.field_constant(FieldElement::from(2_u128)); + let pow = self.pow(base, rhs); + self.builder.insert_binary(lhs, BinaryOp::Mul, pow) + } + + /// Insert ssa instructions which computes lhs << rhs by doing lhs/2^rhs + fn insert_shift_right(&mut self, lhs: ValueId, rhs: ValueId) -> ValueId { + let base = self.builder.field_constant(FieldElement::from(2_u128)); + let pow = self.pow(base, rhs); + self.builder.insert_binary(lhs, BinaryOp::Div, pow) + } + + /// Computes lhs^rhs via square&multiply, using the bits decomposition of rhs + fn pow(&mut self, lhs: ValueId, rhs: ValueId) -> ValueId { + let typ = self.builder.current_function.dfg.type_of_value(rhs); + if let Type::Numeric(NumericType::Unsigned { bit_size }) = typ { + let to_bits = self.builder.import_intrinsic_id(Intrinsic::ToBits(Endian::Little)); + let length = self.builder.field_constant(FieldElement::from(bit_size as i128)); + let result_types = vec![Type::Array(Rc::new(vec![Type::bool()]), bit_size as usize)]; + let rhs_bits = self.builder.insert_call(to_bits, vec![rhs, length], result_types)[0]; + let one = self.builder.field_constant(FieldElement::one()); + let mut r = one; + for i in 1..bit_size + 1 { + let r1 = self.builder.insert_binary(r, BinaryOp::Mul, r); + let a = self.builder.insert_binary(r1, BinaryOp::Mul, lhs); + let idx = self.builder.field_constant(FieldElement::from((bit_size - i) as i128)); + let b = self.builder.insert_array_get(rhs_bits, idx, Type::field()); + let r2 = self.builder.insert_binary(a, BinaryOp::Mul, b); + let c = self.builder.insert_binary(one, BinaryOp::Sub, b); + let r3 = self.builder.insert_binary(c, BinaryOp::Mul, r1); + r = self.builder.insert_binary(r2, BinaryOp::Add, r3); + } + r + } else { + unreachable!("Value must be unsigned in power operation"); + } + } + /// Insert a binary instruction at the end of the current block. 
/// Converts the form of the binary instruction as necessary /// (e.g. swapping arguments, inserting a not) to represent it in the IR. @@ -247,17 +287,22 @@ impl<'a> FunctionContext<'a> { mut rhs: ValueId, location: Location, ) -> Values { - let op = convert_operator(operator); - - if op == BinaryOp::Eq && matches!(self.builder.type_of_value(lhs), Type::Array(..)) { - return self.insert_array_equality(lhs, operator, rhs, location); - } - - if operator_requires_swapped_operands(operator) { - std::mem::swap(&mut lhs, &mut rhs); - } - - let mut result = self.builder.set_location(location).insert_binary(lhs, op, rhs); + let mut result = match operator { + BinaryOpKind::ShiftLeft => self.insert_shift_left(lhs, rhs), + BinaryOpKind::ShiftRight => self.insert_shift_right(lhs, rhs), + BinaryOpKind::Equal | BinaryOpKind::NotEqual + if matches!(self.builder.type_of_value(lhs), Type::Array(..)) => + { + return self.insert_array_equality(lhs, operator, rhs, location) + } + _ => { + let op = convert_operator(operator); + if operator_requires_swapped_operands(operator) { + std::mem::swap(&mut lhs, &mut rhs); + } + self.builder.set_location(location).insert_binary(lhs, op, rhs) + } + }; if let Some(max_bit_size) = operator_result_max_bit_size_to_truncate( operator, @@ -704,7 +749,6 @@ fn operator_result_max_bit_size_to_truncate( /// checking operator_requires_not and operator_requires_swapped_operands /// to represent the full operation correctly. fn convert_operator(op: noirc_frontend::BinaryOpKind) -> BinaryOp { - use noirc_frontend::BinaryOpKind; match op { BinaryOpKind::Add => BinaryOp::Add, BinaryOpKind::Subtract => BinaryOp::Sub, @@ -720,8 +764,9 @@ fn convert_operator(op: noirc_frontend::BinaryOpKind) -> BinaryOp { BinaryOpKind::And => BinaryOp::And, BinaryOpKind::Or => BinaryOp::Or, BinaryOpKind::Xor => BinaryOp::Xor, - BinaryOpKind::ShiftRight => BinaryOp::Shr, - BinaryOpKind::ShiftLeft => BinaryOp::Shl, + BinaryOpKind::ShiftRight | BinaryOpKind::ShiftLeft => unreachable!( + "ICE - bit shift operators do not exist in SSA and should have been replaced" + ), } } diff --git a/crates/noirc_frontend/src/ast/expression.rs b/crates/noirc_frontend/src/ast/expression.rs index b1829e8c1ee..b1170ff0ed0 100644 --- a/crates/noirc_frontend/src/ast/expression.rs +++ b/crates/noirc_frontend/src/ast/expression.rs @@ -268,6 +268,10 @@ impl BinaryOpKind { BinaryOpKind::Modulo => Token::Percent, } } + + pub fn is_bit_shift(&self) -> bool { + matches!(self, BinaryOpKind::ShiftRight | BinaryOpKind::ShiftLeft) + } } #[derive(PartialEq, PartialOrd, Eq, Ord, Hash, Debug, Copy, Clone)] diff --git a/crates/noirc_frontend/src/hir/type_check/expr.rs b/crates/noirc_frontend/src/hir/type_check/expr.rs index 12c11bf20e1..24ac5f3443e 100644 --- a/crates/noirc_frontend/src/hir/type_check/expr.rs +++ b/crates/noirc_frontend/src/hir/type_check/expr.rs @@ -12,7 +12,7 @@ use crate::{ }, node_interner::{DefinitionKind, ExprId, FuncId}, token::Attribute::Deprecated, - CompTime, Shared, TypeBinding, TypeVariableKind, UnaryOp, + CompTime, Shared, Signedness, TypeBinding, TypeVariableKind, UnaryOp, }; use super::{errors::TypeCheckError, TypeChecker}; @@ -954,7 +954,7 @@ impl<'interner> TypeChecker<'interner> { if op.is_bitwise() && (other.is_bindable() || other.is_field()) { let other = other.follow_bindings(); - + let kind = op.kind; // This will be an error if these types later resolve to a Field, or stay // polymorphic as the bit size will be unknown. 
Delay this error until the function // finishes resolving so we can still allow cases like `let x: u8 = 1 << 2;`. @@ -963,6 +963,12 @@ impl<'interner> TypeChecker<'interner> { Err(TypeCheckError::InvalidBitwiseOperationOnField { span }) } else if other.is_bindable() { Err(TypeCheckError::AmbiguousBitWidth { span }) + } else if kind.is_bit_shift() && other.is_signed() { + Err(TypeCheckError::TypeCannotBeUsed { + typ: other, + place: "bit shift", + span, + }) } else { Ok(()) } @@ -1001,8 +1007,14 @@ impl<'interner> TypeChecker<'interner> { span, }); } - let comptime = comptime_x.and(comptime_y, op.location.span); - Ok(Integer(comptime, *sign_x, *bit_width_x)) + if op.is_bit_shift() + && (*sign_x == Signedness::Signed || *sign_y == Signedness::Signed) + { + Err(TypeCheckError::InvalidInfixOp { kind: "Signed integer", span }) + } else { + let comptime = comptime_x.and(comptime_y, op.location.span); + Ok(Integer(comptime, *sign_x, *bit_width_x)) + } } (Integer(..), FieldElement(..)) | (FieldElement(..), Integer(..)) => { Err(TypeCheckError::IntegerAndFieldBinaryOperation { span }) diff --git a/crates/noirc_frontend/src/hir_def/expr.rs b/crates/noirc_frontend/src/hir_def/expr.rs index 5db9751591a..db7db0a803d 100644 --- a/crates/noirc_frontend/src/hir_def/expr.rs +++ b/crates/noirc_frontend/src/hir_def/expr.rs @@ -72,6 +72,10 @@ impl HirBinaryOp { use BinaryOpKind::*; matches!(self.kind, And | Or | Xor | ShiftRight | ShiftLeft) } + + pub fn is_bit_shift(&self) -> bool { + self.kind.is_bit_shift() + } } #[derive(Debug, Clone)] diff --git a/crates/noirc_frontend/src/hir_def/types.rs b/crates/noirc_frontend/src/hir_def/types.rs index df4c2f6c229..ff0a4e53fae 100644 --- a/crates/noirc_frontend/src/hir_def/types.rs +++ b/crates/noirc_frontend/src/hir_def/types.rs @@ -659,6 +659,10 @@ impl Type { matches!(self.follow_bindings(), Type::FieldElement(_)) } + pub fn is_signed(&self) -> bool { + matches!(self.follow_bindings(), Type::Integer(_, Signedness::Signed, _)) + } + fn contains_numeric_typevar(&self, target_id: TypeVariableId) -> bool { // True if the given type is a NamedGeneric with the target_id let named_generic_id_matches_target = |typ: &Type| { From 292724fc8b4d3791a87a829ce8d87a1a537dfcc5 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Wed, 2 Aug 2023 15:19:40 +0100 Subject: [PATCH 13/19] chore: create a `const` to hold the panic message (#2122) chore: create a const to hold the panic message --- crates/nargo_cli/src/main.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/crates/nargo_cli/src/main.rs b/crates/nargo_cli/src/main.rs index a73785c64c6..a79c43dad48 100644 --- a/crates/nargo_cli/src/main.rs +++ b/crates/nargo_cli/src/main.rs @@ -3,12 +3,12 @@ use color_eyre::{config::HookBuilder, eyre}; use nargo_cli::cli::start_cli; +const PANIC_MESSAGE: &str = "This is a bug. We may have already fixed this in newer versions of Nargo so try searching for similar issues at https://github.com/noir-lang/noir/issues/.\nIf there isn't an open issue for this bug, consider opening one at https://github.com/noir-lang/noir/issues/new?labels=bug&template=bug_report.yml"; + fn main() -> eyre::Result<()> { // Register a panic hook to display more readable panic messages to end-users - let (panic_hook, _) = HookBuilder::default() - .display_env_section(false) - .panic_section("This is a bug. 
We may have already fixed this in newer versions of Nargo so try searching for similar issues at https://github.com/noir-lang/noir/issues/.\nIf there isn't an open issue for this bug, consider opening one at https://github.com/noir-lang/noir/issues/new?labels=bug&template=bug_report.yml") - .into_hooks(); + let (panic_hook, _) = + HookBuilder::default().display_env_section(false).panic_section(PANIC_MESSAGE).into_hooks(); panic_hook.install(); start_cli() From 435ab3520d06b6b4f898d41a5ad403c5ddbd7771 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Wed, 2 Aug 2023 16:51:04 +0100 Subject: [PATCH 14/19] feat: replace boolean `AND`s with multiplication (#1954) * feat: replace boolean `AND`s with multiplication * chore: move optimisation to live within ssa-gen * chore: fill out message in `unreachable` * chore: remove `SimplifyResult::None` * chore: abstract away `SimplifyResult::SimplifiedToInstruction(None)` * Revert "chore: abstract away `SimplifyResult::SimplifiedToInstruction(None)`" This reverts commit a7736eb418944864ff9a67b07aea01e7ba0bdb17. * Revert "chore: remove `SimplifyResult::None`" This reverts commit 429ccd473883ac3b210dda3eac59d780a0b45a2f. * chore: add `SimplifyResult.instruction()` --- .../noirc_evaluator/src/ssa_refactor/ir/dfg.rs | 3 ++- .../src/ssa_refactor/ir/instruction.rs | 17 +++++++++++++++++ 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs index caf65c85a7e..6d74e49b03b 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs @@ -158,7 +158,8 @@ impl DataFlowGraph { SimplifiedToMultiple(simplification) } SimplifyResult::Remove => InstructionRemoved, - SimplifyResult::None => { + result @ (SimplifyResult::SimplifiedToInstruction(_) | SimplifyResult::None) => { + let instruction = result.instruction().unwrap_or(instruction); let id = self.make_instruction(instruction, ctrl_typevars); self.blocks[block].insert_instruction(id); if let Some(location) = location { diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs index a56b12ab875..afb47d423e2 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs @@ -736,6 +736,11 @@ impl Binary { if dfg.resolve(self.lhs) == dfg.resolve(self.rhs) { return SimplifyResult::SimplifiedTo(self.lhs); } + if operand_type == Type::bool() { + // Boolean AND is equivalent to multiplication, which is a cheaper operation. + let instruction = Instruction::binary(BinaryOp::Mul, self.lhs, self.rhs); + return SimplifyResult::SimplifiedToInstruction(instruction); + } } BinaryOp::Or => { if lhs_is_zero { @@ -898,9 +903,21 @@ pub(crate) enum SimplifyResult { /// a function such as a tuple SimplifiedToMultiple(Vec), + /// Replace this function with an simpler but equivalent function. 
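Concretely, the new variant below lets a simplification return a replacement instruction rather than a value; its first user is the boolean `AND` arm above, which rewrites `x & y` into `x * y` because the two agree on 0/1 operands and a multiplication is cheaper to arithmetize. A standalone sanity check of that equivalence (not compiler code):

```rust
fn main() {
    // For boolean (0/1) operands, bitwise AND and multiplication coincide,
    // which is what justifies replacing `BinaryOp::And` with `BinaryOp::Mul`.
    for a in 0u8..=1 {
        for b in 0u8..=1 {
            assert_eq!(a & b, a * b);
        }
    }
}
```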
+ SimplifiedToInstruction(Instruction), + /// Remove the instruction, it is unnecessary Remove, /// Instruction could not be simplified None, } + +impl SimplifyResult { + pub(crate) fn instruction(self) -> Option { + match self { + SimplifyResult::SimplifiedToInstruction(instruction) => Some(instruction), + _ => None, + } + } +} From 8a1ace792c4550ab1ce8c6044794abdb39d02872 Mon Sep 17 00:00:00 2001 From: jfecher Date: Wed, 2 Aug 2023 11:06:00 -0500 Subject: [PATCH 15/19] fix: Rename `Option::value` to `Option::_value` (#2127) * Rename Option::value to Option::_value * Add unwrap_unchecked method --- noir_stdlib/src/option.nr | 33 ++++++++++++++++++++------------- 1 file changed, 20 insertions(+), 13 deletions(-) diff --git a/noir_stdlib/src/option.nr b/noir_stdlib/src/option.nr index 5cc4dfae887..919c40fd9e0 100644 --- a/noir_stdlib/src/option.nr +++ b/noir_stdlib/src/option.nr @@ -1,17 +1,17 @@ struct Option { _is_some: bool, - value: T, + _value: T, } impl Option { /// Constructs a None value fn none() -> Self { - Self { _is_some: false, value: crate::unsafe::zeroed() } + Self { _is_some: false, _value: crate::unsafe::zeroed() } } /// Constructs a Some wrapper around the given value - fn some(value: T) -> Self { - Self { _is_some: true, value } + fn some(_value: T) -> Self { + Self { _is_some: true, _value } } /// True if this Option is None @@ -27,13 +27,20 @@ impl Option { /// Asserts `self.is_some()` and returns the wrapped value. fn unwrap(self) -> T { assert(self._is_some); - self.value + self._value + } + + /// Returns the inner value without asserting `self.is_some()` + /// Note that if `self` is `None`, there is no guarantee what value will be returned, + /// only that it will be of type `T`. + fn unwrap_unchecked(self) -> T { + self._value } /// Returns the wrapped value if `self.is_some()`. Otherwise, returns the given default value. fn unwrap_or(self, default: T) -> T { if self._is_some { - self.value + self._value } else { default } @@ -43,7 +50,7 @@ impl Option { /// a default value. fn unwrap_or_else(self, default: fn() -> T) -> T { if self._is_some { - self.value + self._value } else { default() } @@ -52,7 +59,7 @@ impl Option { /// If self is `Some(x)`, this returns `Some(f(x))`. Otherwise, this returns `None`. fn map(self, f: fn(T) -> U) -> Option { if self._is_some { - Option::some(f(self.value)) + Option::some(f(self._value)) } else { Option::none() } @@ -61,7 +68,7 @@ impl Option { /// If self is `Some(x)`, this returns `f(x)`. Otherwise, this returns the given default value. fn map_or(self, default: U, f: fn(T) -> U) -> U { if self._is_some { - f(self.value) + f(self._value) } else { default } @@ -70,7 +77,7 @@ impl Option { /// If self is `Some(x)`, this returns `f(x)`. Otherwise, this returns `default()`. fn map_or_else(self, default: fn() -> U, f: fn(T) -> U) -> U { if self._is_some { - f(self.value) + f(self._value) } else { default() } @@ -91,7 +98,7 @@ impl Option { /// In some languages this function is called `flat_map` or `bind`. fn and_then(self, f: fn(T) -> Option) -> Option { if self._is_some { - f(self.value) + f(self._value) } else { Option::none() } @@ -135,7 +142,7 @@ impl Option { /// Otherwise, this returns `None` fn filter(self, predicate: fn(T) -> bool) -> Self { if self._is_some { - if predicate(self.value) { + if predicate(self._value) { self } else { Option::none() @@ -149,7 +156,7 @@ impl Option { /// This returns None if the outer Option is None. Otherwise, this returns the inner Option. 
fn flatten(option: Option>) -> Option { if option._is_some { - option.value + option._value } else { Option::none() } From 47b372c1762ed1184bf2ed9b90d7dc3e2c161880 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Wed, 2 Aug 2023 17:17:29 +0100 Subject: [PATCH 16/19] feat: Optimize away constant calls to black box functions (#1981) * feat: optimize away constant calls to black box functions * chore: remove `use SimplifyResult::*` * chore: remove unnecessary match arms * Update crates/noirc_evaluator/src/ssa_refactor/ir/instruction/call.rs * Update crates/noirc_evaluator/src/ssa_refactor/ir/instruction/call.rs --------- Co-authored-by: jfecher --- .../src/ssa_refactor/ir/instruction.rs | 156 +------- .../src/ssa_refactor/ir/instruction/call.rs | 334 ++++++++++++++++++ 2 files changed, 338 insertions(+), 152 deletions(-) create mode 100644 crates/noirc_evaluator/src/ssa_refactor/ir/instruction/call.rs diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs index afb47d423e2..7edb74f4206 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs @@ -1,5 +1,3 @@ -use std::rc::Rc; - use acvm::{acir::BlackBoxFunc, FieldElement}; use iter_extended::vecmap; use num_bigint::BigUint; @@ -14,6 +12,10 @@ use super::{ value::{Value, ValueId}, }; +mod call; + +use call::simplify_call; + /// Reference to an instruction /// /// Note that InstructionIds are not unique. That is, two InstructionIds @@ -385,156 +387,6 @@ fn simplify_cast(value: ValueId, dst_typ: &Type, dfg: &mut DataFlowGraph) -> Sim } } -/// Try to simplify this call instruction. If the instruction can be simplified to a known value, -/// that value is returned. Otherwise None is returned. 
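Both the removed `simplify_call` below and its replacement in the new `call.rs` gate every fold on the same guard: collecting each argument's constant value into an `Option` of a `Vec`, which is `Some` only when every argument is a known constant. A minimal standalone model of that guard (illustrative types, not the compiler's):

```rust
/// `Some(values)` only if every argument is a compile-time constant;
/// a single unknown argument yields `None` and blocks the fold.
fn all_constants(args: &[Option<u128>]) -> Option<Vec<u128>> {
    args.iter().copied().collect()
}

fn main() {
    assert_eq!(all_constants(&[Some(1), Some(2)]), Some(vec![1, 2]));
    assert_eq!(all_constants(&[Some(1), None]), None);
}
```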
-fn simplify_call(func: ValueId, arguments: &[ValueId], dfg: &mut DataFlowGraph) -> SimplifyResult { - use SimplifyResult::*; - let intrinsic = match &dfg[func] { - Value::Intrinsic(intrinsic) => *intrinsic, - _ => return None, - }; - - let constant_args: Option> = - arguments.iter().map(|value_id| dfg.get_numeric_constant(*value_id)).collect(); - - match intrinsic { - Intrinsic::ToBits(endian) => { - if let Some(constant_args) = constant_args { - let field = constant_args[0]; - let limb_count = constant_args[1].to_u128() as u32; - SimplifiedTo(constant_to_radix(endian, field, 2, limb_count, dfg)) - } else { - None - } - } - Intrinsic::ToRadix(endian) => { - if let Some(constant_args) = constant_args { - let field = constant_args[0]; - let radix = constant_args[1].to_u128() as u32; - let limb_count = constant_args[2].to_u128() as u32; - SimplifiedTo(constant_to_radix(endian, field, radix, limb_count, dfg)) - } else { - None - } - } - Intrinsic::ArrayLen => { - let slice = dfg.get_array_constant(arguments[0]); - if let Some((slice, _)) = slice { - SimplifiedTo(dfg.make_constant((slice.len() as u128).into(), Type::field())) - } else if let Some(length) = dfg.try_get_array_length(arguments[0]) { - SimplifiedTo(dfg.make_constant((length as u128).into(), Type::field())) - } else { - None - } - } - Intrinsic::SlicePushBack => { - let slice = dfg.get_array_constant(arguments[0]); - if let (Some((mut slice, element_type)), elem) = (slice, arguments[1]) { - slice.push_back(elem); - let new_slice = dfg.make_array(slice, element_type); - SimplifiedTo(new_slice) - } else { - None - } - } - Intrinsic::SlicePushFront => { - let slice = dfg.get_array_constant(arguments[0]); - if let (Some((mut slice, element_type)), elem) = (slice, arguments[1]) { - slice.push_front(elem); - let new_slice = dfg.make_array(slice, element_type); - SimplifiedTo(new_slice) - } else { - None - } - } - Intrinsic::SlicePopBack => { - let slice = dfg.get_array_constant(arguments[0]); - if let Some((mut slice, element_type)) = slice { - let elem = - slice.pop_back().expect("There are no elements in this slice to be removed"); - let new_slice = dfg.make_array(slice, element_type); - SimplifiedToMultiple(vec![new_slice, elem]) - } else { - None - } - } - Intrinsic::SlicePopFront => { - let slice = dfg.get_array_constant(arguments[0]); - if let Some((mut slice, element_type)) = slice { - let elem = - slice.pop_front().expect("There are no elements in this slice to be removed"); - let new_slice = dfg.make_array(slice, element_type); - SimplifiedToMultiple(vec![elem, new_slice]) - } else { - None - } - } - Intrinsic::SliceInsert => { - let slice = dfg.get_array_constant(arguments[0]); - let index = dfg.get_numeric_constant(arguments[1]); - if let (Some((mut slice, element_type)), Some(index), value) = - (slice, index, arguments[2]) - { - slice.insert(index.to_u128() as usize, value); - let new_slice = dfg.make_array(slice, element_type); - SimplifiedTo(new_slice) - } else { - None - } - } - Intrinsic::SliceRemove => { - let slice = dfg.get_array_constant(arguments[0]); - let index = dfg.get_numeric_constant(arguments[1]); - if let (Some((mut slice, element_type)), Some(index)) = (slice, index) { - let removed_elem = slice.remove(index.to_u128() as usize); - let new_slice = dfg.make_array(slice, element_type); - SimplifiedToMultiple(vec![new_slice, removed_elem]) - } else { - None - } - } - Intrinsic::BlackBox(_) | Intrinsic::Println | Intrinsic::Sort => None, - } -} - -/// Returns a Value::Array of constants corresponding to the limbs 
of the radix decomposition. -fn constant_to_radix( - endian: Endian, - field: FieldElement, - radix: u32, - limb_count: u32, - dfg: &mut DataFlowGraph, -) -> ValueId { - let bit_size = u32::BITS - (radix - 1).leading_zeros(); - let radix_big = BigUint::from(radix); - assert_eq!(BigUint::from(2u128).pow(bit_size), radix_big, "ICE: Radix must be a power of 2"); - let big_integer = BigUint::from_bytes_be(&field.to_be_bytes()); - - // Decompose the integer into its radix digits in little endian form. - let decomposed_integer = big_integer.to_radix_le(radix); - let mut limbs = vecmap(0..limb_count, |i| match decomposed_integer.get(i as usize) { - Some(digit) => FieldElement::from_be_bytes_reduce(&[*digit]), - None => FieldElement::zero(), - }); - if endian == Endian::Big { - limbs.reverse(); - } - - // For legacy reasons (see #617) the to_radix interface supports 256 bits even though - // FieldElement::max_num_bits() is only 254 bits. Any limbs beyond the specified count - // become zero padding. - let max_decomposable_bits: u32 = 256; - let limb_count_with_padding = max_decomposable_bits / bit_size; - while limbs.len() < limb_count_with_padding as usize { - limbs.push(FieldElement::zero()); - } - let result_constants: im::Vector = - limbs.into_iter().map(|limb| dfg.make_constant(limb, Type::unsigned(bit_size))).collect(); - - let typ = Type::Array(Rc::new(vec![Type::unsigned(bit_size)]), result_constants.len()); - dfg.make_array(result_constants, typ) -} - /// The possible return values for Instruction::return_types pub(crate) enum InstructionResultType { /// The result type of this instruction matches that of this operand diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction/call.rs b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction/call.rs new file mode 100644 index 00000000000..96998d92fcf --- /dev/null +++ b/crates/noirc_evaluator/src/ssa_refactor/ir/instruction/call.rs @@ -0,0 +1,334 @@ +use std::rc::Rc; + +use acvm::{acir::BlackBoxFunc, BlackBoxResolutionError, FieldElement}; +use iter_extended::vecmap; +use num_bigint::BigUint; + +use crate::ssa_refactor::ir::{ + dfg::DataFlowGraph, + instruction::Intrinsic, + map::Id, + types::Type, + value::{Value, ValueId}, +}; + +use super::{Endian, SimplifyResult}; + +/// Try to simplify this call instruction. If the instruction can be simplified to a known value, +/// that value is returned. Otherwise None is returned. 
+pub(super) fn simplify_call( + func: ValueId, + arguments: &[ValueId], + dfg: &mut DataFlowGraph, +) -> SimplifyResult { + let intrinsic = match &dfg[func] { + Value::Intrinsic(intrinsic) => *intrinsic, + _ => return SimplifyResult::None, + }; + + let constant_args: Option> = + arguments.iter().map(|value_id| dfg.get_numeric_constant(*value_id)).collect(); + + match intrinsic { + Intrinsic::ToBits(endian) => { + if let Some(constant_args) = constant_args { + let field = constant_args[0]; + let limb_count = constant_args[1].to_u128() as u32; + SimplifyResult::SimplifiedTo(constant_to_radix(endian, field, 2, limb_count, dfg)) + } else { + SimplifyResult::None + } + } + Intrinsic::ToRadix(endian) => { + if let Some(constant_args) = constant_args { + let field = constant_args[0]; + let radix = constant_args[1].to_u128() as u32; + let limb_count = constant_args[2].to_u128() as u32; + SimplifyResult::SimplifiedTo(constant_to_radix( + endian, field, radix, limb_count, dfg, + )) + } else { + SimplifyResult::None + } + } + Intrinsic::ArrayLen => { + let slice = dfg.get_array_constant(arguments[0]); + if let Some((slice, _)) = slice { + SimplifyResult::SimplifiedTo( + dfg.make_constant((slice.len() as u128).into(), Type::field()), + ) + } else if let Some(length) = dfg.try_get_array_length(arguments[0]) { + SimplifyResult::SimplifiedTo( + dfg.make_constant((length as u128).into(), Type::field()), + ) + } else { + SimplifyResult::None + } + } + Intrinsic::SlicePushBack => { + let slice = dfg.get_array_constant(arguments[0]); + if let (Some((mut slice, element_type)), elem) = (slice, arguments[1]) { + slice.push_back(elem); + let new_slice = dfg.make_array(slice, element_type); + SimplifyResult::SimplifiedTo(new_slice) + } else { + SimplifyResult::None + } + } + Intrinsic::SlicePushFront => { + let slice = dfg.get_array_constant(arguments[0]); + if let (Some((mut slice, element_type)), elem) = (slice, arguments[1]) { + slice.push_front(elem); + let new_slice = dfg.make_array(slice, element_type); + SimplifyResult::SimplifiedTo(new_slice) + } else { + SimplifyResult::None + } + } + Intrinsic::SlicePopBack => { + let slice = dfg.get_array_constant(arguments[0]); + if let Some((mut slice, element_type)) = slice { + let elem = + slice.pop_back().expect("There are no elements in this slice to be removed"); + let new_slice = dfg.make_array(slice, element_type); + SimplifyResult::SimplifiedToMultiple(vec![new_slice, elem]) + } else { + SimplifyResult::None + } + } + Intrinsic::SlicePopFront => { + let slice = dfg.get_array_constant(arguments[0]); + if let Some((mut slice, element_type)) = slice { + let elem = + slice.pop_front().expect("There are no elements in this slice to be removed"); + let new_slice = dfg.make_array(slice, element_type); + SimplifyResult::SimplifiedToMultiple(vec![elem, new_slice]) + } else { + SimplifyResult::None + } + } + Intrinsic::SliceInsert => { + let slice = dfg.get_array_constant(arguments[0]); + let index = dfg.get_numeric_constant(arguments[1]); + if let (Some((mut slice, element_type)), Some(index), value) = + (slice, index, arguments[2]) + { + slice.insert(index.to_u128() as usize, value); + let new_slice = dfg.make_array(slice, element_type); + SimplifyResult::SimplifiedTo(new_slice) + } else { + SimplifyResult::None + } + } + Intrinsic::SliceRemove => { + let slice = dfg.get_array_constant(arguments[0]); + let index = dfg.get_numeric_constant(arguments[1]); + if let (Some((mut slice, element_type)), Some(index)) = (slice, index) { + let removed_elem = 
slice.remove(index.to_u128() as usize); + let new_slice = dfg.make_array(slice, element_type); + SimplifyResult::SimplifiedToMultiple(vec![new_slice, removed_elem]) + } else { + SimplifyResult::None + } + } + Intrinsic::BlackBox(bb_func) => simplify_black_box_func(bb_func, arguments, dfg), + Intrinsic::Println | Intrinsic::Sort => SimplifyResult::None, + } +} + +/// Try to simplify this black box call. If the call can be simplified to a known value, +/// that value is returned. Otherwise [`SimplifyResult::None`] is returned. +fn simplify_black_box_func( + bb_func: BlackBoxFunc, + arguments: &[ValueId], + dfg: &mut DataFlowGraph, +) -> SimplifyResult { + match bb_func { + BlackBoxFunc::SHA256 => simplify_hash(dfg, arguments, acvm::blackbox_solver::sha256), + BlackBoxFunc::Blake2s => simplify_hash(dfg, arguments, acvm::blackbox_solver::blake2s), + BlackBoxFunc::Keccak256 => { + match (dfg.get_array_constant(arguments[0]), dfg.get_numeric_constant(arguments[1])) { + (Some((input, _)), Some(num_bytes)) if array_is_constant(dfg, &input) => { + let input_bytes: Vec = to_u8_vec(dfg, input); + + let num_bytes = num_bytes.to_u128() as usize; + let truncated_input_bytes = &input_bytes[0..num_bytes]; + let hash = acvm::blackbox_solver::keccak256(truncated_input_bytes) + .expect("Rust solvable black box function should not fail"); + + let hash_values = + vecmap(hash, |byte| FieldElement::from_be_bytes_reduce(&[byte])); + + let result_array = make_constant_array(dfg, hash_values, Type::unsigned(8)); + SimplifyResult::SimplifiedTo(result_array) + } + _ => SimplifyResult::None, + } + } + BlackBoxFunc::HashToField128Security => match dfg.get_array_constant(arguments[0]) { + Some((input, _)) if array_is_constant(dfg, &input) => { + let input_bytes: Vec = to_u8_vec(dfg, input); + + let field = acvm::blackbox_solver::hash_to_field_128_security(&input_bytes) + .expect("Rust solvable black box function should not fail"); + + let field_constant = dfg.make_constant(field, Type::field()); + SimplifyResult::SimplifiedTo(field_constant) + } + _ => SimplifyResult::None, + }, + + BlackBoxFunc::EcdsaSecp256k1 => { + simplify_signature(dfg, arguments, acvm::blackbox_solver::ecdsa_secp256k1_verify) + } + BlackBoxFunc::EcdsaSecp256r1 => { + simplify_signature(dfg, arguments, acvm::blackbox_solver::ecdsa_secp256r1_verify) + } + + BlackBoxFunc::FixedBaseScalarMul | BlackBoxFunc::SchnorrVerify | BlackBoxFunc::Pedersen => { + // Currently unsolvable here as we rely on an implementation in the backend. + SimplifyResult::None + } + + BlackBoxFunc::RecursiveAggregation => SimplifyResult::None, + + BlackBoxFunc::AND => { + unreachable!("ICE: `BlackBoxFunc::AND` calls should be transformed into a `BinaryOp`") + } + BlackBoxFunc::XOR => { + unreachable!("ICE: `BlackBoxFunc::XOR` calls should be transformed into a `BinaryOp`") + } + BlackBoxFunc::RANGE => { + unreachable!( + "ICE: `BlackBoxFunc::RANGE` calls should be transformed into a `Instruction::Cast`" + ) + } + } +} + +fn make_constant_array(dfg: &mut DataFlowGraph, results: Vec, typ: Type) -> ValueId { + let result_constants = vecmap(results, |element| dfg.make_constant(element, typ.clone())); + + let typ = Type::Array(Rc::new(vec![typ]), result_constants.len()); + dfg.make_array(result_constants.into(), typ) +} + +/// Returns a Value::Array of constants corresponding to the limbs of the radix decomposition. 
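`constant_to_radix` below folds constant `to_radix`/`to_bits` calls by decomposing the value into little-endian base-`radix` digits and zero-padding up to the requested limb count. A plain-`u128` model of the decomposition (a sketch assuming the value fits in a `u128`):

```rust
fn to_radix_le(mut value: u128, radix: u128, limb_count: usize) -> Vec<u128> {
    let mut limbs = Vec::with_capacity(limb_count);
    for _ in 0..limb_count {
        limbs.push(value % radix); // least-significant digit first
        value /= radix;
    }
    limbs // digits beyond the value's magnitude are zero padding
}

fn main() {
    // 11 = 0b1011 decomposed into 8 little-endian bits
    assert_eq!(to_radix_le(11, 2, 8), vec![1, 1, 0, 1, 0, 0, 0, 0]);
}
```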
+fn constant_to_radix( + endian: Endian, + field: FieldElement, + radix: u32, + limb_count: u32, + dfg: &mut DataFlowGraph, +) -> ValueId { + let bit_size = u32::BITS - (radix - 1).leading_zeros(); + let radix_big = BigUint::from(radix); + assert_eq!(BigUint::from(2u128).pow(bit_size), radix_big, "ICE: Radix must be a power of 2"); + let big_integer = BigUint::from_bytes_be(&field.to_be_bytes()); + + // Decompose the integer into its radix digits in little endian form. + let decomposed_integer = big_integer.to_radix_le(radix); + let mut limbs = vecmap(0..limb_count, |i| match decomposed_integer.get(i as usize) { + Some(digit) => FieldElement::from_be_bytes_reduce(&[*digit]), + None => FieldElement::zero(), + }); + if endian == Endian::Big { + limbs.reverse(); + } + + // For legacy reasons (see #617) the to_radix interface supports 256 bits even though + // FieldElement::max_num_bits() is only 254 bits. Any limbs beyond the specified count + // become zero padding. + let max_decomposable_bits: u32 = 256; + let limb_count_with_padding = max_decomposable_bits / bit_size; + while limbs.len() < limb_count_with_padding as usize { + limbs.push(FieldElement::zero()); + } + + make_constant_array(dfg, limbs, Type::unsigned(bit_size)) +} + +fn to_u8_vec(dfg: &DataFlowGraph, values: im::Vector>) -> Vec { + values + .iter() + .map(|id| { + let field = dfg + .get_numeric_constant(*id) + .expect("value id from array should point at constant"); + *field.to_be_bytes().last().unwrap() + }) + .collect() +} + +fn array_is_constant(dfg: &DataFlowGraph, values: &im::Vector>) -> bool { + values.iter().all(|value| dfg.get_numeric_constant(*value).is_some()) +} + +fn simplify_hash( + dfg: &mut DataFlowGraph, + arguments: &[ValueId], + hash_function: fn(&[u8]) -> Result<[u8; 32], BlackBoxResolutionError>, +) -> SimplifyResult { + match dfg.get_array_constant(arguments[0]) { + Some((input, _)) if array_is_constant(dfg, &input) => { + let input_bytes: Vec = to_u8_vec(dfg, input); + + let hash = hash_function(&input_bytes) + .expect("Rust solvable black box function should not fail"); + + let hash_values = vecmap(hash, |byte| FieldElement::from_be_bytes_reduce(&[byte])); + + let result_array = make_constant_array(dfg, hash_values, Type::unsigned(8)); + SimplifyResult::SimplifiedTo(result_array) + } + _ => SimplifyResult::None, + } +} + +type ECDSASignatureVerifier = fn( + hashed_msg: &[u8], + public_key_x: &[u8; 32], + public_key_y: &[u8; 32], + signature: &[u8; 64], +) -> Result; +fn simplify_signature( + dfg: &mut DataFlowGraph, + arguments: &[ValueId], + signature_verifier: ECDSASignatureVerifier, +) -> SimplifyResult { + match ( + dfg.get_array_constant(arguments[0]), + dfg.get_array_constant(arguments[1]), + dfg.get_array_constant(arguments[2]), + dfg.get_array_constant(arguments[3]), + ) { + ( + Some((public_key_x, _)), + Some((public_key_y, _)), + Some((signature, _)), + Some((hashed_message, _)), + ) if array_is_constant(dfg, &public_key_x) + && array_is_constant(dfg, &public_key_y) + && array_is_constant(dfg, &signature) + && array_is_constant(dfg, &hashed_message) => + { + let public_key_x: [u8; 32] = to_u8_vec(dfg, public_key_x) + .try_into() + .expect("ECDSA public key fields are 32 bytes"); + let public_key_y: [u8; 32] = to_u8_vec(dfg, public_key_y) + .try_into() + .expect("ECDSA public key fields are 32 bytes"); + let signature: [u8; 64] = + to_u8_vec(dfg, signature).try_into().expect("ECDSA signatures are 64 bytes"); + let hashed_message: Vec = to_u8_vec(dfg, hashed_message); + + let valid_signature = + 
signature_verifier(&hashed_message, &public_key_x, &public_key_y, &signature) + .expect("Rust solvable black box function should not fail"); + + let valid_signature = dfg.make_constant(valid_signature.into(), Type::bool()); + SimplifyResult::SimplifiedTo(valid_signature) + } + _ => SimplifyResult::None, + } +} From 1c21d0caf1e3b3a92266b4b8238f3e6e6c394d05 Mon Sep 17 00:00:00 2001 From: Maxim Vezenov Date: Wed, 2 Aug 2023 17:21:35 +0100 Subject: [PATCH 17/19] fix(globals): Accurately filter literals for resolving globals (#2126) accurately filter literals for resolving globals --- .../tests/test_data/global_consts/src/main.nr | 7 +++++++ .../tests/test_data/strings/src/main.nr | 6 +++++- .../src/hir/def_collector/dc_crate.rs | 20 ++++++++++--------- 3 files changed, 23 insertions(+), 10 deletions(-) diff --git a/crates/nargo_cli/tests/test_data/global_consts/src/main.nr b/crates/nargo_cli/tests/test_data/global_consts/src/main.nr index 9bcca2b8071..2ed6e4593dd 100644 --- a/crates/nargo_cli/tests/test_data/global_consts/src/main.nr +++ b/crates/nargo_cli/tests/test_data/global_consts/src/main.nr @@ -12,12 +12,19 @@ struct Dummy { y: [Field; foo::MAGIC_NUMBER] } +struct Test { + v: Field, +} +global VALS: [Test; 1] = [Test { v: 100 }]; +global NESTED = [VALS, VALS]; + fn main(a: [Field; M + N - N], b: [Field; 30 + N / 2], c : pub [Field; foo::MAGIC_NUMBER], d: [Field; foo::bar::N]) { let test_struct = Dummy { x: d, y: c }; for i in 0..foo::MAGIC_NUMBER { assert(c[i] == foo::MAGIC_NUMBER); assert(test_struct.y[i] == foo::MAGIC_NUMBER); + assert(test_struct.y[i] != NESTED[1][0].v); } assert(N != M); diff --git a/crates/nargo_cli/tests/test_data/strings/src/main.nr b/crates/nargo_cli/tests/test_data/strings/src/main.nr index bee2370201c..edf5fff55b4 100644 --- a/crates/nargo_cli/tests/test_data/strings/src/main.nr +++ b/crates/nargo_cli/tests/test_data/strings/src/main.nr @@ -1,10 +1,13 @@ use dep::std; +// Test global string literals +global HELLO_WORLD = "hello world"; + fn main(message : pub str<11>, y : Field, hex_as_string : str<4>, hex_as_field : Field) { let mut bad_message = "hello world"; assert(message == "hello world"); - bad_message = "helld world"; + assert(message == HELLO_WORLD); let x = 10; let z = x * 5; std::println(10); @@ -16,6 +19,7 @@ fn main(message : pub str<11>, y : Field, hex_as_string : str<4>, hex_as_field : assert(y == 5); // Change to y != 5 to see how the later print statements are not called std::println(array); + bad_message = "helld world"; std::println(bad_message); assert(message != bad_message); diff --git a/crates/noirc_frontend/src/hir/def_collector/dc_crate.rs b/crates/noirc_frontend/src/hir/def_collector/dc_crate.rs index e974961a405..76fbea289be 100644 --- a/crates/noirc_frontend/src/hir/def_collector/dc_crate.rs +++ b/crates/noirc_frontend/src/hir/def_collector/dc_crate.rs @@ -13,7 +13,7 @@ use crate::hir::Context; use crate::node_interner::{FuncId, NodeInterner, StmtId, StructId, TypeAliasId}; use crate::{ ExpressionKind, Generics, Ident, LetStatement, NoirFunction, NoirStruct, NoirTypeAlias, - ParsedModule, Shared, Type, TypeBinding, UnresolvedGenerics, UnresolvedType, + ParsedModule, Shared, Type, TypeBinding, UnresolvedGenerics, UnresolvedType, Literal, }; use fm::FileId; use iter_extended::vecmap; @@ -161,10 +161,10 @@ impl DefCollector { // // Additionally, we must resolve integer globals before structs since structs may refer to // the values of integer globals as numeric generics. 
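The rename from `filter_integer_globals` to `filter_literal_globals` below widens the first resolution pass to all literal globals except arrays: strings and numbers can safely be resolved before structs (structs may refer to numeric globals as generics), while array literals are deferred because they can contain complex values such as structs. A self-contained sketch of the partition, using illustrative stand-ins for the frontend's expression kinds:

```rust
#[derive(Debug, PartialEq)]
enum Global { Integer(u128), Str(&'static str), Array(Vec<u128>) }

fn filter_literal_globals(globals: Vec<Global>) -> (Vec<Global>, Vec<Global>) {
    // Keep simple literals in the first group; defer array literals like any other global.
    globals.into_iter().partition(|g| matches!(g, Global::Integer(_) | Global::Str(_)))
}

fn main() {
    let (first_pass, second_pass) = filter_literal_globals(vec![
        Global::Integer(5),
        Global::Array(vec![1, 2]), // may contain struct values, so resolve later
        Global::Str("hello world"),
    ]);
    assert_eq!(first_pass.len(), 2);
    assert_eq!(second_pass, vec![Global::Array(vec![1, 2])]);
}
```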
- let (integer_globals, other_globals) = - filter_integer_globals(def_collector.collected_globals); + let (literal_globals, other_globals) = + filter_literal_globals(def_collector.collected_globals); - let mut file_global_ids = resolve_globals(context, integer_globals, crate_id, errors); + let mut file_global_ids = resolve_globals(context, literal_globals, crate_id, errors); resolve_type_aliases(context, def_collector.collected_type_aliases, crate_id, errors); @@ -274,13 +274,15 @@ where } /// Separate the globals Vec into two. The first element in the tuple will be the -/// integer literal globals, and the second will be all other globals. -fn filter_integer_globals( +/// literal globals, except for arrays, and the second will be all other globals. +/// We exclude array literals as they can contain complex types +fn filter_literal_globals( globals: Vec, ) -> (Vec, Vec) { - globals - .into_iter() - .partition(|global| matches!(&global.stmt_def.expression.kind, ExpressionKind::Literal(_))) + globals.into_iter().partition(|global| match &global.stmt_def.expression.kind { + ExpressionKind::Literal(literal) => !matches!(literal, Literal::Array(_)), + _ => false, + }) } fn resolve_globals( From 27ab78f3e298e94202b8dcc9ea44075a185a78e7 Mon Sep 17 00:00:00 2001 From: Maxim Vezenov Date: Wed, 2 Aug 2023 19:15:45 +0100 Subject: [PATCH 18/19] chore: Use `--show-output` flag on execution rather than compilation (#2116) * move show-output to occur on execute rather than compilation * remove assert(false) from test * fix compile err * report compile errors in tests * aupdate failing constraint test * change comment and link issue --- crates/nargo/src/ops/execute.rs | 3 +- crates/nargo/src/ops/foreign_calls.rs | 5 ++- crates/nargo_cli/src/cli/execute_cmd.rs | 2 +- crates/nargo_cli/src/cli/test_cmd.rs | 9 +++-- .../tests/test_data/strings/src/main.nr | 20 ++++++++-- crates/noirc_driver/src/lib.rs | 8 ++-- crates/noirc_evaluator/src/ssa_refactor.rs | 6 +-- .../acir_gen/acir_ir/acir_variable.rs | 13 ------- .../src/ssa_refactor/acir_gen/mod.rs | 37 +++++-------------- crates/wasm/src/compile.rs | 4 +- 10 files changed, 46 insertions(+), 61 deletions(-) diff --git a/crates/nargo/src/ops/execute.rs b/crates/nargo/src/ops/execute.rs index 13ea64ed261..2a126443468 100644 --- a/crates/nargo/src/ops/execute.rs +++ b/crates/nargo/src/ops/execute.rs @@ -10,6 +10,7 @@ pub fn execute_circuit( _backend: &B, circuit: Circuit, initial_witness: WitnessMap, + show_output: bool, ) -> Result { let mut acvm = ACVM::new(B::default(), circuit.opcodes, initial_witness); @@ -23,7 +24,7 @@ pub fn execute_circuit( } ACVMStatus::Failure(error) => return Err(error.into()), ACVMStatus::RequiresForeignCall(foreign_call) => { - let foreign_call_result = ForeignCall::execute(&foreign_call)?; + let foreign_call_result = ForeignCall::execute(&foreign_call, show_output)?; acvm.resolve_pending_foreign_call(foreign_call_result); } } diff --git a/crates/nargo/src/ops/foreign_calls.rs b/crates/nargo/src/ops/foreign_calls.rs index 2abc62b1032..4d2f5988e38 100644 --- a/crates/nargo/src/ops/foreign_calls.rs +++ b/crates/nargo/src/ops/foreign_calls.rs @@ -42,11 +42,14 @@ impl ForeignCall { pub(crate) fn execute( foreign_call: &ForeignCallWaitInfo, + show_output: bool, ) -> Result { let foreign_call_name = foreign_call.function.as_str(); match Self::lookup(foreign_call_name) { Some(ForeignCall::Println) => { - Self::execute_println(&foreign_call.inputs)?; + if show_output { + Self::execute_println(&foreign_call.inputs)?; + } Ok(ForeignCallResult { 
values: vec![] }) } Some(ForeignCall::Sequence) => { diff --git a/crates/nargo_cli/src/cli/execute_cmd.rs b/crates/nargo_cli/src/cli/execute_cmd.rs index ca5c18585ab..a2700caee0f 100644 --- a/crates/nargo_cli/src/cli/execute_cmd.rs +++ b/crates/nargo_cli/src/cli/execute_cmd.rs @@ -132,7 +132,7 @@ pub(crate) fn execute_program( debug_data: Option<(DebugInfo, Context)>, ) -> Result> { let initial_witness = abi.encode(inputs_map, None)?; - let solved_witness_err = nargo::ops::execute_circuit(backend, circuit, initial_witness); + let solved_witness_err = nargo::ops::execute_circuit(backend, circuit, initial_witness, true); match solved_witness_err { Ok(solved_witness) => Ok(solved_witness), Err(err) => { diff --git a/crates/nargo_cli/src/cli/test_cmd.rs b/crates/nargo_cli/src/cli/test_cmd.rs index 7eb1c9bff74..e52e3e5aa8d 100644 --- a/crates/nargo_cli/src/cli/test_cmd.rs +++ b/crates/nargo_cli/src/cli/test_cmd.rs @@ -106,14 +106,17 @@ fn run_test( show_output: bool, config: &CompileOptions, ) -> Result<(), CliError> { - let mut program = compile_no_check(context, show_output, config, main) - .map_err(|_| CliError::Generic(format!("Test '{test_name}' failed to compile")))?; + let mut program = compile_no_check(context, config, main).map_err(|err| { + noirc_errors::reporter::report_all(&context.file_manager, &[err], config.deny_warnings); + CliError::Generic(format!("Test '{test_name}' failed to compile")) + })?; + // Note: We could perform this test using the unoptimized ACIR as generated by `compile_no_check`. program.circuit = optimize_circuit(backend, program.circuit).unwrap().0; // Run the backend to ensure the PWG evaluates functions like std::hash::pedersen, // otherwise constraints involving these expressions will not error. - match execute_circuit(backend, program.circuit, WitnessMap::new()) { + match execute_circuit(backend, program.circuit, WitnessMap::new(), show_output) { Ok(_) => Ok(()), Err(error) => { let writer = StandardStream::stderr(ColorChoice::Always); diff --git a/crates/nargo_cli/tests/test_data/strings/src/main.nr b/crates/nargo_cli/tests/test_data/strings/src/main.nr index edf5fff55b4..9f122c3a137 100644 --- a/crates/nargo_cli/tests/test_data/strings/src/main.nr +++ b/crates/nargo_cli/tests/test_data/strings/src/main.nr @@ -43,9 +43,8 @@ fn test_prints_strings() { fn test_prints_array() { let array = [1, 2, 3, 5, 8]; - // TODO: Printing structs currently not supported - // let s = Test { a: 1, b: 2, c: [3, 4] }; - // std::println(s); + let s = Test { a: 1, b: 2, c: [3, 4] }; + std::println(s); std::println(array); @@ -53,6 +52,21 @@ fn test_prints_array() { std::println(hash); } +fn failed_constraint(hex_as_field: Field) { + // TODO(#2116): Note that `println` will not work if a failed constraint can be + // evaluated at compile time. + // When this method is called from a test method or with constant values + // a `Failed constraint` compile error will be caught before this `println` + // is executed as the input will be a constant. 
+ std::println(hex_as_field); + assert(hex_as_field != 0x41); +} + +#[test] +fn test_failed_constraint() { + failed_constraint(0x41); +} + struct Test { a: Field, b: Field, diff --git a/crates/noirc_driver/src/lib.rs b/crates/noirc_driver/src/lib.rs index 4d1b7fe2675..27109af6a2f 100644 --- a/crates/noirc_driver/src/lib.rs +++ b/crates/noirc_driver/src/lib.rs @@ -163,7 +163,7 @@ pub fn compile_main( } }; - let compiled_program = compile_no_check(context, true, options, main)?; + let compiled_program = compile_no_check(context, options, main)?; if options.print_acir { println!("Compiled ACIR for main (unoptimized):"); @@ -230,7 +230,7 @@ fn compile_contract( let mut errs = Vec::new(); for function_id in &contract.functions { let name = context.function_name(function_id).to_owned(); - let function = match compile_no_check(context, true, options, *function_id) { + let function = match compile_no_check(context, options, *function_id) { Ok(function) => function, Err(err) => { errs.push(err); @@ -267,14 +267,12 @@ fn compile_contract( #[allow(deprecated)] pub fn compile_no_check( context: &Context, - show_output: bool, options: &CompileOptions, main_function: FuncId, ) -> Result { let program = monomorphize(main_function, &context.def_interner); - let (circuit, debug, abi) = - create_circuit(program, options.show_ssa, options.show_brillig, show_output)?; + let (circuit, debug, abi) = create_circuit(program, options.show_ssa, options.show_brillig)?; Ok(CompiledProgram { circuit, debug, abi }) } diff --git a/crates/noirc_evaluator/src/ssa_refactor.rs b/crates/noirc_evaluator/src/ssa_refactor.rs index 6326b45554d..c57bb330b09 100644 --- a/crates/noirc_evaluator/src/ssa_refactor.rs +++ b/crates/noirc_evaluator/src/ssa_refactor.rs @@ -35,7 +35,6 @@ pub mod ssa_gen; /// convert the final SSA into ACIR and return it. pub(crate) fn optimize_into_acir( program: Program, - allow_log_ops: bool, print_ssa_passes: bool, print_brillig_trace: bool, ) -> Result { @@ -63,7 +62,7 @@ pub(crate) fn optimize_into_acir( .dead_instruction_elimination() .print(print_ssa_passes, "After Dead Instruction Elimination:"); } - ssa.into_acir(brillig, abi_distinctness, allow_log_ops) + ssa.into_acir(brillig, abi_distinctness) } /// Compiles the Program into ACIR and applies optimizations to the arithmetic gates @@ -74,7 +73,6 @@ pub fn create_circuit( program: Program, enable_ssa_logging: bool, enable_brillig_logging: bool, - show_output: bool, ) -> Result<(Circuit, DebugInfo, Abi), RuntimeError> { let func_sig = program.main_function_signature.clone(); let GeneratedAcir { @@ -84,7 +82,7 @@ pub fn create_circuit( locations, input_witnesses, .. - } = optimize_into_acir(program, show_output, enable_ssa_logging, enable_brillig_logging)?; + } = optimize_into_acir(program, enable_ssa_logging, enable_brillig_logging)?; let abi = gen_abi(func_sig, &input_witnesses, return_witnesses.clone()); let public_abi = abi.clone().public_abi(); diff --git a/crates/noirc_evaluator/src/ssa_refactor/acir_gen/acir_ir/acir_variable.rs b/crates/noirc_evaluator/src/ssa_refactor/acir_gen/acir_ir/acir_variable.rs index 9177dc9ae6c..d1479ef1f1b 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/acir_gen/acir_ir/acir_variable.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/acir_gen/acir_ir/acir_variable.rs @@ -827,19 +827,6 @@ impl AcirContext { self.radix_decompose(endian, input_var, two_var, limb_count_var, result_element_type) } - /// Prints the given `AcirVar`s as witnesses. 
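With this change the decision to show `println` output moves from compile time to execution time: `compile_no_check` loses its `show_output` parameter, `execute_circuit` gains one, and the println foreign call only writes output when that flag is set (`nargo execute` passes `true`; `nargo test` forwards `--show-output`). A minimal model of the new gating point (names are illustrative, not nargo's API):

```rust
/// Sketch: the println oracle prints only when the caller asked for output and
/// resolves to an empty result either way, mirroring `ForeignCallResult { values: vec![] }`.
fn execute_println(message: &str, show_output: bool) -> Vec<u128> {
    if show_output {
        println!("{message}");
    }
    vec![]
}

fn main() {
    assert!(execute_println("hello world", true).is_empty());
    assert!(execute_println("quiet test run", false).is_empty());
}
```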
- pub(crate) fn print(&mut self, input: Vec) -> Result<(), RuntimeError> { - let input = Self::flatten_values(input); - - let witnesses = vecmap(input, |acir_var| { - let var_data = &self.vars[&acir_var]; - let expr = var_data.to_expression(); - self.acir_ir.get_or_create_witness(&expr) - }); - self.acir_ir.call_print(witnesses); - Ok(()) - } - /// Flatten the given Vector of AcirValues into a single vector of only variables. /// Each AcirValue::Array in the vector is recursively flattened, so each element /// will flattened into the resulting Vec. E.g. flatten_values([1, [2, 3]) == [1, 2, 3]. diff --git a/crates/noirc_evaluator/src/ssa_refactor/acir_gen/mod.rs b/crates/noirc_evaluator/src/ssa_refactor/acir_gen/mod.rs index f00f15d8f05..62a9dd5969d 100644 --- a/crates/noirc_evaluator/src/ssa_refactor/acir_gen/mod.rs +++ b/crates/noirc_evaluator/src/ssa_refactor/acir_gen/mod.rs @@ -103,10 +103,9 @@ impl Ssa { self, brillig: Brillig, abi_distinctness: AbiDistinctness, - allow_log_ops: bool, ) -> Result { let context = Context::new(); - let mut generated_acir = context.convert_ssa(self, brillig, allow_log_ops)?; + let mut generated_acir = context.convert_ssa(self, brillig)?; match abi_distinctness { AbiDistinctness::Distinct => { @@ -144,15 +143,10 @@ impl Context { } /// Converts SSA into ACIR - fn convert_ssa( - self, - ssa: Ssa, - brillig: Brillig, - allow_log_ops: bool, - ) -> Result { + fn convert_ssa(self, ssa: Ssa, brillig: Brillig) -> Result { let main_func = ssa.main(); match main_func.runtime() { - RuntimeType::Acir => self.convert_acir_main(main_func, &ssa, brillig, allow_log_ops), + RuntimeType::Acir => self.convert_acir_main(main_func, &ssa, brillig), RuntimeType::Brillig => self.convert_brillig_main(main_func, brillig), } } @@ -162,14 +156,13 @@ impl Context { main_func: &Function, ssa: &Ssa, brillig: Brillig, - allow_log_ops: bool, ) -> Result { let dfg = &main_func.dfg; let entry_block = &dfg[main_func.entry_block()]; let input_witness = self.convert_ssa_block_params(entry_block.parameters(), dfg)?; for instruction_id in entry_block.instructions() { - self.convert_ssa_instruction(*instruction_id, dfg, ssa, &brillig, allow_log_ops)?; + self.convert_ssa_instruction(*instruction_id, dfg, ssa, &brillig)?; } self.convert_ssa_return(entry_block.unwrap_terminator(), dfg)?; @@ -294,7 +287,6 @@ impl Context { dfg: &DataFlowGraph, ssa: &Ssa, brillig: &Brillig, - allow_log_ops: bool, ) -> Result<(), RuntimeError> { let instruction = &dfg[instruction_id]; self.acir_context.set_location(dfg.get_location(&instruction_id)); @@ -339,13 +331,8 @@ impl Context { } } Value::Intrinsic(intrinsic) => { - let outputs = self.convert_ssa_intrinsic_call( - *intrinsic, - arguments, - dfg, - allow_log_ops, - result_ids, - )?; + let outputs = self + .convert_ssa_intrinsic_call(*intrinsic, arguments, dfg, result_ids)?; // Issue #1438 causes this check to fail with intrinsics that return 0 // results but the ssa form instead creates 1 unit result value. 
@@ -929,7 +916,6 @@ impl Context { intrinsic: Intrinsic, arguments: &[ValueId], dfg: &DataFlowGraph, - allow_log_ops: bool, result_ids: &[ValueId], ) -> Result, RuntimeError> { match intrinsic { @@ -959,13 +945,8 @@ impl Context { self.acir_context.bit_decompose(endian, field, bit_size, result_type) } - Intrinsic::Println => { - let inputs = vecmap(arguments, |arg| self.convert_value(*arg, dfg)); - if allow_log_ops { - self.acir_context.print(inputs)?; - } - Ok(Vec::new()) - } + // TODO(#2115): Remove the println intrinsic as the oracle println is now used instead + Intrinsic::Println => Ok(Vec::new()), Intrinsic::Sort => { let inputs = vecmap(arguments, |arg| self.convert_value(*arg, dfg)); // We flatten the inputs and retrieve the bit_size of the elements @@ -1133,7 +1114,7 @@ mod tests { let ssa = builder.finish(); let context = Context::new(); - let acir = context.convert_ssa(ssa, Brillig::default(), false).unwrap(); + let acir = context.convert_ssa(ssa, Brillig::default()).unwrap(); let expected_opcodes = vec![Opcode::Arithmetic(&Expression::one() - &Expression::from(Witness(1)))]; diff --git a/crates/wasm/src/compile.rs b/crates/wasm/src/compile.rs index 15d8d5107ea..4254110b849 100644 --- a/crates/wasm/src/compile.rs +++ b/crates/wasm/src/compile.rs @@ -107,8 +107,8 @@ pub fn compile(args: JsValue) -> JsValue { ::from_serde(&optimized_contracts).unwrap() } else { let main = context.get_main_function(&crate_id).expect("Could not find main function!"); - let mut compiled_program = compile_no_check(&context, true, &options.compile_options, main) - .expect("Compilation failed"); + let mut compiled_program = + compile_no_check(&context, &options.compile_options, main).expect("Compilation failed"); compiled_program.circuit = optimize_circuit(compiled_program.circuit); From a07b8a48924865d8425d35e40c75f48a13a81935 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Wed, 2 Aug 2023 20:00:23 +0100 Subject: [PATCH 19/19] chore: rename `ssa_refactor` module to `ssa` (#2129) --- .gitignore | 2 -- .../noirc_evaluator/src/brillig/brillig_gen.rs | 2 +- .../src/brillig/brillig_gen/brillig_block.rs | 9 +++++---- .../src/brillig/brillig_gen/brillig_fn.rs | 2 +- crates/noirc_evaluator/src/brillig/mod.rs | 2 +- crates/noirc_evaluator/src/lib.rs | 4 ++-- .../src/{ssa_refactor.rs => ssa.rs} | 0 .../src/{ssa_refactor => ssa}/abi_gen/mod.rs | 0 .../{ssa_refactor => ssa}/acir_gen/acir_ir.rs | 0 .../acir_gen/acir_ir/acir_variable.rs | 6 +++--- .../acir_gen/acir_ir/generated_acir.rs | 0 .../acir_gen/acir_ir/sort.rs | 0 .../src/{ssa_refactor => ssa}/acir_gen/mod.rs | 2 +- .../src/{ssa_refactor => ssa}/ir.rs | 0 .../src/{ssa_refactor => ssa}/ir/basic_block.rs | 0 .../src/{ssa_refactor => ssa}/ir/cfg.rs | 2 +- .../src/{ssa_refactor => ssa}/ir/dfg.rs | 4 ++-- .../src/{ssa_refactor => ssa}/ir/dom.rs | 2 +- .../src/{ssa_refactor => ssa}/ir/function.rs | 0 .../ir/function_inserter.rs | 0 .../src/{ssa_refactor => ssa}/ir/instruction.rs | 4 +--- .../{ssa_refactor => ssa}/ir/instruction/call.rs | 2 +- .../src/{ssa_refactor => ssa}/ir/map.rs | 0 .../src/{ssa_refactor => ssa}/ir/post_order.rs | 4 ++-- .../src/{ssa_refactor => ssa}/ir/printer.rs | 0 .../src/{ssa_refactor => ssa}/ir/types.rs | 0 .../src/{ssa_refactor => ssa}/ir/value.rs | 2 +- .../opt/constant_folding.rs | 4 ++-- .../{ssa_refactor => ssa}/opt/defunctionalize.rs | 2 +- .../src/{ssa_refactor => ssa}/opt/die.rs | 4 ++-- .../src/{ssa_refactor => ssa}/opt/flatten_cfg.rs | 6 +++--- .../opt/flatten_cfg/branch_analysis.rs 
| 6 ++----
 .../src/{ssa_refactor => ssa}/opt/inlining.rs      | 4 ++--
 .../src/{ssa_refactor => ssa}/opt/mem2reg.rs       | 4 ++--
 .../src/{ssa_refactor => ssa}/opt/mod.rs           | 0
 .../{ssa_refactor => ssa}/opt/simplify_cfg.rs      | 4 ++--
 .../src/{ssa_refactor => ssa}/opt/unrolling.rs     | 4 ++--
 .../src/{ssa_refactor => ssa}/ssa_builder/mod.rs   | 4 ++--
 .../src/{ssa_refactor => ssa}/ssa_gen/context.rs   | 16 ++++++++--------
 .../src/{ssa_refactor => ssa}/ssa_gen/mod.rs       | 0
 .../src/{ssa_refactor => ssa}/ssa_gen/program.rs   | 2 +-
 .../src/{ssa_refactor => ssa}/ssa_gen/value.rs     | 4 ++--
 42 files changed, 54 insertions(+), 59 deletions(-)
 rename crates/noirc_evaluator/src/{ssa_refactor.rs => ssa.rs} (100%)
 rename crates/noirc_evaluator/src/{ssa_refactor => ssa}/abi_gen/mod.rs (100%)
 rename crates/noirc_evaluator/src/{ssa_refactor => ssa}/acir_gen/acir_ir.rs (100%)
 rename crates/noirc_evaluator/src/{ssa_refactor => ssa}/acir_gen/acir_ir/acir_variable.rs (99%)
 rename crates/noirc_evaluator/src/{ssa_refactor => ssa}/acir_gen/acir_ir/generated_acir.rs (100%)
 rename crates/noirc_evaluator/src/{ssa_refactor => ssa}/acir_gen/acir_ir/sort.rs (100%)
 rename crates/noirc_evaluator/src/{ssa_refactor => ssa}/acir_gen/mod.rs (99%)
 rename crates/noirc_evaluator/src/{ssa_refactor => ssa}/ir.rs (100%)
 rename crates/noirc_evaluator/src/{ssa_refactor => ssa}/ir/basic_block.rs (100%)
 rename crates/noirc_evaluator/src/{ssa_refactor => ssa}/ir/cfg.rs (99%)
 rename crates/noirc_evaluator/src/{ssa_refactor => ssa}/ir/dfg.rs (99%)
 rename crates/noirc_evaluator/src/{ssa_refactor => ssa}/ir/dom.rs (99%)
 rename crates/noirc_evaluator/src/{ssa_refactor => ssa}/ir/function.rs (100%)
 rename crates/noirc_evaluator/src/{ssa_refactor => ssa}/ir/function_inserter.rs (100%)
 rename crates/noirc_evaluator/src/{ssa_refactor => ssa}/ir/instruction.rs (99%)
 rename crates/noirc_evaluator/src/{ssa_refactor => ssa}/ir/instruction/call.rs (99%)
 rename crates/noirc_evaluator/src/{ssa_refactor => ssa}/ir/map.rs (100%)
 rename crates/noirc_evaluator/src/{ssa_refactor => ssa}/ir/post_order.rs (97%)
 rename crates/noirc_evaluator/src/{ssa_refactor => ssa}/ir/printer.rs (100%)
 rename crates/noirc_evaluator/src/{ssa_refactor => ssa}/ir/types.rs (100%)
 rename crates/noirc_evaluator/src/{ssa_refactor => ssa}/ir/value.rs (98%)
 rename crates/noirc_evaluator/src/{ssa_refactor => ssa}/opt/constant_folding.rs (99%)
 rename crates/noirc_evaluator/src/{ssa_refactor => ssa}/opt/defunctionalize.rs (99%)
 rename crates/noirc_evaluator/src/{ssa_refactor => ssa}/opt/die.rs (99%)
 rename crates/noirc_evaluator/src/{ssa_refactor => ssa}/opt/flatten_cfg.rs (99%)
 rename crates/noirc_evaluator/src/{ssa_refactor => ssa}/opt/flatten_cfg/branch_analysis.rs (98%)
 rename crates/noirc_evaluator/src/{ssa_refactor => ssa}/opt/inlining.rs (99%)
 rename crates/noirc_evaluator/src/{ssa_refactor => ssa}/opt/mem2reg.rs (99%)
 rename crates/noirc_evaluator/src/{ssa_refactor => ssa}/opt/mod.rs (100%)
 rename crates/noirc_evaluator/src/{ssa_refactor => ssa}/opt/simplify_cfg.rs (99%)
 rename crates/noirc_evaluator/src/{ssa_refactor => ssa}/opt/unrolling.rs (99%)
 rename crates/noirc_evaluator/src/{ssa_refactor => ssa}/ssa_builder/mod.rs (99%)
 rename crates/noirc_evaluator/src/{ssa_refactor => ssa}/ssa_gen/context.rs (98%)
 rename crates/noirc_evaluator/src/{ssa_refactor => ssa}/ssa_gen/mod.rs (100%)
 rename crates/noirc_evaluator/src/{ssa_refactor => ssa}/ssa_gen/program.rs (98%)
 rename crates/noirc_evaluator/src/{ssa_refactor => ssa}/ssa_gen/value.rs (98%)
diff --git a/.gitignore b/.gitignore
index af3a8e8beb2..8aec0edeadc 100644
--- a/.gitignore
+++ b/.gitignore
@@ -22,5 +22,3 @@ result
 **/target
 !crates/nargo_cli/tests/test_data/*/target
 !crates/nargo_cli/tests/test_data/*/target/witness.tr
-!crates/nargo_cli/tests/test_data_ssa_refactor/*/target
-!crates/nargo_cli/tests/test_data_ssa_refactor/*/target/witness.tr
\ No newline at end of file
diff --git a/crates/noirc_evaluator/src/brillig/brillig_gen.rs b/crates/noirc_evaluator/src/brillig/brillig_gen.rs
index 3ba04ed1afb..a1e82bbf443 100644
--- a/crates/noirc_evaluator/src/brillig/brillig_gen.rs
+++ b/crates/noirc_evaluator/src/brillig/brillig_gen.rs
@@ -4,7 +4,7 @@ pub(crate) mod brillig_directive;
 pub(crate) mod brillig_fn;
 pub(crate) mod brillig_slice_ops;
-use crate::ssa_refactor::ir::{function::Function, post_order::PostOrder};
+use crate::ssa::ir::{function::Function, post_order::PostOrder};
 use std::collections::HashMap;
diff --git a/crates/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs b/crates/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs
index a9bbe189e57..ded6be71bd5 100644
--- a/crates/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs
+++ b/crates/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs
@@ -4,12 +4,13 @@ use crate::brillig::brillig_gen::brillig_slice_ops::{
 use crate::brillig::brillig_ir::{
     BrilligBinaryOp, BrilligContext, BRILLIG_INTEGER_ARITHMETIC_BIT_SIZE,
 };
-use crate::ssa_refactor::ir::function::FunctionId;
-use crate::ssa_refactor::ir::instruction::{Endian, Intrinsic};
-use crate::ssa_refactor::ir::{
+use crate::ssa::ir::{
     basic_block::{BasicBlock, BasicBlockId},
     dfg::DataFlowGraph,
-    instruction::{Binary, BinaryOp, Instruction, InstructionId, TerminatorInstruction},
+    function::FunctionId,
+    instruction::{
+        Binary, BinaryOp, Endian, Instruction, InstructionId, Intrinsic, TerminatorInstruction,
+    },
     types::{NumericType, Type},
     value::{Value, ValueId},
 };
diff --git a/crates/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs b/crates/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs
index 210d6da7be6..7c4cb5e2ced 100644
--- a/crates/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs
+++ b/crates/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs
@@ -8,7 +8,7 @@ use crate::{
         artifact::{BrilligParameter, Label},
         BrilligContext,
     },
-    ssa_refactor::ir::{
+    ssa::ir::{
         dfg::DataFlowGraph,
         function::{Function, FunctionId},
         types::{CompositeType, Type},
diff --git a/crates/noirc_evaluator/src/brillig/mod.rs b/crates/noirc_evaluator/src/brillig/mod.rs
index 105475323a7..0c6ddd53a4e 100644
--- a/crates/noirc_evaluator/src/brillig/mod.rs
+++ b/crates/noirc_evaluator/src/brillig/mod.rs
@@ -5,7 +5,7 @@ use self::{
     brillig_gen::{brillig_fn::FunctionContext, convert_ssa_function},
     brillig_ir::artifact::{BrilligArtifact, Label},
 };
-use crate::ssa_refactor::{
+use crate::ssa::{
     ir::{
         function::{Function, FunctionId, RuntimeType},
         value::Value,
diff --git a/crates/noirc_evaluator/src/lib.rs b/crates/noirc_evaluator/src/lib.rs
index c7d4f5baed6..f5403e1cf49 100644
--- a/crates/noirc_evaluator/src/lib.rs
+++ b/crates/noirc_evaluator/src/lib.rs
@@ -7,8 +7,8 @@ mod errors;
 // SSA code to create the SSA based IR
 // for functions and execute different optimizations.
-pub mod ssa_refactor;
+pub mod ssa;
 pub mod brillig;
-pub use ssa_refactor::create_circuit;
+pub use ssa::create_circuit;
diff --git a/crates/noirc_evaluator/src/ssa_refactor.rs b/crates/noirc_evaluator/src/ssa.rs
similarity index 100%
rename from crates/noirc_evaluator/src/ssa_refactor.rs
rename to crates/noirc_evaluator/src/ssa.rs
diff --git a/crates/noirc_evaluator/src/ssa_refactor/abi_gen/mod.rs b/crates/noirc_evaluator/src/ssa/abi_gen/mod.rs
similarity index 100%
rename from crates/noirc_evaluator/src/ssa_refactor/abi_gen/mod.rs
rename to crates/noirc_evaluator/src/ssa/abi_gen/mod.rs
diff --git a/crates/noirc_evaluator/src/ssa_refactor/acir_gen/acir_ir.rs b/crates/noirc_evaluator/src/ssa/acir_gen/acir_ir.rs
similarity index 100%
rename from crates/noirc_evaluator/src/ssa_refactor/acir_gen/acir_ir.rs
rename to crates/noirc_evaluator/src/ssa/acir_gen/acir_ir.rs
diff --git a/crates/noirc_evaluator/src/ssa_refactor/acir_gen/acir_ir/acir_variable.rs b/crates/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs
similarity index 99%
rename from crates/noirc_evaluator/src/ssa_refactor/acir_gen/acir_ir/acir_variable.rs
rename to crates/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs
index d1479ef1f1b..779aaa559ed 100644
--- a/crates/noirc_evaluator/src/ssa_refactor/acir_gen/acir_ir/acir_variable.rs
+++ b/crates/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs
@@ -1,9 +1,9 @@
 use super::generated_acir::GeneratedAcir;
 use crate::brillig::brillig_gen::brillig_directive;
 use crate::errors::{InternalError, RuntimeError};
-use crate::ssa_refactor::acir_gen::{AcirDynamicArray, AcirValue};
-use crate::ssa_refactor::ir::types::Type as SsaType;
-use crate::ssa_refactor::ir::{instruction::Endian, types::NumericType};
+use crate::ssa::acir_gen::{AcirDynamicArray, AcirValue};
+use crate::ssa::ir::types::Type as SsaType;
+use crate::ssa::ir::{instruction::Endian, types::NumericType};
 use acvm::acir::circuit::opcodes::{BlockId, MemOp};
 use acvm::acir::circuit::Opcode;
 use acvm::acir::{
diff --git a/crates/noirc_evaluator/src/ssa_refactor/acir_gen/acir_ir/generated_acir.rs b/crates/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs
similarity index 100%
rename from crates/noirc_evaluator/src/ssa_refactor/acir_gen/acir_ir/generated_acir.rs
rename to crates/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs
diff --git a/crates/noirc_evaluator/src/ssa_refactor/acir_gen/acir_ir/sort.rs b/crates/noirc_evaluator/src/ssa/acir_gen/acir_ir/sort.rs
similarity index 100%
rename from crates/noirc_evaluator/src/ssa_refactor/acir_gen/acir_ir/sort.rs
rename to crates/noirc_evaluator/src/ssa/acir_gen/acir_ir/sort.rs
diff --git a/crates/noirc_evaluator/src/ssa_refactor/acir_gen/mod.rs b/crates/noirc_evaluator/src/ssa/acir_gen/mod.rs
similarity index 99%
rename from crates/noirc_evaluator/src/ssa_refactor/acir_gen/mod.rs
rename to crates/noirc_evaluator/src/ssa/acir_gen/mod.rs
index 62a9dd5969d..331c56f59d7 100644
--- a/crates/noirc_evaluator/src/ssa_refactor/acir_gen/mod.rs
+++ b/crates/noirc_evaluator/src/ssa/acir_gen/mod.rs
@@ -1086,7 +1086,7 @@ mod tests {
     use crate::{
         brillig::Brillig,
-        ssa_refactor::{
+        ssa::{
             ir::{function::RuntimeType, map::Id, types::Type},
             ssa_builder::FunctionBuilder,
         },
diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir.rs b/crates/noirc_evaluator/src/ssa/ir.rs
similarity index 100%
rename from crates/noirc_evaluator/src/ssa_refactor/ir.rs
rename to crates/noirc_evaluator/src/ssa/ir.rs
diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/basic_block.rs b/crates/noirc_evaluator/src/ssa/ir/basic_block.rs
similarity index 100%
rename from crates/noirc_evaluator/src/ssa_refactor/ir/basic_block.rs
rename to crates/noirc_evaluator/src/ssa/ir/basic_block.rs
diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs b/crates/noirc_evaluator/src/ssa/ir/cfg.rs
similarity index 99%
rename from crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs
rename to crates/noirc_evaluator/src/ssa/ir/cfg.rs
index f08b477696a..a91123438fa 100644
--- a/crates/noirc_evaluator/src/ssa_refactor/ir/cfg.rs
+++ b/crates/noirc_evaluator/src/ssa/ir/cfg.rs
@@ -128,7 +128,7 @@ impl ControlFlowGraph {
 #[cfg(test)]
 mod tests {
-    use crate::ssa_refactor::ir::{instruction::TerminatorInstruction, map::Id, types::Type};
+    use crate::ssa::ir::{instruction::TerminatorInstruction, map::Id, types::Type};
     use super::{super::function::Function, ControlFlowGraph};
diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs b/crates/noirc_evaluator/src/ssa/ir/dfg.rs
similarity index 99%
rename from crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs
rename to crates/noirc_evaluator/src/ssa/ir/dfg.rs
index 6d74e49b03b..29f5156a88c 100644
--- a/crates/noirc_evaluator/src/ssa_refactor/ir/dfg.rs
+++ b/crates/noirc_evaluator/src/ssa/ir/dfg.rs
@@ -1,6 +1,6 @@
 use std::{borrow::Cow, collections::HashMap};
-use crate::ssa_refactor::ir::instruction::SimplifyResult;
+use crate::ssa::ir::instruction::SimplifyResult;
 use super::{
     basic_block::{BasicBlock, BasicBlockId},
@@ -503,7 +503,7 @@ impl<'dfg> InsertInstructionResult<'dfg> {
 #[cfg(test)]
 mod tests {
     use super::DataFlowGraph;
-    use crate::ssa_refactor::ir::instruction::Instruction;
+    use crate::ssa::ir::instruction::Instruction;
     #[test]
     fn make_instruction() {
diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/dom.rs b/crates/noirc_evaluator/src/ssa/ir/dom.rs
similarity index 99%
rename from crates/noirc_evaluator/src/ssa_refactor/ir/dom.rs
rename to crates/noirc_evaluator/src/ssa/ir/dom.rs
index 4763ffffbd1..b7b1728d035 100644
--- a/crates/noirc_evaluator/src/ssa_refactor/ir/dom.rs
+++ b/crates/noirc_evaluator/src/ssa/ir/dom.rs
@@ -245,7 +245,7 @@ impl DominatorTree {
 mod tests {
     use std::cmp::Ordering;
-    use crate::ssa_refactor::{
+    use crate::ssa::{
         ir::{
             basic_block::BasicBlockId,
             dom::DominatorTree,
diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/function.rs b/crates/noirc_evaluator/src/ssa/ir/function.rs
similarity index 100%
rename from crates/noirc_evaluator/src/ssa_refactor/ir/function.rs
rename to crates/noirc_evaluator/src/ssa/ir/function.rs
diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/function_inserter.rs b/crates/noirc_evaluator/src/ssa/ir/function_inserter.rs
similarity index 100%
rename from crates/noirc_evaluator/src/ssa_refactor/ir/function_inserter.rs
rename to crates/noirc_evaluator/src/ssa/ir/function_inserter.rs
diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs b/crates/noirc_evaluator/src/ssa/ir/instruction.rs
similarity index 99%
rename from crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs
rename to crates/noirc_evaluator/src/ssa/ir/instruction.rs
index 7edb74f4206..680715fb0ec 100644
--- a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction.rs
+++ b/crates/noirc_evaluator/src/ssa/ir/instruction.rs
@@ -2,13 +2,11 @@ use acvm::{acir::BlackBoxFunc, FieldElement};
 use iter_extended::vecmap;
 use num_bigint::BigUint;
-use crate::ssa_refactor::ir::types::NumericType;
-
 use super::{
     basic_block::BasicBlockId,
     dfg::DataFlowGraph,
     map::Id,
-    types::Type,
+    types::{NumericType, Type},
     value::{Value, ValueId},
 };
diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction/call.rs b/crates/noirc_evaluator/src/ssa/ir/instruction/call.rs
similarity index 99%
rename from crates/noirc_evaluator/src/ssa_refactor/ir/instruction/call.rs
rename to crates/noirc_evaluator/src/ssa/ir/instruction/call.rs
index 96998d92fcf..2f0c077a1a7 100644
--- a/crates/noirc_evaluator/src/ssa_refactor/ir/instruction/call.rs
+++ b/crates/noirc_evaluator/src/ssa/ir/instruction/call.rs
@@ -4,7 +4,7 @@ use acvm::{acir::BlackBoxFunc, BlackBoxResolutionError, FieldElement};
 use iter_extended::vecmap;
 use num_bigint::BigUint;
-use crate::ssa_refactor::ir::{
+use crate::ssa::ir::{
     dfg::DataFlowGraph,
     instruction::Intrinsic,
     map::Id,
diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/map.rs b/crates/noirc_evaluator/src/ssa/ir/map.rs
similarity index 100%
rename from crates/noirc_evaluator/src/ssa_refactor/ir/map.rs
rename to crates/noirc_evaluator/src/ssa/ir/map.rs
diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/post_order.rs b/crates/noirc_evaluator/src/ssa/ir/post_order.rs
similarity index 97%
rename from crates/noirc_evaluator/src/ssa_refactor/ir/post_order.rs
rename to crates/noirc_evaluator/src/ssa/ir/post_order.rs
index 2f7b5edebe6..202f5cff716 100644
--- a/crates/noirc_evaluator/src/ssa_refactor/ir/post_order.rs
+++ b/crates/noirc_evaluator/src/ssa/ir/post_order.rs
@@ -5,7 +5,7 @@
 use std::collections::HashSet;
-use crate::ssa_refactor::ir::{basic_block::BasicBlockId, function::Function};
+use crate::ssa::ir::{basic_block::BasicBlockId, function::Function};
 /// Depth-first traversal stack state marker for computing the cfg post-order.
 enum Visit {
@@ -67,7 +67,7 @@ impl PostOrder {
 #[cfg(test)]
 mod tests {
-    use crate::ssa_refactor::{
+    use crate::ssa::{
         ir::{
             function::{Function, RuntimeType},
             map::Id,
diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs b/crates/noirc_evaluator/src/ssa/ir/printer.rs
similarity index 100%
rename from crates/noirc_evaluator/src/ssa_refactor/ir/printer.rs
rename to crates/noirc_evaluator/src/ssa/ir/printer.rs
diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/types.rs b/crates/noirc_evaluator/src/ssa/ir/types.rs
similarity index 100%
rename from crates/noirc_evaluator/src/ssa_refactor/ir/types.rs
rename to crates/noirc_evaluator/src/ssa/ir/types.rs
diff --git a/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs b/crates/noirc_evaluator/src/ssa/ir/value.rs
similarity index 98%
rename from crates/noirc_evaluator/src/ssa_refactor/ir/value.rs
rename to crates/noirc_evaluator/src/ssa/ir/value.rs
index cea526058b4..54831eb4a07 100644
--- a/crates/noirc_evaluator/src/ssa_refactor/ir/value.rs
+++ b/crates/noirc_evaluator/src/ssa/ir/value.rs
@@ -1,6 +1,6 @@
 use acvm::FieldElement;
-use crate::ssa_refactor::ir::basic_block::BasicBlockId;
+use crate::ssa::ir::basic_block::BasicBlockId;
 use super::{
     function::FunctionId,
diff --git a/crates/noirc_evaluator/src/ssa_refactor/opt/constant_folding.rs b/crates/noirc_evaluator/src/ssa/opt/constant_folding.rs
similarity index 99%
rename from crates/noirc_evaluator/src/ssa_refactor/opt/constant_folding.rs
rename to crates/noirc_evaluator/src/ssa/opt/constant_folding.rs
index acf048595d7..ea46ddf1d4f 100644
--- a/crates/noirc_evaluator/src/ssa_refactor/opt/constant_folding.rs
+++ b/crates/noirc_evaluator/src/ssa/opt/constant_folding.rs
@@ -2,7 +2,7 @@ use std::collections::HashSet;
 use iter_extended::vecmap;
-use crate::ssa_refactor::{
+use crate::ssa::{
     ir::{
         basic_block::BasicBlockId, dfg::InsertInstructionResult, function::Function,
         instruction::InstructionId,
@@ -94,7 +94,7 @@ impl Context {
 mod test {
     use std::rc::Rc;
-    use crate::ssa_refactor::{
+    use crate::ssa::{
         ir::{
             function::RuntimeType,
             instruction::{BinaryOp, TerminatorInstruction},
diff --git a/crates/noirc_evaluator/src/ssa_refactor/opt/defunctionalize.rs b/crates/noirc_evaluator/src/ssa/opt/defunctionalize.rs
similarity index 99%
rename from crates/noirc_evaluator/src/ssa_refactor/opt/defunctionalize.rs
rename to crates/noirc_evaluator/src/ssa/opt/defunctionalize.rs
index fc3bc5d9aa6..10561bf731f 100644
--- a/crates/noirc_evaluator/src/ssa_refactor/opt/defunctionalize.rs
+++ b/crates/noirc_evaluator/src/ssa/opt/defunctionalize.rs
@@ -9,7 +9,7 @@ use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet};
 use acvm::FieldElement;
 use iter_extended::vecmap;
-use crate::ssa_refactor::{
+use crate::ssa::{
     ir::{
         basic_block::BasicBlockId,
         function::{Function, FunctionId, RuntimeType, Signature},
diff --git a/crates/noirc_evaluator/src/ssa_refactor/opt/die.rs b/crates/noirc_evaluator/src/ssa/opt/die.rs
similarity index 99%
rename from crates/noirc_evaluator/src/ssa_refactor/opt/die.rs
rename to crates/noirc_evaluator/src/ssa/opt/die.rs
index ef73938cc37..935568af2db 100644
--- a/crates/noirc_evaluator/src/ssa_refactor/opt/die.rs
+++ b/crates/noirc_evaluator/src/ssa/opt/die.rs
@@ -2,7 +2,7 @@
 //! which the results are unused.
 use std::collections::HashSet;
-use crate::ssa_refactor::{
+use crate::ssa::{
     ir::{
         basic_block::{BasicBlock, BasicBlockId},
         dfg::DataFlowGraph,
@@ -133,7 +133,7 @@ impl Context {
 #[cfg(test)]
 mod test {
-    use crate::ssa_refactor::{
+    use crate::ssa::{
         ir::{function::RuntimeType, instruction::BinaryOp, map::Id, types::Type},
         ssa_builder::FunctionBuilder,
     };
diff --git a/crates/noirc_evaluator/src/ssa_refactor/opt/flatten_cfg.rs b/crates/noirc_evaluator/src/ssa/opt/flatten_cfg.rs
similarity index 99%
rename from crates/noirc_evaluator/src/ssa_refactor/opt/flatten_cfg.rs
rename to crates/noirc_evaluator/src/ssa/opt/flatten_cfg.rs
index fdc4be085d7..1bcdf433d79 100644
--- a/crates/noirc_evaluator/src/ssa_refactor/opt/flatten_cfg.rs
+++ b/crates/noirc_evaluator/src/ssa/opt/flatten_cfg.rs
@@ -137,7 +137,7 @@
 use acvm::FieldElement;
 use iter_extended::vecmap;
 use noirc_errors::Location;
-use crate::ssa_refactor::{
+use crate::ssa::{
     ir::{
         basic_block::BasicBlockId,
         cfg::ControlFlowGraph,
@@ -213,7 +213,7 @@ fn flatten_function_cfg(function: &mut Function) {
     // TODO This loops forever, if the predecessors are not then processed
     // TODO Because it will visit the same block again, pop it out of the queue
    // TODO then back into the queue again.
-    if let crate::ssa_refactor::ir::function::RuntimeType::Brillig = function.runtime() {
+    if let crate::ssa::ir::function::RuntimeType::Brillig = function.runtime() {
         return;
     }
     let cfg = ControlFlowGraph::with_function(function);
@@ -739,7 +739,7 @@ impl<'f> Context<'f> {
 mod test {
     use std::rc::Rc;
-    use crate::ssa_refactor::{
+    use crate::ssa::{
         ir::{
             dfg::DataFlowGraph,
             function::{Function, RuntimeType},
diff --git a/crates/noirc_evaluator/src/ssa_refactor/opt/flatten_cfg/branch_analysis.rs b/crates/noirc_evaluator/src/ssa/opt/flatten_cfg/branch_analysis.rs
similarity index 98%
rename from crates/noirc_evaluator/src/ssa_refactor/opt/flatten_cfg/branch_analysis.rs
rename to crates/noirc_evaluator/src/ssa/opt/flatten_cfg/branch_analysis.rs
index bed0686e45b..1203d03f562 100644
--- a/crates/noirc_evaluator/src/ssa_refactor/opt/flatten_cfg/branch_analysis.rs
+++ b/crates/noirc_evaluator/src/ssa/opt/flatten_cfg/branch_analysis.rs
@@ -21,9 +21,7 @@
 //! the resulting map from each split block to each join block is returned.
 use std::collections::HashMap;
-use crate::ssa_refactor::ir::{
-    basic_block::BasicBlockId, cfg::ControlFlowGraph, function::Function,
-};
+use crate::ssa::ir::{basic_block::BasicBlockId, cfg::ControlFlowGraph, function::Function};
 /// Returns a `HashMap` mapping blocks that start a branch (i.e. blocks terminated with jmpif) to
 /// their corresponding blocks that end the branch.
@@ -114,7 +114,7 @@ impl<'cfg> Context<'cfg> {
 #[cfg(test)]
 mod test {
-    use crate::ssa_refactor::{
+    use crate::ssa::{
         ir::{cfg::ControlFlowGraph, function::RuntimeType, map::Id, types::Type},
         opt::flatten_cfg::branch_analysis::find_branch_ends,
         ssa_builder::FunctionBuilder,
diff --git a/crates/noirc_evaluator/src/ssa_refactor/opt/inlining.rs b/crates/noirc_evaluator/src/ssa/opt/inlining.rs
similarity index 99%
rename from crates/noirc_evaluator/src/ssa_refactor/opt/inlining.rs
rename to crates/noirc_evaluator/src/ssa/opt/inlining.rs
index 7aa2f9d176a..d4c118fd3f4 100644
--- a/crates/noirc_evaluator/src/ssa_refactor/opt/inlining.rs
+++ b/crates/noirc_evaluator/src/ssa/opt/inlining.rs
@@ -6,7 +6,7 @@ use std::collections::{HashMap, HashSet};
 use iter_extended::vecmap;
-use crate::ssa_refactor::{
+use crate::ssa::{
     ir::{
         basic_block::BasicBlockId,
         dfg::InsertInstructionResult,
@@ -482,7 +482,7 @@ impl<'function> PerFunctionContext<'function> {
 mod test {
     use acvm::FieldElement;
-    use crate::ssa_refactor::{
+    use crate::ssa::{
         ir::{
             basic_block::BasicBlockId,
             function::RuntimeType,
diff --git a/crates/noirc_evaluator/src/ssa_refactor/opt/mem2reg.rs b/crates/noirc_evaluator/src/ssa/opt/mem2reg.rs
similarity index 99%
rename from crates/noirc_evaluator/src/ssa_refactor/opt/mem2reg.rs
rename to crates/noirc_evaluator/src/ssa/opt/mem2reg.rs
index 15108abc490..b9e849bb77c 100644
--- a/crates/noirc_evaluator/src/ssa_refactor/opt/mem2reg.rs
+++ b/crates/noirc_evaluator/src/ssa/opt/mem2reg.rs
@@ -5,7 +5,7 @@ use std::collections::{BTreeMap, HashMap, HashSet};
 use iter_extended::vecmap;
-use crate::ssa_refactor::{
+use crate::ssa::{
     ir::{
         basic_block::BasicBlockId,
         dfg::DataFlowGraph,
@@ -182,7 +182,7 @@ mod tests {
     use acvm::FieldElement;
     use im::vector;
-    use crate::ssa_refactor::{
+    use crate::ssa::{
         ir::{
             basic_block::BasicBlockId,
             dfg::DataFlowGraph,
diff --git a/crates/noirc_evaluator/src/ssa_refactor/opt/mod.rs b/crates/noirc_evaluator/src/ssa/opt/mod.rs
similarity index 100%
rename from crates/noirc_evaluator/src/ssa_refactor/opt/mod.rs
rename to crates/noirc_evaluator/src/ssa/opt/mod.rs
diff --git a/crates/noirc_evaluator/src/ssa_refactor/opt/simplify_cfg.rs b/crates/noirc_evaluator/src/ssa/opt/simplify_cfg.rs
similarity index 99%
rename from crates/noirc_evaluator/src/ssa_refactor/opt/simplify_cfg.rs
rename to crates/noirc_evaluator/src/ssa/opt/simplify_cfg.rs
index 22991e38b94..58259cec90c 100644
--- a/crates/noirc_evaluator/src/ssa_refactor/opt/simplify_cfg.rs
+++ b/crates/noirc_evaluator/src/ssa/opt/simplify_cfg.rs
@@ -11,7 +11,7 @@
 //! Currently, 1 and 4 are unimplemented.
 use std::collections::HashSet;
-use crate::ssa_refactor::{
+use crate::ssa::{
     ir::{
         basic_block::BasicBlockId, cfg::ControlFlowGraph, function::Function,
         instruction::TerminatorInstruction,
@@ -148,7 +148,7 @@ fn try_inline_into_predecessor(
 #[cfg(test)]
 mod test {
-    use crate::ssa_refactor::{
+    use crate::ssa::{
         ir::{
             function::RuntimeType,
             instruction::{BinaryOp, TerminatorInstruction},
diff --git a/crates/noirc_evaluator/src/ssa_refactor/opt/unrolling.rs b/crates/noirc_evaluator/src/ssa/opt/unrolling.rs
similarity index 99%
rename from crates/noirc_evaluator/src/ssa_refactor/opt/unrolling.rs
rename to crates/noirc_evaluator/src/ssa/opt/unrolling.rs
index e5d7d6f0d5c..f6d7c952277 100644
--- a/crates/noirc_evaluator/src/ssa_refactor/opt/unrolling.rs
+++ b/crates/noirc_evaluator/src/ssa/opt/unrolling.rs
@@ -14,7 +14,7 @@
 //! program that will need to be removed by a later simplify cfg pass.
 use std::collections::{HashMap, HashSet};
-use crate::ssa_refactor::{
+use crate::ssa::{
     ir::{
         basic_block::BasicBlockId, cfg::ControlFlowGraph, dfg::DataFlowGraph,
         dom::DominatorTree, function::Function, function_inserter::FunctionInserter,
@@ -424,7 +424,7 @@ impl<'f> LoopIteration<'f> {
 #[cfg(test)]
 mod tests {
-    use crate::ssa_refactor::{
+    use crate::ssa::{
         ir::{function::RuntimeType, instruction::BinaryOp, map::Id, types::Type},
         ssa_builder::FunctionBuilder,
     };
diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs b/crates/noirc_evaluator/src/ssa/ssa_builder/mod.rs
similarity index 99%
rename from crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs
rename to crates/noirc_evaluator/src/ssa/ssa_builder/mod.rs
index 02350d9ed17..066b5b51199 100644
--- a/crates/noirc_evaluator/src/ssa_refactor/ssa_builder/mod.rs
+++ b/crates/noirc_evaluator/src/ssa/ssa_builder/mod.rs
@@ -3,7 +3,7 @@ use std::borrow::Cow;
 use acvm::FieldElement;
 use noirc_errors::Location;
-use crate::ssa_refactor::ir::{
+use crate::ssa::ir::{
     basic_block::BasicBlockId,
     function::{Function, FunctionId},
     instruction::{Binary, BinaryOp, Instruction, TerminatorInstruction},
@@ -363,7 +363,7 @@ mod tests {
     use acvm::FieldElement;
-    use crate::ssa_refactor::ir::{
+    use crate::ssa::ir::{
         function::RuntimeType,
         instruction::{Endian, Intrinsic},
         map::Id,
diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs b/crates/noirc_evaluator/src/ssa/ssa_gen/context.rs
similarity index 98%
rename from crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs
rename to crates/noirc_evaluator/src/ssa/ssa_gen/context.rs
index a526d93f85b..3e0bbff2a83 100644
--- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/context.rs
+++ b/crates/noirc_evaluator/src/ssa/ssa_gen/context.rs
@@ -9,14 +9,14 @@ use noirc_frontend::monomorphization::ast::{self, LocalId, Parameters};
 use noirc_frontend::monomorphization::ast::{FuncId, Program};
 use noirc_frontend::{BinaryOpKind, Signedness};
-use crate::ssa_refactor::ir::dfg::DataFlowGraph;
-use crate::ssa_refactor::ir::function::FunctionId as IrFunctionId;
-use crate::ssa_refactor::ir::function::{Function, RuntimeType};
-use crate::ssa_refactor::ir::instruction::{BinaryOp, Endian, Intrinsic};
-use crate::ssa_refactor::ir::map::AtomicCounter;
-use crate::ssa_refactor::ir::types::{NumericType, Type};
-use crate::ssa_refactor::ir::value::ValueId;
-use crate::ssa_refactor::ssa_builder::FunctionBuilder;
+use crate::ssa::ir::dfg::DataFlowGraph;
+use crate::ssa::ir::function::FunctionId as IrFunctionId;
+use crate::ssa::ir::function::{Function, RuntimeType};
+use crate::ssa::ir::instruction::{BinaryOp, Endian, Intrinsic};
+use crate::ssa::ir::map::AtomicCounter;
+use crate::ssa::ir::types::{NumericType, Type};
+use crate::ssa::ir::value::ValueId;
+use crate::ssa::ssa_builder::FunctionBuilder;
 use super::value::{Tree, Value, Values};
diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs b/crates/noirc_evaluator/src/ssa/ssa_gen/mod.rs
similarity index 100%
rename from crates/noirc_evaluator/src/ssa_refactor/ssa_gen/mod.rs
rename to crates/noirc_evaluator/src/ssa/ssa_gen/mod.rs
diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/program.rs b/crates/noirc_evaluator/src/ssa/ssa_gen/program.rs
similarity index 98%
rename from crates/noirc_evaluator/src/ssa_refactor/ssa_gen/program.rs
rename to crates/noirc_evaluator/src/ssa/ssa_gen/program.rs
index aec0e4262c8..509f778f3b0 100644
--- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/program.rs
+++ b/crates/noirc_evaluator/src/ssa/ssa_gen/program.rs
@@ -2,7 +2,7 @@ use std::{collections::BTreeMap, fmt::Display};
 use iter_extended::btree_map;
-use crate::ssa_refactor::ir::{
+use crate::ssa::ir::{
     function::{Function, FunctionId},
     map::AtomicCounter,
 };
diff --git a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs b/crates/noirc_evaluator/src/ssa/ssa_gen/value.rs
similarity index 98%
rename from crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs
rename to crates/noirc_evaluator/src/ssa/ssa_gen/value.rs
index 2d209635610..e7bb515465b 100644
--- a/crates/noirc_evaluator/src/ssa_refactor/ssa_gen/value.rs
+++ b/crates/noirc_evaluator/src/ssa/ssa_gen/value.rs
@@ -1,7 +1,7 @@
 use iter_extended::vecmap;
-use crate::ssa_refactor::ir::types::Type;
-use crate::ssa_refactor::ir::value::ValueId as IrValueId;
+use crate::ssa::ir::types::Type;
+use crate::ssa::ir::value::ValueId as IrValueId;
 use super::context::FunctionContext;