Skip to content

Commit

Permalink
fix: Accurate tracking of slice capacities across blocks (#4240)
Browse files Browse the repository at this point in the history
# Description

## Problem*

Resolves #4202 and
#2446 (comment)
## Summary*

This PR brings back the `capacity_tracker` that was used by the fill
internal slices pass and for merging nested slices
(#3979). We now track the capacity
of slices as we insert instructions. The capacity tracker has also been
simplified as it no longer needs to handle nested slices.

The previous strategy checked for previously saved store instructions and
backtracked through the instructions to determine the capacity of a slice
when it came time to merge two slice values. This required an additional
`outer_block_stores` map, as `store_values` only tracks stores within
the current branch being inlined, and a `get_slice_length`
method that ultimately performed the recursive backtracking to determine the
slice length. With the capacity tracker we already have the slice length
by the time a slice merge is performed, allowing us to remove
`outer_block_stores` and `get_slice_length`.

Overall, the capacity tracker enables us to have more accurate tracking
of slice capacities and gives us a more general strategy for tracking a
slice capacity per instruction.

## Additional Context

## Documentation*

Check one:
- [X] No documentation needed.
- [ ] Documentation included in this PR.
- [ ] **[Exceptional Case]** Documentation to be submitted in a separate
PR.

# PR Checklist*

- [X] I have tested the changes locally.
- [X] I have formatted the changes with [Prettier](https://prettier.io/)
and/or `cargo fmt` on default settings.

---------

Co-authored-by: Tom French <15848336+TomAFrench@users.noreply.github.com>
  • Loading branch information
vezenovm and TomAFrench authored Feb 8, 2024
1 parent 0e07303 commit 7420dbb
Show file tree
Hide file tree
Showing 10 changed files with 275 additions and 148 deletions.
2 changes: 1 addition & 1 deletion compiler/noirc_evaluator/src/errors.rs
Original file line number Diff line number Diff line change
Expand Up @@ -177,7 +177,7 @@ impl RuntimeError {
_ => {
let message = self.to_string();
let location =
self.call_stack().back().expect("Expected RuntimeError to have a location");
self.call_stack().back().unwrap_or_else(|| panic!("Expected RuntimeError to have a location. Error message: {message}"));

Diagnostic::simple_error(message, String::new(), location.span)
}
Expand Down
6 changes: 3 additions & 3 deletions compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -679,11 +679,11 @@ impl Context {
instruction: InstructionId,
dfg: &DataFlowGraph,
index: ValueId,
array: ValueId,
array_id: ValueId,
store_value: Option<ValueId>,
) -> Result<bool, RuntimeError> {
let index_const = dfg.get_numeric_constant(index);
let value_type = dfg.type_of_value(array);
let value_type = dfg.type_of_value(array_id);
// Compiler sanity checks
assert!(
!value_type.is_nested_slice(),
Expand All @@ -693,7 +693,7 @@ impl Context {
unreachable!("ICE: expected array or slice type");
};

match self.convert_value(array, dfg) {
match self.convert_value(array_id, dfg) {
AcirValue::Var(acir_var, _) => {
return Err(RuntimeError::InternalError(InternalError::Unexpected {
expected: "an array value".to_string(),
Expand Down
9 changes: 8 additions & 1 deletion compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
use fxhash::FxHashMap as HashMap;
use std::{collections::VecDeque, rc::Rc};

use acvm::{acir::BlackBoxFunc, BlackBoxResolutionError, FieldElement};
Expand Down Expand Up @@ -318,6 +319,8 @@ fn simplify_slice_push_back(
for elem in &arguments[2..] {
slice.push_back(*elem);
}
let slice_size = slice.len();
let element_size = element_type.element_size();
let new_slice = dfg.make_array(slice, element_type);

let set_last_slice_value_instr =
Expand All @@ -326,7 +329,11 @@ fn simplify_slice_push_back(
.insert_instruction_and_results(set_last_slice_value_instr, block, None, call_stack)
.first();

let mut value_merger = ValueMerger::new(dfg, block, None, None);
let mut slice_sizes = HashMap::default();
slice_sizes.insert(set_last_slice_value, slice_size / element_size);
slice_sizes.insert(new_slice, slice_size / element_size);

let mut value_merger = ValueMerger::new(dfg, block, &mut slice_sizes);
let new_slice = value_merger.merge_values(
len_not_equals_capacity,
len_equals_capacity,
Expand Down
2 changes: 1 addition & 1 deletion compiler/noirc_evaluator/src/ssa/ir/types.rs
Original file line number Diff line number Diff line change
Expand Up @@ -120,7 +120,7 @@ impl Type {
}
Type::Slice(_) => true,
Type::Numeric(_) => false,
Type::Reference(_) => false,
Type::Reference(element) => element.contains_slice_element(),
Type::Function => false,
}
}
Expand Down
117 changes: 73 additions & 44 deletions compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs
Original file line number Diff line number Diff line change
Expand Up @@ -152,8 +152,10 @@ use crate::ssa::{
};

mod branch_analysis;
mod capacity_tracker;
pub(crate) mod value_merger;

use capacity_tracker::SliceCapacityTracker;
use value_merger::ValueMerger;

impl Ssa {
Expand Down Expand Up @@ -184,17 +186,6 @@ struct Context<'f> {
/// between inlining of branches.
store_values: HashMap<ValueId, Store>,

/// Maps an address to the old and new value of the element at that address
/// The difference between this map and store_values is that this stores
/// the old and new value of an element from the outer block whose jmpif
/// terminator is being flattened.
///
/// This map persists throughout the flattening process, where addresses
/// are overwritten as new stores are found. This overwriting is the desired behavior,
/// as we want the most update to date value to be stored at a given address as
/// we walk through blocks to flatten.
outer_block_stores: HashMap<ValueId, ValueId>,

/// Stores all allocations local to the current branch.
/// Since these branches are local to the current branch (ie. only defined within one branch of
/// an if expression), they should not be merged with their previous value or stored value in
Expand All @@ -209,6 +200,10 @@ struct Context<'f> {
/// condition. If we are under multiple conditions (a nested if), the topmost condition is
/// the most recent condition combined with all previous conditions via `And` instructions.
conditions: Vec<(BasicBlockId, ValueId)>,

/// Maps SSA array values with a slice type to their size.
/// This is maintained by appropriate calls to the `SliceCapacityTracker` and is used by the `ValueMerger`.
slice_sizes: HashMap<ValueId, usize>,
}

pub(crate) struct Store {
Expand Down Expand Up @@ -239,7 +234,7 @@ fn flatten_function_cfg(function: &mut Function) {
local_allocations: HashSet::new(),
branch_ends,
conditions: Vec::new(),
outer_block_stores: HashMap::default(),
slice_sizes: HashMap::default(),
};
context.flatten();
}
Expand All @@ -262,21 +257,18 @@ impl<'f> Context<'f> {
/// Returns the last block to be inlined. This is either the return block of the function or,
/// if self.conditions is not empty, the end block of the most recent condition.
fn handle_terminator(&mut self, block: BasicBlockId) -> BasicBlockId {
if let TerminatorInstruction::JmpIf { .. } =
self.inserter.function.dfg[block].unwrap_terminator()
{
// Find stores in the outer block and insert into the `outer_block_stores` map.
// Not using this map can lead to issues when attempting to merge slices.
// When inlining a branch end, only the then branch and the else branch are checked for stores.
// However, there are cases where we want to load a value that comes from the outer block
// that we are handling the terminator for here.
let instructions = self.inserter.function.dfg[block].instructions().to_vec();
for instruction in instructions {
let (instruction, _) = self.inserter.map_instruction(instruction);
if let Instruction::Store { address, value } = instruction {
self.outer_block_stores.insert(address, value);
}
}
// As we recursively flatten inner blocks, we need to track the slice information
// for the outer block before we start recursively inlining
let outer_block_instructions = self.inserter.function.dfg[block].instructions();
let mut capacity_tracker = SliceCapacityTracker::new(&self.inserter.function.dfg);
for instruction in outer_block_instructions {
let results = self.inserter.function.dfg.instruction_results(*instruction);
let instruction = &self.inserter.function.dfg[*instruction];
capacity_tracker.collect_slice_information(
instruction,
&mut self.slice_sizes,
results.to_vec(),
);
}

match self.inserter.function.dfg[block].unwrap_terminator() {
Expand Down Expand Up @@ -494,12 +486,16 @@ impl<'f> Context<'f> {
});

let block = self.inserter.function.entry_block();
let mut value_merger = ValueMerger::new(
&mut self.inserter.function.dfg,
block,
Some(&self.store_values),
Some(&self.outer_block_stores),
);

// Make sure we have tracked the slice capacities of any block arguments
let capacity_tracker = SliceCapacityTracker::new(&self.inserter.function.dfg);
for (then_arg, else_arg) in args.iter() {
capacity_tracker.compute_slice_capacity(*then_arg, &mut self.slice_sizes);
capacity_tracker.compute_slice_capacity(*else_arg, &mut self.slice_sizes);
}

let mut value_merger =
ValueMerger::new(&mut self.inserter.function.dfg, block, &mut self.slice_sizes);

// Cannot include this in the previous vecmap since it requires exclusive access to self
let args = vecmap(args, |(then_arg, else_arg)| {
Expand Down Expand Up @@ -538,18 +534,22 @@ impl<'f> Context<'f> {
}
}

// Most slice information is collected when instructions are inlined.
// We need to collect information on slice values here as we may possibly merge stores
// before any inlining occurs.
let capacity_tracker = SliceCapacityTracker::new(&self.inserter.function.dfg);
for (then_case, else_case, _) in new_map.values() {
capacity_tracker.compute_slice_capacity(*then_case, &mut self.slice_sizes);
capacity_tracker.compute_slice_capacity(*else_case, &mut self.slice_sizes);
}

let then_condition = then_branch.condition;
let else_condition = else_branch.condition;

let block = self.inserter.function.entry_block();

let mut value_merger = ValueMerger::new(
&mut self.inserter.function.dfg,
block,
Some(&self.store_values),
Some(&self.outer_block_stores),
);

let mut value_merger =
ValueMerger::new(&mut self.inserter.function.dfg, block, &mut self.slice_sizes);
// Merging must occur in a separate loop as we cannot borrow `self` as mutable while `value_merger` does
let mut new_values = HashMap::default();
for (address, (then_case, else_case, _)) in &new_map {
Expand All @@ -571,6 +571,16 @@ impl<'f> Context<'f> {
.insert(address, Store { old_value: *old_value, new_value: value });
}
}

// Collect any potential slice information on the stores we are merging
for (address, (_, _, _)) in &new_map {
let value = new_values[address];
let address = *address;
let instruction = Instruction::Store { address, value };

let mut capacity_tracker = SliceCapacityTracker::new(&self.inserter.function.dfg);
capacity_tracker.collect_slice_information(&instruction, &mut self.slice_sizes, vec![]);
}
}

fn remember_store(&mut self, address: ValueId, new_value: ValueId) {
Expand All @@ -579,8 +589,18 @@ impl<'f> Context<'f> {
store_value.new_value = new_value;
} else {
let load = Instruction::Load { address };

let load_type = Some(vec![self.inserter.function.dfg.type_of_value(new_value)]);
let old_value = self.insert_instruction_with_typevars(load, load_type).first();
let old_value =
self.insert_instruction_with_typevars(load.clone(), load_type).first();

// Need this or else we will be missing the previous value of a slice that we wish to merge
let mut capacity_tracker = SliceCapacityTracker::new(&self.inserter.function.dfg);
capacity_tracker.collect_slice_information(
&load,
&mut self.slice_sizes,
vec![old_value],
);

self.store_values.insert(address, Store { old_value, new_value });
}
Expand All @@ -602,8 +622,15 @@ impl<'f> Context<'f> {
// unnecessary, when removing it actually causes an aliasing/mutability error.
let instructions = self.inserter.function.dfg[destination].instructions().to_vec();

for instruction in instructions {
self.push_instruction(instruction);
for instruction in instructions.iter() {
let results = self.push_instruction(*instruction);
let (instruction, _) = self.inserter.map_instruction(*instruction);
let mut capacity_tracker = SliceCapacityTracker::new(&self.inserter.function.dfg);
capacity_tracker.collect_slice_information(
&instruction,
&mut self.slice_sizes,
results,
);
}

self.handle_terminator(destination)
Expand All @@ -615,7 +642,7 @@ impl<'f> Context<'f> {
/// As a result, the instruction that will be pushed will actually be a new instruction
/// with a different InstructionId from the original. The results of the given instruction
/// will also be mapped to the results of the new instruction.
fn push_instruction(&mut self, id: InstructionId) {
fn push_instruction(&mut self, id: InstructionId) -> Vec<ValueId> {
let (instruction, call_stack) = self.inserter.map_instruction(id);
let instruction = self.handle_instruction_side_effects(instruction, call_stack.clone());
let is_allocate = matches!(instruction, Instruction::Allocate);
Expand All @@ -628,6 +655,8 @@ impl<'f> Context<'f> {
if is_allocate {
self.local_allocations.insert(results.first());
}

results.results().into_owned()
}

/// If we are currently in a branch, we need to modify constrain instructions
Expand Down
Loading

0 comments on commit 7420dbb

Please sign in to comment.