Auto merge of rust-lang#68001 - Marwes:lexical_region_resolve, r=nikomatsakis

perf: Only search potentially changed constraints in lexical_region_resolve

Gives a big performance increase to the unicode_normalization benchmark in my testing.
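The idea: instead of rescanning every constraint on each iteration until a fixed point is reached, `expansion` records, per region variable, which `VarSubVar` constraints mention it, and only revisits those edges when a variable's value actually grows. The standalone sketch below shows the same worklist pattern on a toy lattice of `u32` values; the names (`propagate`, `edges_from`) and types are illustrative only and do not appear in the compiler.

// Illustrative sketch only -- not the compiler's code. Each variable holds a
// u32, and a constraint (a, b) requires value[b] >= value[a]. Rather than
// rescanning all constraints until nothing changes, index them per variable
// and only revisit edges leaving variables whose value just grew.
fn propagate(constraints: &[(usize, usize)], values: &mut [u32]) {
    // edges_from[a] lists the targets that must be rechecked when `a` changes.
    let mut edges_from = vec![Vec::new(); values.len()];
    let mut changed = Vec::new();

    // First pass: apply every constraint once and record the edges.
    for &(a, b) in constraints {
        if values[a] > values[b] {
            values[b] = values[a];
            changed.push(b);
        }
        edges_from[a].push(b);
    }

    // Worklist pass: only follow edges out of variables that just changed.
    while let Some(v) = changed.pop() {
        for &b in &edges_from[v] {
            if values[v] > values[b] {
                values[b] = values[v];
                changed.push(b);
            }
        }
    }
}

fn main() {
    // Chain 0 -> 1 -> 2 plus 3 -> 2; vars 0 and 3 start at 5 and 7.
    let mut values = [5, 0, 0, 7];
    propagate(&[(0, 1), (1, 2), (3, 2)], &mut values);
    assert_eq!(values, [5, 5, 7, 7]);
    println!("{:?}", values);
}

In the actual change (see the diff below) the per-variable lists store `(a_vid, b_vid)` pairs and are pruned with `retain` once the target variable has reached `'static` or an error value, so the worklist keeps shrinking as expansion converges.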
bors committed Jan 18, 2020
2 parents 35b5157 + 917eb18 commit 779f85b
Showing 1 changed file with 36 additions and 40 deletions.
76 changes: 36 additions & 40 deletions src/librustc/infer/lexical_region_resolve/mod.rs
@@ -19,7 +19,6 @@ use rustc_data_structures::graph::implementation::{
     Direction, Graph, NodeIndex, INCOMING, OUTGOING,
 };
 use rustc_hir::def_id::DefId;
-use rustc_index::bit_set::BitSet;
 use rustc_index::vec::{Idx, IndexVec};
 use rustc_span::Span;
 use std::fmt;
@@ -295,62 +294,59 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> {
     }
 
     fn expansion(&self, var_values: &mut LexicalRegionResolutions<'tcx>) {
-        let mut process_constraint = |constraint: &Constraint<'tcx>| {
-            let (a_region, b_vid, b_data, retain) = match *constraint {
+        let mut constraints = IndexVec::from_elem_n(Vec::new(), var_values.values.len());
+        let mut changes = Vec::new();
+        for constraint in self.data.constraints.keys() {
+            let (a_vid, a_region, b_vid, b_data) = match *constraint {
                 Constraint::RegSubVar(a_region, b_vid) => {
                     let b_data = var_values.value_mut(b_vid);
-                    (a_region, b_vid, b_data, false)
+                    (None, a_region, b_vid, b_data)
                 }
                 Constraint::VarSubVar(a_vid, b_vid) => match *var_values.value(a_vid) {
-                    VarValue::ErrorValue => return (false, false),
+                    VarValue::ErrorValue => continue,
                     VarValue::Value(a_region) => {
                         let b_data = var_values.value_mut(b_vid);
-                        let retain = match *b_data {
-                            VarValue::Value(ReStatic) | VarValue::ErrorValue => false,
-                            _ => true,
-                        };
-                        (a_region, b_vid, b_data, retain)
+                        (Some(a_vid), a_region, b_vid, b_data)
                     }
                 },
                 Constraint::RegSubReg(..) | Constraint::VarSubReg(..) => {
                     // These constraints are checked after expansion
                     // is done, in `collect_errors`.
-                    return (false, false);
+                    continue;
                 }
             };
-
-            let changed = self.expand_node(a_region, b_vid, b_data);
-            (changed, retain)
-        };
-
-        // Using bitsets to track the remaining elements is faster than using a
-        // `Vec` by itself (which requires removing elements, which requires
-        // element shuffling, which is slow).
-        let constraints: Vec<_> = self.data.constraints.keys().collect();
-        let mut live_indices: BitSet<usize> = BitSet::new_filled(constraints.len());
-        let mut killed_indices: BitSet<usize> = BitSet::new_empty(constraints.len());
-        let mut changed = true;
-        while changed {
-            changed = false;
-            for index in live_indices.iter() {
-                let constraint = constraints[index];
-                let (edge_changed, retain) = process_constraint(constraint);
-                changed |= edge_changed;
-                if !retain {
-                    let changed = killed_indices.insert(index);
-                    debug_assert!(changed);
+            if self.expand_node(a_region, b_vid, b_data) {
+                changes.push(b_vid);
+            }
+            if let Some(a_vid) = a_vid {
+                match *b_data {
+                    VarValue::Value(ReStatic) | VarValue::ErrorValue => (),
+                    _ => {
+                        constraints[a_vid].push((a_vid, b_vid));
+                        constraints[b_vid].push((a_vid, b_vid));
+                    }
                 }
             }
-            live_indices.subtract(&killed_indices);
         }
 
-        // We could clear `killed_indices` here, but we don't need to and
-        // it's cheaper not to.
+        while let Some(vid) = changes.pop() {
+            constraints[vid].retain(|&(a_vid, b_vid)| {
+                let a_region = match *var_values.value(a_vid) {
+                    VarValue::ErrorValue => return false,
+                    VarValue::Value(a_region) => a_region,
+                };
+                let b_data = var_values.value_mut(b_vid);
+                if self.expand_node(a_region, b_vid, b_data) {
+                    changes.push(b_vid);
+                }
+                match *b_data {
+                    VarValue::Value(ReStatic) | VarValue::ErrorValue => false,
+                    _ => true,
+                }
+            });
+        }
     }
 
-    // This function is very hot in some workloads. There's a single callsite
-    // so always inlining is ok even though it's large.
-    #[inline(always)]
     fn expand_node(
         &self,
         a_region: Region<'tcx>,
@@ -790,8 +786,8 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> {
             self.var_infos[node_idx].origin.span(),
             &format!(
                 "collect_error_for_expanding_node() could not find \
-                 error for var {:?} in universe {:?}, lower_bounds={:#?}, \
-                 upper_bounds={:#?}",
+                 error for var {:?} in universe {:?}, lower_bounds={:#?}, \
+                 upper_bounds={:#?}",
                 node_idx, node_universe, lower_bounds, upper_bounds
             ),
         );
