Rollup merge of rust-lang#65480 - nnethercote:rm-iterate_until_fixed_size, r=nikomatsakis

Speed up `LexicalResolver::expansion()`

A couple of improvements that speed up `unicode_normalization` by about 4%. The first commit was enabled by the improvements to `BitSet` iteration in rust-lang#65425.

r? @nikomatsakis
tmandry committed Oct 18, 2019
2 parents 7a49208 + d51fee0 commit 8cb6225
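
For readers skimming the diff below: the change removes the generic `iterate_until_fixed_point` helper (which pruned finished constraints with `SmallVec::retain`, shuffling the surviving elements on every pass) and inlines the fixed-point loop into `expansion()`, tracking which constraints are still live with a pair of bit sets. The following is a minimal, self-contained sketch of that pattern, not the compiler's code: `Vec<bool>` stands in for `rustc_index::bit_set::BitSet`, and `Constraint`, `expand_to_fixed_point`, and the `u32` value domain are illustrative names only.

```rust
// Sketch of the two-set fixed-point loop: process constraints repeatedly
// until nothing changes, and retire constraints that can never fire again
// by flipping bits instead of compacting a Vec.

#[derive(Clone, Copy)]
enum Constraint {
    /// values[var] must be at least this constant (akin to RegSubVar).
    ConstSubVar(u32, usize),
    /// values[b] must be at least values[a] (akin to VarSubVar).
    VarSubVar(usize, usize),
}

fn expand_to_fixed_point(constraints: &[Constraint], values: &mut [u32]) {
    // All constraints start live; `killed` accumulates retired indices.
    let mut live = vec![true; constraints.len()];
    let mut killed = vec![false; constraints.len()];

    let mut changed = true;
    while changed {
        changed = false;
        for (index, &constraint) in constraints.iter().enumerate() {
            if !live[index] {
                continue;
            }
            // Process one constraint, yielding (changed, retain).
            let (edge_changed, retain) = match constraint {
                Constraint::ConstSubVar(lo, var) => {
                    let grew = values[var] < lo;
                    values[var] = values[var].max(lo);
                    // A constant bound can never push the variable again.
                    (grew, false)
                }
                Constraint::VarSubVar(a, b) => {
                    let grew = values[b] < values[a];
                    values[b] = values[b].max(values[a]);
                    // `a` may still grow later, so keep this edge live.
                    (grew, true)
                }
            };
            if edge_changed {
                changed = true;
            }
            if !retain {
                killed[index] = true;
            }
        }
        // live -= killed, without ever shuffling `constraints` itself.
        for (l, k) in live.iter_mut().zip(&killed) {
            if *k {
                *l = false;
            }
        }
    }
}

fn main() {
    // v0 >= 4, v1 >= v0, v2 >= v1: needs more than one pass to settle.
    let constraints = [
        Constraint::VarSubVar(1, 2),
        Constraint::VarSubVar(0, 1),
        Constraint::ConstSubVar(4, 0),
    ];
    let mut values = [0u32; 3];
    expand_to_fixed_point(&constraints, &mut values);
    assert_eq!(values, [4, 4, 4]);
}
```

The real code iterates the live bit set directly and reuses the killed set across passes; the sketch folds that into plain bool vectors for brevity.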
Showing 1 changed file with 29 additions and 27 deletions.
56 changes: 29 additions & 27 deletions src/librustc/infer/lexical_region_resolve/mod.rs
@@ -19,8 +19,8 @@ use rustc_data_structures::fx::FxHashSet;
 use rustc_data_structures::graph::implementation::{
     Direction, Graph, NodeIndex, INCOMING, OUTGOING,
 };
+use rustc_index::bit_set::BitSet;
 use rustc_index::vec::{Idx, IndexVec};
-use smallvec::SmallVec;
 use std::fmt;
 use syntax_pos::Span;

@@ -304,8 +304,7 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> {
     }
 
     fn expansion(&self, var_values: &mut LexicalRegionResolutions<'tcx>) {
-        self.iterate_until_fixed_point(|constraint| {
-            debug!("expansion: constraint={:?}", constraint);
+        let mut process_constraint = |constraint: &Constraint<'tcx>| {
             let (a_region, b_vid, b_data, retain) = match *constraint {
                 Constraint::RegSubVar(a_region, b_vid) => {
                     let b_data = var_values.value_mut(b_vid);
@@ -331,7 +330,33 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> {
 
             let changed = self.expand_node(a_region, b_vid, b_data);
             (changed, retain)
-        })
+        };
+
+        // Using bitsets to track the remaining elements is faster than using a
+        // `Vec` by itself (which requires removing elements, which requires
+        // element shuffling, which is slow).
+        let constraints: Vec<_> = self.data.constraints.keys().collect();
+        let mut live_indices: BitSet<usize> = BitSet::new_filled(constraints.len());
+        let mut killed_indices: BitSet<usize> = BitSet::new_empty(constraints.len());
+        let mut changed = true;
+        while changed {
+            changed = false;
+            for index in live_indices.iter() {
+                let constraint = constraints[index];
+                let (edge_changed, retain) = process_constraint(constraint);
+                if edge_changed {
+                    changed = true;
+                }
+                if !retain {
+                    let changed = killed_indices.insert(index);
+                    debug_assert!(changed);
+                }
+            }
+            live_indices.subtract(&killed_indices);
+
+            // We could clear `killed_indices` here, but we don't need to and
+            // it's cheaper not to.
+        }
     }
 
     // This function is very hot in some workloads. There's a single callsite
@@ -866,29 +891,6 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> {
         }
     }
 
-    fn iterate_until_fixed_point<F>(&self, mut body: F)
-    where
-        F: FnMut(&Constraint<'tcx>) -> (bool, bool),
-    {
-        let mut constraints: SmallVec<[_; 16]> = self.data.constraints.keys().collect();
-        let mut iteration = 0;
-        let mut changed = true;
-        while changed {
-            changed = false;
-            iteration += 1;
-            debug!("---- Expansion iteration {}", iteration);
-            constraints.retain(|constraint| {
-                let (edge_changed, retain) = body(constraint);
-                if edge_changed {
-                    debug!("updated due to constraint {:?}", constraint);
-                    changed = true;
-                }
-                retain
-            });
-        }
-        debug!("---- Expansion complete after {} iteration(s)", iteration);
-    }
-
     fn bound_is_met(
         &self,
         bound: &VerifyBound<'tcx>,
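
One detail worth noting in the new loop: retired constraints are recorded in a separate `killed_indices` set and only subtracted from `live_indices` once the inner `for` finishes, because the live set cannot be shrunk while it is being iterated. Leaving `killed_indices` uncleared between passes is also safe, since subtracting an index that is already dead changes nothing. A small sketch of that idempotence, again with `Vec<bool>` standing in for `BitSet` (illustrative code, not the compiler's):

```rust
// Clear every live bit whose index is marked killed.
fn subtract(live: &mut [bool], killed: &[bool]) {
    for (l, k) in live.iter_mut().zip(killed) {
        if *k {
            *l = false;
        }
    }
}

fn main() {
    let mut live = vec![true; 4];
    let mut killed = vec![false; 4];

    // Pass 1 retires constraint 2.
    killed[2] = true;
    subtract(&mut live, &killed);
    assert_eq!(live, [true, true, false, true]);

    // Pass 2 retires constraint 0 but never clears `killed`; re-subtracting
    // the stale index 2 changes nothing, so skipping the clear is safe.
    killed[0] = true;
    subtract(&mut live, &killed);
    assert_eq!(live, [false, true, false, true]);
}
```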
