introduce ConstraintGraph, stop mutating constraints in place
Encapsulate the dependencies more cleanly.
nikomatsakis committed Jul 12, 2018
1 parent 8fa24bb commit d54e7e3
Showing 6 changed files with 76 additions and 94 deletions.
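At a glance, the commit replaces an intrusive linked list stored inside each constraint with a separate graph built after the constraint set is complete. The following is only a simplified sketch of the two layouts (the `locations` field and the `RegionVid`/`ConstraintIndex` newtypes from the real code are elided; see the diff below for the actual definitions):

// Before this commit: each constraint carried an intrusive `next` link, so
// threading the dependency lists required mutating constraints in place.
struct OutlivesConstraintBefore {
    sup: usize,          // RegionVid in the real code
    sub: usize,
    next: Option<usize>, // index of the next constraint with the same `sub`
}

// After this commit: constraints stay immutable, and the threading lives in a
// separate ConstraintGraph built once from the finished ConstraintSet.
struct OutlivesConstraintAfter {
    sup: usize,
    sub: usize,
}

struct ConstraintGraphSketch {
    first_constraints: Vec<Option<usize>>, // indexed by region: head of its list
    next_constraints: Vec<Option<usize>>,  // indexed by constraint: next entry on the list
}

Because the threading now lives outside the constraints, `ConstraintSet` no longer needs a mutable `link` step and the constraints themselves stay immutable after construction.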
86 changes: 43 additions & 43 deletions src/librustc_mir/borrow_check/nll/constraint_set.rs
@@ -32,39 +32,6 @@ impl ConstraintSet {
}
self.constraints.push(constraint);
}

/// Once all constraints have been added, `link()` is used to thread together the constraints
/// based on which constraints would be affected when a particular region changes. See the `next`
/// field of `OutlivesConstraint` for more details.
/// `link` returns a map that is needed later by `each_affected_by_dirty`.
pub fn link(&mut self, len: usize) -> IndexVec<RegionVid, Option<ConstraintIndex>> {
let mut map = IndexVec::from_elem_n(None, len);

for (idx, constraint) in self.constraints.iter_enumerated_mut().rev() {
let mut head = &mut map[constraint.sub];
debug_assert!(constraint.next.is_none());
constraint.next = *head;
*head = Some(idx);
}

map
}

/// When a region R1 changes, we need to reprocess all constraints R2: R1 to take into account
/// any new elements that R1 now has. This method will quickly enumerate all such constraints
/// (that is, constraints where R1 is in the "subregion" position).
/// To use it, invoke with `map[R1]` where map is the map returned by `link`;
/// the callback op will be invoked for each affected constraint.
pub fn each_affected_by_dirty(
&self,
mut opt_dep_idx: Option<ConstraintIndex>,
mut op: impl FnMut(ConstraintIndex),
) {
while let Some(dep_idx) = opt_dep_idx {
op(dep_idx);
opt_dep_idx = self.constraints[dep_idx].next;
}
}
}

impl Deref for ConstraintSet {
@@ -85,16 +52,6 @@ pub struct OutlivesConstraint {
/// Region that must be outlived.
pub sub: RegionVid,

/// Later on, we thread the constraints onto a linked list
/// grouped by their `sub` field. So if you had:
///
/// Index | Constraint | Next Field
/// ----- | ---------- | ----------
/// 0 | `'a: 'b` | Some(2)
/// 1 | `'b: 'c` | None
/// 2 | `'c: 'b` | None
pub next: Option<ConstraintIndex>,

/// Where did this constraint arise?
pub locations: Locations,
}
@@ -110,3 +67,46 @@ impl fmt::Debug for OutlivesConstraint {
}

newtype_index!(ConstraintIndex { DEBUG_FORMAT = "ConstraintIndex({})" });

crate struct ConstraintGraph {
first_constraints: IndexVec<RegionVid, Option<ConstraintIndex>>,
next_constraints: IndexVec<ConstraintIndex, Option<ConstraintIndex>>,
}

impl ConstraintGraph {
    /// Constructs a graph where each region constraint `R1: R2` is
/// treated as an edge `R2 -> R1`. This is useful for cheaply
/// finding dirty constraints.
crate fn new(set: &ConstraintSet, num_region_vars: usize) -> Self {
let mut first_constraints = IndexVec::from_elem_n(None, num_region_vars);
let mut next_constraints = IndexVec::from_elem(None, &set.constraints);

for (idx, constraint) in set.constraints.iter_enumerated().rev() {
let mut head = &mut first_constraints[constraint.sub];
let mut next = &mut next_constraints[idx];
debug_assert!(next.is_none());
*next = *head;
*head = Some(idx);
}

ConstraintGraph { first_constraints, next_constraints }
}

    /// Invokes `op` with the index of any constraints of the form
    /// `region_sup: region_sub`. These are the constraints that must
    /// be reprocessed when the value of `region_sub` changes. If you
    /// think of each constraint `R1: R2` as an edge `R2 -> R1`, then
    /// this gives the set of successors of `region_sub`.
crate fn for_each_dependent(
&self,
region_sub: RegionVid,
mut op: impl FnMut(ConstraintIndex),
) {
let mut p = self.first_constraints[region_sub];
while let Some(dep_idx) = p {
op(dep_idx);
p = self.next_constraints[dep_idx];
}
}
}
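
As a standalone illustration of the threading that `new` performs and the walk in `for_each_dependent`, here is a minimal sketch using plain `Vec<Option<usize>>` instead of the compiler's `IndexVec` newtypes; the function and variable names are invented for the example and are not rustc API:

// Minimal sketch of the first/next threading used by ConstraintGraph,
// with plain indices instead of RegionVid / ConstraintIndex.
fn build_graph(sub_of: &[usize], num_regions: usize)
    -> (Vec<Option<usize>>, Vec<Option<usize>>)
{
    let mut first = vec![None; num_regions]; // plays the role of first_constraints
    let mut next = vec![None; sub_of.len()]; // plays the role of next_constraints
    // Reverse iteration makes each list come out in ascending index order.
    for (idx, &sub) in sub_of.iter().enumerate().rev() {
        next[idx] = first[sub];
        first[sub] = Some(idx);
    }
    (first, next)
}

fn for_each_dependent(
    first: &[Option<usize>],
    next: &[Option<usize>],
    region_sub: usize,
    mut op: impl FnMut(usize),
) {
    let mut p = first[region_sub];
    while let Some(idx) = p {
        op(idx);
        p = next[idx];
    }
}

fn main() {
    // Constraints 0: `'a: 'b`, 1: `'b: 'c`, 2: `'c: 'b`, with 'a = 0, 'b = 1, 'c = 2.
    // Constraints 0 and 2 both have sub = 'b, so they end up on the same list.
    let sub_of: [usize; 3] = [1, 2, 1];
    let (first, next) = build_graph(&sub_of, 3);
    assert_eq!(first[1], Some(0)); // the list for 'b starts at constraint 0
    assert_eq!(next[0], Some(2));  // ... and continues with constraint 2
    let mut visited = Vec::new();
    for_each_dependent(&first, &next, 1, |idx| visited.push(idx));
    assert_eq!(visited, vec![0, 2]); // all constraints with 'b on the right-hand side
}

The expected results mirror the table in the removed `next`-field comment: the list for `'b` starts at constraint 0, which links to constraint 2, while constraints 1 and 2 terminate their lists.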

1 change: 0 additions & 1 deletion src/librustc_mir/borrow_check/nll/region_infer/dump_mir.rs
@@ -83,7 +83,6 @@ impl<'tcx> RegionInferenceContext<'tcx> {
sup,
sub,
locations,
next: _,
} = constraint;
with_msg(&format!(
"{:?}: {:?} due to {:?}",
@@ -89,8 +89,6 @@ impl<'tcx> RegionInferenceContext<'tcx> {
stack: &mut Vec<ConstraintIndex>,
results: &mut Vec<Vec<ConstraintIndex>>,
) {
let dependency_map = self.dependency_map.as_ref().unwrap();

// Check if we already visited this region.
if !visited.insert(current_region) {
return;
@@ -105,20 +103,19 @@ impl<'tcx> RegionInferenceContext<'tcx> {
return;
}

self.constraints
.each_affected_by_dirty(dependency_map[current_region], |constraint| {
assert_eq!(self.constraints[constraint].sub, current_region);
stack.push(constraint);
self.find_constraint_paths_between_regions_helper(
from_region,
self.constraints[constraint].sup,
target_test,
visited,
stack,
results,
);
stack.pop();
});
self.constraint_graph.for_each_dependent(current_region, |constraint| {
assert_eq!(self.constraints[constraint].sub, current_region);
stack.push(constraint);
self.find_constraint_paths_between_regions_helper(
from_region,
self.constraints[constraint].sup,
target_test,
visited,
stack,
results,
);
stack.pop();
});
}

/// This function will return true if a constraint is interesting and false if a constraint
52 changes: 20 additions & 32 deletions src/librustc_mir/borrow_check/nll/region_infer/mod.rs
@@ -9,7 +9,9 @@
// except according to those terms.

use super::universal_regions::UniversalRegions;
use borrow_check::nll::constraint_set::{ConstraintIndex, ConstraintSet, OutlivesConstraint};
use borrow_check::nll::constraint_set::{
ConstraintIndex, ConstraintGraph, ConstraintSet, OutlivesConstraint
};
use borrow_check::nll::type_check::Locations;
use rustc::hir::def_id::DefId;
use rustc::infer::canonical::QueryRegionConstraint;
@@ -57,16 +59,12 @@ pub struct RegionInferenceContext<'tcx> {
/// until `solve` is invoked.
inferred_values: Option<RegionValues>,

/// For each variable, stores the index of the first constraint
/// where that variable appears on the RHS. This is the start of a
/// 'linked list' threaded by the `next` field in `Constraint`.
///
    /// This map is built when values are inferred.
dependency_map: Option<IndexVec<RegionVid, Option<ConstraintIndex>>>,

/// The constraints we have accumulated and used during solving.
constraints: ConstraintSet,

/// The constraint-set, but organized by regions.
constraint_graph: ConstraintGraph,

/// Type constraints that we check after solving.
type_tests: Vec<TypeTest<'tcx>>,

@@ -203,27 +201,26 @@ impl<'tcx> RegionInferenceContext<'tcx> {
outlives_constraints: ConstraintSet,
type_tests: Vec<TypeTest<'tcx>>,
) -> Self {
// The `next` field should not yet have been initialized:
debug_assert!(outlives_constraints.iter().all(|c| c.next.is_none()));

let num_region_variables = var_infos.len();
let num_universal_regions = universal_regions.len();

let elements = &Rc::new(RegionValueElements::new(mir, num_universal_regions));

// Create a RegionDefinition for each inference variable.
let definitions = var_infos
let definitions: IndexVec<_, _> = var_infos
.into_iter()
.map(|info| RegionDefinition::new(info.origin))
.collect();

let constraint_graph = ConstraintGraph::new(&outlives_constraints, definitions.len());

let mut result = Self {
definitions,
elements: elements.clone(),
liveness_constraints: RegionValues::new(elements, num_region_variables),
inferred_values: None,
dependency_map: None,
constraints: outlives_constraints,
constraint_graph,
type_tests,
universal_regions,
};
@@ -392,7 +389,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
/// satisfied. Note that some values may grow **too** large to be
/// feasible, but we check this later.
fn propagate_constraints(&mut self, mir: &Mir<'tcx>) {
self.dependency_map = Some(self.build_dependency_map());
assert!(self.inferred_values.is_none());
let inferred_values = self.compute_region_values(mir);
self.inferred_values = Some(inferred_values);
}
@@ -409,8 +406,6 @@ impl<'tcx> RegionInferenceContext<'tcx> {
// constraints we have accumulated.
let mut inferred_values = self.liveness_constraints.clone();

let dependency_map = self.dependency_map.as_ref().unwrap();

// Constraints that may need to be repropagated (initially all):
let mut dirty_list: Vec<_> = self.constraints.indices().collect();

@@ -428,14 +423,15 @@ impl<'tcx> RegionInferenceContext<'tcx> {
debug!("propagate_constraints: sub={:?}", constraint.sub);
debug!("propagate_constraints: sup={:?}", constraint.sup);

self.constraints.each_affected_by_dirty(
dependency_map[constraint.sup],
|dep_idx| {
if clean_bit_vec.remove(dep_idx.index()) {
dirty_list.push(dep_idx);
}
},
);
// The region of `constraint.sup` changed, so find all
            // constraints of the form `R: constraint.sup` and
// enqueue them as dirty. We will have to reprocess
// them.
self.constraint_graph.for_each_dependent(constraint.sup, |dep_idx| {
if clean_bit_vec.remove(dep_idx.index()) {
dirty_list.push(dep_idx);
}
});
}

debug!("\n");
@@ -444,14 +440,6 @@ impl<'tcx> RegionInferenceContext<'tcx> {
inferred_values
}

/// Builds up a map from each region variable X to a vector with the
/// indices of constraints that need to be re-evaluated when X changes.
/// These are constraints like Y: X @ P -- so if X changed, we may
/// need to grow Y.
fn build_dependency_map(&mut self) -> IndexVec<RegionVid, Option<ConstraintIndex>> {
self.constraints.link(self.definitions.len())
}

/// Once regions have been propagated, this method is used to see
/// whether the "type tests" produced by typeck were satisfied;
/// type tests encode type-outlives relationships like `T:
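The dirty-list loop in `compute_region_values` above is a standard worklist fixed point: a stack of constraint indices plus a membership bit per constraint so each one is queued at most once, with dependents re-enqueued through the graph whenever a `sup` region grows. A hypothetical, simplified model of that loop (sets of `u32` stand in for `RegionValues`, plain slices for the graph; none of these names are the compiler's):

use std::collections::HashSet;

// Simplified propagation: constraint i requires value[sup_of[i]] to
// contain everything in value[sub_of[i]].
fn propagate(
    sup_of: &[usize],
    sub_of: &[usize],
    first: &[Option<usize>],      // like ConstraintGraph::first_constraints
    next: &[Option<usize>],       // like ConstraintGraph::next_constraints
    values: &mut [HashSet<u32>],  // one value set per region, grown in place
) {
    // Constraints that may need to be (re)propagated: initially all of them.
    let mut dirty_list: Vec<usize> = (0..sup_of.len()).collect();
    let mut queued = vec![true; sup_of.len()]; // inverse of the clean bit vector

    while let Some(idx) = dirty_list.pop() {
        queued[idx] = false;

        // Grow the sup region by everything in the sub region.
        let sub_value: Vec<u32> = values[sub_of[idx]].iter().copied().collect();
        let before = values[sup_of[idx]].len();
        values[sup_of[idx]].extend(sub_value);

        if values[sup_of[idx]].len() > before {
            // The sup region changed, so every constraint `R: sup` is dirty again.
            let mut p = first[sup_of[idx]];
            while let Some(dep) = p {
                if !queued[dep] {
                    queued[dep] = true;
                    dirty_list.push(dep);
                }
                p = next[dep];
            }
        }
    }
}

The `queued` flag plays the same role as the `clean_bit_vec.remove(dep_idx.index())` check in the real code: it keeps each constraint on the dirty list at most once at a time.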
@@ -186,7 +186,6 @@ impl<'a, 'gcx, 'tcx> ConstraintConversion<'a, 'gcx, 'tcx> {
locations: self.locations,
sub,
sup,
next: None,
});
}

1 change: 0 additions & 1 deletion src/librustc_mir/borrow_check/nll/type_check/mod.rs
@@ -1537,7 +1537,6 @@ impl<'a, 'gcx, 'tcx> TypeChecker<'a, 'gcx, 'tcx> {
sup: ref_region.to_region_vid(),
sub: borrow_region.to_region_vid(),
locations: location.boring(),
next: None,
});

if let Some(all_facts) = all_facts {
