Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
81 changes: 51 additions & 30 deletions compiler/rustc_borrowck/src/handle_placeholders.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,8 @@
//! (with placeholders and universes) and turn them into regular
//! outlives constraints.
use rustc_data_structures::frozen::Frozen;
use rustc_data_structures::fx::FxIndexMap;
use rustc_data_structures::graph::scc;
use rustc_data_structures::graph::scc::Sccs;
use rustc_data_structures::fx::{FxHashSet, FxIndexMap};
use rustc_data_structures::graph::scc::{self, Annotation, Sccs};
use rustc_index::IndexVec;
use rustc_infer::infer::RegionVariableOrigin;
use rustc_middle::mir::ConstraintCategory;
Expand Down Expand Up @@ -280,31 +279,17 @@ pub(crate) fn compute_sccs_applying_placeholder_outlives_constraints<'tcx>(
}
debug!("Placeholders present; activating placeholder handling logic!");

let added_constraints = rewrite_placeholder_outlives(
&constraint_sccs,
&scc_annotations,
let (constraint_sccs, scc_annotations) = rewrite_placeholder_outlives(
constraint_sccs,
scc_annotations,
fr_static,
&mut outlives_constraints,
);

let (constraint_sccs, scc_annotations) = if added_constraints {
let mut annotations = SccAnnotations::init(&definitions);

// We changed the constraint set and so must recompute SCCs.
// Optimisation opportunity: if we can add them incrementally (and that's
// possible because edges to 'static always only merge SCCs into 'static),
// we would potentially save a lot of work here.
(compute_sccs(&outlives_constraints, &mut annotations), annotations.scc_to_annotation)
} else {
// If we didn't add any back-edges; no more work needs doing
debug!("No constraints rewritten!");
(constraint_sccs, scc_annotations.scc_to_annotation)
};

LoweredConstraints {
constraint_sccs,
definitions,
scc_annotations,
definitions,
outlives_constraints: Frozen::freeze(outlives_constraints),
type_tests,
liveness_constraints,
Expand All @@ -314,16 +299,14 @@ pub(crate) fn compute_sccs_applying_placeholder_outlives_constraints<'tcx>(
}

pub(crate) fn rewrite_placeholder_outlives<'tcx>(
sccs: &Sccs<RegionVid, ConstraintSccIndex>,
annotations: &SccAnnotations<'_, '_, RegionTracker>,
sccs: Sccs<RegionVid, ConstraintSccIndex>,
annotations: SccAnnotations<'_, '_, RegionTracker>,
fr_static: RegionVid,
outlives_constraints: &mut OutlivesConstraintSet<'tcx>,
) -> bool {
// Changed to `true` if we added any constraints and need to
// recompute SCCs.
let mut added_constraints = false;
) -> (Sccs<RegionVid, ConstraintSccIndex>, IndexVec<ConstraintSccIndex, RegionTracker>) {
let mut forced_to_outlive_static = FxHashSet::default();

let annotations = &annotations.scc_to_annotation;
let annotations = annotations.scc_to_annotation;

for scc in sccs.all_sccs() {
// No point in adding 'static: 'static!
Expand Down Expand Up @@ -371,7 +354,8 @@ pub(crate) fn rewrite_placeholder_outlives<'tcx>(
// FIXME: if we can extract a useful blame span here, future error
// reporting and constraint search can be simplified.

added_constraints = true;
forced_to_outlive_static.insert(scc);

outlives_constraints.push(OutlivesConstraint {
sup: annotation.representative.rvid(),
sub: fr_static,
Expand All @@ -382,5 +366,42 @@ pub(crate) fn rewrite_placeholder_outlives<'tcx>(
from_closure: false,
});
}
added_constraints

if forced_to_outlive_static.is_empty() {
debug!("Added no `: 'static`s that mattered; reusing old SCCs!");
return (sccs, annotations);
}

// Now we need to fix the SCC annotations and SCC graph to match
// the outlives graph we modified.

let static_scc = sccs.scc(fr_static);

let mut scc_to_annotation: IndexVec<ConstraintSccIndex, RegionTracker> = IndexVec::new();

// Now we need to update the SCCs to account for the new constraints.
let new_sccs = sccs.merge_into(
forced_to_outlive_static,
static_scc,
|old_scc, new_scc, forced_to_outlive_static| {
debug!("Mapping {old_scc:?} to {new_scc:?}");
let annotation_idx = scc_to_annotation.push(annotations[old_scc]);
assert_eq!(annotation_idx, new_scc, "Annotation SCC indices out of step!");

// Merge the annotations for the SCCs 'static just ate.
// This only executes once, since we only add 'static (and indeed any SCC) once.
// Since 'static is the final step, we know that we also have the full list
// of merged SCCs in `forced_to_outlive_static`.
if old_scc == static_scc {
// This instability does not matter since the merging isn't order-dependent.
#[allow(rustc::potential_query_instability)]
for merged_old_scc in forced_to_outlive_static.iter() {
let merged_annotation = annotations[*merged_old_scc];
scc_to_annotation[new_scc].update_scc(&merged_annotation);
}
}
},
);

(new_sccs, scc_to_annotation)
}
Original file line number Diff line number Diff line change
Expand Up @@ -57,21 +57,15 @@ impl<'a, 'tcx> RegionCtxt<'a, 'tcx> {
};

let mut scc_annotations = SccAnnotations::init(&definitions);
let mut constraint_sccs = compute_sccs(&outlives_constraints, &mut scc_annotations);

let added_constraints = crate::handle_placeholders::rewrite_placeholder_outlives(
&constraint_sccs,
&scc_annotations,
universal_regions.fr_static,
&mut outlives_constraints,
);

if added_constraints {
scc_annotations = SccAnnotations::init(&definitions);
constraint_sccs = compute_sccs(&outlives_constraints, &mut scc_annotations);
}

let scc_annotations = scc_annotations.scc_to_annotation;
let constraint_sccs = compute_sccs(&outlives_constraints, &mut scc_annotations);

let (constraint_sccs, scc_annotations) =
crate::handle_placeholders::rewrite_placeholder_outlives(
constraint_sccs,
scc_annotations,
universal_regions.fr_static,
&mut outlives_constraints,
);

// Unlike the `RegionInferenceContext`, we only care about free regions
// and fully ignore liveness and placeholders.
Expand Down
108 changes: 108 additions & 0 deletions compiler/rustc_data_structures/src/graph/scc/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ use std::fmt::Debug;
use std::marker::PhantomData;
use std::ops::Range;

use rustc_hash::FxHashMap;
use rustc_index::{Idx, IndexSlice, IndexVec};
use tracing::{debug, instrument, trace};

Expand Down Expand Up @@ -157,6 +158,113 @@ impl<N: Idx, S: Idx + Ord> Sccs<N, S> {
.collect(),
)
}

/// Merge a bunch of SCCs `to_merge` into `predecessor`. `predecessor`
/// must be a source in the graph in the sense that it reaches every node in it.
/// In other words, the merging into `predecessor`'s SCC must not add
/// any new successors to it. This reindexes the SCCs (because some disappear)
/// and so consumes the instance.
///
/// This preserves dependency order since any merged SCC is not reachable from
/// any other SCC (so removing them preserves order), and the order of `predecessor`
/// does not change, since it must be last as it reaches everything.
///
/// This operation is mainly useful in borrowck, where `'static` is such a
/// predecessor in the outlives constraint graph.
///
/// Every time an SCC is constructed, `on_new_scc(old, new, &to_merge)` is called
/// with the old and new indices, and the current list of known SCCs merged. This
/// is meant to be used to update any [`Annotation`]s or other external structures
/// that depend on SCC indices.
#[instrument(skip(self, on_new_scc))]
pub fn merge_into<S2: Idx, F>(
    self,
    mut to_merge: FxHashSet<S>,
    predecessor: S,
    mut on_new_scc: F,
) -> Sccs<N, S2>
where
    F: FnMut(S, S2, &FxHashSet<S>),
{
    assert!(!to_merge.contains(&predecessor), "Can't merge {predecessor:?} into itself!");

    // Calling this method does not preserve dependency order if this
    // doesn't hold, since this is one of the properties we exploit
    // for a cheap merge. `predecessor` needs to precede *everything*.
    // (This constraint is stronger than it needs to be; this tests if it
    // precedes every SCC in one step. It would be fine if it reached them
    // in more than one step, but checking that requires a full graph search,
    // and the one-step check is good enough for 'static.)
    for scc in self.all_sccs() {
        debug_assert!(
            scc == predecessor || self.successors(predecessor).contains(&scc),
            "{predecessor:?} is not a source, does not precede {scc:?}!"
        );
    }

    let mut scc_details = IndexVec::new();
    let mut all_successors = Vec::new();

    let mut old_to_new_scc = FxHashMap::default();

    // Collect predecessors and translate unaffected nodes into their new IDs
    for old_scc in self.all_sccs() {
        if to_merge.contains(&old_scc) || old_scc == predecessor {
            // These are handled separately (below)
            continue;
        }

        // This requires two iterations over successors.
        // It's possible to optimise this by adding them
        // speculatively and then undoing that if we find one
        // that shouldn't be added, but that's complicated
        // and error prone and this shouldn't be too bad.
        if self.successors(old_scc).iter().any(|successor| to_merge.contains(successor)) {
            debug!("{old_scc:?} reaches a merged node; merge it into {predecessor:?}!");
            to_merge.insert(old_scc);
            continue;
        }

        // We now know `old_scc` is unaffected by the merge.
        let all_successors_start = all_successors.len();
        // NB: `expect` would not interpolate `{...}` placeholders in its
        // message (it takes a plain `&str`), so use `unwrap_or_else` +
        // `panic!` to get a useful diagnostic.
        all_successors.extend(self.successors(old_scc).iter().map(|old_successor| {
            *old_to_new_scc.get(old_successor).unwrap_or_else(|| {
                panic!(
                    "Iteration should have been in dependency order, so {old_successor:?} \
                     should have been visited and added before {old_scc:?}!"
                )
            })
        }));
        let all_successors_end = all_successors.len();
        let new_scc =
            scc_details.push(SccDetails { range: all_successors_start..all_successors_end });
        on_new_scc(old_scc, new_scc, &to_merge);
        old_to_new_scc.insert(old_scc, new_scc);
    }

    // Handle the merge; map the merged SCCs into `predecessor`,
    // and update `predecessor` itself.
    //
    // OPTIMISATION IDEA: we could avoid storing a list of every
    // SCC here if we used `predecessor` as its own successor for
    // a sentinel value. However, this would cause `successors()`
    // to return an iterator, which is a larger change.
    let all_successors_start = all_successors.len();

    // This SCC, by definition, precedes every SCC. Add them ALL.
    all_successors.extend(scc_details.indices()); // Notably, `scc_details` does not yet hold this SCC!
    let all_successors_end = all_successors.len();

    // Finally, add the SCC we merged into back.
    let new_predecessor =
        scc_details.push(SccDetails { range: all_successors_start..all_successors_end });
    on_new_scc(predecessor, new_predecessor, &to_merge);

    old_to_new_scc.insert(predecessor, new_predecessor);

    // All the merged SCCs become `new_predecessor`; they're gone.
    for merged_scc in to_merge.iter() {
        old_to_new_scc.insert(*merged_scc, new_predecessor);
    }

    debug!("The full mapping after reconstruction is: {old_to_new_scc:?}");

    // Finally, translate the mapping from graph nodes to SCCs to the new SCC indices.
    let scc_indices = self.scc_indices.into_iter().map(|s| old_to_new_scc[&s]).collect();
    Sccs { scc_indices, scc_data: SccData { scc_details, all_successors } }
}
}

impl<N: Idx, S: Idx + Ord> DirectedGraph for Sccs<N, S> {
Expand Down
Loading