Rollup of 7 pull requests #126002

Closed

wants to merge 23 commits
Changes from all commits (23 commits)
7cd732f  Avoid `mut` and simplify initialization of `TASK_QUEUE` (raoulstrackx, May 29, 2024)
8530285  rustc_span: Inline some hot functions (petrochenkov, Jun 2, 2024)
07dc3eb  Allow static mut definitions with #[linkage] (bjorn3, Jun 3, 2024)
e609c9b  Add unit tests for `Span::trim_start` (Zalathar, Jun 3, 2024)
df96cba  Add `Span::trim_end` (Zalathar, Jun 3, 2024)
9c931c0  coverage: Return a nested vector from initial span extraction (Zalathar, Jun 3, 2024)
464dee2  coverage: Build up initial spans by appending to a vector (Zalathar, Jun 3, 2024)
6d1557f  coverage: Use hole spans to carve up coverage spans into separate buc… (Zalathar, Jun 3, 2024)
c57a1d1  coverage: Remove hole-carving code from the main span refiner (Zalathar, Jun 2, 2024)
9b2e41a  Pass function for `Thread` as `Send` to `Thread::imp` (raoulstrackx, May 30, 2024)
b8c6008  Store `Task::p` as `dyn FnOnce() + Send` (raoulstrackx, May 30, 2024)
8db363c  Let compiler auto impl `Send` for `Task` (raoulstrackx, May 30, 2024)
8f677e8  bootstrap: implement new feature `bootstrap-self-test` (onur-ozkan, May 19, 2024)
5d26f58  Closures are recursively reachable (tmiasko, Jun 4, 2024)
ac96fa4  Use inline const instead of unsafe to construct arrays in `MaybeUnini… (kpreid, May 13, 2024)
ec8fa17  Use inline const instead of unsafe to implement `MaybeUninit::uninit_… (kpreid, May 13, 2024)
ae9aef2  Rollup merge of #125273 - onur-ozkan:bootstrap-self-test, r=albertlar… (fmease, Jun 4, 2024)
8237899  Rollup merge of #125800 - fortanix:raoul/rte-99-fix_mut_static_task_q… (fmease, Jun 4, 2024)
2073ac1  Rollup merge of #125903 - petrochenkov:upctxt3, r=nnethercote (fmease, Jun 4, 2024)
010525e  Rollup merge of #125920 - bjorn3:allow_static_mut_linkage_def, r=Urgau (fmease, Jun 4, 2024)
a208878  Rollup merge of #125921 - Zalathar:buckets, r=oli-obk (fmease, Jun 4, 2024)
b6117c6  Rollup merge of #125995 - kpreid:const-uninit-stable, r=Nilstrieb (fmease, Jun 4, 2024)
a82d14b  Rollup merge of #125996 - tmiasko:closure-recursively-reachable, r=ol… (fmease, Jun 4, 2024)
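
For context on the two `MaybeUninit` commits in the list above (their changes are not part of the diffs shown below), here is a hedged sketch of the pattern they describe: building an array of `MaybeUninit<T>` with an inline `const` block instead of `unsafe`. The function name and usage are invented for illustration; this is not the actual standard-library source.

```rust
use std::mem::MaybeUninit;

// Illustrative helper with a hypothetical name, not the real `MaybeUninit` API.
fn make_uninit_array<T, const N: usize>() -> [MaybeUninit<T>; N] {
    // Old pattern: `unsafe { MaybeUninit::<[MaybeUninit<T>; N]>::uninit().assume_init() }`.
    // New pattern: an inline `const` block as the array repeat operand; no `unsafe`
    // is needed because each `MaybeUninit<T>` is allowed to remain uninitialized.
    [const { MaybeUninit::uninit() }; N]
}

fn main() {
    let buffer: [MaybeUninit<u8>; 4] = make_uninit_array();
    assert_eq!(buffer.len(), 4);
}
```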
25 changes: 13 additions & 12 deletions compiler/rustc_codegen_ssa/src/codegen_attrs.rs
@@ -324,21 +324,22 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs {
         let linkage = Some(linkage_by_name(tcx, did, val.as_str()));
         if tcx.is_foreign_item(did) {
             codegen_fn_attrs.import_linkage = linkage;
+
+            if tcx.is_mutable_static(did.into()) {
+                let mut diag = tcx.dcx().struct_span_err(
+                    attr.span,
+                    "extern mutable statics are not allowed with `#[linkage]`",
+                );
+                diag.note(
+                    "marking the extern static mutable would allow changing which symbol \
+                     the static references rather than make the target of the symbol \
+                     mutable",
+                );
+                diag.emit();
+            }
         } else {
             codegen_fn_attrs.linkage = linkage;
         }
-        if tcx.is_mutable_static(did.into()) {
-            let mut diag = tcx.dcx().struct_span_err(
-                attr.span,
-                "mutable statics are not allowed with `#[linkage]`",
-            );
-            diag.note(
-                "making the static mutable would allow changing which symbol the \
-                 static references rather than make the target of the symbol \
-                 mutable",
-            );
-            diag.emit();
-        }
     }
 }
 sym::link_section => {
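
For context on the hunk above (from the "Allow static mut definitions with #[linkage]" commit): the mutable-static check now applies only to foreign items, so a `static mut` definition may carry `#[linkage]`, while an extern declaration still may not. A hedged sketch of both cases follows; the names and linkage kinds are invented, and `#![feature(linkage)]` (nightly only) is assumed.

```rust
#![feature(linkage)] // unstable feature, nightly only

// Now accepted: a mutable static *definition* with an explicit linkage.
#[linkage = "weak"]
static mut LOCAL_COUNTER: u32 = 0;

// Still rejected: an extern mutable static *declaration* with `#[linkage]`,
// because making it mutable would change which symbol the static references
// rather than make the target of the symbol mutable.
// error: extern mutable statics are not allowed with `#[linkage]`
extern "C" {
    #[linkage = "extern_weak"]
    static mut EXTERN_COUNTER: u32;
}

fn main() {}
```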
110 changes: 26 additions & 84 deletions compiler/rustc_mir_transform/src/coverage/spans.rs
@@ -20,37 +20,31 @@ pub(super) fn extract_refined_covspans(
basic_coverage_blocks: &CoverageGraph,
code_mappings: &mut impl Extend<mappings::CodeMapping>,
) {
let sorted_spans =
let sorted_span_buckets =
from_mir::mir_to_initial_sorted_coverage_spans(mir_body, hir_info, basic_coverage_blocks);
let coverage_spans = SpansRefiner::refine_sorted_spans(sorted_spans);
code_mappings.extend(coverage_spans.into_iter().map(|RefinedCovspan { bcb, span, .. }| {
// Each span produced by the generator represents an ordinary code region.
mappings::CodeMapping { span, bcb }
}));
for bucket in sorted_span_buckets {
let refined_spans = SpansRefiner::refine_sorted_spans(bucket);
code_mappings.extend(refined_spans.into_iter().map(|RefinedCovspan { span, bcb }| {
// Each span produced by the refiner represents an ordinary code region.
mappings::CodeMapping { span, bcb }
}));
}
}

#[derive(Debug)]
struct CurrCovspan {
span: Span,
bcb: BasicCoverageBlock,
is_hole: bool,
}

impl CurrCovspan {
fn new(span: Span, bcb: BasicCoverageBlock, is_hole: bool) -> Self {
Self { span, bcb, is_hole }
fn new(span: Span, bcb: BasicCoverageBlock) -> Self {
Self { span, bcb }
}

fn into_prev(self) -> PrevCovspan {
let Self { span, bcb, is_hole } = self;
PrevCovspan { span, bcb, merged_spans: vec![span], is_hole }
}

fn into_refined(self) -> RefinedCovspan {
// This is only called in cases where `curr` is a hole span that has
// been carved out of `prev`.
debug_assert!(self.is_hole);
self.into_prev().into_refined()
let Self { span, bcb } = self;
PrevCovspan { span, bcb, merged_spans: vec![span] }
}
}

@@ -61,12 +55,11 @@ struct PrevCovspan {
/// List of all the original spans from MIR that have been merged into this
/// span. Mainly used to precisely skip over gaps when truncating a span.
merged_spans: Vec<Span>,
is_hole: bool,
}

impl PrevCovspan {
fn is_mergeable(&self, other: &CurrCovspan) -> bool {
self.bcb == other.bcb && !self.is_hole && !other.is_hole
self.bcb == other.bcb
}

fn merge_from(&mut self, other: &CurrCovspan) {
@@ -84,27 +77,21 @@ impl PrevCovspan {
if self.merged_spans.is_empty() { None } else { Some(self.into_refined()) }
}

fn refined_copy(&self) -> RefinedCovspan {
let &Self { span, bcb, merged_spans: _, is_hole } = self;
RefinedCovspan { span, bcb, is_hole }
}

fn into_refined(self) -> RefinedCovspan {
// Even though we consume self, we can just reuse the copying impl.
self.refined_copy()
let Self { span, bcb, merged_spans: _ } = self;
RefinedCovspan { span, bcb }
}
}

#[derive(Debug)]
struct RefinedCovspan {
span: Span,
bcb: BasicCoverageBlock,
is_hole: bool,
}

impl RefinedCovspan {
fn is_mergeable(&self, other: &Self) -> bool {
self.bcb == other.bcb && !self.is_hole && !other.is_hole
self.bcb == other.bcb
}

fn merge_from(&mut self, other: &Self) {
@@ -119,8 +106,6 @@ impl RefinedCovspan {
/// * Remove duplicate source code coverage regions
/// * Merge spans that represent continuous (both in source code and control flow), non-branching
/// execution
/// * Carve out (leave uncovered) any "hole" spans that need to be left blank
/// (e.g. closures that will be counted by their own MIR body)
struct SpansRefiner {
/// The initial set of coverage spans, sorted by `Span` (`lo` and `hi`) and by relative
/// dominance between the `BasicCoverageBlock`s of equal `Span`s.
@@ -181,13 +166,6 @@ impl SpansRefiner {
);
let prev = self.take_prev().into_refined();
self.refined_spans.push(prev);
} else if prev.is_hole {
// drop any equal or overlapping span (`curr`) and keep `prev` to test again in the
// next iter
debug!(?prev, "prev (a hole) overlaps curr, so discarding curr");
self.take_curr(); // Discards curr.
} else if curr.is_hole {
self.carve_out_span_for_hole();
} else {
self.cutoff_prev_at_overlapping_curr();
}
@@ -211,9 +189,6 @@ impl SpansRefiner {
}
});

// Discard hole spans, since their purpose was to carve out chunks from
// other spans, but we don't want the holes themselves in the final mappings.
self.refined_spans.retain(|covspan| !covspan.is_hole);
self.refined_spans
}

@@ -249,50 +224,17 @@ impl SpansRefiner {
if let Some(curr) = self.some_curr.take() {
self.some_prev = Some(curr.into_prev());
}
while let Some(curr) = self.sorted_spans_iter.next() {
debug!("FOR curr={:?}", curr);
if let Some(prev) = &self.some_prev
&& prev.span.lo() > curr.span.lo()
{
// Skip curr because prev has already advanced beyond the end of curr.
// This can only happen if a prior iteration updated `prev` to skip past
// a region of code, such as skipping past a hole.
debug!(?prev, "prev.span starts after curr.span, so curr will be dropped");
} else {
self.some_curr = Some(CurrCovspan::new(curr.span, curr.bcb, curr.is_hole));
return true;
if let Some(SpanFromMir { span, bcb, .. }) = self.sorted_spans_iter.next() {
// This code only sees sorted spans after hole-carving, so there should
// be no way for `curr` to start before `prev`.
if let Some(prev) = &self.some_prev {
debug_assert!(prev.span.lo() <= span.lo());
}
}
false
}

/// If `prev`s span extends left of the hole (`curr`), carve out the hole's span from
/// `prev`'s span. Add the portion of the span to the left of the hole; and if the span
/// extends to the right of the hole, update `prev` to that portion of the span.
fn carve_out_span_for_hole(&mut self) {
let prev = self.prev();
let curr = self.curr();

let left_cutoff = curr.span.lo();
let right_cutoff = curr.span.hi();
let has_pre_hole_span = prev.span.lo() < right_cutoff;
let has_post_hole_span = prev.span.hi() > right_cutoff;

if has_pre_hole_span {
let mut pre_hole = prev.refined_copy();
pre_hole.span = pre_hole.span.with_hi(left_cutoff);
debug!(?pre_hole, "prev overlaps a hole; adding pre-hole span");
self.refined_spans.push(pre_hole);
}

if has_post_hole_span {
// Mutate `prev.span` to start after the hole (and discard curr).
self.prev_mut().span = self.prev().span.with_lo(right_cutoff);
debug!(prev=?self.prev(), "mutated prev to start after the hole");

// Prevent this curr from becoming prev.
let hole_covspan = self.take_curr().into_refined();
self.refined_spans.push(hole_covspan); // since self.prev() was already updated
self.some_curr = Some(CurrCovspan::new(span, bcb));
debug!(?self.some_prev, ?self.some_curr, "next_coverage_span");
true
} else {
false
}
}

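
The `spans.rs` changes above remove hole handling from the span refiner itself: per the commit messages, hole spans (such as closure bodies that are counted by their own MIR) are now used upstream to carve the initial spans into separate buckets, and each bucket is refined independently. As a rough illustration of that partitioning idea (not the compiler's actual code; `Span` here is a toy type), consider:

```rust
// Standalone sketch, not compiler code: partition sorted coverage spans into
// buckets separated by "hole" spans, so the refinement pass that runs on each
// bucket never needs to carve holes out of spans itself.
#[derive(Clone, Copy, Debug)]
struct Span {
    lo: u32,
    hi: u32,
}

// Assign each span (sorted by `lo`) to the bucket delimited by the holes
// (also sorted); spans overlapping a hole are assumed to have been trimmed
// or dropped beforehand.
fn bucket_by_holes(spans: &[Span], holes: &[Span]) -> Vec<Vec<Span>> {
    let mut buckets: Vec<Vec<Span>> = vec![Vec::new(); holes.len() + 1];
    for &span in spans {
        // Count how many holes end at or before this span's start; that count
        // is the index of the bucket the span belongs to.
        let idx = holes.iter().take_while(|hole| hole.hi <= span.lo).count();
        buckets[idx].push(span);
    }
    buckets
}

fn main() {
    let spans = [Span { lo: 0, hi: 4 }, Span { lo: 10, hi: 14 }, Span { lo: 20, hi: 24 }];
    let holes = [Span { lo: 5, hi: 9 }, Span { lo: 15, hi: 19 }];
    // Expect three buckets: before, between, and after the two holes.
    for (i, bucket) in bucket_by_holes(&spans, &holes).iter().enumerate() {
        println!("bucket {i}: {bucket:?}");
    }
}
```

Doing the partitioning once, before refinement, is what lets the refiner drop its `is_hole` bookkeeping and the `carve_out_span_for_hole` special case seen in the deleted code.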