Auto merge of rust-lang#114331 - matthiaskrgr:rollup-rnrmwcx, r=matthiaskrgr

Rollup of 7 pull requests

Successful merges:

 - rust-lang#100455 (Implement RefUnwindSafe for Backtrace)
 - rust-lang#113428 (coverage: Replace `ExpressionOperandId` with enum `Operand`)
 - rust-lang#114283 (Use parking lot's rwlock even without parallel-rustc)
 - rust-lang#114288 (Improve diagnostic for wrong borrow on binary operations)
 - rust-lang#114296 (interpret: fix alignment handling for Repeat expressions)
 - rust-lang#114306 ([rustc_data_structures][perf] Simplify base_n::push_str.)
 - rust-lang#114320 (Cover statements for stable_mir)

r? `@ghost`
`@rustbot` modify labels: rollup
bors committed Aug 1, 2023
2 parents 828bdc2 + 41364c7 commit 4896daa
Showing 30 changed files with 815 additions and 326 deletions.
10 changes: 4 additions & 6 deletions compiler/rustc_codegen_llvm/src/coverageinfo/ffi.rs
@@ -1,4 +1,4 @@
-use rustc_middle::mir::coverage::{CounterValueReference, MappedExpressionIndex};
+use rustc_middle::mir::coverage::{CounterId, MappedExpressionIndex};

/// Must match the layout of `LLVMRustCounterKind`.
#[derive(Copy, Clone, Debug)]
@@ -36,11 +36,9 @@ impl Counter {
Self { kind: CounterKind::Zero, id: 0 }
}

-    /// Constructs a new `Counter` of kind `CounterValueReference`, and converts
-    /// the given 1-based counter_id to the required 0-based equivalent for
-    /// the `Counter` encoding.
-    pub fn counter_value_reference(counter_id: CounterValueReference) -> Self {
-        Self { kind: CounterKind::CounterValueReference, id: counter_id.zero_based_index() }
+    /// Constructs a new `Counter` of kind `CounterValueReference`.
+    pub fn counter_value_reference(counter_id: CounterId) -> Self {
+        Self { kind: CounterKind::CounterValueReference, id: counter_id.as_u32() }
}

/// Constructs a new `Counter` of kind `Expression`.
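The hunk above is the crux of the ID change: the old `CounterValueReference` was 1-based (operand ID `0` was reserved to mean "zero"), so encoding a counter for LLVM required the `zero_based_index()` adjustment, while the new 0-based `CounterId` passes straight through as `as_u32()`. A minimal sketch of the two schemes, using hypothetical stand-in types rather than the compiler's own:

```rust
// A sketch with hypothetical stand-in types (not rustc's): why the 1-based
// scheme needed `zero_based_index()` while the 0-based one needs only `as_u32()`.

#[derive(Copy, Clone, Debug)]
struct OldCounterId(u32); // 1-based; 0 was reserved as the "zero" operand

impl OldCounterId {
    fn zero_based_index(self) -> u32 {
        // Adjust at the FFI boundary: LLVM counter slots are 0-based.
        self.0 - 1
    }
}

#[derive(Copy, Clone, Debug)]
struct NewCounterId(u32); // 0-based; "zero" is a separate Operand variant

impl NewCounterId {
    fn as_u32(self) -> u32 {
        self.0
    }
}

fn main() {
    // Both schemes encode the first physical counter as LLVM id 0.
    assert_eq!(OldCounterId(1).zero_based_index(), 0);
    assert_eq!(NewCounterId(0).as_u32(), 0);
}
```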
87 changes: 25 additions & 62 deletions compiler/rustc_codegen_llvm/src/coverageinfo/map_data.rs
@@ -3,24 +3,22 @@ pub use super::ffi::*;
use rustc_index::{IndexSlice, IndexVec};
use rustc_middle::bug;
use rustc_middle::mir::coverage::{
-    CodeRegion, CounterValueReference, ExpressionOperandId, InjectedExpressionId,
-    InjectedExpressionIndex, MappedExpressionIndex, Op,
+    CodeRegion, CounterId, ExpressionId, MappedExpressionIndex, Op, Operand,
};
use rustc_middle::ty::Instance;
use rustc_middle::ty::TyCtxt;

#[derive(Clone, Debug, PartialEq)]
pub struct Expression {
-    lhs: ExpressionOperandId,
+    lhs: Operand,
op: Op,
-    rhs: ExpressionOperandId,
+    rhs: Operand,
region: Option<CodeRegion>,
}

/// Collects all of the coverage regions associated with (a) injected counters, (b) counter
/// expressions (additions or subtraction), and (c) unreachable regions (always counted as zero),
-/// for a given Function. Counters and counter expressions have non-overlapping `id`s because they
-/// can both be operands in an expression. This struct also stores the `function_source_hash`,
+/// for a given Function. This struct also stores the `function_source_hash`,
/// computed during instrumentation, and forwarded with counters.
///
/// Note, it may be important to understand LLVM's definitions of `unreachable` regions versus "gap
@@ -34,8 +32,8 @@ pub struct FunctionCoverage<'tcx> {
instance: Instance<'tcx>,
source_hash: u64,
is_used: bool,
-    counters: IndexVec<CounterValueReference, Option<CodeRegion>>,
-    expressions: IndexVec<InjectedExpressionIndex, Option<Expression>>,
+    counters: IndexVec<CounterId, Option<CodeRegion>>,
+    expressions: IndexVec<ExpressionId, Option<Expression>>,
unreachable_regions: Vec<CodeRegion>,
}

@@ -82,48 +80,36 @@ impl<'tcx> FunctionCoverage<'tcx> {
}

/// Adds a code region to be counted by an injected counter intrinsic.
-    pub fn add_counter(&mut self, id: CounterValueReference, region: CodeRegion) {
+    pub fn add_counter(&mut self, id: CounterId, region: CodeRegion) {
if let Some(previous_region) = self.counters[id].replace(region.clone()) {
assert_eq!(previous_region, region, "add_counter: code region for id changed");
}
}

/// Both counters and "counter expressions" (or simply, "expressions") can be operands in other
-    /// expressions. Expression IDs start from `u32::MAX` and go down, so the range of expression
-    /// IDs will not overlap with the range of counter IDs. Counters and expressions can be added in
-    /// any order, and expressions can still be assigned contiguous (though descending) IDs, without
-    /// knowing what the last counter ID will be.
-    ///
-    /// When storing the expression data in the `expressions` vector in the `FunctionCoverage`
-    /// struct, its vector index is computed, from the given expression ID, by subtracting from
-    /// `u32::MAX`.
-    ///
-    /// Since the expression operands (`lhs` and `rhs`) can reference either counters or
-    /// expressions, an operand that references an expression also uses its original ID, descending
-    /// from `u32::MAX`. Theses operands are translated only during code generation, after all
-    /// counters and expressions have been added.
+    /// expressions. These are tracked as separate variants of `Operand`, so there is no ambiguity
+    /// between operands that are counter IDs and operands that are expression IDs.
pub fn add_counter_expression(
&mut self,
-        expression_id: InjectedExpressionId,
-        lhs: ExpressionOperandId,
+        expression_id: ExpressionId,
+        lhs: Operand,
op: Op,
-        rhs: ExpressionOperandId,
+        rhs: Operand,
region: Option<CodeRegion>,
) {
debug!(
"add_counter_expression({:?}, lhs={:?}, op={:?}, rhs={:?} at {:?}",
expression_id, lhs, op, rhs, region
);
-        let expression_index = self.expression_index(u32::from(expression_id));
        debug_assert!(
-            expression_index.as_usize() < self.expressions.len(),
-            "expression_index {} is out of range for expressions.len() = {}
+            expression_id.as_usize() < self.expressions.len(),
+            "expression_id {} is out of range for expressions.len() = {}
for {:?}",
-            expression_index.as_usize(),
+            expression_id.as_usize(),
self.expressions.len(),
self,
);
-        if let Some(previous_expression) = self.expressions[expression_index].replace(Expression {
+        if let Some(previous_expression) = self.expressions[expression_id].replace(Expression {
lhs,
op,
rhs,
@@ -186,14 +172,11 @@ impl<'tcx> FunctionCoverage<'tcx> {

// This closure converts any `Expression` operand (`lhs` or `rhs` of the `Op::Add` or
// `Op::Subtract` operation) into its native `llvm::coverage::Counter::CounterKind` type
-        // and value. Operand ID value `0` maps to `CounterKind::Zero`; values in the known range
-        // of injected LLVM counters map to `CounterKind::CounterValueReference` (and the value
-        // matches the injected counter index); and any other value is converted into a
-        // `CounterKind::Expression` with the expression's `new_index`.
+        // and value.
//
// Expressions will be returned from this function in a sequential vector (array) of
// `CounterExpression`, so the expression IDs must be mapped from their original,
// potentially sparse set of indexes, originally in reverse order from `u32::MAX`.
// potentially sparse set of indexes.
//
// An `Expression` as an operand will have already been encountered as an `Expression` with
// operands, so its new_index will already have been generated (as a 1-up index value).
@@ -206,34 +189,19 @@ impl<'tcx> FunctionCoverage<'tcx> {
// `expression_index`s lower than the referencing `Expression`. Therefore, it is
// reasonable to look up the new index of an expression operand while the `new_indexes`
// vector is only complete up to the current `ExpressionIndex`.
-        let id_to_counter = |new_indexes: &IndexSlice<
-            InjectedExpressionIndex,
-            Option<MappedExpressionIndex>,
-        >,
-                             id: ExpressionOperandId| {
-            if id == ExpressionOperandId::ZERO {
-                Some(Counter::zero())
-            } else if id.index() < self.counters.len() {
-                debug_assert!(
-                    id.index() > 0,
-                    "ExpressionOperandId indexes for counters are 1-based, but this id={}",
-                    id.index()
-                );
-                // Note: Some codegen-injected Counters may be only referenced by `Expression`s,
-                // and may not have their own `CodeRegion`s,
-                let index = CounterValueReference::from(id.index());
-                // Note, the conversion to LLVM `Counter` adjusts the index to be zero-based.
-                Some(Counter::counter_value_reference(index))
-            } else {
-                let index = self.expression_index(u32::from(id));
+        type NewIndexes = IndexSlice<ExpressionId, Option<MappedExpressionIndex>>;
+        let id_to_counter = |new_indexes: &NewIndexes, operand: Operand| match operand {
+            Operand::Zero => Some(Counter::zero()),
+            Operand::Counter(id) => Some(Counter::counter_value_reference(id)),
+            Operand::Expression(id) => {
self.expressions
-                    .get(index)
+                    .get(id)
.expect("expression id is out of range")
.as_ref()
// If an expression was optimized out, assume it would have produced a count
// of zero. This ensures that expressions dependent on optimized-out
// expressions are still valid.
-                    .map_or(Some(Counter::zero()), |_| new_indexes[index].map(Counter::expression))
+                    .map_or(Some(Counter::zero()), |_| new_indexes[id].map(Counter::expression))
}
};

@@ -340,9 +308,4 @@ impl<'tcx> FunctionCoverage<'tcx> {
fn unreachable_regions(&self) -> impl Iterator<Item = (Counter, &CodeRegion)> {
self.unreachable_regions.iter().map(|region| (Counter::zero(), region))
}

-    fn expression_index(&self, id_descending_from_max: u32) -> InjectedExpressionIndex {
-        debug_assert!(id_descending_from_max >= self.counters.len() as u32);
-        InjectedExpressionIndex::from(u32::MAX - id_descending_from_max)
-    }
}
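The deleted `expression_index` helper above was the last remnant of the old encoding, in which expression IDs counted down from `u32::MAX` so that counters and expressions could share a single `u32` operand namespace; the new `id_to_counter` match dispatches on explicit `Operand` variants instead. A sketch contrasting the two encodings, with simplified stand-ins for rustc's index types:

```rust
// A sketch, not rustc's real code: the old packed-u32 operand encoding
// versus the new explicit `Operand` enum introduced by this diff.

// Old scheme: one u32 namespace. 0 meant "zero"; 1..=num_counters were
// (1-based) counter IDs; expression IDs counted down from u32::MAX.
fn classify_old(id: u32, num_counters: u32) -> &'static str {
    if id == 0 {
        "zero"
    } else if id <= num_counters {
        "counter"
    } else {
        // the expression index was recovered as u32::MAX - id
        "expression"
    }
}

// New scheme: the variant itself carries the meaning, so no range
// arithmetic (and no reserved sentinel value) is needed.
#[derive(Copy, Clone, Debug)]
enum Operand {
    Zero,
    Counter(u32),    // stand-in for rustc's CounterId
    Expression(u32), // stand-in for rustc's ExpressionId
}

fn describe(operand: Operand) -> String {
    match operand {
        Operand::Zero => "zero".to_string(),
        Operand::Counter(id) => format!("counter #{id}"),
        Operand::Expression(id) => format!("expression #{id}"),
    }
}

fn main() {
    assert_eq!(classify_old(0, 4), "zero");
    assert_eq!(classify_old(3, 4), "counter");
    assert_eq!(classify_old(u32::MAX - 7, 4), "expression");
    assert_eq!(describe(Operand::Expression(7)), "expression #7");
}
```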
16 changes: 7 additions & 9 deletions compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs
@@ -16,9 +16,7 @@ use rustc_hir as hir;
use rustc_hir::def_id::DefId;
use rustc_llvm::RustString;
use rustc_middle::bug;
-use rustc_middle::mir::coverage::{
-    CodeRegion, CounterValueReference, CoverageKind, ExpressionOperandId, InjectedExpressionId, Op,
-};
+use rustc_middle::mir::coverage::{CodeRegion, CounterId, CoverageKind, ExpressionId, Op, Operand};
use rustc_middle::mir::Coverage;
use rustc_middle::ty;
use rustc_middle::ty::layout::{FnAbiOf, HasTyCtxt};
@@ -33,7 +31,7 @@ mod ffi;
pub(crate) mod map_data;
pub mod mapgen;

-const UNUSED_FUNCTION_COUNTER_ID: CounterValueReference = CounterValueReference::START;
+const UNUSED_FUNCTION_COUNTER_ID: CounterId = CounterId::START;

const VAR_ALIGN_BYTES: usize = 8;

@@ -125,7 +123,7 @@ impl<'tcx> CoverageInfoBuilderMethods<'tcx> for Builder<'_, '_, 'tcx> {
let fn_name = bx.get_pgo_func_name_var(instance);
let hash = bx.const_u64(function_source_hash);
let num_counters = bx.const_u32(coverageinfo.num_counters);
-        let index = bx.const_u32(id.zero_based_index());
+        let index = bx.const_u32(id.as_u32());
debug!(
"codegen intrinsic instrprof.increment(fn_name={:?}, hash={:?}, num_counters={:?}, index={:?})",
fn_name, hash, num_counters, index,
@@ -178,7 +176,7 @@ impl<'tcx> Builder<'_, '_, 'tcx> {
fn add_coverage_counter(
&mut self,
instance: Instance<'tcx>,
-        id: CounterValueReference,
+        id: CounterId,
region: CodeRegion,
) -> bool {
if let Some(coverage_context) = self.coverage_context() {
@@ -202,10 +200,10 @@ impl<'tcx> Builder<'_, '_, 'tcx> {
fn add_coverage_counter_expression(
&mut self,
instance: Instance<'tcx>,
-        id: InjectedExpressionId,
-        lhs: ExpressionOperandId,
+        id: ExpressionId,
+        lhs: Operand,
op: Op,
-        rhs: ExpressionOperandId,
+        rhs: Operand,
region: Option<CodeRegion>,
) -> bool {
if let Some(coverage_context) = self.coverage_context() {
8 changes: 4 additions & 4 deletions compiler/rustc_const_eval/src/interpret/step.rs
@@ -208,13 +208,13 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
let rest_ptr = first_ptr.offset(elem_size, self)?;
// For the alignment of `rest_ptr`, we crucially do *not* use `first.align` as
// that place might be more aligned than its type mandates (a `u8` array could
-            // be 4-aligned if it sits at the right spot in a struct). Instead we use
-            // `first.layout.align`, i.e., the alignment given by the type.
+            // be 4-aligned if it sits at the right spot in a struct). We have to also factor
+            // in element size.
self.mem_copy_repeatedly(
first_ptr,
-                first.align,
+                dest.align,
                rest_ptr,
-                first.layout.align.abi,
+                dest.align.restrict_for_offset(elem_size),
elem_size,
length - 1,
/*nonoverlapping:*/ true,
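The key to this fix is `restrict_for_offset`: a pointer known to have the destination's alignment is only guaranteed a weaker alignment once it is advanced by `elem_size`, so the copy covering the tail elements must not claim the full alignment. A toy model of that reasoning, assuming a simplified power-of-two representation rather than rustc's `Align` type:

```rust
// A toy model (simplified from rustc's `Align`) of the alignment reasoning:
// stepping a pointer by `elem_size` can only weaken its known alignment.

/// Largest power of two dividing `offset`; an offset of 0 imposes no limit.
fn max_align_for_offset(offset: u64) -> u64 {
    if offset == 0 { u64::MAX } else { 1 << offset.trailing_zeros() }
}

/// What `dest.align.restrict_for_offset(elem_size)` computes in spirit:
/// the alignment still guaranteed after advancing by `offset` bytes.
fn restrict_for_offset(align: u64, offset: u64) -> u64 {
    align.min(max_align_for_offset(offset))
}

fn main() {
    // An 8-aligned destination stepped in 2-byte elements: elements past the
    // first are only guaranteed 2-aligned, which is what the fix encodes.
    assert_eq!(restrict_for_offset(8, 2), 2);
    // If the element size is a multiple of the alignment, nothing is lost.
    assert_eq!(restrict_for_offset(8, 16), 8);
}
```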
7 changes: 3 additions & 4 deletions compiler/rustc_data_structures/src/base_n.rs
@@ -16,22 +16,21 @@ const BASE_64: &[u8; MAX_BASE] =
pub fn push_str(mut n: u128, base: usize, output: &mut String) {
debug_assert!(base >= 2 && base <= MAX_BASE);
let mut s = [0u8; 128];
-    let mut index = 0;
+    let mut index = s.len();

let base = base as u128;

loop {
+        index -= 1;
        s[index] = BASE_64[(n % base) as usize];
-        index += 1;
n /= base;

if n == 0 {
break;
}
}
-    s[0..index].reverse();

-    output.push_str(str::from_utf8(&s[0..index]).unwrap());
+    output.push_str(str::from_utf8(&s[index..]).unwrap());
}

#[inline]
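The simplification writes digits into the scratch buffer from the end, so they come out in the correct order and the old `reverse()` pass (along with the `s[0..index]` slicing) disappears. A self-contained sketch of the patched routine, with a hardcoded digit table standing in for rustc's `BASE_64`:

```rust
// A self-contained sketch of the patched routine; DIGITS is a stand-in for
// rustc's BASE_64 table (which supports bases up to 64).
const DIGITS: &[u8; 36] = b"0123456789abcdefghijklmnopqrstuvwxyz";

fn push_str(mut n: u128, base: usize, output: &mut String) {
    debug_assert!(base >= 2 && base <= DIGITS.len());
    // 128 bytes suffice: base 2 of a u128 needs at most 128 digits.
    let mut s = [0u8; 128];
    let mut index = s.len();
    let base = base as u128;

    // Fill right-to-left: the least significant digit lands at the end of
    // the buffer, so the final slice is already in the correct order.
    loop {
        index -= 1;
        s[index] = DIGITS[(n % base) as usize];
        n /= base;
        if n == 0 {
            break;
        }
    }
    output.push_str(std::str::from_utf8(&s[index..]).unwrap());
}

fn main() {
    let mut out = String::new();
    push_str(255, 16, &mut out);
    assert_eq!(out, "ff");
}
```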
28 changes: 7 additions & 21 deletions compiler/rustc_data_structures/src/sync/vec.rs
@@ -43,37 +43,23 @@ impl<I: Idx, T: Copy> AppendOnlyIndexVec<I, T> {

#[derive(Default)]
pub struct AppendOnlyVec<T: Copy> {
-    #[cfg(not(parallel_compiler))]
-    vec: elsa::vec::FrozenVec<T>,
-    #[cfg(parallel_compiler)]
-    vec: elsa::sync::LockFreeFrozenVec<T>,
+    vec: parking_lot::RwLock<Vec<T>>,
}

impl<T: Copy> AppendOnlyVec<T> {
pub fn new() -> Self {
-        Self {
-            #[cfg(not(parallel_compiler))]
-            vec: elsa::vec::FrozenVec::new(),
-            #[cfg(parallel_compiler)]
-            vec: elsa::sync::LockFreeFrozenVec::new(),
-        }
+        Self { vec: Default::default() }
}

pub fn push(&self, val: T) -> usize {
-        #[cfg(not(parallel_compiler))]
-        let i = self.vec.len();
-        #[cfg(not(parallel_compiler))]
-        self.vec.push(val);
-        #[cfg(parallel_compiler)]
-        let i = self.vec.push(val);
-        i
+        let mut v = self.vec.write();
+        let n = v.len();
+        v.push(val);
+        n
}

pub fn get(&self, i: usize) -> Option<T> {
-        #[cfg(not(parallel_compiler))]
-        return self.vec.get_copy(i);
-        #[cfg(parallel_compiler)]
-        return self.vec.get(i);
+        self.vec.read().get(i).copied()
}

pub fn iter_enumerated(&self) -> impl Iterator<Item = (usize, T)> + '_ {
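With `parking_lot::RwLock` standing in for both of elsa's frozen vectors, the `cfg(parallel_compiler)` split collapses: `push` takes the write lock to append and report the index, and `get` copies the element out under the read lock. A standalone sketch of the simplified type, assuming `parking_lot` as a dependency and omitting rustc's surrounding helpers:

```rust
// A standalone sketch of the simplified type (assumes `parking_lot = "0.12"`
// in Cargo.toml; rustc's surrounding traits and helpers are omitted).
use parking_lot::RwLock;

#[derive(Default)]
pub struct AppendOnlyVec<T: Copy> {
    vec: RwLock<Vec<T>>,
}

impl<T: Copy> AppendOnlyVec<T> {
    pub fn new() -> Self {
        Self { vec: Default::default() }
    }

    /// Appends under the write lock and returns the new element's index.
    pub fn push(&self, val: T) -> usize {
        let mut v = self.vec.write();
        let n = v.len();
        v.push(val);
        n
    }

    /// Copies the element out under the shared read lock, so no lock is
    /// held while the caller uses the value.
    pub fn get(&self, i: usize) -> Option<T> {
        self.vec.read().get(i).copied()
    }
}

fn main() {
    let v = AppendOnlyVec::new();
    assert_eq!(v.push(10), 0);
    assert_eq!(v.push(20), 1);
    assert_eq!(v.get(1), Some(20));
    assert_eq!(v.get(2), None);
}
```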
