Auto merge of #78319 - jonas-schievink:rollup-vzj8a6l, r=jonas-schievink
Rollup of 15 pull requests

Successful merges:

 - #76649 (Add a spin loop hint for Arc::downgrade)
 - #77392 (add `insert` to `Option`)
 - #77716 (Revert "Allow dynamic linking for iOS/tvOS targets.")
 - #78109 (Check for exhaustion in RangeInclusive::contains and slicing)
 - #78198 (Simplify assert terminator only if condition evaluates to expected value)
 - #78243 (--test-args flag description)
 - #78249 (improve const infer error)
 - #78250 (Document inline-const)
 - #78264 (Add regression test for issue-77475)
 - #78274 (Update description of Empty Enum for accuracy)
 - #78278 (move `visit_predicate` into `TypeVisitor`)
 - #78292 (Loop instead of recursion)
 - #78293 (Always store Rustdoc theme when it's changed)
 - #78300 (Make codegen coverage_context optional, and check)
 - #78307 (Revert "Set .llvmbc and .llvmcmd sections as allocatable")

Failed merges:

r? `@ghost`
bors committed Oct 24, 2020
2 parents 2e8a54a + 1ac137b commit 89fdb30
Showing 26 changed files with 386 additions and 195 deletions.
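Two of the library-facing changes rolled up here are easy to demonstrate in isolation. The sketch below is ordinary user code rather than code from the PRs, and it assumes a toolchain that already contains #77392 (`Option::insert`) and #78109 (the exhaustion check in `RangeInclusive::contains`):

```rust
fn main() {
    // Option::insert (#77392): unconditionally store a value in the option and
    // get back a mutable reference to the freshly stored value.
    let mut opt: Option<u32> = None;
    let slot: &mut u32 = opt.insert(1);
    assert_eq!(*slot, 1);
    *slot = 3;
    assert_eq!(opt, Some(3));

    // RangeInclusive exhaustion (#78109): once the range has been iterated to
    // completion, `contains` reports false even for its former bounds.
    let mut r = 1..=3;
    assert!(r.contains(&3));
    r.by_ref().for_each(drop);
    assert!(!r.contains(&3));
}
```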
150 changes: 79 additions & 71 deletions compiler/rustc_ast_lowering/src/pat.rs
@@ -10,82 +10,90 @@ use rustc_span::symbol::Ident;
use rustc_span::{source_map::Spanned, Span};

impl<'a, 'hir> LoweringContext<'a, 'hir> {
crate fn lower_pat(&mut self, p: &Pat) -> &'hir hir::Pat<'hir> {
crate fn lower_pat(&mut self, mut pattern: &Pat) -> &'hir hir::Pat<'hir> {
ensure_sufficient_stack(|| {
let node = match p.kind {
PatKind::Wild => hir::PatKind::Wild,
PatKind::Ident(ref binding_mode, ident, ref sub) => {
let lower_sub = |this: &mut Self| sub.as_ref().map(|s| this.lower_pat(&*s));
let node = self.lower_pat_ident(p, binding_mode, ident, lower_sub);
node
}
PatKind::Lit(ref e) => hir::PatKind::Lit(self.lower_expr(e)),
PatKind::TupleStruct(ref path, ref pats) => {
let qpath = self.lower_qpath(
p.id,
&None,
path,
ParamMode::Optional,
ImplTraitContext::disallowed(),
);
let (pats, ddpos) = self.lower_pat_tuple(pats, "tuple struct");
hir::PatKind::TupleStruct(qpath, pats, ddpos)
}
PatKind::Or(ref pats) => hir::PatKind::Or(
self.arena.alloc_from_iter(pats.iter().map(|x| self.lower_pat(x))),
),
PatKind::Path(ref qself, ref path) => {
let qpath = self.lower_qpath(
p.id,
qself,
path,
ParamMode::Optional,
ImplTraitContext::disallowed(),
);
hir::PatKind::Path(qpath)
}
PatKind::Struct(ref path, ref fields, etc) => {
let qpath = self.lower_qpath(
p.id,
&None,
path,
ParamMode::Optional,
ImplTraitContext::disallowed(),
);
// loop here to avoid recursion
let node = loop {
match pattern.kind {
PatKind::Wild => break hir::PatKind::Wild,
PatKind::Ident(ref binding_mode, ident, ref sub) => {
let lower_sub = |this: &mut Self| sub.as_ref().map(|s| this.lower_pat(&*s));
break self.lower_pat_ident(pattern, binding_mode, ident, lower_sub);
}
PatKind::Lit(ref e) => break hir::PatKind::Lit(self.lower_expr(e)),
PatKind::TupleStruct(ref path, ref pats) => {
let qpath = self.lower_qpath(
pattern.id,
&None,
path,
ParamMode::Optional,
ImplTraitContext::disallowed(),
);
let (pats, ddpos) = self.lower_pat_tuple(pats, "tuple struct");
break hir::PatKind::TupleStruct(qpath, pats, ddpos);
}
PatKind::Or(ref pats) => {
break hir::PatKind::Or(
self.arena.alloc_from_iter(pats.iter().map(|x| self.lower_pat(x))),
);
}
PatKind::Path(ref qself, ref path) => {
let qpath = self.lower_qpath(
pattern.id,
qself,
path,
ParamMode::Optional,
ImplTraitContext::disallowed(),
);
break hir::PatKind::Path(qpath);
}
PatKind::Struct(ref path, ref fields, etc) => {
let qpath = self.lower_qpath(
pattern.id,
&None,
path,
ParamMode::Optional,
ImplTraitContext::disallowed(),
);

let fs = self.arena.alloc_from_iter(fields.iter().map(|f| hir::FieldPat {
hir_id: self.next_id(),
ident: f.ident,
pat: self.lower_pat(&f.pat),
is_shorthand: f.is_shorthand,
span: f.span,
}));
hir::PatKind::Struct(qpath, fs, etc)
}
PatKind::Tuple(ref pats) => {
let (pats, ddpos) = self.lower_pat_tuple(pats, "tuple");
hir::PatKind::Tuple(pats, ddpos)
}
PatKind::Box(ref inner) => hir::PatKind::Box(self.lower_pat(inner)),
PatKind::Ref(ref inner, mutbl) => hir::PatKind::Ref(self.lower_pat(inner), mutbl),
PatKind::Range(ref e1, ref e2, Spanned { node: ref end, .. }) => {
hir::PatKind::Range(
e1.as_deref().map(|e| self.lower_expr(e)),
e2.as_deref().map(|e| self.lower_expr(e)),
self.lower_range_end(end, e2.is_some()),
)
}
PatKind::Slice(ref pats) => self.lower_pat_slice(pats),
PatKind::Rest => {
// If we reach here the `..` pattern is not semantically allowed.
self.ban_illegal_rest_pat(p.span)
let fs = self.arena.alloc_from_iter(fields.iter().map(|f| hir::FieldPat {
hir_id: self.next_id(),
ident: f.ident,
pat: self.lower_pat(&f.pat),
is_shorthand: f.is_shorthand,
span: f.span,
}));
break hir::PatKind::Struct(qpath, fs, etc);
}
PatKind::Tuple(ref pats) => {
let (pats, ddpos) = self.lower_pat_tuple(pats, "tuple");
break hir::PatKind::Tuple(pats, ddpos);
}
PatKind::Box(ref inner) => {
break hir::PatKind::Box(self.lower_pat(inner));
}
PatKind::Ref(ref inner, mutbl) => {
break hir::PatKind::Ref(self.lower_pat(inner), mutbl);
}
PatKind::Range(ref e1, ref e2, Spanned { node: ref end, .. }) => {
break hir::PatKind::Range(
e1.as_deref().map(|e| self.lower_expr(e)),
e2.as_deref().map(|e| self.lower_expr(e)),
self.lower_range_end(end, e2.is_some()),
);
}
PatKind::Slice(ref pats) => break self.lower_pat_slice(pats),
PatKind::Rest => {
// If we reach here the `..` pattern is not semantically allowed.
break self.ban_illegal_rest_pat(pattern.span);
}
// return inner to be processed in next loop
PatKind::Paren(ref inner) => pattern = inner,
PatKind::MacCall(_) => panic!("{:?} shouldn't exist here", pattern.span),
}
// FIXME: consider not using recursion to lower this.
PatKind::Paren(ref inner) => return self.lower_pat(inner),
PatKind::MacCall(_) => panic!("{:?} shouldn't exist here", p.span),
};

self.pat_with_node_id_of(p, node)
self.pat_with_node_id_of(pattern, node)
})
}
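
The pat.rs change above (#78292) replaces recursion with a loop when lowering parenthesized patterns: the `Paren` arm simply rebinds the borrowed `pattern` and goes around again, while every other arm breaks with the lowered node. A minimal, self-contained sketch of the same trick, using made-up stand-in types rather than rustc's:

```rust
#[derive(Debug)]
enum Pat {
    Wild,
    Lit(i64),
    Paren(Box<Pat>),
}

#[derive(Debug, PartialEq)]
enum LoweredPat {
    Wild,
    Lit(i64),
}

// Loop instead of recursion: `Paren` only rebinds `pattern` to its inner
// pattern and continues; every other arm breaks out with the result.
fn lower_pat(mut pattern: &Pat) -> LoweredPat {
    loop {
        match pattern {
            Pat::Wild => break LoweredPat::Wild,
            Pat::Lit(v) => break LoweredPat::Lit(*v),
            Pat::Paren(inner) => pattern = &**inner,
        }
    }
}

fn main() {
    let nested = Pat::Paren(Box::new(Pat::Paren(Box::new(Pat::Lit(7)))));
    assert_eq!(lower_pat(&nested), LoweredPat::Lit(7));
    assert_eq!(lower_pat(&Pat::Wild), LoweredPat::Wild);
}
```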

4 changes: 2 additions & 2 deletions compiler/rustc_codegen_llvm/src/back/write.rs
@@ -936,8 +936,8 @@ unsafe fn embed_bitcode(
llvm::LLVMRustAppendModuleInlineAsm(llmod, asm.as_ptr().cast(), asm.len());
} else {
let asm = "
.section .llvmbc,\"a\"
.section .llvmcmd,\"a\"
.section .llvmbc,\"e\"
.section .llvmcmd,\"e\"
";
llvm::LLVMRustAppendModuleInlineAsm(llmod, asm.as_ptr().cast(), asm.len());
}
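
For context on the revert above: in GNU-as/ELF section directives, the "a" flag marks a section as allocatable (mapped into the loaded image), while "e" marks it as excluded from the final executable or shared object. The snippet below is only an illustrative, stand-alone sketch of the directive string the diff builds, not rustc's actual code path:

```rust
fn main() {
    // Same directives as in the diff: with "e", the .llvmbc and .llvmcmd
    // marker sections are excluded from the loaded image rather than being
    // allocatable ("a").
    let asm = "
    .section .llvmbc,\"e\"
    .section .llvmcmd,\"e\"
    ";
    println!("{}", asm);
}
```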
4 changes: 2 additions & 2 deletions compiler/rustc_codegen_llvm/src/context.rs
@@ -324,8 +324,8 @@ impl<'ll, 'tcx> CodegenCx<'ll, 'tcx> {
}

#[inline]
pub fn coverage_context(&'a self) -> &'a coverageinfo::CrateCoverageContext<'tcx> {
self.coverage_cx.as_ref().unwrap()
pub fn coverage_context(&'a self) -> Option<&'a coverageinfo::CrateCoverageContext<'tcx>> {
self.coverage_cx.as_ref()
}
}
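
The context.rs change above makes `coverage_context()` return an `Option`, and the mapgen change below returns early when it is `None`. A small, self-contained sketch of that pattern with simplified stand-in types (not rustc's real ones):

```rust
struct CrateCoverageContext {
    // per-function coverage data would live here
}

struct CodegenCx {
    coverage_cx: Option<CrateCoverageContext>,
}

impl CodegenCx {
    // The context only exists when coverage instrumentation is enabled, so the
    // accessor hands out an Option instead of unwrapping.
    fn coverage_context(&self) -> Option<&CrateCoverageContext> {
        self.coverage_cx.as_ref()
    }
}

fn finalize(cx: &CodegenCx) {
    // Mirrors the mapgen change: no context means there is no coverage map to emit.
    let _ctx = match cx.coverage_context() {
        Some(ctx) => ctx,
        None => return,
    };
    // ... build and emit the coverage map ...
}

fn main() {
    finalize(&CodegenCx { coverage_cx: None }); // returns early instead of panicking
    finalize(&CodegenCx { coverage_cx: Some(CrateCoverageContext {}) });
}
```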

5 changes: 4 additions & 1 deletion compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs
@@ -26,7 +26,10 @@ use tracing::debug;
/// undocumented details in Clang's implementation (that may or may not be important) were also
/// replicated for Rust's Coverage Map.
pub fn finalize<'ll, 'tcx>(cx: &CodegenCx<'ll, 'tcx>) {
let function_coverage_map = cx.coverage_context().take_function_coverage_map();
let function_coverage_map = match cx.coverage_context() {
Some(ctx) => ctx.take_function_coverage_map(),
None => return,
};
if function_coverage_map.is_empty() {
// This module has no functions with coverage instrumentation
return;
79 changes: 47 additions & 32 deletions compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs
@@ -64,17 +64,22 @@ impl CoverageInfoBuilderMethods<'tcx> for Builder<'a, 'll, 'tcx> {
function_source_hash: u64,
id: CounterValueReference,
region: CodeRegion,
) {
debug!(
"adding counter to coverage_regions: instance={:?}, function_source_hash={}, id={:?}, \
at {:?}",
instance, function_source_hash, id, region,
);
let mut coverage_regions = self.coverage_context().function_coverage_map.borrow_mut();
coverage_regions
.entry(instance)
.or_insert_with(|| FunctionCoverage::new(self.tcx, instance))
.add_counter(function_source_hash, id, region);
) -> bool {
if let Some(coverage_context) = self.coverage_context() {
debug!(
"adding counter to coverage_regions: instance={:?}, function_source_hash={}, id={:?}, \
at {:?}",
instance, function_source_hash, id, region,
);
let mut coverage_regions = coverage_context.function_coverage_map.borrow_mut();
coverage_regions
.entry(instance)
.or_insert_with(|| FunctionCoverage::new(self.tcx, instance))
.add_counter(function_source_hash, id, region);
true
} else {
false
}
}

fn add_counter_expression_region(
@@ -85,29 +90,39 @@ impl CoverageInfoBuilderMethods<'tcx> for Builder<'a, 'll, 'tcx> {
op: Op,
rhs: ExpressionOperandId,
region: CodeRegion,
) {
debug!(
"adding counter expression to coverage_regions: instance={:?}, id={:?}, {:?} {:?} {:?}, \
at {:?}",
instance, id, lhs, op, rhs, region,
);
let mut coverage_regions = self.coverage_context().function_coverage_map.borrow_mut();
coverage_regions
.entry(instance)
.or_insert_with(|| FunctionCoverage::new(self.tcx, instance))
.add_counter_expression(id, lhs, op, rhs, region);
) -> bool {
if let Some(coverage_context) = self.coverage_context() {
debug!(
"adding counter expression to coverage_regions: instance={:?}, id={:?}, {:?} {:?} {:?}, \
at {:?}",
instance, id, lhs, op, rhs, region,
);
let mut coverage_regions = coverage_context.function_coverage_map.borrow_mut();
coverage_regions
.entry(instance)
.or_insert_with(|| FunctionCoverage::new(self.tcx, instance))
.add_counter_expression(id, lhs, op, rhs, region);
true
} else {
false
}
}

fn add_unreachable_region(&mut self, instance: Instance<'tcx>, region: CodeRegion) {
debug!(
"adding unreachable code to coverage_regions: instance={:?}, at {:?}",
instance, region,
);
let mut coverage_regions = self.coverage_context().function_coverage_map.borrow_mut();
coverage_regions
.entry(instance)
.or_insert_with(|| FunctionCoverage::new(self.tcx, instance))
.add_unreachable_region(region);
fn add_unreachable_region(&mut self, instance: Instance<'tcx>, region: CodeRegion) -> bool {
if let Some(coverage_context) = self.coverage_context() {
debug!(
"adding unreachable code to coverage_regions: instance={:?}, at {:?}",
instance, region,
);
let mut coverage_regions = coverage_context.function_coverage_map.borrow_mut();
coverage_regions
.entry(instance)
.or_insert_with(|| FunctionCoverage::new(self.tcx, instance))
.add_unreachable_region(region);
true
} else {
false
}
}
}

24 changes: 12 additions & 12 deletions compiler/rustc_codegen_ssa/src/mir/coverageinfo.rs
@@ -10,19 +10,19 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
let Coverage { kind, code_region } = coverage;
match kind {
CoverageKind::Counter { function_source_hash, id } => {
bx.add_counter_region(self.instance, function_source_hash, id, code_region);
if bx.add_counter_region(self.instance, function_source_hash, id, code_region) {
let coverageinfo = bx.tcx().coverageinfo(self.instance.def_id());

let coverageinfo = bx.tcx().coverageinfo(self.instance.def_id());

let fn_name = bx.create_pgo_func_name_var(self.instance);
let hash = bx.const_u64(function_source_hash);
let num_counters = bx.const_u32(coverageinfo.num_counters);
let id = bx.const_u32(u32::from(id));
debug!(
"codegen intrinsic instrprof.increment(fn_name={:?}, hash={:?}, num_counters={:?}, index={:?})",
fn_name, hash, num_counters, id,
);
bx.instrprof_increment(fn_name, hash, num_counters, id);
let fn_name = bx.create_pgo_func_name_var(self.instance);
let hash = bx.const_u64(function_source_hash);
let num_counters = bx.const_u32(coverageinfo.num_counters);
let id = bx.const_u32(u32::from(id));
debug!(
"codegen intrinsic instrprof.increment(fn_name={:?}, hash={:?}, num_counters={:?}, index={:?})",
fn_name, hash, num_counters, id,
);
bx.instrprof_increment(fn_name, hash, num_counters, id);
}
}
CoverageKind::Expression { id, lhs, op, rhs } => {
bx.add_counter_expression_region(self.instance, id, lhs, op, rhs, code_region);
12 changes: 9 additions & 3 deletions compiler/rustc_codegen_ssa/src/traits/coverageinfo.rs
@@ -9,14 +9,18 @@ pub trait CoverageInfoMethods: BackendTypes {
pub trait CoverageInfoBuilderMethods<'tcx>: BackendTypes {
fn create_pgo_func_name_var(&self, instance: Instance<'tcx>) -> Self::Value;

/// Returns true if the counter was added to the coverage map; false if `-Z instrument-coverage`
/// is not enabled (a coverage map is not being generated).
fn add_counter_region(
&mut self,
instance: Instance<'tcx>,
function_source_hash: u64,
id: CounterValueReference,
region: CodeRegion,
);
) -> bool;

/// Returns true if the expression was added to the coverage map; false if
/// `-Z instrument-coverage` is not enabled (a coverage map is not being generated).
fn add_counter_expression_region(
&mut self,
instance: Instance<'tcx>,
@@ -25,7 +29,9 @@ pub trait CoverageInfoBuilderMethods<'tcx>: BackendTypes {
op: Op,
rhs: ExpressionOperandId,
region: CodeRegion,
);
) -> bool;

fn add_unreachable_region(&mut self, instance: Instance<'tcx>, region: CodeRegion);
/// Returns true if the region was added to the coverage map; false if `-Z instrument-coverage`
/// is not enabled (a coverage map is not being generated).
fn add_unreachable_region(&mut self, instance: Instance<'tcx>, region: CodeRegion) -> bool;
}
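
The trait above now reports whether each region was actually recorded, and the rustc_codegen_ssa change earlier in this diff only emits the `llvm.instrprof.increment` intrinsic when that is the case. A hedged, self-contained sketch of the same shape, with simplified stand-in types rather than the real backend traits:

```rust
trait CoverageInfoBuilder {
    /// Returns true if the counter was added to the coverage map; false if
    /// coverage instrumentation is not enabled.
    fn add_counter_region(&mut self, id: u32) -> bool;
}

struct Builder {
    coverage_enabled: bool,
}

impl CoverageInfoBuilder for Builder {
    fn add_counter_region(&mut self, id: u32) -> bool {
        if self.coverage_enabled {
            // ... record the region in the function coverage map ...
            println!("recorded counter {}", id);
            true
        } else {
            false
        }
    }
}

fn codegen_coverage(bx: &mut impl CoverageInfoBuilder, id: u32) {
    // Mirrors the rustc_codegen_ssa change: only emit the increment intrinsic
    // when the counter was actually added.
    if bx.add_counter_region(id) {
        println!("emit instrprof.increment for counter {}", id);
    }
}

fn main() {
    codegen_coverage(&mut Builder { coverage_enabled: false }, 1); // prints nothing
    codegen_coverage(&mut Builder { coverage_enabled: true }, 2);
}
```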
25 changes: 8 additions & 17 deletions compiler/rustc_middle/src/infer/unify_key.rs
@@ -175,19 +175,15 @@ impl<'tcx> UnifyKey for ty::ConstVid<'tcx> {
impl<'tcx> UnifyValue for ConstVarValue<'tcx> {
type Error = (&'tcx ty::Const<'tcx>, &'tcx ty::Const<'tcx>);

fn unify_values(value1: &Self, value2: &Self) -> Result<Self, Self::Error> {
let (val, span) = match (value1.val, value2.val) {
fn unify_values(&value1: &Self, &value2: &Self) -> Result<Self, Self::Error> {
Ok(match (value1.val, value2.val) {
(ConstVariableValue::Known { .. }, ConstVariableValue::Known { .. }) => {
bug!("equating two const variables, both of which have known values")
}

// If one side is known, prefer that one.
(ConstVariableValue::Known { .. }, ConstVariableValue::Unknown { .. }) => {
(value1.val, value1.origin.span)
}
(ConstVariableValue::Unknown { .. }, ConstVariableValue::Known { .. }) => {
(value2.val, value2.origin.span)
}
(ConstVariableValue::Known { .. }, ConstVariableValue::Unknown { .. }) => value1,
(ConstVariableValue::Unknown { .. }, ConstVariableValue::Known { .. }) => value2,

// If both sides are *unknown*, it hardly matters, does it?
(
@@ -200,16 +196,11 @@ impl<'tcx> UnifyValue for ConstVarValue<'tcx> {
// universe is the minimum of the two universes, because that is
// the one which contains the fewest names in scope.
let universe = cmp::min(universe1, universe2);
(ConstVariableValue::Unknown { universe }, value1.origin.span)
ConstVarValue {
val: ConstVariableValue::Unknown { universe },
origin: value1.origin,
}
}
};

Ok(ConstVarValue {
origin: ConstVariableOrigin {
kind: ConstVariableOriginKind::ConstInference,
span: span,
},
val,
})
}
}
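
A simplified sketch of the unification rule implemented above, with stand-in types (the real code works on `ty::Const` values and also carries origin/span information): prefer whichever side already has a known value; if both are unknown, keep the smaller universe; both sides being known is a bug.

```rust
use std::cmp;

#[derive(Copy, Clone, Debug, PartialEq)]
enum ConstVarValue {
    Known(u64),
    Unknown { universe: u32 },
}

fn unify_values(v1: ConstVarValue, v2: ConstVarValue) -> Result<ConstVarValue, ()> {
    Ok(match (v1, v2) {
        (ConstVarValue::Known(_), ConstVarValue::Known(_)) => {
            panic!("equating two const variables, both of which have known values")
        }
        // If one side is known, prefer that one.
        (ConstVarValue::Known(_), ConstVarValue::Unknown { .. }) => v1,
        (ConstVarValue::Unknown { .. }, ConstVarValue::Known(_)) => v2,
        // If both sides are unknown, keep the minimum universe: it contains the
        // fewest names in scope.
        (ConstVarValue::Unknown { universe: u1 }, ConstVarValue::Unknown { universe: u2 }) => {
            ConstVarValue::Unknown { universe: cmp::min(u1, u2) }
        }
    })
}

fn main() {
    let known = ConstVarValue::Known(3);
    let unknown = ConstVarValue::Unknown { universe: 1 };
    assert_eq!(unify_values(unknown, known), Ok(known));
    assert_eq!(
        unify_values(ConstVarValue::Unknown { universe: 2 }, unknown),
        Ok(ConstVarValue::Unknown { universe: 1 })
    );
}
```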