Lower intrinsics calls: forget, size_of, unreachable, wrapping_*
This allows constant propagation to evaluate `size_of` and the `wrapping_*`
operations, and unreachable propagation to act on calls to `unreachable`.

The lowering is performed as a MIR optimization, rather than during MIR
building, in order to preserve the special status of intrinsics with respect
to unsafety checks and promotion.
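
For context, a minimal sketch (mirroring the new mir-opt test added below; not itself part of this commit) of the direct intrinsic calls the pass rewrites. After LowerIntrinsics, the `size_of` call becomes a `NullaryOp(SizeOf)` statement and `wrapping_add` becomes a wrapping `BinaryOp`, each followed by a goto to the old return block, which ConstProp can then evaluate:

#![feature(core_intrinsics)]

// Sketch only: each call below is a direct `rust-intrinsic` call in MIR,
// so LowerIntrinsics replaces the call terminator with an ordinary
// statement plus a goto instead of a call edge.
pub fn example(a: u32, b: u32) -> (usize, u32) {
    (
        core::intrinsics::size_of::<u64>(),   // lowered to SizeOf(u64)
        core::intrinsics::wrapping_add(a, b), // lowered to a wrapping Add
    )
}

The stable wrappers (`std::mem::size_of`, the integer `wrapping_*` methods) bottom out in these intrinsics, so once the wrappers are inlined the same lowering benefits ordinary code, as the `f_dispatch` test below demonstrates.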
tmiasko committed Nov 14, 2020
1 parent 30e49a9 commit 6903273
Showing 10 changed files with 373 additions and 5 deletions.
108 changes: 108 additions & 0 deletions compiler/rustc_mir/src/transform/lower_intrinsics.rs
@@ -0,0 +1,108 @@
//! Lowers intrinsic calls

use crate::transform::MirPass;
use rustc_middle::mir::*;
use rustc_middle::ty::subst::SubstsRef;
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_span::symbol::{sym, Symbol};
use rustc_target::spec::abi::Abi;

pub struct LowerIntrinsics;

impl<'tcx> MirPass<'tcx> for LowerIntrinsics {
    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
        for block in body.basic_blocks_mut() {
            let terminator = block.terminator.as_mut().unwrap();
            if let TerminatorKind::Call {
                func: Operand::Constant(box Constant { literal: ty::Const { ty: func_ty, .. }, .. }),
                args,
                destination,
                ..
            } = &mut terminator.kind
            {
                let (intrinsic_name, substs) = match resolve_rust_intrinsic(tcx, func_ty) {
                    None => continue,
                    Some(it) => it,
                };
                match intrinsic_name {
                    sym::unreachable => {
                        terminator.kind = TerminatorKind::Unreachable;
                    }
                    sym::forget => {
                        if let Some((destination, target)) = *destination {
                            block.statements.push(Statement {
                                source_info: terminator.source_info,
                                kind: StatementKind::Assign(box (
                                    destination,
                                    Rvalue::Use(Operand::Constant(box Constant {
                                        span: terminator.source_info.span,
                                        user_ty: None,
                                        literal: ty::Const::zero_sized(tcx, tcx.types.unit),
                                    })),
                                )),
                            });
                            terminator.kind = TerminatorKind::Goto { target };
                        }
                    }
                    sym::wrapping_add | sym::wrapping_sub | sym::wrapping_mul => {
                        if let Some((destination, target)) = *destination {
                            let lhs;
                            let rhs;
                            {
                                let mut args = args.drain(..);
                                lhs = args.next().unwrap();
                                rhs = args.next().unwrap();
                            }
                            let bin_op = match intrinsic_name {
                                sym::wrapping_add => BinOp::Add,
                                sym::wrapping_sub => BinOp::Sub,
                                sym::wrapping_mul => BinOp::Mul,
                                _ => bug!("unexpected intrinsic"),
                            };
                            block.statements.push(Statement {
                                source_info: terminator.source_info,
                                kind: StatementKind::Assign(box (
                                    destination,
                                    Rvalue::BinaryOp(bin_op, lhs, rhs),
                                )),
                            });
                            terminator.kind = TerminatorKind::Goto { target };
                        }
                    }
                    sym::add_with_overflow | sym::sub_with_overflow | sym::mul_with_overflow => {
                        // The checked binary operations are not suitable targets for lowering
                        // here, since their semantics depend on the value of the overflow-checks
                        // flag used during codegen. Issue #35310.
                    }
                    sym::size_of => {
                        if let Some((destination, target)) = *destination {
                            let tp_ty = substs.type_at(0);
                            block.statements.push(Statement {
                                source_info: terminator.source_info,
                                kind: StatementKind::Assign(box (
                                    destination,
                                    Rvalue::NullaryOp(NullOp::SizeOf, tp_ty),
                                )),
                            });
                            terminator.kind = TerminatorKind::Goto { target };
                        }
                    }
                    _ => {}
                }
            }
        }
    }
}

fn resolve_rust_intrinsic(
    tcx: TyCtxt<'tcx>,
    func_ty: Ty<'tcx>,
) -> Option<(Symbol, SubstsRef<'tcx>)> {
    if let ty::FnDef(def_id, substs) = *func_ty.kind() {
        let fn_sig = func_ty.fn_sig(tcx);
        if fn_sig.abi() == Abi::RustIntrinsic {
            return Some((tcx.item_name(def_id), substs));
        }
    }
    None
}
2 changes: 2 additions & 0 deletions compiler/rustc_mir/src/transform/mod.rs
@@ -32,6 +32,7 @@ pub mod function_item_references;
pub mod generator;
pub mod inline;
pub mod instcombine;
pub mod lower_intrinsics;
pub mod match_branches;
pub mod multiple_return_terminators;
pub mod no_landing_pads;
@@ -390,6 +391,7 @@ fn run_optimization_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {

    // The main optimizations that we do on MIR.
    let optimizations: &[&dyn MirPass<'tcx>] = &[
        &lower_intrinsics::LowerIntrinsics,
        &remove_unneeded_drops::RemoveUnneededDrops,
        &match_branches::MatchBranchSimplification,
        // inst combine is after MatchBranchSimplification to clean up Ne(_1, false)
27 changes: 27 additions & 0 deletions src/test/mir-opt/lower_intrinsics.f_u64.PreCodegen.before.mir
@@ -0,0 +1,27 @@
// MIR for `f_u64` before PreCodegen

fn f_u64() -> () {
    let mut _0: (); // return place in scope 0 at $DIR/lower_intrinsics.rs:34:16: 34:16
    scope 1 (inlined f_dispatch::<u64>) { // at $DIR/lower_intrinsics.rs:35:5: 35:21
        debug t => _2; // in scope 1 at $DIR/lower_intrinsics.rs:35:5: 35:21
        let _1: (); // in scope 1 at $DIR/lower_intrinsics.rs:35:5: 35:21
        let mut _2: u64; // in scope 1 at $DIR/lower_intrinsics.rs:35:5: 35:21
        scope 2 (inlined std::mem::size_of::<u64>) { // at $DIR/lower_intrinsics.rs:35:5: 35:21
        }
    }

    bb0: {
        _2 = const 0_u64; // scope 0 at $DIR/lower_intrinsics.rs:35:5: 35:21
        StorageLive(_1); // scope 1 at $DIR/lower_intrinsics.rs:35:5: 35:21
        _1 = f_non_zst::<u64>(move _2) -> bb1; // scope 1 at $DIR/lower_intrinsics.rs:35:5: 35:21
                                               // mir::Constant
                                               // + span: $DIR/lower_intrinsics.rs:35:5: 35:21
                                               // + literal: Const { ty: fn(u64) {f_non_zst::<u64>}, val: Value(Scalar(<ZST>)) }
    }

    bb1: {
        StorageDead(_1); // scope 1 at $DIR/lower_intrinsics.rs:35:5: 35:21
        _0 = const (); // scope 0 at $DIR/lower_intrinsics.rs:34:16: 36:2
        return; // scope 0 at $DIR/lower_intrinsics.rs:36:2: 36:2
    }
}
28 changes: 28 additions & 0 deletions src/test/mir-opt/lower_intrinsics.f_unit.PreCodegen.before.mir
@@ -0,0 +1,28 @@
// MIR for `f_unit` before PreCodegen

fn f_unit() -> () {
    let mut _0: (); // return place in scope 0 at $DIR/lower_intrinsics.rs:28:17: 28:17
    let mut _1: (); // in scope 0 at $DIR/lower_intrinsics.rs:29:16: 29:18
    scope 1 (inlined f_dispatch::<()>) { // at $DIR/lower_intrinsics.rs:29:5: 29:19
        debug t => _1; // in scope 1 at $DIR/lower_intrinsics.rs:29:5: 29:19
        let _2: (); // in scope 1 at $DIR/lower_intrinsics.rs:29:5: 29:19
        scope 2 (inlined std::mem::size_of::<()>) { // at $DIR/lower_intrinsics.rs:29:5: 29:19
        }
    }

    bb0: {
        StorageLive(_1); // scope 0 at $DIR/lower_intrinsics.rs:29:16: 29:18
        StorageLive(_2); // scope 1 at $DIR/lower_intrinsics.rs:29:5: 29:19
        _2 = f_zst::<()>(const ()) -> bb1; // scope 1 at $DIR/lower_intrinsics.rs:29:5: 29:19
                                           // mir::Constant
                                           // + span: $DIR/lower_intrinsics.rs:29:5: 29:19
                                           // + literal: Const { ty: fn(()) {f_zst::<()>}, val: Value(Scalar(<ZST>)) }
    }

    bb1: {
        StorageDead(_2); // scope 1 at $DIR/lower_intrinsics.rs:29:5: 29:19
        StorageDead(_1); // scope 0 at $DIR/lower_intrinsics.rs:29:18: 29:19
        _0 = const (); // scope 0 at $DIR/lower_intrinsics.rs:28:17: 30:2
        return; // scope 0 at $DIR/lower_intrinsics.rs:30:2: 30:2
    }
}
31 changes: 31 additions & 0 deletions src/test/mir-opt/lower_intrinsics.forget.LowerIntrinsics.diff
@@ -0,0 +1,31 @@
- // MIR for `forget` before LowerIntrinsics
+ // MIR for `forget` after LowerIntrinsics

  fn forget(_1: T) -> () {
      debug t => _1; // in scope 0 at $DIR/lower_intrinsics.rs:18:18: 18:19
      let mut _0: (); // return place in scope 0 at $DIR/lower_intrinsics.rs:18:24: 18:24
      let _2: (); // in scope 0 at $DIR/lower_intrinsics.rs:19:14: 19:41
      let mut _3: T; // in scope 0 at $DIR/lower_intrinsics.rs:19:39: 19:40
      scope 1 {
      }

      bb0: {
          StorageLive(_2); // scope 0 at $DIR/lower_intrinsics.rs:19:5: 19:43
          StorageLive(_3); // scope 1 at $DIR/lower_intrinsics.rs:19:39: 19:40
          _3 = move _1; // scope 1 at $DIR/lower_intrinsics.rs:19:39: 19:40
-         _2 = std::intrinsics::forget::<T>(move _3) -> bb1; // scope 1 at $DIR/lower_intrinsics.rs:19:14: 19:41
-         // mir::Constant
-         // + span: $DIR/lower_intrinsics.rs:19:14: 19:38
-         // + literal: Const { ty: unsafe extern "rust-intrinsic" fn(T) {std::intrinsics::forget::<T>}, val: Value(Scalar(<ZST>)) }
+         _2 = const (); // scope 1 at $DIR/lower_intrinsics.rs:19:14: 19:41
+         goto -> bb1; // scope 1 at $DIR/lower_intrinsics.rs:19:14: 19:41
      }

      bb1: {
          StorageDead(_3); // scope 1 at $DIR/lower_intrinsics.rs:19:40: 19:41
          StorageDead(_2); // scope 0 at $DIR/lower_intrinsics.rs:19:43: 19:44
          _0 = const (); // scope 0 at $DIR/lower_intrinsics.rs:18:24: 20:2
          return; // scope 0 at $DIR/lower_intrinsics.rs:20:2: 20:2
      }
  }

52 changes: 52 additions & 0 deletions src/test/mir-opt/lower_intrinsics.rs
@@ -0,0 +1,52 @@
// compile-flags: -Cpanic=abort
#![feature(core_intrinsics)]
#![crate_type = "lib"]

// EMIT_MIR lower_intrinsics.wrapping.LowerIntrinsics.diff
pub fn wrapping<T: Copy>(a: T, b: T) {
    let _x = core::intrinsics::wrapping_add(a, b);
    let _y = core::intrinsics::wrapping_sub(a, b);
    let _z = core::intrinsics::wrapping_mul(a, b);
}

// EMIT_MIR lower_intrinsics.size_of.LowerIntrinsics.diff
pub fn size_of<T>() -> usize {
    core::intrinsics::size_of::<T>()
}

// EMIT_MIR lower_intrinsics.forget.LowerIntrinsics.diff
pub fn forget<T>(t: T) {
    unsafe { core::intrinsics::forget(t) };
}

// EMIT_MIR lower_intrinsics.unreachable.LowerIntrinsics.diff
pub fn unreachable() -> ! {
    unsafe { core::intrinsics::unreachable() };
}

// EMIT_MIR lower_intrinsics.f_unit.PreCodegen.before.mir
pub fn f_unit() {
    f_dispatch(());
}

// EMIT_MIR lower_intrinsics.f_u64.PreCodegen.before.mir
pub fn f_u64() {
    f_dispatch(0u64);
}

#[inline(always)]
pub fn f_dispatch<T>(t: T) {
    if std::mem::size_of::<T>() == 0 {
        f_zst(t);
    } else {
        f_non_zst(t);
    }
}

#[inline(never)]
pub fn f_zst<T>(t: T) {
}

#[inline(never)]
pub fn f_non_zst<T>(t: T) {}
20 changes: 20 additions & 0 deletions src/test/mir-opt/lower_intrinsics.size_of.LowerIntrinsics.diff
@@ -0,0 +1,20 @@
- // MIR for `size_of` before LowerIntrinsics
+ // MIR for `size_of` after LowerIntrinsics

  fn size_of() -> usize {
      let mut _0: usize; // return place in scope 0 at $DIR/lower_intrinsics.rs:13:24: 13:29

      bb0: {
-         _0 = std::intrinsics::size_of::<T>() -> bb1; // scope 0 at $DIR/lower_intrinsics.rs:14:5: 14:37
-         // mir::Constant
-         // + span: $DIR/lower_intrinsics.rs:14:5: 14:35
-         // + literal: Const { ty: extern "rust-intrinsic" fn() -> usize {std::intrinsics::size_of::<T>}, val: Value(Scalar(<ZST>)) }
+         _0 = SizeOf(T); // scope 0 at $DIR/lower_intrinsics.rs:14:5: 14:37
+         goto -> bb1; // scope 0 at $DIR/lower_intrinsics.rs:14:5: 14:37
      }

      bb1: {
          return; // scope 0 at $DIR/lower_intrinsics.rs:15:2: 15:2
      }
  }

22 changes: 22 additions & 0 deletions src/test/mir-opt/lower_intrinsics.unreachable.LowerIntrinsics.diff
@@ -0,0 +1,22 @@
- // MIR for `unreachable` before LowerIntrinsics
+ // MIR for `unreachable` after LowerIntrinsics

  fn unreachable() -> ! {
      let mut _0: !; // return place in scope 0 at $DIR/lower_intrinsics.rs:23:25: 23:26
      let mut _1: !; // in scope 0 at $DIR/lower_intrinsics.rs:23:27: 25:2
      let _2: (); // in scope 0 at $DIR/lower_intrinsics.rs:24:14: 24:45
      let mut _3: !; // in scope 0 at $DIR/lower_intrinsics.rs:24:14: 24:45
      scope 1 {
      }

      bb0: {
          StorageLive(_2); // scope 0 at $DIR/lower_intrinsics.rs:24:5: 24:47
          StorageLive(_3); // scope 1 at $DIR/lower_intrinsics.rs:24:14: 24:45
-         std::intrinsics::unreachable(); // scope 1 at $DIR/lower_intrinsics.rs:24:14: 24:45
-         // mir::Constant
-         // + span: $DIR/lower_intrinsics.rs:24:14: 24:43
-         // + literal: Const { ty: unsafe extern "rust-intrinsic" fn() -> ! {std::intrinsics::unreachable}, val: Value(Scalar(<ZST>)) }
+         unreachable; // scope 1 at $DIR/lower_intrinsics.rs:24:14: 24:45
      }
  }
