diff --git a/src/librustc_mir/build/block.rs b/src/librustc_mir/build/block.rs index 7353ca9285ddb..f9440866e4925 100644 --- a/src/librustc_mir/build/block.rs +++ b/src/librustc_mir/build/block.rs @@ -1,18 +1,22 @@ use crate::build::{BlockAnd, BlockAndExtension, BlockFrame, Builder}; use crate::build::ForGuard::OutsideGuard; use crate::build::matches::ArmHasGuard; +use crate::build::scope::DropKind; use crate::hair::*; +use rustc::middle::region; use rustc::mir::*; use rustc::hir; use syntax_pos::Span; impl<'a, 'tcx> Builder<'a, 'tcx> { - pub fn ast_block(&mut self, - destination: &Place<'tcx>, - block: BasicBlock, - ast_block: &'tcx hir::Block, - source_info: SourceInfo) - -> BlockAnd<()> { + pub fn ast_block( + &mut self, + destination: &Place<'tcx>, + scope: Option<region::Scope>, + block: BasicBlock, + ast_block: &'tcx hir::Block, + source_info: SourceInfo, + ) -> BlockAnd<()> { let Block { region_scope, opt_destruction_scope, @@ -21,37 +25,61 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { expr, targeted_by_break, safety_mode - } = - self.hir.mirror(ast_block); + } = self.hir.mirror(ast_block); self.in_opt_scope(opt_destruction_scope.map(|de|(de, source_info)), move |this| { this.in_scope((region_scope, source_info), LintLevel::Inherited, move |this| { if targeted_by_break { // This is a `break`-able block let exit_block = this.cfg.start_new_block(); + if let Some(scope) = scope { + // Breakable blocks assign to their destination on each + // `break`, as well as when they exit normally. So we + // can't schedule the drop in the last expression like + // normal blocks do. + let local = destination.as_local() + .expect("cannot schedule drop of non-Local place"); + this.schedule_drop(span, scope, local, DropKind::Value); + } let block_exit = this.in_breakable_scope( None, exit_block, destination.clone(), |this| { - this.ast_block_stmts(destination, block, span, stmts, expr, - safety_mode) + this.ast_block_stmts( + destination, + None, + block, + span, + stmts, + expr, + safety_mode, + ) }); this.cfg.terminate(unpack!(block_exit), source_info, TerminatorKind::Goto { target: exit_block }); exit_block.unit() } else { - this.ast_block_stmts(destination, block, span, stmts, expr, - safety_mode) + this.ast_block_stmts( + destination, + scope, + block, + span, + stmts, + expr, + safety_mode, + ) } }) }) } - fn ast_block_stmts(&mut self, - destination: &Place<'tcx>, - mut block: BasicBlock, - span: Span, - stmts: Vec<StmtRef<'tcx>>, - expr: Option<ExprRef<'tcx>>, - safety_mode: BlockSafety) - -> BlockAnd<()> { + fn ast_block_stmts( + &mut self, + destination: &Place<'tcx>, + scope: Option<region::Scope>, + mut block: BasicBlock, + span: Span, + stmts: Vec<StmtRef<'tcx>>, + expr: Option<ExprRef<'tcx>>, + safety_mode: BlockSafety, + ) -> BlockAnd<()> { let this = self; // This convoluted structure is to avoid using recursion as we walk down a list @@ -177,7 +205,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { this.block_context.currently_ignores_tail_results(); this.block_context.push(BlockFrame::TailExpr { tail_result_is_ignored }); - unpack!(block = this.into(destination, block, expr)); + unpack!(block = this.into(destination, scope, block, expr)); let popped = this.block_context.pop(); assert!(popped.map_or(false, |bf|bf.is_tail_expr())); diff --git a/src/librustc_mir/build/expr/as_rvalue.rs b/src/librustc_mir/build/expr/as_rvalue.rs index b4d93a4493fd9..bd9edc56872b9 100644 --- a/src/librustc_mir/build/expr/as_rvalue.rs +++ b/src/librustc_mir/build/expr/as_rvalue.rs @@ -128,7 +128,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { expr_span, scope, result, - expr.ty, ); } @@ -137,11 +136,14 @@ impl<'a, 
'tcx> Builder<'a, 'tcx> { this.cfg .push_assign(block, source_info, &Place::from(result), box_); - // initialize the box contents: + // Initialize the box contents. No scope is needed since the + // `Box` is already scheduled to be dropped. unpack!( block = this.into( &Place::from(result).deref(), - block, value + None, + block, + value ) ); block.and(Rvalue::Use(Operand::Move(Place::from(result)))) @@ -569,7 +571,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { upvar_span, temp_lifetime, temp, - upvar_ty, ); } diff --git a/src/librustc_mir/build/expr/as_temp.rs b/src/librustc_mir/build/expr/as_temp.rs index dbcc330eca382..bd20f27c945c1 100644 --- a/src/librustc_mir/build/expr/as_temp.rs +++ b/src/librustc_mir/build/expr/as_temp.rs @@ -103,24 +103,13 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { expr_span, temp_lifetime, temp, - expr_ty, DropKind::Storage, ); } } } - unpack!(block = this.into(temp_place, block, expr)); - - if let Some(temp_lifetime) = temp_lifetime { - this.schedule_drop( - expr_span, - temp_lifetime, - temp, - expr_ty, - DropKind::Value, - ); - } + unpack!(block = this.into(temp_place, temp_lifetime, block, expr)); block.and(temp) } diff --git a/src/librustc_mir/build/expr/into.rs b/src/librustc_mir/build/expr/into.rs index 8a6bc5a2a764e..c262215ab9be4 100644 --- a/src/librustc_mir/build/expr/into.rs +++ b/src/librustc_mir/build/expr/into.rs @@ -2,7 +2,9 @@ use crate::build::expr::category::{Category, RvalueFunc}; use crate::build::{BlockAnd, BlockAndExtension, BlockFrame, Builder}; +use crate::build::scope::DropKind; use crate::hair::*; +use rustc::middle::region; use rustc::mir::*; use rustc::ty; @@ -11,15 +13,18 @@ use rustc_target::spec::abi::Abi; impl<'a, 'tcx> Builder<'a, 'tcx> { /// Compile `expr`, storing the result into `destination`, which /// is assumed to be uninitialized. + /// If a `drop_scope` is provided, `destination` is scheduled to be dropped + /// in `scope` once it has been initialized. pub fn into_expr( &mut self, destination: &Place<'tcx>, + scope: Option<region::Scope>, mut block: BasicBlock, expr: Expr<'tcx>, ) -> BlockAnd<()> { debug!( - "into_expr(destination={:?}, block={:?}, expr={:?})", - destination, block, expr + "into_expr(destination={:?}, scope={:?}, block={:?}, expr={:?})", + destination, scope, block, expr ); // since we frequently have to reference `self` from within a @@ -35,6 +40,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { _ => false, }; + let schedule_drop = move |this: &mut Self| { + if let Some(drop_scope) = scope { + let local = destination.as_local() + .expect("cannot schedule drop of non-Local place"); + this.schedule_drop(expr_span, drop_scope, local, DropKind::Value); + } + }; + if !expr_is_block_or_scope { this.block_context.push(BlockFrame::SubExpr); } @@ -47,14 +60,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } => { let region_scope = (region_scope, source_info); this.in_scope(region_scope, lint_level, |this| { - this.into(destination, block, value) + this.into(destination, scope, block, value) }) } ExprKind::Block { body: ast_block } => { - this.ast_block(destination, block, ast_block, source_info) + this.ast_block(destination, scope, block, ast_block, source_info) } ExprKind::Match { scrutinee, arms } => { - this.match_expr(destination, expr_span, block, scrutinee, arms) + this.match_expr(destination, scope, expr_span, block, scrutinee, arms) } ExprKind::NeverToAny { source } => { let source = this.hir.mirror(source); @@ -67,6 +80,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { // This is an optimization. 
If the expression was a call then we already have an // unreachable block. Don't bother to terminate it and create a new one. + schedule_drop(this); if is_call { block.unit() } else { @@ -164,6 +178,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { TerminatorKind::Goto { target: loop_block }, ); + // Loops assign to their destination on each `break`. Since we + // can't easily unschedule drops, we schedule the drop now. + schedule_drop(this); this.in_breakable_scope( Some(loop_block), exit_block, @@ -185,7 +202,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { // introduce a unit temporary as the destination for the loop body. let tmp = this.get_unit_temp(); // Execute the body, branching back to the test. - let body_block_end = unpack!(this.into(&tmp, body_block, body)); + // No scope is provided, since we've scheduled the drop above. + let body_block_end = unpack!(this.into(&tmp, None, body_block, body)); this.cfg.terminate( body_block_end, source_info, @@ -234,8 +252,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { is_block_tail: None, }); let ptr_temp = Place::from(ptr_temp); - let block = unpack!(this.into(&ptr_temp, block, ptr)); - this.into(&ptr_temp.deref(), block, val) + // No need for a scope, ptr_temp doesn't need drop + let block = unpack!(this.into(&ptr_temp, None, block, ptr)); + // Maybe we should provide a scope here so that + // `move_val_init` wouldn't leak on panic even with an + // arbitrary `val` expression, but `schedule_drop`, + // borrowck and drop elaboration all prevent us from + // dropping `ptr_temp.deref()`. + this.into(&ptr_temp.deref(), None, block, val) } else { let args: Vec<_> = args .into_iter() @@ -265,11 +289,12 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { from_hir_call, }, ); + schedule_drop(this); success.unit() } } ExprKind::Use { source } => { - this.into(destination, block, source) + this.into(destination, scope, block, source) } // These cases don't actually need a destination @@ -296,6 +321,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let rvalue = Rvalue::Use(this.consume_by_copy_or_move(place)); this.cfg .push_assign(block, source_info, destination, rvalue); + schedule_drop(this); block.unit() } ExprKind::Index { .. } | ExprKind::Deref { .. } | ExprKind::Field { .. 
} => { @@ -315,6 +341,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let rvalue = Rvalue::Use(this.consume_by_copy_or_move(place)); this.cfg .push_assign(block, source_info, destination, rvalue); + schedule_drop(this); block.unit() } @@ -346,6 +373,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let rvalue = unpack!(block = this.as_local_rvalue(block, expr)); this.cfg.push_assign(block, source_info, destination, rvalue); + schedule_drop(this); block.unit() } }; diff --git a/src/librustc_mir/build/into.rs b/src/librustc_mir/build/into.rs index 077840c9ccf17..e57f10f0b14e9 100644 --- a/src/librustc_mir/build/into.rs +++ b/src/librustc_mir/build/into.rs @@ -6,6 +6,7 @@ use crate::build::{BlockAnd, Builder}; use crate::hair::*; +use rustc::middle::region; use rustc::mir::*; pub(in crate::build) trait EvalInto<'tcx> { @@ -13,19 +14,23 @@ pub(in crate::build) trait EvalInto<'tcx> { self, builder: &mut Builder<'_, 'tcx>, destination: &Place<'tcx>, + scope: Option<region::Scope>, block: BasicBlock, ) -> BlockAnd<()>; } impl<'a, 'tcx> Builder<'a, 'tcx> { - pub fn into<E>(&mut self, - destination: &Place<'tcx>, - block: BasicBlock, - expr: E) - -> BlockAnd<()> - where E: EvalInto<'tcx> + pub fn into<E>( + &mut self, + destination: &Place<'tcx>, + scope: Option<region::Scope>, + block: BasicBlock, + expr: E, + ) -> BlockAnd<()> + where + E: EvalInto<'tcx>, { - expr.eval_into(self, destination, block) + expr.eval_into(self, destination, scope, block) } } @@ -34,10 +39,11 @@ impl<'tcx> EvalInto<'tcx> for ExprRef<'tcx> { self, builder: &mut Builder<'_, 'tcx>, destination: &Place<'tcx>, + scope: Option<region::Scope>, block: BasicBlock, ) -> BlockAnd<()> { let expr = builder.hir.mirror(self); - builder.into_expr(destination, block, expr) + builder.into_expr(destination, scope, block, expr) } } @@ -46,8 +52,9 @@ impl<'tcx> EvalInto<'tcx> for Expr<'tcx> { self, builder: &mut Builder<'_, 'tcx>, destination: &Place<'tcx>, + scope: Option<region::Scope>, block: BasicBlock, ) -> BlockAnd<()> { - builder.into_expr(destination, block, self) + builder.into_expr(destination, scope, block, self) } } diff --git a/src/librustc_mir/build/matches/mod.rs b/src/librustc_mir/build/matches/mod.rs index 861de07c612e9..e8cf827aa900b 100644 --- a/src/librustc_mir/build/matches/mod.rs +++ b/src/librustc_mir/build/matches/mod.rs @@ -102,6 +102,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { pub fn match_expr( &mut self, destination: &Place<'tcx>, + destination_scope: Option<region::Scope>, span: Span, mut block: BasicBlock, scrutinee: ExprRef<'tcx>, @@ -228,6 +229,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { }; // Step 5. Create everything else: the guards and the arms. + if let Some(scope) = destination_scope { + // `match` assigns to its destination in each arm. Since we can't + // easily unschedule drops, we schedule the drop now. + let local = destination.as_local() + .expect("cannot schedule drop of non-Local place"); + self.schedule_drop(span, scope, local, DropKind::Value); + } + let match_scope = self.scopes.topmost(); let arm_end_blocks: Vec<_> = arm_candidates.into_iter().map(|(arm, mut candidates)| { @@ -275,7 +284,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { this.source_scope = source_scope; } - this.into(destination, arm_block, body) + // No scope is provided, since we've scheduled the drop above. 
+ this.into(destination, None, arm_block, body) }) }).collect(); @@ -311,8 +321,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } => { let place = self.storage_live_binding(block, var, irrefutable_pat.span, OutsideGuard); - unpack!(block = self.into(&place, block, initializer)); + let region_scope = self.hir.region_scope_tree.var_scope(var.local_id); + unpack!(block = self.into(&place, Some(region_scope), block, initializer)); // Inject a fake read, see comments on `FakeReadCause::ForLet`. let source_info = self.source_info(irrefutable_pat.span); @@ -324,7 +335,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { }, ); - self.schedule_drop_for_binding(var, irrefutable_pat.span, OutsideGuard); block.unit() } @@ -352,9 +362,10 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { user_ty_span, }, } => { + let region_scope = self.hir.region_scope_tree.var_scope(var.local_id); let place = self.storage_live_binding(block, var, irrefutable_pat.span, OutsideGuard); - unpack!(block = self.into(&place, block, initializer)); + unpack!(block = self.into(&place, Some(region_scope), block, initializer)); // Inject a fake read, see comments on `FakeReadCause::ForLet`. let pattern_source_info = self.source_info(irrefutable_pat.span); @@ -400,7 +411,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { }, ); - self.schedule_drop_for_binding(var, irrefutable_pat.span, OutsideGuard); block.unit() } @@ -535,21 +545,18 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { kind: StatementKind::StorageLive(local_id), }, ); - let var_ty = self.local_decls[local_id].ty; let region_scope = self.hir.region_scope_tree.var_scope(var.local_id); - self.schedule_drop(span, region_scope, local_id, var_ty, DropKind::Storage); + self.schedule_drop(span, region_scope, local_id, DropKind::Storage); Place::from(local_id) } pub fn schedule_drop_for_binding(&mut self, var: HirId, span: Span, for_guard: ForGuard) { let local_id = self.var_local_id(var, for_guard); - let var_ty = self.local_decls[local_id].ty; let region_scope = self.hir.region_scope_tree.var_scope(var.local_id); self.schedule_drop( span, region_scope, local_id, - var_ty, DropKind::Value, ); } diff --git a/src/librustc_mir/build/mod.rs b/src/librustc_mir/build/mod.rs index 6b6eac5add2fa..9a98cf1cb88c1 100644 --- a/src/librustc_mir/build/mod.rs +++ b/src/librustc_mir/build/mod.rs @@ -616,6 +616,7 @@ where let source_info = builder.source_info(span); let call_site_s = (call_site_scope, source_info); unpack!(block = builder.in_scope(call_site_s, LintLevel::Inherited, |builder| { + builder.schedule_drop(span, call_site_scope, RETURN_PLACE, DropKind::Value); if should_abort_on_panic(tcx, fn_def_id, abi) { builder.schedule_abort(); } @@ -646,6 +647,7 @@ where builder.cfg.terminate(unreachable_block, source_info, TerminatorKind::Unreachable); } + builder.unschedule_return_place_drop(); return_block.unit() })); assert_eq!(block, builder.return_block()); @@ -687,7 +689,9 @@ fn construct_const<'a, 'tcx>( let mut block = START_BLOCK; let ast_expr = &tcx.hir().body(body_id).value; let expr = builder.hir.mirror(ast_expr); - unpack!(block = builder.into_expr(&Place::return_place(), block, expr)); + // We don't provide a scope because we can't unwind in constants, so won't + // need to drop the return place. 
+ unpack!(block = builder.into_expr(&Place::return_place(), None, block, expr)); let source_info = builder.source_info(span); builder.cfg.terminate(block, source_info, TerminatorKind::Return); @@ -829,12 +833,12 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { // Function arguments always get the first Local indices after the return place let local = Local::new(index + 1); let place = Place::from(local); - let &ArgInfo(ty, opt_ty_info, arg_opt, ref self_binding) = arg_info; + let &ArgInfo(_, opt_ty_info, arg_opt, ref self_binding) = arg_info; // Make sure we drop (parts of) the argument even when not matched on. self.schedule_drop( arg_opt.as_ref().map_or(ast_body.span, |arg| arg.pat.span), - argument_scope, local, ty, DropKind::Value, + argument_scope, local, DropKind::Value, ); if let Some(arg) = arg_opt { @@ -888,7 +892,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } let body = self.hir.mirror(ast_body); - self.into(&Place::return_place(), block, body) + // No scope is provided, since we've scheduled the drop of the return + // place. + self.into(&Place::return_place(), None, block, body) } fn set_correct_source_scope_for_arg( diff --git a/src/librustc_mir/build/scope.rs b/src/librustc_mir/build/scope.rs index a26ec72584bda..6206bfd9f3eac 100644 --- a/src/librustc_mir/build/scope.rs +++ b/src/librustc_mir/build/scope.rs @@ -85,7 +85,6 @@ should go to. use crate::build::{BlockAnd, BlockAndExtension, BlockFrame, Builder, CFG}; use crate::hair::{Expr, ExprRef, LintLevel}; use rustc::middle::region; -use rustc::ty::Ty; use rustc::hir; use rustc::mir::*; use syntax_pos::{DUMMY_SP, Span}; @@ -173,11 +172,11 @@ struct BreakableScope<'tcx> { region_scope: region::Scope, /// Where the body of the loop begins. `None` if block continue_block: Option, - /// Block to branch into when the loop or block terminates (either by being `break`-en out - /// from, or by having its condition to become false) + /// Block to branch into when the loop or block terminates (either by being + /// `break`-en out from, or by having its condition to become false) break_block: BasicBlock, - /// The destination of the loop/block expression itself (i.e., where to put the result of a - /// `break` expression) + /// The destination of the loop/block expression itself (i.e., where to put + /// the result of a `break` expression) break_destination: Place<'tcx>, } @@ -514,7 +513,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { if let Some(value) = value { debug!("stmt_expr Break val block_context.push(SubExpr)"); self.block_context.push(BlockFrame::SubExpr); - unpack!(block = self.into(&destination, block, value)); + unpack!(block = self.into(&destination, None, block, value)); self.block_context.pop(); } else { self.cfg.push_assign_unit(block, source_info, &destination) @@ -728,10 +727,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { span: Span, region_scope: region::Scope, local: Local, - place_ty: Ty<'tcx>, ) { - self.schedule_drop(span, region_scope, local, place_ty, DropKind::Storage); - self.schedule_drop(span, region_scope, local, place_ty, DropKind::Value); + self.schedule_drop(span, region_scope, local, DropKind::Storage); + self.schedule_drop(span, region_scope, local, DropKind::Value); } /// Indicates that `place` should be dropped on exit from @@ -744,12 +742,13 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { span: Span, region_scope: region::Scope, local: Local, - place_ty: Ty<'tcx>, drop_kind: DropKind, ) { - let needs_drop = self.hir.needs_drop(place_ty); - match drop_kind { - DropKind::Value => if !needs_drop { return }, + let needs_drop = match 
drop_kind { + DropKind::Value => { + if !self.hir.needs_drop(self.local_decls[local].ty) { return } + true + }, DropKind::Storage => { if local.index() <= self.arg_count { span_bug!( @@ -758,8 +757,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { self.arg_count, ) } + false } - } + }; for scope in self.scopes.iter_mut() { let this_scope = scope.region_scope == region_scope; @@ -1070,6 +1070,18 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { success_block } + /// Unschedules the drop of the return place. + /// + /// If the return type of a function requires drop, then we schedule it + /// in the outermost scope so that it's dropped if there's a panic while + /// we drop any local variables. But we don't want to drop it if we + /// return normally. + crate fn unschedule_return_place_drop(&mut self) { + assert_eq!(self.scopes.len(), 1); + assert!(self.scopes.scopes[0].drops.len() <= 1); + self.scopes.scopes[0].drops.clear(); + } + // `match` arm scopes // ================== /// Unschedules any drops in the top scope. diff --git a/src/librustc_mir/dataflow/impls/storage_liveness.rs b/src/librustc_mir/dataflow/impls/storage_liveness.rs index 0f66b13fdc51a..c1695ba66d0d5 100644 --- a/src/librustc_mir/dataflow/impls/storage_liveness.rs +++ b/src/librustc_mir/dataflow/impls/storage_liveness.rs @@ -109,15 +109,13 @@ impl<'mir, 'tcx> BitDenotation<'tcx> for RequiresStorage<'mir, 'tcx> { assert_eq!(1, self.body.arg_count); } - fn statement_effect(&self, - sets: &mut GenKillSet<Local>, - loc: Location) { - self.check_for_move(sets, loc); + fn before_statement_effect(&self, sets: &mut GenKillSet<Local>, loc: Location) { + // If we borrow or assign to a place then it needs storage for that + // statement. self.check_for_borrow(sets, loc); let stmt = &self.body[loc.block].statements[loc.statement_index]; match stmt.kind { - StatementKind::StorageLive(l) => sets.gen(l), StatementKind::StorageDead(l) => sets.kill(l), StatementKind::Assign(box(ref place, _)) | StatementKind::SetDiscriminant { box ref place, .. } => { @@ -136,11 +134,35 @@ impl<'mir, 'tcx> BitDenotation<'tcx> for RequiresStorage<'mir, 'tcx> { } } - fn terminator_effect(&self, - sets: &mut GenKillSet<Local>, - loc: Location) { + fn statement_effect(&self, sets: &mut GenKillSet<Local>, loc: Location) { + // If we move from a place then it only stops needing storage *after* + // that statement. self.check_for_move(sets, loc); + } + + fn before_terminator_effect(&self, sets: &mut GenKillSet<Local>, loc: Location) { + self.check_for_borrow(sets, loc); + + if let TerminatorKind::Call { + destination: Some((Place { base: PlaceBase::Local(local), .. }, _)), + .. + } = self.body[loc.block].terminator().kind { + sets.gen(local); + } + } + + fn terminator_effect(&self, sets: &mut GenKillSet<Local>, loc: Location) { + // For call terminators the destination requires storage for the call + // and after the call returns successfully, but not after a panic. + // Since `propagate_call_unwind` doesn't exist, we have to kill the + // destination here, and then gen it again in `propagate_call_return`. + if let TerminatorKind::Call { + destination: Some((Place { base: PlaceBase::Local(local), projection: box [] }, _)), + .. 
+ } = self.body[loc.block].terminator().kind { + sets.kill(local); + } + self.check_for_move(sets, loc); } fn propagate_call_return( diff --git a/src/librustc_mir/transform/generator.rs b/src/librustc_mir/transform/generator.rs index a648fadde43be..865fa012c2995 100644 --- a/src/librustc_mir/transform/generator.rs +++ b/src/librustc_mir/transform/generator.rs @@ -508,10 +508,7 @@ fn locals_live_across_suspend_points( storage_liveness_map.insert(block, storage_liveness.clone()); requires_storage_cursor.seek(loc); - let mut storage_required = requires_storage_cursor.get().clone(); - - // Mark locals without storage statements as always requiring storage - storage_required.union(&ignored.0); + let storage_required = requires_storage_cursor.get().clone(); // Locals live are live at this point only if they are used across // suspension points (the `liveness` variable) diff --git a/src/test/mir-opt/box_expr.rs b/src/test/mir-opt/box_expr.rs index 8dc6b73edf6d4..76098731947fe 100644 --- a/src/test/mir-opt/box_expr.rs +++ b/src/test/mir-opt/box_expr.rs @@ -41,33 +41,36 @@ impl Drop for S { // // bb2: { // _1 = move _2; -// drop(_2) -> bb4; +// drop(_2) -> [return: bb5, unwind: bb4]; // } // // bb3 (cleanup): { // drop(_2) -> bb1; // } // -// bb4: { +// bb4 (cleanup): { +// drop(_1) -> bb1; +// } +// +// bb5: { // StorageDead(_2); // StorageLive(_3); // StorageLive(_4); // _4 = move _1; -// _3 = const std::mem::drop::>(move _4) -> [return: bb5, unwind: bb7]; +// _3 = const std::mem::drop::>(move _4) -> [return: bb6, unwind: bb7]; // } // -// bb5: { +// bb6: { // StorageDead(_4); // StorageDead(_3); // _0 = (); // drop(_1) -> bb8; // } -// bb6 (cleanup): { -// drop(_1) -> bb1; -// } +// // bb7 (cleanup): { -// drop(_4) -> bb6; +// drop(_4) -> bb4; // } +// // bb8: { // StorageDead(_1); // return; diff --git a/src/test/mir-opt/issue-62289.rs b/src/test/mir-opt/issue-62289.rs index a3b517e9bca87..e8dd56cbbae22 100644 --- a/src/test/mir-opt/issue-62289.rs +++ b/src/test/mir-opt/issue-62289.rs @@ -24,7 +24,7 @@ fn main() { // StorageLive(_3); // StorageLive(_4); // _4 = std::option::Option::::None; -// _3 = const as std::ops::Try>::into_result(move _4) -> [return: bb2, unwind: bb3]; +// _3 = const as std::ops::Try>::into_result(move _4) -> [return: bb2, unwind: bb4]; // } // bb1 (cleanup): { // resume; @@ -32,60 +32,63 @@ fn main() { // bb2: { // StorageDead(_4); // _5 = discriminant(_3); -// switchInt(move _5) -> [0isize: bb10, 1isize: bb5, otherwise: bb4]; +// switchInt(move _5) -> [0isize: bb11, 1isize: bb6, otherwise: bb5]; // } // bb3 (cleanup): { -// drop(_2) -> bb1; +// drop(_0) -> bb1; // } -// bb4: { -// unreachable; +// bb4 (cleanup): { +// drop(_2) -> bb3; // } // bb5: { +// unreachable; +// } +// bb6: { // StorageLive(_6); // _6 = ((_3 as Err).0: std::option::NoneError); // StorageLive(_8); // StorageLive(_9); // _9 = _6; -// _8 = const >::from(move _9) -> [return: bb7, unwind: bb3]; +// _8 = const >::from(move _9) -> [return: bb8, unwind: bb4]; // } -// bb6: { +// bb7: { // return; // } -// bb7: { +// bb8: { // StorageDead(_9); -// _0 = const > as std::ops::Try>::from_error(move _8) -> [return: bb8, unwind: bb3]; +// _0 = const > as std::ops::Try>::from_error(move _8) -> [return: bb9, unwind: bb4]; // } -// bb8: { +// bb9: { // StorageDead(_8); // StorageDead(_6); -// drop(_2) -> bb9; +// drop(_2) -> [return: bb10, unwind: bb3]; // } -// bb9: { +// bb10: { // StorageDead(_2); // StorageDead(_1); // StorageDead(_3); -// goto -> bb6; +// goto -> bb7; // } -// bb10: { +// bb11: { // 
StorageLive(_10); // _10 = ((_3 as Ok).0: u32); // (*_2) = _10; // StorageDead(_10); // _1 = move _2; -// drop(_2) -> [return: bb12, unwind: bb11]; +// drop(_2) -> [return: bb13, unwind: bb12]; // } -// bb11 (cleanup): { -// drop(_1) -> bb1; +// bb12 (cleanup): { +// drop(_1) -> bb3; // } -// bb12: { +// bb13: { // StorageDead(_2); // _0 = std::option::Option::>::Some(move _1,); -// drop(_1) -> bb13; +// drop(_1) -> [return: bb14, unwind: bb3]; // } -// bb13: { +// bb14: { // StorageDead(_1); // StorageDead(_3); -// goto -> bb6; +// goto -> bb7; // } // } // END rustc.test.ElaborateDrops.before.mir diff --git a/src/test/ui/async-await/async-fn-size-moved-locals.rs b/src/test/ui/async-await/async-fn-size-moved-locals.rs index 3ffcbb58595eb..c266644fd702c 100644 --- a/src/test/ui/async-await/async-fn-size-moved-locals.rs +++ b/src/test/ui/async-await/async-fn-size-moved-locals.rs @@ -22,7 +22,8 @@ struct BigFut([u8; BIG_FUT_SIZE]); impl BigFut { fn new() -> Self { BigFut([0; BIG_FUT_SIZE]) - } } + } +} impl Drop for BigFut { fn drop(&mut self) {} diff --git a/src/test/ui/async-await/async-fn-size-uninit-locals.rs b/src/test/ui/async-await/async-fn-size-uninit-locals.rs new file mode 100644 index 0000000000000..a489fb11630cd --- /dev/null +++ b/src/test/ui/async-await/async-fn-size-uninit-locals.rs @@ -0,0 +1,103 @@ +// Test that we don't store uninitialized locals in futures from `async fn`. +// +// The exact sizes can change by a few bytes (we'd like to know when they do). +// What we don't want to see is the wrong multiple of 1024 (the size of `Big`) +// being reflected in the size. + +// ignore-wasm32-bare (sizes don't match) +// run-pass + +// edition:2018 + +#![allow(unused_variables, unused_assignments)] + +use std::future::Future; +use std::pin::Pin; +use std::task::{Context, Poll}; + +const BIG_FUT_SIZE: usize = 1024; +struct Big([u8; BIG_FUT_SIZE]); + +impl Big { + fn new() -> Self { + Big([0; BIG_FUT_SIZE]) + } +} + +impl Drop for Big { + fn drop(&mut self) {} +} + +#[allow(dead_code)] +struct Joiner { + a: Option<Big>, + b: Option<Big>, + c: Option<Big>, +} + +impl Future for Joiner { + type Output = (); + + fn poll(self: Pin<&mut Self>, _ctx: &mut Context<'_>) -> Poll<Self::Output> { + Poll::Ready(()) + } +} + +fn noop() {} +async fn fut() {} + +async fn single() { + let x; + fut().await; + x = Big::new(); +} + +async fn single_with_noop() { + let x; + fut().await; + noop(); + x = Big::new(); + noop(); +} + +async fn joined() { + let joiner; + let a = Big::new(); + let b = Big::new(); + let c = Big::new(); + + fut().await; + noop(); + joiner = Joiner { a: Some(a), b: Some(b), c: Some(c) }; + noop(); +} + +async fn joined_with_noop() { + let joiner; + let a = Big::new(); + let b = Big::new(); + let c = Big::new(); + + fut().await; + noop(); + joiner = Joiner { a: Some(a), b: Some(b), c: Some(c) }; + noop(); +} + +async fn join_retval() -> Joiner { + let a = Big::new(); + let b = Big::new(); + let c = Big::new(); + + fut().await; + noop(); + Joiner { a: Some(a), b: Some(b), c: Some(c) } +} + +fn main() { + assert_eq!(8, std::mem::size_of_val(&single())); + assert_eq!(12, std::mem::size_of_val(&single_with_noop())); + assert_eq!(3084, std::mem::size_of_val(&joined())); + assert_eq!(3084, std::mem::size_of_val(&joined_with_noop())); + assert_eq!(3084, std::mem::size_of_val(&join_retval())); +} diff --git a/src/test/ui/async-await/async-fn-size.rs b/src/test/ui/async-await/async-fn-size.rs index b5c94ecb71690..b313992db4ecb 100644 --- a/src/test/ui/async-await/async-fn-size.rs +++ 
b/src/test/ui/async-await/async-fn-size.rs @@ -89,10 +89,10 @@ fn main() { assert_eq!(8, std::mem::size_of_val(&await1_level1())); assert_eq!(12, std::mem::size_of_val(&await2_level1())); assert_eq!(12, std::mem::size_of_val(&await3_level1())); - assert_eq!(20, std::mem::size_of_val(&await3_level2())); - assert_eq!(28, std::mem::size_of_val(&await3_level3())); - assert_eq!(36, std::mem::size_of_val(&await3_level4())); - assert_eq!(44, std::mem::size_of_val(&await3_level5())); + assert_eq!(24, std::mem::size_of_val(&await3_level2())); + assert_eq!(36, std::mem::size_of_val(&await3_level3())); + assert_eq!(48, std::mem::size_of_val(&await3_level4())); + assert_eq!(60, std::mem::size_of_val(&await3_level5())); assert_eq!(1, wait(base())); assert_eq!(1, wait(await1_level1())); diff --git a/src/test/ui/drop/dynamic-drop-async.rs b/src/test/ui/drop/dynamic-drop-async.rs index 91063edf0f6c4..398bcb7ec0e82 100644 --- a/src/test/ui/drop/dynamic-drop-async.rs +++ b/src/test/ui/drop/dynamic-drop-async.rs @@ -7,7 +7,7 @@ // edition:2018 // ignore-wasm32-bare compiled with panic=abort by default -#![feature(slice_patterns)] +#![feature(slice_patterns, arbitrary_self_types)] #![allow(unused)] use std::{ @@ -45,6 +45,7 @@ impl Future for Defer { /// The `failing_op`-th operation will panic. struct Allocator { data: RefCell<Vec<bool>>, + name: &'static str, failing_op: usize, cur_ops: Cell<usize>, } @@ -56,23 +57,28 @@ impl Drop for Allocator { fn drop(&mut self) { let data = self.data.borrow(); if data.iter().any(|d| *d) { - panic!("missing free: {:?}", data); + panic!("missing free in {:?}: {:?}", self.name, data); } } } impl Allocator { - fn new(failing_op: usize) -> Self { - Allocator { failing_op, cur_ops: Cell::new(0), data: RefCell::new(vec![]) } + fn new(failing_op: usize, name: &'static str) -> Self { + Allocator { + failing_op, + name, + cur_ops: Cell::new(0), + data: RefCell::new(vec![]), + } } - fn alloc(&self) -> impl Future<Output = Ptr<'_>> + '_ { + fn alloc(self: &Rc<Self>) -> impl Future<Output = Ptr> + 'static { self.fallible_operation(); let mut data = self.data.borrow_mut(); let addr = data.len(); data.push(true); - Defer { ready: false, value: Some(Ptr(addr, self)) } + Defer { ready: false, value: Some(Ptr(addr, self.clone())) } } fn fallible_operation(&self) { self.cur_ops.set(self.cur_ops.get() + 1); @@ -85,11 +91,11 @@ impl Allocator { // Type that tracks whether it was dropped and can panic when it's created or // destroyed. -struct Ptr<'a>(usize, &'a Allocator); -impl<'a> Drop for Ptr<'a> { +struct Ptr(usize, Rc<Allocator>); +impl Drop for Ptr { fn drop(&mut self) { match self.1.data.borrow_mut()[self.0] { - false => panic!("double free at index {:?}", self.0), + false => panic!("double free in {:?} at index {:?}", self.1.name, self.0), ref mut d => *d = false, } @@ -113,7 +119,7 @@ async fn dynamic_drop(a: Rc<Allocator>, c: bool) { }; } -struct TwoPtrs<'a>(Ptr<'a>, Ptr<'a>); +struct TwoPtrs(Ptr, Ptr); async fn struct_dynamic_drop(a: Rc<Allocator>, c0: bool, c1: bool, c: bool) { for i in 0..2 { let x; @@ -228,21 +234,62 @@ async fn subslice_pattern_reassign(a: Rc<Allocator>) { a.alloc().await; } -fn run_test<F, G>(cx: &mut Context<'_>, ref f: F) +async fn panic_after_return(a: Rc<Allocator>, c: bool) -> (Ptr,) { + a.alloc().await; + let p = a.alloc().await; + if c { + a.alloc().await; + let q = a.alloc().await; + // We use a return type that isn't used anywhere else to make sure that + // the return place doesn't incorrectly end up in the generator state. 
+ return (a.alloc().await,); + } + (a.alloc().await,) +} + + +async fn panic_after_init_by_loop(a: Rc<Allocator>) { + a.alloc().await; + let p = a.alloc().await; + let q = loop { + a.alloc().await; + let r = a.alloc().await; + break a.alloc().await; + }; +} + +async fn panic_after_init_by_match_with_bindings_and_guard(a: Rc<Allocator>, b: bool) { + a.alloc().await; + let p = a.alloc().await; + let q = match a.alloc().await { + ref _x if b => { + a.alloc().await; + let r = a.alloc().await; + a.alloc().await + } + _x => { + a.alloc().await; + let r = a.alloc().await; + a.alloc().await + }, + }; +} + +fn run_test<F, G>(cx: &mut Context<'_>, ref f: F, name: &'static str) where F: Fn(Rc<Allocator>) -> G, - G: Future<Output = ()>, + G: Future, { for polls in 0.. { // Run without any panics to find which operations happen after the // penultimate `poll`. - let first_alloc = Rc::new(Allocator::new(usize::MAX)); + let first_alloc = Rc::new(Allocator::new(usize::MAX, name)); let mut fut = Box::pin(f(first_alloc.clone())); let mut ops_before_last_poll = 0; let mut completed = false; for _ in 0..polls { ops_before_last_poll = first_alloc.cur_ops.get(); - if let Poll::Ready(()) = fut.as_mut().poll(cx) { + if let Poll::Ready(_) = fut.as_mut().poll(cx) { completed = true; } } @@ -251,7 +298,7 @@ where // Start at `ops_before_last_poll` so that we will always be able to // `poll` the expected number of times. for failing_op in ops_before_last_poll..first_alloc.cur_ops.get() { - let alloc = Rc::new(Allocator::new(failing_op + 1)); + let alloc = Rc::new(Allocator::new(failing_op + 1, name)); let f = &f; let cx = &mut *cx; let result = panic::catch_unwind(panic::AssertUnwindSafe(move || { @@ -281,46 +328,56 @@ fn clone_waker(data: *const ()) -> RawWaker { RawWaker::new(data, &RawWakerVTable::new(clone_waker, drop, drop, drop)) } +macro_rules! 
run_test { + ($ctxt:expr, $e:expr) => { run_test($ctxt, $e, stringify!($e)); }; +} + fn main() { let waker = unsafe { Waker::from_raw(clone_waker(ptr::null())) }; let context = &mut Context::from_waker(&waker); - run_test(context, |a| dynamic_init(a, false)); - run_test(context, |a| dynamic_init(a, true)); - run_test(context, |a| dynamic_drop(a, false)); - run_test(context, |a| dynamic_drop(a, true)); - - run_test(context, |a| assignment(a, false, false)); - run_test(context, |a| assignment(a, false, true)); - run_test(context, |a| assignment(a, true, false)); - run_test(context, |a| assignment(a, true, true)); - - run_test(context, |a| array_simple(a)); - run_test(context, |a| vec_simple(a)); - run_test(context, |a| vec_unreachable(a)); - - run_test(context, |a| struct_dynamic_drop(a, false, false, false)); - run_test(context, |a| struct_dynamic_drop(a, false, false, true)); - run_test(context, |a| struct_dynamic_drop(a, false, true, false)); - run_test(context, |a| struct_dynamic_drop(a, false, true, true)); - run_test(context, |a| struct_dynamic_drop(a, true, false, false)); - run_test(context, |a| struct_dynamic_drop(a, true, false, true)); - run_test(context, |a| struct_dynamic_drop(a, true, true, false)); - run_test(context, |a| struct_dynamic_drop(a, true, true, true)); - - run_test(context, |a| field_assignment(a, false)); - run_test(context, |a| field_assignment(a, true)); - - run_test(context, |a| mixed_drop_and_nondrop(a)); - - run_test(context, |a| slice_pattern_one_of(a, 0)); - run_test(context, |a| slice_pattern_one_of(a, 1)); - run_test(context, |a| slice_pattern_one_of(a, 2)); - run_test(context, |a| slice_pattern_one_of(a, 3)); - - run_test(context, |a| subslice_pattern_from_end_with_drop(a, true, true)); - run_test(context, |a| subslice_pattern_from_end_with_drop(a, true, false)); - run_test(context, |a| subslice_pattern_from_end_with_drop(a, false, true)); - run_test(context, |a| subslice_pattern_from_end_with_drop(a, false, false)); - run_test(context, |a| subslice_pattern_reassign(a)); + run_test!(context, |a| dynamic_init(a, false)); + run_test!(context, |a| dynamic_init(a, true)); + run_test!(context, |a| dynamic_drop(a, false)); + run_test!(context, |a| dynamic_drop(a, true)); + + run_test!(context, |a| assignment(a, false, false)); + run_test!(context, |a| assignment(a, false, true)); + run_test!(context, |a| assignment(a, true, false)); + run_test!(context, |a| assignment(a, true, true)); + + run_test!(context, |a| array_simple(a)); + run_test!(context, |a| vec_simple(a)); + run_test!(context, |a| vec_unreachable(a)); + + run_test!(context, |a| struct_dynamic_drop(a, false, false, false)); + run_test!(context, |a| struct_dynamic_drop(a, false, false, true)); + run_test!(context, |a| struct_dynamic_drop(a, false, true, false)); + run_test!(context, |a| struct_dynamic_drop(a, false, true, true)); + run_test!(context, |a| struct_dynamic_drop(a, true, false, false)); + run_test!(context, |a| struct_dynamic_drop(a, true, false, true)); + run_test!(context, |a| struct_dynamic_drop(a, true, true, false)); + run_test!(context, |a| struct_dynamic_drop(a, true, true, true)); + + run_test!(context, |a| field_assignment(a, false)); + run_test!(context, |a| field_assignment(a, true)); + + run_test!(context, |a| mixed_drop_and_nondrop(a)); + + run_test!(context, |a| slice_pattern_one_of(a, 0)); + run_test!(context, |a| slice_pattern_one_of(a, 1)); + run_test!(context, |a| slice_pattern_one_of(a, 2)); + run_test!(context, |a| slice_pattern_one_of(a, 3)); + + run_test!(context, 
|a| subslice_pattern_from_end_with_drop(a, true, true)); + run_test!(context, |a| subslice_pattern_from_end_with_drop(a, true, false)); + run_test!(context, |a| subslice_pattern_from_end_with_drop(a, false, true)); + run_test!(context, |a| subslice_pattern_from_end_with_drop(a, false, false)); + run_test!(context, |a| subslice_pattern_reassign(a)); + + run_test!(context, |a| panic_after_return(a, false)); + run_test!(context, |a| panic_after_return(a, true)); + run_test!(context, |a| panic_after_init_by_loop(a)); + run_test!(context, |a| panic_after_init_by_match_with_bindings_and_guard(a, false)); + run_test!(context, |a| panic_after_init_by_match_with_bindings_and_guard(a, true)); } diff --git a/src/test/ui/drop/dynamic-drop.rs b/src/test/ui/drop/dynamic-drop.rs index 8516bc3d96424..6f9112ae00605 100644 --- a/src/test/ui/drop/dynamic-drop.rs +++ b/src/test/ui/drop/dynamic-drop.rs @@ -17,6 +17,7 @@ struct InjectedFailure; struct Allocator { data: RefCell<Vec<bool>>, + name: &'static str, failing_op: usize, cur_ops: Cell<usize>, } @@ -28,17 +29,18 @@ impl Drop for Allocator { fn drop(&mut self) { let data = self.data.borrow(); if data.iter().any(|d| *d) { - panic!("missing free: {:?}", data); + panic!("missing free in {:?}: {:?}", self.name, data); } } } impl Allocator { - fn new(failing_op: usize) -> Self { + fn new(failing_op: usize, name: &'static str) -> Self { Allocator { failing_op: failing_op, cur_ops: Cell::new(0), - data: RefCell::new(vec![]) + data: RefCell::new(vec![]), + name, } } fn alloc(&self) -> Ptr<'_> { @@ -53,20 +55,6 @@ impl Allocator { data.push(true); Ptr(addr, self) } - // FIXME(#47949) Any use of this indicates a bug in rustc: we should never - // be leaking values in the cases here. - // - // Creates a `Ptr<'_>` and checks that the allocated value is leaked if the - // `failing_op` is in the list of exception. - fn alloc_leaked(&self, exceptions: Vec<usize>) -> Ptr<'_> { - let ptr = self.alloc(); - - if exceptions.iter().any(|operation| *operation == self.failing_op) { - let mut data = self.data.borrow_mut(); - data[ptr.0] = false; - } - ptr - } } struct Ptr<'a>(usize, &'a Allocator); @@ -74,7 +62,7 @@ impl<'a> Drop for Ptr<'a> { fn drop(&mut self) { match self.1.data.borrow_mut()[self.0] { false => { - panic!("double free at index {:?}", self.0) + panic!("double free in {:?} at index {:?}", self.1.name, self.0) } ref mut d => *d = false } @@ -270,79 +258,148 @@ fn subslice_pattern_reassign(a: &Allocator) { } fn panic_after_return(a: &Allocator) -> Ptr<'_> { - // Panic in the drop of `p` or `q` can leak - let exceptions = vec![8, 9]; a.alloc(); let p = a.alloc(); { a.alloc(); let p = a.alloc(); - // FIXME (#47949) We leak values when we panic in a destructor after - // evaluating an expression with `rustc_mir::build::Builder::into`. 
- a.alloc_leaked(exceptions) + a.alloc() } } fn panic_after_return_expr(a: &Allocator) -> Ptr<'_> { - // Panic in the drop of `p` or `q` can leak - let exceptions = vec![8, 9]; a.alloc(); let p = a.alloc(); { a.alloc(); let q = a.alloc(); - // FIXME (#47949) - return a.alloc_leaked(exceptions); + return a.alloc(); } } fn panic_after_init(a: &Allocator) { - // Panic in the drop of `r` can leak - let exceptions = vec![8]; a.alloc(); let p = a.alloc(); let q = { a.alloc(); let r = a.alloc(); - // FIXME (#47949) - a.alloc_leaked(exceptions) + a.alloc() }; } fn panic_after_init_temp(a: &Allocator) { - // Panic in the drop of `r` can leak - let exceptions = vec![8]; a.alloc(); let p = a.alloc(); { a.alloc(); let r = a.alloc(); - // FIXME (#47949) - a.alloc_leaked(exceptions) + a.alloc() }; } fn panic_after_init_by_loop(a: &Allocator) { - // Panic in the drop of `r` can leak - let exceptions = vec![8]; a.alloc(); let p = a.alloc(); let q = loop { a.alloc(); let r = a.alloc(); - // FIXME (#47949) - break a.alloc_leaked(exceptions); + break a.alloc(); + }; +} + +fn panic_after_init_by_match(a: &Allocator, b: bool) { + a.alloc(); + let p = a.alloc(); + loop { + let q = match b { + true => { + a.alloc(); + let r = a.alloc(); + a.alloc() + } + false => { + a.alloc(); + let r = a.alloc(); + break a.alloc(); + } + }; + return; + }; +} + +fn panic_after_init_by_match_with_guard(a: &Allocator, b: bool) { + a.alloc(); + let p = a.alloc(); + let q = match a.alloc() { + _ if b => { + a.alloc(); + let r = a.alloc(); + a.alloc() + } + _ => { + a.alloc(); + let r = a.alloc(); + a.alloc() + }, + }; +} + +fn panic_after_init_by_match_with_bindings_and_guard(a: &Allocator, b: bool) { + a.alloc(); + let p = a.alloc(); + let q = match a.alloc() { + _x if b => { + a.alloc(); + let r = a.alloc(); + a.alloc() + } + _x => { + a.alloc(); + let r = a.alloc(); + a.alloc() + }, + }; +} + +fn panic_after_init_by_match_with_ref_bindings_and_guard(a: &Allocator, b: bool) { + a.alloc(); + let p = a.alloc(); + let q = match a.alloc() { + ref _x if b => { + a.alloc(); + let r = a.alloc(); + a.alloc() + } + ref _x => { + a.alloc(); + let r = a.alloc(); + a.alloc() + }, + }; +} + +fn panic_after_init_by_break_if(a: &Allocator, b: bool) { + a.alloc(); + let p = a.alloc(); + let q = loop { + let r = a.alloc(); + break if b { + let s = a.alloc(); + a.alloc() + } else { + a.alloc() + }; }; } -fn run_test(mut f: F) +fn run_test(mut f: F, name: &'static str) where F: FnMut(&Allocator) { - let first_alloc = Allocator::new(usize::MAX); + let first_alloc = Allocator::new(usize::MAX, name); f(&first_alloc); for failing_op in 1..first_alloc.cur_ops.get()+1 { - let alloc = Allocator::new(failing_op); + let alloc = Allocator::new(failing_op, name); let alloc = &alloc; let f = panic::AssertUnwindSafe(&mut f); let result = panic::catch_unwind(move || { @@ -360,77 +417,91 @@ fn run_test(mut f: F) } } -fn run_test_nopanic(mut f: F) +fn run_test_nopanic(mut f: F, name: &'static str) where F: FnMut(&Allocator) { - let first_alloc = Allocator::new(usize::MAX); + let first_alloc = Allocator::new(usize::MAX, name); f(&first_alloc); } +macro_rules! 
run_test { + ($e:expr) => { run_test($e, stringify!($e)); } +} + fn main() { - run_test(|a| dynamic_init(a, false)); - run_test(|a| dynamic_init(a, true)); - run_test(|a| dynamic_drop(a, false)); - run_test(|a| dynamic_drop(a, true)); - - run_test(|a| assignment2(a, false, false)); - run_test(|a| assignment2(a, false, true)); - run_test(|a| assignment2(a, true, false)); - run_test(|a| assignment2(a, true, true)); - - run_test(|a| assignment1(a, false)); - run_test(|a| assignment1(a, true)); - - run_test(|a| array_simple(a)); - run_test(|a| vec_simple(a)); - run_test(|a| vec_unreachable(a)); - - run_test(|a| struct_dynamic_drop(a, false, false, false)); - run_test(|a| struct_dynamic_drop(a, false, false, true)); - run_test(|a| struct_dynamic_drop(a, false, true, false)); - run_test(|a| struct_dynamic_drop(a, false, true, true)); - run_test(|a| struct_dynamic_drop(a, true, false, false)); - run_test(|a| struct_dynamic_drop(a, true, false, true)); - run_test(|a| struct_dynamic_drop(a, true, true, false)); - run_test(|a| struct_dynamic_drop(a, true, true, true)); - - run_test(|a| field_assignment(a, false)); - run_test(|a| field_assignment(a, true)); - - run_test(|a| generator(a, 0)); - run_test(|a| generator(a, 1)); - run_test(|a| generator(a, 2)); - run_test(|a| generator(a, 3)); - - run_test(|a| mixed_drop_and_nondrop(a)); - - run_test(|a| slice_pattern_first(a)); - run_test(|a| slice_pattern_middle(a)); - run_test(|a| slice_pattern_two(a)); - run_test(|a| slice_pattern_last(a)); - run_test(|a| slice_pattern_one_of(a, 0)); - run_test(|a| slice_pattern_one_of(a, 1)); - run_test(|a| slice_pattern_one_of(a, 2)); - run_test(|a| slice_pattern_one_of(a, 3)); - - run_test(|a| subslice_pattern_from_end(a, true)); - run_test(|a| subslice_pattern_from_end(a, false)); - run_test(|a| subslice_pattern_from_end_with_drop(a, true, true)); - run_test(|a| subslice_pattern_from_end_with_drop(a, true, false)); - run_test(|a| subslice_pattern_from_end_with_drop(a, false, true)); - run_test(|a| subslice_pattern_from_end_with_drop(a, false, false)); - run_test(|a| slice_pattern_reassign(a)); - run_test(|a| subslice_pattern_reassign(a)); - - run_test(|a| { + run_test!(|a| dynamic_init(a, false)); + run_test!(|a| dynamic_init(a, true)); + run_test!(|a| dynamic_drop(a, false)); + run_test!(|a| dynamic_drop(a, true)); + + run_test!(|a| assignment2(a, false, false)); + run_test!(|a| assignment2(a, false, true)); + run_test!(|a| assignment2(a, true, false)); + run_test!(|a| assignment2(a, true, true)); + + run_test!(|a| assignment1(a, false)); + run_test!(|a| assignment1(a, true)); + + run_test!(|a| array_simple(a)); + run_test!(|a| vec_simple(a)); + run_test!(|a| vec_unreachable(a)); + + run_test!(|a| struct_dynamic_drop(a, false, false, false)); + run_test!(|a| struct_dynamic_drop(a, false, false, true)); + run_test!(|a| struct_dynamic_drop(a, false, true, false)); + run_test!(|a| struct_dynamic_drop(a, false, true, true)); + run_test!(|a| struct_dynamic_drop(a, true, false, false)); + run_test!(|a| struct_dynamic_drop(a, true, false, true)); + run_test!(|a| struct_dynamic_drop(a, true, true, false)); + run_test!(|a| struct_dynamic_drop(a, true, true, true)); + + run_test!(|a| field_assignment(a, false)); + run_test!(|a| field_assignment(a, true)); + + run_test!(|a| generator(a, 0)); + run_test!(|a| generator(a, 1)); + run_test!(|a| generator(a, 2)); + run_test!(|a| generator(a, 3)); + + run_test!(|a| mixed_drop_and_nondrop(a)); + + run_test!(|a| slice_pattern_first(a)); + run_test!(|a| slice_pattern_middle(a)); + 
run_test!(|a| slice_pattern_two(a)); + run_test!(|a| slice_pattern_last(a)); + run_test!(|a| slice_pattern_one_of(a, 0)); + run_test!(|a| slice_pattern_one_of(a, 1)); + run_test!(|a| slice_pattern_one_of(a, 2)); + run_test!(|a| slice_pattern_one_of(a, 3)); + + run_test!(|a| subslice_pattern_from_end(a, true)); + run_test!(|a| subslice_pattern_from_end(a, false)); + run_test!(|a| subslice_pattern_from_end_with_drop(a, true, true)); + run_test!(|a| subslice_pattern_from_end_with_drop(a, true, false)); + run_test!(|a| subslice_pattern_from_end_with_drop(a, false, true)); + run_test!(|a| subslice_pattern_from_end_with_drop(a, false, false)); + run_test!(|a| slice_pattern_reassign(a)); + run_test!(|a| subslice_pattern_reassign(a)); + + run_test!(|a| { panic_after_return(a); }); - run_test(|a| { + run_test!(|a| { panic_after_return_expr(a); }); - run_test(|a| panic_after_init(a)); - run_test(|a| panic_after_init_temp(a)); - run_test(|a| panic_after_init_by_loop(a)); - - run_test_nopanic(|a| union1(a)); + run_test!(|a| panic_after_init(a)); + run_test!(|a| panic_after_init_temp(a)); + run_test!(|a| panic_after_init_by_loop(a)); + run_test!(|a| panic_after_init_by_match(a, false)); + run_test!(|a| panic_after_init_by_match(a, true)); + run_test!(|a| panic_after_init_by_match_with_guard(a, false)); + run_test!(|a| panic_after_init_by_match_with_guard(a, true)); + run_test!(|a| panic_after_init_by_match_with_bindings_and_guard(a, false)); + run_test!(|a| panic_after_init_by_match_with_bindings_and_guard(a, true)); + run_test!(|a| panic_after_init_by_match_with_ref_bindings_and_guard(a, false)); + run_test!(|a| panic_after_init_by_match_with_ref_bindings_and_guard(a, true)); + run_test!(|a| panic_after_init_by_break_if(a, false)); + run_test!(|a| panic_after_init_by_break_if(a, true)); + + run_test_nopanic(|a| union1(a), "|a| union1(a)"); }
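The behaviour exercised by the rewritten `dynamic-drop` tests above can be reduced to a small standalone program. The sketch below is illustrative only; the names `Noisy`, `PanicOnDrop` and `build` are hypothetical and not part of this PR. It shows the property the tests check: once a value has been written into its destination, here the return place, it is dropped during unwinding if a later destructor panics, instead of being leaked as in the old FIXME #47949 behaviour that the removed `alloc_leaked` helper worked around.

use std::panic;

// A type whose drop is observable.
struct Noisy(&'static str);

impl Drop for Noisy {
    fn drop(&mut self) {
        println!("dropping {}", self.0);
    }
}

// A type whose drop panics, so unwinding starts only after the return
// value of `build` has already been initialized.
struct PanicOnDrop;

impl Drop for PanicOnDrop {
    fn drop(&mut self) {
        panic!("panic in drop");
    }
}

fn build() -> Noisy {
    let _bomb = PanicOnDrop;
    // The tail expression initializes the return place; `_bomb` is dropped
    // afterwards and panics. With drops scheduled for the destination, the
    // already-initialized return value is dropped during the unwind rather
    // than leaked.
    Noisy("return value")
}

fn main() {
    let result = panic::catch_unwind(|| build());
    assert!(result.is_err());
    // "dropping return value" is printed while the panic unwinds out of `build`.
}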