diff --git a/src/libcore/iter/adapters/chain.rs b/src/libcore/iter/adapters/chain.rs
index c9612596b1ba0..016fa411bb360 100644
--- a/src/libcore/iter/adapters/chain.rs
+++ b/src/libcore/iter/adapters/chain.rs
@@ -54,7 +54,6 @@ impl<A, B> Iterator for Chain<A, B> where
 {
     type Item = A::Item;
 
-    #[inline]
     fn next(&mut self) -> Option<A::Item> {
         match self.state {
             ChainState::Both => match self.a.next() {
@@ -117,7 +116,6 @@ impl<A, B> Iterator for Chain<A, B> where
         accum
     }
 
-    #[inline]
     fn nth(&mut self, mut n: usize) -> Option<A::Item> {
         match self.state {
             ChainState::Both | ChainState::Front => {
@@ -157,7 +155,6 @@ impl<A, B> Iterator for Chain<A, B> where
         }
     }
 
-    #[inline]
     fn last(self) -> Option<A::Item> {
         match self.state {
             ChainState::Both => {
@@ -198,7 +195,6 @@ impl<A, B> DoubleEndedIterator for Chain<A, B> where
     A: DoubleEndedIterator,
     B: DoubleEndedIterator<Item=A::Item>,
 {
-    #[inline]
     fn next_back(&mut self) -> Option<A::Item> {
         match self.state {
             ChainState::Both => match self.b.next_back() {
@@ -213,7 +209,6 @@ impl<A, B> DoubleEndedIterator for Chain<A, B> where
         }
     }
 
-    #[inline]
     fn nth_back(&mut self, mut n: usize) -> Option<A::Item> {
         match self.state {
             ChainState::Both | ChainState::Back => {
diff --git a/src/librustc_mir/build/block.rs b/src/librustc_mir/build/block.rs
index 7353ca9285ddb..f9440866e4925 100644
--- a/src/librustc_mir/build/block.rs
+++ b/src/librustc_mir/build/block.rs
@@ -1,18 +1,22 @@
 use crate::build::{BlockAnd, BlockAndExtension, BlockFrame, Builder};
 use crate::build::ForGuard::OutsideGuard;
 use crate::build::matches::ArmHasGuard;
+use crate::build::scope::DropKind;
 use crate::hair::*;
+use rustc::middle::region;
 use rustc::mir::*;
 use rustc::hir;
 use syntax_pos::Span;
 
 impl<'a, 'tcx> Builder<'a, 'tcx> {
-    pub fn ast_block(&mut self,
-                     destination: &Place<'tcx>,
-                     block: BasicBlock,
-                     ast_block: &'tcx hir::Block,
-                     source_info: SourceInfo)
-                     -> BlockAnd<()> {
+    pub fn ast_block(
+        &mut self,
+        destination: &Place<'tcx>,
+        scope: Option<region::Scope>,
+        block: BasicBlock,
+        ast_block: &'tcx hir::Block,
+        source_info: SourceInfo,
+    ) -> BlockAnd<()> {
         let Block {
             region_scope,
             opt_destruction_scope,
@@ -21,37 +25,61 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
             expr,
             targeted_by_break,
             safety_mode
-        } =
-            self.hir.mirror(ast_block);
+        } = self.hir.mirror(ast_block);
         self.in_opt_scope(opt_destruction_scope.map(|de|(de, source_info)), move |this| {
             this.in_scope((region_scope, source_info), LintLevel::Inherited, move |this| {
                 if targeted_by_break {
                     // This is a `break`-able block
                     let exit_block = this.cfg.start_new_block();
+                    if let Some(scope) = scope {
+                        // Breakable blocks assign to their destination on each
+                        // `break`, as well as when they exit normally. So we
+                        // can't schedule the drop in the last expression like
+                        // normal blocks do.
+                        let local = destination.as_local()
+                            .expect("cannot schedule drop of non-Local place");
+                        this.schedule_drop(span, scope, local, DropKind::Value);
+                    }
                     let block_exit = this.in_breakable_scope(
                         None, exit_block, destination.clone(), |this| {
-                            this.ast_block_stmts(destination, block, span, stmts, expr,
-                                                 safety_mode)
+                            this.ast_block_stmts(
+                                destination,
+                                None,
+                                block,
+                                span,
+                                stmts,
+                                expr,
+                                safety_mode,
+                            )
                         });
                     this.cfg.terminate(unpack!(block_exit), source_info,
                                        TerminatorKind::Goto { target: exit_block });
                     exit_block.unit()
                 } else {
-                    this.ast_block_stmts(destination, block, span, stmts, expr,
-                                         safety_mode)
+                    this.ast_block_stmts(
+                        destination,
+                        scope,
+                        block,
+                        span,
+                        stmts,
+                        expr,
+                        safety_mode,
+                    )
                 }
             })
         })
     }
 
-    fn ast_block_stmts(&mut self,
-                       destination: &Place<'tcx>,
-                       mut block: BasicBlock,
-                       span: Span,
-                       stmts: Vec<StmtRef<'tcx>>,
-                       expr: Option<ExprRef<'tcx>>,
-                       safety_mode: BlockSafety)
-                       -> BlockAnd<()> {
+    fn ast_block_stmts(
+        &mut self,
+        destination: &Place<'tcx>,
+        scope: Option<region::Scope>,
+        mut block: BasicBlock,
+        span: Span,
+        stmts: Vec<StmtRef<'tcx>>,
+        expr: Option<ExprRef<'tcx>>,
+        safety_mode: BlockSafety,
+    ) -> BlockAnd<()> {
         let this = self;
 
         // This convoluted structure is to avoid using recursion as we walk down a list
@@ -177,7 +205,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                 this.block_context.currently_ignores_tail_results();
             this.block_context.push(BlockFrame::TailExpr { tail_result_is_ignored });
 
-            unpack!(block = this.into(destination, block, expr));
+            unpack!(block = this.into(destination, scope, block, expr));
             let popped = this.block_context.pop();
 
             assert!(popped.map_or(false, |bf|bf.is_tail_expr()));
diff --git a/src/librustc_mir/build/expr/as_rvalue.rs b/src/librustc_mir/build/expr/as_rvalue.rs
index 4f1ac8e51dc20..a0df626911601 100644
--- a/src/librustc_mir/build/expr/as_rvalue.rs
+++ b/src/librustc_mir/build/expr/as_rvalue.rs
@@ -136,11 +136,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                 this.cfg
                     .push_assign(block, source_info, &Place::from(result), box_);
 
-                // initialize the box contents:
+                // Initialize the box contents. No scope is needed since the
+                // `Box` is already scheduled to be dropped.
                 unpack!(
                     block = this.into(
                         &this.hir.tcx().mk_place_deref(Place::from(result)),
-                        block, value
+                        None,
+                        block,
+                        value,
                     )
                 );
                 block.and(Rvalue::Use(Operand::Move(Place::from(result))))
diff --git a/src/librustc_mir/build/expr/as_temp.rs b/src/librustc_mir/build/expr/as_temp.rs
index 18332ed68f8bd..bd20f27c945c1 100644
--- a/src/librustc_mir/build/expr/as_temp.rs
+++ b/src/librustc_mir/build/expr/as_temp.rs
@@ -109,16 +109,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
             }
         }
 
-        unpack!(block = this.into(temp_place, block, expr));
-
-        if let Some(temp_lifetime) = temp_lifetime {
-            this.schedule_drop(
-                expr_span,
-                temp_lifetime,
-                temp,
-                DropKind::Value,
-            );
-        }
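+        // Passing `temp_lifetime` as the drop scope makes `into` schedule
+        // the drop of the temporary once it has been initialized.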
+        unpack!(block = this.into(temp_place, temp_lifetime, block, expr));
 
         block.and(temp)
     }
diff --git a/src/librustc_mir/build/expr/into.rs b/src/librustc_mir/build/expr/into.rs
index e7388b920548b..014c1876f48e8 100644
--- a/src/librustc_mir/build/expr/into.rs
+++ b/src/librustc_mir/build/expr/into.rs
@@ -2,7 +2,9 @@
 
 use crate::build::expr::category::{Category, RvalueFunc};
 use crate::build::{BlockAnd, BlockAndExtension, BlockFrame, Builder};
+use crate::build::scope::DropKind;
 use crate::hair::*;
+use rustc::middle::region;
 use rustc::mir::*;
 use rustc::ty;
 
@@ -11,15 +13,18 @@ use rustc_target::spec::abi::Abi;
 impl<'a, 'tcx> Builder<'a, 'tcx> {
     /// Compile `expr`, storing the result into `destination`, which
     /// is assumed to be uninitialized.
+    /// If a drop `scope` is provided, `destination` is scheduled to be
+    /// dropped in that scope once it has been initialized.
     pub fn into_expr(
         &mut self,
         destination: &Place<'tcx>,
+        scope: Option<region::Scope>,
         mut block: BasicBlock,
         expr: Expr<'tcx>,
     ) -> BlockAnd<()> {
         debug!(
-            "into_expr(destination={:?}, block={:?}, expr={:?})",
-            destination, block, expr
+            "into_expr(destination={:?}, scope={:?}, block={:?}, expr={:?})",
+            destination, scope, block, expr
         );
 
         // since we frequently have to reference `self` from within a
@@ -35,6 +40,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
             _ => false,
         };
 
+        let schedule_drop = move |this: &mut Self| {
+            if let Some(drop_scope) = scope {
+                let local = destination.as_local()
+                    .expect("cannot schedule drop of non-Local place");
+                this.schedule_drop(expr_span, drop_scope, local, DropKind::Value);
+            }
+        };
+
         if !expr_is_block_or_scope {
             this.block_context.push(BlockFrame::SubExpr);
         }
@@ -47,14 +60,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
             } => {
                 let region_scope = (region_scope, source_info);
                 this.in_scope(region_scope, lint_level, |this| {
-                    this.into(destination, block, value)
+                    this.into(destination, scope, block, value)
                 })
             }
             ExprKind::Block { body: ast_block } => {
-                this.ast_block(destination, block, ast_block, source_info)
+                this.ast_block(destination, scope, block, ast_block, source_info)
             }
             ExprKind::Match { scrutinee, arms } => {
-                this.match_expr(destination, expr_span, block, scrutinee, arms)
+                this.match_expr(destination, scope, expr_span, block, scrutinee, arms)
             }
             ExprKind::NeverToAny { source } => {
                 let source = this.hir.mirror(source);
@@ -67,6 +80,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
 
                 // This is an optimization. If the expression was a call then we already have an
                 // unreachable block. Don't bother to terminate it and create a new one.
+                schedule_drop(this);
                 if is_call {
                     block.unit()
                 } else {
@@ -164,6 +178,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                     TerminatorKind::Goto { target: loop_block },
                 );
 
+                // Loops assign to their destination on each `break`. Since we
+                // can't easily unschedule drops, we schedule the drop now.
+                schedule_drop(this);
                 this.in_breakable_scope(
                     Some(loop_block),
                     exit_block,
@@ -185,7 +202,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                         // introduce a unit temporary as the destination for the loop body.
                         let tmp = this.get_unit_temp();
                         // Execute the body, branching back to the test.
-                        let body_block_end = unpack!(this.into(&tmp, body_block, body));
+                        // No scope is provided, since we've scheduled the drop above.
+                        let body_block_end = unpack!(this.into(&tmp, None, body_block, body));
                         this.cfg.terminate(
                             body_block_end,
                             source_info,
@@ -234,8 +252,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                         is_block_tail: None,
                     });
                     let ptr_temp = Place::from(ptr_temp);
-                    let block = unpack!(this.into(&ptr_temp, block, ptr));
-                    this.into(&this.hir.tcx().mk_place_deref(ptr_temp), block, val)
+                    // No scope is needed, since `ptr_temp` does not need to be dropped.
+                    let block = unpack!(this.into(&ptr_temp, None, block, ptr));
+                    // Maybe we should provide a scope here so that
+                    // `move_val_init` wouldn't leak on panic even with an
+                    // arbitrary `val` expression, but `schedule_drop`,
+                    // borrowck and drop elaboration all prevent us from
+                    // dropping `ptr_temp.deref()`.
+                    this.into(&this.hir.tcx().mk_place_deref(ptr_temp), None, block, val)
                 } else {
                     let args: Vec<_> = args
                         .into_iter()
@@ -265,11 +289,12 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                             from_hir_call,
                         },
                     );
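+                    // The call initializes the destination on the success
+                    // path; schedule its drop in the provided scope, if any.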
+                    schedule_drop(this);
                     success.unit()
                 }
             }
             ExprKind::Use { source } => {
-                this.into(destination, block, source)
+                this.into(destination, scope, block, source)
             }
 
             // These cases don't actually need a destination
@@ -296,6 +321,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                 let rvalue = Rvalue::Use(this.consume_by_copy_or_move(place));
                 this.cfg
                     .push_assign(block, source_info, destination, rvalue);
+                schedule_drop(this);
                 block.unit()
             }
             ExprKind::Index { .. } | ExprKind::Deref { .. } | ExprKind::Field { .. } => {
@@ -315,6 +341,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                 let rvalue = Rvalue::Use(this.consume_by_copy_or_move(place));
                 this.cfg
                     .push_assign(block, source_info, destination, rvalue);
+                schedule_drop(this);
                 block.unit()
             }
 
@@ -346,6 +373,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
 
                 let rvalue = unpack!(block = this.as_local_rvalue(block, expr));
                 this.cfg.push_assign(block, source_info, destination, rvalue);
+                schedule_drop(this);
                 block.unit()
             }
         };
diff --git a/src/librustc_mir/build/into.rs b/src/librustc_mir/build/into.rs
index 077840c9ccf17..e57f10f0b14e9 100644
--- a/src/librustc_mir/build/into.rs
+++ b/src/librustc_mir/build/into.rs
@@ -6,6 +6,7 @@
 
 use crate::build::{BlockAnd, Builder};
 use crate::hair::*;
+use rustc::middle::region;
 use rustc::mir::*;
 
 pub(in crate::build) trait EvalInto<'tcx> {
@@ -13,19 +14,23 @@ pub(in crate::build) trait EvalInto<'tcx> {
         self,
         builder: &mut Builder<'_, 'tcx>,
         destination: &Place<'tcx>,
+        scope: Option<region::Scope>,
         block: BasicBlock,
     ) -> BlockAnd<()>;
 }
 
 impl<'a, 'tcx> Builder<'a, 'tcx> {
-    pub fn into<E>(&mut self,
-                   destination: &Place<'tcx>,
-                   block: BasicBlock,
-                   expr: E)
-                   -> BlockAnd<()>
-        where E: EvalInto<'tcx>
+    pub fn into<E>(
+        &mut self,
+        destination: &Place<'tcx>,
+        scope: Option<region::Scope>,
+        block: BasicBlock,
+        expr: E,
+    ) -> BlockAnd<()>
+    where
+        E: EvalInto<'tcx>,
     {
-        expr.eval_into(self, destination, block)
+        expr.eval_into(self, destination, scope, block)
     }
 }
 
@@ -34,10 +39,11 @@ impl<'tcx> EvalInto<'tcx> for ExprRef<'tcx> {
         self,
         builder: &mut Builder<'_, 'tcx>,
         destination: &Place<'tcx>,
+        scope: Option<region::Scope>,
         block: BasicBlock,
     ) -> BlockAnd<()> {
         let expr = builder.hir.mirror(self);
-        builder.into_expr(destination, block, expr)
+        builder.into_expr(destination, scope, block, expr)
     }
 }
 
@@ -46,8 +52,9 @@ impl<'tcx> EvalInto<'tcx> for Expr<'tcx> {
         self,
         builder: &mut Builder<'_, 'tcx>,
         destination: &Place<'tcx>,
+        scope: Option<region::Scope>,
         block: BasicBlock,
     ) -> BlockAnd<()> {
-        builder.into_expr(destination, block, self)
+        builder.into_expr(destination, scope, block, self)
     }
 }
diff --git a/src/librustc_mir/build/matches/mod.rs b/src/librustc_mir/build/matches/mod.rs
index 667b37bbd80c8..518bb7603acbd 100644
--- a/src/librustc_mir/build/matches/mod.rs
+++ b/src/librustc_mir/build/matches/mod.rs
@@ -102,6 +102,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
     pub fn match_expr(
         &mut self,
         destination: &Place<'tcx>,
+        destination_scope: Option<region::Scope>,
         span: Span,
         mut block: BasicBlock,
         scrutinee: ExprRef<'tcx>,
@@ -228,6 +229,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         };
 
         // Step 5. Create everything else: the guards and the arms.
+        if let Some(scope) = destination_scope {
+            // `match` assigns to its destination in each arm. Since we can't
+            // easily unschedule drops, we schedule the drop now.
+            let local = destination.as_local()
+                .expect("cannot schedule drop of non-Local place");
+            self.schedule_drop(span, scope, local, DropKind::Value);
+        }
+
         let match_scope = self.scopes.topmost();
 
         let arm_end_blocks: Vec<_> = arm_candidates.into_iter().map(|(arm, mut candidates)| {
@@ -275,7 +284,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                     this.source_scope = source_scope;
                 }
 
-                this.into(destination, arm_block, body)
+                // No scope is provided, since we've scheduled the drop above.
+                this.into(destination, None, arm_block, body)
             })
         }).collect();
 
@@ -311,8 +321,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
             } => {
                 let place =
                     self.storage_live_binding(block, var, irrefutable_pat.span, OutsideGuard);
-                unpack!(block = self.into(&place, block, initializer));
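+                // Pass the variable's scope to `into` so that the drop of the
+                // binding is scheduled once the initializer has run.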
+                let region_scope = self.hir.region_scope_tree.var_scope(var.local_id);
 
+                unpack!(block = self.into(&place, Some(region_scope), block, initializer));
 
                 // Inject a fake read, see comments on `FakeReadCause::ForLet`.
                 let source_info = self.source_info(irrefutable_pat.span);
@@ -324,7 +335,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                     },
                 );
 
-                self.schedule_drop_for_binding(var, irrefutable_pat.span, OutsideGuard);
                 block.unit()
             }
 
@@ -352,9 +362,10 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                     user_ty_span,
                 },
             } => {
+                let region_scope = self.hir.region_scope_tree.var_scope(var.local_id);
                 let place =
                     self.storage_live_binding(block, var, irrefutable_pat.span, OutsideGuard);
-                unpack!(block = self.into(&place, block, initializer));
+                unpack!(block = self.into(&place, Some(region_scope), block, initializer));
 
                 // Inject a fake read, see comments on `FakeReadCause::ForLet`.
                 let pattern_source_info = self.source_info(irrefutable_pat.span);
@@ -400,7 +411,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                     },
                 );
 
-                self.schedule_drop_for_binding(var, irrefutable_pat.span, OutsideGuard);
                 block.unit()
             }
 
diff --git a/src/librustc_mir/build/mod.rs b/src/librustc_mir/build/mod.rs
index ffb70180bbb4b..d65f49480bca5 100644
--- a/src/librustc_mir/build/mod.rs
+++ b/src/librustc_mir/build/mod.rs
@@ -613,6 +613,7 @@ where
     let source_info = builder.source_info(span);
     let call_site_s = (call_site_scope, source_info);
     unpack!(block = builder.in_scope(call_site_s, LintLevel::Inherited, |builder| {
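+        // Schedule a drop of the return place in the outermost scope, so
+        // that it is dropped if a panic occurs while local variables are
+        // being dropped. This drop is unscheduled again before the normal
+        // return (see `unschedule_return_place_drop`).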
+        builder.schedule_drop(span, call_site_scope, RETURN_PLACE, DropKind::Value);
         if should_abort_on_panic(tcx, fn_def_id, abi) {
             builder.schedule_abort();
         }
@@ -643,6 +644,7 @@ where
             builder.cfg.terminate(unreachable_block, source_info,
                                   TerminatorKind::Unreachable);
         }
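+        // The drop of the return place scheduled above must not run when the
+        // function returns normally, so unschedule it here.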
+        builder.unschedule_return_place_drop();
         return_block.unit()
     }));
     assert_eq!(block, builder.return_block());
@@ -684,7 +686,9 @@ fn construct_const<'a, 'tcx>(
     let mut block = START_BLOCK;
     let ast_expr = &tcx.hir().body(body_id).value;
     let expr = builder.hir.mirror(ast_expr);
-    unpack!(block = builder.into_expr(&Place::return_place(), block, expr));
+    // We don't provide a scope because we can't unwind in constants, so we
+    // will never need to drop the return place.
+    unpack!(block = builder.into_expr(&Place::return_place(), None, block, expr));
 
     let source_info = builder.source_info(span);
     builder.cfg.terminate(block, source_info, TerminatorKind::Return);
@@ -885,7 +889,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         }
 
         let body = self.hir.mirror(ast_body);
-        self.into(&Place::return_place(), block, body)
+        // No scope is provided, since we've scheduled the drop of the return
+        // place.
+        self.into(&Place::return_place(), None, block, body)
     }
 
     fn set_correct_source_scope_for_arg(
diff --git a/src/librustc_mir/build/scope.rs b/src/librustc_mir/build/scope.rs
index 1b3d8641f204e..f5cb9b8d0cd63 100644
--- a/src/librustc_mir/build/scope.rs
+++ b/src/librustc_mir/build/scope.rs
@@ -513,7 +513,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
             if let Some(value) = value {
                 debug!("stmt_expr Break val block_context.push(SubExpr)");
                 self.block_context.push(BlockFrame::SubExpr);
-                unpack!(block = self.into(&destination, block, value));
+                unpack!(block = self.into(&destination, None, block, value));
                 self.block_context.pop();
             } else {
                 self.cfg.push_assign_unit(block, source_info, &destination)
@@ -1067,6 +1067,18 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         success_block
     }
 
+    /// Unschedules the drop of the return place.
+    ///
+    /// If the return type of a function requires drop, we schedule a drop of
+    /// the return place in the outermost scope, so that it is dropped if a
+    /// panic occurs while local variables are being dropped. But we don't
+    /// want to drop it if the function returns normally.
+    crate fn unschedule_return_place_drop(&mut self) {
+        assert_eq!(self.scopes.len(), 1);
+        assert!(self.scopes.scopes[0].drops.len() <= 1);
+        self.scopes.scopes[0].drops.clear();
+    }
+
     // `match` arm scopes
     // ==================
     /// Unschedules any drops in the top scope.
diff --git a/src/librustc_mir/dataflow/move_paths/builder.rs b/src/librustc_mir/dataflow/move_paths/builder.rs
index 52016d4c9363a..906776ed64259 100644
--- a/src/librustc_mir/dataflow/move_paths/builder.rs
+++ b/src/librustc_mir/dataflow/move_paths/builder.rs
@@ -348,6 +348,7 @@ impl<'b, 'a, 'tcx> Gatherer<'b, 'a, 'tcx> {
     fn gather_terminator(&mut self, term: &Terminator<'tcx>) {
         match term.kind {
             TerminatorKind::Goto { target: _ }
+            | TerminatorKind::Return
             | TerminatorKind::Resume
             | TerminatorKind::Abort
             | TerminatorKind::GeneratorDrop
@@ -355,10 +356,6 @@ impl<'b, 'a, 'tcx> Gatherer<'b, 'a, 'tcx> {
             | TerminatorKind::FalseUnwind { .. }
             | TerminatorKind::Unreachable => {}
 
-            TerminatorKind::Return => {
-                self.gather_move(&Place::return_place());
-            }
-
             TerminatorKind::Assert { ref cond, .. } => {
                 self.gather_operand(cond);
             }
diff --git a/src/librustc_mir/util/elaborate_drops.rs b/src/librustc_mir/util/elaborate_drops.rs
index a1846a1fb5eaf..3091f78403dc2 100644
--- a/src/librustc_mir/util/elaborate_drops.rs
+++ b/src/librustc_mir/util/elaborate_drops.rs
@@ -163,8 +163,6 @@ where
                 });
             }
             DropStyle::Static => {
-                let loc = self.terminator_loc(bb);
-                self.elaborator.clear_drop_flag(loc, self.path, DropFlagMode::Deep);
                 self.elaborator.patch().patch_terminator(bb, TerminatorKind::Drop {
                     location: self.place.clone(),
                     target: self.succ,
@@ -172,9 +170,7 @@ where
                 });
             }
             DropStyle::Conditional => {
-                let unwind = self.unwind; // FIXME(#43234)
-                let succ = self.succ;
-                let drop_bb = self.complete_drop(Some(DropFlagMode::Deep), succ, unwind);
+                let drop_bb = self.complete_drop(self.succ, self.unwind);
                 self.elaborator.patch().patch_terminator(bb, TerminatorKind::Goto {
                     target: drop_bb
                 });
@@ -236,7 +232,7 @@ where
                 // Using `self.path` here to condition the drop on
                 // our own drop flag.
                 path: self.path
-            }.complete_drop(None, succ, unwind)
+            }.complete_drop(succ, unwind)
         }
     }
 
@@ -265,13 +261,7 @@ where
         // Clear the "master" drop flag at the end. This is needed
         // because the "master" drop protects the ADT's discriminant,
         // which is invalidated after the ADT is dropped.
-        let (succ, unwind) = (self.succ, self.unwind); // FIXME(#43234)
-        (
-            self.drop_flag_reset_block(DropFlagMode::Shallow, succ, unwind),
-            unwind.map(|unwind| {
-                self.drop_flag_reset_block(DropFlagMode::Shallow, unwind, Unwind::InCleanup)
-            })
-        )
+        (self.drop_flag_reset_block(DropFlagMode::Shallow, self.succ, self.unwind), self.unwind)
     }
 
     /// Creates a full drop ladder, consisting of 2 connected half-drop-ladders
@@ -827,9 +817,7 @@ where
                 }
             }
             ty::Dynamic(..) => {
-                let unwind = self.unwind; // FIXME(#43234)
-                let succ = self.succ;
-                self.complete_drop(Some(DropFlagMode::Deep), succ, unwind)
+                self.complete_drop(self.succ, self.unwind)
             }
             ty::Array(ety, size) => {
                 let size = size.try_eval_usize(self.tcx(), self.elaborator.param_env());
@@ -850,18 +838,12 @@ where
     ///     drop(self.place)
     fn complete_drop(
         &mut self,
-        drop_mode: Option<DropFlagMode>,
         succ: BasicBlock,
         unwind: Unwind,
     ) -> BasicBlock {
-        debug!("complete_drop({:?},{:?})", self, drop_mode);
+        debug!("complete_drop(succ={:?}, unwind={:?})", succ, unwind);
 
         let drop_block = self.drop_block(succ, unwind);
-        let drop_block = if let Some(mode) = drop_mode {
-            self.drop_flag_reset_block(mode, drop_block, unwind)
-        } else {
-            drop_block
-        };
 
         self.drop_flag_test_block(drop_block, succ, unwind)
     }
@@ -873,6 +855,11 @@ where
     {
         debug!("drop_flag_reset_block({:?},{:?})", self, mode);
 
+        if unwind.is_cleanup() {
+            // The drop flag isn't read again on the unwind path, so don't
+            // bother setting it.
+            return succ;
+        }
         let block = self.new_block(unwind, TerminatorKind::Goto { target: succ });
         let block_start = Location { block: block, statement_index: 0 };
         self.elaborator.clear_drop_flag(block_start, self.path, mode);
@@ -976,11 +963,6 @@ where
         self.elaborator.patch().new_temp(ty, self.source_info.span)
     }
 
-    fn terminator_loc(&mut self, bb: BasicBlock) -> Location {
-        let body = self.elaborator.body();
-        self.elaborator.patch().terminator_loc(body, bb)
-    }
-
     fn constant_usize(&self, val: u16) -> Operand<'tcx> {
         Operand::Constant(box Constant {
             span: self.source_info.span,
diff --git a/src/test/mir-opt/box_expr.rs b/src/test/mir-opt/box_expr.rs
index 8dc6b73edf6d4..76098731947fe 100644
--- a/src/test/mir-opt/box_expr.rs
+++ b/src/test/mir-opt/box_expr.rs
@@ -41,33 +41,36 @@ impl Drop for S {
 //
 //     bb2: {
 //         _1 = move _2;
-//         drop(_2) -> bb4;
+//         drop(_2) -> [return: bb5, unwind: bb4];
 //     }
 //
 //     bb3 (cleanup): {
 //         drop(_2) -> bb1;
 //     }
 //
-//     bb4: {
+//     bb4 (cleanup): {
+//         drop(_1) -> bb1;
+//     }
+//
+//     bb5: {
 //         StorageDead(_2);
 //         StorageLive(_3);
 //         StorageLive(_4);
 //         _4 = move _1;
-//         _3 = const std::mem::drop::<std::boxed::Box<S>>(move _4) -> [return: bb5, unwind: bb7];
+//         _3 = const std::mem::drop::<std::boxed::Box<S>>(move _4) -> [return: bb6, unwind: bb7];
 //     }
 //
-//     bb5: {
+//     bb6: {
 //         StorageDead(_4);
 //         StorageDead(_3);
 //         _0 = ();
 //         drop(_1) -> bb8;
 //     }
-//     bb6 (cleanup): {
-//         drop(_1) -> bb1;
-//     }
+//
 //     bb7 (cleanup): {
-//         drop(_4) -> bb6;
+//         drop(_4) -> bb4;
 //     }
+//
 //     bb8: {
 //         StorageDead(_1);
 //         return;
diff --git a/src/test/mir-opt/issue-62289.rs b/src/test/mir-opt/issue-62289.rs
index a3b517e9bca87..e8dd56cbbae22 100644
--- a/src/test/mir-opt/issue-62289.rs
+++ b/src/test/mir-opt/issue-62289.rs
@@ -24,7 +24,7 @@ fn main() {
 //         StorageLive(_3);
 //         StorageLive(_4);
 //         _4 = std::option::Option::<u32>::None;
-//         _3 = const <std::option::Option<u32> as std::ops::Try>::into_result(move _4) -> [return: bb2, unwind: bb3];
+//         _3 = const <std::option::Option<u32> as std::ops::Try>::into_result(move _4) -> [return: bb2, unwind: bb4];
 //     }
 //     bb1 (cleanup): {
 //         resume;
@@ -32,60 +32,63 @@ fn main() {
 //     bb2: {
 //         StorageDead(_4);
 //         _5 = discriminant(_3);
-//         switchInt(move _5) -> [0isize: bb10, 1isize: bb5, otherwise: bb4];
+//         switchInt(move _5) -> [0isize: bb11, 1isize: bb6, otherwise: bb5];
 //     }
 //     bb3 (cleanup): {
-//         drop(_2) -> bb1;
+//         drop(_0) -> bb1;
 //     }
-//     bb4: {
-//         unreachable;
+//     bb4 (cleanup): {
+//         drop(_2) -> bb3;
 //     }
 //     bb5: {
+//         unreachable;
+//     }
+//     bb6: {
 //         StorageLive(_6);
 //         _6 = ((_3 as Err).0: std::option::NoneError);
 //         StorageLive(_8);
 //         StorageLive(_9);
 //         _9 = _6;
-//         _8 = const <std::option::NoneError as std::convert::From<std::option::NoneError>>::from(move _9) -> [return: bb7, unwind: bb3];
+//         _8 = const <std::option::NoneError as std::convert::From<std::option::NoneError>>::from(move _9) -> [return: bb8, unwind: bb4];
 //     }
-//     bb6: {
+//     bb7: {
 //         return;
 //     }
-//     bb7: {
+//     bb8: {
 //         StorageDead(_9);
-//         _0 = const <std::option::Option<std::boxed::Box<u32>> as std::ops::Try>::from_error(move _8) -> [return: bb8, unwind: bb3];
+//         _0 = const <std::option::Option<std::boxed::Box<u32>> as std::ops::Try>::from_error(move _8) -> [return: bb9, unwind: bb4];
 //     }
-//     bb8: {
+//     bb9: {
 //         StorageDead(_8);
 //         StorageDead(_6);
-//         drop(_2) -> bb9;
+//         drop(_2) -> [return: bb10, unwind: bb3];
 //     }
-//     bb9: {
+//     bb10: {
 //         StorageDead(_2);
 //         StorageDead(_1);
 //         StorageDead(_3);
-//         goto -> bb6;
+//         goto -> bb7;
 //     }
-//     bb10: {
+//     bb11: {
 //         StorageLive(_10);
 //         _10 = ((_3 as Ok).0: u32);
 //         (*_2) = _10;
 //         StorageDead(_10);
 //         _1 = move _2;
-//         drop(_2) -> [return: bb12, unwind: bb11];
+//         drop(_2) -> [return: bb13, unwind: bb12];
 //     }
-//     bb11 (cleanup): {
-//         drop(_1) -> bb1;
+//     bb12 (cleanup): {
+//         drop(_1) -> bb3;
 //     }
-//     bb12: {
+//     bb13: {
 //         StorageDead(_2);
 //         _0 = std::option::Option::<std::boxed::Box<u32>>::Some(move _1,);
-//         drop(_1) -> bb13;
+//         drop(_1) -> [return: bb14, unwind: bb3];
 //     }
-//     bb13: {
+//     bb14: {
 //         StorageDead(_1);
 //         StorageDead(_3);
-//         goto -> bb6;
+//         goto -> bb7;
 //     }
 // }
 // END rustc.test.ElaborateDrops.before.mir
diff --git a/src/test/mir-opt/unusual-item-types.rs b/src/test/mir-opt/unusual-item-types.rs
index f4d848dfc7ad1..8d29c21835976 100644
--- a/src/test/mir-opt/unusual-item-types.rs
+++ b/src/test/mir-opt/unusual-item-types.rs
@@ -46,8 +46,8 @@ fn main() {
 // END rustc.E-V-{{constant}}.mir_map.0.mir
 
 // START rustc.ptr-real_drop_in_place.std__vec__Vec_i32_.AddMovesForPackedDrops.before.mir
-//     bb0: {
-//     goto -> bb7;
+// bb0: {
+//     goto -> bb6;
 // }
 // bb1: {
 //     return;
@@ -59,17 +59,14 @@ fn main() {
 //     goto -> bb1;
 // }
 // bb4 (cleanup): {
-//     goto -> bb2;
+//     drop(((*_1).0: alloc::raw_vec::RawVec<i32>)) -> bb2;
 // }
-// bb5 (cleanup): {
-//     drop(((*_1).0: alloc::raw_vec::RawVec<i32>)) -> bb4;
+// bb5: {
+//     drop(((*_1).0: alloc::raw_vec::RawVec<i32>)) -> [return: bb3, unwind: bb2];
 // }
 // bb6: {
-//     drop(((*_1).0: alloc::raw_vec::RawVec<i32>)) -> [return: bb3, unwind: bb4];
-// }
-// bb7: {
 //     _2 = &mut (*_1);
-//     _3 = const <std::vec::Vec<i32> as std::ops::Drop>::drop(move _2) -> [return: bb6, unwind: bb5];
+//     _3 = const <std::vec::Vec<i32> as std::ops::Drop>::drop(move _2) -> [return: bb5, unwind: bb4];
 // }
 // END rustc.ptr-real_drop_in_place.std__vec__Vec_i32_.AddMovesForPackedDrops.before.mir
 
diff --git a/src/test/ui/async-await/async-fn-size-uninit-locals.rs b/src/test/ui/async-await/async-fn-size-uninit-locals.rs
index 0558084f4f8a3..44b14e27332e0 100644
--- a/src/test/ui/async-await/async-fn-size-uninit-locals.rs
+++ b/src/test/ui/async-await/async-fn-size-uninit-locals.rs
@@ -99,5 +99,5 @@ fn main() {
     assert_eq!(12, std::mem::size_of_val(&single_with_noop()));
     assert_eq!(3084, std::mem::size_of_val(&joined()));
     assert_eq!(3084, std::mem::size_of_val(&joined_with_noop()));
-    assert_eq!(3080, std::mem::size_of_val(&join_retval()));
+    assert_eq!(3084, std::mem::size_of_val(&join_retval()));
 }
diff --git a/src/test/ui/drop/dynamic-drop-async.rs b/src/test/ui/drop/dynamic-drop-async.rs
index 91063edf0f6c4..398bcb7ec0e82 100644
--- a/src/test/ui/drop/dynamic-drop-async.rs
+++ b/src/test/ui/drop/dynamic-drop-async.rs
@@ -7,7 +7,7 @@
 // edition:2018
 // ignore-wasm32-bare compiled with panic=abort by default
 
-#![feature(slice_patterns)]
+#![feature(slice_patterns, arbitrary_self_types)]
 #![allow(unused)]
 
 use std::{
@@ -45,6 +45,7 @@ impl<T: Unpin> Future for Defer<T> {
 /// The `failing_op`-th operation will panic.
 struct Allocator {
     data: RefCell<Vec<bool>>,
+    name: &'static str,
     failing_op: usize,
     cur_ops: Cell<usize>,
 }
@@ -56,23 +57,28 @@ impl Drop for Allocator {
     fn drop(&mut self) {
         let data = self.data.borrow();
         if data.iter().any(|d| *d) {
-            panic!("missing free: {:?}", data);
+            panic!("missing free in {:?}: {:?}", self.name, data);
         }
     }
 }
 
 impl Allocator {
-    fn new(failing_op: usize) -> Self {
-        Allocator { failing_op, cur_ops: Cell::new(0), data: RefCell::new(vec![]) }
+    fn new(failing_op: usize, name: &'static str) -> Self {
+        Allocator {
+            failing_op,
+            name,
+            cur_ops: Cell::new(0),
+            data: RefCell::new(vec![]),
+        }
     }
-    fn alloc(&self) -> impl Future<Output = Ptr<'_>> + '_ {
+    fn alloc(self: &Rc<Allocator>) -> impl Future<Output = Ptr> + 'static {
         self.fallible_operation();
 
         let mut data = self.data.borrow_mut();
 
         let addr = data.len();
         data.push(true);
-        Defer { ready: false, value: Some(Ptr(addr, self)) }
+        Defer { ready: false, value: Some(Ptr(addr, self.clone())) }
     }
     fn fallible_operation(&self) {
         self.cur_ops.set(self.cur_ops.get() + 1);
@@ -85,11 +91,11 @@ impl Allocator {
 
 // Type that tracks whether it was dropped and can panic when it's created or
 // destroyed.
-struct Ptr<'a>(usize, &'a Allocator);
-impl<'a> Drop for Ptr<'a> {
+struct Ptr(usize, Rc<Allocator>);
+impl Drop for Ptr {
     fn drop(&mut self) {
         match self.1.data.borrow_mut()[self.0] {
-            false => panic!("double free at index {:?}", self.0),
+            false => panic!("double free in {:?} at index {:?}", self.1.name, self.0),
             ref mut d => *d = false,
         }
 
@@ -113,7 +119,7 @@ async fn dynamic_drop(a: Rc<Allocator>, c: bool) {
     };
 }
 
-struct TwoPtrs<'a>(Ptr<'a>, Ptr<'a>);
+struct TwoPtrs(Ptr, Ptr);
 async fn struct_dynamic_drop(a: Rc<Allocator>, c0: bool, c1: bool, c: bool) {
     for i in 0..2 {
         let x;
@@ -228,21 +234,62 @@ async fn subslice_pattern_reassign(a: Rc<Allocator>) {
     a.alloc().await;
 }
 
-fn run_test<F, G>(cx: &mut Context<'_>, ref f: F)
+async fn panic_after_return(a: Rc<Allocator>, c: bool) -> (Ptr,) {
+    a.alloc().await;
+    let p = a.alloc().await;
+    if c {
+        a.alloc().await;
+        let q = a.alloc().await;
+        // We use a return type that isn't used anywhere else to make sure that
+        // the return place doesn't incorrectly end up in the generator state.
+        return (a.alloc().await,);
+    }
+    (a.alloc().await,)
+}
+
+
+async fn panic_after_init_by_loop(a: Rc<Allocator>) {
+    a.alloc().await;
+    let p = a.alloc().await;
+    let q = loop {
+        a.alloc().await;
+        let r = a.alloc().await;
+        break a.alloc().await;
+    };
+}
+
+async fn panic_after_init_by_match_with_bindings_and_guard(a: Rc<Allocator>, b: bool) {
+    a.alloc().await;
+    let p = a.alloc().await;
+    let q = match a.alloc().await {
+        ref _x if b => {
+            a.alloc().await;
+            let r = a.alloc().await;
+            a.alloc().await
+        }
+        _x => {
+            a.alloc().await;
+            let r = a.alloc().await;
+            a.alloc().await
+        },
+    };
+}
+
+fn run_test<F, G, O>(cx: &mut Context<'_>, ref f: F, name: &'static str)
 where
     F: Fn(Rc<Allocator>) -> G,
-    G: Future<Output = ()>,
+    G: Future<Output = O>,
 {
     for polls in 0.. {
         // Run without any panics to find which operations happen after the
         // penultimate `poll`.
-        let first_alloc = Rc::new(Allocator::new(usize::MAX));
+        let first_alloc = Rc::new(Allocator::new(usize::MAX, name));
         let mut fut = Box::pin(f(first_alloc.clone()));
         let mut ops_before_last_poll = 0;
         let mut completed = false;
         for _ in 0..polls {
             ops_before_last_poll = first_alloc.cur_ops.get();
-            if let Poll::Ready(()) = fut.as_mut().poll(cx) {
+            if let Poll::Ready(_) = fut.as_mut().poll(cx) {
                 completed = true;
             }
         }
@@ -251,7 +298,7 @@ where
         // Start at `ops_before_last_poll` so that we will always be able to
         // `poll` the expected number of times.
         for failing_op in ops_before_last_poll..first_alloc.cur_ops.get() {
-            let alloc = Rc::new(Allocator::new(failing_op + 1));
+            let alloc = Rc::new(Allocator::new(failing_op + 1, name));
             let f = &f;
             let cx = &mut *cx;
             let result = panic::catch_unwind(panic::AssertUnwindSafe(move || {
@@ -281,46 +328,56 @@ fn clone_waker(data: *const ()) -> RawWaker {
     RawWaker::new(data, &RawWakerVTable::new(clone_waker, drop, drop, drop))
 }
 
+macro_rules! run_test {
+    ($ctxt:expr, $e:expr) => { run_test($ctxt, $e, stringify!($e)); };
+}
+
 fn main() {
     let waker = unsafe { Waker::from_raw(clone_waker(ptr::null())) };
     let context = &mut Context::from_waker(&waker);
 
-    run_test(context, |a| dynamic_init(a, false));
-    run_test(context, |a| dynamic_init(a, true));
-    run_test(context, |a| dynamic_drop(a, false));
-    run_test(context, |a| dynamic_drop(a, true));
-
-    run_test(context, |a| assignment(a, false, false));
-    run_test(context, |a| assignment(a, false, true));
-    run_test(context, |a| assignment(a, true, false));
-    run_test(context, |a| assignment(a, true, true));
-
-    run_test(context, |a| array_simple(a));
-    run_test(context, |a| vec_simple(a));
-    run_test(context, |a| vec_unreachable(a));
-
-    run_test(context, |a| struct_dynamic_drop(a, false, false, false));
-    run_test(context, |a| struct_dynamic_drop(a, false, false, true));
-    run_test(context, |a| struct_dynamic_drop(a, false, true, false));
-    run_test(context, |a| struct_dynamic_drop(a, false, true, true));
-    run_test(context, |a| struct_dynamic_drop(a, true, false, false));
-    run_test(context, |a| struct_dynamic_drop(a, true, false, true));
-    run_test(context, |a| struct_dynamic_drop(a, true, true, false));
-    run_test(context, |a| struct_dynamic_drop(a, true, true, true));
-
-    run_test(context, |a| field_assignment(a, false));
-    run_test(context, |a| field_assignment(a, true));
-
-    run_test(context, |a| mixed_drop_and_nondrop(a));
-
-    run_test(context, |a| slice_pattern_one_of(a, 0));
-    run_test(context, |a| slice_pattern_one_of(a, 1));
-    run_test(context, |a| slice_pattern_one_of(a, 2));
-    run_test(context, |a| slice_pattern_one_of(a, 3));
-
-    run_test(context, |a| subslice_pattern_from_end_with_drop(a, true, true));
-    run_test(context, |a| subslice_pattern_from_end_with_drop(a, true, false));
-    run_test(context, |a| subslice_pattern_from_end_with_drop(a, false, true));
-    run_test(context, |a| subslice_pattern_from_end_with_drop(a, false, false));
-    run_test(context, |a| subslice_pattern_reassign(a));
+    run_test!(context, |a| dynamic_init(a, false));
+    run_test!(context, |a| dynamic_init(a, true));
+    run_test!(context, |a| dynamic_drop(a, false));
+    run_test!(context, |a| dynamic_drop(a, true));
+
+    run_test!(context, |a| assignment(a, false, false));
+    run_test!(context, |a| assignment(a, false, true));
+    run_test!(context, |a| assignment(a, true, false));
+    run_test!(context, |a| assignment(a, true, true));
+
+    run_test!(context, |a| array_simple(a));
+    run_test!(context, |a| vec_simple(a));
+    run_test!(context, |a| vec_unreachable(a));
+
+    run_test!(context, |a| struct_dynamic_drop(a, false, false, false));
+    run_test!(context, |a| struct_dynamic_drop(a, false, false, true));
+    run_test!(context, |a| struct_dynamic_drop(a, false, true, false));
+    run_test!(context, |a| struct_dynamic_drop(a, false, true, true));
+    run_test!(context, |a| struct_dynamic_drop(a, true, false, false));
+    run_test!(context, |a| struct_dynamic_drop(a, true, false, true));
+    run_test!(context, |a| struct_dynamic_drop(a, true, true, false));
+    run_test!(context, |a| struct_dynamic_drop(a, true, true, true));
+
+    run_test!(context, |a| field_assignment(a, false));
+    run_test!(context, |a| field_assignment(a, true));
+
+    run_test!(context, |a| mixed_drop_and_nondrop(a));
+
+    run_test!(context, |a| slice_pattern_one_of(a, 0));
+    run_test!(context, |a| slice_pattern_one_of(a, 1));
+    run_test!(context, |a| slice_pattern_one_of(a, 2));
+    run_test!(context, |a| slice_pattern_one_of(a, 3));
+
+    run_test!(context, |a| subslice_pattern_from_end_with_drop(a, true, true));
+    run_test!(context, |a| subslice_pattern_from_end_with_drop(a, true, false));
+    run_test!(context, |a| subslice_pattern_from_end_with_drop(a, false, true));
+    run_test!(context, |a| subslice_pattern_from_end_with_drop(a, false, false));
+    run_test!(context, |a| subslice_pattern_reassign(a));
+
+    run_test!(context, |a| panic_after_return(a, false));
+    run_test!(context, |a| panic_after_return(a, true));
+    run_test!(context, |a| panic_after_init_by_loop(a));
+    run_test!(context, |a| panic_after_init_by_match_with_bindings_and_guard(a, false));
+    run_test!(context, |a| panic_after_init_by_match_with_bindings_and_guard(a, true));
 }
diff --git a/src/test/ui/drop/dynamic-drop.rs b/src/test/ui/drop/dynamic-drop.rs
index 29dcbfe9609a0..c2e9a09cfd47d 100644
--- a/src/test/ui/drop/dynamic-drop.rs
+++ b/src/test/ui/drop/dynamic-drop.rs
@@ -18,6 +18,7 @@ struct InjectedFailure;
 
 struct Allocator {
     data: RefCell<Vec<bool>>,
+    name: &'static str,
     failing_op: usize,
     cur_ops: Cell<usize>,
 }
@@ -29,17 +30,18 @@ impl Drop for Allocator {
     fn drop(&mut self) {
         let data = self.data.borrow();
         if data.iter().any(|d| *d) {
-            panic!("missing free: {:?}", data);
+            panic!("missing free in {:?}: {:?}", self.name, data);
         }
     }
 }
 
 impl Allocator {
-    fn new(failing_op: usize) -> Self {
+    fn new(failing_op: usize, name: &'static str) -> Self {
         Allocator {
             failing_op: failing_op,
             cur_ops: Cell::new(0),
-            data: RefCell::new(vec![])
+            data: RefCell::new(vec![]),
+            name,
         }
     }
     fn alloc(&self) -> Ptr<'_> {
@@ -54,20 +56,6 @@ impl Allocator {
         data.push(true);
         Ptr(addr, self)
     }
-    // FIXME(#47949) Any use of this indicates a bug in rustc: we should never
-    // be leaking values in the cases here.
-    //
-    // Creates a `Ptr<'_>` and checks that the allocated value is leaked if the
-    // `failing_op` is in the list of exception.
-    fn alloc_leaked(&self, exceptions: Vec<usize>) -> Ptr<'_> {
-        let ptr = self.alloc();
-
-        if exceptions.iter().any(|operation| *operation == self.failing_op) {
-            let mut data = self.data.borrow_mut();
-            data[ptr.0] = false;
-        }
-        ptr
-    }
 }
 
 struct Ptr<'a>(usize, &'a Allocator);
@@ -75,7 +63,7 @@ impl<'a> Drop for Ptr<'a> {
     fn drop(&mut self) {
         match self.1.data.borrow_mut()[self.0] {
             false => {
-                panic!("double free at index {:?}", self.0)
+                panic!("double free in {:?} at index {:?}", self.1.name, self.0)
             }
             ref mut d => *d = false
         }
@@ -270,79 +258,148 @@ fn subslice_pattern_reassign(a: &Allocator) {
 }
 
 fn panic_after_return(a: &Allocator) -> Ptr<'_> {
-    // Panic in the drop of `p` or `q` can leak
-    let exceptions = vec![8, 9];
     a.alloc();
     let p = a.alloc();
     {
         a.alloc();
         let p = a.alloc();
-        // FIXME (#47949) We leak values when we panic in a destructor after
-        // evaluating an expression with `rustc_mir::build::Builder::into`.
-        a.alloc_leaked(exceptions)
+        a.alloc()
     }
 }
 
 fn panic_after_return_expr(a: &Allocator) -> Ptr<'_> {
-    // Panic in the drop of `p` or `q` can leak
-    let exceptions = vec![8, 9];
     a.alloc();
     let p = a.alloc();
     {
         a.alloc();
         let q = a.alloc();
-        // FIXME (#47949)
-        return a.alloc_leaked(exceptions);
+        return a.alloc();
     }
 }
 
 fn panic_after_init(a: &Allocator) {
-    // Panic in the drop of `r` can leak
-    let exceptions = vec![8];
     a.alloc();
     let p = a.alloc();
     let q = {
         a.alloc();
         let r = a.alloc();
-        // FIXME (#47949)
-        a.alloc_leaked(exceptions)
+        a.alloc()
     };
 }
 
 fn panic_after_init_temp(a: &Allocator) {
-    // Panic in the drop of `r` can leak
-    let exceptions = vec![8];
     a.alloc();
     let p = a.alloc();
     {
         a.alloc();
         let r = a.alloc();
-        // FIXME (#47949)
-        a.alloc_leaked(exceptions)
+        a.alloc()
     };
 }
 
 fn panic_after_init_by_loop(a: &Allocator) {
-    // Panic in the drop of `r` can leak
-    let exceptions = vec![8];
     a.alloc();
     let p = a.alloc();
     let q = loop {
         a.alloc();
         let r = a.alloc();
-        // FIXME (#47949)
-        break a.alloc_leaked(exceptions);
+        break a.alloc();
+    };
+}
+
+fn panic_after_init_by_match(a: &Allocator, b: bool) {
+    a.alloc();
+    let p = a.alloc();
+    loop {
+        let q = match b {
+            true => {
+                a.alloc();
+                let r = a.alloc();
+                a.alloc()
+            }
+            false => {
+                a.alloc();
+                let r = a.alloc();
+                break a.alloc();
+            }
+        };
+        return;
+    };
+}
+
+fn panic_after_init_by_match_with_guard(a: &Allocator, b: bool) {
+    a.alloc();
+    let p = a.alloc();
+    let q = match a.alloc() {
+        _ if b => {
+            a.alloc();
+            let r = a.alloc();
+            a.alloc()
+        }
+        _ => {
+            a.alloc();
+            let r = a.alloc();
+            a.alloc()
+        },
+    };
+}
+
+fn panic_after_init_by_match_with_bindings_and_guard(a: &Allocator, b: bool) {
+    a.alloc();
+    let p = a.alloc();
+    let q = match a.alloc() {
+        _x if b => {
+            a.alloc();
+            let r = a.alloc();
+            a.alloc()
+        }
+        _x => {
+            a.alloc();
+            let r = a.alloc();
+            a.alloc()
+        },
+    };
+}
+
+fn panic_after_init_by_match_with_ref_bindings_and_guard(a: &Allocator, b: bool) {
+    a.alloc();
+    let p = a.alloc();
+    let q = match a.alloc() {
+        ref _x if b => {
+            a.alloc();
+            let r = a.alloc();
+            a.alloc()
+        }
+        ref _x => {
+            a.alloc();
+            let r = a.alloc();
+            a.alloc()
+        },
+    };
+}
+
+fn panic_after_init_by_break_if(a: &Allocator, b: bool) {
+    a.alloc();
+    let p = a.alloc();
+    let q = loop {
+        let r = a.alloc();
+        break if b {
+            let s = a.alloc();
+            a.alloc()
+        } else {
+            a.alloc()
+        };
     };
 }
 
-fn run_test<F>(mut f: F)
+fn run_test<F>(mut f: F, name: &'static str)
     where F: FnMut(&Allocator)
 {
-    let first_alloc = Allocator::new(usize::MAX);
+    let first_alloc = Allocator::new(usize::MAX, name);
     f(&first_alloc);
 
     for failing_op in 1..first_alloc.cur_ops.get()+1 {
-        let alloc = Allocator::new(failing_op);
+        let alloc = Allocator::new(failing_op, name);
         let alloc = &alloc;
         let f = panic::AssertUnwindSafe(&mut f);
         let result = panic::catch_unwind(move || {
@@ -360,77 +417,91 @@ fn run_test<F>(mut f: F)
     }
 }
 
-fn run_test_nopanic<F>(mut f: F)
+fn run_test_nopanic<F>(mut f: F, name: &'static str)
     where F: FnMut(&Allocator)
 {
-    let first_alloc = Allocator::new(usize::MAX);
+    let first_alloc = Allocator::new(usize::MAX, name);
     f(&first_alloc);
 }
 
+macro_rules! run_test {
+    ($e:expr) => { run_test($e, stringify!($e)); }
+}
+
 fn main() {
-    run_test(|a| dynamic_init(a, false));
-    run_test(|a| dynamic_init(a, true));
-    run_test(|a| dynamic_drop(a, false));
-    run_test(|a| dynamic_drop(a, true));
-
-    run_test(|a| assignment2(a, false, false));
-    run_test(|a| assignment2(a, false, true));
-    run_test(|a| assignment2(a, true, false));
-    run_test(|a| assignment2(a, true, true));
-
-    run_test(|a| assignment1(a, false));
-    run_test(|a| assignment1(a, true));
-
-    run_test(|a| array_simple(a));
-    run_test(|a| vec_simple(a));
-    run_test(|a| vec_unreachable(a));
-
-    run_test(|a| struct_dynamic_drop(a, false, false, false));
-    run_test(|a| struct_dynamic_drop(a, false, false, true));
-    run_test(|a| struct_dynamic_drop(a, false, true, false));
-    run_test(|a| struct_dynamic_drop(a, false, true, true));
-    run_test(|a| struct_dynamic_drop(a, true, false, false));
-    run_test(|a| struct_dynamic_drop(a, true, false, true));
-    run_test(|a| struct_dynamic_drop(a, true, true, false));
-    run_test(|a| struct_dynamic_drop(a, true, true, true));
-
-    run_test(|a| field_assignment(a, false));
-    run_test(|a| field_assignment(a, true));
-
-    run_test(|a| generator(a, 0));
-    run_test(|a| generator(a, 1));
-    run_test(|a| generator(a, 2));
-    run_test(|a| generator(a, 3));
-
-    run_test(|a| mixed_drop_and_nondrop(a));
-
-    run_test(|a| slice_pattern_first(a));
-    run_test(|a| slice_pattern_middle(a));
-    run_test(|a| slice_pattern_two(a));
-    run_test(|a| slice_pattern_last(a));
-    run_test(|a| slice_pattern_one_of(a, 0));
-    run_test(|a| slice_pattern_one_of(a, 1));
-    run_test(|a| slice_pattern_one_of(a, 2));
-    run_test(|a| slice_pattern_one_of(a, 3));
-
-    run_test(|a| subslice_pattern_from_end(a, true));
-    run_test(|a| subslice_pattern_from_end(a, false));
-    run_test(|a| subslice_pattern_from_end_with_drop(a, true, true));
-    run_test(|a| subslice_pattern_from_end_with_drop(a, true, false));
-    run_test(|a| subslice_pattern_from_end_with_drop(a, false, true));
-    run_test(|a| subslice_pattern_from_end_with_drop(a, false, false));
-    run_test(|a| slice_pattern_reassign(a));
-    run_test(|a| subslice_pattern_reassign(a));
-
-    run_test(|a| {
+    run_test!(|a| dynamic_init(a, false));
+    run_test!(|a| dynamic_init(a, true));
+    run_test!(|a| dynamic_drop(a, false));
+    run_test!(|a| dynamic_drop(a, true));
+
+    run_test!(|a| assignment2(a, false, false));
+    run_test!(|a| assignment2(a, false, true));
+    run_test!(|a| assignment2(a, true, false));
+    run_test!(|a| assignment2(a, true, true));
+
+    run_test!(|a| assignment1(a, false));
+    run_test!(|a| assignment1(a, true));
+
+    run_test!(|a| array_simple(a));
+    run_test!(|a| vec_simple(a));
+    run_test!(|a| vec_unreachable(a));
+
+    run_test!(|a| struct_dynamic_drop(a, false, false, false));
+    run_test!(|a| struct_dynamic_drop(a, false, false, true));
+    run_test!(|a| struct_dynamic_drop(a, false, true, false));
+    run_test!(|a| struct_dynamic_drop(a, false, true, true));
+    run_test!(|a| struct_dynamic_drop(a, true, false, false));
+    run_test!(|a| struct_dynamic_drop(a, true, false, true));
+    run_test!(|a| struct_dynamic_drop(a, true, true, false));
+    run_test!(|a| struct_dynamic_drop(a, true, true, true));
+
+    run_test!(|a| field_assignment(a, false));
+    run_test!(|a| field_assignment(a, true));
+
+    run_test!(|a| generator(a, 0));
+    run_test!(|a| generator(a, 1));
+    run_test!(|a| generator(a, 2));
+    run_test!(|a| generator(a, 3));
+
+    run_test!(|a| mixed_drop_and_nondrop(a));
+
+    run_test!(|a| slice_pattern_first(a));
+    run_test!(|a| slice_pattern_middle(a));
+    run_test!(|a| slice_pattern_two(a));
+    run_test!(|a| slice_pattern_last(a));
+    run_test!(|a| slice_pattern_one_of(a, 0));
+    run_test!(|a| slice_pattern_one_of(a, 1));
+    run_test!(|a| slice_pattern_one_of(a, 2));
+    run_test!(|a| slice_pattern_one_of(a, 3));
+
+    run_test!(|a| subslice_pattern_from_end(a, true));
+    run_test!(|a| subslice_pattern_from_end(a, false));
+    run_test!(|a| subslice_pattern_from_end_with_drop(a, true, true));
+    run_test!(|a| subslice_pattern_from_end_with_drop(a, true, false));
+    run_test!(|a| subslice_pattern_from_end_with_drop(a, false, true));
+    run_test!(|a| subslice_pattern_from_end_with_drop(a, false, false));
+    run_test!(|a| slice_pattern_reassign(a));
+    run_test!(|a| subslice_pattern_reassign(a));
+
+    run_test!(|a| {
         panic_after_return(a);
     });
-    run_test(|a| {
+    run_test!(|a| {
         panic_after_return_expr(a);
     });
-    run_test(|a| panic_after_init(a));
-    run_test(|a| panic_after_init_temp(a));
-    run_test(|a| panic_after_init_by_loop(a));
-
-    run_test_nopanic(|a| union1(a));
+    run_test!(|a| panic_after_init(a));
+    run_test!(|a| panic_after_init_temp(a));
+    run_test!(|a| panic_after_init_by_loop(a));
+    run_test!(|a| panic_after_init_by_match(a, false));
+    run_test!(|a| panic_after_init_by_match(a, true));
+    run_test!(|a| panic_after_init_by_match_with_guard(a, false));
+    run_test!(|a| panic_after_init_by_match_with_guard(a, true));
+    run_test!(|a| panic_after_init_by_match_with_bindings_and_guard(a, false));
+    run_test!(|a| panic_after_init_by_match_with_bindings_and_guard(a, true));
+    run_test!(|a| panic_after_init_by_match_with_ref_bindings_and_guard(a, false));
+    run_test!(|a| panic_after_init_by_match_with_ref_bindings_and_guard(a, true));
+    run_test!(|a| panic_after_init_by_break_if(a, false));
+    run_test!(|a| panic_after_init_by_break_if(a, true));
+
+    run_test_nopanic(|a| union1(a), "|a| union1(a)");
 }