From a4e6b77ba7d15b608ccd1f5ec89e01b94b13f837 Mon Sep 17 00:00:00 2001 From: Matthew Jasper Date: Fri, 31 May 2024 15:47:18 +0000 Subject: [PATCH 1/5] Extend handling of moved_locals in mir building to some unwind paths --- .../src/build/expr/as_rvalue.rs | 2 +- .../rustc_mir_build/src/build/expr/into.rs | 13 +++++-- .../rustc_mir_build/src/build/expr/stmt.rs | 5 ++- compiler/rustc_mir_build/src/build/scope.rs | 37 +++++-------------- 4 files changed, 23 insertions(+), 34 deletions(-) diff --git a/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs b/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs index c5ee6db5999a5..3d56ac9809a22 100644 --- a/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs +++ b/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs @@ -738,7 +738,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { this.diverge_from(block); block = success; } - this.record_operands_moved(&[Spanned { node: value_operand, span: DUMMY_SP }]); + this.record_operands_moved(&[value_operand]); } block.and(Rvalue::Aggregate(Box::new(AggregateKind::Array(elem_ty)), IndexVec::new())) } diff --git a/compiler/rustc_mir_build/src/build/expr/into.rs b/compiler/rustc_mir_build/src/build/expr/into.rs index 942c69b5c0a75..9605f22f0c9ef 100644 --- a/compiler/rustc_mir_build/src/build/expr/into.rs +++ b/compiler/rustc_mir_build/src/build/expr/into.rs @@ -239,7 +239,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } ExprKind::Call { ty: _, fun, ref args, from_hir_call, fn_span } => { let fun = unpack!(block = this.as_local_operand(block, fun)); - let args: Box<[_]> = args + let spanned_args: Box<[_]> = args .into_iter() .copied() .map(|arg| Spanned { @@ -247,11 +247,10 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { span: this.thir.exprs[arg].span, }) .collect(); + let args: Vec<_> = spanned_args.iter().map(|arg| arg.node.clone()).collect(); let success = this.cfg.start_new_block(); - this.record_operands_moved(&args); - debug!("expr_into_dest: fn_span={:?}", fn_span); this.cfg.terminate( @@ -259,7 +258,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { source_info, TerminatorKind::Call { func: fun, - args, + args: spanned_args, unwind: UnwindAction::Continue, destination, // The presence or absence of a return edge affects control-flow sensitive @@ -279,6 +278,12 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { }, ); this.diverge_from(block); + + // This is here and not before `diverge_from` to avoid breaking + // the example in #80949. + // FIXME(matthewjasper): Look at this again if Polonius is + // stabilized. 
+ this.record_operands_moved(&args); success.unit() } ExprKind::Use { source } => this.expr_into_dest(destination, block, source), diff --git a/compiler/rustc_mir_build/src/build/expr/stmt.rs b/compiler/rustc_mir_build/src/build/expr/stmt.rs index 88b76c46c90bf..327ccb159c60d 100644 --- a/compiler/rustc_mir_build/src/build/expr/stmt.rs +++ b/compiler/rustc_mir_build/src/build/expr/stmt.rs @@ -106,7 +106,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { this.in_scope((region_scope, source_info), lint_level, |this| { let fun = unpack!(block = this.as_local_operand(block, fun)); - let args: Box<[_]> = args + let spanned_args: Box<[_]> = args .into_iter() .copied() .map(|arg| Spanned { @@ -114,6 +114,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { span: this.thir.exprs[arg].span, }) .collect(); + let args: Vec<_> = spanned_args.iter().map(|arg| arg.node.clone()).collect(); this.record_operands_moved(&args); @@ -124,7 +125,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { this.cfg.terminate( block, source_info, - TerminatorKind::TailCall { func: fun, args, fn_span }, + TerminatorKind::TailCall { func: fun, args: spanned_args, fn_span }, ); this.cfg.start_new_block().unit() diff --git a/compiler/rustc_mir_build/src/build/scope.rs b/compiler/rustc_mir_build/src/build/scope.rs index 948301e2ece4d..2dde81ed85077 100644 --- a/compiler/rustc_mir_build/src/build/scope.rs +++ b/compiler/rustc_mir_build/src/build/scope.rs @@ -92,7 +92,6 @@ use rustc_middle::mir::*; use rustc_middle::thir::{ExprId, LintLevel}; use rustc_middle::{bug, span_bug}; use rustc_session::lint::Level; -use rustc_span::source_map::Spanned; use rustc_span::{Span, DUMMY_SP}; use tracing::{debug, instrument}; @@ -128,8 +127,6 @@ struct Scope { /// end of the vector (top of the stack) first. drops: Vec, - moved_locals: Vec, - /// The drop index that will drop everything in and below this scope on an /// unwind path. cached_unwind_block: Option, @@ -445,7 +442,6 @@ impl<'tcx> Scopes<'tcx> { source_scope: vis_scope, region_scope: region_scope.0, drops: vec![], - moved_locals: vec![], cached_unwind_block: None, cached_coroutine_drop_block: None, }); @@ -752,13 +748,13 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { pub(crate) fn break_for_tail_call( &mut self, mut block: BasicBlock, - args: &[Spanned>], + args: &[Operand<'tcx>], source_info: SourceInfo, ) -> BlockAnd<()> { let arg_drops: Vec<_> = args .iter() .rev() - .filter_map(|arg| match &arg.node { + .filter_map(|arg| match arg { Operand::Copy(_) => bug!("copy op in tail call args"), Operand::Move(place) => { let local = @@ -1102,14 +1098,13 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { span_bug!(span, "region scope {:?} not in scope to drop {:?}", region_scope, local); } - /// Indicates that the "local operand" stored in `local` is + /// Indicates that the "local operands" stored in `local` are /// *moved* at some point during execution (see `local_scope` for /// more information about what a "local operand" is -- in short, /// it's an intermediate operand created as part of preparing some /// MIR instruction). We use this information to suppress - /// redundant drops on the non-unwind paths. This results in less - /// MIR, but also avoids spurious borrow check errors - /// (c.f. #64391). + /// redundant drops. This results in less MIR, but also avoids spurious + /// borrow check errors (c.f. #64391). 
/// /// Example: when compiling the call to `foo` here: /// @@ -1138,27 +1133,23 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { /// spurious borrow-check errors -- the problem, ironically, is /// not the `DROP(_X)` itself, but the (spurious) unwind pathways /// that it creates. See #64391 for an example. - pub(crate) fn record_operands_moved(&mut self, operands: &[Spanned>]) { + pub(crate) fn record_operands_moved(&mut self, operands: &[Operand<'tcx>]) { let local_scope = self.local_scope(); let scope = self.scopes.scopes.last_mut().unwrap(); assert_eq!(scope.region_scope, local_scope, "local scope is not the topmost scope!",); // look for moves of a local variable, like `MOVE(_X)` - let locals_moved = operands.iter().flat_map(|operand| match operand.node { + let locals_moved = operands.iter().flat_map(|operand| match operand { Operand::Copy(_) | Operand::Constant(_) => None, Operand::Move(place) => place.as_local(), }); for local in locals_moved { - // check if we have a Drop for this operand and -- if so - // -- add it to the list of moved operands. Note that this - // local might not have been an operand created for this - // call, it could come from other places too. - if scope.drops.iter().any(|drop| drop.local == local && drop.kind == DropKind::Value) { - scope.moved_locals.push(local); - } + // Unschedule drops from the scope. + scope.drops.retain(|drop| drop.local != local || drop.kind != DropKind::Value); } + scope.invalidate_cache(); } // Other @@ -1382,14 +1373,6 @@ fn build_scope_drops<'tcx>( debug_assert_eq!(unwind_drops.drops[unwind_to].data.kind, drop_data.kind); unwind_to = unwind_drops.drops[unwind_to].next; - // If the operand has been moved, and we are not on an unwind - // path, then don't generate the drop. (We only take this into - // account for non-unwind paths so as not to disturb the - // caching mechanism.) - if scope.moved_locals.iter().any(|&o| o == local) { - continue; - } - unwind_drops.add_entry_point(block, unwind_to); let next = cfg.start_new_block(); From 1bcdc2e0081a1e4c980f2daaf47aa10fd4d40167 Mon Sep 17 00:00:00 2001 From: Matthew Jasper Date: Fri, 31 May 2024 16:21:33 +0000 Subject: [PATCH 2/5] Use record_operands_moved more in mir building --- compiler/rustc_mir_build/src/build/expr/as_rvalue.rs | 9 ++++++++- compiler/rustc_mir_build/src/build/expr/into.rs | 2 ++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs b/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs index 3d56ac9809a22..84d48c0035ff3 100644 --- a/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs +++ b/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs @@ -20,6 +20,8 @@ use rustc_middle::ty::{self, Ty, UpvarArgs}; use rustc_span::{Span, DUMMY_SP}; use tracing::debug; +use std::slice; + impl<'a, 'tcx> Builder<'a, 'tcx> { /// Returns an rvalue suitable for use until the end of the current /// scope expression. 
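[Editor's note: a minimal, self-contained sketch of what `record_operands_moved` is doing after this patch, not part of the patch itself. The `Scope`, `Local`, and `DropKind` definitions below are simplified stand-ins for the rustc types: once a temporary local has been moved into a call or aggregate, its scheduled value drop is simply removed from the scope's drop list, mirroring the `scope.drops.retain(..)` call above.]

// Simplified model of unscheduling a drop for a moved operand.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum DropKind { Value, Storage }

type Local = usize;

struct Scope {
    drops: Vec<(Local, DropKind)>,
}

impl Scope {
    /// Remove the scheduled value drop of `local` once it has been moved out of.
    fn record_moved(&mut self, local: Local) {
        self.drops.retain(|&(l, kind)| l != local || kind != DropKind::Value);
    }
}

fn main() {
    let mut scope = Scope { drops: vec![(0, DropKind::Storage), (0, DropKind::Value)] };
    scope.record_moved(0);
    // Only the storage entry remains; the value drop has been unscheduled.
    assert_eq!(scope.drops, vec![(0, DropKind::Storage)]);
}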
@@ -192,7 +194,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { value, ) ); - block.and(Rvalue::Use(Operand::Move(Place::from(result)))) + let result_operand = Operand::Move(Place::from(result)); + this.record_operands_moved(slice::from_ref(&result_operand)); + block.and(Rvalue::Use(result_operand)) } ExprKind::Cast { source } => { let source_expr = &this.thir[source]; @@ -360,6 +364,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { }) .collect(); + this.record_operands_moved(&fields.raw); block.and(Rvalue::Aggregate(Box::new(AggregateKind::Array(el_ty)), fields)) } ExprKind::Tuple { ref fields } => { @@ -381,6 +386,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { }) .collect(); + this.record_operands_moved(&fields.raw); block.and(Rvalue::Aggregate(Box::new(AggregateKind::Tuple), fields)) } ExprKind::Closure(box ClosureExpr { @@ -483,6 +489,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { Box::new(AggregateKind::CoroutineClosure(closure_id.to_def_id(), args)) } }; + this.record_operands_moved(&operands.raw); block.and(Rvalue::Aggregate(result, operands)) } ExprKind::Assign { .. } | ExprKind::AssignOp { .. } => { diff --git a/compiler/rustc_mir_build/src/build/expr/into.rs b/compiler/rustc_mir_build/src/build/expr/into.rs index 9605f22f0c9ef..11f9513751539 100644 --- a/compiler/rustc_mir_build/src/build/expr/into.rs +++ b/compiler/rustc_mir_build/src/build/expr/into.rs @@ -383,6 +383,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { user_ty, active_field_index, )); + this.record_operands_moved(&fields.raw); this.cfg.push_assign( block, source_info, @@ -561,6 +562,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { ) ); let resume = this.cfg.start_new_block(); + this.record_operands_moved(slice::from_ref(&value)); this.cfg.terminate( block, source_info, From f547dc87bd082f7f1bb826998eee04b9c1f748f9 Mon Sep 17 00:00:00 2001 From: Matthew Jasper Date: Fri, 31 May 2024 17:10:29 +0000 Subject: [PATCH 3/5] Avoid leaking values when panicking in certain cases --- compiler/rustc_mir_build/src/build/block.rs | 24 +- .../src/build/expr/as_rvalue.rs | 4 +- .../rustc_mir_build/src/build/expr/as_temp.rs | 6 +- .../rustc_mir_build/src/build/expr/into.rs | 104 +++-- .../rustc_mir_build/src/build/matches/mod.rs | 38 +- compiler/rustc_mir_build/src/build/mod.rs | 19 +- compiler/rustc_mir_build/src/build/scope.rs | 99 ++++- .../alloc/src/collections/btree/map/tests.rs | 3 +- .../mir-opt/box_expr.main.ElaborateDrops.diff | 43 +- ...e_out.move_out_by_subslice.built.after.mir | 46 +- ...move_out.move_out_from_end.built.after.mir | 46 +- ...nline_diverging.h.Inline.panic-unwind.diff | 14 +- ...test.ElaborateDrops.before.panic-abort.mir | 40 +- ...est.ElaborateDrops.before.panic-unwind.mir | 38 +- tests/mir-opt/issue_91633.foo.built.after.mir | 12 +- ...ropping.ScalarReplacementOfAggregates.diff | 12 +- tests/ui/deriving/deriving-with-helper.rs | 3 + tests/ui/drop/dynamic-drop-async.rs | 165 ++++--- tests/ui/drop/dynamic-drop.rs | 410 +++++++++++------- tests/ui/label/label_break_drop_value.rs | 17 + tests/ui/nll/ice-106874.rs | 2 - tests/ui/nll/ice-106874.stderr | 16 +- 22 files changed, 710 insertions(+), 451 deletions(-) create mode 100644 tests/ui/label/label_break_drop_value.rs diff --git a/compiler/rustc_mir_build/src/build/block.rs b/compiler/rustc_mir_build/src/build/block.rs index 5ccbd7c59cfba..aa413320aaf29 100644 --- a/compiler/rustc_mir_build/src/build/block.rs +++ b/compiler/rustc_mir_build/src/build/block.rs @@ -1,4 +1,5 @@ use crate::build::matches::{DeclareLetBindings, EmitStorageLive, ScheduleDrops}; +use crate::build::scope::DropKind; use 
crate::build::ForGuard::OutsideGuard; use crate::build::{BlockAnd, BlockAndExtension, BlockFrame, Builder}; use rustc_middle::middle::region::Scope; @@ -12,6 +13,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { pub(crate) fn ast_block( &mut self, destination: Place<'tcx>, + scope: Option, block: BasicBlock, ast_block: BlockId, source_info: SourceInfo, @@ -20,11 +22,19 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { self.thir[ast_block]; self.in_scope((region_scope, source_info), LintLevel::Inherited, move |this| { if targeted_by_break { - this.in_breakable_scope(None, destination, span, |this| { - Some(this.ast_block_stmts(destination, block, span, stmts, expr, region_scope)) + this.in_breakable_scope(None, destination, scope, span, |this| { + Some(this.ast_block_stmts( + destination, + scope, + block, + span, + stmts, + expr, + region_scope, + )) }) } else { - this.ast_block_stmts(destination, block, span, stmts, expr, region_scope) + this.ast_block_stmts(destination, scope, block, span, stmts, expr, region_scope) } }) } @@ -32,6 +42,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { fn ast_block_stmts( &mut self, destination: Place<'tcx>, + scope: Option, mut block: BasicBlock, span: Span, stmts: &[StmtId], @@ -169,6 +180,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { unpack!( failure_block = this.ast_block( dummy_place, + None, failure_entry, *else_block, this.source_info(else_block_span), @@ -333,7 +345,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { this.block_context .push(BlockFrame::TailExpr { tail_result_is_ignored, span: expr.span }); - unpack!(block = this.expr_into_dest(destination, block, expr_id)); + unpack!(block = this.expr_into_dest(destination, scope, block, expr_id)); let popped = this.block_context.pop(); assert!(popped.is_some_and(|bf| bf.is_tail_expr())); @@ -350,6 +362,10 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { // We only want to assign an implicit `()` as the return value of the block if the // block does not diverge. (Otherwise, we may try to assign a unit to a `!`-type.) this.cfg.push_assign_unit(block, source_info, destination, this.tcx); + } else if let Some(destination_local) = destination.as_local() + && let Some(scope) = scope + { + this.schedule_drop(span, scope, destination_local, DropKind::Value); } } // Finally, we pop all the let scopes before exiting out from the scope of block diff --git a/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs b/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs index 84d48c0035ff3..481915905d94b 100644 --- a/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs +++ b/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs @@ -186,10 +186,12 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let box_ = Rvalue::ShallowInitBox(Operand::Move(storage), value_ty); this.cfg.push_assign(block, source_info, Place::from(result), box_); - // initialize the box contents: + // Initialize the box contents. No scope is needed since the + // `Box` is already scheduled to be dropped. 
unpack!( block = this.expr_into_dest( this.tcx.mk_place_deref(Place::from(result)), + None, block, value, ) diff --git a/compiler/rustc_mir_build/src/build/expr/as_temp.rs b/compiler/rustc_mir_build/src/build/expr/as_temp.rs index 607c7c3259c18..3040aabbd57b5 100644 --- a/compiler/rustc_mir_build/src/build/expr/as_temp.rs +++ b/compiler/rustc_mir_build/src/build/expr/as_temp.rs @@ -112,11 +112,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } } - unpack!(block = this.expr_into_dest(temp_place, block, expr_id)); - - if let Some(temp_lifetime) = temp_lifetime { - this.schedule_drop(expr_span, temp_lifetime, temp, DropKind::Value); - } + unpack!(block = this.expr_into_dest(temp_place, temp_lifetime, block, expr_id)); block.and(temp) } diff --git a/compiler/rustc_mir_build/src/build/expr/into.rs b/compiler/rustc_mir_build/src/build/expr/into.rs index 11f9513751539..58a8611a9b827 100644 --- a/compiler/rustc_mir_build/src/build/expr/into.rs +++ b/compiler/rustc_mir_build/src/build/expr/into.rs @@ -2,11 +2,14 @@ use crate::build::expr::category::{Category, RvalueFunc}; use crate::build::matches::DeclareLetBindings; +use crate::build::scope::DropKind; use crate::build::{BlockAnd, BlockAndExtension, BlockFrame, Builder, NeedsTemporary}; use rustc_ast::InlineAsmOptions; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::stack::ensure_sufficient_stack; use rustc_hir as hir; +use rustc_index::IndexVec; +use rustc_middle::middle::region; use rustc_middle::mir::*; use rustc_middle::span_bug; use rustc_middle::thir::*; @@ -15,6 +18,8 @@ use rustc_span::source_map::Spanned; use std::iter; use tracing::{debug, instrument}; +use std::slice; + impl<'a, 'tcx> Builder<'a, 'tcx> { /// Compile `expr`, storing the result into `destination`, which /// is assumed to be uninitialized. @@ -22,6 +27,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { pub(crate) fn expr_into_dest( &mut self, destination: Place<'tcx>, + scope: Option, mut block: BasicBlock, expr_id: ExprId, ) -> BlockAnd<()> { @@ -36,6 +42,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let expr_is_block_or_scope = matches!(expr.kind, ExprKind::Block { .. } | ExprKind::Scope { .. }); + let schedule_drop = move |this: &mut Self| { + if let Some(drop_scope) = scope { + let local = + destination.as_local().expect("cannot schedule drop of non-Local place"); + this.schedule_drop(expr_span, drop_scope, local, DropKind::Value); + } + }; + if !expr_is_block_or_scope { this.block_context.push(BlockFrame::SubExpr); } @@ -45,15 +59,16 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let region_scope = (region_scope, source_info); ensure_sufficient_stack(|| { this.in_scope(region_scope, lint_level, |this| { - this.expr_into_dest(destination, block, value) + this.expr_into_dest(destination, scope, block, value) }) }) } ExprKind::Block { block: ast_block } => { - this.ast_block(destination, block, ast_block, source_info) + this.ast_block(destination, scope, block, ast_block, source_info) } ExprKind::Match { scrutinee, ref arms, .. } => this.match_expr( destination, + scope, block, scrutinee, arms, @@ -91,7 +106,15 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { )); // Lower the `then` arm into its block. - this.expr_into_dest(destination, then_blk, then) + let then_blk = + this.expr_into_dest(destination, scope, then_blk, then); + if let Some(drop_scope) = scope { + let local = destination + .as_local() + .expect("cannot unschedule drop of non-Local place"); + this.unschedule_drop(drop_scope, local); + } + then_blk }); // Pack `(then_block, else_block)` into `BlockAnd`. 
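[Editor's note: a surface-level Rust illustration of the behavior the new `scope` parameter and `schedule_drop` closure are after, not part of the patch. The type names `NoisyDrop` and `PanicOnDrop` are made up for the example: once the destination (here, the return place) has been initialized, it should be dropped on the unwind path rather than leaked if a later destructor panics.]

use std::panic;

struct NoisyDrop(&'static str);
impl Drop for NoisyDrop {
    fn drop(&mut self) { println!("dropping {}", self.0); }
}

struct PanicOnDrop;
impl Drop for PanicOnDrop {
    fn drop(&mut self) { panic!("injected failure"); }
}

fn make() -> NoisyDrop {
    let _guard = PanicOnDrop;   // this destructor panics after the return value exists
    NoisyDrop("return value")   // written to the return place before `_guard` is dropped
}

fn main() {
    // With the return place's drop scheduled during MIR building, the unwind
    // path drops the already-initialized value instead of leaking it.
    let _ = panic::catch_unwind(|| make());
}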
@@ -105,7 +128,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { // If there is an `else` arm, lower it into `else_blk`. if let Some(else_expr) = else_opt { - unpack!(else_blk = this.expr_into_dest(destination, else_blk, else_expr)); + unpack!( + else_blk = this.expr_into_dest(destination, scope, else_blk, else_expr) + ); } else { // There is no `else` arm, so we know both arms have type `()`. // Generate the implicit `else {}` by assigning unit. @@ -140,6 +165,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { // This is an optimization. If the expression was a call then we already have an // unreachable block. Don't bother to terminate it and create a new one. + schedule_drop(this); if is_call { block.unit() } else { @@ -187,7 +213,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { const_: Const::from_bool(this.tcx, constant), }, ); - let mut rhs_block = unpack!(this.expr_into_dest(destination, continuation, rhs)); + let mut rhs_block = + unpack!(this.expr_into_dest(destination, scope, continuation, rhs)); // Instrument the lowered RHS's value for condition coverage. // (Does nothing if condition coverage is not enabled.) this.visit_coverage_standalone_condition(rhs, destination, &mut rhs_block); @@ -213,29 +240,37 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { // Start the loop. this.cfg.goto(block, source_info, loop_block); - this.in_breakable_scope(Some(loop_block), destination, expr_span, move |this| { - // conduct the test, if necessary - let body_block = this.cfg.start_new_block(); - this.cfg.terminate( - loop_block, - source_info, - TerminatorKind::FalseUnwind { - real_target: body_block, - unwind: UnwindAction::Continue, - }, - ); - this.diverge_from(loop_block); - - // The “return” value of the loop body must always be a unit. We therefore - // introduce a unit temporary as the destination for the loop body. - let tmp = this.get_unit_temp(); - // Execute the body, branching back to the test. - let body_block_end = unpack!(this.expr_into_dest(tmp, body_block, body)); - this.cfg.goto(body_block_end, source_info, loop_block); - - // Loops are only exited by `break` expressions. - None - }) + this.in_breakable_scope( + Some(loop_block), + destination, + scope, + expr_span, + move |this| { + // conduct the test, if necessary + let body_block = this.cfg.start_new_block(); + this.cfg.terminate( + loop_block, + source_info, + TerminatorKind::FalseUnwind { + real_target: body_block, + unwind: UnwindAction::Continue, + }, + ); + this.diverge_from(loop_block); + + // The “return” value of the loop body must always be a unit. We therefore + // introduce a unit temporary as the destination for the loop body. + let tmp = this.get_unit_temp(); + // Execute the body, branching back to the test. + let body_block_end = + unpack!(this.expr_into_dest(tmp, scope, body_block, body)); + this.cfg.goto(body_block_end, source_info, loop_block); + schedule_drop(this); + + // Loops are only exited by `break` expressions. + None + }, + ) } ExprKind::Call { ty: _, fun, ref args, from_hir_call, fn_span } => { let fun = unpack!(block = this.as_local_operand(block, fun)); @@ -284,9 +319,10 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { // FIXME(matthewjasper): Look at this again if Polonius is // stabilized. 
this.record_operands_moved(&args); + schedule_drop(this); success.unit() } - ExprKind::Use { source } => this.expr_into_dest(destination, block, source), + ExprKind::Use { source } => this.expr_into_dest(destination, scope, block, source), ExprKind::Borrow { arg, borrow_kind } => { // We don't do this in `as_rvalue` because we use `as_place` // for borrow expressions, so we cannot create an `RValue` that @@ -349,7 +385,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let field_names = adt_def.variant(variant_index).fields.indices(); - let fields = if let Some(FruInfo { base, field_types }) = base { + let fields: IndexVec<_, _> = if let Some(FruInfo { base, field_types }) = base { let place_builder = unpack!(block = this.as_place_builder(block, *base)); // MIR does not natively support FRU, so for each @@ -390,6 +426,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { destination, Rvalue::Aggregate(adt, fields), ); + schedule_drop(this); block.unit() } ExprKind::InlineAsm(box InlineAsmExpr { @@ -468,7 +505,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { targets.push(target); let tmp = this.get_unit_temp(); - let target = unpack!(this.ast_block(tmp, target, block, source_info)); + let target = + unpack!(this.ast_block(tmp, scope, target, block, source_info)); this.cfg.terminate( target, source_info, @@ -532,6 +570,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let place = unpack!(block = this.as_place(block, expr_id)); let rvalue = Rvalue::Use(this.consume_by_copy_or_move(place)); this.cfg.push_assign(block, source_info, destination, rvalue); + schedule_drop(this); block.unit() } ExprKind::Index { .. } | ExprKind::Deref { .. } | ExprKind::Field { .. } => { @@ -547,6 +586,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let place = unpack!(block = this.as_place(block, expr_id)); let rvalue = Rvalue::Use(this.consume_by_copy_or_move(place)); this.cfg.push_assign(block, source_info, destination, rvalue); + schedule_drop(this); block.unit() } @@ -569,6 +609,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { TerminatorKind::Yield { value, resume, resume_arg: destination, drop: None }, ); this.coroutine_drop_cleanup(block); + schedule_drop(this); resume.unit() } @@ -605,6 +646,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let rvalue = unpack!(block = this.as_local_rvalue(block, expr_id)); this.cfg.push_assign(block, source_info, destination, rvalue); + schedule_drop(this); block.unit() } }; diff --git a/compiler/rustc_mir_build/src/build/matches/mod.rs b/compiler/rustc_mir_build/src/build/matches/mod.rs index 841ef2719c99d..81294fdcba495 100644 --- a/compiler/rustc_mir_build/src/build/matches/mod.rs +++ b/compiler/rustc_mir_build/src/build/matches/mod.rs @@ -350,6 +350,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { pub(crate) fn match_expr( &mut self, destination: Place<'tcx>, + destination_scope: Option, mut block: BasicBlock, scrutinee_id: ExprId, arms: &[ArmId], @@ -386,6 +387,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { self.lower_match_arms( destination, + destination_scope, scrutinee_place, scrutinee_span, arm_candidates, @@ -442,20 +444,45 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { /// (by [Builder::lower_match_tree]). /// /// `outer_source_info` is the SourceInfo for the whole match. + /// [Builder::lower_match_tree]). 
fn lower_match_arms( &mut self, destination: Place<'tcx>, + destination_scope: Option, scrutinee_place_builder: PlaceBuilder<'tcx>, scrutinee_span: Span, arm_candidates: Vec<(&'_ Arm<'tcx>, Candidate<'_, 'tcx>)>, outer_source_info: SourceInfo, fake_borrow_temps: Vec<(Place<'tcx>, Local, FakeBorrowKind)>, ) -> BlockAnd<()> { + if arm_candidates.is_empty() { + // If there are no arms to schedule drops, then we have to do it + // manually. + if let Some(scope) = destination_scope { + self.schedule_drop( + outer_source_info.span, + scope, + destination.as_local().unwrap(), + DropKind::Value, + ); + } + return self.cfg.start_new_block().unit(); + } + + let mut first_arm = true; let arm_end_blocks: Vec<_> = arm_candidates .into_iter() .map(|(arm, candidate)| { debug!("lowering arm {:?}\ncandidate = {:?}", arm, candidate); + if first_arm { + first_arm = false; + } else if let Some(scope) = destination_scope { + // Unschedule the drop from the previous arm, it will then + // be rescheduled by the end of this arm. + self.unschedule_drop(scope, destination.as_local().unwrap()); + } + let arm_source_info = self.source_info(arm.span); let arm_scope = (arm.scope, arm_source_info); let match_scope = self.local_scope(); @@ -500,7 +527,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { this.source_scope = source_scope; } - this.expr_into_dest(destination, arm_block, arm.body) + this.expr_into_dest(destination, destination_scope, arm_block, arm.body) }) }) .collect(); @@ -621,13 +648,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { OutsideGuard, ScheduleDrops::Yes, ); - unpack!(block = self.expr_into_dest(place, block, initializer_id)); + let region_scope = self.region_scope_tree.var_scope(var.0.local_id); + + unpack!(block = self.expr_into_dest(place, region_scope, block, initializer_id)); // Inject a fake read, see comments on `FakeReadCause::ForLet`. let source_info = self.source_info(irrefutable_pat.span); self.cfg.push_fake_read(block, source_info, FakeReadCause::ForLet(None), place); - self.schedule_drop_for_binding(var, irrefutable_pat.span, OutsideGuard); block.unit() } @@ -653,6 +681,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { }, ascription: thir::Ascription { ref annotation, variance: _ }, } => { + let region_scope = self.region_scope_tree.var_scope(var.0.local_id); let place = self.storage_live_binding( block, var, @@ -660,7 +689,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { OutsideGuard, ScheduleDrops::Yes, ); - unpack!(block = self.expr_into_dest(place, block, initializer_id)); + unpack!(block = self.expr_into_dest(place, region_scope, block, initializer_id)); // Inject a fake read, see comments on `FakeReadCause::ForLet`. 
let pattern_source_info = self.source_info(irrefutable_pat.span); @@ -695,7 +724,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { }, ); - self.schedule_drop_for_binding(var, irrefutable_pat.span, OutsideGuard); block.unit() } diff --git a/compiler/rustc_mir_build/src/build/mod.rs b/compiler/rustc_mir_build/src/build/mod.rs index 0f9746cb719ca..1ff9ec7ff7b56 100644 --- a/compiler/rustc_mir_build/src/build/mod.rs +++ b/compiler/rustc_mir_build/src/build/mod.rs @@ -520,15 +520,21 @@ fn construct_fn<'tcx>( let arg_scope_s = (arg_scope, source_info); // Attribute epilogue to function's closing brace let fn_end = span_with_body.shrink_to_hi(); - let return_block = - unpack!(builder.in_breakable_scope(None, Place::return_place(), fn_end, |builder| { + let return_block = unpack!(builder.in_breakable_scope( + None, + Place::return_place(), + Some(call_site_scope), + fn_end, + |builder| { Some(builder.in_scope(arg_scope_s, LintLevel::Inherited, |builder| { builder.args_and_body(START_BLOCK, arguments, arg_scope, expr) })) - })); + }, + )); let source_info = builder.source_info(fn_end); builder.cfg.terminate(return_block, source_info, TerminatorKind::Return); builder.build_drop_trees(); + builder.unschedule_return_place_drop(); return_block.unit() })); @@ -579,7 +585,7 @@ fn construct_const<'a, 'tcx>( Builder::new(thir, infcx, def, hir_id, span, 0, const_ty, const_ty_span, None); let mut block = START_BLOCK; - unpack!(block = builder.expr_into_dest(Place::return_place(), block, expr)); + unpack!(block = builder.expr_into_dest(Place::return_place(), None, block, expr)); let source_info = builder.source_info(span); builder.cfg.terminate(block, source_info, TerminatorKind::Return); @@ -976,7 +982,10 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { self.cfg.terminate(block, source_info, TerminatorKind::Unreachable); self.cfg.start_new_block().unit() } else { - self.expr_into_dest(Place::return_place(), block, expr_id) + let body = self.tcx.hir().body_owned_by(self.def_id); + let call_site_scope = + region::Scope { id: body.id().hir_id.local_id, data: region::ScopeData::CallSite }; + self.expr_into_dest(Place::return_place(), Some(call_site_scope), block, expr_id) } } diff --git a/compiler/rustc_mir_build/src/build/scope.rs b/compiler/rustc_mir_build/src/build/scope.rs index 2dde81ed85077..2be29f461c137 100644 --- a/compiler/rustc_mir_build/src/build/scope.rs +++ b/compiler/rustc_mir_build/src/build/scope.rs @@ -162,6 +162,8 @@ struct BreakableScope<'tcx> { /// The destination of the loop/block expression itself (i.e., where to put /// the result of a `break` or `return` expression) break_destination: Place<'tcx>, + /// The scope that the destination should have its drop scheduled in. + destination_scope: Option, /// Drops that happen on the `break`/`return` path. break_drops: DropTree, /// Drops that happen on the `continue` path. 
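[Editor's note: a small illustration of the `break`-with-value shape that the new `destination_scope` field tracks, mirroring the `panic_after_init_by_loop` test added later in this series; it is an editor sketch, not patch content. The value produced by `break` is written into the loop's destination before the body's other locals are dropped, so a panicking destructor on that path must not leak it.]

fn init_by_loop() -> Vec<u32> {
    let result = loop {
        let _scratch = vec![1, 2, 3]; // dropped after the break value is written
        break vec![4, 5, 6];          // initializes `result`'s destination first
    };
    result
}

fn main() {
    assert_eq!(init_by_loop(), vec![4, 5, 6]);
}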
@@ -477,6 +479,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { &mut self, loop_block: Option, break_destination: Place<'tcx>, + destination_scope: Option, span: Span, f: F, ) -> BlockAnd<()> @@ -487,9 +490,11 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let scope = BreakableScope { region_scope, break_destination, + destination_scope, break_drops: DropTree::new(), continue_drops: loop_block.map(|_| DropTree::new()), }; + let continue_block = loop_block.map(|block| (block, self.diverge_cleanup())); self.scopes.breakable_scopes.push(scope); let normal_exit_block = f(self); let breakable_scope = self.scopes.breakable_scopes.pop().unwrap(); @@ -497,7 +502,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let break_block = self.build_exit_tree(breakable_scope.break_drops, region_scope, span, None); if let Some(drops) = breakable_scope.continue_drops { - self.build_exit_tree(drops, region_scope, span, loop_block); + self.build_exit_tree(drops, region_scope, span, continue_block); } match (normal_exit_block, break_block) { (Some(block), None) | (None, Some(block)) => block, @@ -630,22 +635,22 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { .rposition(|breakable_scope| breakable_scope.region_scope == scope) .unwrap_or_else(|| span_bug!(span, "no enclosing breakable scope found")) }; - let (break_index, destination) = match target { + let (break_index, destination, dest_scope) = match target { BreakableTarget::Return => { let scope = &self.scopes.breakable_scopes[0]; if scope.break_destination != Place::return_place() { span_bug!(span, "`return` in item with no return scope"); } - (0, Some(scope.break_destination)) + (0, Some(scope.break_destination), scope.destination_scope) } BreakableTarget::Break(scope) => { let break_index = get_scope_index(scope); let scope = &self.scopes.breakable_scopes[break_index]; - (break_index, Some(scope.break_destination)) + (break_index, Some(scope.break_destination), scope.destination_scope) } BreakableTarget::Continue(scope) => { let break_index = get_scope_index(scope); - (break_index, None) + (break_index, None, None) } }; @@ -653,7 +658,17 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { (Some(destination), Some(value)) => { debug!("stmt_expr Break val block_context.push(SubExpr)"); self.block_context.push(BlockFrame::SubExpr); - unpack!(block = self.expr_into_dest(destination, block, value)); + unpack!(block = self.expr_into_dest(destination, dest_scope, block, value)); + if let Some(scope) = dest_scope { + // Most of the actual breaking is generated by `build_exit_tree`, so the drop + // scheduled in `expr_into_dest` above is only to handle cases like + // break { + // let x = ...; // panics in destructor + // y + // }; + // We unschedule now because we're continuing to the rest of the loop. + self.unschedule_drop(scope, destination.as_local().unwrap()) + }; self.block_context.pop(); } (Some(destination), None) => { @@ -1095,7 +1110,47 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } } - span_bug!(span, "region scope {:?} not in scope to drop {:?}", region_scope, local); + span_bug!( + span, + "region scope {:?} not in scope to drop {:?}\n{:#?}", + region_scope, + local, + self.scopes.scopes + ); + } + + /// Unschedule a drop. Used for `break`, `return` and `match` expressions, + /// where `record_operands_moved` is not powerful enough. + /// + /// The given local is expected to have a value drop scheduled in the given + /// scope and for that drop to be the most recent thing scheduled in that + /// scope. 
+ pub(crate) fn unschedule_drop(&mut self, region_scope: region::Scope, local: Local) { + if !self.local_decls[local].ty.needs_drop(self.tcx, self.param_env) { + return; + } + for scope in self.scopes.scopes.iter_mut().rev() { + scope.invalidate_cache(); + + if scope.region_scope == region_scope { + let drop = scope.drops.pop(); + + match drop { + Some(DropData { local: removed_local, kind: DropKind::Value, .. }) + if removed_local == local => + { + return; + } + _ => bug!( + "found wrong drop, expected value drop of {:?}, found {:?}", + local, + drop, + ), + } + } + } + + bug!("region scope {:?} not in scope to unschedule drop of {:?}", region_scope, local); } /// Indicates that the "local operands" stored in `local` are @@ -1327,6 +1382,22 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { top_scope.drops.clear(); top_scope.invalidate_cache(); } + + /// Unschedules the drop of the return place. + /// + /// If the return type of a function requires drop, then we schedule it + /// in the outermost scope so that it's dropped if there's a panic while + /// we drop any local variables. But we don't want to drop it if we + /// return normally. + pub(crate) fn unschedule_return_place_drop(&mut self) { + assert_eq!(self.scopes.scopes.len(), 1); + assert!( + self.scopes.scopes[0].drops.len() <= 1, + "Found too many drops: {:?}", + self.scopes.scopes[0].drops + ); + self.scopes.scopes[0].drops.clear(); + } } /// Builds drops for `pop_scope` and `leave_top_scope`. @@ -1407,23 +1478,29 @@ impl<'a, 'tcx: 'a> Builder<'a, 'tcx> { /// Build a drop tree for a breakable scope. /// /// If `continue_block` is `Some`, then the tree is for `continue` inside a - /// loop. Otherwise this is for `break` or `return`. + /// loop. Otherwise this is for `break` or `return`. The `DropIdx` is the + /// next drop in the case that the drop tree unwinds. This is needed + /// because the drop of the break destination has already been scheduled + /// but it hasn't been initialized on the `continue` paths. fn build_exit_tree( &mut self, mut drops: DropTree, else_scope: region::Scope, span: Span, - continue_block: Option, + continue_block: Option<(BasicBlock, DropIdx)>, ) -> Option> { let mut blocks = IndexVec::from_elem(None, &drops.drops); - blocks[ROOT_NODE] = continue_block; + blocks[ROOT_NODE] = continue_block.map(|(block, _)| block); drops.build_mir::(&mut self.cfg, &mut blocks); let is_coroutine = self.coroutine.is_some(); // Link the exit drop tree to unwind drop tree. 
if drops.drops.iter().any(|drop_node| drop_node.data.kind == DropKind::Value) { - let unwind_target = self.diverge_cleanup_target(else_scope, span); + let unwind_target = continue_block.map_or_else( + || self.diverge_cleanup_target(else_scope, span), + |(_, unwind_target)| unwind_target, + ); let mut unwind_indices = IndexVec::from_elem_n(unwind_target, 1); for (drop_idx, drop_node) in drops.drops.iter_enumerated().skip(1) { match drop_node.data.kind { diff --git a/library/alloc/src/collections/btree/map/tests.rs b/library/alloc/src/collections/btree/map/tests.rs index ba1f38dcc3e52..1a6684d555362 100644 --- a/library/alloc/src/collections/btree/map/tests.rs +++ b/library/alloc/src/collections/btree/map/tests.rs @@ -2104,6 +2104,7 @@ create_append_test!(test_append_1700, 1700); #[test] #[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")] +#[cfg_attr(bootstrap, ignore = "test requires compiler fix from current nightly")] fn test_append_drop_leak() { let a = CrashTestDummy::new(0); let b = CrashTestDummy::new(1); @@ -2118,7 +2119,7 @@ fn test_append_drop_leak() { catch_unwind(move || left.append(&mut right)).unwrap_err(); assert_eq!(a.dropped(), 1); - assert_eq!(b.dropped(), 1); // should be 2 were it not for Rust issue #47949 + assert_eq!(b.dropped(), 2); assert_eq!(c.dropped(), 2); } diff --git a/tests/mir-opt/box_expr.main.ElaborateDrops.diff b/tests/mir-opt/box_expr.main.ElaborateDrops.diff index ec40fac2894eb..3d6b2f9a0459f 100644 --- a/tests/mir-opt/box_expr.main.ElaborateDrops.diff +++ b/tests/mir-opt/box_expr.main.ElaborateDrops.diff @@ -26,62 +26,57 @@ bb1: { StorageLive(_5); _5 = ShallowInitBox(move _4, S); - (*_5) = S::new() -> [return: bb2, unwind: bb8]; + (*_5) = S::new() -> [return: bb2, unwind: bb7]; } bb2: { _1 = move _5; -- drop(_5) -> [return: bb3, unwind continue]; -+ goto -> bb3; - } - - bb3: { StorageDead(_5); StorageLive(_6); StorageLive(_7); _7 = move _1; - _6 = std::mem::drop::>(move _7) -> [return: bb4, unwind: bb6]; + _6 = std::mem::drop::>(move _7) -> [return: bb3, unwind: bb5]; } - bb4: { + bb3: { StorageDead(_7); StorageDead(_6); _0 = const (); -- drop(_1) -> [return: bb5, unwind continue]; -+ goto -> bb5; +- drop(_1) -> [return: bb4, unwind continue]; ++ goto -> bb4; } - bb5: { + bb4: { StorageDead(_1); return; } + bb5 (cleanup): { +- drop(_7) -> [return: bb6, unwind terminate(cleanup)]; ++ goto -> bb6; + } + bb6 (cleanup): { -- drop(_7) -> [return: bb7, unwind terminate(cleanup)]; -+ goto -> bb7; +- drop(_1) -> [return: bb8, unwind terminate(cleanup)]; ++ goto -> bb8; } bb7 (cleanup): { -- drop(_1) -> [return: bb9, unwind terminate(cleanup)]; -+ goto -> bb9; +- drop(_5) -> [return: bb8, unwind terminate(cleanup)]; ++ goto -> bb10; } bb8 (cleanup): { -- drop(_5) -> [return: bb9, unwind terminate(cleanup)]; -+ goto -> bb11; - } - - bb9 (cleanup): { resume; + } + -+ bb10 (cleanup): { ++ bb9 (cleanup): { + _8 = &mut _5; -+ _9 = as Drop>::drop(move _8) -> [return: bb9, unwind terminate(cleanup)]; ++ _9 = as Drop>::drop(move _8) -> [return: bb8, unwind terminate(cleanup)]; + } + -+ bb11 (cleanup): { -+ goto -> bb10; ++ bb10 (cleanup): { ++ goto -> bb9; } } diff --git a/tests/mir-opt/building/uniform_array_move_out.move_out_by_subslice.built.after.mir b/tests/mir-opt/building/uniform_array_move_out.move_out_by_subslice.built.after.mir index 6d3b2cf291038..aa04dac8162b5 100644 --- a/tests/mir-opt/building/uniform_array_move_out.move_out_by_subslice.built.after.mir +++ 
b/tests/mir-opt/building/uniform_array_move_out.move_out_by_subslice.built.after.mir @@ -26,7 +26,7 @@ fn move_out_by_subslice() -> () { StorageLive(_2); _3 = SizeOf(i32); _4 = AlignOf(i32); - _5 = alloc::alloc::exchange_malloc(move _3, move _4) -> [return: bb1, unwind: bb13]; + _5 = alloc::alloc::exchange_malloc(move _3, move _4) -> [return: bb1, unwind: bb8]; } bb1: { @@ -34,74 +34,54 @@ fn move_out_by_subslice() -> () { _6 = ShallowInitBox(move _5, i32); (*_6) = const 1_i32; _2 = move _6; - drop(_6) -> [return: bb2, unwind: bb12]; - } - - bb2: { StorageDead(_6); StorageLive(_7); _8 = SizeOf(i32); _9 = AlignOf(i32); - _10 = alloc::alloc::exchange_malloc(move _8, move _9) -> [return: bb3, unwind: bb12]; + _10 = alloc::alloc::exchange_malloc(move _8, move _9) -> [return: bb2, unwind: bb7]; } - bb3: { + bb2: { StorageLive(_11); _11 = ShallowInitBox(move _10, i32); (*_11) = const 2_i32; _7 = move _11; - drop(_11) -> [return: bb4, unwind: bb11]; - } - - bb4: { StorageDead(_11); _1 = [move _2, move _7]; - drop(_7) -> [return: bb5, unwind: bb12]; - } - - bb5: { StorageDead(_7); - drop(_2) -> [return: bb6, unwind: bb13]; - } - - bb6: { StorageDead(_2); FakeRead(ForLet(None), _1); PlaceMention(_1); StorageLive(_12); _12 = move _1[0..2]; _0 = const (); - drop(_12) -> [return: bb8, unwind: bb10]; + drop(_12) -> [return: bb4, unwind: bb6]; } - bb7: { + bb3: { FakeRead(ForMatchedPlace(None), _1); unreachable; } - bb8: { + bb4: { StorageDead(_12); - drop(_1) -> [return: bb9, unwind: bb13]; + drop(_1) -> [return: bb5, unwind: bb8]; } - bb9: { + bb5: { StorageDead(_1); return; } - bb10 (cleanup): { - drop(_1) -> [return: bb13, unwind terminate(cleanup)]; - } - - bb11 (cleanup): { - drop(_7) -> [return: bb12, unwind terminate(cleanup)]; + bb6 (cleanup): { + drop(_1) -> [return: bb8, unwind terminate(cleanup)]; } - bb12 (cleanup): { - drop(_2) -> [return: bb13, unwind terminate(cleanup)]; + bb7 (cleanup): { + drop(_2) -> [return: bb8, unwind terminate(cleanup)]; } - bb13 (cleanup): { + bb8 (cleanup): { resume; } } diff --git a/tests/mir-opt/building/uniform_array_move_out.move_out_from_end.built.after.mir b/tests/mir-opt/building/uniform_array_move_out.move_out_from_end.built.after.mir index 003b90a912d25..0727d26e34cac 100644 --- a/tests/mir-opt/building/uniform_array_move_out.move_out_from_end.built.after.mir +++ b/tests/mir-opt/building/uniform_array_move_out.move_out_from_end.built.after.mir @@ -26,7 +26,7 @@ fn move_out_from_end() -> () { StorageLive(_2); _3 = SizeOf(i32); _4 = AlignOf(i32); - _5 = alloc::alloc::exchange_malloc(move _3, move _4) -> [return: bb1, unwind: bb13]; + _5 = alloc::alloc::exchange_malloc(move _3, move _4) -> [return: bb1, unwind: bb8]; } bb1: { @@ -34,74 +34,54 @@ fn move_out_from_end() -> () { _6 = ShallowInitBox(move _5, i32); (*_6) = const 1_i32; _2 = move _6; - drop(_6) -> [return: bb2, unwind: bb12]; - } - - bb2: { StorageDead(_6); StorageLive(_7); _8 = SizeOf(i32); _9 = AlignOf(i32); - _10 = alloc::alloc::exchange_malloc(move _8, move _9) -> [return: bb3, unwind: bb12]; + _10 = alloc::alloc::exchange_malloc(move _8, move _9) -> [return: bb2, unwind: bb7]; } - bb3: { + bb2: { StorageLive(_11); _11 = ShallowInitBox(move _10, i32); (*_11) = const 2_i32; _7 = move _11; - drop(_11) -> [return: bb4, unwind: bb11]; - } - - bb4: { StorageDead(_11); _1 = [move _2, move _7]; - drop(_7) -> [return: bb5, unwind: bb12]; - } - - bb5: { StorageDead(_7); - drop(_2) -> [return: bb6, unwind: bb13]; - } - - bb6: { StorageDead(_2); FakeRead(ForLet(None), _1); PlaceMention(_1); 
StorageLive(_12); _12 = move _1[1 of 2]; _0 = const (); - drop(_12) -> [return: bb8, unwind: bb10]; + drop(_12) -> [return: bb4, unwind: bb6]; } - bb7: { + bb3: { FakeRead(ForMatchedPlace(None), _1); unreachable; } - bb8: { + bb4: { StorageDead(_12); - drop(_1) -> [return: bb9, unwind: bb13]; + drop(_1) -> [return: bb5, unwind: bb8]; } - bb9: { + bb5: { StorageDead(_1); return; } - bb10 (cleanup): { - drop(_1) -> [return: bb13, unwind terminate(cleanup)]; - } - - bb11 (cleanup): { - drop(_7) -> [return: bb12, unwind terminate(cleanup)]; + bb6 (cleanup): { + drop(_1) -> [return: bb8, unwind terminate(cleanup)]; } - bb12 (cleanup): { - drop(_2) -> [return: bb13, unwind terminate(cleanup)]; + bb7 (cleanup): { + drop(_2) -> [return: bb8, unwind terminate(cleanup)]; } - bb13 (cleanup): { + bb8 (cleanup): { resume; } } diff --git a/tests/mir-opt/inline/inline_diverging.h.Inline.panic-unwind.diff b/tests/mir-opt/inline/inline_diverging.h.Inline.panic-unwind.diff index bc4f2d24df0b5..ef1142e71a727 100644 --- a/tests/mir-opt/inline/inline_diverging.h.Inline.panic-unwind.diff +++ b/tests/mir-opt/inline/inline_diverging.h.Inline.panic-unwind.diff @@ -29,14 +29,14 @@ + StorageLive(_4); + StorageLive(_3); + _3 = &_2; -+ _4 = ! {sleep} as Fn<()>>::call(move _3, const ()) -> [return: bb1, unwind: bb5]; ++ _4 = ! {sleep} as Fn<()>>::call(move _3, const ()) -> [return: bb1, unwind: bb6]; + } + + bb1: { + StorageDead(_3); + StorageLive(_5); + _5 = &_2; -+ _6 = ! {sleep} as Fn<()>>::call(move _5, const ()) -> [return: bb2, unwind: bb4]; ++ _6 = ! {sleep} as Fn<()>>::call(move _5, const ()) -> [return: bb2, unwind: bb5]; + } + + bb2: { @@ -46,7 +46,7 @@ + _1 = (move _7, _6); + StorageDead(_7); + StorageDead(_4); -+ drop(_2) -> [return: bb3, unwind continue]; ++ drop(_2) -> [return: bb3, unwind: bb4]; + } + + bb3: { @@ -54,14 +54,18 @@ + } + + bb4 (cleanup): { -+ drop(_4) -> [return: bb5, unwind terminate(cleanup)]; ++ drop(_1) -> [return: bb7, unwind terminate(cleanup)]; + } + + bb5 (cleanup): { -+ drop(_2) -> [return: bb6, unwind terminate(cleanup)]; ++ drop(_4) -> [return: bb6, unwind terminate(cleanup)]; + } + + bb6 (cleanup): { ++ drop(_2) -> [return: bb7, unwind terminate(cleanup)]; ++ } ++ ++ bb7 (cleanup): { + resume; } } diff --git a/tests/mir-opt/issue_62289.test.ElaborateDrops.before.panic-abort.mir b/tests/mir-opt/issue_62289.test.ElaborateDrops.before.panic-abort.mir index 3104baa5fdbd4..645e3468be4f2 100644 --- a/tests/mir-opt/issue_62289.test.ElaborateDrops.before.panic-abort.mir +++ b/tests/mir-opt/issue_62289.test.ElaborateDrops.before.panic-abort.mir @@ -29,7 +29,7 @@ fn test() -> Option> { StorageLive(_1); _2 = SizeOf(u32); _3 = AlignOf(u32); - _4 = alloc::alloc::exchange_malloc(move _2, move _3) -> [return: bb1, unwind: bb13]; + _4 = alloc::alloc::exchange_malloc(move _2, move _3) -> [return: bb1, unwind: bb11]; } bb1: { @@ -38,7 +38,7 @@ fn test() -> Option> { StorageLive(_6); StorageLive(_7); _7 = Option::::None; - _6 = as Try>::branch(move _7) -> [return: bb2, unwind: bb12]; + _6 = as Try>::branch(move _7) -> [return: bb2, unwind: bb10]; } bb2: { @@ -58,7 +58,11 @@ fn test() -> Option> { (*_5) = _12; StorageDead(_12); _1 = move _5; - drop(_5) -> [return: bb7, unwind: bb11]; + StorageDead(_5); + _0 = Option::>::Some(move _1); + StorageDead(_1); + StorageDead(_6); + goto -> bb8; } bb5: { @@ -66,47 +70,35 @@ fn test() -> Option> { _9 = ((_6 as Break).0: std::option::Option); StorageLive(_11); _11 = _9; - _0 = > as FromResidual>>::from_residual(move _11) -> [return: bb6, unwind: bb12]; + _0 
= > as FromResidual>>::from_residual(move _11) -> [return: bb6, unwind: bb10]; } bb6: { StorageDead(_11); StorageDead(_9); - drop(_5) -> [return: bb9, unwind: bb13]; + drop(_5) -> [return: bb7, unwind: bb9]; } bb7: { - StorageDead(_5); - _0 = Option::>::Some(move _1); - drop(_1) -> [return: bb8, unwind: bb13]; - } - - bb8: { - StorageDead(_1); - StorageDead(_6); - goto -> bb10; - } - - bb9: { StorageDead(_5); StorageDead(_1); StorageDead(_6); - goto -> bb10; + goto -> bb8; } - bb10: { + bb8: { return; } - bb11 (cleanup): { - drop(_1) -> [return: bb13, unwind terminate(cleanup)]; + bb9 (cleanup): { + drop(_0) -> [return: bb11, unwind terminate(cleanup)]; } - bb12 (cleanup): { - drop(_5) -> [return: bb13, unwind terminate(cleanup)]; + bb10 (cleanup): { + drop(_5) -> [return: bb11, unwind terminate(cleanup)]; } - bb13 (cleanup): { + bb11 (cleanup): { resume; } } diff --git a/tests/mir-opt/issue_62289.test.ElaborateDrops.before.panic-unwind.mir b/tests/mir-opt/issue_62289.test.ElaborateDrops.before.panic-unwind.mir index da33c83011516..833619a6320aa 100644 --- a/tests/mir-opt/issue_62289.test.ElaborateDrops.before.panic-unwind.mir +++ b/tests/mir-opt/issue_62289.test.ElaborateDrops.before.panic-unwind.mir @@ -38,7 +38,7 @@ fn test() -> Option> { StorageLive(_6); StorageLive(_7); _7 = Option::::None; - _6 = as Try>::branch(move _7) -> [return: bb2, unwind: bb12]; + _6 = as Try>::branch(move _7) -> [return: bb2, unwind: bb10]; } bb2: { @@ -58,7 +58,11 @@ fn test() -> Option> { (*_5) = _12; StorageDead(_12); _1 = move _5; - drop(_5) -> [return: bb7, unwind: bb11]; + StorageDead(_5); + _0 = Option::>::Some(move _1); + StorageDead(_1); + StorageDead(_6); + goto -> bb8; } bb5: { @@ -66,47 +70,35 @@ fn test() -> Option> { _9 = ((_6 as Break).0: std::option::Option); StorageLive(_11); _11 = _9; - _0 = > as FromResidual>>::from_residual(move _11) -> [return: bb6, unwind: bb12]; + _0 = > as FromResidual>>::from_residual(move _11) -> [return: bb6, unwind: bb10]; } bb6: { StorageDead(_11); StorageDead(_9); - drop(_5) -> [return: bb9, unwind continue]; + drop(_5) -> [return: bb7, unwind: bb9]; } bb7: { - StorageDead(_5); - _0 = Option::>::Some(move _1); - drop(_1) -> [return: bb8, unwind continue]; - } - - bb8: { - StorageDead(_1); - StorageDead(_6); - goto -> bb10; - } - - bb9: { StorageDead(_5); StorageDead(_1); StorageDead(_6); - goto -> bb10; + goto -> bb8; } - bb10: { + bb8: { return; } - bb11 (cleanup): { - drop(_1) -> [return: bb13, unwind terminate(cleanup)]; + bb9 (cleanup): { + drop(_0) -> [return: bb11, unwind terminate(cleanup)]; } - bb12 (cleanup): { - drop(_5) -> [return: bb13, unwind terminate(cleanup)]; + bb10 (cleanup): { + drop(_5) -> [return: bb11, unwind terminate(cleanup)]; } - bb13 (cleanup): { + bb11 (cleanup): { resume; } } diff --git a/tests/mir-opt/issue_91633.foo.built.after.mir b/tests/mir-opt/issue_91633.foo.built.after.mir index a66769f0d112e..20bd33d22a937 100644 --- a/tests/mir-opt/issue_91633.foo.built.after.mir +++ b/tests/mir-opt/issue_91633.foo.built.after.mir @@ -19,12 +19,12 @@ fn foo(_1: Box<[T]>) -> T { _4 = const 0_usize; _5 = Len((*_1)); _6 = Lt(_4, _5); - assert(move _6, "index out of bounds: the length is {} but the index is {}", move _5, _4) -> [success: bb1, unwind: bb5]; + assert(move _6, "index out of bounds: the length is {} but the index is {}", move _5, _4) -> [success: bb1, unwind: bb7]; } bb1: { _3 = &(*_1)[_4]; - _2 = ::clone(move _3) -> [return: bb2, unwind: bb5]; + _2 = ::clone(move _3) -> [return: bb2, unwind: bb7]; } bb2: { @@ -49,6 +49,14 @@ fn 
foo(_1: Box<[T]>) -> T { } bb6 (cleanup): { + drop(_0) -> [return: bb8, unwind terminate(cleanup)]; + } + + bb7 (cleanup): { + drop(_1) -> [return: bb8, unwind terminate(cleanup)]; + } + + bb8 (cleanup): { resume; } } diff --git a/tests/mir-opt/sroa/structs.dropping.ScalarReplacementOfAggregates.diff b/tests/mir-opt/sroa/structs.dropping.ScalarReplacementOfAggregates.diff index bc38a219ef3c5..8e549bc21884a 100644 --- a/tests/mir-opt/sroa/structs.dropping.ScalarReplacementOfAggregates.diff +++ b/tests/mir-opt/sroa/structs.dropping.ScalarReplacementOfAggregates.diff @@ -23,22 +23,22 @@ StorageDead(_4); StorageDead(_3); _1 = move (_2.1: Tag); - drop(_1) -> [return: bb1, unwind unreachable]; + drop(_1) -> [return: bb3, unwind unreachable]; } bb1: { - drop((_2.0: Tag)) -> [return: bb3, unwind unreachable]; - } - - bb2: { StorageDead(_2); StorageDead(_1); _0 = const (); return; } + bb2: { + drop((_2.2: Tag)) -> [return: bb1, unwind unreachable]; + } + bb3: { - drop((_2.2: Tag)) -> [return: bb2, unwind unreachable]; + drop((_2.0: Tag)) -> [return: bb2, unwind unreachable]; } } diff --git a/tests/ui/deriving/deriving-with-helper.rs b/tests/ui/deriving/deriving-with-helper.rs index c71d553c8927b..6c05cb4466c02 100644 --- a/tests/ui/deriving/deriving-with-helper.rs +++ b/tests/ui/deriving/deriving-with-helper.rs @@ -29,6 +29,9 @@ mod default { #[lang = "sized"] trait Sized {} +#[lang = "copy"] +trait Copy: Sized {} + #[derive(Default)] enum S { #[default] // OK diff --git a/tests/ui/drop/dynamic-drop-async.rs b/tests/ui/drop/dynamic-drop-async.rs index e7a32d3c24e92..6041257946b91 100644 --- a/tests/ui/drop/dynamic-drop-async.rs +++ b/tests/ui/drop/dynamic-drop-async.rs @@ -43,6 +43,7 @@ impl Future for Defer { /// The `failing_op`-th operation will panic. struct Allocator { data: RefCell>, + name: &'static str, failing_op: usize, cur_ops: Cell, } @@ -54,23 +55,28 @@ impl Drop for Allocator { fn drop(&mut self) { let data = self.data.borrow(); if data.iter().any(|d| *d) { - panic!("missing free: {:?}", data); + panic!("missing free in {:?}: {:?}", self.name, data); } } } impl Allocator { - fn new(failing_op: usize) -> Self { - Allocator { failing_op, cur_ops: Cell::new(0), data: RefCell::new(vec![]) } + fn new(failing_op: usize, name: &'static str) -> Self { + Allocator { + failing_op, + name, + cur_ops: Cell::new(0), + data: RefCell::new(vec![]), + } } - fn alloc(&self) -> impl Future> + '_ { + fn alloc(self: &Rc) -> impl Future + 'static { self.fallible_operation(); let mut data = self.data.borrow_mut(); let addr = data.len(); data.push(true); - Defer { ready: false, value: Some(Ptr(addr, self)) } + Defer { ready: false, value: Some(Ptr(addr, self.clone())) } } fn fallible_operation(&self) { self.cur_ops.set(self.cur_ops.get() + 1); @@ -83,11 +89,11 @@ impl Allocator { // Type that tracks whether it was dropped and can panic when it's created or // destroyed. 
-struct Ptr<'a>(usize, &'a Allocator); -impl<'a> Drop for Ptr<'a> { +struct Ptr(usize, Rc); +impl Drop for Ptr { fn drop(&mut self) { match self.1.data.borrow_mut()[self.0] { - false => panic!("double free at index {:?}", self.0), + false => panic!("double free in {:?} at index {:?}", self.1.name, self.0), ref mut d => *d = false, } @@ -111,7 +117,7 @@ async fn dynamic_drop(a: Rc, c: bool) { }; } -struct TwoPtrs<'a>(Ptr<'a>, Ptr<'a>); +struct TwoPtrs(Ptr, Ptr); async fn struct_dynamic_drop(a: Rc, c0: bool, c1: bool, c: bool) { for i in 0..2 { let x; @@ -232,21 +238,62 @@ async fn move_ref_pattern(a: Rc) { a.alloc().await; } -fn run_test(cx: &mut Context<'_>, ref f: F) +async fn panic_after_return(a: Rc, c: bool) -> (Ptr,) { + a.alloc().await; + let p = a.alloc().await; + if c { + a.alloc().await; + let q = a.alloc().await; + // We use a return type that isn't used anywhere else to make sure that + // the return place doesn't incorrectly end up in the generator state. + return (a.alloc().await,); + } + (a.alloc().await,) +} + + +async fn panic_after_init_by_loop(a: Rc) { + a.alloc().await; + let p = a.alloc().await; + let q = loop { + a.alloc().await; + let r = a.alloc().await; + break a.alloc().await; + }; +} + +async fn panic_after_init_by_match_with_bindings_and_guard(a: Rc, b: bool) { + a.alloc().await; + let p = a.alloc().await; + let q = match a.alloc().await { + ref _x if b => { + a.alloc().await; + let r = a.alloc().await; + a.alloc().await + } + _x => { + a.alloc().await; + let r = a.alloc().await; + a.alloc().await + }, + }; +} + +fn run_test(cx: &mut Context<'_>, ref f: F, name: &'static str) where F: Fn(Rc) -> G, - G: Future, + G: Future, { for polls in 0.. { // Run without any panics to find which operations happen after the // penultimate `poll`. - let first_alloc = Rc::new(Allocator::new(usize::MAX)); + let first_alloc = Rc::new(Allocator::new(usize::MAX, name)); let mut fut = Box::pin(f(first_alloc.clone())); let mut ops_before_last_poll = 0; let mut completed = false; for _ in 0..polls { ops_before_last_poll = first_alloc.cur_ops.get(); - if let Poll::Ready(()) = fut.as_mut().poll(cx) { + if let Poll::Ready(_) = fut.as_mut().poll(cx) { completed = true; } } @@ -255,7 +302,7 @@ where // Start at `ops_before_last_poll` so that we will always be able to // `poll` the expected number of times. for failing_op in ops_before_last_poll..first_alloc.cur_ops.get() { - let alloc = Rc::new(Allocator::new(failing_op + 1)); + let alloc = Rc::new(Allocator::new(failing_op + 1, name)); let f = &f; let cx = &mut *cx; let result = panic::catch_unwind(panic::AssertUnwindSafe(move || { @@ -285,48 +332,58 @@ fn clone_waker(data: *const ()) -> RawWaker { RawWaker::new(data, &RawWakerVTable::new(clone_waker, drop, drop, drop)) } +macro_rules! 
run_test { + ($ctxt:expr, $e:expr) => { run_test($ctxt, $e, stringify!($e)); }; +} + fn main() { let waker = unsafe { Waker::from_raw(clone_waker(ptr::null())) }; let context = &mut Context::from_waker(&waker); - run_test(context, |a| dynamic_init(a, false)); - run_test(context, |a| dynamic_init(a, true)); - run_test(context, |a| dynamic_drop(a, false)); - run_test(context, |a| dynamic_drop(a, true)); - - run_test(context, |a| assignment(a, false, false)); - run_test(context, |a| assignment(a, false, true)); - run_test(context, |a| assignment(a, true, false)); - run_test(context, |a| assignment(a, true, true)); - - run_test(context, |a| array_simple(a)); - run_test(context, |a| vec_simple(a)); - run_test(context, |a| vec_unreachable(a)); - - run_test(context, |a| struct_dynamic_drop(a, false, false, false)); - run_test(context, |a| struct_dynamic_drop(a, false, false, true)); - run_test(context, |a| struct_dynamic_drop(a, false, true, false)); - run_test(context, |a| struct_dynamic_drop(a, false, true, true)); - run_test(context, |a| struct_dynamic_drop(a, true, false, false)); - run_test(context, |a| struct_dynamic_drop(a, true, false, true)); - run_test(context, |a| struct_dynamic_drop(a, true, true, false)); - run_test(context, |a| struct_dynamic_drop(a, true, true, true)); - - run_test(context, |a| field_assignment(a, false)); - run_test(context, |a| field_assignment(a, true)); - - run_test(context, |a| mixed_drop_and_nondrop(a)); - - run_test(context, |a| slice_pattern_one_of(a, 0)); - run_test(context, |a| slice_pattern_one_of(a, 1)); - run_test(context, |a| slice_pattern_one_of(a, 2)); - run_test(context, |a| slice_pattern_one_of(a, 3)); - - run_test(context, |a| subslice_pattern_from_end_with_drop(a, true, true)); - run_test(context, |a| subslice_pattern_from_end_with_drop(a, true, false)); - run_test(context, |a| subslice_pattern_from_end_with_drop(a, false, true)); - run_test(context, |a| subslice_pattern_from_end_with_drop(a, false, false)); - run_test(context, |a| subslice_pattern_reassign(a)); - - run_test(context, |a| move_ref_pattern(a)); + run_test!(context, |a| dynamic_init(a, false)); + run_test!(context, |a| dynamic_init(a, true)); + run_test!(context, |a| dynamic_drop(a, false)); + run_test!(context, |a| dynamic_drop(a, true)); + + run_test!(context, |a| assignment(a, false, false)); + run_test!(context, |a| assignment(a, false, true)); + run_test!(context, |a| assignment(a, true, false)); + run_test!(context, |a| assignment(a, true, true)); + + run_test!(context, |a| array_simple(a)); + run_test!(context, |a| vec_simple(a)); + run_test!(context, |a| vec_unreachable(a)); + + run_test!(context, |a| struct_dynamic_drop(a, false, false, false)); + run_test!(context, |a| struct_dynamic_drop(a, false, false, true)); + run_test!(context, |a| struct_dynamic_drop(a, false, true, false)); + run_test!(context, |a| struct_dynamic_drop(a, false, true, true)); + run_test!(context, |a| struct_dynamic_drop(a, true, false, false)); + run_test!(context, |a| struct_dynamic_drop(a, true, false, true)); + run_test!(context, |a| struct_dynamic_drop(a, true, true, false)); + run_test!(context, |a| struct_dynamic_drop(a, true, true, true)); + + run_test!(context, |a| field_assignment(a, false)); + run_test!(context, |a| field_assignment(a, true)); + + run_test!(context, |a| mixed_drop_and_nondrop(a)); + + run_test!(context, |a| slice_pattern_one_of(a, 0)); + run_test!(context, |a| slice_pattern_one_of(a, 1)); + run_test!(context, |a| slice_pattern_one_of(a, 2)); + run_test!(context, |a| 
slice_pattern_one_of(a, 3)); + + run_test!(context, |a| subslice_pattern_from_end_with_drop(a, true, true)); + run_test!(context, |a| subslice_pattern_from_end_with_drop(a, true, false)); + run_test!(context, |a| subslice_pattern_from_end_with_drop(a, false, true)); + run_test!(context, |a| subslice_pattern_from_end_with_drop(a, false, false)); + run_test!(context, |a| subslice_pattern_reassign(a)); + + run_test!(context, |a| move_ref_pattern(a)); + + run_test!(context, |a| panic_after_return(a, false)); + run_test!(context, |a| panic_after_return(a, true)); + run_test!(context, |a| panic_after_init_by_loop(a)); + run_test!(context, |a| panic_after_init_by_match_with_bindings_and_guard(a, false)); + run_test!(context, |a| panic_after_init_by_match_with_bindings_and_guard(a, true)); } diff --git a/tests/ui/drop/dynamic-drop.rs b/tests/ui/drop/dynamic-drop.rs index b695b5702d943..3375d45306504 100644 --- a/tests/ui/drop/dynamic-drop.rs +++ b/tests/ui/drop/dynamic-drop.rs @@ -17,6 +17,7 @@ struct InjectedFailure; struct Allocator { data: RefCell>, + name: &'static str, failing_op: usize, cur_ops: Cell, } @@ -28,17 +29,18 @@ impl Drop for Allocator { fn drop(&mut self) { let data = self.data.borrow(); if data.iter().any(|d| *d) { - panic!("missing free: {:?}", data); + panic!("missing free in {:?}: {:?}", self.name, data); } } } impl Allocator { - fn new(failing_op: usize) -> Self { + fn new(failing_op: usize, name: &'static str) -> Self { Allocator { failing_op: failing_op, cur_ops: Cell::new(0), - data: RefCell::new(vec![]) + data: RefCell::new(vec![]), + name, } } fn alloc(&self) -> Ptr<'_> { @@ -53,33 +55,17 @@ impl Allocator { data.push(true); Ptr(addr, self) } - // FIXME(#47949) Any use of this indicates a bug in rustc: we should never - // be leaking values in the cases here. - // - // Creates a `Ptr<'_>` and checks that the allocated value is leaked if the - // `failing_op` is in the list of exception. - fn alloc_leaked(&self, exceptions: Vec) -> Ptr<'_> { - let ptr = self.alloc(); - - if exceptions.iter().any(|operation| *operation == self.failing_op) { - let mut data = self.data.borrow_mut(); - data[ptr.0] = false; - } - ptr - } } struct Ptr<'a>(usize, &'a Allocator); impl<'a> Drop for Ptr<'a> { fn drop(&mut self) { match self.1.data.borrow_mut()[self.0] { - false => { - panic!("double free at index {:?}", self.0) - } - ref mut d => *d = false + false => panic!("double free in {:?} at index {:?}", self.1.name, self.0), + ref mut d => *d = false, } - self.1.cur_ops.set(self.1.cur_ops.get()+1); + self.1.cur_ops.set(self.1.cur_ops.get() + 1); if self.1.cur_ops.get() == self.1.failing_op { panic::panic_any(InjectedFailure); @@ -205,19 +191,19 @@ fn vec_unreachable(a: &Allocator) { } fn slice_pattern_first(a: &Allocator) { - let[_x, ..] = [a.alloc(), a.alloc(), a.alloc()]; + let [_x, ..] 
= [a.alloc(), a.alloc(), a.alloc()]; } fn slice_pattern_middle(a: &Allocator) { - let[_, _x, _] = [a.alloc(), a.alloc(), a.alloc()]; + let [_, _x, _] = [a.alloc(), a.alloc(), a.alloc()]; } fn slice_pattern_two(a: &Allocator) { - let[_x, _, _y, _] = [a.alloc(), a.alloc(), a.alloc(), a.alloc()]; + let [_x, _, _y, _] = [a.alloc(), a.alloc(), a.alloc(), a.alloc()]; } fn slice_pattern_last(a: &Allocator) { - let[.., _y] = [a.alloc(), a.alloc(), a.alloc(), a.alloc()]; + let [.., _y] = [a.alloc(), a.alloc(), a.alloc(), a.alloc()]; } fn slice_pattern_one_of(a: &Allocator, i: usize) { @@ -234,9 +220,9 @@ fn slice_pattern_one_of(a: &Allocator, i: usize) { fn subslice_pattern_from_end(a: &Allocator, arg: bool) { let a = [a.alloc(), a.alloc(), a.alloc()]; if arg { - let[.., _x, _] = a; + let [.., _x, _] = a; } else { - let[_, _y @ ..] = a; + let [_, _y @ ..] = a; } } @@ -248,43 +234,43 @@ fn subslice_pattern_from_end_with_drop(a: &Allocator, arg: bool, arg2: bool) { } if arg { - let[.., _x, _] = a; + let [.., _x, _] = a; } else { - let[_, _y @ ..] = a; + let [_, _y @ ..] = a; } } fn slice_pattern_reassign(a: &Allocator) { let mut ar = [a.alloc(), a.alloc()]; - let[_, _x] = ar; + let [_, _x] = ar; ar = [a.alloc(), a.alloc()]; - let[.., _y] = ar; + let [.., _y] = ar; } fn subslice_pattern_reassign(a: &Allocator) { let mut ar = [a.alloc(), a.alloc(), a.alloc()]; - let[_, _, _x] = ar; + let [_, _, _x] = ar; ar = [a.alloc(), a.alloc(), a.alloc()]; - let[_, _y @ ..] = ar; + let [_, _y @ ..] = ar; } fn index_field_mixed_ends(a: &Allocator) { let ar = [(a.alloc(), a.alloc()), (a.alloc(), a.alloc())]; - let[(_x, _), ..] = ar; - let[(_, _y), _] = ar; - let[_, (_, _w)] = ar; - let[.., (_z, _)] = ar; + let [(_x, _), ..] = ar; + let [(_, _y), _] = ar; + let [_, (_, _w)] = ar; + let [.., (_z, _)] = ar; } fn subslice_mixed_min_lengths(a: &Allocator, c: i32) { let ar = [(a.alloc(), a.alloc()), (a.alloc(), a.alloc())]; match c { - 0 => { let[_x, ..] = ar; } - 1 => { let[_x, _, ..] = ar; } - 2 => { let[_x, _] = ar; } - 3 => { let[(_x, _), _, ..] = ar; } - 4 => { let[.., (_x, _)] = ar; } - 5 => { let[.., (_x, _), _] = ar; } + 0 => { let [_x, ..] = ar; } + 1 => { let [_x, _, ..] = ar; } + 2 => { let [_x, _] = ar; } + 3 => { let [(_x, _), _, ..] = ar; } + 4 => { let [.., (_x, _)] = ar; } + 5 => { let [.., (_x, _), _] = ar; } 6 => { let [_y @ ..] = ar; } _ => { let [_y @ .., _] = ar; } } @@ -355,87 +341,160 @@ fn if_let_guard_2(a: &Allocator, num: i32) { } fn panic_after_return(a: &Allocator) -> Ptr<'_> { - // Panic in the drop of `p` or `q` can leak - let exceptions = vec![8, 9]; a.alloc(); let p = a.alloc(); { a.alloc(); let p = a.alloc(); - // FIXME (#47949) We leak values when we panic in a destructor after - // evaluating an expression with `rustc_mir::build::Builder::into`. 
- a.alloc_leaked(exceptions) + a.alloc() } } fn panic_after_return_expr(a: &Allocator) -> Ptr<'_> { - // Panic in the drop of `p` or `q` can leak - let exceptions = vec![8, 9]; a.alloc(); let p = a.alloc(); { a.alloc(); let q = a.alloc(); - // FIXME (#47949) - return a.alloc_leaked(exceptions); + return a.alloc(); } } fn panic_after_init(a: &Allocator) { - // Panic in the drop of `r` can leak - let exceptions = vec![8]; a.alloc(); let p = a.alloc(); let q = { a.alloc(); let r = a.alloc(); - // FIXME (#47949) - a.alloc_leaked(exceptions) + a.alloc() }; } fn panic_after_init_temp(a: &Allocator) { - // Panic in the drop of `r` can leak - let exceptions = vec![8]; a.alloc(); let p = a.alloc(); { a.alloc(); let r = a.alloc(); - // FIXME (#47949) - a.alloc_leaked(exceptions) + a.alloc() }; } fn panic_after_init_by_loop(a: &Allocator) { - // Panic in the drop of `r` can leak - let exceptions = vec![8]; a.alloc(); let p = a.alloc(); let q = loop { a.alloc(); let r = a.alloc(); - // FIXME (#47949) - break a.alloc_leaked(exceptions); + break a.alloc(); + }; +} + +fn panic_after_init_by_match(a: &Allocator, b: bool) { + a.alloc(); + let p = a.alloc(); + let _ = loop { + let q = match b { + true => { + a.alloc(); + let r = a.alloc(); + a.alloc() + } + false => { + a.alloc(); + let r = a.alloc(); + break a.alloc(); + } + }; + return; }; } -fn run_test(mut f: F) - where F: FnMut(&Allocator) +fn panic_after_init_by_match_with_guard(a: &Allocator, b: bool) { + a.alloc(); + let p = a.alloc(); + let q = match a.alloc() { + _ if b => { + a.alloc(); + let r = a.alloc(); + a.alloc() + } + _ => { + a.alloc(); + let r = a.alloc(); + a.alloc() + } + }; +} + +fn panic_after_init_by_match_with_bindings_and_guard(a: &Allocator, b: bool) { + a.alloc(); + let p = a.alloc(); + let q = match a.alloc() { + _x if b => { + a.alloc(); + let r = a.alloc(); + a.alloc() + } + _x => { + a.alloc(); + let r = a.alloc(); + a.alloc() + } + }; +} + +fn panic_after_init_by_match_with_ref_bindings_and_guard(a: &Allocator, b: bool) { + a.alloc(); + let p = a.alloc(); + let q = match a.alloc() { + ref _x if b => { + a.alloc(); + let r = a.alloc(); + a.alloc() + } + ref _x => { + a.alloc(); + let r = a.alloc(); + a.alloc() + } + }; +} + +fn panic_after_init_by_break_if(a: &Allocator, b: bool) { + a.alloc(); + let p = a.alloc(); + let q = loop { + let r = a.alloc(); + break if b { + let s = a.alloc(); + a.alloc() + } else { + a.alloc() + }; + }; +} + +fn run_test(mut f: F, name: &'static str) +where + F: FnMut(&Allocator), { - let first_alloc = Allocator::new(usize::MAX); + let first_alloc = Allocator::new(usize::MAX, name); f(&first_alloc); - for failing_op in 1..first_alloc.cur_ops.get()+1 { - let alloc = Allocator::new(failing_op); + for failing_op in 1..first_alloc.cur_ops.get() + 1 { + let alloc = Allocator::new(failing_op, name); let alloc = &alloc; let f = panic::AssertUnwindSafe(&mut f); let result = panic::catch_unwind(move || { f.0(alloc); }); match result { - Ok(..) => panic!("test executed {} ops but now {}", - first_alloc.cur_ops.get(), alloc.cur_ops.get()), + Ok(..) 
=> panic!( + "test executed {} ops but now {}", + first_alloc.cur_ops.get(), + alloc.cur_ops.get() + ), Err(e) => { if e.downcast_ref::().is_none() { panic::resume_unwind(e); @@ -445,108 +504,125 @@ fn run_test(mut f: F) } } -fn run_test_nopanic(mut f: F) - where F: FnMut(&Allocator) +fn run_test_nopanic(mut f: F, name: &'static str) +where + F: FnMut(&Allocator), { - let first_alloc = Allocator::new(usize::MAX); + let first_alloc = Allocator::new(usize::MAX, name); f(&first_alloc); } +macro_rules! run_test { + ($e:expr) => { + run_test($e, stringify!($e)); + }; +} + fn main() { - run_test(|a| dynamic_init(a, false)); - run_test(|a| dynamic_init(a, true)); - run_test(|a| dynamic_drop(a, false)); - run_test(|a| dynamic_drop(a, true)); - - run_test(|a| assignment2(a, false, false)); - run_test(|a| assignment2(a, false, true)); - run_test(|a| assignment2(a, true, false)); - run_test(|a| assignment2(a, true, true)); - - run_test(|a| assignment1(a, false)); - run_test(|a| assignment1(a, true)); - - run_test(|a| array_simple(a)); - run_test(|a| vec_simple(a)); - run_test(|a| vec_unreachable(a)); - - run_test(|a| struct_dynamic_drop(a, false, false, false)); - run_test(|a| struct_dynamic_drop(a, false, false, true)); - run_test(|a| struct_dynamic_drop(a, false, true, false)); - run_test(|a| struct_dynamic_drop(a, false, true, true)); - run_test(|a| struct_dynamic_drop(a, true, false, false)); - run_test(|a| struct_dynamic_drop(a, true, false, true)); - run_test(|a| struct_dynamic_drop(a, true, true, false)); - run_test(|a| struct_dynamic_drop(a, true, true, true)); - - run_test(|a| field_assignment(a, false)); - run_test(|a| field_assignment(a, true)); - - run_test(|a| coroutine(a, 0)); - run_test(|a| coroutine(a, 1)); - run_test(|a| coroutine(a, 2)); - run_test(|a| coroutine(a, 3)); - - run_test(|a| mixed_drop_and_nondrop(a)); - - run_test(|a| slice_pattern_first(a)); - run_test(|a| slice_pattern_middle(a)); - run_test(|a| slice_pattern_two(a)); - run_test(|a| slice_pattern_last(a)); - run_test(|a| slice_pattern_one_of(a, 0)); - run_test(|a| slice_pattern_one_of(a, 1)); - run_test(|a| slice_pattern_one_of(a, 2)); - run_test(|a| slice_pattern_one_of(a, 3)); - - run_test(|a| subslice_pattern_from_end(a, true)); - run_test(|a| subslice_pattern_from_end(a, false)); - run_test(|a| subslice_pattern_from_end_with_drop(a, true, true)); - run_test(|a| subslice_pattern_from_end_with_drop(a, true, false)); - run_test(|a| subslice_pattern_from_end_with_drop(a, false, true)); - run_test(|a| subslice_pattern_from_end_with_drop(a, false, false)); - run_test(|a| slice_pattern_reassign(a)); - run_test(|a| subslice_pattern_reassign(a)); - - run_test(|a| index_field_mixed_ends(a)); - run_test(|a| subslice_mixed_min_lengths(a, 0)); - run_test(|a| subslice_mixed_min_lengths(a, 1)); - run_test(|a| subslice_mixed_min_lengths(a, 2)); - run_test(|a| subslice_mixed_min_lengths(a, 3)); - run_test(|a| subslice_mixed_min_lengths(a, 4)); - run_test(|a| subslice_mixed_min_lengths(a, 5)); - run_test(|a| subslice_mixed_min_lengths(a, 6)); - run_test(|a| subslice_mixed_min_lengths(a, 7)); - - run_test(|a| move_ref_pattern(a)); - - run_test(|a| if_let_guard(a, true, 0)); - run_test(|a| if_let_guard(a, true, 1)); - run_test(|a| if_let_guard(a, true, 2)); - run_test(|a| if_let_guard(a, false, 0)); - run_test(|a| if_let_guard(a, false, 1)); - run_test(|a| if_let_guard(a, false, 2)); - run_test(|a| if_let_guard_2(a, 0)); - run_test(|a| if_let_guard_2(a, 1)); - run_test(|a| if_let_guard_2(a, 2)); - - run_test(|a| { + run_test!(|a| 
dynamic_init(a, false)); + run_test!(|a| dynamic_init(a, true)); + run_test!(|a| dynamic_drop(a, false)); + run_test!(|a| dynamic_drop(a, true)); + + run_test!(|a| assignment2(a, false, false)); + run_test!(|a| assignment2(a, false, true)); + run_test!(|a| assignment2(a, true, false)); + run_test!(|a| assignment2(a, true, true)); + + run_test!(|a| assignment1(a, false)); + run_test!(|a| assignment1(a, true)); + + run_test!(|a| array_simple(a)); + run_test!(|a| vec_simple(a)); + run_test!(|a| vec_unreachable(a)); + + run_test!(|a| struct_dynamic_drop(a, false, false, false)); + run_test!(|a| struct_dynamic_drop(a, false, false, true)); + run_test!(|a| struct_dynamic_drop(a, false, true, false)); + run_test!(|a| struct_dynamic_drop(a, false, true, true)); + run_test!(|a| struct_dynamic_drop(a, true, false, false)); + run_test!(|a| struct_dynamic_drop(a, true, false, true)); + run_test!(|a| struct_dynamic_drop(a, true, true, false)); + run_test!(|a| struct_dynamic_drop(a, true, true, true)); + + run_test!(|a| field_assignment(a, false)); + run_test!(|a| field_assignment(a, true)); + + run_test!(|a| coroutine(a, 0)); + run_test!(|a| coroutine(a, 1)); + run_test!(|a| coroutine(a, 2)); + run_test!(|a| coroutine(a, 3)); + + run_test!(|a| mixed_drop_and_nondrop(a)); + + run_test!(|a| slice_pattern_first(a)); + run_test!(|a| slice_pattern_middle(a)); + run_test!(|a| slice_pattern_two(a)); + run_test!(|a| slice_pattern_last(a)); + run_test!(|a| slice_pattern_one_of(a, 0)); + run_test!(|a| slice_pattern_one_of(a, 1)); + run_test!(|a| slice_pattern_one_of(a, 2)); + run_test!(|a| slice_pattern_one_of(a, 3)); + + run_test!(|a| subslice_pattern_from_end(a, true)); + run_test!(|a| subslice_pattern_from_end(a, false)); + run_test!(|a| subslice_pattern_from_end_with_drop(a, true, true)); + run_test!(|a| subslice_pattern_from_end_with_drop(a, true, false)); + run_test!(|a| subslice_pattern_from_end_with_drop(a, false, true)); + run_test!(|a| subslice_pattern_from_end_with_drop(a, false, false)); + run_test!(|a| slice_pattern_reassign(a)); + run_test!(|a| subslice_pattern_reassign(a)); + + run_test!(|a| index_field_mixed_ends(a)); + run_test!(|a| subslice_mixed_min_lengths(a, 0)); + run_test!(|a| subslice_mixed_min_lengths(a, 1)); + run_test!(|a| subslice_mixed_min_lengths(a, 2)); + run_test!(|a| subslice_mixed_min_lengths(a, 3)); + run_test!(|a| subslice_mixed_min_lengths(a, 4)); + run_test!(|a| subslice_mixed_min_lengths(a, 5)); + run_test!(|a| subslice_mixed_min_lengths(a, 6)); + run_test!(|a| subslice_mixed_min_lengths(a, 7)); + + run_test!(|a| move_ref_pattern(a)); + + run_test!(|a| if_let_guard(a, true, 0)); + run_test!(|a| if_let_guard(a, true, 1)); + run_test!(|a| if_let_guard(a, true, 2)); + run_test!(|a| if_let_guard(a, false, 0)); + run_test!(|a| if_let_guard(a, false, 1)); + run_test!(|a| if_let_guard(a, false, 2)); + run_test!(|a| if_let_guard_2(a, 0)); + run_test!(|a| if_let_guard_2(a, 1)); + run_test!(|a| if_let_guard_2(a, 2)); + + run_test!(|a| { panic_after_return(a); }); - run_test(|a| { + run_test!(|a| { panic_after_return_expr(a); }); - run_test(|a| panic_after_init(a)); - run_test(|a| panic_after_init_temp(a)); - run_test(|a| panic_after_init_by_loop(a)); - - run_test(|a| bindings_after_at_dynamic_init_move(a, true)); - run_test(|a| bindings_after_at_dynamic_init_move(a, false)); - run_test(|a| bindings_after_at_dynamic_init_ref(a, true)); - run_test(|a| bindings_after_at_dynamic_init_ref(a, false)); - run_test(|a| bindings_after_at_dynamic_drop_move(a, true)); - run_test(|a| 
bindings_after_at_dynamic_drop_move(a, false)); - run_test(|a| bindings_after_at_dynamic_drop_ref(a, true)); - run_test(|a| bindings_after_at_dynamic_drop_ref(a, false)); - - run_test_nopanic(|a| union1(a)); + run_test!(|a| panic_after_init(a)); + run_test!(|a| panic_after_init_temp(a)); + run_test!(|a| panic_after_init_by_loop(a)); + run_test!(|a| panic_after_init_by_match(a, false)); + run_test!(|a| panic_after_init_by_match(a, true)); + run_test!(|a| panic_after_init_by_match_with_guard(a, false)); + run_test!(|a| panic_after_init_by_match_with_guard(a, true)); + run_test!(|a| panic_after_init_by_match_with_bindings_and_guard(a, false)); + run_test!(|a| panic_after_init_by_match_with_bindings_and_guard(a, true)); + run_test!(|a| panic_after_init_by_match_with_ref_bindings_and_guard(a, false)); + run_test!(|a| panic_after_init_by_match_with_ref_bindings_and_guard(a, true)); + run_test!(|a| panic_after_init_by_break_if(a, false)); + run_test!(|a| panic_after_init_by_break_if(a, true)); + + run_test!(|a| bindings_after_at_dynamic_init_move(a, true)); + run_test!(|a| bindings_after_at_dynamic_init_move(a, false)); + run_test!(|a| bindings_after_at_dynamic_init_ref(a, true)); + run_test!(|a| bindings_after_at_dynamic_init_ref(a, false)); + run_test!(|a| bindings_after_at_dynamic_drop_move(a, true)); + run_test!(|a| bindings_after_at_dynamic_drop_move(a, false)); + run_test!(|a| bindings_after_at_dynamic_drop_ref(a, true)); + run_test!(|a| bindings_after_at_dynamic_drop_ref(a, false)); + + run_test_nopanic(|a| union1(a), "|a| union1(a)"); } diff --git a/tests/ui/label/label_break_drop_value.rs b/tests/ui/label/label_break_drop_value.rs new file mode 100644 index 0000000000000..bbe709cda31d0 --- /dev/null +++ b/tests/ui/label/label_break_drop_value.rs @@ -0,0 +1,17 @@ +//@ check-pass + +fn generate_item_fn(attr: String) { + match attr { + path => 'ret: { + if false { + break 'ret path; + } + + return; + } + + _ => return, + }; +} + +fn main() {} diff --git a/tests/ui/nll/ice-106874.rs b/tests/ui/nll/ice-106874.rs index 9337eee961bfb..6e8c651e438e5 100644 --- a/tests/ui/nll/ice-106874.rs +++ b/tests/ui/nll/ice-106874.rs @@ -14,8 +14,6 @@ pub fn func(f: F) -> A { //~| ERROR implementation of `FnOnce` is not general enough //~| ERROR implementation of `Fn` is not general enough //~| ERROR implementation of `FnOnce` is not general enough - //~| ERROR higher-ranked subtype error - //~| ERROR higher-ranked subtype error } trait X {} diff --git a/tests/ui/nll/ice-106874.stderr b/tests/ui/nll/ice-106874.stderr index ead4d490a6248..c1c94b140d661 100644 --- a/tests/ui/nll/ice-106874.stderr +++ b/tests/ui/nll/ice-106874.stderr @@ -72,19 +72,5 @@ LL | A(B(C::new(D::new(move |st| f(st))))) = note: closure with signature `fn(&'2 mut V)` must implement `FnOnce<(&'1 mut V,)>`, for any lifetime `'1`... 
    = note: ...but it actually implements `FnOnce<(&'2 mut V,)>`, for some specific lifetime `'2`

-error: higher-ranked subtype error
-  --> $DIR/ice-106874.rs:8:41
-   |
-LL |     A(B(C::new(D::new(move |st| f(st)))))
-   |                                         ^
-
-error: higher-ranked subtype error
-  --> $DIR/ice-106874.rs:8:41
-   |
-LL |     A(B(C::new(D::new(move |st| f(st)))))
-   |                                         ^
-   |
-   = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
-
-error: aborting due to 10 previous errors
+error: aborting due to 8 previous errors


From 8b4edef6b31261f7e150a3794051f75563f45275 Mon Sep 17 00:00:00 2001
From: Matthew Jasper
Date: Wed, 5 Jun 2024 11:47:20 +0000
Subject: [PATCH 4/5] Fix performance issue with `record_operands_moved`

Large arrays/tuples can have enough operands that removing items one at
a time is significantly slower than creating a hash set first.
---
 compiler/rustc_mir_build/src/build/scope.rs | 21 ++++++++++++---------
 1 file changed, 12 insertions(+), 9 deletions(-)

diff --git a/compiler/rustc_mir_build/src/build/scope.rs b/compiler/rustc_mir_build/src/build/scope.rs
index 2be29f461c137..a8ae07eb42d91 100644
--- a/compiler/rustc_mir_build/src/build/scope.rs
+++ b/compiler/rustc_mir_build/src/build/scope.rs
@@ -84,7 +84,7 @@ that contains only loops and breakable blocks. It tracks where a `break`,
 use std::mem;
 
 use crate::build::{BlockAnd, BlockAndExtension, BlockFrame, Builder, CFG};
-use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::fx::{FxHashMap, FxHashSet};
 use rustc_hir::HirId;
 use rustc_index::{IndexSlice, IndexVec};
 use rustc_middle::middle::region;
@@ -1195,15 +1195,18 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         assert_eq!(scope.region_scope, local_scope, "local scope is not the topmost scope!",);
 
         // look for moves of a local variable, like `MOVE(_X)`
-        let locals_moved = operands.iter().flat_map(|operand| match operand {
-            Operand::Copy(_) | Operand::Constant(_) => None,
-            Operand::Move(place) => place.as_local(),
-        });
+        let locals_moved: FxHashSet<Local> = operands
+            .iter()
+            .flat_map(|operand| match operand {
+                Operand::Copy(_) | Operand::Constant(_) => None,
+                Operand::Move(place) => place.as_local(),
+            })
+            .collect();
 
-        for local in locals_moved {
-            // Unschedule drops from the scope.
-            scope.drops.retain(|drop| drop.local != local || drop.kind != DropKind::Value);
-        }
+        // Unschedule drops from the scope.
+        scope
+            .drops
+            .retain(|drop| drop.kind != DropKind::Value || !locals_moved.contains(&drop.local));
 
         scope.invalidate_cache();
     }


From 3e0db6a36f01a5a8978ced19e0b83ebca5cd103c Mon Sep 17 00:00:00 2001
From: Matthew Jasper
Date: Mon, 24 Jun 2024 16:34:46 +0000
Subject: [PATCH 5/5] Work around opaque types hiding needs_drop

When building the MIR we sometimes try to unschedule drops, and when
doing so we assert that the drop in question was previously scheduled.
However, a local whose type is an opaque type may be initialized from
an expression whose kind tells us the value does not need to be
dropped, in which case no drop was ever scheduled for it. To fix this,
we no longer panic when we cannot find the scheduled drop of a local
whose type mentions an opaque type.
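For illustration only (not part of the patch), a minimal sketch of the
situation being worked around; the function names here are made up, and
the committed test below exercises the same pattern:

    // Rough idea: when the result type is concrete and needs drop, a drop
    // is scheduled for the intermediate value and later unscheduled once
    // the value reaches its destination.
    fn concrete(c: bool) -> String {
        match c {
            true => String::new(),
            false => String::new(),
        }
    }

    // Behind `impl Sized` the concrete type here is `()`, which needs no
    // drop, so no drop was ever scheduled; the unscheduling code must not
    // treat the missing drop as a compiler bug for locals whose type
    // mentions an opaque type.
    fn opaque(c: bool) -> impl Sized {
        match c {
            true => (),
            false => (),
        }
    }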
--- compiler/rustc_mir_build/src/build/scope.rs | 4 ++++ tests/ui/impl-trait/rpit/non-drop-type.rs | 22 +++++++++++++++++++++ 2 files changed, 26 insertions(+) create mode 100644 tests/ui/impl-trait/rpit/non-drop-type.rs diff --git a/compiler/rustc_mir_build/src/build/scope.rs b/compiler/rustc_mir_build/src/build/scope.rs index a8ae07eb42d91..f9e1872791d33 100644 --- a/compiler/rustc_mir_build/src/build/scope.rs +++ b/compiler/rustc_mir_build/src/build/scope.rs @@ -90,6 +90,7 @@ use rustc_index::{IndexSlice, IndexVec}; use rustc_middle::middle::region; use rustc_middle::mir::*; use rustc_middle::thir::{ExprId, LintLevel}; +use rustc_middle::ty::TypeVisitableExt; use rustc_middle::{bug, span_bug}; use rustc_session::lint::Level; use rustc_span::{Span, DUMMY_SP}; @@ -1141,6 +1142,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { { return; } + // Opaque type may not have been scheduled if its underlying + // type does not need drop. + None if self.local_decls[local].ty.has_opaque_types() => return, _ => bug!( "found wrong drop, expected value drop of {:?}, found {:?}", local, diff --git a/tests/ui/impl-trait/rpit/non-drop-type.rs b/tests/ui/impl-trait/rpit/non-drop-type.rs new file mode 100644 index 0000000000000..9509310191b13 --- /dev/null +++ b/tests/ui/impl-trait/rpit/non-drop-type.rs @@ -0,0 +1,22 @@ +//@ check-pass + +fn if_else(c: bool) -> impl Sized { + if c { () } else { () } +} + +fn if_no_else(c: bool) -> impl Sized { + if c {} +} + +fn matches(c: bool) -> impl Sized { + match c { + true => (), + _ => (), + } +} + +fn tuple_tuple(c: bool) -> (impl Sized,) { + if c { ((),) } else { ((),) } +} + +fn main() {}
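As a closing illustration of the `record_operands_moved` change in
PATCH 4/5, here is a rough standalone sketch of why collecting the
moved locals into a set first helps. The types below are made-up
stand-ins, not the compiler's, and the code is only meant to show the
shape of the before/after passes:

    use std::collections::HashSet;

    // Hypothetical stand-ins for the builder's scheduled drops.
    type Local = usize;
    struct ScheduledDrop {
        local: Local,
        is_value_drop: bool,
    }

    // Before: one retain() pass over the drop list per moved operand,
    // roughly O(drops * operands).
    fn unschedule_one_at_a_time(drops: &mut Vec<ScheduledDrop>, moved: &[Local]) {
        for &local in moved {
            drops.retain(|d| d.local != local || !d.is_value_drop);
        }
    }

    // After: collect the moved locals once, then do a single retain() pass,
    // roughly O(drops + operands), which matters for large array and tuple
    // aggregates with many operands.
    fn unschedule_with_set(drops: &mut Vec<ScheduledDrop>, moved: &[Local]) {
        let moved: HashSet<Local> = moved.iter().copied().collect();
        drops.retain(|d| !d.is_value_drop || !moved.contains(&d.local));
    }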