Make `into` schedule drop for the destination

Matthew Jasper 2019-09-29 21:35:20 +01:00
parent 73c09870b5
commit 37026837a3
12 changed files with 458 additions and 238 deletions

View file

@ -1,18 +1,22 @@
use crate::build::{BlockAnd, BlockAndExtension, BlockFrame, Builder};
use crate::build::ForGuard::OutsideGuard;
use crate::build::matches::ArmHasGuard;
use crate::build::scope::DropKind;
use crate::hair::*;
use rustc::middle::region;
use rustc::mir::*;
use rustc::hir;
use syntax_pos::Span;
impl<'a, 'tcx> Builder<'a, 'tcx> {
pub fn ast_block(&mut self,
destination: &Place<'tcx>,
block: BasicBlock,
ast_block: &'tcx hir::Block,
source_info: SourceInfo)
-> BlockAnd<()> {
pub fn ast_block(
&mut self,
destination: &Place<'tcx>,
scope: Option<region::Scope>,
block: BasicBlock,
ast_block: &'tcx hir::Block,
source_info: SourceInfo,
) -> BlockAnd<()> {
let Block {
region_scope,
opt_destruction_scope,
@ -21,37 +25,61 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
expr,
targeted_by_break,
safety_mode
} =
self.hir.mirror(ast_block);
} = self.hir.mirror(ast_block);
self.in_opt_scope(opt_destruction_scope.map(|de|(de, source_info)), move |this| {
this.in_scope((region_scope, source_info), LintLevel::Inherited, move |this| {
if targeted_by_break {
// This is a `break`-able block
let exit_block = this.cfg.start_new_block();
if let Some(scope) = scope {
// Breakable blocks assign to their destination on each
// `break`, as well as when they exit normally. So we
// can't schedule the drop in the last expression like
// normal blocks do.
let local = destination.as_local()
.expect("cannot schedule drop of non-Local place");
this.schedule_drop(span, scope, local, DropKind::Value);
}
let block_exit = this.in_breakable_scope(
None, exit_block, destination.clone(), |this| {
this.ast_block_stmts(destination, block, span, stmts, expr,
safety_mode)
this.ast_block_stmts(
destination,
None,
block,
span,
stmts,
expr,
safety_mode,
)
});
this.cfg.terminate(unpack!(block_exit), source_info,
TerminatorKind::Goto { target: exit_block });
exit_block.unit()
} else {
this.ast_block_stmts(destination, block, span, stmts, expr,
safety_mode)
this.ast_block_stmts(
destination,
scope,
block,
span,
stmts,
expr,
safety_mode,
)
}
})
})
}
fn ast_block_stmts(&mut self,
destination: &Place<'tcx>,
mut block: BasicBlock,
span: Span,
stmts: Vec<StmtRef<'tcx>>,
expr: Option<ExprRef<'tcx>>,
safety_mode: BlockSafety)
-> BlockAnd<()> {
fn ast_block_stmts(
&mut self,
destination: &Place<'tcx>,
scope: Option<region::Scope>,
mut block: BasicBlock,
span: Span,
stmts: Vec<StmtRef<'tcx>>,
expr: Option<ExprRef<'tcx>>,
safety_mode: BlockSafety,
) -> BlockAnd<()> {
let this = self;
// This convoluted structure is to avoid using recursion as we walk down a list
@ -177,7 +205,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
this.block_context.currently_ignores_tail_results();
this.block_context.push(BlockFrame::TailExpr { tail_result_is_ignored });
unpack!(block = this.into(destination, block, expr));
unpack!(block = this.into(destination, scope, block, expr));
let popped = this.block_context.pop();
assert!(popped.map_or(false, |bf|bf.is_tail_expr()));
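
The comment above about breakable blocks is easiest to see from the source level: a breakable block or loop can assign its destination from any number of `break` sites, so the drop of that destination has to be scheduled before the body is lowered, not at a single tail expression. A minimal user-level sketch (not part of this commit; the names are made up):

// Illustrative sketch: `s` can be assigned by either `break`, so the MIR
// builder cannot wait for one tail expression before scheduling its drop.
fn pick(flag: bool) -> String {
    let s = loop {
        if flag {
            break String::from("early");
        }
        break String::from("late");
    };
    // If anything from here on unwinds, `s` must still be dropped.
    s
}

fn main() {
    println!("{}", pick(true));
}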

View file

@ -136,11 +136,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
this.cfg
.push_assign(block, source_info, &Place::from(result), box_);
// initialize the box contents:
// Initialize the box contents. No scope is needed since the
// `Box` is already scheduled to be dropped.
unpack!(
block = this.into(
&Place::from(result).deref(),
block, value
None,
block,
value
)
);
block.and(Rvalue::Use(Operand::Move(Place::from(result))))

View file

@ -109,17 +109,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
}
}
unpack!(block = this.into(temp_place, block, expr));
if let Some(temp_lifetime) = temp_lifetime {
this.schedule_drop(
expr_span,
temp_lifetime,
temp,
expr_ty,
DropKind::Value,
);
}
unpack!(block = this.into(temp_place, temp_lifetime, block, expr));
block.and(temp)
}

View file

@ -2,7 +2,9 @@
use crate::build::expr::category::{Category, RvalueFunc};
use crate::build::{BlockAnd, BlockAndExtension, BlockFrame, Builder};
use crate::build::scope::DropKind;
use crate::hair::*;
use rustc::middle::region;
use rustc::mir::*;
use rustc::ty;
@ -11,15 +13,18 @@ use rustc_target::spec::abi::Abi;
impl<'a, 'tcx> Builder<'a, 'tcx> {
/// Compile `expr`, storing the result into `destination`, which
/// is assumed to be uninitialized.
/// If a `drop_scope` is provided, `destination` is scheduled to be dropped
/// in `scope` once it has been initialized.
pub fn into_expr(
&mut self,
destination: &Place<'tcx>,
scope: Option<region::Scope>,
mut block: BasicBlock,
expr: Expr<'tcx>,
) -> BlockAnd<()> {
debug!(
"into_expr(destination={:?}, block={:?}, expr={:?})",
destination, block, expr
"into_expr(destination={:?}, scope={:?}, block={:?}, expr={:?})",
destination, scope, block, expr
);
// since we frequently have to reference `self` from within a
@ -35,6 +40,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
_ => false,
};
let schedule_drop = move |this: &mut Self| {
if let Some(drop_scope) = scope {
let local = destination.as_local()
.expect("cannot schedule drop of non-Local place");
this.schedule_drop(expr_span, drop_scope, local, DropKind::Value);
}
};
if !expr_is_block_or_scope {
this.block_context.push(BlockFrame::SubExpr);
}
@ -47,14 +60,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
} => {
let region_scope = (region_scope, source_info);
this.in_scope(region_scope, lint_level, |this| {
this.into(destination, block, value)
this.into(destination, scope, block, value)
})
}
ExprKind::Block { body: ast_block } => {
this.ast_block(destination, block, ast_block, source_info)
this.ast_block(destination, scope, block, ast_block, source_info)
}
ExprKind::Match { scrutinee, arms } => {
this.match_expr(destination, expr_span, block, scrutinee, arms)
this.match_expr(destination, scope, expr_span, block, scrutinee, arms)
}
ExprKind::NeverToAny { source } => {
let source = this.hir.mirror(source);
@ -67,6 +80,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
// This is an optimization. If the expression was a call then we already have an
// unreachable block. Don't bother to terminate it and create a new one.
schedule_drop(this);
if is_call {
block.unit()
} else {
@ -164,6 +178,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
TerminatorKind::Goto { target: loop_block },
);
// Loops assign to their destination on each `break`. Since we
// can't easily unschedule drops, we schedule the drop now.
schedule_drop(this);
this.in_breakable_scope(
Some(loop_block),
exit_block,
@ -185,7 +202,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
// introduce a unit temporary as the destination for the loop body.
let tmp = this.get_unit_temp();
// Execute the body, branching back to the test.
let body_block_end = unpack!(this.into(&tmp, body_block, body));
// No scope is provided, since we've scheduled the drop above.
let body_block_end = unpack!(this.into(&tmp, None, body_block, body));
this.cfg.terminate(
body_block_end,
source_info,
@ -234,8 +252,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
is_block_tail: None,
});
let ptr_temp = Place::from(ptr_temp);
let block = unpack!(this.into(&ptr_temp, block, ptr));
this.into(&ptr_temp.deref(), block, val)
// No need for a scope, ptr_temp doesn't need drop
let block = unpack!(this.into(&ptr_temp, None, block, ptr));
// Maybe we should provide a scope here so that
// `move_val_init` wouldn't leak on panic even with an
// arbitrary `val` expression, but `schedule_drop`,
// borrowck and drop elaboration all prevent us from
// dropping `ptr_temp.deref()`.
this.into(&ptr_temp.deref(), None, block, val)
} else {
let args: Vec<_> = args
.into_iter()
@ -265,11 +289,12 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
from_hir_call,
},
);
schedule_drop(this);
success.unit()
}
}
ExprKind::Use { source } => {
this.into(destination, block, source)
this.into(destination, scope, block, source)
}
// These cases don't actually need a destination
@ -296,6 +321,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
let rvalue = Rvalue::Use(this.consume_by_copy_or_move(place));
this.cfg
.push_assign(block, source_info, destination, rvalue);
schedule_drop(this);
block.unit()
}
ExprKind::Index { .. } | ExprKind::Deref { .. } | ExprKind::Field { .. } => {
@ -315,6 +341,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
let rvalue = Rvalue::Use(this.consume_by_copy_or_move(place));
this.cfg
.push_assign(block, source_info, destination, rvalue);
schedule_drop(this);
block.unit()
}
@ -346,6 +373,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
let rvalue = unpack!(block = this.as_local_rvalue(block, expr));
this.cfg.push_assign(block, source_info, destination, rvalue);
schedule_drop(this);
block.unit()
}
};
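
The `schedule_drop` closure above is what closes the leak tracked in issue #47949: as soon as the destination is initialized (a call returns, a `break` assigns a loop's destination, an rvalue is pushed), its drop is already on the cleanup path, so a destructor panic later in the same statement unwinds through it instead of leaking the value. A self-contained sketch of the observable effect, assuming the behavior this commit establishes (`Tracked`, `Bomb`, and `init_by_loop` are made-up names):

// Illustrative sketch, not part of this commit. `Bomb`'s destructor panics
// after the loop has already written `_q`; with the destination's drop
// scheduled up front, the `Tracked` value is still dropped while unwinding.
use std::cell::Cell;
use std::panic;

thread_local! {
    static DROPPED: Cell<bool> = Cell::new(false);
}

struct Tracked;
impl Drop for Tracked {
    fn drop(&mut self) {
        DROPPED.with(|d| d.set(true));
    }
}

struct Bomb;
impl Drop for Bomb {
    fn drop(&mut self) {
        panic!("destructor panics after the destination is initialized");
    }
}

fn init_by_loop() {
    let _q = loop {
        let _bomb = Bomb;
        // The `break` writes `_q` first; `_bomb` is dropped afterwards and
        // panics, starting an unwind that must still drop `_q`'s value.
        break Tracked;
    };
}

fn main() {
    let _ = panic::catch_unwind(init_by_loop);
    assert!(DROPPED.with(|d| d.get()), "the loop's destination leaked");
}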

View file

@ -6,6 +6,7 @@
use crate::build::{BlockAnd, Builder};
use crate::hair::*;
use rustc::middle::region;
use rustc::mir::*;
pub(in crate::build) trait EvalInto<'tcx> {
@ -13,19 +14,23 @@ pub(in crate::build) trait EvalInto<'tcx> {
self,
builder: &mut Builder<'_, 'tcx>,
destination: &Place<'tcx>,
scope: Option<region::Scope>,
block: BasicBlock,
) -> BlockAnd<()>;
}
impl<'a, 'tcx> Builder<'a, 'tcx> {
pub fn into<E>(&mut self,
destination: &Place<'tcx>,
block: BasicBlock,
expr: E)
-> BlockAnd<()>
where E: EvalInto<'tcx>
pub fn into<E>(
&mut self,
destination: &Place<'tcx>,
scope: Option<region::Scope>,
block: BasicBlock,
expr: E,
) -> BlockAnd<()>
where
E: EvalInto<'tcx>,
{
expr.eval_into(self, destination, block)
expr.eval_into(self, destination, scope, block)
}
}
@ -34,10 +39,11 @@ impl<'tcx> EvalInto<'tcx> for ExprRef<'tcx> {
self,
builder: &mut Builder<'_, 'tcx>,
destination: &Place<'tcx>,
scope: Option<region::Scope>,
block: BasicBlock,
) -> BlockAnd<()> {
let expr = builder.hir.mirror(self);
builder.into_expr(destination, block, expr)
builder.into_expr(destination, scope, block, expr)
}
}
@ -46,8 +52,9 @@ impl<'tcx> EvalInto<'tcx> for Expr<'tcx> {
self,
builder: &mut Builder<'_, 'tcx>,
destination: &Place<'tcx>,
scope: Option<region::Scope>,
block: BasicBlock,
) -> BlockAnd<()> {
builder.into_expr(destination, block, self)
builder.into_expr(destination, scope, block, self)
}
}
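
`into` stays generic over this `EvalInto` trait so callers can hand it either an expression reference that still needs mirroring or an already-mirrored expression; the new `scope` parameter is simply threaded through both impls. A toy sketch of the same dispatch shape (not the compiler's real types):

// Illustrative only: a miniature `EvalInto`-style trait with two impls, one
// of which converts its input before delegating, mirroring how the real
// `into` forwards `destination`, `scope`, and `block` unchanged.
trait EvalInto {
    fn eval_into(self, dest: &mut String, scope: Option<u32>);
}

struct Mirrored(String);
struct NeedsMirror(&'static str);

impl EvalInto for Mirrored {
    fn eval_into(self, dest: &mut String, _scope: Option<u32>) {
        *dest = self.0;
    }
}

impl EvalInto for NeedsMirror {
    fn eval_into(self, dest: &mut String, scope: Option<u32>) {
        Mirrored(self.0.to_string()).eval_into(dest, scope)
    }
}

fn into<E: EvalInto>(dest: &mut String, scope: Option<u32>, expr: E) {
    expr.eval_into(dest, scope)
}

fn main() {
    let mut out = String::new();
    into(&mut out, Some(1), NeedsMirror("hello"));
    assert_eq!(out, "hello");
}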

View file

@ -102,6 +102,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
pub fn match_expr(
&mut self,
destination: &Place<'tcx>,
destination_scope: Option<region::Scope>,
span: Span,
mut block: BasicBlock,
scrutinee: ExprRef<'tcx>,
@ -228,6 +229,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
};
// Step 5. Create everything else: the guards and the arms.
if let Some(scope) = destination_scope {
// `match` assigns to its destination in each arm. Since we can't
// easily unschedule drops, we schedule the drop now.
let local = destination.as_local()
.expect("cannot schedule drop of non-Local place");
self.schedule_drop(span, scope, local, DropKind::Value);
}
let match_scope = self.scopes.topmost();
let arm_end_blocks: Vec<_> = arm_candidates.into_iter().map(|(arm, mut candidates)| {
@ -275,7 +284,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
this.source_scope = source_scope;
}
this.into(destination, arm_block, body)
// No scope is provided, since we've scheduled the drop above.
this.into(destination, None, arm_block, body)
})
}).collect();
@ -311,8 +321,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
} => {
let place =
self.storage_live_binding(block, var, irrefutable_pat.span, OutsideGuard);
unpack!(block = self.into(&place, block, initializer));
let region_scope = self.hir.region_scope_tree.var_scope(var.local_id);
unpack!(block = self.into(&place, Some(region_scope), block, initializer));
// Inject a fake read, see comments on `FakeReadCause::ForLet`.
let source_info = self.source_info(irrefutable_pat.span);
@ -324,7 +335,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
},
);
self.schedule_drop_for_binding(var, irrefutable_pat.span, OutsideGuard);
block.unit()
}
@ -352,9 +362,10 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
user_ty_span,
},
} => {
let region_scope = self.hir.region_scope_tree.var_scope(var.local_id);
let place =
self.storage_live_binding(block, var, irrefutable_pat.span, OutsideGuard);
unpack!(block = self.into(&place, block, initializer));
unpack!(block = self.into(&place, Some(region_scope), block, initializer));
// Inject a fake read, see comments on `FakeReadCause::ForLet`.
let pattern_source_info = self.source_info(irrefutable_pat.span);
@ -400,7 +411,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
},
);
self.schedule_drop_for_binding(var, irrefutable_pat.span, OutsideGuard);
block.unit()
}
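
The comment above notes that a `match` assigns its destination in each arm, which is why the drop is scheduled before the arms are lowered. At the source level the shape resembles the new `panic_after_init_by_match_with_bindings_and_guard` test; here is a simpler, standalone sketch (names made up, not part of this commit):

// Illustrative sketch: every arm writes `label`, so `label`'s drop is
// scheduled once, ahead of lowering the guard and the arm bodies.
fn classify(n: i32, verbose: bool) -> String {
    let label = match n {
        x if verbose && x < 0 => format!("negative ({})", x),
        0 => String::from("zero"),
        x => format!("positive ({})", x),
    };
    label
}

fn main() {
    println!("{}", classify(-3, true));
    println!("{}", classify(7, false));
}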

View file

@ -616,6 +616,7 @@ where
let source_info = builder.source_info(span);
let call_site_s = (call_site_scope, source_info);
unpack!(block = builder.in_scope(call_site_s, LintLevel::Inherited, |builder| {
builder.schedule_drop(span, call_site_scope, RETURN_PLACE, DropKind::Value);
if should_abort_on_panic(tcx, fn_def_id, abi) {
builder.schedule_abort();
}
@ -646,6 +647,7 @@ where
builder.cfg.terminate(unreachable_block, source_info,
TerminatorKind::Unreachable);
}
builder.unschedule_return_place_drop();
return_block.unit()
}));
assert_eq!(block, builder.return_block());
@ -687,7 +689,9 @@ fn construct_const<'a, 'tcx>(
let mut block = START_BLOCK;
let ast_expr = &tcx.hir().body(body_id).value;
let expr = builder.hir.mirror(ast_expr);
unpack!(block = builder.into_expr(&Place::return_place(), block, expr));
// We don't provide a scope because we can't unwind in constants, so won't
// need to drop the return place.
unpack!(block = builder.into_expr(&Place::return_place(), None, block, expr));
let source_info = builder.source_info(span);
builder.cfg.terminate(block, source_info, TerminatorKind::Return);
@ -888,7 +892,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
}
let body = self.hir.mirror(ast_body);
self.into(&Place::return_place(), block, body)
// No scope is provided, since we've scheduled the drop of the return
// place.
self.into(&Place::return_place(), None, block, body)
}
fn set_correct_source_scope_for_arg(
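
The `RETURN_PLACE` drop scheduled above, paired with `unschedule_return_place_drop` on the normal return path, is what lets a function's already-computed return value be cleaned up when a later destructor panics. A small sketch of that situation, assuming the behavior this commit establishes (`Bomb` and `make` are made-up names):

use std::panic;

struct Bomb;
impl Drop for Bomb {
    fn drop(&mut self) {
        panic!("destructor panics after the return value is written");
    }
}

// The `String` is written to the return place before `_bomb` is dropped.
// The drop scheduled for the return place in the call-site scope runs during
// the unwind started by `_bomb`; it is only unscheduled on the normal return.
fn make() -> String {
    let _bomb = Bomb;
    String::from("return value")
}

fn main() {
    assert!(panic::catch_unwind(make).is_err());
}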

View file

@ -513,7 +513,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
if let Some(value) = value {
debug!("stmt_expr Break val block_context.push(SubExpr)");
self.block_context.push(BlockFrame::SubExpr);
unpack!(block = self.into(&destination, block, value));
unpack!(block = self.into(&destination, None, block, value));
self.block_context.pop();
} else {
self.cfg.push_assign_unit(block, source_info, &destination)
@ -742,12 +742,13 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
span: Span,
region_scope: region::Scope,
local: Local,
place_ty: Ty<'tcx>,
drop_kind: DropKind,
) {
let needs_drop = self.hir.needs_drop(place_ty);
match drop_kind {
DropKind::Value => if !needs_drop { return },
let needs_drop = match drop_kind {
DropKind::Value => {
if !self.hir.needs_drop(self.local_decls[local].ty) { return }
true
},
DropKind::Storage => {
if local.index() <= self.arg_count {
span_bug!(
@ -756,8 +757,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
self.arg_count,
)
}
false
}
}
};
for scope in self.scopes.iter_mut() {
let this_scope = scope.region_scope == region_scope;
@ -1068,6 +1070,18 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
success_block
}
/// Unschedules the drop of the return place.
///
/// If the return type of a function requires drop, then we schedule it
/// in the outermost scope so that it's dropped if there's a panic while
/// we drop any local variables. But we don't want to drop it if we
/// return normally.
crate fn unschedule_return_place_drop(&mut self) {
assert_eq!(self.scopes.len(), 1);
assert!(self.scopes.scopes[0].drops.len() <= 1);
self.scopes.scopes[0].drops.clear();
}
// `match` arm scopes
// ==================
/// Unschedules any drops in the top scope.
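
`schedule_drop` now looks the type up in `local_decls` and only records a `DropKind::Value` entry when that type actually has drop glue. At the language level this is the same question `std::mem::needs_drop` answers; a quick illustration (not part of this commit):

// Illustrative: value drops only matter for types with drop glue.
fn main() {
    assert!(!std::mem::needs_drop::<i32>());
    assert!(!std::mem::needs_drop::<&str>());
    assert!(std::mem::needs_drop::<String>());
    assert!(std::mem::needs_drop::<Vec<u8>>());
}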

View file

@ -41,33 +41,36 @@ impl Drop for S {
//
// bb2: {
// _1 = move _2;
// drop(_2) -> bb4;
// drop(_2) -> [return: bb5, unwind: bb4];
// }
//
// bb3 (cleanup): {
// drop(_2) -> bb1;
// }
//
// bb4: {
// bb4 (cleanup): {
// drop(_1) -> bb1;
// }
//
// bb5: {
// StorageDead(_2);
// StorageLive(_3);
// StorageLive(_4);
// _4 = move _1;
// _3 = const std::mem::drop::<std::boxed::Box<S>>(move _4) -> [return: bb5, unwind: bb7];
// _3 = const std::mem::drop::<std::boxed::Box<S>>(move _4) -> [return: bb6, unwind: bb7];
// }
//
// bb5: {
// bb6: {
// StorageDead(_4);
// StorageDead(_3);
// _0 = ();
// drop(_1) -> bb8;
// }
// bb6 (cleanup): {
// drop(_1) -> bb1;
// }
//
// bb7 (cleanup): {
// drop(_4) -> bb6;
// drop(_4) -> bb4;
// }
//
// bb8: {
// StorageDead(_1);
// return;

View file

@ -24,7 +24,7 @@ fn main() {
// StorageLive(_3);
// StorageLive(_4);
// _4 = std::option::Option::<u32>::None;
// _3 = const <std::option::Option<u32> as std::ops::Try>::into_result(move _4) -> [return: bb2, unwind: bb3];
// _3 = const <std::option::Option<u32> as std::ops::Try>::into_result(move _4) -> [return: bb2, unwind: bb4];
// }
// bb1 (cleanup): {
// resume;
@ -32,60 +32,63 @@ fn main() {
// bb2: {
// StorageDead(_4);
// _5 = discriminant(_3);
// switchInt(move _5) -> [0isize: bb10, 1isize: bb5, otherwise: bb4];
// switchInt(move _5) -> [0isize: bb11, 1isize: bb6, otherwise: bb5];
// }
// bb3 (cleanup): {
// drop(_2) -> bb1;
// drop(_0) -> bb1;
// }
// bb4: {
// unreachable;
// bb4 (cleanup): {
// drop(_2) -> bb3;
// }
// bb5: {
// unreachable;
// }
// bb6: {
// StorageLive(_6);
// _6 = ((_3 as Err).0: std::option::NoneError);
// StorageLive(_8);
// StorageLive(_9);
// _9 = _6;
// _8 = const <std::option::NoneError as std::convert::From<std::option::NoneError>>::from(move _9) -> [return: bb7, unwind: bb3];
// }
// bb6: {
// return;
// _8 = const <std::option::NoneError as std::convert::From<std::option::NoneError>>::from(move _9) -> [return: bb8, unwind: bb4];
// }
// bb7: {
// StorageDead(_9);
// _0 = const <std::option::Option<std::boxed::Box<u32>> as std::ops::Try>::from_error(move _8) -> [return: bb8, unwind: bb3];
// return;
// }
// bb8: {
// StorageDead(_8);
// StorageDead(_6);
// drop(_2) -> bb9;
// StorageDead(_9);
// _0 = const <std::option::Option<std::boxed::Box<u32>> as std::ops::Try>::from_error(move _8) -> [return: bb9, unwind: bb4];
// }
// bb9: {
// StorageDead(_8);
// StorageDead(_6);
// drop(_2) -> [return: bb10, unwind: bb3];
// }
// bb10: {
// StorageDead(_2);
// StorageDead(_1);
// StorageDead(_3);
// goto -> bb6;
// goto -> bb7;
// }
// bb10: {
// bb11: {
// StorageLive(_10);
// _10 = ((_3 as Ok).0: u32);
// (*_2) = _10;
// StorageDead(_10);
// _1 = move _2;
// drop(_2) -> [return: bb12, unwind: bb11];
// drop(_2) -> [return: bb13, unwind: bb12];
// }
// bb11 (cleanup): {
// drop(_1) -> bb1;
// }
// bb12: {
// StorageDead(_2);
// _0 = std::option::Option::<std::boxed::Box<u32>>::Some(move _1,);
// drop(_1) -> bb13;
// bb12 (cleanup): {
// drop(_1) -> bb3;
// }
// bb13: {
// StorageDead(_2);
// _0 = std::option::Option::<std::boxed::Box<u32>>::Some(move _1,);
// drop(_1) -> [return: bb14, unwind: bb3];
// }
// bb14: {
// StorageDead(_1);
// StorageDead(_3);
// goto -> bb6;
// goto -> bb7;
// }
// }
// END rustc.test.ElaborateDrops.before.mir

View file

@ -7,7 +7,7 @@
// edition:2018
// ignore-wasm32-bare compiled with panic=abort by default
#![feature(slice_patterns)]
#![feature(slice_patterns, arbitrary_self_types)]
#![allow(unused)]
use std::{
@ -45,6 +45,7 @@ impl<T: Unpin> Future for Defer<T> {
/// The `failing_op`-th operation will panic.
struct Allocator {
data: RefCell<Vec<bool>>,
name: &'static str,
failing_op: usize,
cur_ops: Cell<usize>,
}
@ -56,23 +57,28 @@ impl Drop for Allocator {
fn drop(&mut self) {
let data = self.data.borrow();
if data.iter().any(|d| *d) {
panic!("missing free: {:?}", data);
panic!("missing free in {:?}: {:?}", self.name, data);
}
}
}
impl Allocator {
fn new(failing_op: usize) -> Self {
Allocator { failing_op, cur_ops: Cell::new(0), data: RefCell::new(vec![]) }
fn new(failing_op: usize, name: &'static str) -> Self {
Allocator {
failing_op,
name,
cur_ops: Cell::new(0),
data: RefCell::new(vec![]),
}
}
fn alloc(&self) -> impl Future<Output = Ptr<'_>> + '_ {
fn alloc(self: &Rc<Allocator>) -> impl Future<Output = Ptr> + 'static {
self.fallible_operation();
let mut data = self.data.borrow_mut();
let addr = data.len();
data.push(true);
Defer { ready: false, value: Some(Ptr(addr, self)) }
Defer { ready: false, value: Some(Ptr(addr, self.clone())) }
}
fn fallible_operation(&self) {
self.cur_ops.set(self.cur_ops.get() + 1);
@ -85,11 +91,11 @@ impl Allocator {
// Type that tracks whether it was dropped and can panic when it's created or
// destroyed.
struct Ptr<'a>(usize, &'a Allocator);
impl<'a> Drop for Ptr<'a> {
struct Ptr(usize, Rc<Allocator>);
impl Drop for Ptr {
fn drop(&mut self) {
match self.1.data.borrow_mut()[self.0] {
false => panic!("double free at index {:?}", self.0),
false => panic!("double free in {:?} at index {:?}", self.1.name, self.0),
ref mut d => *d = false,
}
@ -113,7 +119,7 @@ async fn dynamic_drop(a: Rc<Allocator>, c: bool) {
};
}
struct TwoPtrs<'a>(Ptr<'a>, Ptr<'a>);
struct TwoPtrs(Ptr, Ptr);
async fn struct_dynamic_drop(a: Rc<Allocator>, c0: bool, c1: bool, c: bool) {
for i in 0..2 {
let x;
@ -228,21 +234,62 @@ async fn subslice_pattern_reassign(a: Rc<Allocator>) {
a.alloc().await;
}
fn run_test<F, G>(cx: &mut Context<'_>, ref f: F)
async fn panic_after_return(a: Rc<Allocator>, c: bool) -> (Ptr,) {
a.alloc().await;
let p = a.alloc().await;
if c {
a.alloc().await;
let q = a.alloc().await;
// We use a return type that isn't used anywhere else to make sure that
// the return place doesn't incorrectly end up in the generator state.
return (a.alloc().await,);
}
(a.alloc().await,)
}
async fn panic_after_init_by_loop(a: Rc<Allocator>) {
a.alloc().await;
let p = a.alloc().await;
let q = loop {
a.alloc().await;
let r = a.alloc().await;
break a.alloc().await;
};
}
async fn panic_after_init_by_match_with_bindings_and_guard(a: Rc<Allocator>, b: bool) {
a.alloc().await;
let p = a.alloc().await;
let q = match a.alloc().await {
ref _x if b => {
a.alloc().await;
let r = a.alloc().await;
a.alloc().await
}
_x => {
a.alloc().await;
let r = a.alloc().await;
a.alloc().await
},
};
}
fn run_test<F, G, O>(cx: &mut Context<'_>, ref f: F, name: &'static str)
where
F: Fn(Rc<Allocator>) -> G,
G: Future<Output = ()>,
G: Future<Output = O>,
{
for polls in 0.. {
// Run without any panics to find which operations happen after the
// penultimate `poll`.
let first_alloc = Rc::new(Allocator::new(usize::MAX));
let first_alloc = Rc::new(Allocator::new(usize::MAX, name));
let mut fut = Box::pin(f(first_alloc.clone()));
let mut ops_before_last_poll = 0;
let mut completed = false;
for _ in 0..polls {
ops_before_last_poll = first_alloc.cur_ops.get();
if let Poll::Ready(()) = fut.as_mut().poll(cx) {
if let Poll::Ready(_) = fut.as_mut().poll(cx) {
completed = true;
}
}
@ -251,7 +298,7 @@ where
// Start at `ops_before_last_poll` so that we will always be able to
// `poll` the expected number of times.
for failing_op in ops_before_last_poll..first_alloc.cur_ops.get() {
let alloc = Rc::new(Allocator::new(failing_op + 1));
let alloc = Rc::new(Allocator::new(failing_op + 1, name));
let f = &f;
let cx = &mut *cx;
let result = panic::catch_unwind(panic::AssertUnwindSafe(move || {
@ -281,46 +328,56 @@ fn clone_waker(data: *const ()) -> RawWaker {
RawWaker::new(data, &RawWakerVTable::new(clone_waker, drop, drop, drop))
}
macro_rules! run_test {
($ctxt:expr, $e:expr) => { run_test($ctxt, $e, stringify!($e)); };
}
fn main() {
let waker = unsafe { Waker::from_raw(clone_waker(ptr::null())) };
let context = &mut Context::from_waker(&waker);
run_test(context, |a| dynamic_init(a, false));
run_test(context, |a| dynamic_init(a, true));
run_test(context, |a| dynamic_drop(a, false));
run_test(context, |a| dynamic_drop(a, true));
run_test!(context, |a| dynamic_init(a, false));
run_test!(context, |a| dynamic_init(a, true));
run_test!(context, |a| dynamic_drop(a, false));
run_test!(context, |a| dynamic_drop(a, true));
run_test(context, |a| assignment(a, false, false));
run_test(context, |a| assignment(a, false, true));
run_test(context, |a| assignment(a, true, false));
run_test(context, |a| assignment(a, true, true));
run_test!(context, |a| assignment(a, false, false));
run_test!(context, |a| assignment(a, false, true));
run_test!(context, |a| assignment(a, true, false));
run_test!(context, |a| assignment(a, true, true));
run_test(context, |a| array_simple(a));
run_test(context, |a| vec_simple(a));
run_test(context, |a| vec_unreachable(a));
run_test!(context, |a| array_simple(a));
run_test!(context, |a| vec_simple(a));
run_test!(context, |a| vec_unreachable(a));
run_test(context, |a| struct_dynamic_drop(a, false, false, false));
run_test(context, |a| struct_dynamic_drop(a, false, false, true));
run_test(context, |a| struct_dynamic_drop(a, false, true, false));
run_test(context, |a| struct_dynamic_drop(a, false, true, true));
run_test(context, |a| struct_dynamic_drop(a, true, false, false));
run_test(context, |a| struct_dynamic_drop(a, true, false, true));
run_test(context, |a| struct_dynamic_drop(a, true, true, false));
run_test(context, |a| struct_dynamic_drop(a, true, true, true));
run_test!(context, |a| struct_dynamic_drop(a, false, false, false));
run_test!(context, |a| struct_dynamic_drop(a, false, false, true));
run_test!(context, |a| struct_dynamic_drop(a, false, true, false));
run_test!(context, |a| struct_dynamic_drop(a, false, true, true));
run_test!(context, |a| struct_dynamic_drop(a, true, false, false));
run_test!(context, |a| struct_dynamic_drop(a, true, false, true));
run_test!(context, |a| struct_dynamic_drop(a, true, true, false));
run_test!(context, |a| struct_dynamic_drop(a, true, true, true));
run_test(context, |a| field_assignment(a, false));
run_test(context, |a| field_assignment(a, true));
run_test!(context, |a| field_assignment(a, false));
run_test!(context, |a| field_assignment(a, true));
run_test(context, |a| mixed_drop_and_nondrop(a));
run_test!(context, |a| mixed_drop_and_nondrop(a));
run_test(context, |a| slice_pattern_one_of(a, 0));
run_test(context, |a| slice_pattern_one_of(a, 1));
run_test(context, |a| slice_pattern_one_of(a, 2));
run_test(context, |a| slice_pattern_one_of(a, 3));
run_test!(context, |a| slice_pattern_one_of(a, 0));
run_test!(context, |a| slice_pattern_one_of(a, 1));
run_test!(context, |a| slice_pattern_one_of(a, 2));
run_test!(context, |a| slice_pattern_one_of(a, 3));
run_test(context, |a| subslice_pattern_from_end_with_drop(a, true, true));
run_test(context, |a| subslice_pattern_from_end_with_drop(a, true, false));
run_test(context, |a| subslice_pattern_from_end_with_drop(a, false, true));
run_test(context, |a| subslice_pattern_from_end_with_drop(a, false, false));
run_test(context, |a| subslice_pattern_reassign(a));
run_test!(context, |a| subslice_pattern_from_end_with_drop(a, true, true));
run_test!(context, |a| subslice_pattern_from_end_with_drop(a, true, false));
run_test!(context, |a| subslice_pattern_from_end_with_drop(a, false, true));
run_test!(context, |a| subslice_pattern_from_end_with_drop(a, false, false));
run_test!(context, |a| subslice_pattern_reassign(a));
run_test!(context, |a| panic_after_return(a, false));
run_test!(context, |a| panic_after_return(a, true));
run_test!(context, |a| panic_after_init_by_loop(a));
run_test!(context, |a| panic_after_init_by_match_with_bindings_and_guard(a, false));
run_test!(context, |a| panic_after_init_by_match_with_bindings_and_guard(a, true));
}
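
The `run_test!` wrapper above exists so each `Allocator` knows which test case it belongs to: `stringify!` captures the closure's source text and passes it as `name`, which then appears in the "missing free in ..." and "double free in ..." panic messages. The same trick in isolation (the macro and the closure below are only for illustration):

macro_rules! name_of {
    // Expands to the source text of the expression; nothing is evaluated,
    // so the names inside don't even have to resolve.
    ($e:expr) => { stringify!($e) };
}

fn main() {
    // Prints something close to `|a| dynamic_drop(a, false)`; exact spacing
    // is up to `stringify!`.
    println!("{}", name_of!(|a| dynamic_drop(a, false)));
}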

View file

@ -17,6 +17,7 @@ struct InjectedFailure;
struct Allocator {
data: RefCell<Vec<bool>>,
name: &'static str,
failing_op: usize,
cur_ops: Cell<usize>,
}
@ -28,17 +29,18 @@ impl Drop for Allocator {
fn drop(&mut self) {
let data = self.data.borrow();
if data.iter().any(|d| *d) {
panic!("missing free: {:?}", data);
panic!("missing free in {:?}: {:?}", self.name, data);
}
}
}
impl Allocator {
fn new(failing_op: usize) -> Self {
fn new(failing_op: usize, name: &'static str) -> Self {
Allocator {
failing_op: failing_op,
cur_ops: Cell::new(0),
data: RefCell::new(vec![])
data: RefCell::new(vec![]),
name,
}
}
fn alloc(&self) -> Ptr<'_> {
@ -53,20 +55,6 @@ impl Allocator {
data.push(true);
Ptr(addr, self)
}
// FIXME(#47949) Any use of this indicates a bug in rustc: we should never
// be leaking values in the cases here.
//
// Creates a `Ptr<'_>` and checks that the allocated value is leaked if the
// `failing_op` is in the list of exception.
fn alloc_leaked(&self, exceptions: Vec<usize>) -> Ptr<'_> {
let ptr = self.alloc();
if exceptions.iter().any(|operation| *operation == self.failing_op) {
let mut data = self.data.borrow_mut();
data[ptr.0] = false;
}
ptr
}
}
struct Ptr<'a>(usize, &'a Allocator);
@ -74,7 +62,7 @@ impl<'a> Drop for Ptr<'a> {
fn drop(&mut self) {
match self.1.data.borrow_mut()[self.0] {
false => {
panic!("double free at index {:?}", self.0)
panic!("double free in {:?} at index {:?}", self.1.name, self.0)
}
ref mut d => *d = false
}
@ -270,79 +258,148 @@ fn subslice_pattern_reassign(a: &Allocator) {
}
fn panic_after_return(a: &Allocator) -> Ptr<'_> {
// Panic in the drop of `p` or `q` can leak
let exceptions = vec![8, 9];
a.alloc();
let p = a.alloc();
{
a.alloc();
let p = a.alloc();
// FIXME (#47949) We leak values when we panic in a destructor after
// evaluating an expression with `rustc_mir::build::Builder::into`.
a.alloc_leaked(exceptions)
a.alloc()
}
}
fn panic_after_return_expr(a: &Allocator) -> Ptr<'_> {
// Panic in the drop of `p` or `q` can leak
let exceptions = vec![8, 9];
a.alloc();
let p = a.alloc();
{
a.alloc();
let q = a.alloc();
// FIXME (#47949)
return a.alloc_leaked(exceptions);
return a.alloc();
}
}
fn panic_after_init(a: &Allocator) {
// Panic in the drop of `r` can leak
let exceptions = vec![8];
a.alloc();
let p = a.alloc();
let q = {
a.alloc();
let r = a.alloc();
// FIXME (#47949)
a.alloc_leaked(exceptions)
a.alloc()
};
}
fn panic_after_init_temp(a: &Allocator) {
// Panic in the drop of `r` can leak
let exceptions = vec![8];
a.alloc();
let p = a.alloc();
{
a.alloc();
let r = a.alloc();
// FIXME (#47949)
a.alloc_leaked(exceptions)
a.alloc()
};
}
fn panic_after_init_by_loop(a: &Allocator) {
// Panic in the drop of `r` can leak
let exceptions = vec![8];
a.alloc();
let p = a.alloc();
let q = loop {
a.alloc();
let r = a.alloc();
// FIXME (#47949)
break a.alloc_leaked(exceptions);
break a.alloc();
};
}
fn run_test<F>(mut f: F)
fn panic_after_init_by_match(a: &Allocator, b: bool) {
a.alloc();
let p = a.alloc();
loop {
let q = match b {
true => {
a.alloc();
let r = a.alloc();
a.alloc()
}
false => {
a.alloc();
let r = a.alloc();
break a.alloc();
}
};
return;
};
}
fn panic_after_init_by_match_with_guard(a: &Allocator, b: bool) {
a.alloc();
let p = a.alloc();
let q = match a.alloc() {
_ if b => {
a.alloc();
let r = a.alloc();
a.alloc()
}
_ => {
a.alloc();
let r = a.alloc();
a.alloc()
},
};
}
fn panic_after_init_by_match_with_bindings_and_guard(a: &Allocator, b: bool) {
a.alloc();
let p = a.alloc();
let q = match a.alloc() {
_x if b => {
a.alloc();
let r = a.alloc();
a.alloc()
}
_x => {
a.alloc();
let r = a.alloc();
a.alloc()
},
};
}
fn panic_after_init_by_match_with_ref_bindings_and_guard(a: &Allocator, b: bool) {
a.alloc();
let p = a.alloc();
let q = match a.alloc() {
ref _x if b => {
a.alloc();
let r = a.alloc();
a.alloc()
}
ref _x => {
a.alloc();
let r = a.alloc();
a.alloc()
},
};
}
fn panic_after_init_by_break_if(a: &Allocator, b: bool) {
a.alloc();
let p = a.alloc();
let q = loop {
let r = a.alloc();
break if b {
let s = a.alloc();
a.alloc()
} else {
a.alloc()
};
};
}
fn run_test<F>(mut f: F, name: &'static str)
where F: FnMut(&Allocator)
{
let first_alloc = Allocator::new(usize::MAX);
let first_alloc = Allocator::new(usize::MAX, name);
f(&first_alloc);
for failing_op in 1..first_alloc.cur_ops.get()+1 {
let alloc = Allocator::new(failing_op);
let alloc = Allocator::new(failing_op, name);
let alloc = &alloc;
let f = panic::AssertUnwindSafe(&mut f);
let result = panic::catch_unwind(move || {
@ -360,77 +417,91 @@ fn run_test<F>(mut f: F)
}
}
fn run_test_nopanic<F>(mut f: F)
fn run_test_nopanic<F>(mut f: F, name: &'static str)
where F: FnMut(&Allocator)
{
let first_alloc = Allocator::new(usize::MAX);
let first_alloc = Allocator::new(usize::MAX, name);
f(&first_alloc);
}
macro_rules! run_test {
($e:expr) => { run_test($e, stringify!($e)); }
}
fn main() {
run_test(|a| dynamic_init(a, false));
run_test(|a| dynamic_init(a, true));
run_test(|a| dynamic_drop(a, false));
run_test(|a| dynamic_drop(a, true));
run_test!(|a| dynamic_init(a, false));
run_test!(|a| dynamic_init(a, true));
run_test!(|a| dynamic_drop(a, false));
run_test!(|a| dynamic_drop(a, true));
run_test(|a| assignment2(a, false, false));
run_test(|a| assignment2(a, false, true));
run_test(|a| assignment2(a, true, false));
run_test(|a| assignment2(a, true, true));
run_test!(|a| assignment2(a, false, false));
run_test!(|a| assignment2(a, false, true));
run_test!(|a| assignment2(a, true, false));
run_test!(|a| assignment2(a, true, true));
run_test(|a| assignment1(a, false));
run_test(|a| assignment1(a, true));
run_test!(|a| assignment1(a, false));
run_test!(|a| assignment1(a, true));
run_test(|a| array_simple(a));
run_test(|a| vec_simple(a));
run_test(|a| vec_unreachable(a));
run_test!(|a| array_simple(a));
run_test!(|a| vec_simple(a));
run_test!(|a| vec_unreachable(a));
run_test(|a| struct_dynamic_drop(a, false, false, false));
run_test(|a| struct_dynamic_drop(a, false, false, true));
run_test(|a| struct_dynamic_drop(a, false, true, false));
run_test(|a| struct_dynamic_drop(a, false, true, true));
run_test(|a| struct_dynamic_drop(a, true, false, false));
run_test(|a| struct_dynamic_drop(a, true, false, true));
run_test(|a| struct_dynamic_drop(a, true, true, false));
run_test(|a| struct_dynamic_drop(a, true, true, true));
run_test!(|a| struct_dynamic_drop(a, false, false, false));
run_test!(|a| struct_dynamic_drop(a, false, false, true));
run_test!(|a| struct_dynamic_drop(a, false, true, false));
run_test!(|a| struct_dynamic_drop(a, false, true, true));
run_test!(|a| struct_dynamic_drop(a, true, false, false));
run_test!(|a| struct_dynamic_drop(a, true, false, true));
run_test!(|a| struct_dynamic_drop(a, true, true, false));
run_test!(|a| struct_dynamic_drop(a, true, true, true));
run_test(|a| field_assignment(a, false));
run_test(|a| field_assignment(a, true));
run_test!(|a| field_assignment(a, false));
run_test!(|a| field_assignment(a, true));
run_test(|a| generator(a, 0));
run_test(|a| generator(a, 1));
run_test(|a| generator(a, 2));
run_test(|a| generator(a, 3));
run_test!(|a| generator(a, 0));
run_test!(|a| generator(a, 1));
run_test!(|a| generator(a, 2));
run_test!(|a| generator(a, 3));
run_test(|a| mixed_drop_and_nondrop(a));
run_test!(|a| mixed_drop_and_nondrop(a));
run_test(|a| slice_pattern_first(a));
run_test(|a| slice_pattern_middle(a));
run_test(|a| slice_pattern_two(a));
run_test(|a| slice_pattern_last(a));
run_test(|a| slice_pattern_one_of(a, 0));
run_test(|a| slice_pattern_one_of(a, 1));
run_test(|a| slice_pattern_one_of(a, 2));
run_test(|a| slice_pattern_one_of(a, 3));
run_test!(|a| slice_pattern_first(a));
run_test!(|a| slice_pattern_middle(a));
run_test!(|a| slice_pattern_two(a));
run_test!(|a| slice_pattern_last(a));
run_test!(|a| slice_pattern_one_of(a, 0));
run_test!(|a| slice_pattern_one_of(a, 1));
run_test!(|a| slice_pattern_one_of(a, 2));
run_test!(|a| slice_pattern_one_of(a, 3));
run_test(|a| subslice_pattern_from_end(a, true));
run_test(|a| subslice_pattern_from_end(a, false));
run_test(|a| subslice_pattern_from_end_with_drop(a, true, true));
run_test(|a| subslice_pattern_from_end_with_drop(a, true, false));
run_test(|a| subslice_pattern_from_end_with_drop(a, false, true));
run_test(|a| subslice_pattern_from_end_with_drop(a, false, false));
run_test(|a| slice_pattern_reassign(a));
run_test(|a| subslice_pattern_reassign(a));
run_test!(|a| subslice_pattern_from_end(a, true));
run_test!(|a| subslice_pattern_from_end(a, false));
run_test!(|a| subslice_pattern_from_end_with_drop(a, true, true));
run_test!(|a| subslice_pattern_from_end_with_drop(a, true, false));
run_test!(|a| subslice_pattern_from_end_with_drop(a, false, true));
run_test!(|a| subslice_pattern_from_end_with_drop(a, false, false));
run_test!(|a| slice_pattern_reassign(a));
run_test!(|a| subslice_pattern_reassign(a));
run_test(|a| {
run_test!(|a| {
panic_after_return(a);
});
run_test(|a| {
run_test!(|a| {
panic_after_return_expr(a);
});
run_test(|a| panic_after_init(a));
run_test(|a| panic_after_init_temp(a));
run_test(|a| panic_after_init_by_loop(a));
run_test!(|a| panic_after_init(a));
run_test!(|a| panic_after_init_temp(a));
run_test!(|a| panic_after_init_by_loop(a));
run_test!(|a| panic_after_init_by_match(a, false));
run_test!(|a| panic_after_init_by_match(a, true));
run_test!(|a| panic_after_init_by_match_with_guard(a, false));
run_test!(|a| panic_after_init_by_match_with_guard(a, true));
run_test!(|a| panic_after_init_by_match_with_bindings_and_guard(a, false));
run_test!(|a| panic_after_init_by_match_with_bindings_and_guard(a, true));
run_test!(|a| panic_after_init_by_match_with_ref_bindings_and_guard(a, false));
run_test!(|a| panic_after_init_by_match_with_ref_bindings_and_guard(a, true));
run_test!(|a| panic_after_init_by_break_if(a, false));
run_test!(|a| panic_after_init_by_break_if(a, true));
run_test_nopanic(|a| union1(a));
run_test_nopanic(|a| union1(a), "|a| union1(a)");
}