Auto merge of #59780 - RalfJung:miri-unsized, r=oli-obk

Miri: unsized locals and by-value dyn traits

r? @oli-obk
Cc @eddyb

Fixes https://github.com/rust-lang/miri/issues/449
bors 2019-04-11 15:44:22 +00:00
commit 3de0106789
6 changed files with 192 additions and 153 deletions
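For context, here is a minimal sketch (not part of the commit) of the kind of program this change lets Miri execute, assuming a nightly toolchain with the `unsized_locals` feature gate:

```rust
#![feature(unsized_locals)]

use std::fmt::Display;

fn main() {
    let boxed: Box<dyn Display> = Box::new(42);
    // Moving `*boxed` creates a local of type `dyn Display`. Its size is
    // only known at runtime (via the vtable), so the interpreter cannot
    // allocate backing storage for the local before its first write.
    let local = *boxed;
    println!("{}", &local);
}
```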

View file

@@ -108,34 +108,51 @@ pub enum StackPopCleanup {
/// State of a local variable including a memoized layout
#[derive(Clone, PartialEq, Eq)]
pub struct LocalState<'tcx, Tag=(), Id=AllocId> {
pub state: LocalValue<Tag, Id>,
pub value: LocalValue<Tag, Id>,
/// Don't modify if `Some`; this is only used to avoid computing the layout twice
pub layout: Cell<Option<TyLayout<'tcx>>>,
}
/// State of a local variable
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
/// Current value of a local variable
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub enum LocalValue<Tag=(), Id=AllocId> {
/// This local is not currently alive, and cannot be used at all.
Dead,
// Mostly for convenience, we re-use the `Operand` type here.
// This is an optimization over just always having a pointer here;
// we can thus avoid doing an allocation when the local just stores
// immediate values *and* never has its address taken.
/// This local is alive but not yet initialized. It can be written to
/// but not read from, and its address cannot be taken. Locals get
/// initialized on their first write because, for unsized locals, we do
/// not know their size before that.
Uninitialized,
/// A normal, live local.
/// Mostly for convenience, we re-use the `Operand` type here.
/// This is an optimization over just always having a pointer here;
/// we can thus avoid doing an allocation when the local just stores
/// immediate values *and* never has its address taken.
Live(Operand<Tag, Id>),
}
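The three states above form a small lifecycle: `Dead` → `Uninitialized` (on `StorageLive`) → `Live` (on first write) → `Dead` (on `StorageDead`). A self-contained toy model of the access rules, with hypothetical names and none of the interpreter's real types:

```rust
// Toy three-state local, mirroring LocalValue's Dead/Uninitialized/Live.
#[derive(Clone, Debug, PartialEq)]
enum ToyLocal<V> {
    Dead,
    Uninitialized,
    Live(V),
}

impl<V> ToyLocal<V> {
    fn read(&self) -> Result<&V, &'static str> {
        match self {
            ToyLocal::Live(v) => Ok(v),
            ToyLocal::Uninitialized => Err("read from never-written local"),
            ToyLocal::Dead => Err("read from dead local"),
        }
    }

    fn write(&mut self, v: V) -> Result<(), &'static str> {
        match self {
            ToyLocal::Dead => Err("write to dead local"),
            // First write initializes; later writes overwrite. In the real
            // interpreter this is also the point where an unsized local's
            // size finally becomes known.
            _ => {
                *self = ToyLocal::Live(v);
                Ok(())
            }
        }
    }
}

fn main() {
    let mut l = ToyLocal::Uninitialized;
    assert!(l.read().is_err());
    l.write(5).unwrap();
    assert_eq!(l.read(), Ok(&5));
}
```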
impl<'tcx, Tag> LocalState<'tcx, Tag> {
pub fn access(&self) -> EvalResult<'tcx, &Operand<Tag>> {
match self.state {
impl<'tcx, Tag: Copy + 'static> LocalState<'tcx, Tag> {
pub fn access(&self) -> EvalResult<'tcx, Operand<Tag>> {
match self.value {
LocalValue::Dead => err!(DeadLocal),
LocalValue::Live(ref val) => Ok(val),
LocalValue::Uninitialized =>
bug!("The type checker should prevent reading from a never-written local"),
LocalValue::Live(val) => Ok(val),
}
}
pub fn access_mut(&mut self) -> EvalResult<'tcx, &mut Operand<Tag>> {
match self.state {
/// Overwrite the local. If the local can be overwritten in place, return a reference
/// to do so; otherwise return the `MemPlace` to consult instead.
pub fn access_mut(
&mut self,
) -> EvalResult<'tcx, Result<&mut LocalValue<Tag>, MemPlace<Tag>>> {
match self.value {
LocalValue::Dead => err!(DeadLocal),
LocalValue::Live(ref mut val) => Ok(val),
LocalValue::Live(Operand::Indirect(mplace)) => Ok(Err(mplace)),
ref mut local @ LocalValue::Live(Operand::Immediate(_)) |
ref mut local @ LocalValue::Uninitialized => {
Ok(Ok(local))
}
}
}
}
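The nested return type of `access_mut` is easy to misread: the outer `EvalResult` fails for dead locals, while the inner `Result` distinguishes "overwrite the `LocalValue` in place" from "the value lives in memory, write through this `MemPlace`". A hypothetical stand-alone sketch of a caller, with stand-in types (the real caller is the `write_immediate` hunk further down):

```rust
// Stand-ins for the interpreter's types, for illustration only.
type EvalResult<T> = Result<T, String>;

#[derive(Debug)]
struct MemPlace {
    addr: usize,
}

fn store(slot: EvalResult<Result<&mut i32, MemPlace>>, new: i32) -> EvalResult<()> {
    match slot? {
        // Inner Ok: we hold a direct reference, so update in place.
        Ok(val) => *val = new,
        // Inner Err: the local is backed by memory; a real interpreter
        // would perform the store through this place instead.
        Err(place) => println!("would write {} to {:?}", new, place),
    }
    Ok(())
}

fn main() {
    let mut v = 1;
    store(Ok(Ok(&mut v)), 42).unwrap();
    assert_eq!(v, 42);
    store(Ok(Err(MemPlace { addr: 0x1000 })), 7).unwrap();
}
```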
@@ -327,6 +344,7 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tc
let local_ty = self.monomorphize_with_substs(local_ty, frame.instance.substs);
self.layout_of(local_ty)
})?;
// Layouts of locals are requested a lot, so we cache them.
frame.locals[local].layout.set(Some(layout));
Ok(layout)
}
@@ -473,19 +491,15 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tc
// don't allocate at all for trivial constants
if mir.local_decls.len() > 1 {
// We put some marker immediate into the locals that we later want to initialize.
// This can be anything except for LocalValue::Dead -- because *that* is the
// value we use for things that we know are initially dead.
// Locals are initially uninitialized.
let dummy = LocalState {
state: LocalValue::Live(Operand::Immediate(Immediate::Scalar(
ScalarMaybeUndef::Undef,
))),
value: LocalValue::Uninitialized,
layout: Cell::new(None),
};
let mut locals = IndexVec::from_elem(dummy, &mir.local_decls);
// Return place is handled specially by the `eval_place` functions, and the
// entry in `locals` should never be used. Make it dead, to be sure.
locals[mir::RETURN_PLACE].state = LocalValue::Dead;
locals[mir::RETURN_PLACE].value = LocalValue::Dead;
// Now mark those locals as dead that we do not want to initialize
match self.tcx.describe_def(instance.def_id()) {
// statics and constants don't have `Storage*` statements, no need to look for them
@@ -498,7 +512,7 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tc
match stmt.kind {
StorageLive(local) |
StorageDead(local) => {
locals[local].state = LocalValue::Dead;
locals[local].value = LocalValue::Dead;
}
_ => {}
}
@@ -506,21 +520,6 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tc
}
},
}
// Finally, properly initialize all those that still have the dummy value
for (idx, local) in locals.iter_enumerated_mut() {
match local.state {
LocalValue::Live(_) => {
// This needs to be properly initialized.
let ty = self.monomorphize(mir.local_decls[idx].ty)?;
let layout = self.layout_of(ty)?;
local.state = LocalValue::Live(self.uninit_operand(layout)?);
local.layout = Cell::new(Some(layout));
}
LocalValue::Dead => {
// Nothing to do
}
}
}
// done
self.frame_mut().locals = locals;
}
@@ -555,7 +554,7 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tc
}
// Deallocate all locals that are backed by an allocation.
for local in frame.locals {
self.deallocate_local(local.state)?;
self.deallocate_local(local.value)?;
}
// Validate the return value. Do this after deallocating so that we catch dangling
// references.
@@ -603,10 +602,9 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tc
assert!(local != mir::RETURN_PLACE, "Cannot make return place live");
trace!("{:?} is now live", local);
let layout = self.layout_of_local(self.frame(), local, None)?;
let init = LocalValue::Live(self.uninit_operand(layout)?);
let local_val = LocalValue::Uninitialized;
// StorageLive *always* kills the value that's currently stored
Ok(mem::replace(&mut self.frame_mut().locals[local].state, init))
Ok(mem::replace(&mut self.frame_mut().locals[local].value, local_val))
}
/// Returns the old value of the local.
@@ -615,7 +613,7 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tc
assert!(local != mir::RETURN_PLACE, "Cannot make return place dead");
trace!("{:?} is now dead", local);
mem::replace(&mut self.frame_mut().locals[local].state, LocalValue::Dead)
mem::replace(&mut self.frame_mut().locals[local].value, LocalValue::Dead)
}
pub(super) fn deallocate_local(
@@ -668,31 +666,31 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tc
}
write!(msg, ":").unwrap();
match self.stack[frame].locals[local].access() {
Err(err) => {
if let InterpError::DeadLocal = err.kind {
write!(msg, " is dead").unwrap();
} else {
panic!("Failed to access local: {:?}", err);
}
}
Ok(Operand::Indirect(mplace)) => {
let (ptr, align) = mplace.to_scalar_ptr_align();
match ptr {
match self.stack[frame].locals[local].value {
LocalValue::Dead => write!(msg, " is dead").unwrap(),
LocalValue::Uninitialized => write!(msg, " is uninitialized").unwrap(),
LocalValue::Live(Operand::Indirect(mplace)) => {
match mplace.ptr {
Scalar::Ptr(ptr) => {
write!(msg, " by align({}) ref:", align.bytes()).unwrap();
write!(msg, " by align({}){} ref:",
mplace.align.bytes(),
match mplace.meta {
Some(meta) => format!(" meta({:?})", meta),
None => String::new()
}
).unwrap();
allocs.push(ptr.alloc_id);
}
ptr => write!(msg, " by integral ref: {:?}", ptr).unwrap(),
}
}
Ok(Operand::Immediate(Immediate::Scalar(val))) => {
LocalValue::Live(Operand::Immediate(Immediate::Scalar(val))) => {
write!(msg, " {:?}", val).unwrap();
if let ScalarMaybeUndef::Scalar(Scalar::Ptr(ptr)) = val {
allocs.push(ptr.alloc_id);
}
}
Ok(Operand::Immediate(Immediate::ScalarPair(val1, val2))) => {
LocalValue::Live(Operand::Immediate(Immediate::ScalarPair(val1, val2))) => {
write!(msg, " ({:?}, {:?})", val1, val2).unwrap();
if let ScalarMaybeUndef::Scalar(Scalar::Ptr(ptr)) = val1 {
allocs.push(ptr.alloc_id);

View file

@@ -14,7 +14,7 @@ use rustc::mir::interpret::{
};
use super::{
InterpretCx, Machine,
MemPlace, MPlaceTy, PlaceTy, Place, MemoryKind,
MemPlace, MPlaceTy, PlaceTy, Place,
};
pub use rustc::mir::interpret::ScalarMaybeUndef;
@@ -373,33 +373,6 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M>
Ok(str)
}
pub fn uninit_operand(
&mut self,
layout: TyLayout<'tcx>
) -> EvalResult<'tcx, Operand<M::PointerTag>> {
// This decides which types we will use the Immediate optimization for, and hence should
// match what `try_read_immediate` and `eval_place_to_op` support.
if layout.is_zst() {
return Ok(Operand::Immediate(Immediate::Scalar(Scalar::zst().into())));
}
Ok(match layout.abi {
layout::Abi::Scalar(..) =>
Operand::Immediate(Immediate::Scalar(ScalarMaybeUndef::Undef)),
layout::Abi::ScalarPair(..) =>
Operand::Immediate(Immediate::ScalarPair(
ScalarMaybeUndef::Undef,
ScalarMaybeUndef::Undef,
)),
_ => {
trace!("Forcing allocation for local of type {:?}", layout.ty);
Operand::Indirect(
*self.allocate(layout, MemoryKind::Stack)
)
}
})
}
/// Projection functions
pub fn operand_field(
&self,
@@ -486,8 +459,13 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M>
layout: Option<TyLayout<'tcx>>,
) -> EvalResult<'tcx, OpTy<'tcx, M::PointerTag>> {
assert_ne!(local, mir::RETURN_PLACE);
let op = *frame.locals[local].access()?;
let layout = self.layout_of_local(frame, local, layout)?;
let op = if layout.is_zst() {
// Do not read from ZSTs; they might not be initialized
Operand::Immediate(Immediate::Scalar(Scalar::zst().into()))
} else {
frame.locals[local].access()?
};
Ok(OpTy { op, layout })
}
@@ -502,7 +480,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M>
Operand::Indirect(mplace)
}
Place::Local { frame, local } =>
*self.stack[frame].locals[local].access()?
*self.access_local(&self.stack[frame], local, None)?
};
Ok(OpTy { op, layout: place.layout })
}

View file

@@ -15,7 +15,7 @@ use rustc::ty::TypeFoldable;
use super::{
GlobalId, AllocId, Allocation, Scalar, EvalResult, Pointer, PointerArithmetic,
InterpretCx, Machine, AllocMap, AllocationExtra,
RawConst, Immediate, ImmTy, ScalarMaybeUndef, Operand, OpTy, MemoryKind
RawConst, Immediate, ImmTy, ScalarMaybeUndef, Operand, OpTy, MemoryKind, LocalValue
};
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
@@ -639,6 +639,7 @@ where
None => return err!(InvalidNullPointerUsage),
},
Base(PlaceBase::Local(local)) => PlaceTy {
// This works even for dead/uninitialized locals; we check further when writing
place: Place::Local {
frame: self.cur_frame(),
local,
@@ -714,16 +715,19 @@ where
// but not factored as a separate function.
let mplace = match dest.place {
Place::Local { frame, local } => {
match *self.stack[frame].locals[local].access_mut()? {
Operand::Immediate(ref mut dest_val) => {
// Yay, we can just change the local directly.
*dest_val = src;
match self.stack[frame].locals[local].access_mut()? {
Ok(local) => {
// Local can be updated in-place.
*local = LocalValue::Live(Operand::Immediate(src));
return Ok(());
},
Operand::Indirect(mplace) => mplace, // already in memory
}
Err(mplace) => {
// The local is in memory, go on below.
mplace
}
}
},
Place::Ptr(mplace) => mplace, // already in memory
Place::Ptr(mplace) => mplace, // already referring to memory
};
let dest = MPlaceTy { mplace, layout: dest.layout };
@@ -822,8 +826,6 @@ where
src: OpTy<'tcx, M::PointerTag>,
dest: PlaceTy<'tcx, M::PointerTag>,
) -> EvalResult<'tcx> {
debug_assert!(!src.layout.is_unsized() && !dest.layout.is_unsized(),
"Cannot copy unsized data");
// We do NOT compare the types for equality, because well-typed code can
// actually "transmute" `&mut T` to `&T` in an assignment without a cast.
assert!(src.layout.details == dest.layout.details,
@@ -832,6 +834,7 @@ where
// Let us see if the layout is simple so we take a shortcut, avoid force_allocation.
let src = match self.try_read_immediate(src)? {
Ok(src_val) => {
assert!(!src.layout.is_unsized(), "cannot have unsized immediates");
// Yay, we got a value that we can write directly.
// FIXME: Add a check to make sure that if `src` is indirect,
// it does not overlap with `dest`.
@@ -842,13 +845,19 @@ where
// Slow path, this does not fit into an immediate. Just memcpy.
trace!("copy_op: {:?} <- {:?}: {}", *dest, src, dest.layout.ty);
let dest = self.force_allocation(dest)?;
let (src_ptr, src_align) = src.to_scalar_ptr_align();
let (dest_ptr, dest_align) = dest.to_scalar_ptr_align();
// This interprets `src.meta` with the `dest` local's layout, if an unsized local
// is being initialized!
let (dest, size) = self.force_allocation_maybe_sized(dest, src.meta)?;
let size = size.unwrap_or_else(|| {
assert!(!dest.layout.is_unsized(),
"Cannot copy into already initialized unsized place");
dest.layout.size
});
assert_eq!(src.meta, dest.meta, "Can only copy between equally-sized instances");
self.memory.copy(
src_ptr, src_align,
dest_ptr, dest_align,
dest.layout.size,
src.ptr, src.align,
dest.ptr, dest.align,
size,
/*nonoverlapping*/ true,
)?;
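(On the "we do NOT compare the types for equality" comment near the top of `copy_op`: a plain assignment really can change the type from `&mut T` to `&T` without any cast, which is why only the layout details are compared. A minimal stand-alone illustration:)

```rust
fn main() {
    let mut x = 1;
    let m: &mut i32 = &mut x;
    // No cast anywhere: `&mut i32` coerces to `&i32` in this assignment,
    // so source and destination types differ while their layouts agree.
    let r: &i32 = m;
    assert_eq!(*r, 1);
}
```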
@@ -866,11 +875,13 @@ where
// Fast path: Just use normal `copy_op`
return self.copy_op(src, dest);
}
// We still require the sizes to match
debug_assert!(!src.layout.is_unsized() && !dest.layout.is_unsized(),
"Cannot copy unsized data");
// We still require the sizes to match.
assert!(src.layout.size == dest.layout.size,
"Size mismatch when transmuting!\nsrc: {:#?}\ndest: {:#?}", src, dest);
// Unsized copies rely on interpreting `src.meta` with `dest.layout`; we
// want to avoid that here.
assert!(!src.layout.is_unsized() && !dest.layout.is_unsized(),
"Cannot transmute unsized data");
// The hard case is `ScalarPair`. `src` is already read from memory in this case,
// using `src.layout` to figure out which bytes to use for the 1st and 2nd field.
@@ -898,39 +909,70 @@ where
/// If the place currently refers to a local that doesn't yet have a matching allocation,
/// create such an allocation.
/// This is essentially `force_to_memplace`.
pub fn force_allocation(
///
/// This supports unsized types and returns the computed size to avoid some
/// redundant computation when copying; use `force_allocation` for a simpler, sized-only
/// version.
pub fn force_allocation_maybe_sized(
&mut self,
place: PlaceTy<'tcx, M::PointerTag>,
) -> EvalResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
let mplace = match place.place {
meta: Option<Scalar<M::PointerTag>>,
) -> EvalResult<'tcx, (MPlaceTy<'tcx, M::PointerTag>, Option<Size>)> {
let (mplace, size) = match place.place {
Place::Local { frame, local } => {
match *self.stack[frame].locals[local].access()? {
Operand::Indirect(mplace) => mplace,
Operand::Immediate(value) => {
match self.stack[frame].locals[local].access_mut()? {
Ok(local_val) => {
// We need to make an allocation.
// FIXME: Consider not doing anything for a ZST, and just returning
// a fake pointer? Are we even called for ZST?
// We cannot hold on to the reference `local_val` while allocating,
// but we can hold on to the value in there.
let old_val =
if let LocalValue::Live(Operand::Immediate(value)) = *local_val {
Some(value)
} else {
None
};
// We need the layout of the local. We can NOT use the layout we got:
// that might, e.g., be an inner field of a struct with `Scalar` layout
// whose alignment differs from that of the outer field.
// We also need to support unsized types, and hence cannot use `allocate`.
let local_layout = self.layout_of_local(&self.stack[frame], local, None)?;
let ptr = self.allocate(local_layout, MemoryKind::Stack);
// We don't have to validate as we can assume the local
// was already valid for its type.
self.write_immediate_to_mplace_no_validate(value, ptr)?;
let mplace = ptr.mplace;
// Update the local
*self.stack[frame].locals[local].access_mut()? =
Operand::Indirect(mplace);
mplace
let (size, align) = self.size_and_align_of(meta, local_layout)?
.expect("Cannot allocate for non-dyn-sized type");
let ptr = self.memory.allocate(size, align, MemoryKind::Stack);
let ptr = M::tag_new_allocation(self, ptr, MemoryKind::Stack);
let mplace = MemPlace { ptr: ptr.into(), align, meta };
if let Some(value) = old_val {
// Preserve old value.
// We don't have to validate as we can assume the local
// was already valid for its type.
let mplace = MPlaceTy { mplace, layout: local_layout };
self.write_immediate_to_mplace_no_validate(value, mplace)?;
}
// Now we can call `access_mut` again, asserting it goes well,
// and actually overwrite things.
*self.stack[frame].locals[local].access_mut().unwrap().unwrap() =
LocalValue::Live(Operand::Indirect(mplace));
(mplace, Some(size))
}
Err(mplace) => (mplace, None), // this already was an indirect local
}
}
Place::Ptr(mplace) => mplace
Place::Ptr(mplace) => (mplace, None)
};
// Return with the original layout, so that the caller can go on
Ok(MPlaceTy { mplace, layout: place.layout })
Ok((MPlaceTy { mplace, layout: place.layout }, size))
}
#[inline(always)]
pub fn force_allocation(
&mut self,
place: PlaceTy<'tcx, M::PointerTag>,
) -> EvalResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
Ok(self.force_allocation_maybe_sized(place, None)?.0)
}
pub fn allocate(
@@ -938,15 +980,9 @@ where
layout: TyLayout<'tcx>,
kind: MemoryKind<M::MemoryKinds>,
) -> MPlaceTy<'tcx, M::PointerTag> {
if layout.is_unsized() {
assert!(self.tcx.features().unsized_locals, "cannot alloc memory for unsized type");
// FIXME: What should we do here? We should definitely also tag!
MPlaceTy::dangling(layout, self)
} else {
let ptr = self.memory.allocate(layout.size, layout.align.abi, kind);
let ptr = M::tag_new_allocation(self, ptr, kind);
MPlaceTy::from_aligned_ptr(ptr, layout)
}
let ptr = self.memory.allocate(layout.size, layout.align.abi, kind);
let ptr = M::tag_new_allocation(self, ptr, kind);
MPlaceTy::from_aligned_ptr(ptr, layout)
}
pub fn write_discriminant_index(

View file

@@ -114,10 +114,11 @@ macro_rules! impl_snapshot_for {
fn snapshot(&self, __ctx: &'a Ctx) -> Self::Item {
match *self {
$(
$enum_name::$variant $( ( $(ref $field),* ) )? =>
$enum_name::$variant $( ( $(ref $field),* ) )? => {
$enum_name::$variant $(
( $( __impl_snapshot_field!($field, __ctx $(, $delegate)?) ),* ),
( $( __impl_snapshot_field!($field, __ctx $(, $delegate)?) ),* )
)?
}
)*
}
}
@@ -250,11 +251,13 @@ impl_snapshot_for!(enum Operand {
impl_stable_hash_for!(enum crate::interpret::LocalValue {
Dead,
Uninitialized,
Live(x),
});
impl_snapshot_for!(enum LocalValue {
Live(v),
Dead,
Uninitialized,
Live(v),
});
impl<'a, Ctx> Snapshot<'a, Ctx> for Relocations
@@ -360,13 +363,13 @@ impl<'a, 'tcx, Ctx> Snapshot<'a, Ctx> for &'a LocalState<'tcx>
type Item = LocalValue<(), AllocIdSnapshot<'a>>;
fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {
let LocalState { state, layout: _ } = self;
state.snapshot(ctx)
let LocalState { value, layout: _ } = self;
value.snapshot(ctx)
}
}
impl_stable_hash_for!(struct LocalState<'tcx> {
state,
value,
layout -> _,
});

View file

@@ -315,12 +315,13 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M>
);
// Figure out how to pass which arguments.
// We have two iterators: Where the arguments come from,
// and where they go to.
// The Rust ABI is special: ZSTs get skipped.
let rust_abi = match caller_abi {
Abi::Rust | Abi::RustCall => true,
_ => false
};
// We have two iterators: Where the arguments come from,
// and where they go to.
// For where they come from: If the ABI is RustCall, we untuple the
// last incoming argument. These two iterators do not have the same type,
@@ -368,7 +369,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M>
}
// Now we should have no more caller args
if caller_iter.next().is_some() {
trace!("Caller has too many args over");
trace!("Caller has passed too many args");
return err!(FunctionArgCountMismatch);
}
// Don't forget to check the return type!
@@ -406,9 +407,24 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M>
}
// cannot use the shim here, because that will only result in infinite recursion
ty::InstanceDef::Virtual(_, idx) => {
let mut args = args.to_vec();
let ptr_size = self.pointer_size();
let ptr = self.deref_operand(args[0])?;
let vtable = ptr.vtable()?;
// We have to implement all "object safe receivers". Currently we
// support built-in pointers (&, &mut, Box) as well as unsized-self. We do
// not yet support custom self types.
// Also see librustc_codegen_llvm/abi.rs and librustc_codegen_llvm/mir/block.rs.
let receiver_place = match args[0].layout.ty.builtin_deref(true) {
Some(_) => {
// Built-in pointer.
self.deref_operand(args[0])?
}
None => {
// Unsized self.
args[0].to_mem_place()
}
};
// Find and consult vtable
let vtable = receiver_place.vtable()?;
self.memory.check_align(vtable.into(), self.tcx.data_layout.pointer_align.abi)?;
let fn_ptr = self.memory.get(vtable.alloc_id)?.read_ptr_sized(
self,
@@ -416,15 +432,16 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M>
)?.to_ptr()?;
let instance = self.memory.get_fn(fn_ptr)?;
// We have to patch the self argument, in particular get the layout
// expected by the actual function. Cannot just use "field 0" due to
// Box<self>.
let mut args = args.to_vec();
let pointee = args[0].layout.ty.builtin_deref(true).unwrap().ty;
let fake_fat_ptr_ty = self.tcx.mk_mut_ptr(pointee);
args[0] = OpTy::from(ImmTy { // strip vtable
layout: self.layout_of(fake_fat_ptr_ty)?.field(self, 0)?,
imm: Immediate::Scalar(ptr.ptr.into())
// `*mut receiver_place.layout.ty` is almost the layout that we
// want for args[0]: We have to project to field 0 because we want
// a thin pointer.
assert!(receiver_place.layout.is_unsized());
let receiver_ptr_ty = self.tcx.mk_mut_ptr(receiver_place.layout.ty);
let this_receiver_ptr = self.layout_of(receiver_ptr_ty)?.field(self, 0)?;
// Adjust receiver argument.
args[0] = OpTy::from(ImmTy {
layout: this_receiver_ptr,
imm: Immediate::Scalar(receiver_place.ptr.into())
});
trace!("Patched self operand to {:#?}", args[0]);
// recurse with concrete function
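For a concrete picture of what this Virtual-call path handles, here is a plain example (not taken from the commit or the linked issue) of a by-value dyn call; the closure behind the `Box` is an unsized value that gets passed by value to the virtual function:

```rust
fn main() {
    let f: Box<dyn FnOnce() -> i32> = Box::new(|| 42);
    // Calling a boxed FnOnce consumes the unsized closure by value; the
    // interpreter patches the receiver operand to a thin pointer with the
    // layout the concrete function expects before recursing into it.
    assert_eq!(f(), 42);
}
```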

View file

@@ -3,7 +3,7 @@ use rustc::ty::{self, Ty};
use rustc::ty::layout::{Size, Align, LayoutOf};
use rustc::mir::interpret::{Scalar, Pointer, EvalResult, PointerArithmetic};
use super::{InterpretCx, Machine, MemoryKind};
use super::{InterpretCx, InterpError, Machine, MemoryKind};
impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M> {
/// Creates a dynamic vtable for the given type and vtable origin. This is used only for
@@ -76,7 +76,14 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M>
for (i, method) in methods.iter().enumerate() {
if let Some((def_id, substs)) = *method {
let instance = self.resolve(def_id, substs)?;
// resolve for vtable: insert shims where needed
let substs = self.subst_and_normalize_erasing_regions(substs)?;
let instance = ty::Instance::resolve_for_vtable(
*self.tcx,
self.param_env,
def_id,
substs,
).ok_or_else(|| InterpError::TooGeneric)?;
let fn_ptr = self.memory.create_fn_alloc(instance).with_default_tag();
let method_ptr = vtable.offset(ptr_size * (3 + i as u64), self)?;
self.memory