Auto merge of #86291 - crlf0710:trait_vtbl_refactor, r=bjorn3
Refactor vtable codegen. This refactors the codegen of vtables for the miri interpreter and the LLVM and Cranelift codegen backends. This is preparation for the implementation of the trait upcasting feature. cc #65991 Note that aside from code reorganization, there is an internal behavior change here: InstanceDef::Virtual's index now includes the three metadata slots, so the first method is at index 3. cc `@RalfJung` `@bjorn3`
This commit is contained in:
commit
2336406b38
|
@ -4,10 +4,7 @@
|
|||
// FIXME dedup this logic between miri, cg_llvm and cg_clif
|
||||
|
||||
use crate::prelude::*;
|
||||
|
||||
const DROP_FN_INDEX: usize = 0;
|
||||
const SIZE_INDEX: usize = 1;
|
||||
const ALIGN_INDEX: usize = 2;
|
||||
use ty::VtblEntry;
|
||||
|
||||
fn vtable_memflags() -> MemFlags {
|
||||
let mut flags = MemFlags::trusted(); // A vtable access is always aligned and will never trap.
|
||||
|
@ -21,7 +18,7 @@ pub(crate) fn drop_fn_of_obj(fx: &mut FunctionCx<'_, '_, '_>, vtable: Value) ->
|
|||
pointer_ty(fx.tcx),
|
||||
vtable_memflags(),
|
||||
vtable,
|
||||
(DROP_FN_INDEX * usize_size) as i32,
|
||||
(ty::COMMON_VTABLE_ENTRIES_DROPINPLACE * usize_size) as i32,
|
||||
)
|
||||
}
|
||||
|
||||
|
@ -31,7 +28,7 @@ pub(crate) fn size_of_obj(fx: &mut FunctionCx<'_, '_, '_>, vtable: Value) -> Val
|
|||
pointer_ty(fx.tcx),
|
||||
vtable_memflags(),
|
||||
vtable,
|
||||
(SIZE_INDEX * usize_size) as i32,
|
||||
(ty::COMMON_VTABLE_ENTRIES_SIZE * usize_size) as i32,
|
||||
)
|
||||
}
|
||||
|
||||
|
@ -41,7 +38,7 @@ pub(crate) fn min_align_of_obj(fx: &mut FunctionCx<'_, '_, '_>, vtable: Value) -
|
|||
pointer_ty(fx.tcx),
|
||||
vtable_memflags(),
|
||||
vtable,
|
||||
(ALIGN_INDEX * usize_size) as i32,
|
||||
(ty::COMMON_VTABLE_ENTRIES_SIZE * usize_size) as i32,
|
||||
)
|
||||
}
|
||||
|
||||
|
@ -62,7 +59,7 @@ pub(crate) fn get_ptr_and_method_ref<'tcx>(
|
|||
pointer_ty(fx.tcx),
|
||||
vtable_memflags(),
|
||||
vtable,
|
||||
((idx + 3) * usize_size as usize) as i32,
|
||||
(idx * usize_size as usize) as i32,
|
||||
);
|
||||
(ptr, func_ref)
|
||||
}
|
||||
|
@ -98,42 +95,49 @@ fn build_vtable<'tcx>(
|
|||
Instance::resolve_drop_in_place(tcx, layout.ty).polymorphize(fx.tcx),
|
||||
);
|
||||
|
||||
let mut components: Vec<_> = vec![Some(drop_in_place_fn), None, None];
|
||||
|
||||
let methods_root;
|
||||
let methods = if let Some(trait_ref) = trait_ref {
|
||||
methods_root = tcx.vtable_methods(trait_ref.with_self_ty(tcx, layout.ty));
|
||||
methods_root.iter()
|
||||
let vtable_entries = if let Some(trait_ref) = trait_ref {
|
||||
tcx.vtable_entries(trait_ref.with_self_ty(tcx, layout.ty))
|
||||
} else {
|
||||
(&[]).iter()
|
||||
ty::COMMON_VTABLE_ENTRIES
|
||||
};
|
||||
let methods = methods.cloned().map(|opt_mth| {
|
||||
opt_mth.map(|(def_id, substs)| {
|
||||
import_function(
|
||||
tcx,
|
||||
fx.module,
|
||||
Instance::resolve_for_vtable(tcx, ParamEnv::reveal_all(), def_id, substs)
|
||||
.unwrap()
|
||||
.polymorphize(fx.tcx),
|
||||
)
|
||||
})
|
||||
});
|
||||
components.extend(methods);
|
||||
|
||||
let mut data_ctx = DataContext::new();
|
||||
let mut data = ::std::iter::repeat(0u8)
|
||||
.take(components.len() * usize_size)
|
||||
.take(vtable_entries.len() * usize_size)
|
||||
.collect::<Vec<u8>>()
|
||||
.into_boxed_slice();
|
||||
|
||||
write_usize(fx.tcx, &mut data, SIZE_INDEX, layout.size.bytes());
|
||||
write_usize(fx.tcx, &mut data, ALIGN_INDEX, layout.align.abi.bytes());
|
||||
for (idx, entry) in vtable_entries.iter().enumerate() {
|
||||
match entry {
|
||||
VtblEntry::MetadataSize => {
|
||||
write_usize(fx.tcx, &mut data, idx, layout.size.bytes());
|
||||
}
|
||||
VtblEntry::MetadataAlign => {
|
||||
write_usize(fx.tcx, &mut data, idx, layout.align.abi.bytes());
|
||||
}
|
||||
VtblEntry::MetadataDropInPlace | VtblEntry::Vacant | VtblEntry::Method(_, _) => {}
|
||||
}
|
||||
}
|
||||
data_ctx.define(data);
|
||||
|
||||
for (i, component) in components.into_iter().enumerate() {
|
||||
if let Some(func_id) = component {
|
||||
let func_ref = fx.module.declare_func_in_data(func_id, &mut data_ctx);
|
||||
data_ctx.write_function_addr((i * usize_size) as u32, func_ref);
|
||||
for (idx, entry) in vtable_entries.iter().enumerate() {
|
||||
match entry {
|
||||
VtblEntry::MetadataDropInPlace => {
|
||||
let func_ref = fx.module.declare_func_in_data(drop_in_place_fn, &mut data_ctx);
|
||||
data_ctx.write_function_addr((idx * usize_size) as u32, func_ref);
|
||||
}
|
||||
VtblEntry::Method(def_id, substs) => {
|
||||
let func_id = import_function(
|
||||
tcx,
|
||||
fx.module,
|
||||
Instance::resolve_for_vtable(tcx, ParamEnv::reveal_all(), *def_id, substs)
|
||||
.unwrap()
|
||||
.polymorphize(fx.tcx),
|
||||
);
|
||||
let func_ref = fx.module.declare_func_in_data(func_id, &mut data_ctx);
|
||||
data_ctx.write_function_addr((idx * usize_size) as u32, func_ref);
|
||||
}
|
||||
VtblEntry::MetadataSize | VtblEntry::MetadataAlign | VtblEntry::Vacant => {}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -23,7 +23,12 @@ pub fn size_and_align_of_dst<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
|
|||
ty::Dynamic(..) => {
|
||||
// load size/align from vtable
|
||||
let vtable = info.unwrap();
|
||||
(meth::SIZE.get_usize(bx, vtable), meth::ALIGN.get_usize(bx, vtable))
|
||||
(
|
||||
meth::VirtualIndex::from_index(ty::COMMON_VTABLE_ENTRIES_SIZE)
|
||||
.get_usize(bx, vtable),
|
||||
meth::VirtualIndex::from_index(ty::COMMON_VTABLE_ENTRIES_ALIGN)
|
||||
.get_usize(bx, vtable),
|
||||
)
|
||||
}
|
||||
ty::Slice(_) | ty::Str => {
|
||||
let unit = layout.field(bx, 0);
|
||||
|
|
|
@ -1,18 +1,14 @@
|
|||
use crate::traits::*;
|
||||
|
||||
use rustc_middle::ty::{self, Instance, Ty};
|
||||
use rustc_middle::ty::{self, Instance, Ty, VtblEntry, COMMON_VTABLE_ENTRIES};
|
||||
use rustc_target::abi::call::FnAbi;
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub struct VirtualIndex(u64);
|
||||
|
||||
pub const DESTRUCTOR: VirtualIndex = VirtualIndex(0);
|
||||
pub const SIZE: VirtualIndex = VirtualIndex(1);
|
||||
pub const ALIGN: VirtualIndex = VirtualIndex(2);
|
||||
|
||||
impl<'a, 'tcx> VirtualIndex {
|
||||
pub fn from_index(index: usize) -> Self {
|
||||
VirtualIndex(index as u64 + 3)
|
||||
VirtualIndex(index as u64)
|
||||
}
|
||||
|
||||
pub fn get_fn<Bx: BuilderMethods<'a, 'tcx>>(
|
||||
|
@ -77,43 +73,38 @@ pub fn get_vtable<'tcx, Cx: CodegenMethods<'tcx>>(
|
|||
// Not in the cache; build it.
|
||||
let nullptr = cx.const_null(cx.type_i8p_ext(cx.data_layout().instruction_address_space));
|
||||
|
||||
let methods_root;
|
||||
let methods = if let Some(trait_ref) = trait_ref {
|
||||
methods_root = tcx.vtable_methods(trait_ref.with_self_ty(tcx, ty));
|
||||
methods_root.iter()
|
||||
let vtable_entries = if let Some(trait_ref) = trait_ref {
|
||||
tcx.vtable_entries(trait_ref.with_self_ty(tcx, ty))
|
||||
} else {
|
||||
(&[]).iter()
|
||||
COMMON_VTABLE_ENTRIES
|
||||
};
|
||||
|
||||
let methods = methods.cloned().map(|opt_mth| {
|
||||
opt_mth.map_or(nullptr, |(def_id, substs)| {
|
||||
cx.get_fn_addr(
|
||||
ty::Instance::resolve_for_vtable(
|
||||
cx.tcx(),
|
||||
ty::ParamEnv::reveal_all(),
|
||||
def_id,
|
||||
substs,
|
||||
)
|
||||
.unwrap()
|
||||
.polymorphize(cx.tcx()),
|
||||
)
|
||||
})
|
||||
});
|
||||
|
||||
let layout = cx.layout_of(ty);
|
||||
// /////////////////////////////////////////////////////////////////////////////////////////////
|
||||
// If you touch this code, be sure to also make the corresponding changes to
|
||||
// `get_vtable` in `rust_mir/interpret/traits.rs`.
|
||||
// /////////////////////////////////////////////////////////////////////////////////////////////
|
||||
let components: Vec<_> = [
|
||||
cx.get_fn_addr(Instance::resolve_drop_in_place(cx.tcx(), ty)),
|
||||
cx.const_usize(layout.size.bytes()),
|
||||
cx.const_usize(layout.align.abi.bytes()),
|
||||
]
|
||||
.iter()
|
||||
.cloned()
|
||||
.chain(methods)
|
||||
.collect();
|
||||
let components: Vec<_> = vtable_entries
|
||||
.iter()
|
||||
.map(|entry| match entry {
|
||||
VtblEntry::MetadataDropInPlace => {
|
||||
cx.get_fn_addr(Instance::resolve_drop_in_place(cx.tcx(), ty))
|
||||
}
|
||||
VtblEntry::MetadataSize => cx.const_usize(layout.size.bytes()),
|
||||
VtblEntry::MetadataAlign => cx.const_usize(layout.align.abi.bytes()),
|
||||
VtblEntry::Vacant => nullptr,
|
||||
VtblEntry::Method(def_id, substs) => cx.get_fn_addr(
|
||||
ty::Instance::resolve_for_vtable(
|
||||
cx.tcx(),
|
||||
ty::ParamEnv::reveal_all(),
|
||||
*def_id,
|
||||
substs,
|
||||
)
|
||||
.unwrap()
|
||||
.polymorphize(cx.tcx()),
|
||||
),
|
||||
})
|
||||
.collect();
|
||||
|
||||
let vtable_const = cx.const_struct(&components, false);
|
||||
let align = cx.data_layout().pointer_align.abi;
|
||||
|
|
|
@ -332,7 +332,11 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
|
|||
let fn_abi = FnAbi::of_instance(&bx, virtual_drop, &[]);
|
||||
let vtable = args[1];
|
||||
args = &args[..1];
|
||||
(meth::DESTRUCTOR.get_fn(&mut bx, vtable, &fn_abi), fn_abi)
|
||||
(
|
||||
meth::VirtualIndex::from_index(ty::COMMON_VTABLE_ENTRIES_DROPINPLACE)
|
||||
.get_fn(&mut bx, vtable, &fn_abi),
|
||||
fn_abi,
|
||||
)
|
||||
}
|
||||
_ => (bx.get_fn_addr(drop_fn), FnAbi::of_instance(&bx, drop_fn, &[])),
|
||||
};
|
||||
|
|
|
@ -959,9 +959,9 @@ rustc_queries! {
|
|||
desc { |tcx| "checking if item has mir available: `{}`", tcx.def_path_str(key) }
|
||||
}
|
||||
|
||||
query vtable_methods(key: ty::PolyTraitRef<'tcx>)
|
||||
-> &'tcx [Option<(DefId, SubstsRef<'tcx>)>] {
|
||||
desc { |tcx| "finding all methods for trait {}", tcx.def_path_str(key.def_id()) }
|
||||
query vtable_entries(key: ty::PolyTraitRef<'tcx>)
|
||||
-> &'tcx [ty::VtblEntry<'tcx>] {
|
||||
desc { |tcx| "finding all vtable entries for trait {}", tcx.def_path_str(key.def_id()) }
|
||||
}
|
||||
|
||||
query codegen_fulfill_obligation(
|
||||
|
|
|
@ -2009,3 +2009,19 @@ impl<'tcx> fmt::Debug for SymbolName<'tcx> {
|
|||
fmt::Display::fmt(&self.name, fmt)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, HashStable)]
|
||||
pub enum VtblEntry<'tcx> {
|
||||
MetadataDropInPlace,
|
||||
MetadataSize,
|
||||
MetadataAlign,
|
||||
Vacant,
|
||||
Method(DefId, SubstsRef<'tcx>),
|
||||
}
|
||||
|
||||
pub const COMMON_VTABLE_ENTRIES: &[VtblEntry<'_>] =
|
||||
&[VtblEntry::MetadataDropInPlace, VtblEntry::MetadataSize, VtblEntry::MetadataAlign];
|
||||
|
||||
pub const COMMON_VTABLE_ENTRIES_DROPINPLACE: usize = 0;
|
||||
pub const COMMON_VTABLE_ENTRIES_SIZE: usize = 1;
|
||||
pub const COMMON_VTABLE_ENTRIES_ALIGN: usize = 2;
|
||||
|
|
|
@ -1,7 +1,10 @@
|
|||
use std::convert::TryFrom;
|
||||
|
||||
use rustc_middle::mir::interpret::{InterpResult, Pointer, PointerArithmetic, Scalar};
|
||||
use rustc_middle::ty::{self, Instance, Ty};
|
||||
use rustc_middle::ty::{
|
||||
self, Instance, Ty, VtblEntry, COMMON_VTABLE_ENTRIES, COMMON_VTABLE_ENTRIES_ALIGN,
|
||||
COMMON_VTABLE_ENTRIES_DROPINPLACE, COMMON_VTABLE_ENTRIES_SIZE,
|
||||
};
|
||||
use rustc_target::abi::{Align, LayoutOf, Size};
|
||||
|
||||
use super::util::ensure_monomorphic_enough;
|
||||
|
@ -35,13 +38,13 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
|
|||
return Ok(vtable);
|
||||
}
|
||||
|
||||
let methods = if let Some(poly_trait_ref) = poly_trait_ref {
|
||||
let vtable_entries = if let Some(poly_trait_ref) = poly_trait_ref {
|
||||
let trait_ref = poly_trait_ref.with_self_ty(*self.tcx, ty);
|
||||
let trait_ref = self.tcx.erase_regions(trait_ref);
|
||||
|
||||
self.tcx.vtable_methods(trait_ref)
|
||||
self.tcx.vtable_entries(trait_ref)
|
||||
} else {
|
||||
&[]
|
||||
COMMON_VTABLE_ENTRIES
|
||||
};
|
||||
|
||||
let layout = self.layout_of(ty)?;
|
||||
|
@ -56,38 +59,41 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
|
|||
// If you touch this code, be sure to also make the corresponding changes to
|
||||
// `get_vtable` in `rust_codegen_llvm/meth.rs`.
|
||||
// /////////////////////////////////////////////////////////////////////////////////////////
|
||||
let vtable_size = ptr_size * u64::try_from(methods.len()).unwrap().checked_add(3).unwrap();
|
||||
let vtable_size = ptr_size * u64::try_from(vtable_entries.len()).unwrap();
|
||||
let vtable = self.memory.allocate(vtable_size, ptr_align, MemoryKind::Vtable);
|
||||
|
||||
let drop = Instance::resolve_drop_in_place(tcx, ty);
|
||||
let drop = self.memory.create_fn_alloc(FnVal::Instance(drop));
|
||||
|
||||
// Prepare the fn ptrs we will write into the vtable later.
|
||||
let fn_ptrs = methods
|
||||
.iter()
|
||||
.enumerate() // remember the original position
|
||||
.filter_map(|(i, method)| {
|
||||
if let Some((def_id, substs)) = method { Some((i, def_id, substs)) } else { None }
|
||||
})
|
||||
.map(|(i, def_id, substs)| {
|
||||
let instance =
|
||||
ty::Instance::resolve_for_vtable(tcx, self.param_env, *def_id, substs)
|
||||
.ok_or_else(|| err_inval!(TooGeneric))?;
|
||||
Ok((i, self.memory.create_fn_alloc(FnVal::Instance(instance))))
|
||||
})
|
||||
.collect::<InterpResult<'tcx, Vec<(usize, Pointer<M::PointerTag>)>>>()?;
|
||||
|
||||
// No need to do any alignment checks on the memory accesses below, because we know the
|
||||
// allocation is correctly aligned as we created it above. Also we're only offsetting by
|
||||
// multiples of `ptr_align`, which means that it will stay aligned to `ptr_align`.
|
||||
let scalars = vtable_entries
|
||||
.iter()
|
||||
.map(|entry| -> InterpResult<'tcx, _> {
|
||||
match entry {
|
||||
VtblEntry::MetadataDropInPlace => Ok(Some(drop.into())),
|
||||
VtblEntry::MetadataSize => Ok(Some(Scalar::from_uint(size, ptr_size).into())),
|
||||
VtblEntry::MetadataAlign => Ok(Some(Scalar::from_uint(align, ptr_size).into())),
|
||||
VtblEntry::Vacant => Ok(None),
|
||||
VtblEntry::Method(def_id, substs) => {
|
||||
// Prepare the fn ptr we write into the vtable.
|
||||
let instance =
|
||||
ty::Instance::resolve_for_vtable(tcx, self.param_env, *def_id, substs)
|
||||
.ok_or_else(|| err_inval!(TooGeneric))?;
|
||||
let fn_ptr = self.memory.create_fn_alloc(FnVal::Instance(instance));
|
||||
Ok(Some(fn_ptr.into()))
|
||||
}
|
||||
}
|
||||
})
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
let mut vtable_alloc =
|
||||
self.memory.get_mut(vtable.into(), vtable_size, ptr_align)?.expect("not a ZST");
|
||||
vtable_alloc.write_ptr_sized(ptr_size * 0, drop.into())?;
|
||||
vtable_alloc.write_ptr_sized(ptr_size * 1, Scalar::from_uint(size, ptr_size).into())?;
|
||||
vtable_alloc.write_ptr_sized(ptr_size * 2, Scalar::from_uint(align, ptr_size).into())?;
|
||||
|
||||
for (i, fn_ptr) in fn_ptrs.into_iter() {
|
||||
vtable_alloc.write_ptr_sized(ptr_size * (3 + i as u64), fn_ptr.into())?;
|
||||
for (idx, scalar) in scalars.into_iter().enumerate() {
|
||||
if let Some(scalar) = scalar {
|
||||
let idx: u64 = u64::try_from(idx).unwrap();
|
||||
vtable_alloc.write_ptr_sized(ptr_size * idx, scalar)?;
|
||||
}
|
||||
}
|
||||
|
||||
M::after_static_mem_initialized(self, vtable, vtable_size)?;
|
||||
|
@ -99,16 +105,15 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
|
|||
}
|
||||
|
||||
/// Resolves the function at the specified slot in the provided
|
||||
/// vtable. An index of '0' corresponds to the first method
|
||||
/// declared in the trait of the provided vtable.
|
||||
/// vtable. Currently an index of '3' (`COMMON_VTABLE_ENTRIES.len()`)
|
||||
/// corresponds to the first method declared in the trait of the provided vtable.
|
||||
pub fn get_vtable_slot(
|
||||
&self,
|
||||
vtable: Scalar<M::PointerTag>,
|
||||
idx: u64,
|
||||
) -> InterpResult<'tcx, FnVal<'tcx, M::ExtraFnVal>> {
|
||||
let ptr_size = self.pointer_size();
|
||||
// Skip over the 'drop_ptr', 'size', and 'align' fields.
|
||||
let vtable_slot = vtable.ptr_offset(ptr_size * idx.checked_add(3).unwrap(), self)?;
|
||||
let vtable_slot = vtable.ptr_offset(ptr_size * idx, self)?;
|
||||
let vtable_slot = self
|
||||
.memory
|
||||
.get(vtable_slot, ptr_size, self.tcx.data_layout.pointer_align.abi)?
|
||||
|
@ -122,12 +127,21 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
|
|||
&self,
|
||||
vtable: Scalar<M::PointerTag>,
|
||||
) -> InterpResult<'tcx, (ty::Instance<'tcx>, Ty<'tcx>)> {
|
||||
let pointer_size = self.pointer_size();
|
||||
// We don't care about the pointee type; we just want a pointer.
|
||||
let vtable = self
|
||||
.memory
|
||||
.get(vtable, self.tcx.data_layout.pointer_size, self.tcx.data_layout.pointer_align.abi)?
|
||||
.get(
|
||||
vtable,
|
||||
pointer_size * u64::try_from(COMMON_VTABLE_ENTRIES.len()).unwrap(),
|
||||
self.tcx.data_layout.pointer_align.abi,
|
||||
)?
|
||||
.expect("cannot be a ZST");
|
||||
let drop_fn = vtable.read_ptr_sized(Size::ZERO)?.check_init()?;
|
||||
let drop_fn = vtable
|
||||
.read_ptr_sized(
|
||||
pointer_size * u64::try_from(COMMON_VTABLE_ENTRIES_DROPINPLACE).unwrap(),
|
||||
)?
|
||||
.check_init()?;
|
||||
// We *need* an instance here, no other kind of function value, to be able
|
||||
// to determine the type.
|
||||
let drop_instance = self.memory.get_fn(drop_fn)?.as_instance()?;
|
||||
|
@ -153,11 +167,19 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
|
|||
// the size, and the align (which we read below).
|
||||
let vtable = self
|
||||
.memory
|
||||
.get(vtable, 3 * pointer_size, self.tcx.data_layout.pointer_align.abi)?
|
||||
.get(
|
||||
vtable,
|
||||
pointer_size * u64::try_from(COMMON_VTABLE_ENTRIES.len()).unwrap(),
|
||||
self.tcx.data_layout.pointer_align.abi,
|
||||
)?
|
||||
.expect("cannot be a ZST");
|
||||
let size = vtable.read_ptr_sized(pointer_size)?.check_init()?;
|
||||
let size = vtable
|
||||
.read_ptr_sized(pointer_size * u64::try_from(COMMON_VTABLE_ENTRIES_SIZE).unwrap())?
|
||||
.check_init()?;
|
||||
let size = u64::try_from(self.force_bits(size, pointer_size)?).unwrap();
|
||||
let align = vtable.read_ptr_sized(pointer_size * 2)?.check_init()?;
|
||||
let align = vtable
|
||||
.read_ptr_sized(pointer_size * u64::try_from(COMMON_VTABLE_ENTRIES_ALIGN).unwrap())?
|
||||
.check_init()?;
|
||||
let align = u64::try_from(self.force_bits(align, pointer_size)?).unwrap();
|
||||
let align = Align::from_bytes(align).map_err(|e| err_ub!(InvalidVtableAlignment(e)))?;
|
||||
|
||||
|
|
|
@ -196,7 +196,7 @@ use rustc_middle::mir::{self, Local, Location};
|
|||
use rustc_middle::ty::adjustment::{CustomCoerceUnsized, PointerCast};
|
||||
use rustc_middle::ty::print::with_no_trimmed_paths;
|
||||
use rustc_middle::ty::subst::{GenericArgKind, InternalSubsts};
|
||||
use rustc_middle::ty::{self, GenericParamDefKind, Instance, Ty, TyCtxt, TypeFoldable};
|
||||
use rustc_middle::ty::{self, GenericParamDefKind, Instance, Ty, TyCtxt, TypeFoldable, VtblEntry};
|
||||
use rustc_middle::{middle::codegen_fn_attrs::CodegenFnAttrFlags, mir::visit::TyContext};
|
||||
use rustc_session::config::EntryFnType;
|
||||
use rustc_session::lint::builtin::LARGE_ASSIGNMENTS;
|
||||
|
@ -1091,21 +1091,22 @@ fn create_mono_items_for_vtable_methods<'tcx>(
|
|||
assert!(!poly_trait_ref.has_escaping_bound_vars());
|
||||
|
||||
// Walk all methods of the trait, including those of its supertraits
|
||||
let methods = tcx.vtable_methods(poly_trait_ref);
|
||||
let methods = methods
|
||||
let entries = tcx.vtable_entries(poly_trait_ref);
|
||||
let methods = entries
|
||||
.iter()
|
||||
.cloned()
|
||||
.filter_map(|method| method)
|
||||
.map(|(def_id, substs)| {
|
||||
ty::Instance::resolve_for_vtable(
|
||||
.filter_map(|entry| match entry {
|
||||
VtblEntry::MetadataDropInPlace
|
||||
| VtblEntry::MetadataSize
|
||||
| VtblEntry::MetadataAlign
|
||||
| VtblEntry::Vacant => None,
|
||||
VtblEntry::Method(def_id, substs) => ty::Instance::resolve_for_vtable(
|
||||
tcx,
|
||||
ty::ParamEnv::reveal_all(),
|
||||
def_id,
|
||||
*def_id,
|
||||
substs,
|
||||
)
|
||||
.unwrap()
|
||||
.filter(|instance| should_codegen_locally(tcx, instance)),
|
||||
})
|
||||
.filter(|&instance| should_codegen_locally(tcx, &instance))
|
||||
.map(|item| create_fn_mono_item(tcx, item, source));
|
||||
output.extend(methods);
|
||||
}
|
||||
|
|
|
@ -31,7 +31,8 @@ use rustc_hir::def_id::DefId;
|
|||
use rustc_middle::ty::fold::TypeFoldable;
|
||||
use rustc_middle::ty::subst::{InternalSubsts, SubstsRef};
|
||||
use rustc_middle::ty::{
|
||||
self, GenericParamDefKind, ParamEnv, ToPredicate, Ty, TyCtxt, WithConstness,
|
||||
self, GenericParamDefKind, ParamEnv, ToPredicate, Ty, TyCtxt, VtblEntry, WithConstness,
|
||||
COMMON_VTABLE_ENTRIES,
|
||||
};
|
||||
use rustc_span::Span;
|
||||
|
||||
|
@ -455,59 +456,89 @@ fn subst_and_check_impossible_predicates<'tcx>(
|
|||
|
||||
/// Given a trait `trait_ref`, iterates the vtable entries
|
||||
/// that come from `trait_ref`, including its supertraits.
|
||||
fn vtable_methods<'tcx>(
|
||||
fn vtable_entries<'tcx>(
|
||||
tcx: TyCtxt<'tcx>,
|
||||
trait_ref: ty::PolyTraitRef<'tcx>,
|
||||
) -> &'tcx [Option<(DefId, SubstsRef<'tcx>)>] {
|
||||
debug!("vtable_methods({:?})", trait_ref);
|
||||
) -> &'tcx [VtblEntry<'tcx>] {
|
||||
debug!("vtable_entries({:?})", trait_ref);
|
||||
|
||||
tcx.arena.alloc_from_iter(supertraits(tcx, trait_ref).flat_map(move |trait_ref| {
|
||||
let trait_methods = tcx
|
||||
.associated_items(trait_ref.def_id())
|
||||
.in_definition_order()
|
||||
.filter(|item| item.kind == ty::AssocKind::Fn);
|
||||
let entries = COMMON_VTABLE_ENTRIES.iter().cloned().chain(
|
||||
supertraits(tcx, trait_ref).flat_map(move |trait_ref| {
|
||||
let trait_methods = tcx
|
||||
.associated_items(trait_ref.def_id())
|
||||
.in_definition_order()
|
||||
.filter(|item| item.kind == ty::AssocKind::Fn);
|
||||
|
||||
// Now list each method's DefId and InternalSubsts (for within its trait).
|
||||
// If the method can never be called from this object, produce None.
|
||||
trait_methods.map(move |trait_method| {
|
||||
debug!("vtable_methods: trait_method={:?}", trait_method);
|
||||
let def_id = trait_method.def_id;
|
||||
// Now list each method's DefId and InternalSubsts (for within its trait).
|
||||
// If the method can never be called from this object, produce `Vacant`.
|
||||
trait_methods.map(move |trait_method| {
|
||||
debug!("vtable_entries: trait_method={:?}", trait_method);
|
||||
let def_id = trait_method.def_id;
|
||||
|
||||
// Some methods cannot be called on an object; skip those.
|
||||
if !is_vtable_safe_method(tcx, trait_ref.def_id(), &trait_method) {
|
||||
debug!("vtable_methods: not vtable safe");
|
||||
return None;
|
||||
}
|
||||
// Some methods cannot be called on an object; skip those.
|
||||
if !is_vtable_safe_method(tcx, trait_ref.def_id(), &trait_method) {
|
||||
debug!("vtable_entries: not vtable safe");
|
||||
return VtblEntry::Vacant;
|
||||
}
|
||||
|
||||
// The method may have some early-bound lifetimes; add regions for those.
|
||||
let substs = trait_ref.map_bound(|trait_ref| {
|
||||
InternalSubsts::for_item(tcx, def_id, |param, _| match param.kind {
|
||||
GenericParamDefKind::Lifetime => tcx.lifetimes.re_erased.into(),
|
||||
GenericParamDefKind::Type { .. } | GenericParamDefKind::Const { .. } => {
|
||||
trait_ref.substs[param.index as usize]
|
||||
}
|
||||
})
|
||||
});
|
||||
// The method may have some early-bound lifetimes; add regions for those.
|
||||
let substs = trait_ref.map_bound(|trait_ref| {
|
||||
InternalSubsts::for_item(tcx, def_id, |param, _| match param.kind {
|
||||
GenericParamDefKind::Lifetime => tcx.lifetimes.re_erased.into(),
|
||||
GenericParamDefKind::Type { .. } | GenericParamDefKind::Const { .. } => {
|
||||
trait_ref.substs[param.index as usize]
|
||||
}
|
||||
})
|
||||
});
|
||||
|
||||
// The trait type may have higher-ranked lifetimes in it;
|
||||
// erase them if they appear, so that we get the type
|
||||
// at some particular call site.
|
||||
let substs =
|
||||
tcx.normalize_erasing_late_bound_regions(ty::ParamEnv::reveal_all(), substs);
|
||||
// The trait type may have higher-ranked lifetimes in it;
|
||||
// erase them if they appear, so that we get the type
|
||||
// at some particular call site.
|
||||
let substs =
|
||||
tcx.normalize_erasing_late_bound_regions(ty::ParamEnv::reveal_all(), substs);
|
||||
|
||||
// It's possible that the method relies on where-clauses that
|
||||
// do not hold for this particular set of type parameters.
|
||||
// Note that this method could then never be called, so we
|
||||
// do not want to try and codegen it, in that case (see #23435).
|
||||
let predicates = tcx.predicates_of(def_id).instantiate_own(tcx, substs);
|
||||
if impossible_predicates(tcx, predicates.predicates) {
|
||||
debug!("vtable_methods: predicates do not hold");
|
||||
return None;
|
||||
}
|
||||
// It's possible that the method relies on where-clauses that
|
||||
// do not hold for this particular set of type parameters.
|
||||
// Note that this method could then never be called, so we
|
||||
// do not want to try and codegen it, in that case (see #23435).
|
||||
let predicates = tcx.predicates_of(def_id).instantiate_own(tcx, substs);
|
||||
if impossible_predicates(tcx, predicates.predicates) {
|
||||
debug!("vtable_entries: predicates do not hold");
|
||||
return VtblEntry::Vacant;
|
||||
}
|
||||
|
||||
Some((def_id, substs))
|
||||
})
|
||||
}))
|
||||
VtblEntry::Method(def_id, substs)
|
||||
})
|
||||
}),
|
||||
);
|
||||
|
||||
tcx.arena.alloc_from_iter(entries)
|
||||
}
|
||||
|
||||
/// Find slot base for trait methods within vtable entries of another trait
|
||||
fn vtable_trait_first_method_offset<'tcx>(
|
||||
tcx: TyCtxt<'tcx>,
|
||||
key: (
|
||||
ty::PolyTraitRef<'tcx>, // trait_to_be_found
|
||||
ty::PolyTraitRef<'tcx>, // trait_owning_vtable
|
||||
),
|
||||
) -> usize {
|
||||
let (trait_to_be_found, trait_owning_vtable) = key;
|
||||
|
||||
let mut supertraits = util::supertraits(tcx, trait_owning_vtable);
|
||||
|
||||
// For each of the non-matching predicates that
|
||||
// we pass over, we sum up the set of number of vtable
|
||||
// entries, so that we can compute the offset for the selected
|
||||
// trait.
|
||||
let vtable_base = ty::COMMON_VTABLE_ENTRIES.len()
|
||||
+ supertraits
|
||||
.by_ref()
|
||||
.take_while(|t| *t != trait_to_be_found)
|
||||
.map(|t| util::count_own_vtable_entries(tcx, t))
|
||||
.sum::<usize>();
|
||||
|
||||
vtable_base
|
||||
}
|
||||
|
||||
/// Check whether a `ty` implements given trait(trait_def_id).
|
||||
|
@ -547,7 +578,7 @@ pub fn provide(providers: &mut ty::query::Providers) {
|
|||
specialization_graph_of: specialize::specialization_graph_provider,
|
||||
specializes: specialize::specializes,
|
||||
codegen_fulfill_obligation: codegen::codegen_fulfill_obligation,
|
||||
vtable_methods,
|
||||
vtable_entries,
|
||||
type_implements_trait,
|
||||
subst_and_check_impossible_predicates,
|
||||
mir_abstract_const: |tcx, def_id| {
|
||||
|
|
|
@ -396,19 +396,8 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
|||
let mut nested = vec![];
|
||||
|
||||
let mut supertraits = util::supertraits(tcx, ty::Binder::dummy(object_trait_ref));
|
||||
|
||||
// For each of the non-matching predicates that
|
||||
// we pass over, we sum up the set of number of vtable
|
||||
// entries, so that we can compute the offset for the selected
|
||||
// trait.
|
||||
let vtable_base = supertraits
|
||||
.by_ref()
|
||||
.take(index)
|
||||
.map(|t| super::util::count_own_vtable_entries(tcx, t))
|
||||
.sum();
|
||||
|
||||
let unnormalized_upcast_trait_ref =
|
||||
supertraits.next().expect("supertraits iterator no longer has as many elements");
|
||||
supertraits.nth(index).expect("supertraits iterator no longer has as many elements");
|
||||
|
||||
let upcast_trait_ref = normalize_with_depth_to(
|
||||
self,
|
||||
|
@ -490,6 +479,12 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
|||
}
|
||||
|
||||
debug!(?nested, "object nested obligations");
|
||||
|
||||
let vtable_base = super::super::vtable_trait_first_method_offset(
|
||||
tcx,
|
||||
(unnormalized_upcast_trait_ref, ty::Binder::dummy(object_trait_ref)),
|
||||
);
|
||||
|
||||
Ok(ImplSourceObjectData { upcast_trait_ref, vtable_base, nested })
|
||||
}
|
||||
|
||||
|
|
Loading…
Reference in a new issue