//! Codegen vtables and vtable accesses.
//!
//! See `rustc_codegen_ssa/src/meth.rs` for reference.
use crate::constant::data_id_for_alloc_id;
use crate::prelude::*;

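/// Memory flags used for all vtable loads: aligned, non-trapping, and read-only.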
pub(crate) fn vtable_memflags() -> MemFlags {
    let mut flags = MemFlags::trusted(); // A vtable access is always aligned and will never trap.
    flags.set_readonly(); // A vtable is always read-only.
    flags
}
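
/// Load the `drop_in_place` function pointer from a vtable.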
pub(crate) fn drop_fn_of_obj(fx: &mut FunctionCx<'_, '_, '_>, vtable: Value) -> Value {
    let usize_size = fx.layout_of(fx.tcx.types.usize).size.bytes() as usize;
    fx.bcx.ins().load(
        fx.pointer_type,
        vtable_memflags(),
        vtable,
        (ty::COMMON_VTABLE_ENTRIES_DROPINPLACE * usize_size) as i32,
    )
}
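
/// Load the size of the object from its vtable.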
pub(crate) fn size_of_obj(fx: &mut FunctionCx<'_, '_, '_>, vtable: Value) -> Value {
    let usize_size = fx.layout_of(fx.tcx.types.usize).size.bytes() as usize;
    fx.bcx.ins().load(
        fx.pointer_type,
        vtable_memflags(),
        vtable,
        (ty::COMMON_VTABLE_ENTRIES_SIZE * usize_size) as i32,
    )
}
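
/// Load the minimum alignment of the object from its vtable.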
pub(crate) fn min_align_of_obj(fx: &mut FunctionCx<'_, '_, '_>, vtable: Value) -> Value {
    let usize_size = fx.layout_of(fx.tcx.types.usize).size.bytes() as usize;
    fx.bcx.ins().load(
        fx.pointer_type,
        vtable_memflags(),
        vtable,
        (ty::COMMON_VTABLE_ENTRIES_ALIGN * usize_size) as i32,
    )
}
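
/// Decompose a trait object into its data pointer and vtable and load the
/// function pointer stored at vtable index `idx`.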
pub(crate) fn get_ptr_and_method_ref<'tcx>(
    fx: &mut FunctionCx<'_, '_, 'tcx>,
    mut arg: CValue<'tcx>,
    idx: usize,
) -> (Pointer, Value) {
    let (ptr, vtable) = 'block: {
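        // If the argument is passed as a single scalar, walk through any
        // `DispatchFromDyn` wrappers (each layer has exactly one non-1-ZST
        // field) until a raw pointer or reference type is reached.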
        if let Abi::Scalar(_) = arg.layout().abi {
            while !arg.layout().ty.is_unsafe_ptr() && !arg.layout().ty.is_ref() {
                let (idx, _) = arg
                    .layout()
                    .non_1zst_field(fx)
                    .expect("not exactly one non-1-ZST field in a `DispatchFromDyn` type");
                arg = arg.value_field(fx, FieldIdx::new(idx));
            }
        }

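        // A reference to a `dyn*` object: the pointee itself stores the data
        // pointer and the vtable as a (pointer, vtable) pair.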
        if let ty::Ref(_, ty, _) = arg.layout().ty.kind() {
            if ty.is_dyn_star() {
                let inner_layout = fx.layout_of(arg.layout().ty.builtin_deref(true).unwrap().ty);
                let dyn_star = CPlace::for_ptr(Pointer::new(arg.load_scalar(fx)), inner_layout);
                let ptr = dyn_star.place_field(fx, FieldIdx::new(0)).to_ptr();
                let vtable =
                    dyn_star.place_field(fx, FieldIdx::new(1)).to_cvalue(fx).load_scalar(fx);
                break 'block (ptr, vtable);
            }
        }

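        // Otherwise `arg` is a wide pointer (`&dyn Trait`, `Box<dyn Trait>`, ...):
        // its two components are the data pointer and the vtable pointer.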
        if let Abi::ScalarPair(_, _) = arg.layout().abi {
            let (ptr, vtable) = arg.load_scalar_pair(fx);
            (Pointer::new(ptr), vtable)
        } else {
            let (ptr, vtable) = arg.try_to_ptr().unwrap();
            (ptr, vtable.unwrap())
        }
    };

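    // Each vtable entry is one pointer-sized word; load the function pointer
    // stored at word index `idx`.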
    let usize_size = fx.layout_of(fx.tcx.types.usize).size.bytes();
    let func_ref = fx.bcx.ins().load(
        fx.pointer_type,
        vtable_memflags(),
        vtable,
        (idx * usize_size as usize) as i32,
    );
    (ptr, func_ref)
}
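
/// Get a pointer to the vtable of `ty` for the given trait, emitted as
/// read-only static data.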
pub(crate) fn get_vtable<'tcx>(
    fx: &mut FunctionCx<'_, '_, 'tcx>,
    ty: Ty<'tcx>,
    trait_ref: Option<ty::PolyExistentialTraitRef<'tcx>>,
) -> Value {
    let alloc_id = fx.tcx.vtable_allocation((ty, trait_ref));
    let data_id =
        data_id_for_alloc_id(&mut fx.constants_cx, &mut *fx.module, alloc_id, Mutability::Not);
    let local_data_id = fx.module.declare_data_in_func(data_id, &mut fx.bcx.func);
    if fx.clif_comments.enabled() {
        fx.add_comment(local_data_id, format!("vtable: {:?}", alloc_id));
    }
    fx.bcx.ins().global_value(fx.pointer_type, local_data_id)
}