CFI: Use Instance at callsites

We already use `Instance` at declaration sites, when available, to glean
additional information about possible abstractions of the type in use.
This change does the same at callsites, when an `Instance` is available there as well.

The primary purpose of this change is to allow CFI to alter how it
generates type information for indirect calls through `Virtual`
instances.
Matthew Maurer 2024-03-15 19:45:46 +00:00
parent 020bbe46bd
commit 7967915c7b
11 changed files with 105 additions and 54 deletions
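
In outline, the diff threads a trailing `instance: Option<Instance<'tcx>>` parameter through the backend `call`/`invoke`/`callbr` builder methods, and `cfi_type_test` / `kcfi_operand_bundle` then use `typeid_for_instance` / `kcfi_typeid_for_instance` when that instance is present, falling back to the existing `typeid_for_fnabi` / `kcfi_typeid_for_fnabi` path otherwise. Below is a minimal standalone sketch of that selection logic; the `Instance`, `FnAbi`, and typeid functions here are simplified placeholders for illustration, not the real rustc items.

```rust
// Standalone sketch: Instance, FnAbi and the typeid_for_* helpers below are
// illustrative stand-ins for the rustc_middle / rustc_symbol_mangling items
// touched by this commit, not their real definitions.
#[derive(Clone, Copy)]
struct Instance(&'static str);

#[derive(Clone, Copy)]
struct FnAbi(&'static str);

fn typeid_for_instance(instance: &Instance) -> String {
    // An instance-based id can reflect how the callee is reached,
    // e.g. an indirect call through a `Virtual` (vtable) instance.
    format!("typeid(instance = {})", instance.0)
}

fn typeid_for_fnabi(fn_abi: &FnAbi) -> String {
    // Fallback: derive the id from the function signature/ABI alone.
    format!("typeid(fn_abi = {})", fn_abi.0)
}

/// Mirrors the selection added to `cfi_type_test` and `kcfi_operand_bundle`:
/// prefer the callsite's `Instance` when it is known, otherwise use the ABI.
fn select_typeid(fn_abi: &FnAbi, instance: Option<Instance>) -> String {
    match instance {
        Some(instance) => typeid_for_instance(&instance),
        None => typeid_for_fnabi(fn_abi),
    }
}

fn main() {
    let abi = FnAbi("fn(*mut ())");
    // Callsite with no known instance (e.g. a plain function pointer).
    println!("{}", select_typeid(&abi, None));
    // Callsite that resolved a concrete instance, as the drop glue path now passes.
    println!("{}", select_typeid(&abi, Some(Instance("drop_in_place::<T>"))));
}
```

The ABI-based path remains the default: callsites that have no `Instance` simply pass `None`, which is why most hunks below only append a trailing `None` argument.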

View File

@ -541,7 +541,7 @@ fn codegen_inline_asm(
let builtin_unreachable: RValue<'gcc> = unsafe {
std::mem::transmute(builtin_unreachable)
};
-self.call(self.type_void(), None, None, builtin_unreachable, &[], None);
+self.call(self.type_void(), None, None, builtin_unreachable, &[], None, None);
}
// Write results to outputs.

View File

@ -25,7 +25,7 @@
FnAbiError, FnAbiOfHelpers, FnAbiRequest, HasParamEnv, HasTyCtxt, LayoutError, LayoutOfHelpers,
TyAndLayout,
};
-use rustc_middle::ty::{ParamEnv, Ty, TyCtxt};
+use rustc_middle::ty::{ParamEnv, Ty, TyCtxt, Instance};
use rustc_span::def_id::DefId;
use rustc_span::Span;
use rustc_target::abi::{
@ -592,12 +592,13 @@ fn invoke(
then: Block<'gcc>,
catch: Block<'gcc>,
_funclet: Option<&Funclet>,
+instance: Option<Instance<'tcx>>,
) -> RValue<'gcc> {
let try_block = self.current_func().new_block("try");
let current_block = self.block.clone();
self.block = try_block;
-let call = self.call(typ, fn_attrs, None, func, args, None); // TODO(antoyo): use funclet here?
+let call = self.call(typ, fn_attrs, None, func, args, None, instance); // TODO(antoyo): use funclet here?
self.block = current_block;
let return_value =
@ -1667,6 +1668,7 @@ fn call(
func: RValue<'gcc>,
args: &[RValue<'gcc>],
funclet: Option<&Funclet>,
+_instance: Option<Instance<'tcx>>,
) -> RValue<'gcc> {
// FIXME(antoyo): remove when having a proper API.
let gcc_func = unsafe { std::mem::transmute(func) };

View File

@ -133,6 +133,7 @@ fn codegen_intrinsic_call(
func,
&args.iter().map(|arg| arg.immediate()).collect::<Vec<_>>(),
None,
+None,
)
}
sym::likely => self.expect(args[0].immediate(), true),
@ -401,7 +402,7 @@ fn codegen_intrinsic_call(
fn abort(&mut self) {
let func = self.context.get_builtin_function("abort");
let func: RValue<'gcc> = unsafe { std::mem::transmute(func) };
-self.call(self.type_void(), None, None, func, &[], None);
+self.call(self.type_void(), None, None, func, &[], None, None);
}
fn assume(&mut self, value: Self::Value) {
@ -1103,7 +1104,7 @@ fn try_intrinsic<'a, 'b, 'gcc, 'tcx>(
dest: RValue<'gcc>,
) {
if bx.sess().panic_strategy() == PanicStrategy::Abort {
-bx.call(bx.type_void(), None, None, try_func, &[data], None);
+bx.call(bx.type_void(), None, None, try_func, &[data], None, None);
// Return 0 unconditionally from the intrinsic call;
// we can never unwind.
let ret_align = bx.tcx.data_layout.i32_align.abi;
@ -1177,21 +1178,21 @@ fn codegen_gnu_try<'gcc>(
let zero = bx.cx.context.new_rvalue_zero(bx.int_type);
let ptr = bx.cx.context.new_call(None, eh_pointer_builtin, &[zero]);
let catch_ty = bx.type_func(&[bx.type_i8p(), bx.type_i8p()], bx.type_void());
-bx.call(catch_ty, None, None, catch_func, &[data, ptr], None);
+bx.call(catch_ty, None, None, catch_func, &[data, ptr], None, None);
bx.ret(bx.const_i32(1));
// NOTE: the blocks must be filled before adding the try/catch, otherwise gcc will not
// generate a try/catch.
// FIXME(antoyo): add a check in the libgccjit API to prevent this.
bx.switch_to_block(current_block);
-bx.invoke(try_func_ty, None, None, try_func, &[data], then, catch, None);
+bx.invoke(try_func_ty, None, None, try_func, &[data], then, catch, None, None);
});
let func = unsafe { std::mem::transmute(func) };
// Note that no invoke is used here because by definition this function
// can't panic (that's what it's catching).
-let ret = bx.call(llty, None, None, func, &[try_func, data, catch_func], None);
+let ret = bx.call(llty, None, None, func, &[try_func, data, catch_func], None, None);
let i32_align = bx.tcx().data_layout.i32_align.abi;
bx.store(ret, dest, i32_align);
}

View File

@ -466,11 +466,11 @@ pub(crate) fn inline_asm_call<'ll>(
let call = if !labels.is_empty() {
assert!(catch_funclet.is_none());
-bx.callbr(fty, None, None, v, inputs, dest.unwrap(), labels, None)
+bx.callbr(fty, None, None, v, inputs, dest.unwrap(), labels, None, None)
} else if let Some((catch, funclet)) = catch_funclet {
-bx.invoke(fty, None, None, v, inputs, dest.unwrap(), catch, funclet)
+bx.invoke(fty, None, None, v, inputs, dest.unwrap(), catch, funclet, None)
} else {
-bx.call(fty, None, None, v, inputs, None)
+bx.call(fty, None, None, v, inputs, None, None)
};
// Store mark in a metadata node so we can map LLVM errors

View File

@ -19,9 +19,12 @@
use rustc_middle::ty::layout::{
FnAbiError, FnAbiOfHelpers, FnAbiRequest, LayoutError, LayoutOfHelpers, TyAndLayout,
};
-use rustc_middle::ty::{self, Ty, TyCtxt};
+use rustc_middle::ty::{self, Instance, Ty, TyCtxt};
use rustc_span::Span;
-use rustc_symbol_mangling::typeid::{kcfi_typeid_for_fnabi, typeid_for_fnabi, TypeIdOptions};
+use rustc_symbol_mangling::typeid::{
+kcfi_typeid_for_fnabi, kcfi_typeid_for_instance, typeid_for_fnabi, typeid_for_instance,
+TypeIdOptions,
+};
use rustc_target::abi::{self, call::FnAbi, Align, Size, WrappingRange};
use rustc_target::spec::{HasTargetSpec, SanitizerSet, Target};
use smallvec::SmallVec;
@ -221,6 +224,7 @@ fn invoke(
then: &'ll BasicBlock,
catch: &'ll BasicBlock,
funclet: Option<&Funclet<'ll>>,
+instance: Option<Instance<'tcx>>,
) -> &'ll Value {
debug!("invoke {:?} with args ({:?})", llfn, args);
@ -233,10 +237,10 @@ fn invoke(
}
// Emit CFI pointer type membership test
-self.cfi_type_test(fn_attrs, fn_abi, llfn);
+self.cfi_type_test(fn_attrs, fn_abi, instance, llfn);
// Emit KCFI operand bundle
-let kcfi_bundle = self.kcfi_operand_bundle(fn_attrs, fn_abi, llfn);
+let kcfi_bundle = self.kcfi_operand_bundle(fn_attrs, fn_abi, instance, llfn);
let kcfi_bundle = kcfi_bundle.as_ref().map(|b| &*b.raw);
if let Some(kcfi_bundle) = kcfi_bundle {
bundles.push(kcfi_bundle);
@ -1231,6 +1235,7 @@ fn call(
llfn: &'ll Value,
args: &[&'ll Value],
funclet: Option<&Funclet<'ll>>,
+instance: Option<Instance<'tcx>>,
) -> &'ll Value {
debug!("call {:?} with args ({:?})", llfn, args);
@ -1243,10 +1248,10 @@ fn call(
}
// Emit CFI pointer type membership test
-self.cfi_type_test(fn_attrs, fn_abi, llfn);
+self.cfi_type_test(fn_attrs, fn_abi, instance, llfn);
// Emit KCFI operand bundle
-let kcfi_bundle = self.kcfi_operand_bundle(fn_attrs, fn_abi, llfn);
+let kcfi_bundle = self.kcfi_operand_bundle(fn_attrs, fn_abi, instance, llfn);
let kcfi_bundle = kcfi_bundle.as_ref().map(|b| &*b.raw);
if let Some(kcfi_bundle) = kcfi_bundle {
bundles.push(kcfi_bundle);
@ -1468,7 +1473,7 @@ pub fn va_arg(&mut self, list: &'ll Value, ty: &'ll Type) -> &'ll Value {
pub(crate) fn call_intrinsic(&mut self, intrinsic: &str, args: &[&'ll Value]) -> &'ll Value {
let (ty, f) = self.cx.get_intrinsic(intrinsic);
-self.call(ty, None, None, f, args, None)
+self.call(ty, None, None, f, args, None, None)
}
fn call_lifetime_intrinsic(&mut self, intrinsic: &str, ptr: &'ll Value, size: Size) {
@ -1526,7 +1531,7 @@ fn fptoint_sat(&mut self, signed: bool, val: &'ll Value, dest_ty: &'ll Type) ->
format!("llvm.{instr}.sat.i{int_width}.f{float_width}")
};
let f = self.declare_cfn(&name, llvm::UnnamedAddr::No, self.type_func(&[src_ty], dest_ty));
-self.call(self.type_func(&[src_ty], dest_ty), None, None, f, &[val], None)
+self.call(self.type_func(&[src_ty], dest_ty), None, None, f, &[val], None, None)
}
pub(crate) fn landing_pad(
@ -1554,6 +1559,7 @@ pub(crate) fn callbr(
default_dest: &'ll BasicBlock,
indirect_dest: &[&'ll BasicBlock],
funclet: Option<&Funclet<'ll>>,
+instance: Option<Instance<'tcx>>,
) -> &'ll Value {
debug!("invoke {:?} with args ({:?})", llfn, args);
@ -1566,10 +1572,10 @@ pub(crate) fn callbr(
}
// Emit CFI pointer type membership test
-self.cfi_type_test(fn_attrs, fn_abi, llfn);
+self.cfi_type_test(fn_attrs, fn_abi, instance, llfn);
// Emit KCFI operand bundle
-let kcfi_bundle = self.kcfi_operand_bundle(fn_attrs, fn_abi, llfn);
+let kcfi_bundle = self.kcfi_operand_bundle(fn_attrs, fn_abi, instance, llfn);
let kcfi_bundle = kcfi_bundle.as_ref().map(|b| &*b.raw);
if let Some(kcfi_bundle) = kcfi_bundle {
bundles.push(kcfi_bundle);
@ -1601,6 +1607,7 @@ fn cfi_type_test(
&mut self,
fn_attrs: Option<&CodegenFnAttrs>,
fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>,
+instance: Option<Instance<'tcx>>,
llfn: &'ll Value,
) {
let is_indirect_call = unsafe { llvm::LLVMRustIsNonGVFunctionPointerTy(llfn) };
@ -1622,7 +1629,11 @@ fn cfi_type_test(
options.insert(TypeIdOptions::NORMALIZE_INTEGERS);
}
-let typeid = typeid_for_fnabi(self.tcx, fn_abi, options);
+let typeid = if let Some(instance) = instance {
+typeid_for_instance(self.tcx, &instance, options)
+} else {
+typeid_for_fnabi(self.tcx, fn_abi, options)
+};
let typeid_metadata = self.cx.typeid_metadata(typeid).unwrap();
// Test whether the function pointer is associated with the type identifier.
@ -1644,6 +1655,7 @@ fn kcfi_operand_bundle(
&mut self,
fn_attrs: Option<&CodegenFnAttrs>,
fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>,
+instance: Option<Instance<'tcx>>,
llfn: &'ll Value,
) -> Option<llvm::OperandBundleDef<'ll>> {
let is_indirect_call = unsafe { llvm::LLVMRustIsNonGVFunctionPointerTy(llfn) };
@ -1665,7 +1677,12 @@ fn kcfi_operand_bundle(
options.insert(TypeIdOptions::NORMALIZE_INTEGERS);
}
-let kcfi_typeid = kcfi_typeid_for_fnabi(self.tcx, fn_abi, options);
+let kcfi_typeid = if let Some(instance) = instance {
+kcfi_typeid_for_instance(self.tcx, &instance, options)
+} else {
+kcfi_typeid_for_fnabi(self.tcx, fn_abi, options)
+};
Some(llvm::OperandBundleDef::new("kcfi", &[self.const_u32(kcfi_typeid)]))
} else {
None

View File

@ -181,6 +181,7 @@ fn codegen_intrinsic_call(
simple_fn,
&args.iter().map(|arg| arg.immediate()).collect::<Vec<_>>(),
None,
+Some(instance),
)
}
sym::likely => {
@ -539,7 +540,7 @@ fn catch_unwind_intrinsic<'ll>(
) {
if bx.sess().panic_strategy() == PanicStrategy::Abort {
let try_func_ty = bx.type_func(&[bx.type_ptr()], bx.type_void());
-bx.call(try_func_ty, None, None, try_func, &[data], None);
+bx.call(try_func_ty, None, None, try_func, &[data], None, None);
// Return 0 unconditionally from the intrinsic call;
// we can never unwind.
let ret_align = bx.tcx().data_layout.i32_align.abi;
@ -640,7 +641,7 @@ fn codegen_msvc_try<'ll>(
let ptr_align = bx.tcx().data_layout.pointer_align.abi;
let slot = bx.alloca(bx.type_ptr(), ptr_align);
let try_func_ty = bx.type_func(&[bx.type_ptr()], bx.type_void());
-bx.invoke(try_func_ty, None, None, try_func, &[data], normal, catchswitch, None);
+bx.invoke(try_func_ty, None, None, try_func, &[data], normal, catchswitch, None, None);
bx.switch_to_block(normal);
bx.ret(bx.const_i32(0));
@ -684,7 +685,7 @@ fn codegen_msvc_try<'ll>(
let funclet = bx.catch_pad(cs, &[tydesc, flags, slot]);
let ptr = bx.load(bx.type_ptr(), slot, ptr_align);
let catch_ty = bx.type_func(&[bx.type_ptr(), bx.type_ptr()], bx.type_void());
-bx.call(catch_ty, None, None, catch_func, &[data, ptr], Some(&funclet));
+bx.call(catch_ty, None, None, catch_func, &[data, ptr], Some(&funclet), None);
bx.catch_ret(&funclet, caught);
// The flag value of 64 indicates a "catch-all".
@ -692,7 +693,7 @@ fn codegen_msvc_try<'ll>(
let flags = bx.const_i32(64);
let null = bx.const_null(bx.type_ptr());
let funclet = bx.catch_pad(cs, &[null, flags, null]);
-bx.call(catch_ty, None, None, catch_func, &[data, null], Some(&funclet));
+bx.call(catch_ty, None, None, catch_func, &[data, null], Some(&funclet), None);
bx.catch_ret(&funclet, caught);
bx.switch_to_block(caught);
@ -701,7 +702,7 @@ fn codegen_msvc_try<'ll>(
// Note that no invoke is used here because by definition this function
// can't panic (that's what it's catching).
-let ret = bx.call(llty, None, None, llfn, &[try_func, data, catch_func], None);
+let ret = bx.call(llty, None, None, llfn, &[try_func, data, catch_func], None, None);
let i32_align = bx.tcx().data_layout.i32_align.abi;
bx.store(ret, dest, i32_align);
}
@ -750,7 +751,7 @@ fn codegen_wasm_try<'ll>(
// }
//
let try_func_ty = bx.type_func(&[bx.type_ptr()], bx.type_void());
-bx.invoke(try_func_ty, None, None, try_func, &[data], normal, catchswitch, None);
+bx.invoke(try_func_ty, None, None, try_func, &[data], normal, catchswitch, None, None);
bx.switch_to_block(normal);
bx.ret(bx.const_i32(0));
@ -766,7 +767,7 @@ fn codegen_wasm_try<'ll>(
let _sel = bx.call_intrinsic("llvm.wasm.get.ehselector", &[funclet.cleanuppad()]);
let catch_ty = bx.type_func(&[bx.type_ptr(), bx.type_ptr()], bx.type_void());
-bx.call(catch_ty, None, None, catch_func, &[data, ptr], Some(&funclet));
+bx.call(catch_ty, None, None, catch_func, &[data, ptr], Some(&funclet), None);
bx.catch_ret(&funclet, caught);
bx.switch_to_block(caught);
@ -775,7 +776,7 @@ fn codegen_wasm_try<'ll>(
// Note that no invoke is used here because by definition this function
// can't panic (that's what it's catching).
-let ret = bx.call(llty, None, None, llfn, &[try_func, data, catch_func], None);
+let ret = bx.call(llty, None, None, llfn, &[try_func, data, catch_func], None, None);
let i32_align = bx.tcx().data_layout.i32_align.abi;
bx.store(ret, dest, i32_align);
}
@ -818,7 +819,7 @@ fn codegen_gnu_try<'ll>(
let data = llvm::get_param(bx.llfn(), 1);
let catch_func = llvm::get_param(bx.llfn(), 2);
let try_func_ty = bx.type_func(&[bx.type_ptr()], bx.type_void());
-bx.invoke(try_func_ty, None, None, try_func, &[data], then, catch, None);
+bx.invoke(try_func_ty, None, None, try_func, &[data], then, catch, None, None);
bx.switch_to_block(then);
bx.ret(bx.const_i32(0));
@ -836,13 +837,13 @@ fn codegen_gnu_try<'ll>(
bx.add_clause(vals, tydesc);
let ptr = bx.extract_value(vals, 0);
let catch_ty = bx.type_func(&[bx.type_ptr(), bx.type_ptr()], bx.type_void());
-bx.call(catch_ty, None, None, catch_func, &[data, ptr], None);
+bx.call(catch_ty, None, None, catch_func, &[data, ptr], None, None);
bx.ret(bx.const_i32(1));
});
// Note that no invoke is used here because by definition this function
// can't panic (that's what it's catching).
-let ret = bx.call(llty, None, None, llfn, &[try_func, data, catch_func], None);
+let ret = bx.call(llty, None, None, llfn, &[try_func, data, catch_func], None, None);
let i32_align = bx.tcx().data_layout.i32_align.abi;
bx.store(ret, dest, i32_align);
}
@ -882,7 +883,7 @@ fn codegen_emcc_try<'ll>(
let data = llvm::get_param(bx.llfn(), 1);
let catch_func = llvm::get_param(bx.llfn(), 2);
let try_func_ty = bx.type_func(&[bx.type_ptr()], bx.type_void());
-bx.invoke(try_func_ty, None, None, try_func, &[data], then, catch, None);
+bx.invoke(try_func_ty, None, None, try_func, &[data], then, catch, None, None);
bx.switch_to_block(then);
bx.ret(bx.const_i32(0));
@ -920,13 +921,13 @@ fn codegen_emcc_try<'ll>(
bx.store(is_rust_panic, catch_data_1, i8_align);
let catch_ty = bx.type_func(&[bx.type_ptr(), bx.type_ptr()], bx.type_void());
-bx.call(catch_ty, None, None, catch_func, &[data, catch_data], None);
+bx.call(catch_ty, None, None, catch_func, &[data, catch_data], None, None);
bx.ret(bx.const_i32(1));
});
// Note that no invoke is used here because by definition this function
// can't panic (that's what it's catching).
-let ret = bx.call(llty, None, None, llfn, &[try_func, data, catch_func], None);
+let ret = bx.call(llty, None, None, llfn, &[try_func, data, catch_func], None, None);
let i32_align = bx.tcx().data_layout.i32_align.abi;
bx.store(ret, dest, i32_align);
}
@ -1439,6 +1440,7 @@ macro_rules! return_error {
f,
&args.iter().map(|arg| arg.immediate()).collect::<Vec<_>>(),
None,
+None,
);
Ok(c)
}
@ -1607,6 +1609,7 @@ fn llvm_vector_ty<'ll>(cx: &CodegenCx<'ll, '_>, elem_ty: Ty<'_>, vec_len: u64) -
f,
&[args[1].immediate(), alignment, mask, args[0].immediate()],
None,
+None,
);
return Ok(v);
}
@ -1706,6 +1709,7 @@ fn llvm_vector_ty<'ll>(cx: &CodegenCx<'ll, '_>, elem_ty: Ty<'_>, vec_len: u64) -
f,
&[args[1].immediate(), alignment, mask, args[2].immediate()],
None,
+None,
);
return Ok(v);
}
@ -1799,6 +1803,7 @@ fn llvm_vector_ty<'ll>(cx: &CodegenCx<'ll, '_>, elem_ty: Ty<'_>, vec_len: u64) -
f,
&[args[2].immediate(), args[1].immediate(), alignment, mask],
None,
+None,
);
return Ok(v);
}
@ -1904,6 +1909,7 @@ fn llvm_vector_ty<'ll>(cx: &CodegenCx<'ll, '_>, elem_ty: Ty<'_>, vec_len: u64) -
f,
&[args[0].immediate(), args[1].immediate(), alignment, mask],
None,
+None,
);
return Ok(v);
}
@ -2352,11 +2358,12 @@ macro_rules! arith_unary {
f,
&[args[0].immediate(), bx.const_int(bx.type_i1(), 0)],
None,
+None,
))
} else {
let fn_ty = bx.type_func(&[vec_ty], vec_ty);
let f = bx.declare_cfn(llvm_intrinsic, llvm::UnnamedAddr::No, fn_ty);
-Ok(bx.call(fn_ty, None, None, f, &[args[0].immediate()], None))
+Ok(bx.call(fn_ty, None, None, f, &[args[0].immediate()], None, None))
};
}
@ -2409,7 +2416,7 @@ macro_rules! arith_unary {
let fn_ty = bx.type_func(&[vec_ty, vec_ty], vec_ty);
let f = bx.declare_cfn(llvm_intrinsic, llvm::UnnamedAddr::No, fn_ty);
-let v = bx.call(fn_ty, None, None, f, &[lhs, rhs], None);
+let v = bx.call(fn_ty, None, None, f, &[lhs, rhs], None, None);
return Ok(v);
}

View File

@ -462,27 +462,34 @@ fn create_entry_fn<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
let ptr_ty = cx.type_ptr();
let (arg_argc, arg_argv) = get_argc_argv(cx, &mut bx);
-let (start_fn, start_ty, args) = if let EntryFnType::Main { sigpipe } = entry_type {
+let (start_fn, start_ty, args, instance) = if let EntryFnType::Main { sigpipe } = entry_type
+{
let start_def_id = cx.tcx().require_lang_item(LangItem::Start, None);
-let start_fn = cx.get_fn_addr(ty::Instance::expect_resolve(
+let start_instance = ty::Instance::expect_resolve(
cx.tcx(),
ty::ParamEnv::reveal_all(),
start_def_id,
cx.tcx().mk_args(&[main_ret_ty.into()]),
-));
+);
+let start_fn = cx.get_fn_addr(start_instance);
let i8_ty = cx.type_i8();
let arg_sigpipe = bx.const_u8(sigpipe);
let start_ty = cx.type_func(&[cx.val_ty(rust_main), isize_ty, ptr_ty, i8_ty], isize_ty);
-(start_fn, start_ty, vec![rust_main, arg_argc, arg_argv, arg_sigpipe])
+(
+start_fn,
+start_ty,
+vec![rust_main, arg_argc, arg_argv, arg_sigpipe],
+Some(start_instance),
+)
} else {
debug!("using user-defined start fn");
let start_ty = cx.type_func(&[isize_ty, ptr_ty], isize_ty);
-(rust_main, start_ty, vec![arg_argc, arg_argv])
+(rust_main, start_ty, vec![arg_argc, arg_argv], None)
};
-let result = bx.call(start_ty, None, None, start_fn, &args, None);
+let result = bx.call(start_ty, None, None, start_fn, &args, None, instance);
if cx.sess().target.os.contains("uefi") {
bx.ret(result);
} else {

View File

@ -232,6 +232,7 @@ fn do_call<Bx: BuilderMethods<'a, 'tcx>>(
ret_llbb,
unwind_block,
self.funclet(fx),
+instance,
);
if fx.mir[self.bb].is_cleanup {
bx.apply_attrs_to_cleanup_callsite(invokeret);
@ -247,7 +248,8 @@ fn do_call<Bx: BuilderMethods<'a, 'tcx>>(
}
MergingSucc::False
} else {
-let llret = bx.call(fn_ty, fn_attrs, Some(fn_abi), fn_ptr, llargs, self.funclet(fx));
+let llret =
+bx.call(fn_ty, fn_attrs, Some(fn_abi), fn_ptr, llargs, self.funclet(fx), instance);
if fx.mir[self.bb].is_cleanup {
bx.apply_attrs_to_cleanup_callsite(llret);
}
@ -502,7 +504,6 @@ fn codegen_drop_terminator(
let ty = location.ty(self.mir, bx.tcx()).ty;
let ty = self.monomorphize(ty);
let drop_fn = Instance::resolve_drop_in_place(bx.tcx(), ty);
-let instance = drop_fn.clone();
if let ty::InstanceDef::DropGlue(_, None) = drop_fn.def {
// we don't actually need to drop anything.
@ -518,7 +519,7 @@ fn codegen_drop_terminator(
args1 = [place.llval];
&args1[..]
};
-let (drop_fn, fn_abi) =
+let (drop_fn, fn_abi, drop_instance) =
match ty.kind() {
// FIXME(eddyb) perhaps move some of this logic into
// `Instance::resolve_drop_in_place`?
@ -550,6 +551,7 @@ fn codegen_drop_terminator(
meth::VirtualIndex::from_index(ty::COMMON_VTABLE_ENTRIES_DROPINPLACE)
.get_fn(bx, vtable, ty, fn_abi),
fn_abi,
+virtual_drop,
)
}
ty::Dynamic(_, _, ty::DynStar) => {
@ -592,9 +594,14 @@ fn codegen_drop_terminator(
meth::VirtualIndex::from_index(ty::COMMON_VTABLE_ENTRIES_DROPINPLACE)
.get_fn(bx, meta.immediate(), ty, fn_abi),
fn_abi,
+virtual_drop,
)
}
-_ => (bx.get_fn_addr(drop_fn), bx.fn_abi_of_instance(drop_fn, ty::List::empty())),
+_ => (
+bx.get_fn_addr(drop_fn),
+bx.fn_abi_of_instance(drop_fn, ty::List::empty()),
+drop_fn,
+),
};
helper.do_call(
self,
@ -605,7 +612,7 @@ fn codegen_drop_terminator(
Some((ReturnDest::Nothing, target)),
unwind,
&[],
-Some(instance),
+Some(drop_instance),
mergeable_succ,
)
}
@ -1699,7 +1706,7 @@ fn terminate_block(&mut self, reason: UnwindTerminateReason) -> Bx::BasicBlock {
} else {
let fn_ty = bx.fn_decl_backend_type(fn_abi);
-let llret = bx.call(fn_ty, None, Some(fn_abi), fn_ptr, &[], funclet.as_ref());
+let llret = bx.call(fn_ty, None, Some(fn_abi), fn_ptr, &[], funclet.as_ref(), None);
bx.apply_attrs_to_cleanup_callsite(llret);
}

View File

@ -710,7 +710,7 @@ pub fn codegen_rvalue_operand(
} else {
None
};
-bx.call(fn_ty, fn_attrs, Some(fn_abi), fn_ptr, &[], None)
+bx.call(fn_ty, fn_attrs, Some(fn_abi), fn_ptr, &[], None, Some(instance))
} else {
bx.get_static(def_id)
};

View File

@ -70,7 +70,15 @@ pub fn size_and_align_of_dst<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
// (But we are in good company, this code is duplicated plenty of times.)
let fn_ty = bx.fn_decl_backend_type(fn_abi);
-bx.call(fn_ty, /* fn_attrs */ None, Some(fn_abi), llfn, &[msg.0, msg.1], None);
+bx.call(
+fn_ty,
+/* fn_attrs */ None,
+Some(fn_abi),
+llfn,
+&[msg.0, msg.1],
+None,
+None,
+);
// This function does not return so we can now return whatever we want.
let size = bx.const_usize(layout.size.bytes());

View File

@ -17,7 +17,7 @@
use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrs;
use rustc_middle::ty::layout::{HasParamEnv, TyAndLayout};
-use rustc_middle::ty::Ty;
+use rustc_middle::ty::{Instance, Ty};
use rustc_session::config::OptLevel;
use rustc_span::Span;
use rustc_target::abi::call::FnAbi;
@ -82,6 +82,7 @@ fn invoke(
then: Self::BasicBlock,
catch: Self::BasicBlock,
funclet: Option<&Self::Funclet>,
+instance: Option<Instance<'tcx>>,
) -> Self::Value;
fn unreachable(&mut self);
@ -389,6 +390,7 @@ fn call(
llfn: Self::Value,
args: &[Self::Value],
funclet: Option<&Self::Funclet>,
+instance: Option<Instance<'tcx>>,
) -> Self::Value;
fn zext(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;