// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use llvm::{self, ValueRef, BasicBlockRef};
use rustc_const_eval::{ErrKind, ConstEvalErr, note_const_eval_err};
use rustc::middle::lang_items;
use rustc::ty::{self, layout};
use rustc::mir;
use abi::{Abi, FnType, ArgType};
use adt;
use base::{self, Lifetime};
use callee::{Callee, CalleeData, Fn, Intrinsic, NamedTupleConstructor, Virtual};
use common::{self, BlockAndBuilder, Funclet};
use common::{C_bool, C_str_slice, C_struct, C_u32, C_undef};
use consts;
use debuginfo;
use Disr;
use machine::{llalign_of_min, llbitsize_of_real};
use meth;
use type_of;
use glue;
use type_::Type;

use rustc_data_structures::indexed_vec::IndexVec;
use rustc_data_structures::fx::FxHashMap;
use syntax::symbol::Symbol;

use super::{MirContext, LocalRef};
use super::analyze::CleanupKind;
use super::constant::Const;
use super::lvalue::{LvalueRef};
use super::operand::OperandRef;
use super::operand::OperandValue::{Pair, Ref, Immediate};

use std::ptr;

impl<'a, 'tcx> MirContext<'a, 'tcx> {
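    /// Translates a single MIR basic block into LLVM IR: first the block's
    /// statements in order, then its terminator.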
    pub fn trans_block(&mut self, bb: mir::BasicBlock,
                       funclets: &IndexVec<mir::BasicBlock, Option<Funclet>>) {
        let mut bcx = self.build_block(bb);
        let data = &self.mir[bb];

        debug!("trans_block({:?}={:?})", bb, data);

        let funclet = match self.cleanup_kinds[bb] {
            CleanupKind::Internal { funclet } => funclets[funclet].as_ref(),
            _ => funclets[bb].as_ref(),
        };

        // Create the cleanup bundle, if needed.
        let cleanup_pad = funclet.map(|lp| lp.cleanuppad());
        let cleanup_bundle = funclet.map(|l| l.bundle());

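        // `funclet_br` branches to `bb`, emitting a `cleanupret` instead of a
        // plain `br` when control leaves an MSVC funclet.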
        let funclet_br = |this: &Self, bcx: BlockAndBuilder, bb: mir::BasicBlock| {
            let lltarget = this.blocks[bb];
            if let Some(cp) = cleanup_pad {
                match this.cleanup_kinds[bb] {
                    CleanupKind::Funclet => {
                        // micro-optimization: generate a `ret` rather than a jump
                        // to a return block
                        bcx.cleanup_ret(cp, Some(lltarget));
                    }
                    CleanupKind::Internal { .. } => bcx.br(lltarget),
                    CleanupKind::NotCleanup => bug!("jump from cleanup bb to bb {:?}", bb)
                }
            } else {
                bcx.br(lltarget);
            }
        };

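        // `llblock` returns the LLVM basic block to use when branching to
        // `target`, inserting an MSVC cleanup trampoline or a GNU landing pad
        // when the edge crosses a funclet boundary.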
        let llblock = |this: &mut Self, target: mir::BasicBlock| {
            let lltarget = this.blocks[target];

            if let Some(cp) = cleanup_pad {
                match this.cleanup_kinds[target] {
                    CleanupKind::Funclet => {
                        // MSVC cross-funclet jump - need a trampoline

                        debug!("llblock: creating cleanup trampoline for {:?}", target);
                        let name = &format!("{:?}_cleanup_trampoline_{:?}", bb, target);
                        let trampoline = this.fcx.build_new_block(name);
                        trampoline.set_personality_fn(this.fcx.eh_personality());
                        trampoline.cleanup_ret(cp, Some(lltarget));
                        trampoline.llbb()
                    }
                    CleanupKind::Internal { .. } => lltarget,
                    CleanupKind::NotCleanup =>
                        bug!("jump from cleanup bb {:?} to bb {:?}", bb, target)
                }
            } else {
                if let (CleanupKind::NotCleanup, CleanupKind::Funclet) =
                    (this.cleanup_kinds[bb], this.cleanup_kinds[target])
                {
                    // jump *into* cleanup - need a landing pad if GNU
                    this.landing_pad_to(target)
                } else {
                    lltarget
                }
            }
        };

        for statement in &data.statements {
            bcx = self.trans_statement(bcx, statement);
        }

        let terminator = data.terminator();
        debug!("trans_block: terminator: {:?}", terminator);

        let span = terminator.source_info.span;
        let (scope, debug_span) = self.debug_loc(terminator.source_info);
        debuginfo::set_source_location(self, &bcx, scope, debug_span);
        match terminator.kind {
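            // Resume unwinding: on MSVC this is a `cleanupret` out of the
            // funclet; on GNU it reloads the exception from the personality
            // slot and resumes (or calls `eh_unwind_resume` where required).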
            mir::TerminatorKind::Resume => {
                if let Some(cleanup_pad) = cleanup_pad {
                    bcx.cleanup_ret(cleanup_pad, None);
                } else {
                    let llpersonality = bcx.fcx().eh_personality();
                    bcx.set_personality_fn(llpersonality);

                    let ps = self.get_personality_slot(&bcx);
                    let lp = bcx.load(ps);
                    Lifetime::End.call(&bcx, ps);
                    if !bcx.sess().target.target.options.custom_unwind_resume {
                        bcx.resume(lp);
                    } else {
                        let exc_ptr = bcx.extract_value(lp, 0);
                        bcx.call(
                            bcx.fcx().eh_unwind_resume().reify(bcx.ccx()),
                            &[exc_ptr],
                            cleanup_bundle,
                        );
                    }
                }
            }

            mir::TerminatorKind::Goto { target } => {
                funclet_br(self, bcx, target);
            }

            mir::TerminatorKind::If { ref cond, targets: (true_bb, false_bb) } => {
                let cond = self.trans_operand(&bcx, cond);

                let lltrue = llblock(self, true_bb);
                let llfalse = llblock(self, false_bb);
                bcx.cond_br(cond.immediate(), lltrue, llfalse);
            }

            mir::TerminatorKind::Switch { ref discr, ref adt_def, ref targets } => {
                let discr_lvalue = self.trans_lvalue(&bcx, discr);
                let ty = discr_lvalue.ty.to_ty(bcx.tcx());
                let discr = adt::trans_get_discr(&bcx, ty, discr_lvalue.llval, None, true);

                let mut bb_hist = FxHashMap();
                for target in targets {
                    *bb_hist.entry(target).or_insert(0) += 1;
                }
                let (default_bb, default_blk) = match bb_hist.iter().max_by_key(|&(_, c)| c) {
                    // If a single target basic block is predominant, promote that to be the
                    // default case for the switch instruction to reduce the size of the generated
                    // code. This is especially helpful in cases like an if-let on a huge enum.
                    // Note: This optimization is only valid for exhaustive matches.
                    Some((&&bb, &c)) if c > targets.len() / 2 => {
                        (Some(bb), llblock(self, bb))
                    }
                    // We're generating an exhaustive switch, so the else branch
                    // can't be hit. Branching to an unreachable instruction
                    // lets LLVM know this.
                    _ => (None, self.unreachable_block())
                };
                let switch = bcx.switch(discr, default_blk, targets.len());
                assert_eq!(adt_def.variants.len(), targets.len());
                for (adt_variant, &target) in adt_def.variants.iter().zip(targets) {
                    if default_bb != Some(target) {
                        let llbb = llblock(self, target);
                        let llval = adt::trans_case(&bcx, ty, Disr::from(adt_variant.disr_val));
                        bcx.add_case(switch, llval, llbb)
                    }
                }
            }

            mir::TerminatorKind::SwitchInt { ref discr, switch_ty, ref values, ref targets } => {
                let (otherwise, targets) = targets.split_last().unwrap();
                let discr = bcx.load(self.trans_lvalue(&bcx, discr).llval);
                let discr = base::to_immediate(&bcx, discr, switch_ty);
                let switch = bcx.switch(discr, llblock(self, *otherwise), values.len());
                for (value, target) in values.iter().zip(targets) {
                    let val = Const::from_constval(bcx.ccx(), value.clone(), switch_ty);
                    let llbb = llblock(self, *target);
                    bcx.add_case(switch, val.llval, llbb)
                }
            }

            mir::TerminatorKind::Return => {
                let ret = bcx.fcx().fn_ty.ret;
                if ret.is_ignore() || ret.is_indirect() {
                    bcx.ret_void();
                    return;
                }

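                // When the ABI returns the value through a cast type (e.g. a
                // small aggregate returned in integer registers), spill the
                // operand to a scratch slot and reload it as the cast type.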
                let llval = if let Some(cast_ty) = ret.cast {
                    let op = match self.locals[mir::RETURN_POINTER] {
                        LocalRef::Operand(Some(op)) => op,
                        LocalRef::Operand(None) => bug!("use of return before def"),
                        LocalRef::Lvalue(tr_lvalue) => {
                            OperandRef {
                                val: Ref(tr_lvalue.llval),
                                ty: tr_lvalue.ty.to_ty(bcx.tcx())
                            }
                        }
                    };
                    let llslot = match op.val {
                        Immediate(_) | Pair(..) => {
                            let llscratch = bcx.fcx().alloca(ret.original_ty, "ret");
                            self.store_operand(&bcx, llscratch, op);
                            llscratch
                        }
                        Ref(llval) => llval
                    };
                    let load = bcx.load(bcx.pointercast(llslot, cast_ty.ptr_to()));
                    let llalign = llalign_of_min(bcx.ccx(), ret.ty);
                    unsafe {
                        llvm::LLVMSetAlignment(load, llalign);
                    }
                    load
                } else {
                    let op = self.trans_consume(&bcx, &mir::Lvalue::Local(mir::RETURN_POINTER));
                    op.pack_if_pair(&bcx).immediate()
                };
                bcx.ret(llval);
            }

            mir::TerminatorKind::Unreachable => {
                bcx.unreachable();
            }

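            // Drop: run the drop glue for the place's type, via `invoke` when
            // there is an unwind target and a plain `call` otherwise.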
            mir::TerminatorKind::Drop { ref location, target, unwind } => {
                let ty = location.ty(&self.mir, bcx.tcx()).to_ty(bcx.tcx());
                let ty = bcx.fcx().monomorphize(&ty);

                // Double check for necessity to drop
                if !bcx.ccx().shared().type_needs_drop(ty) {
                    funclet_br(self, bcx, target);
                    return;
                }

                let lvalue = self.trans_lvalue(&bcx, location);
                let drop_fn = glue::get_drop_glue(bcx.ccx(), ty);
                let drop_ty = glue::get_drop_glue_type(bcx.ccx().shared(), ty);
                let is_sized = bcx.ccx().shared().type_is_sized(ty);
                let llvalue = if is_sized {
                    if drop_ty != ty {
                        bcx.pointercast(lvalue.llval, type_of::type_of(bcx.ccx(), drop_ty).ptr_to())
                    } else {
                        lvalue.llval
                    }
                } else {
                    // FIXME(#36457) Currently drop glue takes sized
                    // values as a `*(data, meta)`, but elsewhere in
                    // MIR we pass `(data, meta)` as two separate
                    // arguments. It would be better to fix drop glue,
                    // but I am shooting for a quick fix to #35546
                    // here that can be cleanly backported to beta, so
                    // I want to avoid touching all of trans.
                    let scratch = base::alloc_ty(&bcx, ty, "drop");
                    Lifetime::Start.call(&bcx, scratch);
                    bcx.store(lvalue.llval, base::get_dataptr(&bcx, scratch));
                    bcx.store(lvalue.llextra, base::get_meta(&bcx, scratch));
                    scratch
                };
                if let Some(unwind) = unwind {
                    bcx.invoke(drop_fn,
                               &[llvalue],
                               self.blocks[target],
                               llblock(self, unwind),
                               cleanup_bundle);
                } else {
                    bcx.call(drop_fn, &[llvalue], cleanup_bundle);
                    funclet_br(self, bcx, target);
                }
            }

            mir::TerminatorKind::Assert { ref cond, expected, ref msg, target, cleanup } => {
                let cond = self.trans_operand(&bcx, cond).immediate();
                let mut const_cond = common::const_to_opt_uint(cond).map(|c| c == 1);

                // This case can currently arise only from functions marked
                // with #[rustc_inherit_overflow_checks] and inlined from
                // another crate (mostly core::num generic/#[inline] fns),
                // while the current crate doesn't use overflow checks.
                // NOTE: Unlike binops, negation doesn't have its own
                // checked operation, just a comparison with the minimum
                // value, so we have to check for the assert message.
                if !bcx.ccx().check_overflow() {
                    use rustc_const_math::ConstMathErr::Overflow;
                    use rustc_const_math::Op::Neg;

                    if let mir::AssertMessage::Math(Overflow(Neg)) = *msg {
                        const_cond = Some(expected);
                    }
                }

                // Don't translate the panic block if success is known.
                if const_cond == Some(expected) {
                    funclet_br(self, bcx, target);
                    return;
                }

                // Pass the condition through llvm.expect for branch hinting.
                let expect = bcx.ccx().get_intrinsic(&"llvm.expect.i1");
                let cond = bcx.call(expect, &[cond, C_bool(bcx.ccx(), expected)], None);

                // Create the failure block and the conditional branch to it.
                let lltarget = llblock(self, target);
                let panic_block = self.fcx.build_new_block("panic");
                if expected {
                    bcx.cond_br(cond, lltarget, panic_block.llbb());
                } else {
                    bcx.cond_br(cond, panic_block.llbb(), lltarget);
                }

                // After this point, bcx is the block for the call to panic.
                bcx = panic_block;
                debuginfo::set_source_location(self, &bcx, scope, debug_span);

                // Get the location information.
                let loc = bcx.sess().codemap().lookup_char_pos(span.lo);
                let filename = Symbol::intern(&loc.file.name).as_str();
                let filename = C_str_slice(bcx.ccx(), filename);
                let line = C_u32(bcx.ccx(), loc.line as u32);

                // Put together the arguments to the panic entry point.
                let (lang_item, args, const_err) = match *msg {
                    mir::AssertMessage::BoundsCheck { ref len, ref index } => {
                        let len = self.trans_operand(&mut bcx, len).immediate();
                        let index = self.trans_operand(&mut bcx, index).immediate();

                        let const_err = common::const_to_opt_uint(len).and_then(|len| {
                            common::const_to_opt_uint(index).map(|index| {
                                ErrKind::IndexOutOfBounds {
                                    len: len,
                                    index: index
                                }
                            })
                        });

                        let file_line = C_struct(bcx.ccx(), &[filename, line], false);
                        let align = llalign_of_min(bcx.ccx(), common::val_ty(file_line));
                        let file_line = consts::addr_of(bcx.ccx(),
                                                        file_line,
                                                        align,
                                                        "panic_bounds_check_loc");
                        (lang_items::PanicBoundsCheckFnLangItem,
                         vec![file_line, index, len],
                         const_err)
                    }
                    mir::AssertMessage::Math(ref err) => {
                        let msg_str = Symbol::intern(err.description()).as_str();
                        let msg_str = C_str_slice(bcx.ccx(), msg_str);
                        let msg_file_line = C_struct(bcx.ccx(),
                                                     &[msg_str, filename, line],
                                                     false);
                        let align = llalign_of_min(bcx.ccx(), common::val_ty(msg_file_line));
                        let msg_file_line = consts::addr_of(bcx.ccx(),
                                                            msg_file_line,
                                                            align,
                                                            "panic_loc");
                        (lang_items::PanicFnLangItem,
                         vec![msg_file_line],
                         Some(ErrKind::Math(err.clone())))
                    }
                };

                // If we know we always panic, and the error message
                // is also constant, then we can produce a warning.
                if const_cond == Some(!expected) {
                    if let Some(err) = const_err {
                        let err = ConstEvalErr { span: span, kind: err };
                        let mut diag = bcx.tcx().sess.struct_span_warn(
                            span, "this expression will panic at run-time");
                        note_const_eval_err(bcx.tcx(), &err, span, "expression", &mut diag);
                        diag.emit();
                    }
                }

                // Obtain the panic entry point.
                let def_id = common::langcall(bcx.tcx(), Some(span), "", lang_item);
                let callee = Callee::def(bcx.ccx(), def_id,
                    bcx.ccx().empty_substs_for_def_id(def_id));
                let llfn = callee.reify(bcx.ccx());

                // Translate the actual panic invoke/call.
                if let Some(unwind) = cleanup {
                    bcx.invoke(llfn,
                               &args,
                               self.unreachable_block(),
                               llblock(self, unwind),
                               cleanup_bundle);
                } else {
                    bcx.call(llfn, &args, cleanup_bundle);
                    bcx.unreachable();
                }
            }

            mir::TerminatorKind::DropAndReplace { .. } => {
                bug!("undesugared DropAndReplace in trans: {:?}", data);
            }

            mir::TerminatorKind::Call { ref func, ref args, ref destination, ref cleanup } => {
                // Create the callee. This is a fn ptr or zero-sized and hence a kind of scalar.
                let callee = self.trans_operand(&bcx, func);

                let (mut callee, abi, sig) = match callee.ty.sty {
                    ty::TyFnDef(def_id, substs, f) => {
                        (Callee::def(bcx.ccx(), def_id, substs), f.abi, &f.sig)
                    }
                    ty::TyFnPtr(f) => {
                        (Callee {
                            data: Fn(callee.immediate()),
                            ty: callee.ty
                        }, f.abi, &f.sig)
                    }
                    _ => bug!("{} is not callable", callee.ty)
                };

                let sig = bcx.tcx().erase_late_bound_regions_and_normalize(sig);

                // Handle intrinsics old trans wants Expr's for, ourselves.
                let intrinsic = match (&callee.ty.sty, &callee.data) {
                    (&ty::TyFnDef(def_id, ..), &Intrinsic) => {
                        Some(bcx.tcx().item_name(def_id).as_str())
                    }
                    _ => None
                };
                let intrinsic = intrinsic.as_ref().map(|s| &s[..]);

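                // `move_val_init` is not a real call: it lowers to a plain
                // store of the second argument through the first.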
if intrinsic == Some("move_val_init") {
|
|
|
|
let &(_, target) = destination.as_ref().unwrap();
|
|
|
|
// The first argument is a thin destination pointer.
|
|
|
|
let llptr = self.trans_operand(&bcx, &args[0]).immediate();
|
|
|
|
let val = self.trans_operand(&bcx, &args[1]);
|
|
|
|
self.store_operand(&bcx, llptr, val);
|
2016-05-29 14:01:06 -05:00
|
|
|
funclet_br(self, bcx, target);
|
2016-03-08 06:40:37 -06:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2016-12-10 21:32:44 -06:00
|
|
|
                // FIXME: This should proxy to the drop glue in the future when the ABI matches;
                // most of the below code was copied from the match arm for TerminatorKind::Drop.
                if intrinsic == Some("drop_in_place") {
                    let &(_, target) = destination.as_ref().unwrap();
                    let ty = if let ty::TyFnDef(_, substs, _) = callee.ty.sty {
                        substs.type_at(0)
                    } else {
                        bug!("Unexpected ty: {}", callee.ty);
                    };

                    // Double check for necessity to drop
                    if !bcx.ccx().shared().type_needs_drop(ty) {
                        funclet_br(self, bcx, target);
                        return;
                    }

                    let ptr = self.trans_operand(&bcx, &args[0]);
                    let (llval, llextra) = match ptr.val {
                        Immediate(llptr) => (llptr, ptr::null_mut()),
                        Pair(llptr, llextra) => (llptr, llextra),
                        Ref(_) => bug!("Deref of by-Ref type {:?}", ptr.ty)
                    };

                    let drop_fn = glue::get_drop_glue(bcx.ccx(), ty);
                    let drop_ty = glue::get_drop_glue_type(bcx.ccx().shared(), ty);
                    let is_sized = bcx.ccx().shared().type_is_sized(ty);
                    let llvalue = if is_sized {
                        if drop_ty != ty {
                            bcx.pointercast(llval, type_of::type_of(bcx.ccx(), drop_ty).ptr_to())
                        } else {
                            llval
                        }
                    } else {
                        // FIXME(#36457) Currently drop glue takes sized
                        // values as a `*(data, meta)`, but elsewhere in
                        // MIR we pass `(data, meta)` as two separate
                        // arguments. It would be better to fix drop glue,
                        // but I am shooting for a quick fix to #35546
                        // here that can be cleanly backported to beta, so
                        // I want to avoid touching all of trans.
                        let scratch = base::alloc_ty(&bcx, ty, "drop");
                        Lifetime::Start.call(&bcx, scratch);
                        bcx.store(llval, base::get_dataptr(&bcx, scratch));
                        bcx.store(llextra, base::get_meta(&bcx, scratch));
                        scratch
                    };
                    if let Some(unwind) = *cleanup {
                        bcx.invoke(drop_fn,
                                   &[llvalue],
                                   self.blocks[target],
                                   llblock(self, unwind),
                                   cleanup_bundle);
                    } else {
                        bcx.call(drop_fn, &[llvalue], cleanup_bundle);
                        funclet_br(self, bcx, target);
                    }
                    return;
                }

if intrinsic == Some("transmute") {
|
|
|
|
let &(ref dest, target) = destination.as_ref().unwrap();
|
2016-04-04 02:21:27 -05:00
|
|
|
self.with_lvalue_ref(&bcx, dest, |this, dest| {
|
|
|
|
this.trans_transmute(&bcx, &args[0], dest);
|
|
|
|
});
|
2016-03-08 06:40:37 -06:00
|
|
|
|
2016-05-29 14:01:06 -05:00
|
|
|
funclet_br(self, bcx, target);
|
2016-03-08 06:40:37 -06:00
|
|
|
return;
|
|
|
|
}
|
2016-03-06 05:23:20 -06:00
|
|
|
|
2016-11-28 20:35:38 -06:00
|
|
|
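                // Arguments beyond the declared signature (C-style varargs)
                // still need ABI information, so monomorphize their types and
                // include them when computing the callee's `FnType`.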
                let extra_args = &args[sig.inputs().len()..];
                let extra_args = extra_args.iter().map(|op_arg| {
                    let op_ty = op_arg.ty(&self.mir, bcx.tcx());
                    bcx.fcx().monomorphize(&op_ty)
                }).collect::<Vec<_>>();
                let fn_ty = callee.direct_fn_type(bcx.ccx(), &extra_args);

                // The arguments we'll be passing. Plus one to account for outptr, if used.
                let arg_count = fn_ty.args.len() + fn_ty.ret.is_indirect() as usize;
                let mut llargs = Vec::with_capacity(arg_count);

                // Prepare the return value destination
                let ret_dest = if let Some((ref dest, _)) = *destination {
                    let is_intrinsic = if let Intrinsic = callee.data {
                        true
                    } else {
                        false
                    };
                    self.make_return_dest(&bcx, dest, &fn_ty.ret, &mut llargs, is_intrinsic)
                } else {
                    ReturnDest::Nothing
                };

                // Split the rust-call tupled arguments off.
                let (first_args, untuple) = if abi == Abi::RustCall && !args.is_empty() {
                    let (tup, args) = args.split_last().unwrap();
                    (args, Some(tup))
                } else {
                    (&args[..], None)
                };

                let is_shuffle = intrinsic.map_or(false, |name| {
                    name.starts_with("simd_shuffle")
                });
                let mut idx = 0;
                for arg in first_args {
                    // The indices passed to simd_shuffle* in the
                    // third argument must be constant. This is
                    // checked by const-qualification, which also
                    // promotes any complex rvalues to constants.
                    if is_shuffle && idx == 2 {
                        match *arg {
                            mir::Operand::Consume(_) => {
                                span_bug!(span, "shuffle indices must be constant");
                            }
                            mir::Operand::Constant(ref constant) => {
                                let val = self.trans_constant(&bcx, constant);
                                llargs.push(val.llval);
                                idx += 1;
                                continue;
                            }
                        }
                    }

                    let op = self.trans_operand(&bcx, arg);
                    self.trans_argument(&bcx, op, &mut llargs, &fn_ty,
                                        &mut idx, &mut callee.data);
                }
                if let Some(tup) = untuple {
                    self.trans_arguments_untupled(&bcx, tup, &mut llargs, &fn_ty,
                                                  &mut idx, &mut callee.data)
                }

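                // Resolve the callee to a concrete function pointer; tuple
                // constructors are reified, and intrinsics are dispatched to
                // `trans_intrinsic_call` without producing a pointer at all.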
                let fn_ptr = match callee.data {
                    NamedTupleConstructor(_) => {
                        // FIXME translate this like mir::Rvalue::Aggregate.
                        callee.reify(bcx.ccx())
                    }
                    Intrinsic => {
                        use intrinsic::trans_intrinsic_call;

                        let (dest, llargs) = match ret_dest {
                            _ if fn_ty.ret.is_indirect() => {
                                (llargs[0], &llargs[1..])
                            }
                            ReturnDest::Nothing => {
                                (C_undef(fn_ty.ret.original_ty.ptr_to()), &llargs[..])
                            }
                            ReturnDest::IndirectOperand(dst, _) |
                            ReturnDest::Store(dst) => (dst, &llargs[..]),
                            ReturnDest::DirectOperand(_) =>
                                bug!("Cannot use direct operand with an intrinsic call")
                        };

                        trans_intrinsic_call(&bcx, callee.ty, &fn_ty, &llargs, dest, debug_span);

                        if let ReturnDest::IndirectOperand(dst, _) = ret_dest {
                            // Make a fake operand for store_return
                            let op = OperandRef {
                                val: Ref(dst),
                                ty: sig.output(),
                            };
                            self.store_return(&bcx, ret_dest, fn_ty.ret, op);
                        }

                        if let Some((_, target)) = *destination {
                            funclet_br(self, bcx, target);
                        } else {
                            bcx.unreachable();
                        }

                        return;
                    }
                    Fn(f) => f,
                    Virtual(_) => bug!("Virtual fn ptr not extracted")
                };

                // Many different ways to call a function are handled here.
                if let &Some(cleanup) = cleanup {
                    let ret_bcx = if let Some((_, target)) = *destination {
                        self.blocks[target]
                    } else {
                        self.unreachable_block()
                    };
                    let invokeret = bcx.invoke(fn_ptr,
                                               &llargs,
                                               ret_bcx,
                                               llblock(self, cleanup),
                                               cleanup_bundle);
                    fn_ty.apply_attrs_callsite(invokeret);

                    if let Some((_, target)) = *destination {
                        let ret_bcx = self.build_block(target);
                        ret_bcx.at_start(|ret_bcx| {
                            debuginfo::set_source_location(self, &ret_bcx, scope, debug_span);
                            let op = OperandRef {
                                val: Immediate(invokeret),
                                ty: sig.output(),
                            };
                            self.store_return(&ret_bcx, ret_dest, fn_ty.ret, op);
                        });
                    }
                } else {
                    let llret = bcx.call(fn_ptr, &llargs, cleanup_bundle);
                    fn_ty.apply_attrs_callsite(llret);
                    if let Some((_, target)) = *destination {
                        let op = OperandRef {
                            val: Immediate(llret),
                            ty: sig.output(),
                        };
                        self.store_return(&bcx, ret_dest, fn_ty.ret, op);
                        funclet_br(self, bcx, target);
                    } else {
                        bcx.unreachable();
                    }
                }
            }
        }
    }

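    /// Lowers a single call argument into the `ValueRef`s the ABI expects,
    /// splitting fat pointers into their data/metadata halves and spilling
    /// immediates that must be passed indirectly or through a cast type.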
    fn trans_argument(&mut self,
                      bcx: &BlockAndBuilder<'a, 'tcx>,
                      op: OperandRef<'tcx>,
                      llargs: &mut Vec<ValueRef>,
                      fn_ty: &FnType,
                      next_idx: &mut usize,
                      callee: &mut CalleeData) {
        if let Pair(a, b) = op.val {
            // Treat the values in a fat pointer separately.
            if common::type_is_fat_ptr(bcx.ccx(), op.ty) {
                let (ptr, meta) = (a, b);
                if *next_idx == 0 {
                    if let Virtual(idx) = *callee {
                        let llfn = meth::get_virtual_method(bcx, meta, idx);
                        let llty = fn_ty.llvm_type(bcx.ccx()).ptr_to();
                        *callee = Fn(bcx.pointercast(llfn, llty));
                    }
                }

                let imm_op = |x| OperandRef {
                    val: Immediate(x),
                    // We won't be checking the type again.
                    ty: bcx.tcx().types.err
                };
                self.trans_argument(bcx, imm_op(ptr), llargs, fn_ty, next_idx, callee);
                self.trans_argument(bcx, imm_op(meta), llargs, fn_ty, next_idx, callee);
                return;
            }
        }

        let arg = &fn_ty.args[*next_idx];
        *next_idx += 1;

        // Fill padding with undef value, where applicable.
        if let Some(ty) = arg.pad {
            llargs.push(C_undef(ty));
        }

        if arg.is_ignore() {
            return;
        }

        // Force by-ref if we have to load through a cast pointer.
        let (mut llval, by_ref) = match op.val {
            Immediate(_) | Pair(..) => {
                if arg.is_indirect() || arg.cast.is_some() {
                    let llscratch = bcx.fcx().alloca(arg.original_ty, "arg");
                    self.store_operand(bcx, llscratch, op);
                    (llscratch, true)
                } else {
                    (op.pack_if_pair(bcx).immediate(), false)
                }
            }
            Ref(llval) => (llval, true)
        };

        if by_ref && !arg.is_indirect() {
            // Have to load the argument, maybe while casting it.
            if arg.original_ty == Type::i1(bcx.ccx()) {
                // We store bools as i8 so we need to truncate to i1.
                llval = bcx.load_range_assert(llval, 0, 2, llvm::False);
                llval = bcx.trunc(llval, arg.original_ty);
            } else if let Some(ty) = arg.cast {
                llval = bcx.load(bcx.pointercast(llval, ty.ptr_to()));
                let llalign = llalign_of_min(bcx.ccx(), arg.ty);
                unsafe {
                    llvm::LLVMSetAlignment(llval, llalign);
                }
            } else {
                llval = bcx.load(llval);
            }
        }

        llargs.push(llval);
    }

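    /// Expands the final, tupled argument of a "rust-call" function into the
    /// individual arguments the callee's ABI expects.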
    fn trans_arguments_untupled(&mut self,
                                bcx: &BlockAndBuilder<'a, 'tcx>,
                                operand: &mir::Operand<'tcx>,
                                llargs: &mut Vec<ValueRef>,
                                fn_ty: &FnType,
                                next_idx: &mut usize,
                                callee: &mut CalleeData) {
        let tuple = self.trans_operand(bcx, operand);

        let arg_types = match tuple.ty.sty {
            ty::TyTuple(ref tys) => tys,
            _ => span_bug!(self.mir.span,
                           "bad final argument to \"rust-call\" fn {:?}", tuple.ty)
        };

        // Handle both by-ref and immediate tuples.
        match tuple.val {
            Ref(llval) => {
                let base = adt::MaybeSizedValue::sized(llval);
                for (n, &ty) in arg_types.iter().enumerate() {
                    let ptr = adt::trans_field_ptr(bcx, tuple.ty, base, Disr(0), n);
                    let val = if common::type_is_fat_ptr(bcx.ccx(), ty) {
                        let (lldata, llextra) = base::load_fat_ptr(bcx, ptr, ty);
                        Pair(lldata, llextra)
                    } else {
                        // trans_argument will load this if it needs to
                        Ref(ptr)
                    };
                    let op = OperandRef {
                        val: val,
                        ty: ty
                    };
                    self.trans_argument(bcx, op, llargs, fn_ty, next_idx, callee);
                }
            }
            Immediate(llval) => {
                let l = bcx.ccx().layout_of(tuple.ty);
                let v = if let layout::Univariant { ref variant, .. } = *l {
                    variant
                } else {
                    bug!("Not a tuple.");
                };
                for (n, &ty) in arg_types.iter().enumerate() {
                    let mut elem = bcx.extract_value(llval, v.memory_index[n] as usize);
                    // Truncate bools to i1, if needed
                    if ty.is_bool() && common::val_ty(elem) != Type::i1(bcx.ccx()) {
                        elem = bcx.trunc(elem, Type::i1(bcx.ccx()));
                    }
                    // If the tuple is immediate, the elements are as well
                    let op = OperandRef {
                        val: Immediate(elem),
                        ty: ty
                    };
                    self.trans_argument(bcx, op, llargs, fn_ty, next_idx, callee);
                }
            }
            Pair(a, b) => {
                let elems = [a, b];
                for (n, &ty) in arg_types.iter().enumerate() {
                    let mut elem = elems[n];
                    // Truncate bools to i1, if needed
                    if ty.is_bool() && common::val_ty(elem) != Type::i1(bcx.ccx()) {
                        elem = bcx.trunc(elem, Type::i1(bcx.ccx()));
                    }
                    // Pair is always made up of immediates
                    let op = OperandRef {
                        val: Immediate(elem),
                        ty: ty
                    };
                    self.trans_argument(bcx, op, llargs, fn_ty, next_idx, callee);
                }
            }
        }
    }

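    /// Returns the alloca where landing pads deposit the exception
    /// pointer/selector pair, creating (and lifetime-starting) it on first use.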
    fn get_personality_slot(&mut self, bcx: &BlockAndBuilder<'a, 'tcx>) -> ValueRef {
        let ccx = bcx.ccx();
        if let Some(slot) = self.llpersonalityslot {
            slot
        } else {
            let llretty = Type::struct_(ccx, &[Type::i8p(ccx), Type::i32(ccx)], false);
            let slot = bcx.fcx().alloca(llretty, "personalityslot");
            self.llpersonalityslot = Some(slot);
            Lifetime::Start.call(bcx, slot);
            slot
        }
    }

    /// Return the landingpad wrapper around the given basic block.
    ///
    /// No-op in MSVC SEH scheme.
    fn landing_pad_to(&mut self, target_bb: mir::BasicBlock) -> BasicBlockRef {
        if let Some(block) = self.landing_pads[target_bb] {
            return block;
        }

        if base::wants_msvc_seh(self.fcx.ccx.sess()) {
            return self.blocks[target_bb];
        }

        let target = self.build_block(target_bb);

        let bcx = self.fcx.build_new_block("cleanup");
        self.landing_pads[target_bb] = Some(bcx.llbb());

        let ccx = bcx.ccx();
        let llpersonality = self.fcx.eh_personality();
        let llretty = Type::struct_(ccx, &[Type::i8p(ccx), Type::i32(ccx)], false);
        let llretval = bcx.landing_pad(llretty, llpersonality, 1, self.fcx.llfn);
        bcx.set_cleanup(llretval);
        let slot = self.get_personality_slot(&bcx);
        bcx.store(llretval, slot);
        bcx.br(target.llbb());
        bcx.llbb()
    }

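    /// Decides, based on the block's cleanup kind, whether `bb` needs an MSVC
    /// `cleanuppad` funclet, and records the result in `funclets`.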
    pub fn init_cpad(&mut self, bb: mir::BasicBlock,
                     funclets: &mut IndexVec<mir::BasicBlock, Option<Funclet>>) {
        let bcx = self.build_block(bb);
        let data = &self.mir[bb];
        debug!("init_cpad({:?})", data);

        match self.cleanup_kinds[bb] {
            CleanupKind::NotCleanup => {
                funclets[bb] = None;
            }
            _ if !base::wants_msvc_seh(bcx.sess()) => {
                funclets[bb] = Funclet::gnu();
            }
            CleanupKind::Internal { funclet: _ } => {
                // FIXME: is this needed?
                bcx.set_personality_fn(self.fcx.eh_personality());
                funclets[bb] = None;
            }
            CleanupKind::Funclet => {
                bcx.set_personality_fn(self.fcx.eh_personality());
                let cleanup_pad = bcx.cleanup_pad(None, &[]);
                funclets[bb] = Funclet::msvc(cleanup_pad);
            }
        };
    }

    fn unreachable_block(&mut self) -> BasicBlockRef {
        self.unreachable_block.unwrap_or_else(|| {
            let bl = self.fcx.build_new_block("unreachable");
            bl.unreachable();
            self.unreachable_block = Some(bl.llbb());
            bl.llbb()
        })
    }

    fn build_block(&self, bb: mir::BasicBlock) -> BlockAndBuilder<'a, 'tcx> {
        BlockAndBuilder::new(self.blocks[bb], self.fcx)
    }

    fn make_return_dest(&mut self, bcx: &BlockAndBuilder<'a, 'tcx>,
                        dest: &mir::Lvalue<'tcx>, fn_ret_ty: &ArgType,
                        llargs: &mut Vec<ValueRef>, is_intrinsic: bool) -> ReturnDest {
        // If the return is ignored, we can just return a do-nothing ReturnDest
        if fn_ret_ty.is_ignore() {
            return ReturnDest::Nothing;
        }
        let dest = if let mir::Lvalue::Local(index) = *dest {
            let ret_ty = self.monomorphized_lvalue_ty(dest);
            match self.locals[index] {
                LocalRef::Lvalue(dest) => dest,
                LocalRef::Operand(None) => {
                    // Handle temporary lvalues, specifically Operand ones, as
                    // they don't have allocas
                    return if fn_ret_ty.is_indirect() {
                        // Odd, but possible, case: we have an operand temporary,
                        // but the calling convention has an indirect return.
                        let tmp = base::alloc_ty(bcx, ret_ty, "tmp_ret");
                        llargs.push(tmp);
                        ReturnDest::IndirectOperand(tmp, index)
                    } else if is_intrinsic {
                        // Currently, intrinsics always need a location to store
                        // the result, so we create a temporary alloca for the
                        // result
                        let tmp = base::alloc_ty(bcx, ret_ty, "tmp_ret");
                        ReturnDest::IndirectOperand(tmp, index)
                    } else {
                        ReturnDest::DirectOperand(index)
                    };
                }
                LocalRef::Operand(Some(_)) => {
                    bug!("lvalue local already assigned to");
                }
            }
        } else {
            self.trans_lvalue(bcx, dest)
        };
        if fn_ret_ty.is_indirect() {
            llargs.push(dest.llval);
            ReturnDest::Nothing
        } else {
            ReturnDest::Store(dest.llval)
        }
    }

    fn trans_transmute(&mut self, bcx: &BlockAndBuilder<'a, 'tcx>,
                       src: &mir::Operand<'tcx>, dst: LvalueRef<'tcx>) {
        let mut val = self.trans_operand(bcx, src);
        if let ty::TyFnDef(def_id, substs, _) = val.ty.sty {
            let llouttype = type_of::type_of(bcx.ccx(), dst.ty.to_ty(bcx.tcx()));
            let out_type_size = llbitsize_of_real(bcx.ccx(), llouttype);
            if out_type_size != 0 {
                // FIXME #19925 Remove this hack after a release cycle.
                let f = Callee::def(bcx.ccx(), def_id, substs);
                let ty = match f.ty.sty {
                    ty::TyFnDef(.., f) => bcx.tcx().mk_fn_ptr(f),
                    _ => f.ty
                };
                val = OperandRef {
                    val: Immediate(f.reify(bcx.ccx())),
                    ty: ty
                };
            }
        }

        let llty = type_of::type_of(bcx.ccx(), val.ty);
        let cast_ptr = bcx.pointercast(dst.llval, llty.ptr_to());
        self.store_operand(bcx, cast_ptr, val);
    }

    // Stores the return value of a function call into its final location.
    fn store_return(&mut self,
                    bcx: &BlockAndBuilder<'a, 'tcx>,
                    dest: ReturnDest,
                    ret_ty: ArgType,
                    op: OperandRef<'tcx>) {
        use self::ReturnDest::*;

        match dest {
            Nothing => (),
            Store(dst) => ret_ty.store(bcx, op.immediate(), dst),
            IndirectOperand(tmp, index) => {
                let op = self.trans_load(bcx, tmp, op.ty);
                self.locals[index] = LocalRef::Operand(Some(op));
            }
            DirectOperand(index) => {
                // If there is a cast, we have to store and reload.
                let op = if ret_ty.cast.is_some() {
                    let tmp = base::alloc_ty(bcx, op.ty, "tmp_ret");
                    ret_ty.store(bcx, op.immediate(), tmp);
                    self.trans_load(bcx, tmp, op.ty)
                } else {
                    op.unpack_if_pair(bcx)
                };
                self.locals[index] = LocalRef::Operand(Some(op));
            }
        }
    }
}

enum ReturnDest {
    // Do nothing; the return value is indirect or ignored.
    Nothing,
    // Store the return value to the pointer.
    Store(ValueRef),
    // Store an indirect return value to an operand local lvalue.
    IndirectOperand(ValueRef, mir::Local),
    // Store a direct return value to an operand local lvalue.
    DirectOperand(mir::Local)
}