2013-02-18 16:16:21 -06:00
|
|
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
|
|
|
|
// file at the top-level directory of this distribution and at
|
|
|
|
// http://rust-lang.org/COPYRIGHT.
|
|
|
|
//
|
|
|
|
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
|
|
|
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
|
|
|
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
|
|
|
// option. This file may not be copied, modified, or distributed
|
|
|
|
// except according to those terms.
|
|
|
|
|
2014-11-25 20:17:11 -06:00
|
|
|
//! # Representation of Algebraic Data Types
|
|
|
|
//!
|
|
|
|
//! This module determines how to represent enums, structs, and tuples
|
|
|
|
//! based on their monomorphized types; it is responsible both for
|
|
|
|
//! choosing a representation and translating basic operations on
|
|
|
|
//! values of those types. (Note: exporting the representations for
|
|
|
|
//! debuggers is handled in debuginfo.rs, not here.)
|
|
|
|
//!
|
|
|
|
//! Note that the interface treats everything as a general case of an
|
|
|
|
//! enum, so structs/tuples/etc. have one pseudo-variant with
|
|
|
|
//! discriminant 0; i.e., as if they were a univariant enum.
|
|
|
|
//!
|
|
|
|
//! Having everything in one place will enable improvements to data
|
|
|
|
//! structure representation; possibilities include:
|
|
|
|
//!
|
|
|
|
//! - User-specified alignment (e.g., cacheline-aligning parts of
|
|
|
|
//! concurrently accessed data structures); LLVM can't represent this
|
|
|
|
//! directly, so we'd have to insert padding fields in any structure
|
|
|
|
//! that might contain one and adjust GEP indices accordingly. See
|
|
|
|
//! issue #4578.
|
|
|
|
//!
|
|
|
|
//! - Store nested enums' discriminants in the same word. Rather, if
|
|
|
|
//! some variants start with enums, and those enums representations
|
|
|
|
//! have unused alignment padding between discriminant and body, the
|
|
|
|
//! outer enum's discriminant can be stored there and those variants
|
|
|
|
//! can start at offset 0. Kind of fancy, and might need work to
|
|
|
|
//! make copies of the inner enum type cooperate, but it could help
|
|
|
|
//! with `Option` or `Result` wrapped around another enum.
|
|
|
|
//!
|
|
|
|
//! - Tagged pointers would be neat, but given that any type can be
|
|
|
|
//! used unboxed and any field can have pointers (including mutable)
|
|
|
|
//! taken to it, implementing them for Rust seems difficult.
|
2013-02-28 01:08:52 -06:00
|
|
|
|
2014-11-06 02:05:53 -06:00
|
|
|
pub use self::Repr::*;
|
2016-01-16 09:03:09 -06:00
|
|
|
use super::Disr;
|
2014-11-06 02:05:53 -06:00
|
|
|
|
2015-12-06 07:38:29 -06:00
|
|
|
use std;
|
2014-04-21 19:03:02 -05:00
|
|
|
use std::rc::Rc;
|
2013-02-28 01:08:52 -06:00
|
|
|
|
2014-07-07 19:58:01 -05:00
|
|
|
use llvm::{ValueRef, True, IntEQ, IntNE};
|
2014-12-04 15:44:51 -06:00
|
|
|
use back::abi::FAT_PTR_ADDR;
|
2014-05-13 10:35:42 -05:00
|
|
|
use middle::subst;
|
2015-06-30 04:18:03 -05:00
|
|
|
use middle::ty::{self, Ty};
|
2015-01-07 12:58:27 -06:00
|
|
|
use syntax::ast;
|
2015-09-14 04:58:20 -05:00
|
|
|
use syntax::attr;
|
|
|
|
use syntax::attr::IntType;
|
2014-11-15 19:30:33 -06:00
|
|
|
use trans::_match;
|
2016-01-08 13:40:52 -06:00
|
|
|
use trans::base::InitAlloca;
|
2014-11-15 19:30:33 -06:00
|
|
|
use trans::build::*;
|
|
|
|
use trans::cleanup;
|
|
|
|
use trans::cleanup::CleanupMethods;
|
|
|
|
use trans::common::*;
|
|
|
|
use trans::datum;
|
2014-12-11 06:53:30 -06:00
|
|
|
use trans::debuginfo::DebugLoc;
|
2015-12-06 07:38:29 -06:00
|
|
|
use trans::glue;
|
2014-11-15 19:30:33 -06:00
|
|
|
use trans::machine;
|
2015-01-07 12:58:27 -06:00
|
|
|
use trans::monomorphize;
|
2014-11-15 19:30:33 -06:00
|
|
|
use trans::type_::Type;
|
|
|
|
use trans::type_of;
|
2013-02-18 16:16:21 -06:00
|
|
|
|
2013-07-01 00:42:30 -05:00
|
|
|
/// Shorthand for the `#[repr(...)]` hint attached to an ADT definition.
type Hint = attr::ReprAttr;
|
|
|
|
|
2015-07-27 07:45:20 -05:00
|
|
|
// Representation of the context surrounding an unsized type. I want
|
|
|
|
// to be able to track the drop flags that are injected by trans.
|
|
|
|
#[derive(Clone, Copy, PartialEq, Debug)]
|
|
|
|
pub struct TypeContext {
|
|
|
|
prefix: Type,
|
|
|
|
needs_drop_flag: bool,
|
|
|
|
}
|
|
|
|
|
|
|
|
impl TypeContext {
|
|
|
|
pub fn prefix(&self) -> Type { self.prefix }
|
|
|
|
pub fn needs_drop_flag(&self) -> bool { self.needs_drop_flag }
|
|
|
|
|
|
|
|
fn direct(t: Type) -> TypeContext {
|
|
|
|
TypeContext { prefix: t, needs_drop_flag: false }
|
|
|
|
}
|
|
|
|
fn may_need_drop_flag(t: Type, needs_drop_flag: bool) -> TypeContext {
|
|
|
|
TypeContext { prefix: t, needs_drop_flag: needs_drop_flag }
|
|
|
|
}
|
|
|
|
pub fn to_string(self) -> String {
|
|
|
|
let TypeContext { prefix, needs_drop_flag } = self;
|
|
|
|
format!("TypeContext {{ prefix: {}, needs_drop_flag: {} }}",
|
|
|
|
prefix.to_string(), needs_drop_flag)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2013-02-28 01:08:52 -06:00
|
|
|
/// Representations.
///
/// Every monomorphized ADT (with structs/tuples/closures treated as a
/// univariant enum, per the module docs) is lowered to exactly one of
/// these layout strategies.
#[derive(Eq, PartialEq, Debug)]
pub enum Repr<'tcx> {
    /// C-like enums; basically an int.
    CEnum(IntType, Disr, Disr), // discriminant range (signedness based on the IntType)
    /// Single-case variants, and structs/tuples/records.
    ///
    /// Structs with destructors need a dynamic destroyedness flag to
    /// avoid running the destructor too many times; this is included
    /// in the `Struct` if present.
    /// (The flag if nonzero, represents the initialization value to use;
    /// if zero, then use no flag at all.)
    Univariant(Struct<'tcx>, u8),
    /// General-case enums: for each case there is a struct, and they
    /// all start with a field for the discriminant.
    ///
    /// Types with destructors need a dynamic destroyedness flag to
    /// avoid running the destructor too many times; the last argument
    /// indicates whether such a flag is present.
    /// (The flag, if nonzero, represents the initialization value to use;
    /// if zero, then use no flag at all.)
    General(IntType, Vec<Struct<'tcx>>, u8),
    /// Two cases distinguished by a nullable pointer: the case with discriminant
    /// `nndiscr` must have single field which is known to be nonnull due to its type.
    /// The other case is known to be zero sized. Hence we represent the enum
    /// as simply a nullable pointer: if not null it indicates the `nndiscr` variant,
    /// otherwise it indicates the other case.
    RawNullablePointer {
        nndiscr: Disr,
        nnty: Ty<'tcx>,
        nullfields: Vec<Ty<'tcx>>
    },
    /// Two cases distinguished by a nullable pointer: the case with discriminant
    /// `nndiscr` is represented by the struct `nonnull`, where the `discrfield`th
    /// field is known to be nonnull due to its type; if that field is null, then
    /// it represents the other case, which is inhabited by at most one value
    /// (and all other fields are undefined/unused).
    ///
    /// For example, `std::option::Option` instantiated at a safe pointer type
    /// is represented such that `None` is a null pointer and `Some` is the
    /// identity function.
    StructWrappedNullablePointer {
        nonnull: Struct<'tcx>,
        nndiscr: Disr,
        discrfield: DiscrField,
        nullfields: Vec<Ty<'tcx>>,
    }
}
|
|
|
|
|
2013-02-28 01:08:52 -06:00
|
|
|
/// For structs, and struct-like parts of anything fancier.
///
/// Describes the concrete field layout of one variant body (or of a
/// whole struct/tuple treated as a univariant enum).
#[derive(Eq, PartialEq, Debug)]
pub struct Struct<'tcx> {
    // If the struct is DST, then the size and alignment do not take into
    // account the unsized fields of the struct.
    pub size: u64,
    pub align: u32,
    // False when the struct contains an unsized field (DST).
    pub sized: bool,
    // True for `#[repr(packed)]` layouts (see `lookup_packed` at the use site).
    pub packed: bool,
    // Monomorphized types of the fields, in layout order.
    pub fields: Vec<Ty<'tcx>>,
}
|
2013-02-18 16:16:21 -06:00
|
|
|
|
2015-12-06 07:38:29 -06:00
|
|
|
#[derive(Copy, Clone)]
|
|
|
|
pub struct MaybeSizedValue {
|
|
|
|
pub value: ValueRef,
|
|
|
|
pub meta: ValueRef,
|
|
|
|
}
|
|
|
|
|
|
|
|
impl MaybeSizedValue {
|
|
|
|
pub fn sized(value: ValueRef) -> MaybeSizedValue {
|
|
|
|
MaybeSizedValue {
|
|
|
|
value: value,
|
|
|
|
meta: std::ptr::null_mut()
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn unsized_(value: ValueRef, meta: ValueRef) -> MaybeSizedValue {
|
|
|
|
MaybeSizedValue {
|
|
|
|
value: value,
|
|
|
|
meta: meta
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn has_meta(&self) -> bool {
|
|
|
|
!self.meta.is_null()
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2014-11-24 19:06:06 -06:00
|
|
|
/// Convenience for `represent_type`. There should probably be more or
|
|
|
|
/// these, for places in trans where the `Ty` isn't directly
|
|
|
|
/// available.
|
2014-09-29 14:11:30 -05:00
|
|
|
pub fn represent_node<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
|
|
|
|
node: ast::NodeId) -> Rc<Repr<'tcx>> {
|
2013-02-18 16:16:21 -06:00
|
|
|
represent_type(bcx.ccx(), node_id_type(bcx, node))
|
|
|
|
}
|
|
|
|
|
2013-02-28 01:08:52 -06:00
|
|
|
/// Decides how to represent a given type.
|
2014-09-29 14:11:30 -05:00
|
|
|
pub fn represent_type<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
2015-04-19 17:52:26 -05:00
|
|
|
t: Ty<'tcx>)
|
|
|
|
-> Rc<Repr<'tcx>> {
|
2015-06-18 12:25:05 -05:00
|
|
|
debug!("Representing: {}", t);
|
2014-11-06 11:25:16 -06:00
|
|
|
match cx.adt_reprs().borrow().get(&t) {
|
2014-04-21 19:03:02 -05:00
|
|
|
Some(repr) => return repr.clone(),
|
2014-03-20 21:49:20 -05:00
|
|
|
None => {}
|
2013-02-25 03:49:21 -06:00
|
|
|
}
|
2013-12-18 19:58:24 -06:00
|
|
|
|
2014-04-21 19:03:02 -05:00
|
|
|
let repr = Rc::new(represent_type_uncached(cx, t));
|
2014-12-20 02:09:35 -06:00
|
|
|
debug!("Represented as: {:?}", repr);
|
2014-09-05 11:18:53 -05:00
|
|
|
cx.adt_reprs().borrow_mut().insert(t, repr.clone());
|
2014-04-21 19:03:02 -05:00
|
|
|
repr
|
2013-03-31 17:55:30 -05:00
|
|
|
}
|
|
|
|
|
2015-08-30 07:32:17 -05:00
|
|
|
/// Broadcasts `val` into every byte of a `u32`
/// (e.g. `0xab` becomes `0xabab_abab`).
const fn repeat_u8_as_u32(val: u8) -> u32 {
    (val as u32) * 0x0101_0101
}

/// Broadcasts `val` into every byte of a `u64`.
const fn repeat_u8_as_u64(val: u8) -> u64 {
    (val as u64) * 0x0101_0101_0101_0101
}
|
|
|
|
|
Add dropflag hints (stack-local booleans) for unfragmented paths in trans.
Added code to maintain these hints at runtime, and to conditionalize
drop-filling and calls to destructors.
In this early stage, we are using hints, so we are always free to
leave out a flag for a path -- then we just pass `None` as the
dropflag hint in the corresponding schedule cleanup call. But, once a
path has a hint, we must at least maintain it: i.e. if the hint
exists, we must ensure it is never set to "moved" if the data in
question might actually have been initialized. It remains sound to
conservatively set the hint to "initialized" as long as the true
drop-flag embedded in the value itself is up-to-date.
----
Here are some high-level details I want to point out:
* We maintain the hint in Lvalue::post_store, marking the lvalue as
moved. (But also continue drop-filling if necessary.)
* We update the hint on ExprAssign.
* We pass along the hint in once closures that capture-by-move.
* You only call `drop_ty` for state that does not have an associated hint.
If you have a hint, you must call `drop_ty_core` instead.
(Originally I passed the hint into `drop_ty` as well, to make the
connection to a hint more apparent, but the vast majority of
current calls to `drop_ty` are in contexts where no hint is
available, so it just seemed like noise in the resulting diff.)
2015-06-07 02:25:14 -05:00
|
|
|
/// `DTOR_NEEDED_HINT` is a stack-local hint that just means
/// "we do not know whether the destructor has run or not; check the
/// drop-flag embedded in the value itself."
pub const DTOR_NEEDED_HINT: u8 = 0x3d;

/// `DTOR_MOVED_HINT` is a stack-local hint that means "this value has
/// definitely been moved; you do not need to run its destructor."
///
/// (However, for now, such values may still end up being explicitly
/// zeroed by the generated code; this is the distinction between
/// `datum::DropFlagInfo::ZeroAndMaintain` versus
/// `datum::DropFlagInfo::DontZeroJustUse`.)
pub const DTOR_MOVED_HINT: u8 = 0x2d;

/// Drop-flag byte meaning the destructor still needs to run.
pub const DTOR_NEEDED: u8 = 0xd4;
/// `DTOR_NEEDED` replicated into every byte of a `u64`.
/// (Presumably for word-at-a-time drop-filling -- confirm at use sites.)
#[allow(dead_code)]
pub const DTOR_NEEDED_U64: u64 = repeat_u8_as_u64(DTOR_NEEDED);

/// Drop-flag byte meaning the destructor no longer needs to run.
pub const DTOR_DONE: u8 = 0x1d;
/// `DTOR_DONE` replicated into every byte of a `u64`.
#[allow(dead_code)]
pub const DTOR_DONE_U64: u64 = repeat_u8_as_u64(DTOR_DONE);
|
2015-02-10 03:04:39 -06:00
|
|
|
|
|
|
|
fn dtor_to_init_u8(dtor: bool) -> u8 {
|
|
|
|
if dtor { DTOR_NEEDED } else { 0 }
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Accessor for the type used to store drop flags (currently `u8`).
pub trait GetDtorType<'tcx> { fn dtor_type(&self) -> Ty<'tcx>; }
impl<'tcx> GetDtorType<'tcx> for ty::ctxt<'tcx> {
    // Drop flags are a single byte; see the DTOR_* constants above.
    fn dtor_type(&self) -> Ty<'tcx> { self.types.u8 }
}
|
|
|
|
|
|
|
|
/// Whether a drop-flag initialization byte denotes an active flag.
/// Any nonzero value counts; zero means "no flag in use"
/// (the convention established by `dtor_to_init_u8`).
fn dtor_active(flag: u8) -> bool {
    match flag {
        0 => false,
        _ => true,
    }
}
|
|
|
|
|
2014-09-29 14:11:30 -05:00
|
|
|
/// Computes the `Repr` for `t` from scratch (no cache lookup).
///
/// `t` must be a tuple, struct, closure, or enum type; anything else
/// is a compiler bug. Enums are classified, in order, as: uninhabited
/// (unit), C-like (`CEnum`), single-variant (`Univariant`), one of the
/// two nullable-pointer optimizations, or the general tagged layout
/// (`General`).
fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
                                     t: Ty<'tcx>) -> Repr<'tcx> {
    match t.sty {
        // Tuples: a pseudo-variant with discriminant 0 and no drop flag.
        ty::TyTuple(ref elems) => {
            Univariant(mk_struct(cx, &elems[..], false, t), 0)
        }
        ty::TyStruct(def, substs) => {
            let mut ftys = def.struct_variant().fields.iter().map(|field| {
                monomorphize::field_ty(cx.tcx(), substs, field)
            }).collect::<Vec<_>>();
            let packed = cx.tcx().lookup_packed(def.did);
            // FIXME(16758) don't add a drop flag to unsized structs, as it
            // won't actually be in the location we say it is because it'll be after
            // the unsized field. Several other pieces of code assume that the unsized
            // field is definitely the last one.
            let dtor = def.dtor_kind().has_drop_flag() && type_is_sized(cx.tcx(), t);
            if dtor {
                // The drop flag is appended as a trailing pseudo-field.
                ftys.push(cx.tcx().dtor_type());
            }

            Univariant(mk_struct(cx, &ftys[..], packed, t), dtor_to_init_u8(dtor))
        }
        // Closures: laid out like a struct of their captured upvars.
        ty::TyClosure(_, ref substs) => {
            Univariant(mk_struct(cx, &substs.upvar_tys, false, t), 0)
        }
        ty::TyEnum(def, substs) => {
            let cases = get_cases(cx.tcx(), def, substs);
            // Only the first repr hint is honored here.
            let hint = *cx.tcx().lookup_repr_hints(def.did).get(0)
                .unwrap_or(&attr::ReprAny);

            let dtor = def.dtor_kind().has_drop_flag();

            if cases.is_empty() {
                // Uninhabitable; represent as unit
                // (Typechecking will reject discriminant-sizing attrs.)
                assert_eq!(hint, attr::ReprAny);
                let ftys = if dtor { vec!(cx.tcx().dtor_type()) } else { vec!() };
                return Univariant(mk_struct(cx, &ftys[..], false, t),
                                  dtor_to_init_u8(dtor));
            }

            if !dtor && cases.iter().all(|c| c.tys.is_empty()) {
                // All bodies empty -> intlike
                let discrs: Vec<_> = cases.iter().map(|c| Disr::from(c.discr)).collect();
                // Track the discriminant range under both unsigned and
                // signed interpretation; mk_cenum picks the int type.
                let bounds = IntBounds {
                    ulo: discrs.iter().min().unwrap().0,
                    uhi: discrs.iter().max().unwrap().0,
                    slo: discrs.iter().map(|n| n.0 as i64).min().unwrap(),
                    shi: discrs.iter().map(|n| n.0 as i64).max().unwrap()
                };
                return mk_cenum(cx, hint, &bounds);
            }

            // Since there's at least one
            // non-empty body, explicit discriminants should have
            // been rejected by a checker before this point.
            if !cases.iter().enumerate().all(|(i,c)| c.discr == Disr::from(i)) {
                cx.sess().bug(&format!("non-C-like enum {} with specified \
                                        discriminants",
                                       cx.tcx().item_path_str(def.did)));
            }

            if cases.len() == 1 {
                // Equivalent to a struct/tuple/newtype.
                // (Typechecking will reject discriminant-sizing attrs.)
                assert_eq!(hint, attr::ReprAny);
                let mut ftys = cases[0].tys.clone();
                if dtor { ftys.push(cx.tcx().dtor_type()); }
                return Univariant(mk_struct(cx, &ftys[..], false, t),
                                  dtor_to_init_u8(dtor));
            }

            if !dtor && cases.len() == 2 && hint == attr::ReprAny {
                // Nullable pointer optimization
                // Try each of the two variants as the "non-null" one;
                // the other must be zero-sized.
                let mut discr = 0;
                while discr < 2 {
                    if cases[1 - discr].is_zerolen(cx, t) {
                        let st = mk_struct(cx, &cases[discr].tys,
                                           false, t);
                        match cases[discr].find_ptr(cx) {
                            // A lone pointer-like field: the enum IS the pointer.
                            Some(ref df) if df.len() == 1 && st.fields.len() == 1 => {
                                return RawNullablePointer {
                                    nndiscr: Disr::from(discr),
                                    nnty: st.fields[0],
                                    nullfields: cases[1 - discr].tys.clone()
                                };
                            }
                            Some(mut discrfield) => {
                                // find_ptr builds the path leaf-first; append the
                                // leading 0 (struct itself) and flip to GEP order.
                                discrfield.push(0);
                                discrfield.reverse();
                                return StructWrappedNullablePointer {
                                    nndiscr: Disr::from(discr),
                                    nonnull: st,
                                    discrfield: discrfield,
                                    nullfields: cases[1 - discr].tys.clone()
                                };
                            }
                            None => {}
                        }
                    }
                    discr += 1;
                }
            }

            // The general case.
            assert!((cases.len() - 1) as i64 >= 0);
            let bounds = IntBounds { ulo: 0, uhi: (cases.len() - 1) as u64,
                                     slo: 0, shi: (cases.len() - 1) as i64 };
            let min_ity = range_to_inttype(cx, hint, &bounds);

            // Create the set of structs that represent each variant
            // Use the minimum integer type we figured out above
            let fields : Vec<_> = cases.iter().map(|c| {
                let mut ftys = vec!(ty_of_inttype(cx.tcx(), min_ity));
                ftys.extend_from_slice(&c.tys);
                if dtor { ftys.push(cx.tcx().dtor_type()); }
                mk_struct(cx, &ftys, false, t)
            }).collect();


            // Check to see if we should use a different type for the
            // discriminant. If the overall alignment of the type is
            // the same as the first field in each variant, we can safely use
            // an alignment-sized type.
            // We increase the size of the discriminant to avoid LLVM copying
            // padding when it doesn't need to. This normally causes unaligned
            // load/stores and excessive memcpy/memset operations. By using a
            // bigger integer size, LLVM can be sure about it's contents and
            // won't be so conservative.
            // This check is needed to avoid increasing the size of types when
            // the alignment of the first field is smaller than the overall
            // alignment of the type.
            let (_, align) = union_size_and_align(&fields);
            let mut use_align = true;
            for st in &fields {
                // Get the first non-zero-sized field
                // (skip(1) skips the discriminant itself).
                let field = st.fields.iter().skip(1).filter(|ty| {
                    let t = type_of::sizing_type_of(cx, **ty);
                    machine::llsize_of_real(cx, t) != 0 ||
                    // This case is only relevant for zero-sized types with large alignment
                    machine::llalign_of_min(cx, t) != 1
                }).next();

                if let Some(field) = field {
                    let field_align = type_of::align_of(cx, *field);
                    if field_align != align {
                        use_align = false;
                        break;
                    }
                }
            }
            let ity = if use_align {
                // Use the overall alignment
                match align {
                    1 => attr::UnsignedInt(ast::TyU8),
                    2 => attr::UnsignedInt(ast::TyU16),
                    4 => attr::UnsignedInt(ast::TyU32),
                    8 if machine::llalign_of_min(cx, Type::i64(cx)) == 8 =>
                        attr::UnsignedInt(ast::TyU64),
                    _ => min_ity // use min_ity as a fallback
                }
            } else {
                min_ity
            };

            // Rebuild the variant structs with the (possibly widened)
            // discriminant type.
            let fields : Vec<_> = cases.iter().map(|c| {
                let mut ftys = vec!(ty_of_inttype(cx.tcx(), ity));
                ftys.extend_from_slice(&c.tys);
                if dtor { ftys.push(cx.tcx().dtor_type()); }
                mk_struct(cx, &ftys[..], false, t)
            }).collect();

            ensure_enum_fits_in_address_space(cx, &fields[..], t);

            General(ity, fields, dtor_to_init_u8(dtor))
        }
        _ => cx.sess().bug(&format!("adt::represent_type called on non-ADT type: {}", t))
    }
}
|
|
|
|
|
2013-11-28 14:22:53 -06:00
|
|
|
// this should probably all be in ty
/// One enum variant, reduced to its discriminant value and the
/// monomorphized types of its fields.
struct Case<'tcx> {
    discr: Disr,
    tys: Vec<Ty<'tcx>>
}

/// This represents the (GEP) indices to follow to get to the discriminant field
pub type DiscrField = Vec<usize>;
|
2014-07-03 21:26:38 -05:00
|
|
|
|
2014-12-21 21:21:53 -06:00
|
|
|
/// Searches `ty` for a field that the type system guarantees is never
/// null (a reference, `Box`, function pointer, or `NonZero` wrapper),
/// so its null state can encode an enum's other variant.
///
/// Returns the GEP index path to that field, or `None` if no such
/// field exists. NOTE: indices are pushed *after* the recursive call,
/// so the returned path is leaf-first; the caller
/// (`represent_type_uncached`) reverses it into GEP order.
fn find_discr_field_candidate<'tcx>(tcx: &ty::ctxt<'tcx>,
                                    ty: Ty<'tcx>,
                                    mut path: DiscrField) -> Option<DiscrField> {
    match ty.sty {
        // Fat &T/&mut T/Box<T> i.e. T is [T], str, or Trait
        // Only the address half of the fat pointer is non-null.
        ty::TyRef(_, ty::TypeAndMut { ty, .. }) | ty::TyBox(ty) if !type_is_sized(tcx, ty) => {
            path.push(FAT_PTR_ADDR);
            Some(path)
        },

        // Regular thin pointer: &T/&mut T/Box<T>
        ty::TyRef(..) | ty::TyBox(..) => Some(path),

        // Functions are just pointers
        ty::TyBareFn(..) => Some(path),

        // Is this the NonZero lang item wrapping a pointer or integer type?
        ty::TyStruct(def, substs) if Some(def.did) == tcx.lang_items.non_zero() => {
            // NonZero is a single-field wrapper; inspect that field.
            let nonzero_fields = &def.struct_variant().fields;
            assert_eq!(nonzero_fields.len(), 1);
            let field_ty = monomorphize::field_ty(tcx, substs, &nonzero_fields[0]);
            match field_ty.sty {
                // NonZero around a fat raw pointer: descend into field 0,
                // then the address half.
                ty::TyRawPtr(ty::TypeAndMut { ty, .. }) if !type_is_sized(tcx, ty) => {
                    path.extend_from_slice(&[0, FAT_PTR_ADDR]);
                    Some(path)
                },
                // NonZero around a thin raw pointer or integer.
                ty::TyRawPtr(..) | ty::TyInt(..) | ty::TyUint(..) => {
                    path.push(0);
                    Some(path)
                },
                _ => None
            }
        },

        // Perhaps one of the fields of this struct is non-zero
        // let's recurse and find out
        ty::TyStruct(def, substs) => {
            for (j, field) in def.struct_variant().fields.iter().enumerate() {
                let field_ty = monomorphize::field_ty(tcx, substs, field);
                if let Some(mut fpath) = find_discr_field_candidate(tcx, field_ty, path.clone()) {
                    fpath.push(j);
                    return Some(fpath);
                }
            }
            None
        },

        // Perhaps one of the upvars of this struct is non-zero
        // Let's recurse and find out!
        ty::TyClosure(_, ref substs) => {
            for (j, &ty) in substs.upvar_tys.iter().enumerate() {
                if let Some(mut fpath) = find_discr_field_candidate(tcx, ty, path.clone()) {
                    fpath.push(j);
                    return Some(fpath);
                }
            }
            None
        },

        // Can we use one of the fields in this tuple?
        ty::TyTuple(ref tys) => {
            for (j, &ty) in tys.iter().enumerate() {
                if let Some(mut fpath) = find_discr_field_candidate(tcx, ty, path.clone()) {
                    fpath.push(j);
                    return Some(fpath);
                }
            }
            None
        },

        // Is this a fixed-size array of something non-zero
        // with at least one element?
        ty::TyArray(ety, d) if d > 0 => {
            // Element 0 always exists, so its non-null field qualifies.
            if let Some(mut vpath) = find_discr_field_candidate(tcx, ety, path) {
                vpath.push(0);
                Some(vpath)
            } else {
                None
            }
        },

        // Anything else is not a pointer
        _ => None
    }
}
|
2014-07-03 21:26:38 -05:00
|
|
|
|
2014-12-04 15:44:51 -06:00
|
|
|
impl<'tcx> Case<'tcx> {
|
|
|
|
fn is_zerolen<'a>(&self, cx: &CrateContext<'a, 'tcx>, scapegoat: Ty<'tcx>) -> bool {
|
2015-02-20 13:08:14 -06:00
|
|
|
mk_struct(cx, &self.tys, false, scapegoat).size == 0
|
2014-12-04 15:44:51 -06:00
|
|
|
}
|
2014-07-03 21:26:38 -05:00
|
|
|
|
2014-12-04 15:44:51 -06:00
|
|
|
fn find_ptr<'a>(&self, cx: &CrateContext<'a, 'tcx>) -> Option<DiscrField> {
|
|
|
|
for (i, &ty) in self.tys.iter().enumerate() {
|
2014-12-21 21:21:53 -06:00
|
|
|
if let Some(mut path) = find_discr_field_candidate(cx.tcx(), ty, vec![]) {
|
|
|
|
path.push(i);
|
|
|
|
return Some(path);
|
2014-04-20 07:43:37 -05:00
|
|
|
}
|
2014-07-03 21:26:38 -05:00
|
|
|
}
|
|
|
|
None
|
2013-06-02 15:03:35 -05:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2014-09-29 14:11:30 -05:00
|
|
|
fn get_cases<'tcx>(tcx: &ty::ctxt<'tcx>,
|
2015-08-07 06:41:33 -05:00
|
|
|
adt: ty::AdtDef<'tcx>,
|
2014-09-29 14:11:30 -05:00
|
|
|
substs: &subst::Substs<'tcx>)
|
|
|
|
-> Vec<Case<'tcx>> {
|
2015-08-02 14:52:50 -05:00
|
|
|
adt.variants.iter().map(|vi| {
|
|
|
|
let field_tys = vi.fields.iter().map(|field| {
|
|
|
|
monomorphize::field_ty(tcx, substs, field)
|
2014-03-28 14:42:34 -05:00
|
|
|
}).collect();
|
2016-01-16 09:03:09 -06:00
|
|
|
Case { discr: Disr::from(vi.disr_val), tys: field_tys }
|
2014-03-28 14:42:34 -05:00
|
|
|
}).collect()
|
2013-06-02 15:03:35 -05:00
|
|
|
}
|
|
|
|
|
2014-09-29 14:11:30 -05:00
|
|
|
fn mk_struct<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
|
|
|
tys: &[Ty<'tcx>], packed: bool,
|
|
|
|
scapegoat: Ty<'tcx>)
|
|
|
|
-> Struct<'tcx> {
|
2014-12-18 08:26:10 -06:00
|
|
|
let sized = tys.iter().all(|&ty| type_is_sized(cx.tcx(), ty));
|
2014-10-14 15:40:21 -05:00
|
|
|
let lltys : Vec<Type> = if sized {
|
2015-04-19 17:52:26 -05:00
|
|
|
tys.iter().map(|&ty| type_of::sizing_type_of(cx, ty)).collect()
|
DST coercions and DST structs
[breaking-change]
1. The internal layout for traits has changed from (vtable, data) to (data, vtable). If you were relying on this in unsafe transmutes, you might get some very weird and apparently unrelated errors. You should not be doing this! Prefer not to do this at all, but if you must, you should use raw::TraitObject rather than hardcoding rustc's internal representation into your code.
2. The minimal type of reference-to-vec-literals (e.g., `&[1, 2, 3]`) is now a fixed size vec (e.g., `&[int, ..3]`) where it used to be an unsized vec (e.g., `&[int]`). If you want the unszied type, you must explicitly give the type (e.g., `let x: &[_] = &[1, 2, 3]`). Note in particular where multiple blocks must have the same type (e.g., if and else clauses, vec elements), the compiler will not coerce to the unsized type without a hint. E.g., `[&[1], &[1, 2]]` used to be a valid expression of type '[&[int]]'. It no longer type checks since the first element now has type `&[int, ..1]` and the second has type &[int, ..2]` which are incompatible.
3. The type of blocks (including functions) must be coercible to the expected type (used to be a subtype). Mostly this makes things more flexible and not less (in particular, in the case of coercing function bodies to the return type). However, in some rare cases, this is less flexible. TBH, I'm not exactly sure of the exact effects. I think the change causes us to resolve inferred type variables slightly earlier which might make us slightly more restrictive. Possibly it only affects blocks with unreachable code. E.g., `if ... { fail!(); "Hello" }` used to type check, it no longer does. The fix is to add a semicolon after the string.
2014-08-04 07:20:11 -05:00
|
|
|
} else {
|
2014-12-18 08:26:10 -06:00
|
|
|
tys.iter().filter(|&ty| type_is_sized(cx.tcx(), *ty))
|
2014-10-14 15:40:21 -05:00
|
|
|
.map(|&ty| type_of::sizing_type_of(cx, ty)).collect()
|
|
|
|
};
|
|
|
|
|
2015-02-18 13:48:57 -06:00
|
|
|
ensure_struct_fits_in_address_space(cx, &lltys[..], packed, scapegoat);
|
2014-10-14 15:40:21 -05:00
|
|
|
|
2015-02-18 13:48:57 -06:00
|
|
|
let llty_rec = Type::struct_(cx, &lltys[..], packed);
|
2014-10-14 15:40:21 -05:00
|
|
|
Struct {
|
|
|
|
size: machine::llsize_of_alloc(cx, llty_rec),
|
|
|
|
align: machine::llalign_of_min(cx, llty_rec),
|
|
|
|
sized: sized,
|
|
|
|
packed: packed,
|
2014-10-15 01:05:01 -05:00
|
|
|
fields: tys.to_vec(),
|
2013-02-18 16:16:21 -06:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2015-01-28 07:34:18 -06:00
|
|
|
// Inclusive bounds on a C-like enum's discriminant values, tracked under
// both signed and unsigned interpretation so that either kind of
// candidate discriminant type can be checked (see `bounds_usable`).
#[derive(Debug)]
struct IntBounds {
    slo: i64,   // smallest discriminant, interpreted as signed
    shi: i64,   // largest discriminant, interpreted as signed
    ulo: u64,   // smallest discriminant, interpreted as unsigned
    uhi: u64    // largest discriminant, interpreted as unsigned
}
|
|
|
|
|
2014-09-29 14:11:30 -05:00
|
|
|
fn mk_cenum<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
|
|
|
hint: Hint, bounds: &IntBounds)
|
|
|
|
-> Repr<'tcx> {
|
2013-07-01 00:42:30 -05:00
|
|
|
let it = range_to_inttype(cx, hint, bounds);
|
|
|
|
match it {
|
2016-01-16 09:03:09 -06:00
|
|
|
attr::SignedInt(_) => CEnum(it, Disr(bounds.slo as u64), Disr(bounds.shi as u64)),
|
|
|
|
attr::UnsignedInt(_) => CEnum(it, Disr(bounds.ulo), Disr(bounds.uhi))
|
2013-07-01 00:42:30 -05:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2013-12-19 18:47:15 -06:00
|
|
|
fn range_to_inttype(cx: &CrateContext, hint: Hint, bounds: &IntBounds) -> IntType {
|
2014-12-20 02:09:35 -06:00
|
|
|
debug!("range_to_inttype: {:?} {:?}", hint, bounds);
|
2013-07-01 00:42:30 -05:00
|
|
|
// Lists of sizes to try. u64 is always allowed as a fallback.
|
2014-10-27 17:37:07 -05:00
|
|
|
#[allow(non_upper_case_globals)]
|
2015-02-27 08:36:53 -06:00
|
|
|
const choose_shortest: &'static [IntType] = &[
|
2015-09-14 04:58:20 -05:00
|
|
|
attr::UnsignedInt(ast::TyU8), attr::SignedInt(ast::TyI8),
|
|
|
|
attr::UnsignedInt(ast::TyU16), attr::SignedInt(ast::TyI16),
|
|
|
|
attr::UnsignedInt(ast::TyU32), attr::SignedInt(ast::TyI32)];
|
2014-10-27 17:37:07 -05:00
|
|
|
#[allow(non_upper_case_globals)]
|
2015-02-27 08:36:53 -06:00
|
|
|
const at_least_32: &'static [IntType] = &[
|
2015-09-14 04:58:20 -05:00
|
|
|
attr::UnsignedInt(ast::TyU32), attr::SignedInt(ast::TyI32)];
|
2013-07-01 00:42:30 -05:00
|
|
|
|
|
|
|
let attempts;
|
|
|
|
match hint {
|
|
|
|
attr::ReprInt(span, ity) => {
|
|
|
|
if !bounds_usable(cx, ity, bounds) {
|
2014-03-05 08:36:01 -06:00
|
|
|
cx.sess().span_bug(span, "representation hint insufficient for discriminant range")
|
2013-07-01 00:42:30 -05:00
|
|
|
}
|
|
|
|
return ity;
|
|
|
|
}
|
|
|
|
attr::ReprExtern => {
|
2015-02-20 13:08:14 -06:00
|
|
|
attempts = match &cx.sess().target.target.arch[..] {
|
2013-07-01 00:42:30 -05:00
|
|
|
// WARNING: the ARM EABI has two variants; the one corresponding to `at_least_32`
|
|
|
|
// appears to be used on Linux and NetBSD, but some systems may use the variant
|
|
|
|
// corresponding to `choose_shortest`. However, we don't run on those yet...?
|
2014-07-23 13:56:36 -05:00
|
|
|
"arm" => at_least_32,
|
|
|
|
_ => at_least_32,
|
2013-07-01 00:42:30 -05:00
|
|
|
}
|
|
|
|
}
|
|
|
|
attr::ReprAny => {
|
|
|
|
attempts = choose_shortest;
|
2014-05-27 01:56:52 -05:00
|
|
|
},
|
|
|
|
attr::ReprPacked => {
|
2014-09-05 11:18:53 -05:00
|
|
|
cx.tcx().sess.bug("range_to_inttype: found ReprPacked on an enum");
|
2013-07-01 00:42:30 -05:00
|
|
|
}
|
2015-07-13 13:35:00 -05:00
|
|
|
attr::ReprSimd => {
|
|
|
|
cx.tcx().sess.bug("range_to_inttype: found ReprSimd on an enum");
|
|
|
|
}
|
2013-07-01 00:42:30 -05:00
|
|
|
}
|
2015-01-31 11:20:46 -06:00
|
|
|
for &ity in attempts {
|
2013-07-01 00:42:30 -05:00
|
|
|
if bounds_usable(cx, ity, bounds) {
|
2013-10-03 15:58:01 -05:00
|
|
|
return ity;
|
2013-07-01 00:42:30 -05:00
|
|
|
}
|
|
|
|
}
|
2015-09-14 04:58:20 -05:00
|
|
|
return attr::UnsignedInt(ast::TyU64);
|
2013-07-01 00:42:30 -05:00
|
|
|
}
|
|
|
|
|
2013-12-19 18:47:15 -06:00
|
|
|
pub fn ll_inttype(cx: &CrateContext, ity: IntType) -> Type {
|
2013-07-01 00:42:30 -05:00
|
|
|
match ity {
|
|
|
|
attr::SignedInt(t) => Type::int_from_ty(cx, t),
|
|
|
|
attr::UnsignedInt(t) => Type::uint_from_ty(cx, t)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2013-12-19 18:47:15 -06:00
|
|
|
fn bounds_usable(cx: &CrateContext, ity: IntType, bounds: &IntBounds) -> bool {
|
2014-12-20 02:09:35 -06:00
|
|
|
debug!("bounds_usable: {:?} {:?}", ity, bounds);
|
2013-07-01 00:42:30 -05:00
|
|
|
match ity {
|
|
|
|
attr::SignedInt(_) => {
|
|
|
|
let lllo = C_integral(ll_inttype(cx, ity), bounds.slo as u64, true);
|
|
|
|
let llhi = C_integral(ll_inttype(cx, ity), bounds.shi as u64, true);
|
|
|
|
bounds.slo == const_to_int(lllo) as i64 && bounds.shi == const_to_int(llhi) as i64
|
|
|
|
}
|
|
|
|
attr::UnsignedInt(_) => {
|
|
|
|
let lllo = C_integral(ll_inttype(cx, ity), bounds.ulo, false);
|
|
|
|
let llhi = C_integral(ll_inttype(cx, ity), bounds.uhi, false);
|
|
|
|
bounds.ulo == const_to_uint(lllo) as u64 && bounds.uhi == const_to_uint(llhi) as u64
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2014-12-25 06:20:48 -06:00
|
|
|
pub fn ty_of_inttype<'tcx>(tcx: &ty::ctxt<'tcx>, ity: IntType) -> Ty<'tcx> {
|
2013-07-01 00:42:30 -05:00
|
|
|
match ity {
|
2015-06-24 20:09:46 -05:00
|
|
|
attr::SignedInt(t) => tcx.mk_mach_int(t),
|
|
|
|
attr::UnsignedInt(t) => tcx.mk_mach_uint(t)
|
2013-07-01 00:42:30 -05:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2014-10-14 15:40:21 -05:00
|
|
|
// LLVM doesn't like types that don't fit in the address space
|
2014-09-29 14:11:30 -05:00
|
|
|
fn ensure_struct_fits_in_address_space<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
|
|
|
|
fields: &[Type],
|
|
|
|
packed: bool,
|
|
|
|
scapegoat: Ty<'tcx>) {
|
2014-10-14 15:40:21 -05:00
|
|
|
let mut offset = 0;
|
2015-01-31 11:20:46 -06:00
|
|
|
for &llty in fields {
|
2014-11-06 20:30:49 -06:00
|
|
|
// Invariant: offset < ccx.obj_size_bound() <= 1<<61
|
2014-10-14 15:40:21 -05:00
|
|
|
if !packed {
|
|
|
|
let type_align = machine::llalign_of_min(ccx, llty);
|
|
|
|
offset = roundup(offset, type_align);
|
|
|
|
}
|
2014-11-06 20:30:49 -06:00
|
|
|
// type_align is a power-of-2, so still offset < ccx.obj_size_bound()
|
|
|
|
// llsize_of_alloc(ccx, llty) is also less than ccx.obj_size_bound()
|
2014-10-15 12:39:12 -05:00
|
|
|
// so the sum is less than 1<<62 (and therefore can't overflow).
|
2014-10-14 15:40:21 -05:00
|
|
|
offset += machine::llsize_of_alloc(ccx, llty);
|
|
|
|
|
2014-11-06 20:30:49 -06:00
|
|
|
if offset >= ccx.obj_size_bound() {
|
2014-10-14 15:40:21 -05:00
|
|
|
ccx.report_overbig_object(scapegoat);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
fn union_size_and_align(sts: &[Struct]) -> (machine::llsize, machine::llalign) {
|
|
|
|
let size = sts.iter().map(|st| st.size).max().unwrap();
|
2015-01-30 01:43:11 -06:00
|
|
|
let align = sts.iter().map(|st| st.align).max().unwrap();
|
|
|
|
(roundup(size, align), align)
|
2014-10-14 15:40:21 -05:00
|
|
|
}
|
|
|
|
|
2014-09-29 14:11:30 -05:00
|
|
|
fn ensure_enum_fits_in_address_space<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
|
|
|
|
fields: &[Struct],
|
|
|
|
scapegoat: Ty<'tcx>) {
|
2015-01-30 01:43:11 -06:00
|
|
|
let (total_size, _) = union_size_and_align(fields);
|
2014-10-14 15:40:21 -05:00
|
|
|
|
2014-11-06 20:30:49 -06:00
|
|
|
if total_size >= ccx.obj_size_bound() {
|
2014-10-14 15:40:21 -05:00
|
|
|
ccx.report_overbig_object(scapegoat);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2013-08-30 01:45:06 -05:00
|
|
|
|
2014-11-24 19:06:06 -06:00
|
|
|
/// LLVM-level types are a little complicated.
|
|
|
|
///
|
|
|
|
/// C-like enums need to be actual ints, not wrapped in a struct,
|
|
|
|
/// because that changes the ABI on some platforms (see issue #10308).
|
|
|
|
///
|
|
|
|
/// For nominal types, in some cases, we need to use LLVM named structs
|
|
|
|
/// and fill in the actual contents in a second pass to prevent
|
|
|
|
/// unbounded recursion; see also the comments in `trans::type_of`.
|
2014-09-29 14:11:30 -05:00
|
|
|
pub fn type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, r: &Repr<'tcx>) -> Type {
|
2015-07-27 07:45:20 -05:00
|
|
|
let c = generic_type_of(cx, r, None, false, false, false);
|
|
|
|
assert!(!c.needs_drop_flag);
|
|
|
|
c.prefix
|
2013-11-22 03:16:17 -06:00
|
|
|
}
|
2015-07-27 07:45:20 -05:00
|
|
|
|
|
|
|
|
2014-08-06 04:59:40 -05:00
|
|
|
// Pass dst=true if the type you are passing is a DST. Yes, we could figure
|
|
|
|
// this out, but if you call this on an unsized type without realising it, you
|
|
|
|
// are going to get the wrong type (it will not include the unsized parts of it).
|
2014-09-29 14:11:30 -05:00
|
|
|
pub fn sizing_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
|
|
|
r: &Repr<'tcx>, dst: bool) -> Type {
|
2015-07-27 07:45:20 -05:00
|
|
|
let c = generic_type_of(cx, r, None, true, dst, false);
|
|
|
|
assert!(!c.needs_drop_flag);
|
|
|
|
c.prefix
|
|
|
|
}
|
|
|
|
/// Like `sizing_type_of`, but returns the full `TypeContext`, requesting
/// drop-flag delaying (`delay_drop_flag = true` — see `generic_type_of`).
pub fn sizing_type_context_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
                                        r: &Repr<'tcx>, dst: bool) -> TypeContext {
    generic_type_of(cx, r, None, true, dst, true)
}
|
2014-09-29 14:11:30 -05:00
|
|
|
pub fn incomplete_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
|
|
|
r: &Repr<'tcx>, name: &str) -> Type {
|
2015-07-27 07:45:20 -05:00
|
|
|
let c = generic_type_of(cx, r, Some(name), false, false, false);
|
|
|
|
assert!(!c.needs_drop_flag);
|
|
|
|
c.prefix
|
2013-11-22 03:16:17 -06:00
|
|
|
}
|
2014-09-29 14:11:30 -05:00
|
|
|
pub fn finish_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
|
|
|
r: &Repr<'tcx>, llty: &mut Type) {
|
2013-11-22 03:16:17 -06:00
|
|
|
match *r {
|
2014-05-15 19:55:23 -05:00
|
|
|
CEnum(..) | General(..) | RawNullablePointer { .. } => { }
|
|
|
|
Univariant(ref st, _) | StructWrappedNullablePointer { nonnull: ref st, .. } =>
|
2015-02-20 13:08:14 -06:00
|
|
|
llty.set_struct_body(&struct_llfields(cx, st, false, false),
|
2014-03-08 14:36:22 -06:00
|
|
|
st.packed)
|
2013-11-22 03:16:17 -06:00
|
|
|
}
|
2013-02-28 01:08:52 -06:00
|
|
|
}
|
2013-11-22 03:16:17 -06:00
|
|
|
|
2014-09-29 14:11:30 -05:00
|
|
|
/// Workhorse behind `type_of` / `sizing_type_of` / `incomplete_type_of`:
/// builds the LLVM type (and drop-flag info) for a representation.
///
/// * `name`: if `Some`, produce a named struct with a deferred body.
/// * `sizing`: build a sizing-only type (see `struct_llfields`).
/// * `dst`: whether the Rust type is a DST (affects sizing types only).
/// * `delay_drop_flag`: if true, omit a needed drop flag from the LLVM
///   type and report it via `TypeContext::may_need_drop_flag` instead.
fn generic_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
                             r: &Repr<'tcx>,
                             name: Option<&str>,
                             sizing: bool,
                             dst: bool,
                             delay_drop_flag: bool) -> TypeContext {
    debug!("adt::generic_type_of r: {:?} name: {:?} sizing: {} dst: {} delay_drop_flag: {}",
           r, name, sizing, dst, delay_drop_flag);
    match *r {
        // C-like enum: just the bare discriminant integer (ABI-relevant).
        CEnum(ity, _, _) => TypeContext::direct(ll_inttype(cx, ity)),
        // Nullable-pointer optimization, pointer-only payload: the type is
        // simply the non-null variant's (sizing) type.
        RawNullablePointer { nnty, .. } =>
            TypeContext::direct(type_of::sizing_type_of(cx, nnty)),
        StructWrappedNullablePointer { nonnull: ref st, .. } => {
            match name {
                None => {
                    TypeContext::direct(
                        Type::struct_(cx, &struct_llfields(cx, st, sizing, dst),
                                      st.packed))
                }
                Some(name) => {
                    // Named form: body attached later by finish_type_of.
                    assert_eq!(sizing, false);
                    TypeContext::direct(Type::named_struct(cx, name))
                }
            }
        }
        Univariant(ref st, dtor_needed) => {
            let dtor_needed = dtor_needed != 0;
            match name {
                None => {
                    let mut fields = struct_llfields(cx, st, sizing, dst);
                    // When delaying, the trailing drop-flag field is kept
                    // out of the LLVM type and flagged in the context.
                    if delay_drop_flag && dtor_needed {
                        fields.pop();
                    }
                    TypeContext::may_need_drop_flag(
                        Type::struct_(cx, &fields,
                                      st.packed),
                        delay_drop_flag && dtor_needed)
                }
                Some(name) => {
                    // Hypothesis: named_struct's can never need a
                    // drop flag. (... needs validation.)
                    assert_eq!(sizing, false);
                    TypeContext::direct(Type::named_struct(cx, name))
                }
            }
        }
        General(ity, ref sts, dtor_needed) => {
            let dtor_needed = dtor_needed != 0;
            // We need a representation that has:
            // * The alignment of the most-aligned field
            // * The size of the largest variant (rounded up to that alignment)
            // * No alignment padding anywhere any variant has actual data
            //   (currently matters only for enums small enough to be immediate)
            // * The discriminant in an obvious place.
            //
            // So we start with the discriminant, pad it up to the alignment with
            // more of its own type, then use alignment-sized ints to get the rest
            // of the size.
            //
            // FIXME #10604: this breaks when vector types are present.
            let (size, align) = union_size_and_align(&sts[..]);
            let align_s = align as u64;
            assert_eq!(size % align_s, 0);
            // Number of alignment-sized filler units after the first one.
            let align_units = size / align_s - 1;

            let discr_ty = ll_inttype(cx, ity);
            let discr_size = machine::llsize_of_alloc(cx, discr_ty);
            // Filler element type: an integer (or vector) exactly as
            // aligned as the overall enum.
            let fill_ty = match align_s {
                1 => Type::array(&Type::i8(cx), align_units),
                2 => Type::array(&Type::i16(cx), align_units),
                4 => Type::array(&Type::i32(cx), align_units),
                8 if machine::llalign_of_min(cx, Type::i64(cx)) == 8 =>
                    Type::array(&Type::i64(cx), align_units),
                a if a.count_ones() == 1 => Type::array(&Type::vector(&Type::i32(cx), a / 4),
                                                        align_units),
                _ => panic!("unsupported enum alignment: {}", align)
            };
            assert_eq!(machine::llalign_of_min(cx, fill_ty), align);
            assert_eq!(align_s % discr_size, 0);
            // Layout: discriminant, padding of its own type up to `align`,
            // then filler units covering the rest of the size.
            let mut fields: Vec<Type> =
                [discr_ty,
                 Type::array(&discr_ty, align_s / discr_size - 1),
                 fill_ty].iter().cloned().collect();
            if delay_drop_flag && dtor_needed {
                fields.pop();
            }
            match name {
                None => {
                    TypeContext::may_need_drop_flag(
                        Type::struct_(cx, &fields[..], false),
                        delay_drop_flag && dtor_needed)
                }
                Some(name) => {
                    let mut llty = Type::named_struct(cx, name);
                    llty.set_struct_body(&fields[..], false);
                    TypeContext::may_need_drop_flag(
                        llty,
                        delay_drop_flag && dtor_needed)
                }
            }
        }
    }
}
|
|
|
|
|
2014-09-29 14:11:30 -05:00
|
|
|
fn struct_llfields<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, st: &Struct<'tcx>,
|
|
|
|
sizing: bool, dst: bool) -> Vec<Type> {
|
2013-03-11 03:04:08 -05:00
|
|
|
if sizing {
|
2014-12-18 08:26:10 -06:00
|
|
|
st.fields.iter().filter(|&ty| !dst || type_is_sized(cx.tcx(), *ty))
|
2014-08-06 04:59:40 -05:00
|
|
|
.map(|&ty| type_of::sizing_type_of(cx, ty)).collect()
|
2013-03-11 03:04:08 -05:00
|
|
|
} else {
|
2015-02-17 12:27:01 -06:00
|
|
|
st.fields.iter().map(|&ty| type_of::in_memory_type_of(cx, ty)).collect()
|
2013-03-11 03:04:08 -05:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2014-11-24 19:06:06 -06:00
|
|
|
/// Obtain a representation of the discriminant sufficient to translate
|
|
|
|
/// destructuring; this may or may not involve the actual discriminant.
|
|
|
|
///
|
|
|
|
/// This should ideally be less tightly tied to `_match`.
|
2014-09-29 14:11:30 -05:00
|
|
|
pub fn trans_switch<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
|
2015-10-23 00:07:19 -05:00
|
|
|
r: &Repr<'tcx>,
|
|
|
|
scrutinee: ValueRef,
|
|
|
|
range_assert: bool)
|
2014-09-29 14:11:30 -05:00
|
|
|
-> (_match::BranchKind, Option<ValueRef>) {
|
2013-02-18 16:16:21 -06:00
|
|
|
match *r {
|
2014-05-15 19:55:23 -05:00
|
|
|
CEnum(..) | General(..) |
|
|
|
|
RawNullablePointer { .. } | StructWrappedNullablePointer { .. } => {
|
2015-10-23 00:07:19 -05:00
|
|
|
(_match::Switch, Some(trans_get_discr(bcx, r, scrutinee, None,
|
|
|
|
range_assert)))
|
2013-02-18 16:16:21 -06:00
|
|
|
}
|
2013-11-28 14:22:53 -06:00
|
|
|
Univariant(..) => {
|
2015-04-23 15:45:03 -05:00
|
|
|
// N.B.: Univariant means <= 1 enum variants (*not* == 1 variants).
|
2014-08-30 09:22:19 -05:00
|
|
|
(_match::Single, None)
|
2013-02-18 16:16:21 -06:00
|
|
|
}
|
2013-02-28 13:43:20 -06:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2015-05-05 11:34:37 -05:00
|
|
|
pub fn is_discr_signed<'tcx>(r: &Repr<'tcx>) -> bool {
|
|
|
|
match *r {
|
|
|
|
CEnum(ity, _, _) => ity.is_signed(),
|
|
|
|
General(ity, _, _) => ity.is_signed(),
|
|
|
|
Univariant(..) => false,
|
|
|
|
RawNullablePointer { .. } => false,
|
|
|
|
StructWrappedNullablePointer { .. } => false,
|
|
|
|
}
|
|
|
|
}
|
2013-03-31 17:55:30 -05:00
|
|
|
|
2013-02-28 14:13:00 -06:00
|
|
|
/// Obtain the actual discriminant of a value.
|
2014-09-29 14:11:30 -05:00
|
|
|
pub fn trans_get_discr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, r: &Repr<'tcx>,
|
2015-10-23 00:07:19 -05:00
|
|
|
scrutinee: ValueRef, cast_to: Option<Type>,
|
|
|
|
range_assert: bool)
|
2013-02-28 13:43:20 -06:00
|
|
|
-> ValueRef {
|
2014-12-20 02:09:35 -06:00
|
|
|
debug!("trans_get_discr r: {:?}", r);
|
2015-05-05 11:34:37 -05:00
|
|
|
let val = match *r {
|
2015-10-23 00:07:19 -05:00
|
|
|
CEnum(ity, min, max) => {
|
|
|
|
load_discr(bcx, ity, scrutinee, min, max, range_assert)
|
|
|
|
}
|
2014-06-14 08:55:55 -05:00
|
|
|
General(ity, ref cases, _) => {
|
2015-08-24 15:51:57 -05:00
|
|
|
let ptr = StructGEP(bcx, scrutinee, 0);
|
2015-10-23 00:07:19 -05:00
|
|
|
load_discr(bcx, ity, ptr, Disr(0), Disr(cases.len() as u64 - 1),
|
|
|
|
range_assert)
|
2013-07-01 00:42:30 -05:00
|
|
|
}
|
2015-05-05 11:34:37 -05:00
|
|
|
Univariant(..) => C_u8(bcx.ccx(), 0),
|
2014-05-15 19:55:23 -05:00
|
|
|
RawNullablePointer { nndiscr, nnty, .. } => {
|
2016-01-16 09:03:09 -06:00
|
|
|
let cmp = if nndiscr == Disr(0) { IntEQ } else { IntNE };
|
2014-05-15 19:55:23 -05:00
|
|
|
let llptrty = type_of::sizing_type_of(bcx.ccx(), nnty);
|
2015-05-05 11:34:37 -05:00
|
|
|
ICmp(bcx, cmp, Load(bcx, scrutinee), C_null(llptrty), DebugLoc::None)
|
2014-05-15 19:55:23 -05:00
|
|
|
}
|
2014-12-04 15:44:51 -06:00
|
|
|
StructWrappedNullablePointer { nndiscr, ref discrfield, .. } => {
|
2015-05-05 11:34:37 -05:00
|
|
|
struct_wrapped_nullable_bitdiscr(bcx, nndiscr, discrfield, scrutinee)
|
2013-03-31 17:55:30 -05:00
|
|
|
}
|
2015-05-05 11:34:37 -05:00
|
|
|
};
|
2013-07-01 00:42:30 -05:00
|
|
|
match cast_to {
|
|
|
|
None => val,
|
2015-05-05 11:34:37 -05:00
|
|
|
Some(llty) => if is_discr_signed(r) { SExt(bcx, val, llty) } else { ZExt(bcx, val, llty) }
|
2013-07-01 00:42:30 -05:00
|
|
|
}
|
2013-02-18 16:16:21 -06:00
|
|
|
}
|
|
|
|
|
2014-12-04 15:44:51 -06:00
|
|
|
fn struct_wrapped_nullable_bitdiscr(bcx: Block, nndiscr: Disr, discrfield: &DiscrField,
|
2014-05-15 19:55:23 -05:00
|
|
|
scrutinee: ValueRef) -> ValueRef {
|
2015-02-18 13:48:57 -06:00
|
|
|
let llptrptr = GEPi(bcx, scrutinee, &discrfield[..]);
|
2014-07-03 21:26:38 -05:00
|
|
|
let llptr = Load(bcx, llptrptr);
|
2016-01-16 09:03:09 -06:00
|
|
|
let cmp = if nndiscr == Disr(0) { IntEQ } else { IntNE };
|
2015-02-04 10:42:32 -06:00
|
|
|
ICmp(bcx, cmp, llptr, C_null(val_ty(llptr)), DebugLoc::None)
|
2013-03-31 17:55:30 -05:00
|
|
|
}
|
|
|
|
|
2013-02-28 14:13:00 -06:00
|
|
|
/// Helper for cases where the discriminant is simply loaded.
///
/// When `range_assert` is set and [min, max] does not cover the whole
/// type, emits a range-asserting load so LLVM can exploit the bound.
fn load_discr(bcx: Block, ity: IntType, ptr: ValueRef, min: Disr, max: Disr,
              range_assert: bool)
    -> ValueRef {
    let llty = ll_inttype(bcx.ccx(), ity);
    // `ptr` must point at exactly the discriminant integer type.
    assert_eq!(val_ty(ptr), llty.ptr_to());
    let bits = machine::llbitsize_of_real(bcx.ccx(), llty);
    assert!(bits <= 64);
    let bits = bits as usize;
    // Mask selecting only the bits that exist in the discriminant type.
    let mask = Disr(!0u64 >> (64 - bits));
    // For a (max) discr of -1, max will be `-1 as usize`, which overflows.
    // However, that is fine here (it would still represent the full range),
    if max.wrapping_add(Disr(1)) & mask == min & mask || !range_assert {
        // i.e., if the range is everything. The lo==hi case would be
        // rejected by the LLVM verifier (it would mean either an
        // empty set, which is impossible, or the entire range of the
        // type, which is pointless).
        Load(bcx, ptr)
    } else {
        // llvm::ConstantRange can deal with ranges that wrap around,
        // so an overflow on (max + 1) is fine.
        LoadRangeAssert(bcx, ptr, min.0, max.0.wrapping_add(1), /* signed: */ True)
    }
}
|
|
|
|
|
2014-11-24 19:06:06 -06:00
|
|
|
/// Yield information about how to dispatch a case of the
|
|
|
|
/// discriminant-like value returned by `trans_switch`.
|
|
|
|
///
|
|
|
|
/// This should ideally be less tightly tied to `_match`.
|
2014-09-06 11:13:04 -05:00
|
|
|
pub fn trans_case<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, r: &Repr, discr: Disr)
|
2015-10-21 16:33:08 -05:00
|
|
|
-> ValueRef {
|
2013-02-18 16:16:21 -06:00
|
|
|
match *r {
|
2013-07-01 00:42:30 -05:00
|
|
|
CEnum(ity, _, _) => {
|
2016-01-16 09:03:09 -06:00
|
|
|
C_integral(ll_inttype(bcx.ccx(), ity), discr.0, true)
|
2013-07-01 00:42:30 -05:00
|
|
|
}
|
2014-06-14 08:55:55 -05:00
|
|
|
General(ity, _, _) => {
|
2016-01-16 09:03:09 -06:00
|
|
|
C_integral(ll_inttype(bcx.ccx(), ity), discr.0, true)
|
2013-02-18 16:16:21 -06:00
|
|
|
}
|
2013-11-28 14:22:53 -06:00
|
|
|
Univariant(..) => {
|
2014-03-05 08:36:01 -06:00
|
|
|
bcx.ccx().sess().bug("no cases for univariants or structs")
|
2013-02-18 16:16:21 -06:00
|
|
|
}
|
2014-05-15 19:55:23 -05:00
|
|
|
RawNullablePointer { .. } |
|
|
|
|
StructWrappedNullablePointer { .. } => {
|
2016-01-16 09:03:09 -06:00
|
|
|
assert!(discr == Disr(0) || discr == Disr(1));
|
|
|
|
C_bool(bcx.ccx(), discr != Disr(0))
|
2013-03-31 17:55:30 -05:00
|
|
|
}
|
2013-02-18 16:16:21 -06:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2014-11-24 19:06:06 -06:00
|
|
|
/// Set the discriminant for a new value of the given case of the given
/// representation.
pub fn trans_set_discr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, r: &Repr<'tcx>,
                                   val: ValueRef, discr: Disr) {
    match *r {
        CEnum(ity, min, max) => {
            // Sanity-check the requested case against the repr's bounds.
            assert_discr_in_range(ity, min, max, discr);
            Store(bcx, C_integral(ll_inttype(bcx.ccx(), ity), discr.0, true),
                  val);
        }
        General(ity, ref cases, dtor) => {
            if dtor_active(dtor) {
                // The drop flag lives in the variant's second-to-last
                // field slot; mark it as needing a drop.
                let ptr = trans_field_ptr(bcx, r, MaybeSizedValue::sized(val), discr,
                                          cases[discr.0 as usize].fields.len() - 2);
                Store(bcx, C_u8(bcx.ccx(), DTOR_NEEDED), ptr);
            }
            // The discriminant is the first field of the enum struct.
            Store(bcx, C_integral(ll_inttype(bcx.ccx(), ity), discr.0, true),
                  StructGEP(bcx, val, 0));
        }
        Univariant(ref st, dtor) => {
            // No discriminant to store; only set the drop flag (last
            // field) when the type has an active destructor.
            assert_eq!(discr, Disr(0));
            if dtor_active(dtor) {
                Store(bcx, C_u8(bcx.ccx(), DTOR_NEEDED),
                      StructGEP(bcx, val, st.fields.len() - 1));
            }
        }
        RawNullablePointer { nndiscr, nnty, ..} => {
            // Selecting the null variant means writing a null pointer;
            // the non-null variant is established by storing the payload.
            if discr != nndiscr {
                let llptrty = type_of::sizing_type_of(bcx.ccx(), nnty);
                Store(bcx, C_null(llptrty), val);
            }
        }
        StructWrappedNullablePointer { nndiscr, ref discrfield, .. } => {
            // Null out the designated discriminant field for the null case.
            if discr != nndiscr {
                let llptrptr = GEPi(bcx, val, &discrfield[..]);
                let llptrty = val_ty(llptrptr).element_type();
                Store(bcx, C_null(llptrty), llptrptr);
            }
        }
    }
}
|
|
|
|
|
2013-07-01 00:42:30 -05:00
|
|
|
fn assert_discr_in_range(ity: IntType, min: Disr, max: Disr, discr: Disr) {
|
|
|
|
match ity {
|
2016-01-16 09:03:09 -06:00
|
|
|
attr::UnsignedInt(_) => {
|
|
|
|
assert!(min <= discr);
|
|
|
|
assert!(discr <= max)
|
|
|
|
},
|
|
|
|
attr::SignedInt(_) => {
|
|
|
|
assert!(min.0 as i64 <= discr.0 as i64);
|
|
|
|
assert!(discr.0 as i64 <= max.0 as i64);
|
|
|
|
},
|
2013-07-01 00:42:30 -05:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2014-11-24 19:06:06 -06:00
|
|
|
/// The number of fields in a given case; for use when obtaining this
|
|
|
|
/// information from the type or definition is less convenient.
|
2015-03-25 19:06:52 -05:00
|
|
|
pub fn num_args(r: &Repr, discr: Disr) -> usize {
|
2013-02-18 16:16:21 -06:00
|
|
|
match *r {
|
2013-11-28 14:22:53 -06:00
|
|
|
CEnum(..) => 0,
|
2013-03-11 00:58:44 -05:00
|
|
|
Univariant(ref st, dtor) => {
|
2016-01-16 09:03:09 -06:00
|
|
|
assert_eq!(discr, Disr(0));
|
2015-02-10 03:04:39 -06:00
|
|
|
st.fields.len() - (if dtor_active(dtor) { 1 } else { 0 })
|
2013-03-11 00:58:44 -05:00
|
|
|
}
|
2014-06-14 08:55:55 -05:00
|
|
|
General(_, ref cases, dtor) => {
|
2016-01-16 09:03:09 -06:00
|
|
|
cases[discr.0 as usize].fields.len() - 1 - (if dtor_active(dtor) { 1 } else { 0 })
|
2014-06-14 08:55:55 -05:00
|
|
|
}
|
2014-05-15 19:55:23 -05:00
|
|
|
RawNullablePointer { nndiscr, ref nullfields, .. } => {
|
|
|
|
if discr == nndiscr { 1 } else { nullfields.len() }
|
|
|
|
}
|
2014-10-05 19:36:53 -05:00
|
|
|
StructWrappedNullablePointer { ref nonnull, nndiscr,
|
|
|
|
ref nullfields, .. } => {
|
2013-04-30 13:36:22 -05:00
|
|
|
if discr == nndiscr { nonnull.fields.len() } else { nullfields.len() }
|
2013-03-31 17:55:30 -05:00
|
|
|
}
|
2013-02-18 16:16:21 -06:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2013-02-28 01:08:52 -06:00
|
|
|
/// Access a field, at a point when the value's case is known.
pub fn trans_field_ptr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, r: &Repr<'tcx>,
                                   val: MaybeSizedValue, discr: Disr, ix: usize) -> ValueRef {
    // Note: if this ever needs to generate conditionals (e.g., if we
    // decide to do some kind of cdr-coding-like non-unique repr
    // someday), it will need to return a possibly-new bcx as well.
    match *r {
        CEnum(..) => {
            bcx.ccx().sess().bug("element access in C-like enum")
        }
        Univariant(ref st, _dtor) => {
            assert_eq!(discr, Disr(0));
            struct_field_ptr(bcx, st, val, ix, false)
        }
        General(_, ref cases, _) => {
            // Skip past the discriminant (field 0), hence `ix + 1`; the
            // value pointer must first be cast to the variant's layout.
            struct_field_ptr(bcx, &cases[discr.0 as usize], val, ix + 1, true)
        }
        // Null variant of either nullable-pointer optimization: all its
        // fields are zero-sized, so any suitably-typed pointer will do.
        RawNullablePointer { nndiscr, ref nullfields, .. } |
        StructWrappedNullablePointer { nndiscr, ref nullfields, .. } if discr != nndiscr => {
            // The unit-like case might have a nonzero number of unit-like fields.
            // (e.g., Result of Either with (), as one side.)
            let ty = type_of::type_of(bcx.ccx(), nullfields[ix]);
            assert_eq!(machine::llsize_of_alloc(bcx.ccx(), ty), 0);
            // The contents of memory at this pointer can't matter, but use
            // the value that's "reasonable" in case of pointer comparison.
            PointerCast(bcx, val.value, ty.ptr_to())
        }
        RawNullablePointer { nndiscr, nnty, .. } => {
            // Non-null variant: the value *is* the single pointer field.
            assert_eq!(ix, 0);
            assert_eq!(discr, nndiscr);
            let ty = type_of::type_of(bcx.ccx(), nnty);
            PointerCast(bcx, val.value, ty.ptr_to())
        }
        StructWrappedNullablePointer { ref nonnull, nndiscr, .. } => {
            // Non-null variant: plain struct field access, no cast needed.
            assert_eq!(discr, nndiscr);
            struct_field_ptr(bcx, nonnull, val, ix, false)
        }
    }
}
|
|
|
|
|
2015-12-06 07:38:29 -06:00
|
|
|
/// Compute a pointer to the `ix`th field of the struct described by `st`,
/// whose base pointer (and, for unsized values, metadata) is given by `val`.
///
/// If `needs_cast` is true, `val.value` is first cast to the precise LLVM
/// struct type rebuilt from `st.fields` (used when the incoming pointer has a
/// less specific type). For an unsized (DST) tail field whose alignment is
/// only known at runtime, the field offset is computed dynamically from the
/// value's metadata.
pub fn struct_field_ptr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, st: &Struct<'tcx>, val: MaybeSizedValue,
                                    ix: usize, needs_cast: bool) -> ValueRef {
    let ccx = bcx.ccx();
    let ptr_val = if needs_cast {
        // Rebuild the exact LLVM struct type from the monomorphized field
        // types and cast the incoming pointer to it.
        let fields = st.fields.iter().map(|&ty| {
            type_of::in_memory_type_of(ccx, ty)
        }).collect::<Vec<_>>();
        let real_ty = Type::struct_(ccx, &fields[..], st.packed);
        PointerCast(bcx, val.value, real_ty.ptr_to())
    } else {
        val.value
    };

    let fty = st.fields[ix];
    // Simple case - we can just GEP the field
    // * First field - Always aligned properly
    // * Packed struct - There is no alignment padding
    // * Field is sized - pointer is properly aligned already
    if ix == 0 || st.packed || type_is_sized(bcx.tcx(), fty) {
        return StructGEP(bcx, ptr_val, ix);
    }

    // If the type of the last field is [T] or str, then we don't need to do
    // any adjustments
    match fty.sty {
        ty::TySlice(..) | ty::TyStr => {
            return StructGEP(bcx, ptr_val, ix);
        }
        _ => ()
    }

    // There's no metadata available, log the case and just do the GEP.
    if !val.has_meta() {
        debug!("Unsized field `{}`, of `{}` has no metadata for adjustment",
               ix,
               bcx.val_to_string(ptr_val));
        return StructGEP(bcx, ptr_val, ix);
    }

    let dbloc = DebugLoc::None;

    // We need to get the pointer manually now.
    // We do this by casting to a *i8, then offsetting it by the appropriate amount.
    // We do this instead of, say, simply adjusting the pointer from the result of a GEP
    // because the field may have an arbitrary alignment in the LLVM representation
    // anyway.
    //
    // To demonstrate:
    //     struct Foo<T: ?Sized> {
    //         x: u16,
    //         y: T
    //     }
    //
    // The type Foo<Foo<Trait>> is represented in LLVM as { u16, { u16, u8 }}, meaning that
    // the `y` field has 16-bit alignment.

    let meta = val.meta;

    // Calculate the unaligned offset of the unsized field.
    let mut offset = 0;
    for &ty in &st.fields[0..ix] {
        let llty = type_of::sizing_type_of(ccx, ty);
        let type_align = type_of::align_of(ccx, ty);
        offset = roundup(offset, type_align);
        offset += machine::llsize_of_alloc(ccx, llty);
    }
    let unaligned_offset = C_uint(bcx.ccx(), offset);

    // Get the alignment of the field
    let (_, align) = glue::size_and_align_of_dst(bcx, fty, meta);

    // Bump the unaligned offset up to the appropriate alignment using the
    // following expression:
    //
    //     (unaligned offset + (align - 1)) & -align

    // Calculate offset
    let align_sub_1 = Sub(bcx, align, C_uint(bcx.ccx(), 1u64), dbloc);
    let offset = And(bcx,
                     Add(bcx, unaligned_offset, align_sub_1, dbloc),
                     Neg(bcx, align, dbloc),
                     dbloc);

    debug!("struct_field_ptr: DST field offset: {}",
           bcx.val_to_string(offset));

    // Cast and adjust pointer
    let byte_ptr = PointerCast(bcx, ptr_val, Type::i8p(bcx.ccx()));
    let byte_ptr = GEP(bcx, byte_ptr, &[offset]);

    // Finally, cast back to the type expected
    let ll_fty = type_of::in_memory_type_of(bcx.ccx(), fty);
    debug!("struct_field_ptr: Field type is {}", ll_fty.to_string());
    PointerCast(bcx, byte_ptr, ll_fty.ptr_to())
}
|
|
|
|
|
2014-12-09 12:44:51 -06:00
|
|
|
/// Fold `f` over every variant of the enum represented by `r`.
///
/// For a `Univariant` repr this simply invokes `f` once on the value. For a
/// `General` enum it emits a switch over the discriminant, calls `f` in each
/// variant's basic block with `value` cast to that variant's struct type, and
/// joins all blocks into a common continuation block, which is returned.
/// Other representations are not expected here (`unreachable!`).
pub fn fold_variants<'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>,
                                    r: &Repr<'tcx>,
                                    value: ValueRef,
                                    mut f: F)
                                    -> Block<'blk, 'tcx> where
    F: FnMut(Block<'blk, 'tcx>, &Struct<'tcx>, ValueRef) -> Block<'blk, 'tcx>,
{
    let fcx = bcx.fcx;
    match *r {
        Univariant(ref st, _) => {
            f(bcx, st, value)
        }
        General(ity, ref cases, _) => {
            let ccx = bcx.ccx();

            // See the comments in trans/base.rs for more information (inside
            // iter_structural_ty), but the gist here is that if the enum's
            // discriminant is *not* in the range that we're expecting (in which
            // case we'll take the fall-through branch on the switch
            // instruction) then we can't just optimize this to an Unreachable
            // block.
            //
            // Currently we still have filling drop, so this means that the drop
            // glue for enums may be called when the enum has been paved over
            // with the "I've been dropped" value. In this case the default
            // branch of the switch instruction will actually be taken at
            // runtime, so the basic block isn't actually unreachable, so we
            // need to make it do something with defined behavior. In this case
            // we just return early from the function.
            //
            // Note that this is also why the `trans_get_discr` below has
            // `false` to indicate that loading the discriminant should
            // not have a range assert.
            let ret_void_cx = fcx.new_temp_block("enum-variant-iter-ret-void");
            RetVoid(ret_void_cx, DebugLoc::None);

            let discr_val = trans_get_discr(bcx, r, value, None, false);
            let llswitch = Switch(bcx, discr_val, ret_void_cx.llbb, cases.len());
            let bcx_next = fcx.new_temp_block("enum-variant-iter-next");

            for (discr, case) in cases.iter().enumerate() {
                // One block per variant, reached via the switch on the
                // discriminant value.
                let mut variant_cx = fcx.new_temp_block(
                    &format!("enum-variant-iter-{}", &discr.to_string())
                );
                let rhs_val = C_integral(ll_inttype(ccx, ity), discr as u64, true);
                AddCase(llswitch, rhs_val, variant_cx.llbb);

                // Cast the value pointer to this variant's concrete LLVM
                // struct type before handing it to `f`.
                let fields = case.fields.iter().map(|&ty|
                    type_of::type_of(bcx.ccx(), ty)).collect::<Vec<_>>();
                let real_ty = Type::struct_(ccx, &fields[..], case.packed);
                let variant_value = PointerCast(variant_cx, value, real_ty.ptr_to());

                variant_cx = f(variant_cx, case, variant_value);
                Br(variant_cx, bcx_next.llbb, DebugLoc::None);
            }

            bcx_next
        }
        _ => unreachable!()
    }
}
|
|
|
|
|
2013-02-28 01:08:52 -06:00
|
|
|
/// Access the struct drop flag, if present.
///
/// Returns an expression datum holding a pointer to the flag. Panics (via
/// `sess().bug`) if the representation has no active drop flag.
pub fn trans_drop_flag_ptr<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
                                       r: &Repr<'tcx>,
                                       val: ValueRef)
                                       -> datum::DatumBlock<'blk, 'tcx, datum::Expr>
{
    let tcx = bcx.tcx();
    // The flag is exposed as an immutable pointer to `dtor_type()`.
    let ptr_ty = bcx.tcx().mk_imm_ptr(tcx.dtor_type());
    match *r {
        Univariant(ref st, dtor) if dtor_active(dtor) => {
            // The drop flag is stored as the struct's last field.
            let flag_ptr = StructGEP(bcx, val, st.fields.len() - 1);
            datum::immediate_rvalue_bcx(bcx, flag_ptr, ptr_ty).to_expr_datumblock()
        }
        General(_, _, dtor) if dtor_active(dtor) => {
            // For a multi-variant enum the flag's offset depends on the
            // variant, so switch over the variants (via fold_variants) and
            // copy each variant's flag pointer into a common scratch slot.
            let fcx = bcx.fcx;
            let custom_cleanup_scope = fcx.push_custom_cleanup_scope();
            let scratch = unpack_datum!(bcx, datum::lvalue_scratch_datum(
                bcx, tcx.dtor_type(), "drop_flag",
                InitAlloca::Uninit("drop flag itself has no dtor"),
                cleanup::CustomScope(custom_cleanup_scope), (), |_, bcx, _| {
                    debug!("no-op populate call for trans_drop_flag_ptr on dtor_type={:?}",
                           tcx.dtor_type());
                    bcx
                }
            ));
            bcx = fold_variants(bcx, r, val, |variant_cx, st, value| {
                // In every variant the flag is the last field.
                let ptr = struct_field_ptr(variant_cx, st, MaybeSizedValue::sized(value),
                                           (st.fields.len() - 1), false);
                datum::Datum::new(ptr, ptr_ty, datum::Lvalue::new("adt::trans_drop_flag_ptr"))
                    .store_to(variant_cx, scratch.val)
            });
            let expr_datum = scratch.to_expr_datum();
            fcx.pop_custom_cleanup_scope(custom_cleanup_scope);
            datum::DatumBlock::new(bcx, expr_datum)
        }
        _ => bcx.ccx().sess().bug("tried to get drop flag of non-droppable type")
    }
}
|
|
|
|
|
2014-11-24 19:06:06 -06:00
|
|
|
/// Construct a constant value, suitable for initializing a
|
|
|
|
/// GlobalVariable, given a case and constant values for its fields.
|
|
|
|
/// Note that this may have a different LLVM type (and different
|
|
|
|
/// alignment!) from the representation's `type_of`, so it needs a
|
|
|
|
/// pointer cast before use.
|
|
|
|
///
|
|
|
|
/// The LLVM type system does not directly support unions, and only
|
|
|
|
/// pointers can be bitcast, so a constant (and, by extension, the
|
|
|
|
/// GlobalVariable initialized by it) will have a type that can vary
|
|
|
|
/// depending on which case of an enum it is.
|
|
|
|
///
|
|
|
|
/// To understand the alignment situation, consider `enum E { V64(u64),
|
|
|
|
/// V32(u32, u32) }` on Windows. The type has 8-byte alignment to
|
|
|
|
/// accommodate the u64, but `V32(x, y)` would have LLVM type `{i32,
|
|
|
|
/// i32, i32}`, which is 4-byte aligned.
|
|
|
|
///
|
|
|
|
/// Currently the returned value has the same size as the type, but
|
|
|
|
/// this could be changed in the future to avoid allocating unnecessary
|
|
|
|
/// space after values of shorter-than-maximum cases.
|
2014-09-29 14:11:30 -05:00
|
|
|
pub fn trans_const<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, r: &Repr<'tcx>, discr: Disr,
                             vals: &[ValueRef]) -> ValueRef {
    match *r {
        CEnum(ity, min, max) => {
            // C-like enum: the constant is just the discriminant itself.
            assert_eq!(vals.len(), 0);
            assert_discr_in_range(ity, min, max, discr);
            C_integral(ll_inttype(ccx, ity), discr.0, true)
        }
        General(ity, ref cases, _) => {
            // General enum: discriminant first, then the case's field values,
            // then padding out to the size of the largest case so every
            // constant of this enum occupies the same number of bytes.
            let case = &cases[discr.0 as usize];
            let (max_sz, _) = union_size_and_align(&cases[..]);
            let lldiscr = C_integral(ll_inttype(ccx, ity), discr.0 as u64, true);
            let mut f = vec![lldiscr];
            f.extend_from_slice(vals);
            let mut contents = build_const_struct(ccx, case, &f[..]);
            contents.extend_from_slice(&[padding(ccx, max_sz - case.size)]);
            C_struct(ccx, &contents[..], false)
        }
        Univariant(ref st, _dro) => {
            // Struct/tuple/univariant enum: the discriminant must be 0 and is
            // not stored.
            assert_eq!(discr, Disr(0));
            let contents = build_const_struct(ccx, st, vals);
            C_struct(ccx, &contents[..], st.packed)
        }
        RawNullablePointer { nndiscr, nnty, .. } => {
            if discr == nndiscr {
                // Non-null variant: the constant is the pointer value itself.
                assert_eq!(vals.len(), 1);
                vals[0]
            } else {
                // Null variant: represented as a null of the non-null
                // variant's type.
                C_null(type_of::sizing_type_of(ccx, nnty))
            }
        }
        StructWrappedNullablePointer { ref nonnull, nndiscr, .. } => {
            if discr == nndiscr {
                C_struct(ccx, &build_const_struct(ccx,
                                                  nonnull,
                                                  vals),
                         false)
            } else {
                let vals = nonnull.fields.iter().map(|&ty| {
                    // Always use null even if it's not the `discrfield`th
                    // field; see #8506.
                    C_null(type_of::sizing_type_of(ccx, ty))
                }).collect::<Vec<ValueRef>>();
                C_struct(ccx, &build_const_struct(ccx,
                                                  nonnull,
                                                  &vals[..]),
                         false)
            }
        }
    }
}
|
|
|
|
|
2014-11-24 19:06:06 -06:00
|
|
|
/// Compute struct field offsets relative to struct begin.
|
2014-09-29 14:11:30 -05:00
|
|
|
fn compute_struct_field_offsets<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
|
|
|
|
st: &Struct<'tcx>) -> Vec<u64> {
|
2014-04-12 13:56:34 -05:00
|
|
|
let mut offsets = vec!();
|
|
|
|
|
|
|
|
let mut offset = 0;
|
2015-01-31 11:20:46 -06:00
|
|
|
for &ty in &st.fields {
|
2014-04-12 13:56:34 -05:00
|
|
|
let llty = type_of::sizing_type_of(ccx, ty);
|
|
|
|
if !st.packed {
|
2014-10-14 15:36:11 -05:00
|
|
|
let type_align = type_of::align_of(ccx, ty);
|
2014-04-12 13:56:34 -05:00
|
|
|
offset = roundup(offset, type_align);
|
|
|
|
}
|
|
|
|
offsets.push(offset);
|
2014-10-14 15:36:11 -05:00
|
|
|
offset += machine::llsize_of_alloc(ccx, llty);
|
2014-04-12 13:56:34 -05:00
|
|
|
}
|
|
|
|
assert_eq!(st.fields.len(), offsets.len());
|
|
|
|
offsets
|
|
|
|
}
|
|
|
|
|
2014-11-24 19:06:06 -06:00
|
|
|
/// Building structs is a little complicated, because we might need to
|
|
|
|
/// insert padding if a field's value is less aligned than its type.
|
|
|
|
///
|
|
|
|
/// Continuing the example from `trans_const`, a value of type `(u32,
|
|
|
|
/// E)` should have the `E` at offset 8, but if that field's
|
|
|
|
/// initializer is 4-byte aligned then simply translating the tuple as
|
|
|
|
/// a two-element struct will locate it at offset 4, and accesses to it
|
|
|
|
/// will read the wrong memory.
|
2014-09-29 14:11:30 -05:00
|
|
|
fn build_const_struct<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
|
|
|
|
st: &Struct<'tcx>, vals: &[ValueRef])
|
|
|
|
-> Vec<ValueRef> {
|
2013-05-18 21:02:45 -05:00
|
|
|
assert_eq!(vals.len(), st.fields.len());
|
2013-02-18 16:16:21 -06:00
|
|
|
|
2014-04-12 13:56:34 -05:00
|
|
|
let target_offsets = compute_struct_field_offsets(ccx, st);
|
|
|
|
|
|
|
|
// offset of current value
|
2013-02-18 16:16:21 -06:00
|
|
|
let mut offset = 0;
|
2014-03-04 12:02:49 -06:00
|
|
|
let mut cfields = Vec::new();
|
2015-06-10 11:22:20 -05:00
|
|
|
for (&val, target_offset) in vals.iter().zip(target_offsets) {
|
2014-04-12 13:56:34 -05:00
|
|
|
if !st.packed {
|
2014-10-14 15:36:11 -05:00
|
|
|
let val_align = machine::llalign_of_min(ccx, val_ty(val));
|
2014-04-12 13:56:34 -05:00
|
|
|
offset = roundup(offset, val_align);
|
|
|
|
}
|
2014-01-19 02:21:14 -06:00
|
|
|
if offset != target_offset {
|
2014-03-15 15:29:34 -05:00
|
|
|
cfields.push(padding(ccx, target_offset - offset));
|
2013-02-18 16:16:21 -06:00
|
|
|
offset = target_offset;
|
|
|
|
}
|
2014-04-12 13:56:34 -05:00
|
|
|
assert!(!is_undef(val));
|
|
|
|
cfields.push(val);
|
2014-10-14 15:36:11 -05:00
|
|
|
offset += machine::llsize_of_alloc(ccx, val_ty(val));
|
2014-04-12 13:56:34 -05:00
|
|
|
}
|
|
|
|
|
2014-08-06 04:59:40 -05:00
|
|
|
assert!(st.sized && offset <= st.size);
|
|
|
|
if offset != st.size {
|
|
|
|
cfields.push(padding(ccx, st.size - offset));
|
2013-02-18 16:16:21 -06:00
|
|
|
}
|
|
|
|
|
2014-04-12 13:56:34 -05:00
|
|
|
cfields
|
2013-02-18 16:16:21 -06:00
|
|
|
}
|
|
|
|
|
2014-03-15 15:29:34 -05:00
|
|
|
fn padding(ccx: &CrateContext, size: u64) -> ValueRef {
|
|
|
|
C_undef(Type::array(&Type::i8(ccx), size))
|
2013-02-28 13:24:30 -06:00
|
|
|
}
|
|
|
|
|
2014-01-26 02:43:42 -06:00
|
|
|
// FIXME this utility routine should be somewhere more general
|
2013-06-18 16:45:18 -05:00
|
|
|
#[inline]
// Round `x` up to the nearest multiple of `a` (works for any nonzero `a`,
// not just powers of two).
fn roundup(x: u64, a: u32) -> u64 {
    let align = a as u64;
    (x + align - 1) / align * align
}
|
2013-02-18 16:16:21 -06:00
|
|
|
|
2014-12-04 15:44:51 -06:00
|
|
|
/// Get the discriminant of a constant value.
pub fn const_get_discrim(ccx: &CrateContext, r: &Repr, val: ValueRef) -> Disr {
    match *r {
        CEnum(ity, _, _) => {
            // C-like enum: the constant *is* the discriminant.
            match ity {
                attr::SignedInt(..) => Disr(const_to_int(val) as u64),
                attr::UnsignedInt(..) => Disr(const_to_uint(val)),
            }
        }
        General(ity, _, _) => {
            // General enum: the discriminant is stored as element 0 of the
            // constant struct.
            match ity {
                attr::SignedInt(..) => Disr(const_to_int(const_get_elt(ccx, val, &[0])) as u64),
                attr::UnsignedInt(..) => Disr(const_to_uint(const_get_elt(ccx, val, &[0])))
            }
        }
        // Structs/tuples are treated as a univariant enum: discriminant 0.
        Univariant(..) => Disr(0),
        RawNullablePointer { .. } | StructWrappedNullablePointer { .. } => {
            // The discriminant of a nullable-pointer constant is encoded in
            // the pointer value itself and cannot be read this way.
            ccx.sess().bug("const discrim access of non c-like enum")
        }
    }
}
|
|
|
|
|
2014-11-24 19:06:06 -06:00
|
|
|
/// Extract a field of a constant value, as appropriate for its
|
|
|
|
/// representation.
|
|
|
|
///
|
|
|
|
/// (Not to be confused with `common::const_get_elt`, which operates on
|
|
|
|
/// raw LLVM-level structs and arrays.)
|
2013-12-19 18:47:15 -06:00
|
|
|
pub fn const_get_field(ccx: &CrateContext, r: &Repr, val: ValueRef,
                       _discr: Disr, ix: usize) -> ValueRef {
    match *r {
        CEnum(..) => ccx.sess().bug("element access in C-like enum const"),
        Univariant(..) => const_struct_field(ccx, val, ix),
        // Skip element 0, which holds the discriminant.
        General(..) => const_struct_field(ccx, val, ix + 1),
        RawNullablePointer { .. } => {
            // The constant is the (sole) pointer field itself.
            assert_eq!(ix, 0);
            val
        },
        StructWrappedNullablePointer{ .. } => const_struct_field(ccx, val, ix)
    }
}
|
|
|
|
|
2013-02-28 13:24:30 -06:00
|
|
|
/// Extract field of struct-like const, skipping our alignment padding.
|
2015-03-25 19:06:52 -05:00
|
|
|
fn const_struct_field(ccx: &CrateContext, val: ValueRef, ix: usize) -> ValueRef {
|
2013-02-18 16:16:21 -06:00
|
|
|
// Get the ix-th non-undef element of the struct.
|
|
|
|
let mut real_ix = 0; // actual position in the struct
|
|
|
|
let mut ix = ix; // logical index relative to real_ix
|
|
|
|
let mut field;
|
|
|
|
loop {
|
|
|
|
loop {
|
2014-12-04 15:44:51 -06:00
|
|
|
field = const_get_elt(ccx, val, &[real_ix]);
|
2013-02-18 16:16:21 -06:00
|
|
|
if !is_undef(field) {
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
real_ix = real_ix + 1;
|
|
|
|
}
|
|
|
|
if ix == 0 {
|
|
|
|
return field;
|
|
|
|
}
|
|
|
|
ix = ix - 1;
|
|
|
|
real_ix = real_ix + 1;
|
|
|
|
}
|
|
|
|
}
|