Merge pull request #284 from oli-obk/no_more_llvm_madness

Rust reorders fields, but miri uses the order from the source files
Oliver Schneider 2017-08-04 14:31:13 +02:00 committed by GitHub
commit 8b449c3ead
4 changed files with 51 additions and 6 deletions
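For context on the commit message (an illustration, not part of this PR): with the default repr(Rust), the compiler may reorder fields in memory, so a field's in-memory position can differ from its position in the source. A minimal sketch of that difference, assuming a toolchain where std::mem::offset_of! is available:

// Illustration only, not part of this PR. With the default repr(Rust) the
// compiler may reorder fields; with repr(C) it must keep source order.
use std::mem::offset_of;

struct Reorderable {
    small: u8,
    big: u64,
    tail: u8,
}

#[repr(C)]
struct SourceOrder {
    small: u8,
    big: u64,
    tail: u8,
}

fn main() {
    // repr(C): offsets follow declaration order (typically 0, 8, 16).
    println!("repr(C):    {} {} {}",
             offset_of!(SourceOrder, small),
             offset_of!(SourceOrder, big),
             offset_of!(SourceOrder, tail));
    // repr(Rust): `big` is usually moved to offset 0 to avoid padding, so a
    // field's in-memory index no longer matches its source index -- the
    // mismatch between rustc's layout order and miri's source order.
    println!("repr(Rust): {} {} {}",
             offset_of!(Reorderable, small),
             offset_of!(Reorderable, big),
             offset_of!(Reorderable, tail));
}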


@@ -690,7 +690,7 @@ impl<'a, 'tcx, M: Machine<'tcx>> EvalContext<'a, 'tcx, M> {
                 }
             }
-            StructWrappedNullablePointer { nndiscr, ref discrfield, .. } => {
+            StructWrappedNullablePointer { nndiscr, ref discrfield_source, .. } => {
                 if let mir::AggregateKind::Adt(_, variant, _, _) = **kind {
                     if nndiscr == variant as u64 {
                         self.assign_fields(dest, dest_ty, operands)?;
@@ -699,7 +699,7 @@ impl<'a, 'tcx, M: Machine<'tcx>> EvalContext<'a, 'tcx, M> {
                             let operand_ty = self.operand_ty(operand);
                             assert_eq!(self.type_size(operand_ty)?, Some(0));
                         }
-                        let (offset, TyAndPacked { ty, packed: _ }) = self.nonnull_offset_and_ty(dest_ty, nndiscr, discrfield)?;
+                        let (offset, TyAndPacked { ty, packed: _ }) = self.nonnull_offset_and_ty(dest_ty, nndiscr, discrfield_source)?;
                         // TODO: The packed flag is ignored
                         // FIXME(solson)


@@ -104,9 +104,9 @@ impl<'a, 'tcx, M: Machine<'tcx>> EvalContext<'a, 'tcx, M> {
                 }
             }
-            Layout::StructWrappedNullablePointer { nndiscr, ref discrfield, .. } => {
+            Layout::StructWrappedNullablePointer { nndiscr, ref discrfield_source, .. } => {
                 if variant_index as u64 != nndiscr {
-                    let (offset, TyAndPacked { ty, packed }) = self.nonnull_offset_and_ty(dest_ty, nndiscr, discrfield)?;
+                    let (offset, TyAndPacked { ty, packed }) = self.nonnull_offset_and_ty(dest_ty, nndiscr, discrfield_source)?;
                     let nonnull = self.force_allocation(dest)?.to_ptr()?.offset(offset.bytes(), &self)?;
                     trace!("struct wrapped nullable pointer type: {}", ty);
                     // only the pointer part of a fat pointer is used for this space optimization


@@ -440,8 +440,8 @@ impl<'a, 'tcx, M: Machine<'tcx>> EvalContext<'a, 'tcx, M> {
                 self.read_nonnull_discriminant_value(adt_ptr, nndiscr as u128, discr_size)?
             }
-            StructWrappedNullablePointer { nndiscr, ref discrfield, .. } => {
-                let (offset, TyAndPacked { ty, packed }) = self.nonnull_offset_and_ty(adt_ty, nndiscr, discrfield)?;
+            StructWrappedNullablePointer { nndiscr, ref discrfield_source, .. } => {
+                let (offset, TyAndPacked { ty, packed }) = self.nonnull_offset_and_ty(adt_ty, nndiscr, discrfield_source)?;
                 let nonnull = adt_ptr.offset(offset.bytes(), &*self)?;
                 trace!("struct wrapped nullable pointer type: {}", ty);
                 // only the pointer part of a fat pointer is used for this space optimization
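For orientation (a hedged sketch with hypothetical names, not code from miri or this PR): StructWrappedNullablePointer was rustc's layout for Option-like enums whose data variant wraps a struct containing a never-null field; the discriminant is read through the field path leading to that field, which discrfield_source now records in source order rather than in the reordered memory order. The kind of type involved looks roughly like this:

// Hypothetical example, not from the miri sources: an enum that gets the
// "struct wrapped nullable pointer" space optimization.
struct Inner<'a> {
    count: u32,     // repr(Rust) may place this after `ptr` in memory
    ptr: &'a u8,    // never null, so a null value here can encode `Nothing`
}

enum Wrapped<'a> {
    Something(Inner<'a>), // the "non-null" variant (nndiscr)
    Nothing,              // represented by storing null in Inner::ptr
}

fn main() {
    // With this optimization the enum needs no separate discriminant,
    // so it is typically no larger than its payload.
    println!("enum: {} bytes, payload: {} bytes",
             std::mem::size_of::<Wrapped<'static>>(),
             std::mem::size_of::<Inner<'static>>());
}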


@@ -0,0 +1,45 @@
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

// zip!([a1,a2,a3,a4]) is equivalent to:
// a1.zip(a2).zip(a3).zip(a4).map(|(((x1,x2),x3),x4)| (x1,x2,x3,x4))
macro_rules! zip {
    // Entry point
    ([$a:expr, $b:expr, $($rest:expr),*]) => {
        zip!([$($rest),*], $a.zip($b), (x,y), [x,y])
    };

    // Intermediate steps to build the zipped expression, the match pattern,
    // and the output tuple of the closure, using macro hygiene to repeatedly
    // introduce new variables named 'x'.
    ([$a:expr, $($rest:expr),*], $zip:expr, $pat:pat, [$($flat:expr),*]) => {
        zip!([$($rest),*], $zip.zip($a), ($pat,x), [$($flat),*, x])
    };

    // Final step
    ([], $zip:expr, $pat:pat, [$($flat:expr),+]) => {
        $zip.map(|$pat| ($($flat),+))
    };

    // Comma: a single remaining iterator gets a trailing comma appended so
    // that the intermediate rule above can match it.
    ([$a:expr], $zip:expr, $pat:pat, [$($flat:expr),*]) => {
        zip!([$a,], $zip, $pat, [$($flat),*])
    };
}

fn main() {
    let p1 = vec![1i32, 2].into_iter();
    let p2 = vec!["10", "20"].into_iter();
    let p3 = vec![100u16, 200].into_iter();
    let p4 = vec![1000i64, 2000].into_iter();
    let e = zip!([p1, p2, p3, p4]).collect::<Vec<_>>();
    assert_eq!(e[0], (1i32, "10", 100u16, 1000i64));
}
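For reference (an illustration, not part of the committed test), the call in main above is roughly equivalent to the hand-written version described by the macro's own comment:

// Roughly what zip!([p1, p2, p3, p4]) produces (modulo macro hygiene).
fn main() {
    let p1 = vec![1i32, 2].into_iter();
    let p2 = vec!["10", "20"].into_iter();
    let p3 = vec![100u16, 200].into_iter();
    let p4 = vec![1000i64, 2000].into_iter();

    let e = p1.zip(p2).zip(p3).zip(p4)
        .map(|(((x1, x2), x3), x4)| (x1, x2, x3, x4))
        .collect::<Vec<_>>();
    assert_eq!(e[0], (1i32, "10", 100u16, 1000i64));
}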