Auto merge of #15380 - HKalbasi:mir, r=HKalbasi

Fix unsized struct problems in mir eval
This commit is contained in:
bors 2023-08-02 14:28:41 +00:00
commit d398ad3326
7 changed files with 220 additions and 35 deletions

View File

@ -1186,6 +1186,25 @@ const fn f(x: &(u8, u8)) -> u8 {
);
}
#[test]
fn destructing_assignment() {
    // `_ = expr` (a destructuring assignment that discards the value) must
    // still evaluate `expr`: `f` increments `i` through the mutable
    // reference, so the const block evaluates to 5.
    let fixture = r#"
//- minicore: add
const fn f(i: &mut u8) -> &mut u8 {
*i += 1;
i
}
const GOAL: u8 = {
let mut i = 4;
_ = f(&mut i);
i
};
"#;
    check_number(fixture, 5);
}
#[test]
fn let_else() {
check_number(
@ -1745,6 +1764,24 @@ fn add2(x: u8) -> u8 {
);
}
#[test]
fn function_pointer_and_niche_optimization() {
    // Stores a function pointer inside `Option` (whose layout may use a
    // niche, per the test name) and calls it back out through a match;
    // `t(3)` is `3 + 2 = 5`.
    let fixture = r#"
//- minicore: option
const GOAL: i32 = {
let f: fn(i32) -> i32 = |x| x + 2;
let init = Some(f);
match init {
Some(t) => t(3),
None => 222,
}
};
"#;
    check_number(fixture, 5);
}
#[test]
fn function_pointer() {
check_number(
@ -2359,11 +2396,14 @@ fn const_loop() {
fn const_transfer_memory() {
    // Referencing one constant from another requires transferring the
    // referenced constant's memory into the current evaluation. Covers plain
    // `&i32` consts, refs stored in an array, and refs stored in a tuple:
    // 1 + 10 + 100 + 1000 = 1111.
    // NOTE(review): the rendered diff had the pre-change fixture lines
    // (`&2`, `&5`, expected `7`) interleaved with the post-change ones,
    // producing duplicate const definitions; only the post-merge version is
    // kept here.
    check_number(
        r#"
//- minicore: slice, index, coerce_unsized
const A1: &i32 = &1;
const A2: &i32 = &10;
const A3: [&i32; 3] = [&1, &2, &100];
const A4: (i32, &i32) = (1, &1000);
const GOAL: i32 = *A1 + *A2 + *A3[2] + *A4.1;
"#,
        1111,
    );
}
@ -2634,9 +2674,9 @@ const fn f(n: i32) -> i32 {
}
sum
}
const GOAL: i32 = f(10000);
const GOAL: i32 = f(1000);
"#,
10000 * 10000,
1000 * 1000,
);
}
@ -2683,7 +2723,7 @@ fn unsized_field() {
//- minicore: coerce_unsized, index, slice, transmute
use core::mem::transmute;
struct Slice([u8]);
struct Slice([usize]);
struct Slice2(Slice);
impl Slice2 {
@ -2691,19 +2731,19 @@ fn as_inner(&self) -> &Slice {
&self.0
}
fn as_bytes(&self) -> &[u8] {
fn as_bytes(&self) -> &[usize] {
&self.as_inner().0
}
}
const GOAL: u8 = unsafe {
let x: &[u8] = &[1, 2, 3];
const GOAL: usize = unsafe {
let x: &[usize] = &[1, 2, 3];
let x: &Slice2 = transmute(x);
let x = x.as_bytes();
x[0] + x[1] + x[2]
x[0] + x[1] + x[2] + x.len() * 100
};
"#,
6,
306,
);
}

View File

@ -251,6 +251,28 @@ fn wrapping_add() {
);
}
#[test]
fn ptr_offset_from() {
    // The pointer-difference intrinsics report distance in units of
    // `size_of::<T>()`, signed by operand order:
    // r1 = -(-4) = 4, r2 = 2, r3 = 3, so GOAL = 3*100 + 2*10 + 4 = 324.
    let fixture = r#"
//- minicore: index, slice, coerce_unsized
extern "rust-intrinsic" {
pub fn ptr_offset_from<T>(ptr: *const T, base: *const T) -> isize;
pub fn ptr_offset_from_unsigned<T>(ptr: *const T, base: *const T) -> usize;
}
const GOAL: isize = {
let x = [1, 2, 3, 4, 5i32];
let r1 = -ptr_offset_from(&x[0], &x[4]);
let r2 = ptr_offset_from(&x[3], &x[1]);
let r3 = ptr_offset_from_unsigned(&x[3], &x[0]) as isize;
r3 * 100 + r2 * 10 + r1
};
"#;
    check_number(fixture, 324);
}
#[test]
fn saturating() {
check_number(

View File

@ -234,6 +234,7 @@ fn is_parent(&self, child: &Place) -> bool {
self.local == child.local && child.projection.starts_with(&self.projection)
}
/// The place itself is not included
fn iterate_over_parents(&self) -> impl Iterator<Item = Place> + '_ {
(0..self.projection.len())
.map(|x| &self.projection[0..x])

View File

@ -68,18 +68,22 @@ pub struct VTableMap {
}
impl VTableMap {
const OFFSET: usize = 1000; // We should add some offset to ids to make 0 (null) an invalid id.
fn id(&mut self, ty: Ty) -> usize {
    // Returns the vtable id for `ty`, allocating a fresh one on first use.
    if let Some(it) = self.ty_to_id.get(&ty) {
        return *it;
    }
    // Ids are offset by `OFFSET` so that 0 (a null pointer) is never a
    // valid id (see the comment on `OFFSET`).
    // NOTE(review): the rendered diff left the stale pre-merge line
    // `let id = self.id_to_ty.len();` in front of this one — a dead,
    // shadowed binding; it is removed here.
    let id = self.id_to_ty.len() + VTableMap::OFFSET;
    self.id_to_ty.push(ty.clone());
    self.ty_to_id.insert(ty, id);
    id
}
pub(crate) fn ty(&self, id: usize) -> Result<&Ty> {
    // Undo the `OFFSET` applied in `id`; ids below `OFFSET` (including the
    // null id 0) fail the `checked_sub` and are reported as invalid.
    // NOTE(review): the rendered diff left the stale pre-merge body
    // (`self.id_to_ty.get(id).ok_or(...)`) above the new chain, making the
    // method two expressions; only the post-merge version is kept.
    id.checked_sub(VTableMap::OFFSET)
        .and_then(|id| self.id_to_ty.get(id))
        .ok_or(MirEvalError::InvalidVTableId(id))
}
fn ty_of_bytes(&self, bytes: &[u8]) -> Result<&Ty> {
@ -467,6 +471,10 @@ fn add_place(&mut self, p: Place) {
fn remove_place(&mut self, p: &Place) -> bool {
    // FIXME: replace parents with parts
    // If any ancestor of `p` is tracked for drop, untrack that ancestor
    // instead of `p` itself (only the first such ancestor is removed).
    for ancestor in p.iterate_over_parents() {
        if self.need_drop.contains(&ancestor) {
            self.need_drop.remove(&ancestor);
            return true;
        }
    }
    // No tracked ancestor: remove `p` itself, reporting whether it was set.
    self.need_drop.remove(p)
}
}
@ -511,6 +519,11 @@ pub fn interpret_mir(
)
}
// Budget for `execution_limit` in `Evaluator::new` — presumably a cap on
// evaluation work so non-terminating const evaluation gives up (TODO
// confirm the exact unit at the use site). Tests use a 100x smaller budget
// so runaway evaluations fail fast.
#[cfg(test)]
const EXECUTION_LIMIT: usize = 100_000;
#[cfg(not(test))]
const EXECUTION_LIMIT: usize = 10_000_000;
impl Evaluator<'_> {
pub fn new<'a>(
db: &'a dyn HirDatabase,
@ -534,7 +547,7 @@ pub fn new<'a>(
stderr: vec![],
assert_placeholder_ty_is_unused,
stack_depth_limit: 100,
execution_limit: 1000_000,
execution_limit: EXECUTION_LIMIT,
memory_limit: 1000_000_000, // 2GB, 1GB for stack and 1GB for heap
layout_cache: RefCell::new(HashMap::default()),
}
@ -683,8 +696,10 @@ fn place_addr_and_ty_and_metadata<'a>(
.offset(u32::from(f.local_id.into_raw()) as usize)
.bytes_usize();
addr = addr.offset(offset);
// FIXME: support structs with unsized fields
metadata = None;
// Unsized field metadata is equal to the metadata of the struct
if self.size_align_of(&ty, locals)?.is_some() {
metadata = None;
}
}
ProjectionElem::OpaqueCast(_) => not_supported!("opaque cast"),
}
@ -1803,6 +1818,17 @@ fn rec(
}
}
}
chalk_ir::TyKind::Array(inner, len) => {
let len = match try_const_usize(this.db, &len) {
Some(it) => it as usize,
None => not_supported!("non evaluatable array len in patching addresses"),
};
let size = this.size_of_sized(inner, locals, "inner of array")?;
for i in 0..len {
let offset = i * size;
rec(this, &bytes[offset..offset + size], inner, locals, mm)?;
}
}
chalk_ir::TyKind::Tuple(_, subst) => {
let layout = this.layout(ty)?;
for (id, ty) in subst.iter(Interner).enumerate() {
@ -1911,10 +1937,31 @@ fn patch_addresses(
AdtId::UnionId(_) => (),
AdtId::EnumId(_) => (),
},
TyKind::Tuple(_, subst) => {
for (id, ty) in subst.iter(Interner).enumerate() {
let ty = ty.assert_ty_ref(Interner); // Tuple only has type argument
let offset = layout.fields.offset(id).bytes_usize();
self.patch_addresses(patch_map, old_vtable, addr.offset(offset), ty, locals)?;
}
}
TyKind::Array(inner, len) => {
let len = match try_const_usize(self.db, &len) {
Some(it) => it as usize,
None => not_supported!("non evaluatable array len in patching addresses"),
};
let size = self.size_of_sized(inner, locals, "inner of array")?;
for i in 0..len {
self.patch_addresses(
patch_map,
old_vtable,
addr.offset(i * size),
inner,
locals,
)?;
}
}
TyKind::AssociatedType(_, _)
| TyKind::Scalar(_)
| TyKind::Tuple(_, _)
| TyKind::Array(_, _)
| TyKind::Slice(_)
| TyKind::Raw(_, _)
| TyKind::OpaqueType(_, _)

View File

@ -694,12 +694,15 @@ fn exec_intrinsic(
else {
return Err(MirEvalError::TypeError("type_name generic arg is not provided"));
};
let Ok(ty_name) = ty.display_source_code(
let ty_name = match ty.display_source_code(
self.db,
locals.body.owner.module(self.db.upcast()),
true,
) else {
not_supported!("fail in generating type_name using source code display");
) {
Ok(ty_name) => ty_name,
// Fallback to human readable display in case of `Err`. Ideally we want to use `display_source_code` to
// render full paths.
Err(_) => ty.display(self.db).to_string(),
};
let len = ty_name.len();
let addr = self.heap_allocate(len, 1)?;
@ -755,7 +758,22 @@ fn exec_intrinsic(
let ans = lhs.wrapping_add(rhs);
destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
}
"wrapping_sub" | "unchecked_sub" | "ptr_offset_from_unsigned" | "ptr_offset_from" => {
"ptr_offset_from_unsigned" | "ptr_offset_from" => {
let [lhs, rhs] = args else {
return Err(MirEvalError::TypeError("wrapping_sub args are not provided"));
};
let lhs = i128::from_le_bytes(pad16(lhs.get(self)?, false));
let rhs = i128::from_le_bytes(pad16(rhs.get(self)?, false));
let ans = lhs.wrapping_sub(rhs);
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
else {
return Err(MirEvalError::TypeError("ptr_offset_from generic arg is not provided"));
};
let size = self.size_of_sized(ty, locals, "ptr_offset_from arg")? as i128;
let ans = ans / size;
destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
}
"wrapping_sub" | "unchecked_sub" => {
let [lhs, rhs] = args else {
return Err(MirEvalError::TypeError("wrapping_sub args are not provided"));
};

View File

@ -182,6 +182,50 @@ fn main() {
);
}
#[test]
fn drop_struct_field() {
    // Moving a field out of a struct (`let x1 = xt.1;`, `drop(xt.0);`) must
    // suppress the drop of that field when the struct itself goes out of
    // scope; the `Cell` counts how many `X` values have been dropped at each
    // checkpoint.
    let fixture = r#"
//- minicore: drop, add, option, cell, builtin_impls
use core::cell::Cell;
fn should_not_reach() {
_ // FIXME: replace this function with panic when that works
}
struct X<'a>(&'a Cell<i32>);
impl<'a> Drop for X<'a> {
fn drop(&mut self) {
self.0.set(self.0.get() + 1)
}
}
struct Tuple<'a>(X<'a>, X<'a>, X<'a>);
fn main() {
let s = Cell::new(0);
{
let x0 = X(&s);
let xt = Tuple(x0, X(&s), X(&s));
let x1 = xt.1;
if s.get() != 0 {
should_not_reach();
}
drop(xt.0);
if s.get() != 1 {
should_not_reach();
}
}
// FIXME: this should be 3
if s.get() != 2 {
should_not_reach();
}
}
"#;
    check_pass(fixture);
}
#[test]
fn drop_in_place() {
check_pass(

View File

@ -1028,18 +1028,7 @@ fn lower_expr_to_place_without_adjust(
self.push_assignment(current, lhs_place, r_value, expr_id.into());
return Ok(Some(current));
} else {
let Some((lhs_place, current)) =
self.lower_expr_as_place(current, *lhs, false)?
else {
return Ok(None);
};
let Some((rhs_op, current)) =
self.lower_expr_to_some_operand(*rhs, current)?
else {
return Ok(None);
};
self.push_assignment(current, lhs_place, rhs_op.into(), expr_id.into());
return Ok(Some(current));
return self.lower_assignment(current, *lhs, *rhs, expr_id.into());
}
}
let Some((lhs_op, current)) = self.lower_expr_to_some_operand(*lhs, current)?
@ -1285,6 +1274,30 @@ fn lower_expr_to_place_without_adjust(
}
}
/// Lowers an assignment expression `lhs = rhs` starting from `current`.
///
/// The RHS is lowered first, so its side effects are always emitted — even
/// for `_ = rhs`, where the underscore LHS means the value is simply
/// discarded and no place or assignment is produced.
///
/// Returns `Ok(None)` when control flow diverges while lowering either
/// operand, otherwise the basic block to continue lowering from.
fn lower_assignment(
&mut self,
current: BasicBlockId,
lhs: ExprId,
rhs: ExprId,
span: MirSpan,
) -> Result<Option<BasicBlockId>> {
// Evaluate the RHS before even looking at the LHS (ordering is
// observable through side effects).
let Some((rhs_op, current)) =
self.lower_expr_to_some_operand(rhs, current)?
else {
return Ok(None);
};
// `_` as the assignee: the RHS has been evaluated; nothing is stored.
if matches!(&self.body.exprs[lhs], Expr::Underscore) {
return Ok(Some(current));
}
let Some((lhs_place, current)) =
self.lower_expr_as_place(current, lhs, false)?
else {
return Ok(None);
};
self.push_assignment(current, lhs_place, rhs_op.into(), span);
Ok(Some(current))
}
fn placeholder_subst(&mut self) -> Substitution {
let placeholder_subst = match self.owner.as_generic_def_id() {
Some(it) => TyBuilder::placeholder_subst(self.db, it),