MIR episode 4

This commit is contained in:
hkalbasi 2023-04-28 20:44:30 +03:30
parent 001607fdb4
commit 6312fbf521
47 changed files with 2557 additions and 805 deletions

View File

@ -34,7 +34,7 @@ impl fmt::Debug for Change {
}
impl Change {
pub fn new() -> Change {
pub fn new() -> Self {
Change::default()
}

View File

@ -17,7 +17,7 @@ use rustc_hash::FxHashMap;
use smallvec::SmallVec;
use syntax::{
ast::{
self, ArrayExprKind, AstChildren, BlockExpr, HasArgList, HasLoopBody, HasName,
self, ArrayExprKind, AstChildren, BlockExpr, HasArgList, HasAttrs, HasLoopBody, HasName,
SlicePatComponents,
},
AstNode, AstPtr, SyntaxNodePtr,
@ -302,16 +302,29 @@ impl ExprCollector<'_> {
self.alloc_expr(Expr::For { iterable, pat, body, label }, syntax_ptr)
}
ast::Expr::CallExpr(e) => {
let callee = self.collect_expr_opt(e.expr());
let args = if let Some(arg_list) = e.arg_list() {
arg_list.args().filter_map(|e| self.maybe_collect_expr(e)).collect()
} else {
Box::default()
let is_rustc_box = {
let attrs = e.attrs();
attrs.filter_map(|x| x.as_simple_atom()).any(|x| x == "rustc_box")
};
self.alloc_expr(
Expr::Call { callee, args, is_assignee_expr: self.is_lowering_assignee_expr },
syntax_ptr,
)
if is_rustc_box {
let expr = self.collect_expr_opt(e.arg_list().and_then(|x| x.args().next()));
self.alloc_expr(Expr::Box { expr }, syntax_ptr)
} else {
let callee = self.collect_expr_opt(e.expr());
let args = if let Some(arg_list) = e.arg_list() {
arg_list.args().filter_map(|e| self.maybe_collect_expr(e)).collect()
} else {
Box::default()
};
self.alloc_expr(
Expr::Call {
callee,
args,
is_assignee_expr: self.is_lowering_assignee_expr,
},
syntax_ptr,
)
}
}
ast::Expr::MethodCallExpr(e) => {
let receiver = self.collect_expr_opt(e.receiver());

View File

@ -232,6 +232,25 @@ impl TyBuilder<()> {
TyBuilder::new((), params, parent_subst)
}
pub fn subst_for_closure(
db: &dyn HirDatabase,
parent: DefWithBodyId,
sig_ty: Ty,
) -> Substitution {
let sig_ty = sig_ty.cast(Interner);
let self_subst = iter::once(&sig_ty);
let Some(parent) = parent.as_generic_def_id() else {
return Substitution::from_iter(Interner, self_subst);
};
Substitution::from_iter(
Interner,
self_subst
.chain(generics(db.upcast(), parent).placeholder_subst(db).iter(Interner))
.cloned()
.collect::<Vec<_>>(),
)
}
pub fn build(self) -> Substitution {
let ((), subst) = self.build_internal();
subst

View File

@ -24,7 +24,7 @@ use crate::{
method_resolution::{TraitImpls, TyFingerprint, ALL_FLOAT_FPS, ALL_INT_FPS},
to_assoc_type_id, to_chalk_trait_id,
traits::ChalkContext,
utils::generics,
utils::{generics, ClosureSubst},
wrap_empty_binders, AliasEq, AliasTy, BoundVar, CallableDefId, DebruijnIndex, FnDefId,
Interner, ProjectionTy, ProjectionTyExt, QuantifiedWhereClause, Substitution, TraitRef,
TraitRefExt, Ty, TyBuilder, TyExt, TyKind, WhereClause,
@ -337,7 +337,7 @@ impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> {
_closure_id: chalk_ir::ClosureId<Interner>,
substs: &chalk_ir::Substitution<Interner>,
) -> chalk_ir::Binders<rust_ir::FnDefInputsAndOutputDatum<Interner>> {
let sig_ty = substs.at(Interner, 0).assert_ty_ref(Interner).clone();
let sig_ty = ClosureSubst(substs).sig_ty();
let sig = &sig_ty.callable_sig(self.db).expect("first closure param should be fn ptr");
let io = rust_ir::FnDefInputsAndOutputDatum {
argument_types: sig.params().to_vec(),

View File

@ -1,20 +1,22 @@
//! Various extensions traits for Chalk types.
use chalk_ir::{FloatTy, IntTy, Mutability, Scalar, TyVariableKind, UintTy};
use chalk_ir::{cast::Cast, FloatTy, IntTy, Mutability, Scalar, TyVariableKind, UintTy};
use hir_def::{
builtin_type::{BuiltinFloat, BuiltinInt, BuiltinType, BuiltinUint},
generics::TypeOrConstParamData,
lang_item::LangItem,
type_ref::Rawness,
FunctionId, GenericDefId, HasModule, ItemContainerId, Lookup, TraitId,
DefWithBodyId, FunctionId, GenericDefId, HasModule, ItemContainerId, Lookup, TraitId,
};
use crate::{
db::HirDatabase, from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id,
from_placeholder_idx, to_chalk_trait_id, utils::generics, AdtId, AliasEq, AliasTy, Binders,
CallableDefId, CallableSig, ClosureId, DynTy, FnPointer, ImplTraitId, Interner, Lifetime,
ProjectionTy, QuantifiedWhereClause, Substitution, TraitRef, Ty, TyBuilder, TyKind, TypeFlags,
WhereClause,
db::HirDatabase,
from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, from_placeholder_idx,
to_chalk_trait_id,
utils::{generics, ClosureSubst},
AdtId, AliasEq, AliasTy, Binders, CallableDefId, CallableSig, Canonical, CanonicalVarKinds,
ClosureId, DynTy, FnPointer, ImplTraitId, InEnvironment, Interner, Lifetime, ProjectionTy,
QuantifiedWhereClause, Substitution, TraitRef, Ty, TyBuilder, TyKind, TypeFlags, WhereClause,
};
pub trait TyExt {
@ -46,6 +48,7 @@ pub trait TyExt {
fn impl_trait_bounds(&self, db: &dyn HirDatabase) -> Option<Vec<QuantifiedWhereClause>>;
fn associated_type_parent_trait(&self, db: &dyn HirDatabase) -> Option<TraitId>;
fn is_copy(self, db: &dyn HirDatabase, owner: DefWithBodyId) -> bool;
/// FIXME: Get rid of this, it's not a good abstraction
fn equals_ctor(&self, other: &Ty) -> bool;
@ -185,10 +188,7 @@ impl TyExt for Ty {
let sig = db.callable_item_signature(callable_def);
Some(sig.substitute(Interner, parameters))
}
TyKind::Closure(.., substs) => {
let sig_param = substs.at(Interner, 0).assert_ty_ref(Interner);
sig_param.callable_sig(db)
}
TyKind::Closure(.., substs) => ClosureSubst(substs).sig_ty().callable_sig(db),
_ => None,
}
}
@ -327,6 +327,20 @@ impl TyExt for Ty {
}
}
fn is_copy(self, db: &dyn HirDatabase, owner: DefWithBodyId) -> bool {
let crate_id = owner.module(db.upcast()).krate();
let Some(copy_trait) = db.lang_item(crate_id, LangItem::Copy).and_then(|x| x.as_trait()) else {
return false;
};
let trait_ref = TyBuilder::trait_ref(db, copy_trait).push(self).build();
let env = db.trait_environment_for_body(owner);
let goal = Canonical {
value: InEnvironment::new(&env.env, trait_ref.cast(Interner)),
binders: CanonicalVarKinds::empty(Interner),
};
db.trait_solve(crate_id, None, goal).is_some()
}
fn equals_ctor(&self, other: &Ty) -> bool {
match (self.kind(Interner), other.kind(Interner)) {
(TyKind::Adt(adt, ..), TyKind::Adt(adt2, ..)) => adt == adt2,

View File

@ -7,7 +7,7 @@ use hir_def::{
path::Path,
resolver::{Resolver, ValueNs},
type_ref::ConstRef,
ConstId, EnumVariantId,
DefWithBodyId, EnumVariantId,
};
use la_arena::{Idx, RawIdx};
use stdx::never;
@ -57,7 +57,7 @@ pub enum ConstEvalError {
impl From<MirLowerError> for ConstEvalError {
fn from(value: MirLowerError) -> Self {
match value {
MirLowerError::ConstEvalError(e) => *e,
MirLowerError::ConstEvalError(_, e) => *e,
_ => ConstEvalError::MirLowerError(value),
}
}
@ -168,7 +168,7 @@ pub fn try_const_usize(c: &Const) -> Option<u128> {
pub(crate) fn const_eval_recover(
_: &dyn HirDatabase,
_: &[String],
_: &ConstId,
_: &DefWithBodyId,
_: &Substitution,
) -> Result<Const, ConstEvalError> {
Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
@ -184,10 +184,9 @@ pub(crate) fn const_eval_discriminant_recover(
pub(crate) fn const_eval_query(
db: &dyn HirDatabase,
const_id: ConstId,
def: DefWithBodyId,
subst: Substitution,
) -> Result<Const, ConstEvalError> {
let def = const_id.into();
let body = db.mir_body(def)?;
let c = interpret_mir(db, &body, subst, false)?;
Ok(c)

View File

@ -1,4 +1,4 @@
use base_db::fixture::WithFixture;
use base_db::{fixture::WithFixture, FileId};
use chalk_ir::Substitution;
use hir_def::db::DefDatabase;
@ -16,7 +16,7 @@ mod intrinsics;
fn simplify(e: ConstEvalError) -> ConstEvalError {
match e {
ConstEvalError::MirEvalError(MirEvalError::InFunction(_, e)) => {
ConstEvalError::MirEvalError(MirEvalError::InFunction(_, e, _, _)) => {
simplify(ConstEvalError::MirEvalError(*e))
}
_ => e,
@ -24,13 +24,30 @@ fn simplify(e: ConstEvalError) -> ConstEvalError {
}
#[track_caller]
fn check_fail(ra_fixture: &str, error: ConstEvalError) {
assert_eq!(eval_goal(ra_fixture).map_err(simplify), Err(error));
fn check_fail(ra_fixture: &str, error: impl FnOnce(ConstEvalError) -> bool) {
let (db, file_id) = TestDB::with_single_file(ra_fixture);
match eval_goal(&db, file_id).map_err(simplify) {
Ok(_) => panic!("Expected fail, but it succeeded"),
Err(e) => assert!(error(e)),
}
}
#[track_caller]
fn check_number(ra_fixture: &str, answer: i128) {
let r = eval_goal(ra_fixture).unwrap();
let (db, file_id) = TestDB::with_single_file(ra_fixture);
let r = match eval_goal(&db, file_id) {
Ok(t) => t,
Err(e) => {
let mut err = String::new();
let span_formatter = |file, range| format!("{:?} {:?}", file, range);
match e {
ConstEvalError::MirLowerError(e) => e.pretty_print(&mut err, &db, span_formatter),
ConstEvalError::MirEvalError(e) => e.pretty_print(&mut err, &db, span_formatter),
}
.unwrap();
panic!("Error in evaluating goal: {}", err);
}
};
match &r.data(Interner).value {
chalk_ir::ConstValue::Concrete(c) => match &c.interned {
ConstScalar::Bytes(b, _) => {
@ -47,10 +64,9 @@ fn check_number(ra_fixture: &str, answer: i128) {
}
}
fn eval_goal(ra_fixture: &str) -> Result<Const, ConstEvalError> {
let (db, file_id) = TestDB::with_single_file(ra_fixture);
fn eval_goal(db: &TestDB, file_id: FileId) -> Result<Const, ConstEvalError> {
let module_id = db.module_for_file(file_id);
let def_map = module_id.def_map(&db);
let def_map = module_id.def_map(db);
let scope = &def_map[module_id.local_id].scope;
let const_id = scope
.declarations()
@ -65,7 +81,7 @@ fn eval_goal(ra_fixture: &str) -> Result<Const, ConstEvalError> {
_ => None,
})
.unwrap();
db.const_eval(const_id, Substitution::empty(Interner))
db.const_eval(const_id.into(), Substitution::empty(Interner))
}
#[test]
@ -303,6 +319,81 @@ fn overloaded_index() {
);
}
#[test]
fn overloaded_binop() {
check_number(
r#"
//- minicore: add
enum Color {
Red,
Green,
Yellow,
}
use Color::*;
impl core::ops::Add for Color {
type Output = Color;
fn add(self, rhs: Color) -> Self::Output {
Yellow
}
}
impl core::ops::AddAssign for Color {
fn add_assign(&mut self, rhs: Color) {
*self = Red;
}
}
const GOAL: bool = {
let x = Red + Green;
let mut y = Green;
y += x;
x == Yellow && y == Red && Red + Green == Yellow && Red + Red == Yellow && Yellow + Green == Yellow
};
"#,
1,
);
check_number(
r#"
//- minicore: add
impl core::ops::Add for usize {
type Output = usize;
fn add(self, rhs: usize) -> Self::Output {
self + rhs
}
}
impl core::ops::AddAssign for usize {
fn add_assign(&mut self, rhs: usize) {
*self += rhs;
}
}
#[lang = "shl"]
pub trait Shl<Rhs = Self> {
type Output;
fn shl(self, rhs: Rhs) -> Self::Output;
}
impl Shl<u8> for usize {
type Output = usize;
fn shl(self, rhs: u8) -> Self::Output {
self << rhs
}
}
const GOAL: usize = {
let mut x = 10;
x += 20;
2 + 2 + (x << 1u8)
};"#,
64,
);
}
#[test]
fn function_call() {
check_number(
@ -426,6 +517,16 @@ fn generic_fn() {
"#,
12,
);
check_number(
r#"
const fn y<T>(b: T) -> (T, ) {
let alloc = b;
(alloc, )
}
const GOAL: u8 = y(2).0;
"#,
2,
);
check_number(
r#"
//- minicore: coerce_unsized, index, slice
@ -590,6 +691,30 @@ fn loops() {
"#,
8,
);
check_number(
r#"
//- minicore: add
const GOAL: u8 = {
let mut x = 0;
'a: loop {
'b: loop {
'c: while x < 20 {
'd: while x < 5 {
'e: loop {
x += 1;
continue 'c;
};
};
x += 1;
};
break 'a;
};
}
x
};
"#,
20,
);
}
#[test]
@ -790,10 +915,12 @@ fn path_pattern_matching() {
use Season::*;
const MY_SEASON: Season = Summer;
const fn f(x: Season) -> i32 {
match x {
Spring => 1,
Summer => 2,
MY_SEASON => 2,
Fall => 3,
Winter => 4,
}
@ -944,19 +1071,10 @@ fn function_param_patterns() {
fn match_guards() {
check_number(
r#"
//- minicore: option, eq
impl<T: PartialEq> PartialEq for Option<T> {
fn eq(&self, other: &Rhs) -> bool {
match (self, other) {
(Some(x), Some(y)) => x == y,
(None, None) => true,
_ => false,
}
}
}
//- minicore: option
fn f(x: Option<i32>) -> i32 {
match x {
y if y == Some(42) => 42000,
y if let Some(42) = y => 42000,
Some(y) => y,
None => 10
}
@ -967,6 +1085,59 @@ fn match_guards() {
);
}
#[test]
fn result_layout_niche_optimization() {
check_number(
r#"
//- minicore: option, result
const GOAL: i32 = match Some(2).ok_or(Some(2)) {
Ok(x) => x,
Err(_) => 1000,
};
"#,
2,
);
check_number(
r#"
//- minicore: result
pub enum AlignmentEnum64 {
_Align1Shl0 = 1 << 0,
_Align1Shl1 = 1 << 1,
_Align1Shl2 = 1 << 2,
_Align1Shl3 = 1 << 3,
_Align1Shl4 = 1 << 4,
_Align1Shl5 = 1 << 5,
}
const GOAL: Result<AlignmentEnum64, ()> = {
let align = Err(());
align
};
"#,
0, // It is 0 since result is niche encoded and 1 is valid for `AlignmentEnum64`
);
check_number(
r#"
//- minicore: result
pub enum AlignmentEnum64 {
_Align1Shl0 = 1 << 0,
_Align1Shl1 = 1 << 1,
_Align1Shl2 = 1 << 2,
_Align1Shl3 = 1 << 3,
_Align1Shl4 = 1 << 4,
_Align1Shl5 = 1 << 5,
}
const GOAL: i32 = {
let align = Ok::<_, ()>(AlignmentEnum64::_Align1Shl0);
match align {
Ok(_) => 2,
Err(_) => 1,
}
};
"#,
2,
);
}
#[test]
fn options() {
check_number(
@ -1147,6 +1318,16 @@ fn closures() {
check_number(
r#"
//- minicore: fn, copy
const GOAL: i32 = {
let c: fn(i32) -> i32 = |x| 2 * x;
c(2) + c(10)
};
"#,
24,
);
check_number(
r#"
//- minicore: fn, copy
struct X(i32);
impl X {
fn mult(&mut self, n: i32) {
@ -1180,6 +1361,36 @@ fn closures() {
);
}
#[test]
fn closure_and_impl_fn() {
check_number(
r#"
//- minicore: fn, copy
fn closure_wrapper<F: FnOnce() -> i32>(c: F) -> impl FnOnce() -> F {
|| c
}
const GOAL: i32 = {
let y = 5;
let c = closure_wrapper(|| y);
c()()
};
"#,
5,
);
check_number(
r#"
//- minicore: fn, copy
fn f<T, F: Fn() -> T>(t: F) -> impl Fn() -> T {
move || t()
}
const GOAL: i32 = f(|| 2)();
"#,
2,
);
}
#[test]
fn or_pattern() {
check_number(
@ -1218,6 +1429,23 @@ fn or_pattern() {
);
}
#[test]
fn function_pointer_in_constants() {
check_number(
r#"
struct Foo {
f: fn(u8) -> u8,
}
const FOO: Foo = Foo { f: add2 };
fn add2(x: u8) -> u8 {
x + 2
}
const GOAL: u8 = (FOO.f)(3);
"#,
5,
);
}
#[test]
fn function_pointer() {
check_number(
@ -1432,6 +1660,51 @@ fn dyn_trait() {
);
}
#[test]
fn boxes() {
check_number(
r#"
//- minicore: coerce_unsized, deref_mut, slice
use core::ops::{Deref, DerefMut};
use core::{marker::Unsize, ops::CoerceUnsized};
#[lang = "owned_box"]
pub struct Box<T: ?Sized> {
inner: *mut T,
}
impl<T> Box<T> {
fn new(t: T) -> Self {
#[rustc_box]
Box::new(t)
}
}
impl<T: ?Sized> Deref for Box<T> {
type Target = T;
fn deref(&self) -> &T {
&**self
}
}
impl<T: ?Sized> DerefMut for Box<T> {
fn deref_mut(&mut self) -> &mut T {
&mut **self
}
}
impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Box<U>> for Box<T> {}
const GOAL: usize = {
let x = Box::new(5);
let y: Box<[i32]> = Box::new([1, 2, 3]);
*x + y.len()
};
"#,
8,
);
}
#[test]
fn array_and_index() {
check_number(
@ -1504,6 +1777,37 @@ fn consts() {
);
}
#[test]
fn statics() {
check_number(
r#"
//- minicore: cell
use core::cell::Cell;
fn f() -> i32 {
static S: Cell<i32> = Cell::new(10);
S.set(S.get() + 1);
S.get()
}
const GOAL: i32 = f() + f() + f();
"#,
36,
);
}
#[test]
fn extern_weak_statics() {
check_number(
r#"
extern "C" {
#[linkage = "extern_weak"]
static __dso_handle: *mut u8;
}
const GOAL: usize = __dso_handle as usize;
"#,
0,
);
}
#[test]
fn enums() {
check_number(
@ -1531,13 +1835,13 @@ fn enums() {
"#,
0,
);
let r = eval_goal(
let (db, file_id) = TestDB::with_single_file(
r#"
enum E { A = 1, B }
const GOAL: E = E::A;
"#,
)
.unwrap();
);
let r = eval_goal(&db, file_id).unwrap();
assert_eq!(try_const_usize(&r), Some(1));
}
@ -1550,7 +1854,7 @@ fn const_loop() {
const F2: i32 = 2 * F1;
const GOAL: i32 = F3;
"#,
ConstEvalError::MirLowerError(MirLowerError::Loop),
|e| e == ConstEvalError::MirLowerError(MirLowerError::Loop),
);
}
@ -1609,8 +1913,7 @@ fn const_generic_subst_assoc_const_impl() {
#[test]
fn const_trait_assoc() {
// FIXME: this should evaluate to 0
check_fail(
check_number(
r#"
struct U0;
trait ToConst {
@ -1619,9 +1922,35 @@ fn const_trait_assoc() {
impl ToConst for U0 {
const VAL: usize = 0;
}
const GOAL: usize = U0::VAL;
impl ToConst for i32 {
const VAL: usize = 32;
}
const GOAL: usize = U0::VAL + i32::VAL;
"#,
ConstEvalError::MirLowerError(MirLowerError::IncompleteExpr),
32,
);
check_number(
r#"
struct S<T>(*mut T);
trait MySized: Sized {
const SIZE: S<Self> = S(1 as *mut Self);
}
impl MySized for i32 {
const SIZE: S<i32> = S(10 as *mut i32);
}
impl MySized for i64 {
}
const fn f<T: MySized>() -> usize {
T::SIZE.0 as usize
}
const GOAL: usize = f::<i32>() + f::<i64>() * 2;
"#,
12,
);
}
@ -1631,7 +1960,7 @@ fn exec_limits() {
r#"
const GOAL: usize = loop {};
"#,
ConstEvalError::MirEvalError(MirEvalError::ExecutionLimitExceeded),
|e| e == ConstEvalError::MirEvalError(MirEvalError::ExecutionLimitExceeded),
);
check_fail(
r#"
@ -1640,7 +1969,7 @@ fn exec_limits() {
}
const GOAL: i32 = f(0);
"#,
ConstEvalError::MirEvalError(MirEvalError::StackOverflow),
|e| e == ConstEvalError::MirEvalError(MirEvalError::StackOverflow),
);
// Reasonable code should still work
check_number(
@ -1665,7 +1994,7 @@ fn exec_limits() {
#[test]
fn type_error() {
let e = eval_goal(
check_fail(
r#"
const GOAL: u8 = {
let x: u16 = 2;
@ -1673,6 +2002,6 @@ fn type_error() {
y.0
};
"#,
|e| matches!(e, ConstEvalError::MirLowerError(MirLowerError::TypeMismatch(_))),
);
assert!(matches!(e, Err(ConstEvalError::MirLowerError(MirLowerError::TypeMismatch(_)))));
}

View File

@ -67,6 +67,135 @@ fn wrapping_add() {
);
}
#[test]
fn allocator() {
check_number(
r#"
extern "Rust" {
#[rustc_allocator]
fn __rust_alloc(size: usize, align: usize) -> *mut u8;
#[rustc_deallocator]
fn __rust_dealloc(ptr: *mut u8, size: usize, align: usize);
#[rustc_reallocator]
fn __rust_realloc(ptr: *mut u8, old_size: usize, align: usize, new_size: usize) -> *mut u8;
#[rustc_allocator_zeroed]
fn __rust_alloc_zeroed(size: usize, align: usize) -> *mut u8;
}
const GOAL: u8 = unsafe {
let ptr = __rust_alloc(4, 1);
let ptr2 = ((ptr as usize) + 1) as *mut u8;
*ptr = 23;
*ptr2 = 32;
let ptr = __rust_realloc(ptr, 4, 1, 8);
let ptr2 = ((ptr as usize) + 1) as *mut u8;
*ptr + *ptr2
};
"#,
55,
);
}
#[test]
fn overflowing_add() {
check_number(
r#"
extern "rust-intrinsic" {
pub fn add_with_overflow<T>(x: T, y: T) -> (T, bool);
}
const GOAL: u8 = add_with_overflow(1, 2).0;
"#,
3,
);
check_number(
r#"
extern "rust-intrinsic" {
pub fn add_with_overflow<T>(x: T, y: T) -> (T, bool);
}
const GOAL: u8 = add_with_overflow(1, 2).1 as u8;
"#,
0,
);
}
#[test]
fn needs_drop() {
check_number(
r#"
//- minicore: copy, sized
extern "rust-intrinsic" {
pub fn needs_drop<T: ?Sized>() -> bool;
}
struct X;
const GOAL: bool = !needs_drop::<i32>() && needs_drop::<X>();
"#,
1,
);
}
#[test]
fn likely() {
check_number(
r#"
extern "rust-intrinsic" {
pub fn likely(b: bool) -> bool;
pub fn unlikely(b: bool) -> bool;
}
const GOAL: bool = likely(true) && unlikely(true) && !likely(false) && !unlikely(false);
"#,
1,
);
}
#[test]
fn atomic() {
check_number(
r#"
//- minicore: copy
extern "rust-intrinsic" {
pub fn atomic_load_seqcst<T: Copy>(src: *const T) -> T;
pub fn atomic_xchg_acquire<T: Copy>(dst: *mut T, src: T) -> T;
pub fn atomic_cxchg_release_seqcst<T: Copy>(dst: *mut T, old: T, src: T) -> (T, bool);
pub fn atomic_cxchgweak_acquire_acquire<T: Copy>(dst: *mut T, old: T, src: T) -> (T, bool);
pub fn atomic_store_release<T: Copy>(dst: *mut T, val: T);
pub fn atomic_xadd_acqrel<T: Copy>(dst: *mut T, src: T) -> T;
pub fn atomic_xsub_seqcst<T: Copy>(dst: *mut T, src: T) -> T;
pub fn atomic_and_acquire<T: Copy>(dst: *mut T, src: T) -> T;
pub fn atomic_nand_seqcst<T: Copy>(dst: *mut T, src: T) -> T;
pub fn atomic_or_release<T: Copy>(dst: *mut T, src: T) -> T;
pub fn atomic_xor_seqcst<T: Copy>(dst: *mut T, src: T) -> T;
}
fn should_not_reach() {
_ // fails the test if executed
}
const GOAL: i32 = {
let mut x = 5;
atomic_store_release(&mut x, 10);
let mut y = atomic_xchg_acquire(&mut x, 100);
atomic_xadd_acqrel(&mut y, 20);
if (30, true) != atomic_cxchg_release_seqcst(&mut y, 30, 40) {
should_not_reach();
}
if (40, false) != atomic_cxchg_release_seqcst(&mut y, 30, 50) {
should_not_reach();
}
if (40, true) != atomic_cxchgweak_acquire_acquire(&mut y, 40, 30) {
should_not_reach();
}
let mut z = atomic_xsub_seqcst(&mut x, -200);
atomic_xor_seqcst(&mut x, 1024);
atomic_load_seqcst(&x) + z * 3 + atomic_load_seqcst(&y) * 2
};
"#,
660 + 1024,
);
}
#[test]
fn offset() {
check_number(

View File

@ -5,7 +5,7 @@ use std::sync::Arc;
use base_db::{impl_intern_key, salsa, CrateId, Upcast};
use hir_def::{
db::DefDatabase, hir::ExprId, layout::TargetDataLayout, AdtId, BlockId, ConstId, ConstParamId,
db::DefDatabase, hir::ExprId, layout::TargetDataLayout, AdtId, BlockId, ConstParamId,
DefWithBodyId, EnumVariantId, FunctionId, GenericDefId, ImplId, LifetimeParamId, LocalFieldId,
TypeOrConstParamId, VariantId,
};
@ -59,7 +59,7 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
#[salsa::invoke(crate::consteval::const_eval_query)]
#[salsa::cycle(crate::consteval::const_eval_recover)]
fn const_eval(&self, def: ConstId, subst: Substitution) -> Result<Const, ConstEvalError>;
fn const_eval(&self, def: DefWithBodyId, subst: Substitution) -> Result<Const, ConstEvalError>;
#[salsa::invoke(crate::consteval::const_eval_discriminant_variant)]
#[salsa::cycle(crate::consteval::const_eval_discriminant_recover)]

View File

@ -772,7 +772,7 @@ impl<'p> Fields<'p> {
(0..fields_len).map(|idx| LocalFieldId::from_raw(idx.into())).filter_map(move |fid| {
let ty = field_ty[fid].clone().substitute(Interner, substs);
let ty = normalize(cx.db, cx.body, ty);
let ty = normalize(cx.db, cx.db.trait_environment_for_body(cx.body), ty);
let is_visible = matches!(adt, hir_def::AdtId::EnumId(..))
|| visibility[fid].is_visible_from(cx.db.upcast(), cx.module);
let is_uninhabited = cx.is_uninhabited(&ty);

View File

@ -32,7 +32,7 @@ use crate::{
mapping::from_chalk,
mir::pad16,
primitive, to_assoc_type_id,
utils::{self, generics},
utils::{self, generics, ClosureSubst},
AdtId, AliasEq, AliasTy, Binders, CallableDefId, CallableSig, Const, ConstScalar, ConstValue,
DomainGoal, GenericArg, ImplTraitId, Interner, Lifetime, LifetimeData, LifetimeOutlives,
MemoryMap, Mutability, OpaqueTy, ProjectionTy, ProjectionTyExt, QuantifiedWhereClause, Scalar,
@ -419,6 +419,16 @@ impl HirDisplay for Const {
}
ConstValue::Concrete(c) => match &c.interned {
ConstScalar::Bytes(b, m) => render_const_scalar(f, &b, m, &data.ty),
ConstScalar::UnevaluatedConst(c, parameters) => {
let const_data = f.db.const_data(*c);
write!(
f,
"{}",
const_data.name.as_ref().and_then(|x| x.as_str()).unwrap_or("_")
)?;
hir_fmt_generics(f, parameters, Some((*c).into()))?;
Ok(())
}
ConstScalar::Unknown => f.write_char('_'),
},
}
@ -485,7 +495,7 @@ fn render_const_scalar(
chalk_ir::TyKind::Ref(_, _, t) => match t.kind(Interner) {
chalk_ir::TyKind::Str => {
let addr = usize::from_le_bytes(b[0..b.len() / 2].try_into().unwrap());
let bytes = memory_map.0.get(&addr).map(|x| &**x).unwrap_or(&[]);
let bytes = memory_map.memory.get(&addr).map(|x| &**x).unwrap_or(&[]);
let s = std::str::from_utf8(bytes).unwrap_or("<utf8-error>");
write!(f, "{s:?}")
}
@ -574,6 +584,11 @@ fn render_const_scalar(
hir_def::AdtId::EnumId(_) => f.write_str("<enum-not-supported>"),
},
chalk_ir::TyKind::FnDef(..) => ty.hir_fmt(f),
chalk_ir::TyKind::Raw(_, _) => {
let x = u128::from_le_bytes(pad16(b, false));
write!(f, "{:#X} as ", x)?;
ty.hir_fmt(f)
}
_ => f.write_str("<not-supported>"),
}
}
@ -794,82 +809,9 @@ impl HirDisplay for Ty {
}
f.end_location_link();
if parameters.len(Interner) > 0 {
let parameters_to_write = if f.display_target.is_source_code()
|| f.omit_verbose_types()
{
match self
.as_generic_def(db)
.map(|generic_def_id| db.generic_defaults(generic_def_id))
.filter(|defaults| !defaults.is_empty())
{
None => parameters.as_slice(Interner),
Some(default_parameters) => {
fn should_show(
parameter: &GenericArg,
default_parameters: &[Binders<GenericArg>],
i: usize,
parameters: &Substitution,
) -> bool {
if parameter.ty(Interner).map(|x| x.kind(Interner))
== Some(&TyKind::Error)
{
return true;
}
if let Some(ConstValue::Concrete(c)) = parameter
.constant(Interner)
.map(|x| &x.data(Interner).value)
{
if c.interned == ConstScalar::Unknown {
return true;
}
}
let default_parameter = match default_parameters.get(i) {
Some(x) => x,
None => return true,
};
let actual_default =
default_parameter.clone().substitute(Interner, &parameters);
parameter != &actual_default
}
let mut default_from = 0;
for (i, parameter) in parameters.iter(Interner).enumerate() {
if should_show(parameter, &default_parameters, i, parameters) {
default_from = i + 1;
}
}
&parameters.as_slice(Interner)[0..default_from]
}
}
} else {
parameters.as_slice(Interner)
};
if !parameters_to_write.is_empty() {
write!(f, "<")?;
let generic_def = self.as_generic_def(db);
if f.display_target.is_source_code() {
let mut first = true;
for generic_arg in parameters_to_write {
if !first {
write!(f, ", ")?;
}
first = false;
if generic_arg.ty(Interner).map(|ty| ty.kind(Interner))
== Some(&TyKind::Error)
{
write!(f, "_")?;
} else {
generic_arg.hir_fmt(f)?;
}
}
} else {
f.write_joined(parameters_to_write, ", ")?;
}
write!(f, ">")?;
}
}
hir_fmt_generics(f, parameters, generic_def)?;
}
TyKind::AssociatedType(assoc_type_id, parameters) => {
let type_alias = from_assoc_type_id(*assoc_type_id);
@ -983,7 +925,7 @@ impl HirDisplay for Ty {
}
_ => (),
}
let sig = substs.at(Interner, 0).assert_ty_ref(Interner).callable_sig(db);
let sig = ClosureSubst(substs).sig_ty().callable_sig(db);
if let Some(sig) = sig {
let (def, _) = db.lookup_intern_closure((*id).into());
let infer = db.infer(def);
@ -1141,6 +1083,85 @@ impl HirDisplay for Ty {
}
}
fn hir_fmt_generics(
f: &mut HirFormatter<'_>,
parameters: &Substitution,
generic_def: Option<hir_def::GenericDefId>,
) -> Result<(), HirDisplayError> {
let db = f.db;
if parameters.len(Interner) > 0 {
let parameters_to_write = if f.display_target.is_source_code() || f.omit_verbose_types() {
match generic_def
.map(|generic_def_id| db.generic_defaults(generic_def_id))
.filter(|defaults| !defaults.is_empty())
{
None => parameters.as_slice(Interner),
Some(default_parameters) => {
fn should_show(
parameter: &GenericArg,
default_parameters: &[Binders<GenericArg>],
i: usize,
parameters: &Substitution,
) -> bool {
if parameter.ty(Interner).map(|x| x.kind(Interner)) == Some(&TyKind::Error)
{
return true;
}
if let Some(ConstValue::Concrete(c)) =
parameter.constant(Interner).map(|x| &x.data(Interner).value)
{
if c.interned == ConstScalar::Unknown {
return true;
}
}
let default_parameter = match default_parameters.get(i) {
Some(x) => x,
None => return true,
};
let actual_default =
default_parameter.clone().substitute(Interner, &parameters);
parameter != &actual_default
}
let mut default_from = 0;
for (i, parameter) in parameters.iter(Interner).enumerate() {
if should_show(parameter, &default_parameters, i, parameters) {
default_from = i + 1;
}
}
&parameters.as_slice(Interner)[0..default_from]
}
}
} else {
parameters.as_slice(Interner)
};
if !parameters_to_write.is_empty() {
write!(f, "<")?;
if f.display_target.is_source_code() {
let mut first = true;
for generic_arg in parameters_to_write {
if !first {
write!(f, ", ")?;
}
first = false;
if generic_arg.ty(Interner).map(|ty| ty.kind(Interner)) == Some(&TyKind::Error)
{
write!(f, "_")?;
} else {
generic_arg.hir_fmt(f)?;
}
}
} else {
f.write_joined(parameters_to_write, ", ")?;
}
write!(f, ">")?;
}
}
Ok(())
}
impl HirDisplay for CallableSig {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
write!(f, "fn(")?;

View File

@ -44,7 +44,7 @@ use crate::{
db::HirDatabase, fold_tys, infer::coerce::CoerceMany, lower::ImplTraitLoweringMode,
static_lifetime, to_assoc_type_id, traits::FnTrait, AliasEq, AliasTy, ClosureId, DomainGoal,
GenericArg, Goal, ImplTraitId, InEnvironment, Interner, ProjectionTy, RpitId, Substitution,
TraitRef, Ty, TyBuilder, TyExt, TyKind,
TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt, TyKind,
};
// This lint has a false positive here. See the link below for details.
@ -117,11 +117,10 @@ pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<Infer
///
/// This is appropriate to use only after type-check: it assumes
/// that normalization will succeed, for example.
pub(crate) fn normalize(db: &dyn HirDatabase, owner: DefWithBodyId, ty: Ty) -> Ty {
pub(crate) fn normalize(db: &dyn HirDatabase, trait_env: Arc<TraitEnvironment>, ty: Ty) -> Ty {
if !ty.data(Interner).flags.intersects(TypeFlags::HAS_PROJECTION) {
return ty;
}
let trait_env = db.trait_environment_for_body(owner);
let mut table = unify::InferenceTable::new(db, trait_env);
let ty_with_vars = table.normalize_associated_types_in(ty);

View File

@ -2,7 +2,11 @@
use std::{cmp, collections::HashMap, convert::Infallible, mem};
use chalk_ir::{cast::Cast, AliasEq, AliasTy, FnSubst, Mutability, TyKind, WhereClause};
use chalk_ir::{
cast::Cast,
fold::{FallibleTypeFolder, TypeFoldable},
AliasEq, AliasTy, BoundVar, DebruijnIndex, FnSubst, Mutability, TyKind, WhereClause,
};
use hir_def::{
data::adt::VariantData,
hir::{
@ -11,7 +15,7 @@ use hir_def::{
},
lang_item::LangItem,
resolver::{resolver_for_expr, ResolveValueResult, ValueNs},
FieldId, HasModule, VariantId,
DefWithBodyId, FieldId, HasModule, VariantId,
};
use hir_expand::name;
use rustc_hash::FxHashMap;
@ -20,12 +24,13 @@ use stdx::never;
use crate::{
db::HirDatabase,
from_placeholder_idx, make_binders,
mir::{BorrowKind, MirSpan, ProjectionElem},
static_lifetime, to_chalk_trait_id,
traits::FnTrait,
utils::{self, pattern_matching_dereference_count},
Adjust, Adjustment, Canonical, CanonicalVarKinds, ChalkTraitId, ClosureId, DynTy, FnPointer,
FnSig, InEnvironment, Interner, Substitution, Ty, TyBuilder, TyExt,
utils::{self, generics, pattern_matching_dereference_count, Generics},
Adjust, Adjustment, Binders, ChalkTraitId, ClosureId, DynTy, FnPointer, FnSig, Interner,
Substitution, Ty, TyExt,
};
use super::{Expectation, InferenceContext};
@ -117,7 +122,7 @@ impl HirPlace {
fn ty(&self, ctx: &mut InferenceContext<'_>) -> Ty {
let mut ty = ctx.table.resolve_completely(ctx.result[self.local].clone());
for p in &self.projections {
ty = p.projected_ty(ty, ctx.db, |_, _| {
ty = p.projected_ty(ty, ctx.db, |_, _, _| {
unreachable!("Closure field only happens in MIR");
});
}
@ -152,7 +157,7 @@ pub struct CapturedItem {
pub(crate) place: HirPlace,
pub(crate) kind: CaptureKind,
pub(crate) span: MirSpan,
pub(crate) ty: Ty,
pub(crate) ty: Binders<Ty>,
}
impl CapturedItem {
@ -232,6 +237,52 @@ pub(crate) struct CapturedItemWithoutTy {
impl CapturedItemWithoutTy {
fn with_ty(self, ctx: &mut InferenceContext<'_>) -> CapturedItem {
fn replace_placeholder_with_binder(
db: &dyn HirDatabase,
owner: DefWithBodyId,
ty: Ty,
) -> Binders<Ty> {
struct Filler<'a> {
db: &'a dyn HirDatabase,
generics: Generics,
}
impl FallibleTypeFolder<Interner> for Filler<'_> {
type Error = ();
fn as_dyn(&mut self) -> &mut dyn FallibleTypeFolder<Interner, Error = Self::Error> {
self
}
fn interner(&self) -> Interner {
Interner
}
fn try_fold_free_placeholder_ty(
&mut self,
idx: chalk_ir::PlaceholderIndex,
_outer_binder: DebruijnIndex,
) -> std::result::Result<Ty, Self::Error> {
let x = from_placeholder_idx(self.db, idx);
let Some(idx) = self.generics.param_idx(x) else {
return Err(());
};
Ok(TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, idx))
.intern(Interner))
}
}
let g_def = match owner {
DefWithBodyId::FunctionId(f) => Some(f.into()),
DefWithBodyId::StaticId(_) => None,
DefWithBodyId::ConstId(f) => Some(f.into()),
DefWithBodyId::VariantId(f) => Some(f.into()),
};
let Some(generics) = g_def.map(|g_def| generics(db.upcast(), g_def)) else {
return Binders::empty(Interner, ty);
};
let filler = &mut Filler { db, generics };
let result = ty.clone().try_fold_with(filler, DebruijnIndex::INNERMOST).unwrap_or(ty);
make_binders(db, &filler.generics, result)
}
let ty = self.place.ty(ctx).clone();
let ty = match &self.kind {
CaptureKind::ByValue => ty,
@ -243,7 +294,12 @@ impl CapturedItemWithoutTy {
TyKind::Ref(m, static_lifetime(), ty).intern(Interner)
}
};
CapturedItem { place: self.place, kind: self.kind, span: self.span, ty }
CapturedItem {
place: self.place,
kind: self.kind,
span: self.span,
ty: replace_placeholder_with_binder(ctx.db, ctx.owner, ty),
}
}
}
@ -590,17 +646,7 @@ impl InferenceContext<'_> {
// without creating query cycles.
return self.result.closure_info.get(id).map(|x| x.1 == FnTrait::Fn).unwrap_or(true);
}
let crate_id = self.owner.module(self.db.upcast()).krate();
let Some(copy_trait) = self.db.lang_item(crate_id, LangItem::Copy).and_then(|x| x.as_trait()) else {
return false;
};
let trait_ref = TyBuilder::trait_ref(self.db, copy_trait).push(ty).build();
let env = self.db.trait_environment_for_body(self.owner);
let goal = Canonical {
value: InEnvironment::new(&env.env, trait_ref.cast(Interner)),
binders: CanonicalVarKinds::empty(Interner),
};
self.db.trait_solve(crate_id, None, goal).is_some()
ty.is_copy(self.db, self.owner)
}
fn select_from_expr(&mut self, expr: ExprId) {

View File

@ -21,8 +21,10 @@ use crate::{
Adjust, Adjustment, AutoBorrow, InferOk, InferenceContext, OverloadedDeref, PointerCast,
TypeError, TypeMismatch,
},
static_lifetime, Canonical, DomainGoal, FnPointer, FnSig, Guidance, InEnvironment, Interner,
Solution, Substitution, TraitEnvironment, Ty, TyBuilder, TyExt,
static_lifetime,
utils::ClosureSubst,
Canonical, DomainGoal, FnPointer, FnSig, Guidance, InEnvironment, Interner, Solution,
Substitution, TraitEnvironment, Ty, TyBuilder, TyExt,
};
use super::unify::InferenceTable;
@ -670,7 +672,7 @@ impl<'a> InferenceTable<'a> {
}
fn coerce_closure_fn_ty(closure_substs: &Substitution, safety: chalk_ir::Safety) -> Ty {
let closure_sig = closure_substs.at(Interner, 0).assert_ty_ref(Interner).clone();
let closure_sig = ClosureSubst(closure_substs).sig_ty().clone();
match closure_sig.kind(Interner) {
TyKind::Function(fn_ty) => TyKind::Function(FnPointer {
num_binders: fn_ty.num_binders,

View File

@ -282,7 +282,7 @@ impl<'a> InferenceContext<'a> {
let closure_id = self.db.intern_closure((self.owner, tgt_expr)).into();
let closure_ty = TyKind::Closure(
closure_id,
Substitution::from1(Interner, sig_ty.clone()),
TyBuilder::subst_for_closure(self.db, self.owner, sig_ty.clone()),
)
.intern(Interner);
self.deferred_closures.entry(closure_id).or_default();

View File

@ -1,5 +1,7 @@
//! Compute the binary representation of a type
use std::sync::Arc;
use base_db::CrateId;
use chalk_ir::{AdtId, TyKind};
use hir_def::{
@ -13,8 +15,8 @@ use la_arena::{Idx, RawIdx};
use stdx::never;
use crate::{
consteval::try_const_usize, db::HirDatabase, layout::adt::struct_variant_idx, Interner,
Substitution, Ty,
consteval::try_const_usize, db::HirDatabase, infer::normalize, layout::adt::struct_variant_idx,
utils::ClosureSubst, Interner, Substitution, TraitEnvironment, Ty,
};
pub use self::{
@ -80,6 +82,8 @@ pub fn layout_of_ty(db: &dyn HirDatabase, ty: &Ty, krate: CrateId) -> Result<Lay
let Some(target) = db.target_data_layout(krate) else { return Err(LayoutError::TargetLayoutNotAvailable) };
let cx = LayoutCx { krate, target: &target };
let dl = &*cx.current_data_layout();
let trait_env = Arc::new(TraitEnvironment::empty(krate));
let ty = normalize(db, trait_env, ty.clone());
Ok(match ty.kind(Interner) {
TyKind::Adt(AdtId(def), subst) => db.layout_of_adt(*def, subst.clone())?,
TyKind::Scalar(s) => match s {
@ -146,7 +150,7 @@ pub fn layout_of_ty(db: &dyn HirDatabase, ty: &Ty, krate: CrateId) -> Result<Lay
}
TyKind::Array(element, count) => {
let count = try_const_usize(&count).ok_or(LayoutError::UserError(
"mismatched type of const generic parameter".to_string(),
"unevaluated or mistyped const generic parameter".to_string(),
))? as u64;
let element = layout_of_ty(db, element, krate)?;
let size = element.size.checked_mul(count, dl).ok_or(LayoutError::SizeOverflow)?;
@ -252,13 +256,19 @@ pub fn layout_of_ty(db: &dyn HirDatabase, ty: &Ty, krate: CrateId) -> Result<Lay
}
}
}
TyKind::Closure(c, _) => {
TyKind::Closure(c, subst) => {
let (def, _) = db.lookup_intern_closure((*c).into());
let infer = db.infer(def);
let (captures, _) = infer.closure_info(c);
let fields = captures
.iter()
.map(|x| layout_of_ty(db, &x.ty, krate))
.map(|x| {
layout_of_ty(
db,
&x.ty.clone().substitute(Interner, ClosureSubst(subst).parent_subst()),
krate,
)
})
.collect::<Result<Vec<_>, _>>()?;
let fields = fields.iter().collect::<Vec<_>>();
let fields = fields.iter().collect::<Vec<_>>();

View File

@ -218,6 +218,22 @@ fn generic() {
}
}
#[test]
fn associated_types() {
    // Layout of a field whose type is an associated-type projection
    // (`<A as Tr>::Ty`); the projection must be normalized (to `i64` here)
    // before the layout can be computed.
    size_and_align! {
        trait Tr {
            type Ty;
        }
        impl Tr for i32 {
            type Ty = i64;
        }
        struct Foo<A: Tr>(<A as Tr>::Ty);
        struct Goal(Foo<i32>);
    }
}
#[test]
fn return_position_impl_trait() {
size_and_align_expr! {

View File

@ -44,10 +44,10 @@ use chalk_ir::{
NoSolution, TyData,
};
use either::Either;
use hir_def::{hir::ExprId, type_ref::Rawness, TypeOrConstParamId};
use hir_def::{hir::ExprId, type_ref::Rawness, ConstId, TypeOrConstParamId};
use hir_expand::name;
use la_arena::{Arena, Idx};
use mir::MirEvalError;
use mir::{MirEvalError, VTableMap};
use rustc_hash::FxHashSet;
use traits::FnTrait;
use utils::Generics;
@ -151,11 +151,14 @@ pub type WhereClause = chalk_ir::WhereClause<Interner>;
/// the necessary bits of memory of the const eval session to keep the constant
/// meaningful.
#[derive(Debug, Default, Clone, PartialEq, Eq)]
pub struct MemoryMap(pub HashMap<usize, Vec<u8>>);
pub struct MemoryMap {
    /// Raw memory of the const-eval session: start address -> bytes stored there.
    pub memory: HashMap<usize, Vec<u8>>,
    /// Vtable-related data referenced by the constant (see `mir::eval::VTableMap`).
    pub vtable: VTableMap,
}
impl MemoryMap {
fn insert(&mut self, addr: usize, x: Vec<u8>) {
self.0.insert(addr, x);
self.memory.insert(addr, x);
}
/// This function converts each address using a function `f`, which receives the byte interval and assigns a new address
@ -165,7 +168,7 @@ impl MemoryMap {
&self,
mut f: impl FnMut(&[u8]) -> Result<usize, MirEvalError>,
) -> Result<HashMap<usize, usize>, MirEvalError> {
self.0.iter().map(|x| Ok((*x.0, f(x.1)?))).collect()
self.memory.iter().map(|x| Ok((*x.0, f(x.1)?))).collect()
}
}
@ -173,6 +176,9 @@ impl MemoryMap {
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ConstScalar {
Bytes(Vec<u8>, MemoryMap),
// FIXME: this is a hack to get around chalk not being able to represent unevaluatable
// constants
UnevaluatedConst(ConstId, Substitution),
/// Case of an unknown value that rustc might know but we don't
// FIXME: this is a hack to get around chalk not being able to represent unevaluatable
// constants

View File

@ -3,13 +3,14 @@
use std::{fmt::Display, iter};
use crate::{
db::HirDatabase, display::HirDisplay, infer::PointerCast, lang_items::is_box, ClosureId, Const,
ConstScalar, InferenceResult, Interner, MemoryMap, Substitution, Ty, TyKind,
db::HirDatabase, display::HirDisplay, infer::PointerCast, lang_items::is_box, mapping::ToChalk,
CallableDefId, ClosureId, Const, ConstScalar, InferenceResult, Interner, MemoryMap,
Substitution, Ty, TyKind,
};
use chalk_ir::Mutability;
use hir_def::{
hir::{BindingId, Expr, ExprId, Ordering, PatId},
DefWithBodyId, FieldId, UnionId, VariantId,
DefWithBodyId, FieldId, StaticId, UnionId, VariantId,
};
use la_arena::{Arena, ArenaMap, Idx, RawIdx};
@ -19,7 +20,7 @@ mod borrowck;
mod pretty;
pub use borrowck::{borrowck_query, BorrowckResult, MutabilityReason};
pub use eval::{interpret_mir, pad16, Evaluator, MirEvalError};
pub use eval::{interpret_mir, pad16, Evaluator, MirEvalError, VTableMap};
pub use lower::{
lower_to_mir, mir_body_for_closure_query, mir_body_query, mir_body_recover, MirLowerError,
};
@ -76,6 +77,9 @@ pub enum Operand {
Move(Place),
/// Constants are already semantically values, and remain unchanged.
Constant(Const),
/// NON STANDARD: This kind of operand returns an immutable reference to that static memory. Rustc
/// handles it with the `Constant` variant somehow.
Static(StaticId),
}
impl Operand {
@ -90,6 +94,17 @@ impl Operand {
    /// A constant of the zero-sized type `ty`: no payload bytes, only the type.
    fn const_zst(ty: Ty) -> Operand {
        Self::from_bytes(vec![], ty)
    }
fn from_fn(
db: &dyn HirDatabase,
func_id: hir_def::FunctionId,
generic_args: Substitution,
) -> Operand {
let ty =
chalk_ir::TyKind::FnDef(CallableDefId::FunctionId(func_id).to_chalk(db), generic_args)
.intern(Interner);
Operand::from_bytes(vec![], ty)
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
@ -110,7 +125,7 @@ impl<V, T> ProjectionElem<V, T> {
&self,
base: Ty,
db: &dyn HirDatabase,
closure_field: impl FnOnce(ClosureId, usize) -> Ty,
closure_field: impl FnOnce(ClosureId, &Substitution, usize) -> Ty,
) -> Ty {
match self {
ProjectionElem::Deref => match &base.data(Interner).kind {
@ -142,7 +157,7 @@ impl<V, T> ProjectionElem<V, T> {
never!("Out of bound tuple field");
TyKind::Error.intern(Interner)
}),
TyKind::Closure(id, _) => closure_field(*id, *f),
TyKind::Closure(id, subst) => closure_field(*id, subst, *f),
_ => {
never!("Only tuple or closure has tuple or closure field");
return TyKind::Error.intern(Interner);
@ -261,7 +276,13 @@ impl SwitchTargets {
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum Terminator {
pub struct Terminator {
    /// Span of the source construct this terminator was lowered from, used for
    /// diagnostics.
    span: MirSpan,
    /// What the terminator actually does; see `TerminatorKind`.
    kind: TerminatorKind,
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum TerminatorKind {
/// Block has one successor; we continue execution there.
Goto { target: BasicBlockId },
@ -836,6 +857,9 @@ pub enum Rvalue {
/// affects alias analysis.
ShallowInitBox(Operand, Ty),
/// NON STANDARD: allocates memory with the type's layout, and shallow init the box with the resulting pointer.
ShallowInitBoxWithAlloc(Ty),
/// A CopyForDeref is equivalent to a read from a place at the
/// codegen level, but is treated specially by drop elaboration. When such a read happens, it
/// is guaranteed (via nature of the mir_opt `Derefer` in rustc_mir_transform/src/deref_separator)
@ -918,7 +942,7 @@ impl MirBody {
Operand::Copy(p) | Operand::Move(p) => {
f(p);
}
Operand::Constant(_) => (),
Operand::Constant(_) | Operand::Static(_) => (),
}
}
for (_, block) in self.basic_blocks.iter_mut() {
@ -927,6 +951,7 @@ impl MirBody {
StatementKind::Assign(p, r) => {
f(p);
match r {
Rvalue::ShallowInitBoxWithAlloc(_) => (),
Rvalue::ShallowInitBox(o, _)
| Rvalue::UnaryOp(_, o)
| Rvalue::Cast(_, o, _)
@ -954,32 +979,32 @@ impl MirBody {
}
}
match &mut block.terminator {
Some(x) => match x {
Terminator::SwitchInt { discr, .. } => for_operand(discr, &mut f),
Terminator::FalseEdge { .. }
| Terminator::FalseUnwind { .. }
| Terminator::Goto { .. }
| Terminator::Resume
| Terminator::GeneratorDrop
| Terminator::Abort
| Terminator::Return
| Terminator::Unreachable => (),
Terminator::Drop { place, .. } => {
Some(x) => match &mut x.kind {
TerminatorKind::SwitchInt { discr, .. } => for_operand(discr, &mut f),
TerminatorKind::FalseEdge { .. }
| TerminatorKind::FalseUnwind { .. }
| TerminatorKind::Goto { .. }
| TerminatorKind::Resume
| TerminatorKind::GeneratorDrop
| TerminatorKind::Abort
| TerminatorKind::Return
| TerminatorKind::Unreachable => (),
TerminatorKind::Drop { place, .. } => {
f(place);
}
Terminator::DropAndReplace { place, value, .. } => {
TerminatorKind::DropAndReplace { place, value, .. } => {
f(place);
for_operand(value, &mut f);
}
Terminator::Call { func, args, destination, .. } => {
TerminatorKind::Call { func, args, destination, .. } => {
for_operand(func, &mut f);
args.iter_mut().for_each(|x| for_operand(x, &mut f));
f(destination);
}
Terminator::Assert { cond, .. } => {
TerminatorKind::Assert { cond, .. } => {
for_operand(cond, &mut f);
}
Terminator::Yield { value, resume_arg, .. } => {
TerminatorKind::Yield { value, resume_arg, .. } => {
for_operand(value, &mut f);
f(resume_arg);
}

View File

@ -13,7 +13,7 @@ use crate::{db::HirDatabase, ClosureId};
use super::{
BasicBlockId, BorrowKind, LocalId, MirBody, MirLowerError, MirSpan, Place, ProjectionElem,
Rvalue, StatementKind, Terminator,
Rvalue, StatementKind, TerminatorKind,
};
#[derive(Debug, Clone, PartialEq, Eq)]
@ -141,26 +141,26 @@ fn ever_initialized_map(body: &MirBody) -> ArenaMap<BasicBlockId, ArenaMap<Local
never!("Terminator should be none only in construction");
return;
};
let targets = match terminator {
Terminator::Goto { target } => vec![*target],
Terminator::SwitchInt { targets, .. } => targets.all_targets().to_vec(),
Terminator::Resume
| Terminator::Abort
| Terminator::Return
| Terminator::Unreachable => vec![],
Terminator::Call { target, cleanup, destination, .. } => {
let targets = match &terminator.kind {
TerminatorKind::Goto { target } => vec![*target],
TerminatorKind::SwitchInt { targets, .. } => targets.all_targets().to_vec(),
TerminatorKind::Resume
| TerminatorKind::Abort
| TerminatorKind::Return
| TerminatorKind::Unreachable => vec![],
TerminatorKind::Call { target, cleanup, destination, .. } => {
if destination.projection.len() == 0 && destination.local == l {
is_ever_initialized = true;
}
target.into_iter().chain(cleanup.into_iter()).copied().collect()
}
Terminator::Drop { .. }
| Terminator::DropAndReplace { .. }
| Terminator::Assert { .. }
| Terminator::Yield { .. }
| Terminator::GeneratorDrop
| Terminator::FalseEdge { .. }
| Terminator::FalseUnwind { .. } => {
TerminatorKind::Drop { .. }
| TerminatorKind::DropAndReplace { .. }
| TerminatorKind::Assert { .. }
| TerminatorKind::Yield { .. }
| TerminatorKind::GeneratorDrop
| TerminatorKind::FalseEdge { .. }
| TerminatorKind::FalseUnwind { .. } => {
never!("We don't emit these MIR terminators yet");
vec![]
}
@ -228,21 +228,21 @@ fn mutability_of_locals(body: &MirBody) -> ArenaMap<LocalId, MutabilityReason> {
never!("Terminator should be none only in construction");
continue;
};
match terminator {
Terminator::Goto { .. }
| Terminator::Resume
| Terminator::Abort
| Terminator::Return
| Terminator::Unreachable
| Terminator::FalseEdge { .. }
| Terminator::FalseUnwind { .. }
| Terminator::GeneratorDrop
| Terminator::SwitchInt { .. }
| Terminator::Drop { .. }
| Terminator::DropAndReplace { .. }
| Terminator::Assert { .. }
| Terminator::Yield { .. } => (),
Terminator::Call { destination, .. } => {
match &terminator.kind {
TerminatorKind::Goto { .. }
| TerminatorKind::Resume
| TerminatorKind::Abort
| TerminatorKind::Return
| TerminatorKind::Unreachable
| TerminatorKind::FalseEdge { .. }
| TerminatorKind::FalseUnwind { .. }
| TerminatorKind::GeneratorDrop
| TerminatorKind::SwitchInt { .. }
| TerminatorKind::Drop { .. }
| TerminatorKind::DropAndReplace { .. }
| TerminatorKind::Assert { .. }
| TerminatorKind::Yield { .. } => (),
TerminatorKind::Call { destination, .. } => {
if destination.projection.len() == 0 {
if ever_init_map.get(destination.local).copied().unwrap_or_default() {
push_mut_span(destination.local, MirSpan::Unknown);

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,396 @@
//! Interpret intrinsics, lang items and `extern "C"` well-known functions whose
//! implementation is not available.
use super::*;
// Decodes a little-endian `$ty` from a byte slice, early-returning
// `MirEvalError::TypeError` from the enclosing function when the slice length
// does not match the size of `$ty`.
macro_rules! from_bytes {
    ($ty:tt, $value:expr) => {
        ($ty::from_le_bytes(match ($value).try_into() {
            Ok(x) => x,
            Err(_) => return Err(MirEvalError::TypeError("mismatched size")),
        }))
    };
}
// Early-returns `MirEvalError::NotSupported` with a formatted message from the
// enclosing function.
macro_rules! not_supported {
    ($x: expr) => {
        return Err(MirEvalError::NotSupported(format!($x)))
    };
}
impl Evaluator<'_> {
    /// Checks whether `def` must be emulated by the evaluator instead of running
    /// its MIR body — an intrinsic, a `rustc_*` allocator shim, or one of a few
    /// special lang items — and executes it if so.
    ///
    /// Returns `Ok(true)` when the call was handled here (the result, if any,
    /// has been written to `destination`), `Ok(false)` when the caller should
    /// execute the function normally.
    pub(super) fn detect_and_exec_special_function(
        &mut self,
        def: FunctionId,
        args: &[IntervalAndTy],
        generic_args: &Substitution,
        locals: &Locals<'_>,
        destination: Interval,
        span: MirSpan,
    ) -> Result<bool> {
        let function_data = self.db.function_data(def);
        // An intrinsic either carries the "rust-intrinsic" ABI directly or is
        // declared inside an `extern "rust-intrinsic"` block.
        let is_intrinsic = match &function_data.abi {
            Some(abi) => *abi == Interned::new_str("rust-intrinsic"),
            None => match def.lookup(self.db.upcast()).container {
                hir_def::ItemContainerId::ExternBlockId(block) => {
                    let id = block.lookup(self.db.upcast()).id;
                    id.item_tree(self.db.upcast())[id.value].abi.as_deref()
                        == Some("rust-intrinsic")
                }
                _ => false,
            },
        };
        if is_intrinsic {
            self.exec_intrinsic(
                function_data.name.as_text().unwrap_or_default().as_str(),
                args,
                generic_args,
                destination,
                &locals,
                span,
            )?;
            return Ok(true);
        }
        // Allocator shims are recognized by their `rustc_*` attributes rather
        // than by name or ABI.
        let alloc_fn = function_data
            .attrs
            .iter()
            .filter_map(|x| x.path().as_ident())
            .filter_map(|x| x.as_str())
            .find(|x| {
                [
                    "rustc_allocator",
                    "rustc_deallocator",
                    "rustc_reallocator",
                    "rustc_allocator_zeroed",
                ]
                .contains(x)
            });
        if let Some(alloc_fn) = alloc_fn {
            self.exec_alloc_fn(alloc_fn, args, destination)?;
            return Ok(true);
        }
        // Lang items handled specially get their arguments materialized as
        // owned byte vectors first.
        if let Some(x) = self.detect_lang_function(def) {
            let arg_bytes =
                args.iter().map(|x| Ok(x.get(&self)?.to_owned())).collect::<Result<Vec<_>>>()?;
            let result = self.exec_lang_item(x, &arg_bytes)?;
            destination.write_from_bytes(self, &result)?;
            return Ok(true);
        }
        Ok(false)
    }
fn exec_alloc_fn(
&mut self,
alloc_fn: &str,
args: &[IntervalAndTy],
destination: Interval,
) -> Result<()> {
match alloc_fn {
"rustc_allocator_zeroed" | "rustc_allocator" => {
let [size, align] = args else {
return Err(MirEvalError::TypeError("rustc_allocator args are not provided"));
};
let size = from_bytes!(usize, size.get(self)?);
let align = from_bytes!(usize, align.get(self)?);
let result = self.heap_allocate(size, align);
destination.write_from_bytes(self, &result.to_bytes())?;
}
"rustc_deallocator" => { /* no-op for now */ }
"rustc_reallocator" => {
let [ptr, old_size, align, new_size] = args else {
return Err(MirEvalError::TypeError("rustc_allocator args are not provided"));
};
let ptr = Address::from_bytes(ptr.get(self)?)?;
let old_size = from_bytes!(usize, old_size.get(self)?);
let new_size = from_bytes!(usize, new_size.get(self)?);
let align = from_bytes!(usize, align.get(self)?);
let result = self.heap_allocate(new_size, align);
Interval { addr: result, size: old_size }
.write_from_interval(self, Interval { addr: ptr, size: old_size })?;
destination.write_from_bytes(self, &result.to_bytes())?;
}
_ => not_supported!("unknown alloc function"),
}
Ok(())
}
fn detect_lang_function(&self, def: FunctionId) -> Option<LangItem> {
use LangItem::*;
let candidate = lang_attr(self.db.upcast(), def)?;
// We want to execute these functions with special logic
if [PanicFmt, BeginPanic, SliceLen].contains(&candidate) {
return Some(candidate);
}
None
}
fn exec_lang_item(&self, x: LangItem, args: &[Vec<u8>]) -> Result<Vec<u8>> {
use LangItem::*;
let mut args = args.iter();
match x {
// FIXME: we want to find the panic message from arguments, but it wouldn't work
// currently even if we do that, since macro expansion of panic related macros
// is dummy.
PanicFmt | BeginPanic => Err(MirEvalError::Panic("<format-args>".to_string())),
SliceLen => {
let arg = args
.next()
.ok_or(MirEvalError::TypeError("argument of <[T]>::len() is not provided"))?;
let ptr_size = arg.len() / 2;
Ok(arg[ptr_size..].into())
}
x => not_supported!("Executing lang item {x:?}"),
}
}
fn exec_intrinsic(
&mut self,
as_str: &str,
args: &[IntervalAndTy],
generic_args: &Substitution,
destination: Interval,
locals: &Locals<'_>,
span: MirSpan,
) -> Result<()> {
// We are a single threaded runtime with no UB checking and no optimization, so
// we can implement these as normal functions.
if let Some(name) = as_str.strip_prefix("atomic_") {
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else {
return Err(MirEvalError::TypeError("atomic intrinsic generic arg is not provided"));
};
let Some(arg0) = args.get(0) else {
return Err(MirEvalError::TypeError("atomic intrinsic arg0 is not provided"));
};
let arg0_addr = Address::from_bytes(arg0.get(self)?)?;
let arg0_interval = Interval::new(
arg0_addr,
self.size_of_sized(ty, locals, "atomic intrinsic type arg")?,
);
if name.starts_with("load_") {
return destination.write_from_interval(self, arg0_interval);
}
let Some(arg1) = args.get(1) else {
return Err(MirEvalError::TypeError("atomic intrinsic arg1 is not provided"));
};
if name.starts_with("store_") {
return arg0_interval.write_from_interval(self, arg1.interval);
}
if name.starts_with("xchg_") {
destination.write_from_interval(self, arg0_interval)?;
return arg0_interval.write_from_interval(self, arg1.interval);
}
if name.starts_with("xadd_") {
destination.write_from_interval(self, arg0_interval)?;
let lhs = u128::from_le_bytes(pad16(arg0_interval.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(arg1.get(self)?, false));
let ans = lhs.wrapping_add(rhs);
return arg0_interval
.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size]);
}
if name.starts_with("xsub_") {
destination.write_from_interval(self, arg0_interval)?;
let lhs = u128::from_le_bytes(pad16(arg0_interval.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(arg1.get(self)?, false));
let ans = lhs.wrapping_sub(rhs);
return arg0_interval
.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size]);
}
if name.starts_with("and_") {
destination.write_from_interval(self, arg0_interval)?;
let lhs = u128::from_le_bytes(pad16(arg0_interval.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(arg1.get(self)?, false));
let ans = lhs & rhs;
return arg0_interval
.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size]);
}
if name.starts_with("or_") {
destination.write_from_interval(self, arg0_interval)?;
let lhs = u128::from_le_bytes(pad16(arg0_interval.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(arg1.get(self)?, false));
let ans = lhs | rhs;
return arg0_interval
.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size]);
}
if name.starts_with("xor_") {
destination.write_from_interval(self, arg0_interval)?;
let lhs = u128::from_le_bytes(pad16(arg0_interval.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(arg1.get(self)?, false));
let ans = lhs ^ rhs;
return arg0_interval
.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size]);
}
if name.starts_with("nand_") {
destination.write_from_interval(self, arg0_interval)?;
let lhs = u128::from_le_bytes(pad16(arg0_interval.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(arg1.get(self)?, false));
let ans = !(lhs & rhs);
return arg0_interval
.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size]);
}
let Some(arg2) = args.get(2) else {
return Err(MirEvalError::TypeError("atomic intrinsic arg2 is not provided"));
};
if name.starts_with("cxchg_") || name.starts_with("cxchgweak_") {
let dest = if arg1.get(self)? == arg0_interval.get(self)? {
arg0_interval.write_from_interval(self, arg2.interval)?;
(arg1.interval, true)
} else {
(arg0_interval, false)
};
let result_ty = TyKind::Tuple(
2,
Substitution::from_iter(Interner, [ty.clone(), TyBuilder::bool()]),
)
.intern(Interner);
let layout = self.layout(&result_ty)?;
let result = self.make_by_layout(
layout.size.bytes_usize(),
&layout,
None,
[
IntervalOrOwned::Borrowed(dest.0),
IntervalOrOwned::Owned(vec![u8::from(dest.1)]),
]
.into_iter(),
)?;
return destination.write_from_bytes(self, &result);
}
not_supported!("unknown atomic intrinsic {name}");
}
match as_str {
"size_of" => {
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else {
return Err(MirEvalError::TypeError("size_of generic arg is not provided"));
};
let size = self.size_of_sized(ty, locals, "size_of arg")?;
destination.write_from_bytes(self, &size.to_le_bytes()[0..destination.size])
}
"min_align_of" | "pref_align_of" => {
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else {
return Err(MirEvalError::TypeError("align_of generic arg is not provided"));
};
let align = self.layout_filled(ty, locals)?.align.abi.bytes();
destination.write_from_bytes(self, &align.to_le_bytes()[0..destination.size])
}
"needs_drop" => {
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else {
return Err(MirEvalError::TypeError("size_of generic arg is not provided"));
};
let result = !ty.clone().is_copy(self.db, locals.body.owner);
destination.write_from_bytes(self, &[u8::from(result)])
}
"ptr_guaranteed_cmp" => {
// FIXME: this is wrong for const eval, it should return 2 in some
// cases.
let [lhs, rhs] = args else {
return Err(MirEvalError::TypeError("wrapping_add args are not provided"));
};
let ans = lhs.get(self)? == rhs.get(self)?;
destination.write_from_bytes(self, &[u8::from(ans)])
}
"wrapping_add" => {
let [lhs, rhs] = args else {
return Err(MirEvalError::TypeError("wrapping_add args are not provided"));
};
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
let ans = lhs.wrapping_add(rhs);
destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
}
"add_with_overflow" => {
let [lhs, rhs] = args else {
return Err(MirEvalError::TypeError("const_eval_select args are not provided"));
};
let result_ty = TyKind::Tuple(
2,
Substitution::from_iter(Interner, [lhs.ty.clone(), TyBuilder::bool()]),
)
.intern(Interner);
let op_size =
self.size_of_sized(&lhs.ty, locals, "operand of add_with_overflow")?;
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
let ans = lhs.wrapping_add(rhs);
let is_overflow = false;
let is_overflow = vec![u8::from(is_overflow)];
let layout = self.layout(&result_ty)?;
let result = self.make_by_layout(
layout.size.bytes_usize(),
&layout,
None,
[ans.to_le_bytes()[0..op_size].to_vec(), is_overflow]
.into_iter()
.map(IntervalOrOwned::Owned),
)?;
destination.write_from_bytes(self, &result)
}
"copy" | "copy_nonoverlapping" => {
let [src, dst, offset] = args else {
return Err(MirEvalError::TypeError("copy_nonoverlapping args are not provided"));
};
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else {
return Err(MirEvalError::TypeError("copy_nonoverlapping generic arg is not provided"));
};
let src = Address::from_bytes(src.get(self)?)?;
let dst = Address::from_bytes(dst.get(self)?)?;
let offset = from_bytes!(usize, offset.get(self)?);
let size = self.size_of_sized(ty, locals, "copy_nonoverlapping ptr type")?;
let size = offset * size;
let src = Interval { addr: src, size };
let dst = Interval { addr: dst, size };
dst.write_from_interval(self, src)
}
"offset" | "arith_offset" => {
let [ptr, offset] = args else {
return Err(MirEvalError::TypeError("offset args are not provided"));
};
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else {
return Err(MirEvalError::TypeError("offset generic arg is not provided"));
};
let ptr = u128::from_le_bytes(pad16(ptr.get(self)?, false));
let offset = u128::from_le_bytes(pad16(offset.get(self)?, false));
let size = self.size_of_sized(ty, locals, "offset ptr type")? as u128;
let ans = ptr + offset * size;
destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
}
"assert_inhabited" | "assert_zero_valid" | "assert_uninit_valid" | "assume" => {
// FIXME: We should actually implement these checks
Ok(())
}
"forget" => {
// We don't call any drop glue yet, so there is nothing here
Ok(())
}
"transmute" => {
let [arg] = args else {
return Err(MirEvalError::TypeError("trasmute arg is not provided"));
};
destination.write_from_interval(self, arg.interval)
}
"likely" | "unlikely" => {
let [arg] = args else {
return Err(MirEvalError::TypeError("likely arg is not provided"));
};
destination.write_from_interval(self, arg.interval)
}
"const_eval_select" => {
let [tuple, const_fn, _] = args else {
return Err(MirEvalError::TypeError("const_eval_select args are not provided"));
};
let mut args = vec![const_fn.clone()];
let TyKind::Tuple(_, fields) = tuple.ty.kind(Interner) else {
return Err(MirEvalError::TypeError("const_eval_select arg[0] is not a tuple"));
};
let layout = self.layout(&tuple.ty)?;
for (i, field) in fields.iter(Interner).enumerate() {
let field = field.assert_ty_ref(Interner).clone();
let offset = layout.fields.offset(i).bytes_usize();
let addr = tuple.interval.addr.offset(offset);
args.push(IntervalAndTy::new(addr, field, self, locals)?);
}
self.exec_fn_trait(&args, destination, locals, span)
}
_ => not_supported!("unknown intrinsic {as_str}"),
}
}
}

View File

@ -1,14 +1,15 @@
//! This module generates a polymorphic MIR from a hir body
use std::{iter, mem, sync::Arc};
use std::{fmt::Write, iter, mem, sync::Arc};
use base_db::FileId;
use chalk_ir::{BoundVar, ConstData, DebruijnIndex, TyKind};
use hir_def::{
body::Body,
data::adt::{StructKind, VariantData},
hir::{
Array, BindingAnnotation, BindingId, ExprId, LabelId, Literal, MatchArm, Pat, PatId,
RecordFieldPat, RecordLitField,
ArithOp, Array, BinaryOp, BindingAnnotation, BindingId, ExprId, LabelId, Literal, MatchArm,
Pat, PatId, RecordFieldPat, RecordLitField,
},
lang_item::{LangItem, LangItemTarget},
path::Path,
@ -18,6 +19,7 @@ use hir_def::{
use hir_expand::name::Name;
use la_arena::ArenaMap;
use rustc_hash::FxHashMap;
use syntax::TextRange;
use crate::{
consteval::ConstEvalError,
@ -27,8 +29,9 @@ use crate::{
inhabitedness::is_ty_uninhabited_from,
layout::{layout_of_ty, LayoutError},
mapping::ToChalk,
method_resolution::lookup_impl_const,
static_lifetime,
utils::generics,
utils::{generics, ClosureSubst},
Adjust, Adjustment, AutoBorrow, CallableDefId, TyBuilder, TyExt,
};
@ -62,14 +65,14 @@ struct MirLowerCtx<'a> {
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum MirLowerError {
ConstEvalError(Box<ConstEvalError>),
ConstEvalError(String, Box<ConstEvalError>),
LayoutError(LayoutError),
IncompleteExpr,
/// Trying to lower a trait function, instead of an implementation
TraitFunctionDefinition(TraitId, Name),
UnresolvedName(String),
RecordLiteralWithoutPath,
UnresolvedMethod,
UnresolvedMethod(String),
UnresolvedField,
MissingFunctionDefinition,
TypeMismatch(TypeMismatch),
@ -88,6 +91,46 @@ pub enum MirLowerError {
UnaccessableLocal,
}
impl MirLowerError {
    /// Renders this error — and, for const-eval failures, its nested error
    /// chain — into `f` for human consumption. `span_formatter` turns a
    /// file/range pair into a displayable location string for nested
    /// evaluation errors.
    pub fn pretty_print(
        &self,
        f: &mut String,
        db: &dyn HirDatabase,
        span_formatter: impl Fn(FileId, TextRange) -> String,
    ) -> std::result::Result<(), std::fmt::Error> {
        match self {
            // Constant evaluation failures wrap another error; name the
            // constant, then recurse into the underlying lowering or eval
            // error.
            MirLowerError::ConstEvalError(name, e) => {
                writeln!(f, "In evaluating constant {name}")?;
                match &**e {
                    ConstEvalError::MirLowerError(e) => e.pretty_print(f, db, span_formatter)?,
                    ConstEvalError::MirEvalError(e) => e.pretty_print(f, db, span_formatter)?,
                }
            }
            // All remaining variants carry no nested structure; fall back to
            // their `Debug` form. Listed exhaustively (no `_` arm) so adding a
            // variant forces a decision here.
            MirLowerError::LayoutError(_)
            | MirLowerError::IncompleteExpr
            | MirLowerError::UnaccessableLocal
            | MirLowerError::TraitFunctionDefinition(_, _)
            | MirLowerError::UnresolvedName(_)
            | MirLowerError::RecordLiteralWithoutPath
            | MirLowerError::UnresolvedMethod(_)
            | MirLowerError::UnresolvedField
            | MirLowerError::MissingFunctionDefinition
            | MirLowerError::TypeMismatch(_)
            | MirLowerError::TypeError(_)
            | MirLowerError::NotSupported(_)
            | MirLowerError::ContinueWithoutLoop
            | MirLowerError::BreakWithoutLoop
            | MirLowerError::Loop
            | MirLowerError::ImplementationError(_)
            | MirLowerError::LangItemNotFound(_)
            | MirLowerError::MutatingRvalue
            | MirLowerError::UnresolvedLabel
            | MirLowerError::UnresolvedUpvar(_) => writeln!(f, "{:?}", self)?,
        }
        Ok(())
    }
}
macro_rules! not_supported {
($x: expr) => {
return Err(MirLowerError::NotSupported(format!($x)))
@ -101,15 +144,6 @@ macro_rules! implementation_error {
}};
}
impl From<ConstEvalError> for MirLowerError {
fn from(value: ConstEvalError) -> Self {
match value {
ConstEvalError::MirLowerError(e) => e,
_ => MirLowerError::ConstEvalError(Box::new(value)),
}
}
}
impl From<LayoutError> for MirLowerError {
fn from(value: LayoutError) -> Self {
MirLowerError::LayoutError(value)
@ -177,7 +211,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
if !self.has_adjustments(expr_id) {
match &self.body.exprs[expr_id] {
Expr::Literal(l) => {
let ty = self.expr_ty(expr_id);
let ty = self.expr_ty_without_adjust(expr_id);
return Ok(Some((self.lower_literal_to_operand(ty, l)?, current)));
}
_ => (),
@ -282,7 +316,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
{
match assoc {
hir_def::AssocItemId::ConstId(c) => {
self.lower_const(c, current, place, subst, expr_id.into())?;
self.lower_const(c, current, place, subst, expr_id.into(), self.expr_ty_without_adjust(expr_id))?;
return Ok(Some(current))
},
hir_def::AssocItemId::FunctionId(_) => {
@ -309,17 +343,20 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
};
match pr {
ValueNs::LocalBinding(pat_id) => {
ValueNs::LocalBinding(_) | ValueNs::StaticId(_) => {
let Some((temp, current)) = self.lower_expr_as_place_without_adjust(current, expr_id, false)? else {
return Ok(None);
};
self.push_assignment(
current,
place,
Operand::Copy(self.binding_local(pat_id)?.into()).into(),
Operand::Copy(temp).into(),
expr_id.into(),
);
Ok(Some(current))
}
ValueNs::ConstId(const_id) => {
self.lower_const(const_id, current, place, Substitution::empty(Interner), expr_id.into())?;
self.lower_const(const_id, current, place, Substitution::empty(Interner), expr_id.into(), self.expr_ty_without_adjust(expr_id))?;
Ok(Some(current))
}
ValueNs::EnumVariantId(variant_id) => {
@ -343,7 +380,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
not_supported!("owner without generic def id");
};
let gen = generics(self.db.upcast(), def);
let ty = self.expr_ty(expr_id);
let ty = self.expr_ty_without_adjust(expr_id);
self.push_assignment(
current,
place,
@ -388,12 +425,13 @@ impl<'ctx> MirLowerCtx<'ctx> {
};
self.set_terminator(
current,
Terminator::SwitchInt {
TerminatorKind::SwitchInt {
discr,
targets: SwitchTargets::static_if(1, start_of_then, start_of_else),
},
expr_id.into(),
);
Ok(self.merge_blocks(end_of_then, end_of_else))
Ok(self.merge_blocks(end_of_then, end_of_else, expr_id.into()))
}
Expr::Let { pat, expr } => {
let Some((cond_place, current)) = self.lower_expr_as_place(current, *expr, true)? else {
@ -423,32 +461,32 @@ impl<'ctx> MirLowerCtx<'ctx> {
MirSpan::Unknown,
)?;
}
Ok(self.merge_blocks(Some(then_target), else_target))
Ok(self.merge_blocks(Some(then_target), else_target, expr_id.into()))
}
Expr::Unsafe { id: _, statements, tail } => {
self.lower_block_to_place(statements, current, *tail, place)
self.lower_block_to_place(statements, current, *tail, place, expr_id.into())
}
Expr::Block { id: _, statements, tail, label } => {
if let Some(label) = label {
self.lower_loop(current, place.clone(), Some(*label), |this, begin| {
if let Some(block) = this.lower_block_to_place(statements, begin, *tail, place)? {
self.lower_loop(current, place.clone(), Some(*label), expr_id.into(), |this, begin| {
if let Some(block) = this.lower_block_to_place(statements, begin, *tail, place, expr_id.into())? {
let end = this.current_loop_end()?;
this.set_goto(block, end);
this.set_goto(block, end, expr_id.into());
}
Ok(())
})
} else {
self.lower_block_to_place(statements, current, *tail, place)
self.lower_block_to_place(statements, current, *tail, place, expr_id.into())
}
}
Expr::Loop { body, label } => self.lower_loop(current, place, *label, |this, begin| {
Expr::Loop { body, label } => self.lower_loop(current, place, *label, expr_id.into(), |this, begin| {
if let Some((_, block)) = this.lower_expr_as_place(begin, *body, true)? {
this.set_goto(block, begin);
this.set_goto(block, begin, expr_id.into());
}
Ok(())
}),
Expr::While { condition, body, label } => {
self.lower_loop(current, place, *label, |this, begin| {
self.lower_loop(current, place, *label, expr_id.into(),|this, begin| {
let Some((discr, to_switch)) = this.lower_expr_to_some_operand(*condition, begin)? else {
return Ok(());
};
@ -456,13 +494,14 @@ impl<'ctx> MirLowerCtx<'ctx> {
let after_cond = this.new_basic_block();
this.set_terminator(
to_switch,
Terminator::SwitchInt {
TerminatorKind::SwitchInt {
discr,
targets: SwitchTargets::static_if(1, after_cond, end),
},
expr_id.into(),
);
if let Some((_, block)) = this.lower_expr_as_place(after_cond, *body, true)? {
this.set_goto(block, begin);
this.set_goto(block, begin, expr_id.into());
}
Ok(())
})
@ -478,12 +517,12 @@ impl<'ctx> MirLowerCtx<'ctx> {
let into_iter_fn_op = Operand::const_zst(
TyKind::FnDef(
self.db.intern_callable_def(CallableDefId::FunctionId(into_iter_fn)).into(),
Substitution::from1(Interner, self.expr_ty(iterable))
Substitution::from1(Interner, self.expr_ty_without_adjust(iterable))
).intern(Interner));
let iter_next_fn_op = Operand::const_zst(
TyKind::FnDef(
self.db.intern_callable_def(CallableDefId::FunctionId(iter_next_fn)).into(),
Substitution::from1(Interner, self.expr_ty(iterable))
Substitution::from1(Interner, self.expr_ty_without_adjust(iterable))
).intern(Interner));
let &Some(iterator_ty) = &self.infer.type_of_for_iterator.get(&expr_id) else {
return Err(MirLowerError::TypeError("unknown for loop iterator type"));
@ -494,13 +533,13 @@ impl<'ctx> MirLowerCtx<'ctx> {
let iterator_place: Place = self.temp(iterator_ty.clone())?.into();
let option_item_place: Place = self.temp(option_item_ty.clone())?.into();
let ref_mut_iterator_place: Place = self.temp(ref_mut_iterator_ty)?.into();
let Some(current) = self.lower_call_and_args(into_iter_fn_op, Some(iterable).into_iter(), iterator_place.clone(), current, false)?
let Some(current) = self.lower_call_and_args(into_iter_fn_op, Some(iterable).into_iter(), iterator_place.clone(), current, false, expr_id.into())?
else {
return Ok(None);
};
self.push_assignment(current, ref_mut_iterator_place.clone(), Rvalue::Ref(BorrowKind::Mut { allow_two_phase_borrow: false }, iterator_place), expr_id.into());
self.lower_loop(current, place, label, |this, begin| {
let Some(current) = this.lower_call(iter_next_fn_op, vec![Operand::Copy(ref_mut_iterator_place)], option_item_place.clone(), begin, false)?
self.lower_loop(current, place, label, expr_id.into(), |this, begin| {
let Some(current) = this.lower_call(iter_next_fn_op, vec![Operand::Copy(ref_mut_iterator_place)], option_item_place.clone(), begin, false, expr_id.into())?
else {
return Ok(());
};
@ -516,7 +555,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
AdtPatternShape::Tuple { args: &[pat], ellipsis: None },
)?;
if let Some((_, block)) = this.lower_expr_as_place(current, body, true)? {
this.set_goto(block, begin);
this.set_goto(block, begin, expr_id.into());
}
Ok(())
})
@ -536,39 +575,36 @@ impl<'ctx> MirLowerCtx<'ctx> {
place,
current,
self.is_uninhabited(expr_id),
expr_id.into(),
);
}
let callee_ty = self.expr_ty_after_adjustments(*callee);
match &callee_ty.data(Interner).kind {
chalk_ir::TyKind::FnDef(..) => {
let func = Operand::from_bytes(vec![], callee_ty.clone());
self.lower_call_and_args(func, args.iter().copied(), place, current, self.is_uninhabited(expr_id))
self.lower_call_and_args(func, args.iter().copied(), place, current, self.is_uninhabited(expr_id), expr_id.into())
}
chalk_ir::TyKind::Function(_) => {
let Some((func, current)) = self.lower_expr_to_some_operand(*callee, current)? else {
return Ok(None);
};
self.lower_call_and_args(func, args.iter().copied(), place, current, self.is_uninhabited(expr_id))
self.lower_call_and_args(func, args.iter().copied(), place, current, self.is_uninhabited(expr_id), expr_id.into())
}
TyKind::Error => return Err(MirLowerError::MissingFunctionDefinition),
_ => return Err(MirLowerError::TypeError("function call on bad type")),
}
}
Expr::MethodCall { receiver, args, .. } => {
Expr::MethodCall { receiver, args, method_name, .. } => {
let (func_id, generic_args) =
self.infer.method_resolution(expr_id).ok_or(MirLowerError::UnresolvedMethod)?;
let ty = chalk_ir::TyKind::FnDef(
CallableDefId::FunctionId(func_id).to_chalk(self.db),
generic_args,
)
.intern(Interner);
let func = Operand::from_bytes(vec![], ty);
self.infer.method_resolution(expr_id).ok_or_else(|| MirLowerError::UnresolvedMethod(format!("{}", method_name)))?;
let func = Operand::from_fn(self.db, func_id, generic_args);
self.lower_call_and_args(
func,
iter::once(*receiver).chain(args.iter().copied()),
place,
current,
self.is_uninhabited(expr_id),
expr_id.into(),
)
}
Expr::Match { expr, arms } => {
@ -591,7 +627,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
let next = self.new_basic_block();
let o = otherwise.get_or_insert_with(|| self.new_basic_block());
if let Some((discr, c)) = self.lower_expr_to_some_operand(guard, then)? {
self.set_terminator(c, Terminator::SwitchInt { discr, targets: SwitchTargets::static_if(1, next, *o) });
self.set_terminator(c, TerminatorKind::SwitchInt { discr, targets: SwitchTargets::static_if(1, next, *o) }, expr_id.into());
}
next
} else {
@ -599,7 +635,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
};
if let Some(block) = self.lower_expr_to_place(*expr, place.clone(), then)? {
let r = end.get_or_insert_with(|| self.new_basic_block());
self.set_goto(block, *r);
self.set_goto(block, *r, expr_id.into());
}
match otherwise {
Some(o) => current = o,
@ -611,18 +647,17 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
}
if self.is_unterminated(current) {
self.set_terminator(current, Terminator::Unreachable);
self.set_terminator(current, TerminatorKind::Unreachable, expr_id.into());
}
Ok(end)
}
Expr::Continue { label } => match label {
Some(_) => not_supported!("continue with label"),
None => {
let loop_data =
self.current_loop_blocks.as_ref().ok_or(MirLowerError::ContinueWithoutLoop)?;
self.set_goto(current, loop_data.begin);
Ok(None)
}
Expr::Continue { label } => {
let loop_data = match label {
Some(l) => self.labeled_loop_blocks.get(l).ok_or(MirLowerError::UnresolvedLabel)?,
None => self.current_loop_blocks.as_ref().ok_or(MirLowerError::ContinueWithoutLoop)?,
};
self.set_goto(current, loop_data.begin, expr_id.into());
Ok(None)
},
&Expr::Break { expr, label } => {
if let Some(expr) = expr {
@ -639,7 +674,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
Some(l) => self.labeled_loop_blocks.get(&l).ok_or(MirLowerError::UnresolvedLabel)?.end.expect("We always generate end for labeled loops"),
None => self.current_loop_end()?,
};
self.set_goto(current, end);
self.set_goto(current, end, expr_id.into());
Ok(None)
}
Expr::Return { expr } => {
@ -650,7 +685,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
return Ok(None);
}
}
self.set_terminator(current, Terminator::Return);
self.set_terminator(current, TerminatorKind::Return, expr_id.into());
Ok(None)
}
Expr::Yield { .. } => not_supported!("yield"),
@ -672,7 +707,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
Some(p) => MirLowerError::UnresolvedName(p.display(self.db).to_string()),
None => MirLowerError::RecordLiteralWithoutPath,
})?;
let subst = match self.expr_ty(expr_id).kind(Interner) {
let subst = match self.expr_ty_without_adjust(expr_id).kind(Interner) {
TyKind::Adt(_, s) => s.clone(),
_ => not_supported!("Non ADT record literal"),
};
@ -757,7 +792,17 @@ impl<'ctx> MirLowerCtx<'ctx> {
self.push_assignment(current, place, Rvalue::Ref(bk, p), expr_id.into());
Ok(Some(current))
}
Expr::Box { .. } => not_supported!("box expression"),
Expr::Box { expr } => {
let ty = self.expr_ty_after_adjustments(*expr);
self.push_assignment(current, place.clone(), Rvalue::ShallowInitBoxWithAlloc(ty), expr_id.into());
let Some((operand, current)) = self.lower_expr_to_some_operand(*expr, current)? else {
return Ok(None);
};
let mut p = place;
p.projection.push(ProjectionElem::Deref);
self.push_assignment(current, p, operand.into(), expr_id.into());
Ok(Some(current))
},
Expr::Field { .. } | Expr::Index { .. } | Expr::UnaryOp { op: hir_def::hir::UnaryOp::Deref, .. } => {
let Some((p, current)) = self.lower_expr_as_place_without_adjust(current, expr_id, true)? else {
return Ok(None);
@ -784,20 +829,63 @@ impl<'ctx> MirLowerCtx<'ctx> {
},
Expr::BinaryOp { lhs, rhs, op } => {
let op = op.ok_or(MirLowerError::IncompleteExpr)?;
if let hir_def::hir::BinaryOp::Assignment { op } = op {
if op.is_some() {
not_supported!("assignment with arith op (like +=)");
let is_builtin = {
// Without adjust here is a hack. We assume that we know every possible adjustment
// for binary operator, and use without adjust to simplify our conditions.
let lhs_ty = self.expr_ty_without_adjust(*lhs);
let rhs_ty = self.expr_ty_without_adjust(*rhs);
let builtin_inequal_impls = matches!(
op,
BinaryOp::ArithOp(ArithOp::Shl | ArithOp::Shr) | BinaryOp::Assignment { op: Some(ArithOp::Shl | ArithOp::Shr) }
);
lhs_ty.as_builtin().is_some() && rhs_ty.as_builtin().is_some() && (lhs_ty == rhs_ty || builtin_inequal_impls)
};
if !is_builtin {
if let Some((func_id, generic_args)) = self.infer.method_resolution(expr_id) {
let func = Operand::from_fn(self.db, func_id, generic_args);
return self.lower_call_and_args(
func,
[*lhs, *rhs].into_iter(),
place,
current,
self.is_uninhabited(expr_id),
expr_id.into(),
);
}
let Some((lhs_place, current)) =
}
if let hir_def::hir::BinaryOp::Assignment { op } = op {
if let Some(op) = op {
// last adjustment is `&mut` which we don't want it.
let adjusts = self
.infer
.expr_adjustments
.get(lhs)
.and_then(|x| x.split_last())
.map(|x| x.1)
.ok_or(MirLowerError::TypeError("adjustment of binary op was missing"))?;
let Some((lhs_place, current)) =
self.lower_expr_as_place_with_adjust(current, *lhs, false, adjusts)?
else {
return Ok(None);
};
let Some((rhs_op, current)) = self.lower_expr_to_some_operand(*rhs, current)? else {
return Ok(None);
};
let r_value = Rvalue::CheckedBinaryOp(op.into(), Operand::Copy(lhs_place.clone()), rhs_op);
self.push_assignment(current, lhs_place, r_value, expr_id.into());
return Ok(Some(current));
} else {
let Some((lhs_place, current)) =
self.lower_expr_as_place(current, *lhs, false)?
else {
return Ok(None);
};
let Some((rhs_op, current)) = self.lower_expr_to_some_operand(*rhs, current)? else {
return Ok(None);
};
self.push_assignment(current, lhs_place, rhs_op.into(), expr_id.into());
return Ok(Some(current));
else {
return Ok(None);
};
let Some((rhs_op, current)) = self.lower_expr_to_some_operand(*rhs, current)? else {
return Ok(None);
};
self.push_assignment(current, lhs_place, rhs_op.into(), expr_id.into());
return Ok(Some(current));
}
}
let Some((lhs_op, current)) = self.lower_expr_to_some_operand(*lhs, current)? else {
return Ok(None);
@ -826,7 +914,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
Ok(Some(current))
}
&Expr::Range { lhs, rhs, range_type: _ } => {
let ty = self.expr_ty(expr_id);
let ty = self.expr_ty_without_adjust(expr_id);
let Some((adt, subst)) = ty.as_adt() else {
return Err(MirLowerError::TypeError("Range type is not adt"));
};
@ -869,7 +957,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
Ok(Some(current))
},
Expr::Closure { .. } => {
let ty = self.expr_ty(expr_id);
let ty = self.expr_ty_without_adjust(expr_id);
let TyKind::Closure(id, _) = ty.kind(Interner) else {
not_supported!("closure with non closure type");
};
@ -893,7 +981,12 @@ impl<'ctx> MirLowerCtx<'ctx> {
};
match &capture.kind {
CaptureKind::ByRef(bk) => {
let tmp: Place = self.temp(capture.ty.clone())?.into();
let placeholder_subst = match self.owner.as_generic_def_id() {
Some(x) => TyBuilder::placeholder_subst(self.db, x),
None => Substitution::empty(Interner),
};
let tmp_ty = capture.ty.clone().substitute(Interner, &placeholder_subst);
let tmp: Place = self.temp(tmp_ty)?.into();
self.push_assignment(
current,
tmp.clone(),
@ -928,7 +1021,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
return Ok(None);
};
let r = Rvalue::Aggregate(
AggregateKind::Tuple(self.expr_ty(expr_id)),
AggregateKind::Tuple(self.expr_ty_without_adjust(expr_id)),
values,
);
self.push_assignment(current, place, r, expr_id.into());
@ -936,7 +1029,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
Expr::Array(l) => match l {
Array::ElementList { elements, .. } => {
let elem_ty = match &self.expr_ty(expr_id).data(Interner).kind {
let elem_ty = match &self.expr_ty_without_adjust(expr_id).data(Interner).kind {
TyKind::Array(ty, _) => ty.clone(),
_ => {
return Err(MirLowerError::TypeError(
@ -968,7 +1061,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
let Some((init, current)) = self.lower_expr_to_some_operand(*initializer, current)? else {
return Ok(None);
};
let len = match &self.expr_ty(expr_id).data(Interner).kind {
let len = match &self.expr_ty_without_adjust(expr_id).data(Interner).kind {
TyKind::Array(_, len) => len.clone(),
_ => {
return Err(MirLowerError::TypeError(
@ -982,7 +1075,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
},
},
Expr::Literal(l) => {
let ty = self.expr_ty(expr_id);
let ty = self.expr_ty_without_adjust(expr_id);
let op = self.lower_literal_to_operand(ty, l)?;
self.push_assignment(current, place, op.into(), expr_id.into());
Ok(Some(current))
@ -1057,8 +1150,30 @@ impl<'ctx> MirLowerCtx<'ctx> {
place: Place,
subst: Substitution,
span: MirSpan,
ty: Ty,
) -> Result<()> {
let c = self.db.const_eval(const_id, subst)?;
let c = if subst.len(Interner) != 0 {
// We can't evaluate constant with substitution now, as generics are not monomorphized in lowering.
intern_const_scalar(ConstScalar::UnevaluatedConst(const_id, subst), ty)
} else {
let (const_id, subst) = lookup_impl_const(
self.db,
self.db.trait_environment_for_body(self.owner),
const_id,
subst,
);
let name = self
.db
.const_data(const_id)
.name
.as_ref()
.and_then(|x| x.as_str())
.unwrap_or("_")
.to_owned();
self.db
.const_eval(const_id.into(), subst)
.map_err(|e| MirLowerError::ConstEvalError(name, Box::new(e)))?
};
self.write_const_to_place(c, prev_block, place, span)
}
@ -1114,6 +1229,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
place: Place,
mut current: BasicBlockId,
is_uninhabited: bool,
span: MirSpan,
) -> Result<Option<BasicBlockId>> {
let Some(args) = args
.map(|arg| {
@ -1128,7 +1244,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
else {
return Ok(None);
};
self.lower_call(func, args, place, current, is_uninhabited)
self.lower_call(func, args, place, current, is_uninhabited, span)
}
fn lower_call(
@ -1138,11 +1254,12 @@ impl<'ctx> MirLowerCtx<'ctx> {
place: Place,
current: BasicBlockId,
is_uninhabited: bool,
span: MirSpan,
) -> Result<Option<BasicBlockId>> {
let b = if is_uninhabited { None } else { Some(self.new_basic_block()) };
self.set_terminator(
current,
Terminator::Call {
TerminatorKind::Call {
func,
args,
destination: place,
@ -1150,6 +1267,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
cleanup: None,
from_hir_call: true,
},
span,
);
Ok(b)
}
@ -1158,15 +1276,15 @@ impl<'ctx> MirLowerCtx<'ctx> {
self.result.basic_blocks[source].terminator.is_none()
}
fn set_terminator(&mut self, source: BasicBlockId, terminator: Terminator) {
self.result.basic_blocks[source].terminator = Some(terminator);
fn set_terminator(&mut self, source: BasicBlockId, terminator: TerminatorKind, span: MirSpan) {
self.result.basic_blocks[source].terminator = Some(Terminator { span, kind: terminator });
}
fn set_goto(&mut self, source: BasicBlockId, target: BasicBlockId) {
self.set_terminator(source, Terminator::Goto { target });
fn set_goto(&mut self, source: BasicBlockId, target: BasicBlockId, span: MirSpan) {
self.set_terminator(source, TerminatorKind::Goto { target }, span);
}
fn expr_ty(&self, e: ExprId) -> Ty {
fn expr_ty_without_adjust(&self, e: ExprId) -> Ty {
self.infer[e].clone()
}
@ -1177,7 +1295,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
ty = Some(x.target.clone());
}
}
ty.unwrap_or_else(|| self.expr_ty(e))
ty.unwrap_or_else(|| self.expr_ty_without_adjust(e))
}
fn push_statement(&mut self, block: BasicBlockId, statement: Statement) {
@ -1211,6 +1329,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
prev_block: BasicBlockId,
place: Place,
label: Option<LabelId>,
span: MirSpan,
f: impl FnOnce(&mut MirLowerCtx<'_>, BasicBlockId) -> Result<()>,
) -> Result<Option<BasicBlockId>> {
let begin = self.new_basic_block();
@ -1228,7 +1347,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
} else {
None
};
self.set_goto(prev_block, begin);
self.set_goto(prev_block, begin, span);
f(self, begin)?;
let my = mem::replace(&mut self.current_loop_blocks, prev).ok_or(
MirLowerError::ImplementationError("current_loop_blocks is corrupt".to_string()),
@ -1247,14 +1366,15 @@ impl<'ctx> MirLowerCtx<'ctx> {
&mut self,
b1: Option<BasicBlockId>,
b2: Option<BasicBlockId>,
span: MirSpan,
) -> Option<BasicBlockId> {
match (b1, b2) {
(None, None) => None,
(None, Some(b)) | (Some(b), None) => Some(b),
(Some(b1), Some(b2)) => {
let bm = self.new_basic_block();
self.set_goto(b1, bm);
self.set_goto(b2, bm);
self.set_goto(b1, bm, span);
self.set_goto(b2, bm, span);
Some(bm)
}
}
@ -1332,6 +1452,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
mut current: BasicBlockId,
tail: Option<ExprId>,
place: Place,
span: MirSpan,
) -> Result<Option<Idx<BasicBlock>>> {
for statement in statements.iter() {
match statement {
@ -1355,13 +1476,13 @@ impl<'ctx> MirLowerCtx<'ctx> {
match (else_block, else_branch) {
(None, _) => (),
(Some(else_block), None) => {
self.set_terminator(else_block, Terminator::Unreachable);
self.set_terminator(else_block, TerminatorKind::Unreachable, span);
}
(Some(else_block), Some(else_branch)) => {
if let Some((_, b)) =
self.lower_expr_as_place(else_block, *else_branch, true)?
{
self.set_terminator(b, Terminator::Unreachable);
self.set_terminator(b, TerminatorKind::Unreachable, span);
}
}
}
@ -1438,7 +1559,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
BindingAnnotation::Unannotated,
)?;
if let Some(b) = r.1 {
self.set_terminator(b, Terminator::Unreachable);
self.set_terminator(b, TerminatorKind::Unreachable, param.into());
}
current = r.0;
}
@ -1456,6 +1577,18 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
}
}
/// Evaluates the discriminant value of `variant` via the database query,
/// enriching any evaluation failure with the variant's human-readable
/// `Enum::Variant` name so the resulting `ConstEvalError` is diagnosable.
fn const_eval_discriminant(&self, variant: EnumVariantId) -> Result<i128> {
    let r = self.db.const_eval_discriminant(variant);
    match r {
        Ok(r) => Ok(r),
        Err(e) => {
            // Build "EnumName::VariantName" from the enum data for the error message.
            let data = self.db.enum_data(variant.parent);
            let name = format!("{}::{}", data.name, data.variants[variant.local_id].name);
            Err(MirLowerError::ConstEvalError(name, Box::new(e)))
        }
    }
}
}
fn cast_kind(source_ty: &Ty, target_ty: &Ty) -> Result<CastKind> {
@ -1498,7 +1631,7 @@ pub fn mir_body_for_closure_query(
// 0 is return local
ctx.result.locals.alloc(Local { ty: infer[*root].clone() });
ctx.result.locals.alloc(Local { ty: infer[expr].clone() });
let Some(sig) = substs.at(Interner, 0).assert_ty_ref(Interner).callable_sig(db) else {
let Some(sig) = ClosureSubst(substs).sig_ty().callable_sig(db) else {
implementation_error!("closure has not callable sig");
};
let current = ctx.lower_params_and_bindings(
@ -1506,7 +1639,7 @@ pub fn mir_body_for_closure_query(
|_| true,
)?;
if let Some(b) = ctx.lower_expr_to_place(*root, return_slot().into(), current)? {
ctx.set_terminator(b, Terminator::Return);
ctx.set_terminator(b, TerminatorKind::Return, (*root).into());
}
let mut upvar_map: FxHashMap<LocalId, Vec<(&CapturedItem, usize)>> = FxHashMap::default();
for (i, capture) in captures.iter().enumerate() {
@ -1628,7 +1761,7 @@ pub fn lower_to_mir(
ctx.lower_params_and_bindings([].into_iter(), binding_picker)?
};
if let Some(b) = ctx.lower_expr_to_place(root_expr, return_slot().into(), current)? {
ctx.set_terminator(b, Terminator::Return);
ctx.set_terminator(b, TerminatorKind::Return, root_expr.into());
}
Ok(ctx.result)
}

View File

@ -1,7 +1,7 @@
//! MIR lowering for places
use super::*;
use hir_def::FunctionId;
use hir_def::{lang_item::lang_attr, FunctionId};
use hir_expand::name;
macro_rules! not_supported {
@ -16,7 +16,7 @@ impl MirLowerCtx<'_> {
expr_id: ExprId,
prev_block: BasicBlockId,
) -> Result<Option<(Place, BasicBlockId)>> {
let ty = self.expr_ty(expr_id);
let ty = self.expr_ty_without_adjust(expr_id);
let place = self.temp(ty)?;
let Some(current) = self.lower_expr_to_place_without_adjust(expr_id, place.into(), prev_block)? else {
return Ok(None);
@ -30,8 +30,10 @@ impl MirLowerCtx<'_> {
prev_block: BasicBlockId,
adjustments: &[Adjustment],
) -> Result<Option<(Place, BasicBlockId)>> {
let ty =
adjustments.last().map(|x| x.target.clone()).unwrap_or_else(|| self.expr_ty(expr_id));
let ty = adjustments
.last()
.map(|x| x.target.clone())
.unwrap_or_else(|| self.expr_ty_without_adjust(expr_id));
let place = self.temp(ty)?;
let Some(current) = self.lower_expr_to_place_with_adjust(expr_id, place.into(), prev_block, adjustments)? else {
return Ok(None);
@ -80,7 +82,7 @@ impl MirLowerCtx<'_> {
r,
rest.last()
.map(|x| x.target.clone())
.unwrap_or_else(|| self.expr_ty(expr_id)),
.unwrap_or_else(|| self.expr_ty_without_adjust(expr_id)),
last.target.clone(),
expr_id.into(),
match od.0 {
@ -135,17 +137,39 @@ impl MirLowerCtx<'_> {
};
match pr {
ValueNs::LocalBinding(pat_id) => {
Ok(Some((self.result.binding_locals[pat_id].into(), current)))
Ok(Some((self.binding_local(pat_id)?.into(), current)))
}
ValueNs::StaticId(s) => {
let ty = self.expr_ty_without_adjust(expr_id);
let ref_ty =
TyKind::Ref(Mutability::Not, static_lifetime(), ty).intern(Interner);
let mut temp: Place = self.temp(ref_ty)?.into();
self.push_assignment(
current,
temp.clone(),
Operand::Static(s).into(),
expr_id.into(),
);
temp.projection.push(ProjectionElem::Deref);
Ok(Some((temp, current)))
}
_ => try_rvalue(self),
}
}
Expr::UnaryOp { expr, op } => match op {
hir_def::hir::UnaryOp::Deref => {
if !matches!(
self.expr_ty(*expr).kind(Interner),
TyKind::Ref(..) | TyKind::Raw(..)
) {
let is_builtin = match self.expr_ty_without_adjust(*expr).kind(Interner) {
TyKind::Ref(..) | TyKind::Raw(..) => true,
TyKind::Adt(id, _) => {
if let Some(lang_item) = lang_attr(self.db.upcast(), id.0) {
lang_item == LangItem::OwnedBox
} else {
false
}
}
_ => false,
};
if !is_builtin {
let Some((p, current)) = self.lower_expr_as_place(current, *expr, true)? else {
return Ok(None);
};
@ -153,7 +177,7 @@ impl MirLowerCtx<'_> {
current,
p,
self.expr_ty_after_adjustments(*expr),
self.expr_ty(expr_id),
self.expr_ty_without_adjust(expr_id),
expr_id.into(),
'b: {
if let Some((f, _)) = self.infer.method_resolution(expr_id) {
@ -198,7 +222,7 @@ impl MirLowerCtx<'_> {
)
{
let Some(index_fn) = self.infer.method_resolution(expr_id) else {
return Err(MirLowerError::UnresolvedMethod);
return Err(MirLowerError::UnresolvedMethod("[overloaded index]".to_string()));
};
let Some((base_place, current)) = self.lower_expr_as_place(current, *base, true)? else {
return Ok(None);
@ -210,7 +234,7 @@ impl MirLowerCtx<'_> {
current,
base_place,
base_ty,
self.expr_ty(expr_id),
self.expr_ty_without_adjust(expr_id),
index_operand,
expr_id.into(),
index_fn,
@ -266,7 +290,7 @@ impl MirLowerCtx<'_> {
)
.intern(Interner),
);
let Some(current) = self.lower_call(index_fn_op, vec![Operand::Copy(ref_place), index_operand], result.clone(), current, false)? else {
let Some(current) = self.lower_call(index_fn_op, vec![Operand::Copy(ref_place), index_operand], result.clone(), current, false, span)? else {
return Ok(None);
};
result.projection.push(ProjectionElem::Deref);
@ -313,7 +337,7 @@ impl MirLowerCtx<'_> {
.intern(Interner),
);
let mut result: Place = self.temp(target_ty_ref)?.into();
let Some(current) = self.lower_call(deref_fn_op, vec![Operand::Copy(ref_place)], result.clone(), current, false)? else {
let Some(current) = self.lower_call(deref_fn_op, vec![Operand::Copy(ref_place)], result.clone(), current, false, span)? else {
return Ok(None);
};
result.projection.push(ProjectionElem::Deref);

View File

@ -1,5 +1,7 @@
//! MIR lowering for patterns
use hir_def::resolver::HasResolver;
use crate::utils::pattern_matching_dereference_count;
use super::*;
@ -72,7 +74,7 @@ impl MirLowerCtx<'_> {
*pat,
binding_mode,
)?;
self.set_goto(next, then_target);
self.set_goto(next, then_target, pattern.into());
match next_else {
Some(t) => {
current = t;
@ -85,13 +87,13 @@ impl MirLowerCtx<'_> {
}
if !finished {
let ce = *current_else.get_or_insert_with(|| self.new_basic_block());
self.set_goto(current, ce);
self.set_goto(current, ce, pattern.into());
}
(then_target, current_else)
}
Pat::Record { args, .. } => {
let Some(variant) = self.infer.variant_resolution_for_pat(pattern) else {
not_supported!("unresolved variant");
not_supported!("unresolved variant for record");
};
self.pattern_matching_variant(
cond_ty,
@ -106,11 +108,8 @@ impl MirLowerCtx<'_> {
}
Pat::Range { .. } => not_supported!("range pattern"),
Pat::Slice { .. } => not_supported!("slice pattern"),
Pat::Path(_) => {
let Some(variant) = self.infer.variant_resolution_for_pat(pattern) else {
not_supported!("unresolved variant");
};
self.pattern_matching_variant(
Pat::Path(p) => match self.infer.variant_resolution_for_pat(pattern) {
Some(variant) => self.pattern_matching_variant(
cond_ty,
binding_mode,
cond_place,
@ -119,8 +118,60 @@ impl MirLowerCtx<'_> {
pattern.into(),
current_else,
AdtPatternShape::Unit,
)?
}
)?,
None => {
let unresolved_name = || MirLowerError::unresolved_path(self.db, p);
let resolver = self.owner.resolver(self.db.upcast());
let pr = resolver
.resolve_path_in_value_ns(self.db.upcast(), p)
.ok_or_else(unresolved_name)?;
match pr {
ResolveValueResult::ValueNs(v) => match v {
ValueNs::ConstId(c) => {
let tmp: Place = self.temp(cond_ty.clone())?.into();
let span = pattern.into();
self.lower_const(
c,
current,
tmp.clone(),
Substitution::empty(Interner),
span,
cond_ty.clone(),
)?;
let tmp2: Place = self.temp(TyBuilder::bool())?.into();
self.push_assignment(
current,
tmp2.clone(),
Rvalue::CheckedBinaryOp(
BinOp::Eq,
Operand::Copy(tmp),
Operand::Copy(cond_place),
),
span,
);
let next = self.new_basic_block();
let else_target =
current_else.unwrap_or_else(|| self.new_basic_block());
self.set_terminator(
current,
TerminatorKind::SwitchInt {
discr: Operand::Copy(tmp2),
targets: SwitchTargets::static_if(1, next, else_target),
},
span,
);
(next, Some(else_target))
}
_ => not_supported!(
"path in pattern position that is not const or variant"
),
},
ResolveValueResult::Partial(_, _) => {
not_supported!("assoc const in patterns")
}
}
}
},
Pat::Lit(l) => match &self.body.exprs[*l] {
Expr::Literal(l) => {
let c = self.lower_literal_to_operand(cond_ty, l)?;
@ -218,10 +269,11 @@ impl MirLowerCtx<'_> {
let discr = Operand::Copy(discr);
self.set_terminator(
current,
Terminator::SwitchInt {
TerminatorKind::SwitchInt {
discr,
targets: SwitchTargets::static_if(1, then_target, else_target),
},
pattern.into(),
);
Ok((then_target, Some(else_target)))
}
@ -244,8 +296,7 @@ impl MirLowerCtx<'_> {
};
Ok(match variant {
VariantId::EnumVariantId(v) => {
let e = self.db.const_eval_discriminant(v)? as u128;
let next = self.new_basic_block();
let e = self.const_eval_discriminant(v)? as u128;
let tmp = self.discr_temp_place();
self.push_assignment(
current,
@ -253,13 +304,15 @@ impl MirLowerCtx<'_> {
Rvalue::Discriminant(cond_place.clone()),
span,
);
let next = self.new_basic_block();
let else_target = current_else.unwrap_or_else(|| self.new_basic_block());
self.set_terminator(
current,
Terminator::SwitchInt {
TerminatorKind::SwitchInt {
discr: Operand::Copy(tmp),
targets: SwitchTargets::static_if(e, next, else_target),
},
span,
);
let enum_data = self.db.enum_data(v.parent);
self.pattern_matching_variant_fields(

View File

@ -11,19 +11,52 @@ use la_arena::ArenaMap;
use crate::{
db::HirDatabase,
display::HirDisplay,
mir::{PlaceElem, ProjectionElem, StatementKind, Terminator},
display::{ClosureStyle, HirDisplay},
mir::{PlaceElem, ProjectionElem, StatementKind, TerminatorKind},
ClosureId,
};
use super::{
AggregateKind, BasicBlockId, BorrowKind, LocalId, MirBody, Operand, Place, Rvalue, UnOp,
};
/// `write!` wrapper that swallows the `fmt::Error` — output here is
/// best-effort pretty-printing, so a formatting failure is ignored
/// rather than propagated.
macro_rules! w {
    ($dst:expr, $($arg:tt)*) => {
        { let _ = write!($dst, $($arg)*); }
    };
}
/// `writeln!` wrapper that swallows the `fmt::Error` (see `w!` above
/// for rationale). The zero-argument form emits a bare newline.
macro_rules! wln {
    ($dst:expr) => {
        { let _ = writeln!($dst); }
    };
    ($dst:expr, $($arg:tt)*) => {
        { let _ = writeln!($dst, $($arg)*); }
    };
}
impl MirBody {
pub fn pretty_print(&self, db: &dyn HirDatabase) -> String {
let hir_body = db.body(self.owner);
let mut ctx = MirPrettyCtx::new(self, &hir_body, db);
ctx.for_body(ctx.body.owner);
ctx.for_body(|this| match ctx.body.owner {
hir_def::DefWithBodyId::FunctionId(id) => {
let data = db.function_data(id);
w!(this, "fn {}() ", data.name);
}
hir_def::DefWithBodyId::StaticId(id) => {
let data = db.static_data(id);
w!(this, "static {}: _ = ", data.name);
}
hir_def::DefWithBodyId::ConstId(id) => {
let data = db.const_data(id);
w!(this, "const {}: _ = ", data.name.as_ref().unwrap_or(&Name::missing()));
}
hir_def::DefWithBodyId::VariantId(id) => {
let data = db.enum_data(id.parent);
w!(this, "enum {} = ", data.name);
}
});
ctx.result
}
@ -49,21 +82,6 @@ struct MirPrettyCtx<'a> {
local_to_binding: ArenaMap<LocalId, BindingId>,
}
macro_rules! w {
($dst:expr, $($arg:tt)*) => {
{ let _ = write!($dst, $($arg)*); }
};
}
macro_rules! wln {
($dst:expr) => {
{ let _ = writeln!($dst); }
};
($dst:expr, $($arg:tt)*) => {
{ let _ = writeln!($dst, $($arg)*); }
};
}
impl Write for MirPrettyCtx<'_> {
fn write_str(&mut self, s: &str) -> std::fmt::Result {
let mut it = s.split('\n'); // note: `.lines()` is wrong here
@ -91,36 +109,40 @@ impl Display for LocalName {
}
impl<'a> MirPrettyCtx<'a> {
fn for_body(&mut self, name: impl Debug) {
wln!(self, "// {:?}", name);
fn for_body(&mut self, name: impl FnOnce(&mut MirPrettyCtx<'_>)) {
name(self);
self.with_block(|this| {
this.locals();
wln!(this);
this.blocks();
});
for &closure in &self.body.closures {
let body = match self.db.mir_body_for_closure(closure) {
Ok(x) => x,
Err(e) => {
wln!(self, "// error in {closure:?}: {e:?}");
continue;
}
};
let result = mem::take(&mut self.result);
let indent = mem::take(&mut self.indent);
let mut ctx = MirPrettyCtx {
body: &body,
local_to_binding: body.binding_locals.iter().map(|(x, y)| (*y, x)).collect(),
result,
indent,
..*self
};
ctx.for_body(closure);
self.result = ctx.result;
self.indent = ctx.indent;
self.for_closure(closure);
}
}
/// Pretty-prints the MIR body of `closure` into this printer's output.
///
/// If lowering the closure body failed, a `// error in …` comment is
/// emitted instead and printing continues. Otherwise the accumulated
/// output and indentation are moved (via `mem::take`) into a child
/// `MirPrettyCtx` built over the closure's body, the body is printed
/// with a `// Closure: …` header, and the output/indent are moved back.
fn for_closure(&mut self, closure: ClosureId) {
    let body = match self.db.mir_body_for_closure(closure) {
        Ok(x) => x,
        Err(e) => {
            // Lowering failed: record the error inline and skip this closure.
            wln!(self, "// error in {closure:?}: {e:?}");
            return;
        }
    };
    // Take ownership of the buffers so the child context appends to the
    // same output without cloning; they are restored below.
    let result = mem::take(&mut self.result);
    let indent = mem::take(&mut self.indent);
    let mut ctx = MirPrettyCtx {
        body: &body,
        // Invert the closure body's binding→local map to local→binding
        // for name lookup while printing.
        local_to_binding: body.binding_locals.iter().map(|(x, y)| (*y, x)).collect(),
        result,
        indent,
        ..*self
    };
    ctx.for_body(|this| wln!(this, "// Closure: {:?}", closure));
    self.result = ctx.result;
    self.indent = ctx.indent;
}
fn with_block(&mut self, f: impl FnOnce(&mut MirPrettyCtx<'_>)) {
self.indent += " ";
wln!(self, "{{");
@ -155,7 +177,7 @@ impl<'a> MirPrettyCtx<'a> {
fn locals(&mut self) {
for (id, local) in self.body.locals.iter() {
wln!(self, "let {}: {};", self.local_name(id), local.ty.display(self.db));
wln!(self, "let {}: {};", self.local_name(id), self.hir_display(&local.ty));
}
}
@ -198,11 +220,11 @@ impl<'a> MirPrettyCtx<'a> {
}
}
match &block.terminator {
Some(terminator) => match terminator {
Terminator::Goto { target } => {
Some(terminator) => match &terminator.kind {
TerminatorKind::Goto { target } => {
wln!(this, "goto 'bb{};", u32::from(target.into_raw()))
}
Terminator::SwitchInt { discr, targets } => {
TerminatorKind::SwitchInt { discr, targets } => {
w!(this, "switch ");
this.operand(discr);
w!(this, " ");
@ -213,7 +235,7 @@ impl<'a> MirPrettyCtx<'a> {
wln!(this, "_ => {},", this.basic_block_id(targets.otherwise()));
});
}
Terminator::Call { func, args, destination, target, .. } => {
TerminatorKind::Call { func, args, destination, target, .. } => {
w!(this, "Call ");
this.with_block(|this| {
w!(this, "func: ");
@ -295,7 +317,8 @@ impl<'a> MirPrettyCtx<'a> {
// equally. Feel free to change it.
self.place(p);
}
Operand::Constant(c) => w!(self, "Const({})", c.display(self.db)),
Operand::Constant(c) => w!(self, "Const({})", self.hir_display(c)),
Operand::Static(s) => w!(self, "Static({:?})", s),
}
}
@ -349,7 +372,7 @@ impl<'a> MirPrettyCtx<'a> {
Rvalue::Cast(ck, op, ty) => {
w!(self, "Cast({ck:?}, ");
self.operand(op);
w!(self, ", {})", ty.display(self.db));
w!(self, ", {})", self.hir_display(ty));
}
Rvalue::CheckedBinaryOp(b, o1, o2) => {
self.operand(o1);
@ -369,6 +392,7 @@ impl<'a> MirPrettyCtx<'a> {
self.place(p);
w!(self, ")");
}
Rvalue::ShallowInitBoxWithAlloc(_) => w!(self, "ShallowInitBoxWithAlloc"),
Rvalue::ShallowInitBox(op, _) => {
w!(self, "ShallowInitBox(");
self.operand(op);
@ -392,4 +416,8 @@ impl<'a> MirPrettyCtx<'a> {
}
}
}
fn hir_display<T: HirDisplay>(&self, ty: &'a T) -> impl Display + 'a {
ty.display(self.db).with_closure_style(ClosureStyle::ClosureWithId)
}
}

View File

@ -23,10 +23,11 @@ use hir_expand::name::Name;
use intern::Interned;
use rustc_hash::FxHashSet;
use smallvec::{smallvec, SmallVec};
use stdx::never;
use crate::{
db::HirDatabase, ChalkTraitId, Interner, Substitution, TraitRef, TraitRefExt, Ty, TyExt,
WhereClause,
db::HirDatabase, ChalkTraitId, GenericArg, Interner, Substitution, TraitRef, TraitRefExt, Ty,
TyExt, WhereClause,
};
pub(crate) fn fn_traits(
@ -176,6 +177,37 @@ pub(crate) fn generics(db: &dyn DefDatabase, def: GenericDefId) -> Generics {
Generics { def, params: db.generic_params(def), parent_generics }
}
/// It is a bit different from the rustc equivalent. Currently it stores:
/// - 0: the function signature, encoded as a function pointer type
/// - 1..n: generics of the parent
///
/// and it doesn't store the closure types and fields.
///
/// Code should not assume this ordering, and should always use the methods on
/// this struct for retrieving, and `TyBuilder::subst_for_closure` for creating.
pub(crate) struct ClosureSubst<'a>(pub(crate) &'a Substitution);

impl<'a> ClosureSubst<'a> {
    /// Everything after the signature slot, i.e. the generics of the parent.
    pub(crate) fn parent_subst(&self) -> &'a [GenericArg] {
        let args = self.0.as_slice(Interner);
        match args.split_first() {
            Some((_sig, parent)) => parent,
            None => {
                never!("Closure missing parameter");
                &[]
            }
        }
    }

    /// The closure's signature, stored in slot 0 as a function-pointer type.
    pub(crate) fn sig_ty(&self) -> &'a Ty {
        let args = self.0.as_slice(Interner);
        match args.first() {
            Some(sig) => sig.assert_ty_ref(Interner),
            None => {
                unreachable!("Closure missing sig_ty parameter");
            }
        }
    }
}
#[derive(Debug)]
pub(crate) struct Generics {
def: GenericDefId,

View File

@ -1856,11 +1856,21 @@ impl Function {
def_map.fn_as_proc_macro(self.id).map(|id| Macro { id: id.into() })
}
pub fn eval(self, db: &dyn HirDatabase) -> Result<(), MirEvalError> {
pub fn eval(
self,
db: &dyn HirDatabase,
span_formatter: impl Fn(FileId, TextRange) -> String,
) -> Result<(), String> {
let converter = |e: MirEvalError| {
let mut r = String::new();
_ = e.pretty_print(&mut r, db, &span_formatter);
r
};
let body = db
.mir_body(self.id.into())
.map_err(|e| MirEvalError::MirLowerError(self.id.into(), e))?;
interpret_mir(db, &body, Substitution::empty(Interner), false)?;
.map_err(|e| MirEvalError::MirLowerError(self.id.into(), e))
.map_err(converter)?;
interpret_mir(db, &body, Substitution::empty(Interner), false).map_err(converter)?;
Ok(())
}
}
@ -2006,7 +2016,7 @@ impl Const {
}
pub fn render_eval(self, db: &dyn HirDatabase) -> Result<String, ConstEvalError> {
let c = db.const_eval(self.id, Substitution::empty(Interner))?;
let c = db.const_eval(self.id.into(), Substitution::empty(Interner))?;
let r = format!("{}", HexifiedConst(c).display(db));
// We want to see things like `<utf8-error>` and `<layout-error>` as they are probably bug in our
// implementation, but there is no need to show things like `<enum-not-supported>` or `<ref-not-supported>` to

View File

@ -30,7 +30,6 @@ pub struct HoverConfig {
pub documentation: bool,
pub keywords: bool,
pub format: HoverDocFormat,
pub interpret_tests: bool,
}
#[derive(Clone, Debug, PartialEq, Eq)]

View File

@ -3,8 +3,7 @@ use std::fmt::Display;
use either::Either;
use hir::{
db::DefDatabase, Adt, AsAssocItem, AttributeTemplate, HasAttrs, HasSource, HirDisplay,
MirEvalError, Semantics, TypeInfo,
Adt, AsAssocItem, AttributeTemplate, HasAttrs, HasSource, HirDisplay, Semantics, TypeInfo,
};
use ide_db::{
base_db::SourceDatabase,
@ -435,19 +434,7 @@ pub(super) fn definition(
))
}),
Definition::Module(it) => label_and_docs(db, it),
Definition::Function(it) => label_and_layout_info_and_docs(db, it, |_| {
if !config.interpret_tests {
return None;
}
match it.eval(db) {
Ok(()) => Some("pass".into()),
Err(MirEvalError::MirLowerError(f, e)) => {
let name = &db.function_data(f).name;
Some(format!("error: fail to lower {name} due {e:?}"))
}
Err(e) => Some(format!("error: {e:?}")),
}
}),
Definition::Function(it) => label_and_docs(db, it),
Definition::Adt(it) => label_and_layout_info_and_docs(db, it, |&it| {
let layout = it.layout(db).ok()?;
Some(format!("size = {}, align = {}", layout.size.bytes(), layout.align.abi.bytes()))

View File

@ -9,7 +9,6 @@ const HOVER_BASE_CONFIG: HoverConfig = HoverConfig {
documentation: true,
format: HoverDocFormat::Markdown,
keywords: true,
interpret_tests: false,
};
fn check_hover_no_result(ra_fixture: &str) {

View File

@ -3,7 +3,10 @@
//! let _: u32 = /* <never-to-any> */ loop {};
//! let _: &u32 = /* &* */ &mut 0;
//! ```
use hir::{Adjust, Adjustment, AutoBorrow, HirDisplay, Mutability, PointerCast, Safety, Semantics};
use hir::{
Adjust, Adjustment, AutoBorrow, HirDisplay, Mutability, OverloadedDeref, PointerCast, Safety,
Semantics,
};
use ide_db::RootDatabase;
use stdx::never;
@ -88,7 +91,13 @@ pub(super) fn hints(
Adjust::NeverToAny if config.adjustment_hints == AdjustmentHints::Always => {
("<never-to-any>", "never to any")
}
Adjust::Deref(_) => ("*", "dereference"),
Adjust::Deref(None) => ("*", "dereference"),
Adjust::Deref(Some(OverloadedDeref(Mutability::Shared))) => {
("*", "`Deref` dereference")
}
Adjust::Deref(Some(OverloadedDeref(Mutability::Mut))) => {
("*", "`DerefMut` dereference")
}
Adjust::Borrow(AutoBorrow::Ref(Mutability::Shared)) => ("&", "borrow"),
Adjust::Borrow(AutoBorrow::Ref(Mutability::Mut)) => ("&mut ", "unique borrow"),
Adjust::Borrow(AutoBorrow::RawPtr(Mutability::Shared)) => {

View File

@ -444,7 +444,7 @@ fn main() {
file_id: FileId(
1,
),
range: 5768..5776,
range: 5769..5777,
},
),
tooltip: "",
@ -457,7 +457,7 @@ fn main() {
file_id: FileId(
1,
),
range: 5800..5804,
range: 5801..5805,
},
),
tooltip: "",
@ -478,7 +478,7 @@ fn main() {
file_id: FileId(
1,
),
range: 5768..5776,
range: 5769..5777,
},
),
tooltip: "",
@ -491,7 +491,7 @@ fn main() {
file_id: FileId(
1,
),
range: 5800..5804,
range: 5801..5805,
},
),
tooltip: "",
@ -512,7 +512,7 @@ fn main() {
file_id: FileId(
1,
),
range: 5768..5776,
range: 5769..5777,
},
),
tooltip: "",
@ -525,7 +525,7 @@ fn main() {
file_id: FileId(
1,
),
range: 5800..5804,
range: 5801..5805,
},
),
tooltip: "",

View File

@ -0,0 +1,49 @@
use hir::Semantics;
use ide_db::base_db::SourceDatabaseExt;
use ide_db::RootDatabase;
use ide_db::{base_db::FilePosition, LineIndexDatabase};
use std::{fmt::Write, time::Instant};
use syntax::TextRange;
use syntax::{algo::find_node_at_offset, ast, AstNode};
// Feature: Interpret Function
//
// |===
// | Editor | Action Name
//
// | VS Code | **rust-analyzer: Interpret Function**
// |===
/// Entry point for the "Interpret Function" feature: evaluates the function
/// under the cursor (if any) and appends a footer with the elapsed wall time.
///
/// Never fails; lowering/evaluation errors are rendered into the returned
/// string by `find_and_interpret`.
pub(crate) fn interpret_function(db: &RootDatabase, position: FilePosition) -> String {
    let start_time = Instant::now();
    let mut result = find_and_interpret(db, position)
        .unwrap_or_else(|| "Not inside a function body".to_string());
    // Idiomatic elapsed-time measurement instead of `Instant::now() - start_time`.
    let duration = start_time.elapsed();
    // Writing into a `String` cannot fail, so `unwrap` is fine here.
    // `writeln!(result)` replaces the non-idiomatic `writeln!(result, "")`.
    writeln!(result).unwrap();
    writeln!(result, "----------------------").unwrap();
    writeln!(result, " Finished in {}s", duration.as_secs_f32()).unwrap();
    result
}
/// Locates the `fn` item at `position` and interprets it, returning the
/// evaluation output (or a rendered error) as a string.
///
/// Returns `None` when the cursor is not on a function item.
fn find_and_interpret(db: &RootDatabase, position: FilePosition) -> Option<String> {
    let sema = Semantics::new(db);
    let source_file = sema.parse(position.file_id);

    let item = find_node_at_offset::<ast::Item>(source_file.syntax(), position.offset)?;
    let def = match item {
        // Only free/assoc functions are supported; other items are not interpretable here.
        ast::Item::Fn(it) => sema.to_def(&it)?,
        _ => return None,
    };
    // Renders a source span as a clickable `file://path#line:col` link for
    // error messages produced during interpretation.
    let span_formatter = |file_id, text_range: TextRange| {
        let line_col = db.line_index(file_id).line_col(text_range.start());
        let path = &db
            .source_root(db.file_source_root(file_id))
            .path_for_file(&file_id)
            .map(|x| x.to_string());
        let path = path.as_deref().unwrap_or("<unknown file>");
        // line_col is zero-based for lines; present it one-based.
        format!("file://{path}#{}:{}", line_col.line + 1, line_col.col)
    };
    match def.eval(db, span_formatter) {
        Ok(_) => Some("pass".to_string()),
        // Evaluation errors are already pretty-printed strings; surface them as the result.
        Err(e) => Some(e),
    }
}

View File

@ -56,6 +56,7 @@ mod typing;
mod view_crate_graph;
mod view_hir;
mod view_mir;
mod interpret_function;
mod view_item_tree;
mod shuffle_crate_graph;
@ -317,6 +318,10 @@ impl Analysis {
self.with_db(|db| view_mir::view_mir(db, position))
}
pub fn interpret_function(&self, position: FilePosition) -> Cancellable<String> {
self.with_db(|db| interpret_function::interpret_function(db, position))
}
pub fn view_item_tree(&self, file_id: FileId) -> Cancellable<String> {
self.with_db(|db| view_item_tree::view_item_tree(db, file_id))
}

View File

@ -140,7 +140,6 @@ impl StaticIndex<'_> {
documentation: true,
keywords: true,
format: crate::HoverDocFormat::Markdown,
interpret_tests: false,
};
let tokens = tokens.filter(|token| {
matches!(

View File

@ -574,6 +574,7 @@ pub struct LensConfig {
// runnables
pub run: bool,
pub debug: bool,
pub interpret: bool,
// implementations
pub implementations: bool,
@ -1423,6 +1424,9 @@ impl Config {
LensConfig {
run: self.data.lens_enable && self.data.lens_run_enable,
debug: self.data.lens_enable && self.data.lens_debug_enable,
interpret: self.data.lens_enable
&& self.data.lens_run_enable
&& self.data.interpret_tests,
implementations: self.data.lens_enable && self.data.lens_implementations_enable,
method_refs: self.data.lens_enable && self.data.lens_references_method_enable,
refs_adt: self.data.lens_enable && self.data.lens_references_adt_enable,
@ -1481,7 +1485,6 @@ impl Config {
}
},
keywords: self.data.hover_documentation_keywords_enable,
interpret_tests: self.data.interpret_tests,
}
}

View File

@ -163,6 +163,16 @@ pub(crate) fn handle_view_mir(
Ok(res)
}
/// LSP handler for the custom `rust-analyzer/interpretFunction` request:
/// converts the LSP position and delegates to `Analysis::interpret_function`.
pub(crate) fn handle_interpret_function(
    snap: GlobalStateSnapshot,
    params: lsp_types::TextDocumentPositionParams,
) -> Result<String> {
    let _p = profile::span("handle_interpret_function");
    let position = from_proto::file_position(&snap, params)?;
    let res = snap.analysis.interpret_function(position)?;
    Ok(res)
}
pub(crate) fn handle_view_file_text(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentIdentifier,

View File

@ -90,6 +90,14 @@ impl Request for ViewMir {
const METHOD: &'static str = "rust-analyzer/viewMir";
}
/// Marker type for the custom `rust-analyzer/interpretFunction` LSP request
/// (takes a text-document position, returns the interpretation output).
pub enum InterpretFunction {}

impl Request for InterpretFunction {
    type Params = lsp_types::TextDocumentPositionParams;
    type Result = String;
    const METHOD: &'static str = "rust-analyzer/interpretFunction";
}
pub enum ViewFileText {}
impl Request for ViewFileText {

View File

@ -665,6 +665,7 @@ impl GlobalState {
.on::<lsp_ext::SyntaxTree>(handlers::handle_syntax_tree)
.on::<lsp_ext::ViewHir>(handlers::handle_view_hir)
.on::<lsp_ext::ViewMir>(handlers::handle_view_mir)
.on::<lsp_ext::InterpretFunction>(handlers::handle_interpret_function)
.on::<lsp_ext::ViewFileText>(handlers::handle_view_file_text)
.on::<lsp_ext::ViewCrateGraph>(handlers::handle_view_crate_graph)
.on::<lsp_ext::ViewItemTree>(handlers::handle_view_item_tree)

View File

@ -1215,6 +1215,14 @@ pub(crate) fn code_lens(
data: None,
})
}
if lens_config.interpret {
let command = command::interpret_single(&r);
acc.push(lsp_types::CodeLens {
range: annotation_range,
command: Some(command),
data: None,
})
}
}
AnnotationKind::HasImpls { pos: file_range, data } => {
if !client_commands_config.show_reference {
@ -1359,6 +1367,15 @@ pub(crate) mod command {
}
}
/// Builds the "Interpret" code-lens command that triggers the
/// `rust-analyzer.interpretFunction` client command.
pub(crate) fn interpret_single(_runnable: &lsp_ext::Runnable) -> lsp_types::Command {
    lsp_types::Command {
        title: "Interpret".into(),
        command: "rust-analyzer.interpretFunction".into(),
        // FIXME: use the `_runnable` here.
        arguments: Some(vec![]),
    }
}
pub(crate) fn goto_location(
snap: &GlobalStateSnapshot,
nav: &NavigationTarget,

View File

@ -11,6 +11,7 @@
//! add:
//! as_ref: sized
//! bool_impl: option, fn
//! cell: copy, drop
//! clone: sized
//! coerce_unsized: unsize
//! copy: clone
@ -139,6 +140,52 @@ pub mod hash {
}
// endregion:hash
// region:cell
// Minimal mirror of `core::cell` for test fixtures: just enough of
// `UnsafeCell` and `Cell` for analysis to work, not a full implementation.
pub mod cell {
    use crate::mem;

    // The lang attribute marks this as the interior-mutability primitive
    // that the analyzer special-cases.
    #[lang = "unsafe_cell"]
    pub struct UnsafeCell<T: ?Sized> {
        value: T,
    }

    impl<T> UnsafeCell<T> {
        pub const fn new(value: T) -> UnsafeCell<T> {
            UnsafeCell { value }
        }

        // Raw pointer to the wrapped value; the cast chain goes through
        // `*const` so no `&mut` is ever created from `&self`.
        pub const fn get(&self) -> *mut T {
            self as *const UnsafeCell<T> as *const T as *mut T
        }
    }

    pub struct Cell<T: ?Sized> {
        value: UnsafeCell<T>,
    }

    impl<T> Cell<T> {
        pub const fn new(value: T) -> Cell<T> {
            Cell { value: UnsafeCell::new(value) }
        }

        pub fn set(&self, val: T) {
            // Swap in the new value, then drop the old one explicitly.
            let old = self.replace(val);
            mem::drop(old);
        }

        pub fn replace(&self, val: T) -> T {
            mem::replace(unsafe { &mut *self.value.get() }, val)
        }
    }

    // `get` by copy is only available for `Copy` payloads, as in `core`.
    impl<T: Copy> Cell<T> {
        pub fn get(&self) -> T {
            unsafe { *self.value.get() }
        }
    }
}
// endregion:cell
// region:clone
pub mod clone {
#[lang = "clone"]
@ -220,6 +267,13 @@ pub mod mem {
// endregion:manually_drop
pub fn drop<T>(_x: T) {}
// Simplified stand-in for `core::mem::replace`: moves `src` into `*dest` and
// returns the previous value.
// NOTE(review): the move out of `*dest` would not compile under real rustc
// for non-`Copy` `T`; this is fixture code that only needs to be accepted by
// rust-analyzer's own analysis — confirm before reusing elsewhere.
pub const fn replace<T>(dest: &mut T, src: T) -> T {
    unsafe {
        let result = *dest;
        *dest = src;
        result
    }
}
}
// endregion:drop
@ -710,6 +764,14 @@ pub mod option {
None => default,
}
}
// region:result
/// Converts `Option<T>` into `Result<T, E>`, mapping `Some(v)` to `Ok(v)`
/// and `None` to `Err(err)`. Minimal mirror of `Option::ok_or` in `core`.
pub const fn ok_or<E>(self, err: E) -> Result<T, E> {
    match self {
        Some(v) => Ok(v),
        None => Err(err),
    }
}
// endregion:result
// region:fn
pub fn and_then<U, F>(self, f: F) -> Option<U>
where

View File

@ -1,5 +1,5 @@
<!---
lsp_ext.rs hash: be2f663a78beb7bd
lsp_ext.rs hash: 37ac44a0f507e05a
If you need to change the above hash to make the test pass, please check if you
need to adjust this doc as well and ping this issue:
@ -548,6 +548,18 @@ For debugging or when working on rust-analyzer itself.
Returns a textual representation of the MIR of the function containing the cursor.
For debugging or when working on rust-analyzer itself.
## Interpret Function
**Method:** `rust-analyzer/interpretFunction`
**Request:** `TextDocumentPositionParams`
**Response:** `string`
Tries to evaluate the function using rust-analyzer's internal knowledge, without
compiling the code. Currently it evaluates the function under the cursor, but in
the future it will accept a runnable as a parameter. Highly experimental.
## View File Text
**Method:** `rust-analyzer/viewFileText`

View File

@ -120,6 +120,11 @@
"title": "View Mir",
"category": "rust-analyzer (debug command)"
},
{
"command": "rust-analyzer.interpretFunction",
"title": "Interpret Function",
"category": "rust-analyzer (debug command)"
},
{
"command": "rust-analyzer.viewFileText",
"title": "View File Text (as seen by the server)",

View File

@ -422,8 +422,20 @@ export function syntaxTree(ctx: CtxInit): Cmd {
function viewHirOrMir(ctx: CtxInit, xir: "hir" | "mir"): Cmd {
const viewXir = xir === "hir" ? "viewHir" : "viewMir";
const requestType = xir === "hir" ? ra.viewHir : ra.viewMir;
const uri = `rust-analyzer-${xir}://${viewXir}/${xir}.rs`;
const scheme = `rust-analyzer-${xir}`;
return viewFileUsingTextDocumentContentProvider(ctx, requestType, uri, scheme, true);
}
function viewFileUsingTextDocumentContentProvider(
ctx: CtxInit,
requestType: lc.RequestType<lc.TextDocumentPositionParams, string, void>,
uri: string,
scheme: string,
shouldUpdate: boolean
): Cmd {
const tdcp = new (class implements vscode.TextDocumentContentProvider {
readonly uri = vscode.Uri.parse(`rust-analyzer-${xir}://${viewXir}/${xir}.rs`);
readonly uri = vscode.Uri.parse(uri);
readonly eventEmitter = new vscode.EventEmitter<vscode.Uri>();
constructor() {
vscode.workspace.onDidChangeTextDocument(
@ -439,14 +451,14 @@ function viewHirOrMir(ctx: CtxInit, xir: "hir" | "mir"): Cmd {
}
private onDidChangeTextDocument(event: vscode.TextDocumentChangeEvent) {
if (isRustDocument(event.document)) {
if (isRustDocument(event.document) && shouldUpdate) {
// We need to order this after language server updates, but there's no API for that.
// Hence, good old sleep().
void sleep(10).then(() => this.eventEmitter.fire(this.uri));
}
}
private onDidChangeActiveTextEditor(editor: vscode.TextEditor | undefined) {
if (editor && isRustEditor(editor)) {
if (editor && isRustEditor(editor) && shouldUpdate) {
this.eventEmitter.fire(this.uri);
}
}
@ -473,9 +485,7 @@ function viewHirOrMir(ctx: CtxInit, xir: "hir" | "mir"): Cmd {
}
})();
ctx.pushExtCleanup(
vscode.workspace.registerTextDocumentContentProvider(`rust-analyzer-${xir}`, tdcp)
);
ctx.pushExtCleanup(vscode.workspace.registerTextDocumentContentProvider(scheme, tdcp));
return async () => {
const document = await vscode.workspace.openTextDocument(tdcp.uri);
@ -501,6 +511,20 @@ export function viewMir(ctx: CtxInit): Cmd {
return viewHirOrMir(ctx, "mir");
}
// Opens the virtual file that will show the result of interpreting the function
// containing the cursor position
//
// The contents of the file come from the `TextDocumentContentProvider`
export function interpretFunction(ctx: CtxInit): Cmd {
    // Dedicated scheme/URI so the result opens in its own read-only virtual document.
    const uri = `rust-analyzer-interpret-function://interpretFunction/result.log`;
    return viewFileUsingTextDocumentContentProvider(
        ctx,
        ra.interpretFunction,
        uri,
        `rust-analyzer-interpret-function`,
        // shouldUpdate = false: interpretation can be slow, so do not re-run it
        // automatically on every document/editor change.
        false
    );
}
export function viewFileText(ctx: CtxInit): Cmd {
const tdcp = new (class implements vscode.TextDocumentContentProvider {
readonly uri = vscode.Uri.parse("rust-analyzer-file-text://viewFileText/file.rs");

View File

@ -64,6 +64,9 @@ export const viewHir = new lc.RequestType<lc.TextDocumentPositionParams, string,
export const viewMir = new lc.RequestType<lc.TextDocumentPositionParams, string, void>(
"rust-analyzer/viewMir"
);
// Client-side request type for the custom `rust-analyzer/interpretFunction`
// LSP extension (position in, interpretation output string out).
export const interpretFunction = new lc.RequestType<lc.TextDocumentPositionParams, string, void>(
    "rust-analyzer/interpretFunction"
);
export const viewItemTree = new lc.RequestType<ViewItemTreeParams, string, void>(
"rust-analyzer/viewItemTree"
);

View File

@ -159,6 +159,7 @@ function createCommands(): Record<string, CommandFactory> {
syntaxTree: { enabled: commands.syntaxTree },
viewHir: { enabled: commands.viewHir },
viewMir: { enabled: commands.viewMir },
interpretFunction: { enabled: commands.interpretFunction },
viewFileText: { enabled: commands.viewFileText },
viewItemTree: { enabled: commands.viewItemTree },
viewCrateGraph: { enabled: commands.viewCrateGraph },