Auto merge of #16527 - Veykril:salsa-no-self-ref, r=Veykril
internal: Remove SELF_REF hack for self referential SyntaxContexts This should reduce the amount of SyntaxContexts we allocate
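The core idea of the change: instead of interning a SyntaxContextData that carries a SELF_REF sentinel which every lookup has to translate back, the intern table now hands a closure the id that is about to be allocated, so the value can store a real self reference up front. Below is a minimal standalone sketch of that pattern; the Id/Ctx/Interner types are toy stand-ins for illustration only, not the actual salsa get_or_insert / InternValue plumbing touched by this PR.

use std::collections::HashMap;

// Toy stand-ins for salsa's intern id and intern table; illustrative only.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
struct Id(u32);

#[derive(Debug)]
struct Ctx {
    parent: Option<Id>,
    // Mirrors `opaque` / `opaque_and_semitransparent`, which may need to refer
    // to the very entry being allocated.
    opaque: Id,
}

#[derive(Default)]
struct Interner {
    // Entries are keyed by the fields that identify them (here just the parent),
    // in the spirit of the `InternValue::Key` indirection added in the diff.
    map: HashMap<Option<Id>, Id>,
    values: Vec<Ctx>,
}

impl Interner {
    // Interns the value produced by `make`, passing the closure the id the value
    // is about to receive so it can store a real self reference.
    fn get_or_insert(&mut self, key: Option<Id>, make: impl FnOnce(Id) -> Ctx) -> Id {
        if let Some(&id) = self.map.get(&key) {
            return id;
        }
        let id = Id(self.values.len() as u32);
        self.values.push(make(id));
        self.map.insert(key, id);
        id
    }
}

fn main() {
    let mut interner = Interner::default();
    let root = interner.get_or_insert(None, |id| Ctx { parent: None, opaque: id });
    let child = interner.get_or_insert(Some(root), |id| Ctx { parent: Some(root), opaque: id });
    // The entry refers to itself directly; no SELF_REF sentinel that lookups
    // would have to translate back afterwards.
    assert_eq!(interner.values[child.0 as usize].opaque, child);
}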
commit ddf105b646
@@ -70,7 +70,11 @@
panic::{RefUnwindSafe, UnwindSafe},
};

use base_db::{impl_intern_key, salsa, CrateId, Edition};
use base_db::{
impl_intern_key,
salsa::{self, impl_intern_value_trivial},
CrateId, Edition,
};
use hir_expand::{
ast_id_map::{AstIdNode, FileAstId},
builtin_attr_macro::BuiltinAttrExpander,
@@ -171,6 +175,7 @@ pub trait ItemTreeLoc {
macro_rules! impl_intern {
($id:ident, $loc:ident, $intern:ident, $lookup:ident) => {
impl_intern_key!($id);
impl_intern_value_trivial!($loc);
impl_intern_lookup!(DefDatabase, $id, $loc, $intern, $lookup);
};
}
@@ -490,6 +495,7 @@ pub struct TypeOrConstParamId {
pub parent: GenericDefId,
pub local_id: LocalTypeOrConstParamId,
}
impl_intern_value_trivial!(TypeOrConstParamId);

/// A TypeOrConstParamId with an invariant that it actually belongs to a type
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -551,6 +557,7 @@ pub struct LifetimeParamId {
pub local_id: LocalLifetimeParamId,
}
pub type LocalLifetimeParamId = Idx<generics::LifetimeParamData>;
impl_intern_value_trivial!(LifetimeParamId);

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum ItemContainerId {

@@ -35,9 +35,9 @@ struct $ident {
};
}

struct#0:1@58..64#2# MyTraitMap2#0:2@31..42#0# {#0:1@72..73#2#
map#0:1@86..89#2#:#0:1@89..90#2# #0:1@89..90#2#::#0:1@91..92#2#std#0:1@93..96#2#::#0:1@96..97#2#collections#0:1@98..109#2#::#0:1@109..110#2#HashSet#0:1@111..118#2#<#0:1@118..119#2#(#0:1@119..120#2#)#0:1@120..121#2#>#0:1@121..122#2#,#0:1@122..123#2#
}#0:1@132..133#2#
struct#0:1@58..64#1# MyTraitMap2#0:2@31..42#0# {#0:1@72..73#1#
map#0:1@86..89#1#:#0:1@89..90#1# #0:1@89..90#1#::#0:1@91..92#1#std#0:1@93..96#1#::#0:1@96..97#1#collections#0:1@98..109#1#::#0:1@109..110#1#HashSet#0:1@111..118#1#<#0:1@118..119#1#(#0:1@119..120#1#)#0:1@120..121#1#>#0:1@121..122#1#,#0:1@122..123#1#
}#0:1@132..133#1#
"#]],
);
}
@@ -171,7 +171,7 @@ macro_rules! identity {
}

fn main(foo: ()) {
/* error: unresolved macro unresolved */"helloworld!"#0:3@207..323#6#;
/* error: unresolved macro unresolved */"helloworld!"#0:3@207..323#2#;
}
}

@@ -197,7 +197,7 @@ macro_rules! mk_struct {
#[macro_use]
mod foo;

struct#1:1@59..65#2# Foo#0:2@32..35#0#(#1:1@70..71#2#u32#0:2@41..44#0#)#1:1@74..75#2#;#1:1@75..76#2#
struct#1:1@59..65#1# Foo#0:2@32..35#0#(#1:1@70..71#1#u32#0:2@41..44#0#)#1:1@74..75#1#;#1:1@75..76#1#
"#]],
);
}

@@ -7,9 +7,10 @@

use std::iter;

use base_db::salsa::{self, InternValue};
use span::{MacroCallId, Span, SyntaxContextId};

use crate::db::ExpandDatabase;
use crate::db::{ExpandDatabase, InternSyntaxContextQuery};

#[derive(Copy, Clone, Hash, PartialEq, Eq)]
pub struct SyntaxContextData {
@@ -22,6 +23,14 @@ pub struct SyntaxContextData {
pub opaque_and_semitransparent: SyntaxContextId,
}

impl InternValue for SyntaxContextData {
type Key = (SyntaxContextId, Option<MacroCallId>, Transparency);

fn into_key(&self) -> Self::Key {
(self.parent, self.outer_expn, self.outer_transparency)
}
}

impl std::fmt::Debug for SyntaxContextData {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("SyntaxContextData")
@@ -149,38 +158,36 @@ fn apply_mark_internal(
transparency: Transparency,
) -> SyntaxContextId {
let syntax_context_data = db.lookup_intern_syntax_context(ctxt);
let mut opaque = handle_self_ref(ctxt, syntax_context_data.opaque);
let mut opaque_and_semitransparent =
handle_self_ref(ctxt, syntax_context_data.opaque_and_semitransparent);
let mut opaque = syntax_context_data.opaque;
let mut opaque_and_semitransparent = syntax_context_data.opaque_and_semitransparent;

if transparency >= Transparency::Opaque {
let parent = opaque;
// Unlike rustc, with salsa we can't prefetch the to be allocated ID to create cycles with
// salsa when interning, so we use a sentinel value that effectively means the current
// syntax context.
let new_opaque = SyntaxContextId::SELF_REF;
opaque = db.intern_syntax_context(SyntaxContextData {
outer_expn: call_id,
outer_transparency: transparency,
parent,
opaque: new_opaque,
opaque_and_semitransparent: new_opaque,
});
opaque = salsa::plumbing::get_query_table::<InternSyntaxContextQuery>(db).get_or_insert(
(parent, call_id, transparency),
|new_opaque| SyntaxContextData {
outer_expn: call_id,
outer_transparency: transparency,
parent,
opaque: new_opaque,
opaque_and_semitransparent: new_opaque,
},
);
}

if transparency >= Transparency::SemiTransparent {
let parent = opaque_and_semitransparent;
// Unlike rustc, with salsa we can't prefetch the to be allocated ID to create cycles with
// salsa when interning, so we use a sentinel value that effectively means the current
// syntax context.
let new_opaque_and_semitransparent = SyntaxContextId::SELF_REF;
opaque_and_semitransparent = db.intern_syntax_context(SyntaxContextData {
outer_expn: call_id,
outer_transparency: transparency,
parent,
opaque,
opaque_and_semitransparent: new_opaque_and_semitransparent,
});
opaque_and_semitransparent =
salsa::plumbing::get_query_table::<InternSyntaxContextQuery>(db).get_or_insert(
(parent, call_id, transparency),
|new_opaque_and_semitransparent| SyntaxContextData {
outer_expn: call_id,
outer_transparency: transparency,
parent,
opaque,
opaque_and_semitransparent: new_opaque_and_semitransparent,
},
);
}

let parent = ctxt;
@@ -201,20 +208,12 @@ pub trait SyntaxContextExt {
fn marks(self, db: &dyn ExpandDatabase) -> Vec<(Option<MacroCallId>, Transparency)>;
}

#[inline(always)]
fn handle_self_ref(p: SyntaxContextId, n: SyntaxContextId) -> SyntaxContextId {
match n {
SyntaxContextId::SELF_REF => p,
_ => n,
}
}

impl SyntaxContextExt for SyntaxContextId {
fn normalize_to_macro_rules(self, db: &dyn ExpandDatabase) -> Self {
handle_self_ref(self, db.lookup_intern_syntax_context(self).opaque_and_semitransparent)
db.lookup_intern_syntax_context(self).opaque_and_semitransparent
}
fn normalize_to_macros_2_0(self, db: &dyn ExpandDatabase) -> Self {
handle_self_ref(self, db.lookup_intern_syntax_context(self).opaque)
db.lookup_intern_syntax_context(self).opaque
}
fn parent_ctxt(self, db: &dyn ExpandDatabase) -> Self {
db.lookup_intern_syntax_context(self).parent

@@ -30,7 +30,7 @@

use std::{fmt, hash::Hash};

use base_db::{CrateId, Edition, FileId};
use base_db::{salsa::impl_intern_value_trivial, CrateId, Edition, FileId};
use either::Either;
use span::{FileRange, HirFileIdRepr, Span, SyntaxContextId};
use syntax::{
@@ -176,6 +176,7 @@ pub struct MacroCallLoc {
pub kind: MacroCallKind,
pub call_site: Span,
}
impl_intern_value_trivial!(MacroCallLoc);

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct MacroDefId {

@@ -17,7 +17,7 @@
use hir_expand::name::name;

use crate::{
db::HirDatabase,
db::{HirDatabase, InternedCoroutine},
display::HirDisplay,
from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, make_binders,
make_single_type_binders,
@@ -428,7 +428,7 @@ fn coroutine_datum(
&self,
id: chalk_ir::CoroutineId<Interner>,
) -> Arc<chalk_solve::rust_ir::CoroutineDatum<Interner>> {
let (parent, expr) = self.db.lookup_intern_coroutine(id.into());
let InternedCoroutine(parent, expr) = self.db.lookup_intern_coroutine(id.into());

// We fill substitution with unknown type, because we only need to know whether the generic
// params are types or consts to build `Binders` and those being filled up are for
@@ -473,7 +473,7 @@ fn coroutine_witness_datum(
let inner_types =
rust_ir::CoroutineWitnessExistential { types: wrap_empty_binders(vec![]) };

let (parent, _) = self.db.lookup_intern_coroutine(id.into());
let InternedCoroutine(parent, _) = self.db.lookup_intern_coroutine(id.into());
// See the comment in `coroutine_datum()` for unknown types.
let subst = TyBuilder::subst_for_coroutine(self.db, parent).fill_with_unknown().build();
let it = subst

@@ -3,7 +3,11 @@

use std::sync;

use base_db::{impl_intern_key, salsa, CrateId, Upcast};
use base_db::{
impl_intern_key,
salsa::{self, impl_intern_value_trivial},
CrateId, Upcast,
};
use hir_def::{
db::DefDatabase, hir::ExprId, layout::TargetDataLayout, AdtId, BlockId, ConstParamId,
DefWithBodyId, EnumVariantId, FunctionId, GeneralConstId, GenericDefId, ImplId,
@@ -199,9 +203,9 @@ fn intern_type_or_const_param_id(
#[salsa::interned]
fn intern_impl_trait_id(&self, id: ImplTraitId) -> InternedOpaqueTyId;
#[salsa::interned]
fn intern_closure(&self, id: (DefWithBodyId, ExprId)) -> InternedClosureId;
fn intern_closure(&self, id: InternedClosure) -> InternedClosureId;
#[salsa::interned]
fn intern_coroutine(&self, id: (DefWithBodyId, ExprId)) -> InternedCoroutineId;
fn intern_coroutine(&self, id: InternedCoroutine) -> InternedCoroutineId;

#[salsa::invoke(chalk_db::associated_ty_data_query)]
fn associated_ty_data(
@@ -337,10 +341,18 @@ fn _assert_object_safe(_: &dyn HirDatabase) {}
pub struct InternedClosureId(salsa::InternId);
impl_intern_key!(InternedClosureId);

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct InternedClosure(pub DefWithBodyId, pub ExprId);
impl_intern_value_trivial!(InternedClosure);

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct InternedCoroutineId(salsa::InternId);
impl_intern_key!(InternedCoroutineId);

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct InternedCoroutine(pub DefWithBodyId, pub ExprId);
impl_intern_value_trivial!(InternedCoroutine);

/// This exists just for Chalk, because Chalk just has a single `FnDefId` where
/// we have different IDs for struct and enum variant constructors.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]

@@ -32,7 +32,7 @@

use crate::{
consteval::try_const_usize,
db::HirDatabase,
db::{HirDatabase, InternedClosure},
from_assoc_type_id, from_foreign_def_id, from_placeholder_idx,
layout::Layout,
lt_from_placeholder_idx,
@@ -1085,7 +1085,7 @@ fn hir_fmt(
}
let sig = ClosureSubst(substs).sig_ty().callable_sig(db);
if let Some(sig) = sig {
let (def, _) = db.lookup_intern_closure((*id).into());
let InternedClosure(def, _) = db.lookup_intern_closure((*id).into());
let infer = db.infer(def);
let (_, kind) = infer.closure_info(id);
match f.closure_style {

@@ -21,7 +21,7 @@
use stdx::never;

use crate::{
db::HirDatabase,
db::{HirDatabase, InternedClosure},
from_placeholder_idx, make_binders,
mir::{BorrowKind, MirSpan, ProjectionElem},
static_lifetime, to_chalk_trait_id,
@@ -716,7 +716,7 @@ fn expr_ty_after_adjustments(&self, e: ExprId) -> Ty {

fn is_upvar(&self, place: &HirPlace) -> bool {
if let Some(c) = self.current_closure {
let (_, root) = self.db.lookup_intern_closure(c.into());
let InternedClosure(_, root) = self.db.lookup_intern_closure(c.into());
return self.body.is_binding_upvar(place.local, root);
}
false
@@ -938,7 +938,7 @@ fn closure_kind(&self) -> FnTrait {
}

fn analyze_closure(&mut self, closure: ClosureId) -> FnTrait {
let (_, root) = self.db.lookup_intern_closure(closure.into());
let InternedClosure(_, root) = self.db.lookup_intern_closure(closure.into());
self.current_closure = Some(closure);
let Expr::Closure { body, capture_by, .. } = &self.body[root] else {
unreachable!("Closure expression id is always closure");

@@ -23,6 +23,7 @@
use crate::{
autoderef::{builtin_deref, deref_by_trait, Autoderef},
consteval,
db::{InternedClosure, InternedCoroutine},
infer::{
coerce::{CoerceMany, CoercionCause},
find_continuable,
@@ -253,13 +254,17 @@ fn infer_expr_inner(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty {
.push(ret_ty.clone())
.build();

let coroutine_id = self.db.intern_coroutine((self.owner, tgt_expr)).into();
let coroutine_id = self
.db
.intern_coroutine(InternedCoroutine(self.owner, tgt_expr))
.into();
let coroutine_ty = TyKind::Coroutine(coroutine_id, subst).intern(Interner);

(None, coroutine_ty, Some((resume_ty, yield_ty)))
}
ClosureKind::Closure | ClosureKind::Async => {
let closure_id = self.db.intern_closure((self.owner, tgt_expr)).into();
let closure_id =
self.db.intern_closure(InternedClosure(self.owner, tgt_expr)).into();
let closure_ty = TyKind::Closure(
closure_id,
TyBuilder::subst_for_closure(self.db, self.owner, sig_ty.clone()),

@@ -19,8 +19,12 @@
use triomphe::Arc;

use crate::{
consteval::try_const_usize, db::HirDatabase, infer::normalize, layout::adt::struct_variant_idx,
utils::ClosureSubst, Interner, ProjectionTy, Substitution, TraitEnvironment, Ty,
consteval::try_const_usize,
db::{HirDatabase, InternedClosure},
infer::normalize,
layout::adt::struct_variant_idx,
utils::ClosureSubst,
Interner, ProjectionTy, Substitution, TraitEnvironment, Ty,
};

pub use self::{
@@ -391,7 +395,7 @@ pub fn layout_of_ty_query(
}
}
TyKind::Closure(c, subst) => {
let (def, _) = db.lookup_intern_closure((*c).into());
let InternedClosure(def, _) = db.lookup_intern_closure((*c).into());
let infer = db.infer(def);
let (captures, _) = infer.closure_info(c);
let fields = captures

@@ -51,6 +51,7 @@
hash::{BuildHasherDefault, Hash},
};

use base_db::salsa::impl_intern_value_trivial;
use chalk_ir::{
fold::{Shift, TypeFoldable},
interner::HasInterner,
@@ -586,6 +587,7 @@ pub enum ImplTraitId {
ReturnTypeImplTrait(hir_def::FunctionId, RpitId),
AsyncBlockTypeImplTrait(hir_def::DefWithBodyId, ExprId),
}
impl_intern_value_trivial!(ImplTraitId);

#[derive(Clone, PartialEq, Eq, Debug, Hash)]
pub struct ReturnTypeImplTraits {

@@ -10,7 +10,10 @@
iter,
};

use base_db::{salsa::Cycle, CrateId};
use base_db::{
salsa::{impl_intern_value_trivial, Cycle},
CrateId,
};
use chalk_ir::{
cast::Cast, fold::Shift, fold::TypeFoldable, interner::HasInterner, Mutability, Safety,
};
@@ -1809,6 +1812,7 @@ pub enum CallableDefId {
StructId(StructId),
EnumVariantId(EnumVariantId),
}
impl_intern_value_trivial!(CallableDefId);
impl_from!(FunctionId, StructId, EnumVariantId for CallableDefId);
impl From<CallableDefId> for ModuleDefId {
fn from(def: CallableDefId) -> ModuleDefId {

@@ -11,7 +11,10 @@
use triomphe::Arc;

use crate::{
db::HirDatabase, mir::Operand, utils::ClosureSubst, ClosureId, Interner, Ty, TyExt, TypeFlags,
db::{HirDatabase, InternedClosure},
mir::Operand,
utils::ClosureSubst,
ClosureId, Interner, Ty, TyExt, TypeFlags,
};

use super::{
@@ -97,7 +100,7 @@ fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec<MovedOutOfRef>
ty,
db,
|c, subst, f| {
let (def, _) = db.lookup_intern_closure(c.into());
let InternedClosure(def, _) = db.lookup_intern_closure(c.into());
let infer = db.infer(def);
let (captures, _) = infer.closure_info(&c);
let parent_subst = ClosureSubst(subst).parent_subst();
@@ -215,7 +218,7 @@ fn place_case(db: &dyn HirDatabase, body: &MirBody, lvalue: &Place) -> Projectio
ty,
db,
|c, subst, f| {
let (def, _) = db.lookup_intern_closure(c.into());
let InternedClosure(def, _) = db.lookup_intern_closure(c.into());
let infer = db.infer(def);
let (captures, _) = infer.closure_info(&c);
let parent_subst = ClosureSubst(subst).parent_subst();

@@ -25,7 +25,7 @@

use crate::{
consteval::{intern_const_scalar, try_const_usize, ConstEvalError},
db::HirDatabase,
db::{HirDatabase, InternedClosure},
display::{ClosureStyle, HirDisplay},
infer::PointerCast,
layout::{Layout, LayoutError, RustcEnumVariantIdx},
@@ -647,7 +647,7 @@ fn projected_ty(&self, ty: Ty, proj: PlaceElem) -> Ty {
ty.clone(),
self.db,
|c, subst, f| {
let (def, _) = self.db.lookup_intern_closure(c.into());
let InternedClosure(def, _) = self.db.lookup_intern_closure(c.into());
let infer = self.db.infer(def);
let (captures, _) = infer.closure_info(&c);
let parent_subst = ClosureSubst(subst).parent_subst();

@@ -178,7 +178,7 @@ fn exec_clone(
not_supported!("wrong arg count for clone");
};
let addr = Address::from_bytes(arg.get(self)?)?;
let (closure_owner, _) = self.db.lookup_intern_closure((*id).into());
let InternedClosure(closure_owner, _) = self.db.lookup_intern_closure((*id).into());
let infer = self.db.infer(closure_owner);
let (captures, _) = infer.closure_info(id);
let layout = self.layout(&self_ty)?;

@@ -25,7 +25,7 @@

use crate::{
consteval::ConstEvalError,
db::HirDatabase,
db::{HirDatabase, InternedClosure},
display::HirDisplay,
infer::{CaptureKind, CapturedItem, TypeMismatch},
inhabitedness::is_ty_uninhabited_from,
@@ -1977,7 +1977,7 @@ pub fn mir_body_for_closure_query(
db: &dyn HirDatabase,
closure: ClosureId,
) -> Result<Arc<MirBody>> {
let (owner, expr) = db.lookup_intern_closure(closure.into());
let InternedClosure(owner, expr) = db.lookup_intern_closure(closure.into());
let body = db.body(owner);
let infer = db.infer(owner);
let Expr::Closure { args, body: root, .. } = &body[expr] else {

@@ -19,7 +19,7 @@

use crate::{
consteval::{intern_const_scalar, unknown_const},
db::HirDatabase,
db::{HirDatabase, InternedClosure},
from_placeholder_idx,
infer::normalize,
utils::{generics, Generics},
@@ -315,7 +315,7 @@ pub fn monomorphized_mir_body_for_closure_query(
subst: Substitution,
trait_env: Arc<crate::TraitEnvironment>,
) -> Result<Arc<MirBody>, MirLowerError> {
let (owner, _) = db.lookup_intern_closure(closure.into());
let InternedClosure(owner, _) = db.lookup_intern_closure(closure.into());
let generics = owner.as_generic_def_id().map(|g_def| generics(db.upcast(), g_def));
let filler = &mut Filler { db, subst: &subst, trait_env, generics, owner };
let body = db.mir_body_for_closure(closure)?;

@@ -62,6 +62,7 @@
use hir_ty::{
all_super_traits, autoderef, check_orphan_rules,
consteval::{try_const_usize, unknown_const_as_generic, ConstExt},
db::InternedClosure,
diagnostics::BodyValidationDiagnostic,
known_const_to_ast,
layout::{Layout as TyLayout, RustcEnumVariantIdx, RustcFieldIdx, TagEncoding},
@@ -4499,7 +4500,7 @@ pub fn sig(&self) -> &CallableSig {
}

fn closure_source(db: &dyn HirDatabase, closure: ClosureId) -> Option<ast::ClosureExpr> {
let (owner, expr_id) = db.lookup_intern_closure(closure.into());
let InternedClosure(owner, expr_id) = db.lookup_intern_closure(closure.into());
let (_, source_map) = db.body_with_source_map(owner);
let ast = source_map.expr_syntax(expr_id).ok()?;
let root = ast.file_syntax(db.upcast());

@@ -8,6 +8,7 @@
use crate::plumbing::QueryStorageOps;
use crate::revision::Revision;
use crate::Query;
use crate::QueryTable;
use crate::{Database, DatabaseKeyIndex, QueryDb};
use parking_lot::RwLock;
use rustc_hash::FxHashMap;
@@ -24,10 +25,11 @@
pub struct InternedStorage<Q>
where
Q: Query,
Q::Key: InternValue,
Q::Value: InternKey,
{
group_index: u16,
tables: RwLock<InternTables<Q::Key>>,
tables: RwLock<InternTables<MappedKey<Q>, Q::Key>>,
}

/// Storage for the looking up interned things.
@@ -35,17 +37,17 @@ pub struct LookupInternedStorage<Q, IQ>
where
Q: Query,
Q::Key: InternKey,
Q::Value: Eq + Hash,
Q::Value: InternValue,
{
phantom: std::marker::PhantomData<(Q::Key, IQ)>,
}

struct InternTables<K> {
struct InternTables<K, V> {
/// Map from the key to the corresponding intern-index.
map: FxHashMap<K, InternId>,

/// For each valid intern-index, stores the interned value.
values: Vec<Arc<Slot<K>>>,
values: Vec<Arc<Slot<V>>>,
}

/// Trait implemented for the "key" that results from a
@@ -69,13 +71,62 @@ fn as_intern_id(&self) -> InternId {
}
}

/// Trait implemented for the "value" that is being interned.
pub trait InternValue {
/// They key used to intern this value by.
type Key: Eq + Hash + Debug + Clone;
/// Maps the value to a key that will be used to intern it.
fn into_key(&self) -> Self::Key;
/// Calls the given function with the key that was used to intern this value.
///
/// This is mainly used to prevent frequent cloning of the key when doing a lookup.
#[inline]
fn with_key<F: FnOnce(&Self::Key) -> T, T>(&self, f: F) -> T {
f(&self.into_key())
}
}

impl<A: InternValue + Eq + Hash + Debug + Clone, B: InternValue + Eq + Hash + Debug + Clone>
InternValue for (A, B)
{
type Key = Self;
#[inline]
fn into_key(&self) -> Self::Key {
self.clone()
}
#[inline]
fn with_key<F: FnOnce(&Self::Key) -> T, T>(&self, f: F) -> T {
f(self)
}
}

/// Implement [`InternValue`] trivially, that is without actually mapping at all.
#[macro_export]
macro_rules! impl_intern_value_trivial {
($($ty:ty),*) => {
$(
impl $crate::InternValue for $ty {
type Key = $ty;
#[inline]
fn into_key(&self) -> Self::Key {
self.clone()
}
#[inline]
fn with_key<F: FnOnce(&Self::Key) -> T, T>(&self, f: F) -> T {
f(self)
}
}
)*
};
}
impl_intern_value_trivial!(String);
#[derive(Debug)]
struct Slot<K> {
struct Slot<V> {
/// DatabaseKeyIndex for this slot.
database_key_index: DatabaseKeyIndex,

/// Value that was interned.
value: K,
value: V,

/// When was this intern'd?
///
@@ -86,27 +137,28 @@ struct Slot<K> {
impl<Q> std::panic::RefUnwindSafe for InternedStorage<Q>
where
Q: Query,
Q::Key: InternValue,
Q::Key: std::panic::RefUnwindSafe,
Q::Value: InternKey,
Q::Value: std::panic::RefUnwindSafe,
{
}

impl<K: Debug + Hash + Eq> InternTables<K> {
impl<K: Debug + Hash + Eq, V> InternTables<K, V> {
/// Returns the slot for the given key.
fn slot_for_key(&self, key: &K) -> Option<(Arc<Slot<K>>, InternId)> {
fn slot_for_key(&self, key: &K) -> Option<(Arc<Slot<V>>, InternId)> {
let &index = self.map.get(key)?;
Some((self.slot_for_index(index), index))
}

/// Returns the slot at the given index.
fn slot_for_index(&self, index: InternId) -> Arc<Slot<K>> {
fn slot_for_index(&self, index: InternId) -> Arc<Slot<V>> {
let slot = &self.values[index.as_usize()];
slot.clone()
}
}

impl<K> Default for InternTables<K>
impl<K, V> Default for InternTables<K, V>
where
K: Eq + Hash,
{
@@ -115,29 +167,26 @@ fn default() -> Self {
}
}

type MappedKey<Q> = <<Q as Query>::Key as InternValue>::Key;

impl<Q> InternedStorage<Q>
where
Q: Query,
Q::Key: Eq + Hash + Clone,
Q::Key: InternValue,
Q::Value: InternKey,
{
/// If `key` has already been interned, returns its slot. Otherwise, creates a new slot.
/// Creates a new slot.
fn intern_index(
&self,
db: &<Q as QueryDb<'_>>::DynDb,
key: &Q::Key,
mapped_key: MappedKey<Q>,
insert: impl FnOnce(Q::Value) -> Q::Key,
) -> (Arc<Slot<Q::Key>>, InternId) {
if let Some(i) = self.intern_check(key) {
return i;
}

let owned_key1 = key.to_owned();
let owned_key2 = owned_key1.clone();
let revision_now = db.salsa_runtime().current_revision();

let mut tables = self.tables.write();
let tables = &mut *tables;
let entry = match tables.map.entry(owned_key1) {
let entry = match tables.map.entry(mapped_key) {
Entry::Vacant(entry) => entry,
Entry::Occupied(entry) => {
// Somebody inserted this key while we were waiting
@@ -146,7 +195,6 @@ fn intern_index(
// have already done so!
let index = *entry.get();
let slot = &tables.values[index.as_usize()];
debug_assert_eq!(owned_key2, slot.value);
return (slot.clone(), index);
}
};
@@ -157,19 +205,22 @@ fn intern_index(
query_index: Q::QUERY_INDEX,
key_index: index.as_u32(),
};
Arc::new(Slot { database_key_index, value: owned_key2, interned_at: revision_now })
Arc::new(Slot {
database_key_index,
value: insert(Q::Value::from_intern_id(index)),
interned_at: revision_now,
})
};

let (slot, index);
index = InternId::from(tables.values.len());
slot = create_slot(index);
let index = InternId::from(tables.values.len());
let slot = create_slot(index);
tables.values.push(slot.clone());
entry.insert(index);

(slot, index)
}

fn intern_check(&self, key: &Q::Key) -> Option<(Arc<Slot<Q::Key>>, InternId)> {
fn intern_check(&self, key: &MappedKey<Q>) -> Option<(Arc<Slot<Q::Key>>, InternId)> {
self.tables.read().slot_for_key(key)
}

@@ -178,11 +229,32 @@ fn intern_check(&self, key: &Q::Key) -> Option<(Arc<Slot<Q::Key>>, InternId)> {
fn lookup_value(&self, index: InternId) -> Arc<Slot<Q::Key>> {
self.tables.read().slot_for_index(index)
}

fn fetch_or_insert(
&self,
db: &<Q as QueryDb<'_>>::DynDb,
key: MappedKey<Q>,
insert: impl FnOnce(Q::Value) -> Q::Key,
) -> Q::Value {
db.unwind_if_cancelled();
let (slot, index) = match self.intern_check(&key) {
Some(i) => i,
None => self.intern_index(db, key, insert),
};
let changed_at = slot.interned_at;
db.salsa_runtime().report_query_read_and_unwind_if_cycle_resulted(
slot.database_key_index,
INTERN_DURABILITY,
changed_at,
);
<Q::Value>::from_intern_id(index)
}
}

impl<Q> QueryStorageOps<Q> for InternedStorage<Q>
where
Q: Query,
Q::Key: InternValue,
Q::Value: InternKey,
{
const CYCLE_STRATEGY: crate::plumbing::CycleRecoveryStrategy = CycleRecoveryStrategy::Panic;
@@ -220,7 +292,11 @@ fn maybe_changed_after(

fn fetch(&self, db: &<Q as QueryDb<'_>>::DynDb, key: &Q::Key) -> Q::Value {
db.unwind_if_cancelled();
let (slot, index) = self.intern_index(db, key);

let (slot, index) = match key.with_key(|key| self.intern_check(key)) {
Some(i) => i,
None => self.intern_index(db, key.into_key(), |_| key.clone()),
};
let changed_at = slot.interned_at;
db.salsa_runtime().report_query_read_and_unwind_if_cycle_resulted(
slot.database_key_index,
@@ -241,9 +317,12 @@ fn entries<C>(&self, _db: &<Q as QueryDb<'_>>::DynDb) -> C
let tables = self.tables.read();
tables
.map
.iter()
.map(|(key, index)| {
TableEntry::new(key.clone(), Some(<Q::Value>::from_intern_id(*index)))
.values()
.map(|index| {
TableEntry::new(
tables.values[index.as_usize()].value.clone(),
Some(<Q::Value>::from_intern_id(*index)),
)
})
.collect()
}
@@ -252,6 +331,7 @@ fn entries<C>(&self, _db: &<Q as QueryDb<'_>>::DynDb) -> C
impl<Q> QueryStorageMassOps for InternedStorage<Q>
where
Q: Query,
Q::Key: InternValue,
Q::Value: InternKey,
{
fn purge(&self) {
@@ -296,7 +376,7 @@ impl<Q, IQ> QueryStorageOps<Q> for LookupInternedStorage<Q, IQ>
where
Q: Query,
Q::Key: InternKey,
Q::Value: Eq + Hash,
Q::Value: InternValue,
IQ: Query<Key = Q::Value, Value = Q::Key, Storage = InternedStorage<IQ>>,
for<'d> Q: EqualDynDb<'d, IQ>,
{
@@ -360,9 +440,12 @@ fn entries<C>(&self, db: &<Q as QueryDb<'_>>::DynDb) -> C
let tables = interned_storage.tables.read();
tables
.map
.iter()
.map(|(key, index)| {
TableEntry::new(<Q::Key>::from_intern_id(*index), Some(key.clone()))
.values()
.map(|index| {
TableEntry::new(
<Q::Key>::from_intern_id(*index),
Some(tables.values[index.as_usize()].value.clone()),
)
})
.collect()
}
@@ -372,7 +455,7 @@ impl<Q, IQ> QueryStorageMassOps for LookupInternedStorage<Q, IQ>
where
Q: Query,
Q::Key: InternKey,
Q::Value: Eq + Hash,
Q::Value: InternValue,
IQ: Query<Key = Q::Value, Value = Q::Key>,
{
fn purge(&self) {}
@@ -407,3 +490,19 @@ fn check_static<K>()
fn is_static<T: 'static>() {}
is_static::<Slot<K>>();
}

impl<'me, Q> QueryTable<'me, Q>
where
Q: Query<Storage = InternedStorage<Q>>,
Q::Key: InternValue,
Q::Value: InternKey,
{
/// Fetches the intern id for the given key or inserts it if it does not exist.
pub fn get_or_insert(
&self,
key: MappedKey<Q>,
insert: impl FnOnce(Q::Value) -> Q::Key,
) -> Q::Value {
self.storage.fetch_or_insert(self.db, key, insert)
}
}

@@ -42,7 +42,7 @@

pub use crate::durability::Durability;
pub use crate::intern_id::InternId;
pub use crate::interned::InternKey;
pub use crate::interned::{InternKey, InternValue};
pub use crate::runtime::Runtime;
pub use crate::runtime::RuntimeId;
pub use crate::storage::Storage;

@@ -68,26 +68,9 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
}
}

#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct SyntaxContextId(InternId);

impl fmt::Debug for SyntaxContextId {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
if *self == Self::SELF_REF {
f.debug_tuple("SyntaxContextId")
.field(&{
#[derive(Debug)]
#[allow(non_camel_case_types)]
struct SELF_REF;
SELF_REF
})
.finish()
} else {
f.debug_tuple("SyntaxContextId").field(&self.0).finish()
}
}
}

impl salsa::InternKey for SyntaxContextId {
fn from_intern_id(v: salsa::InternId) -> Self {
SyntaxContextId(v)
@@ -106,10 +89,6 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// inherent trait impls please tyvm
impl SyntaxContextId {
pub const ROOT: Self = SyntaxContextId(unsafe { InternId::new_unchecked(0) });
// veykril(HACK): FIXME salsa doesn't allow us fetching the id of the current input to be allocated so
// we need a special value that behaves as the current context.
pub const SELF_REF: Self =
SyntaxContextId(unsafe { InternId::new_unchecked(InternId::MAX - 1) });

pub fn is_root(self) -> bool {
self == Self::ROOT