Allow setting a query's result as a side effect.

This commit is contained in:
Camille GILLOT 2022-03-22 21:45:32 +01:00
parent ca42dd6716
commit 5471381349
4 changed files with 171 additions and 1 deletion

View File

@ -114,6 +114,9 @@ struct QueryModifiers {
/// Always remap the ParamEnv's constness before hashing. /// Always remap the ParamEnv's constness before hashing.
remap_env_constness: Option<Ident>, remap_env_constness: Option<Ident>,
/// Generate a `feed` method to set the query's value from another query.
feedable: Option<Ident>,
} }
fn parse_query_modifiers(input: ParseStream<'_>) -> Result<QueryModifiers> { fn parse_query_modifiers(input: ParseStream<'_>) -> Result<QueryModifiers> {
@ -128,6 +131,7 @@ fn parse_query_modifiers(input: ParseStream<'_>) -> Result<QueryModifiers> {
let mut depth_limit = None; let mut depth_limit = None;
let mut separate_provide_extern = None; let mut separate_provide_extern = None;
let mut remap_env_constness = None; let mut remap_env_constness = None;
let mut feedable = None;
while !input.is_empty() { while !input.is_empty() {
let modifier: Ident = input.parse()?; let modifier: Ident = input.parse()?;
@ -187,6 +191,8 @@ macro_rules! try_insert {
try_insert!(separate_provide_extern = modifier); try_insert!(separate_provide_extern = modifier);
} else if modifier == "remap_env_constness" { } else if modifier == "remap_env_constness" {
try_insert!(remap_env_constness = modifier); try_insert!(remap_env_constness = modifier);
} else if modifier == "feedable" {
try_insert!(feedable = modifier);
} else { } else {
return Err(Error::new(modifier.span(), "unknown query modifier")); return Err(Error::new(modifier.span(), "unknown query modifier"));
} }
@ -206,6 +212,7 @@ macro_rules! try_insert {
depth_limit, depth_limit,
separate_provide_extern, separate_provide_extern,
remap_env_constness, remap_env_constness,
feedable,
}) })
} }
@ -296,6 +303,7 @@ pub fn rustc_queries(input: TokenStream) -> TokenStream {
let mut query_stream = quote! {}; let mut query_stream = quote! {};
let mut query_description_stream = quote! {}; let mut query_description_stream = quote! {};
let mut query_cached_stream = quote! {}; let mut query_cached_stream = quote! {};
let mut feedable_queries = quote! {};
for query in queries.0 { for query in queries.0 {
let Query { name, arg, modifiers, .. } = &query; let Query { name, arg, modifiers, .. } = &query;
@ -350,6 +358,13 @@ macro_rules! passthrough {
[#attribute_stream] fn #name(#arg) #result, [#attribute_stream] fn #name(#arg) #result,
}); });
if modifiers.feedable.is_some() {
feedable_queries.extend(quote! {
#(#doc_comments)*
[#attribute_stream] fn #name(#arg) #result,
});
}
add_query_desc_cached_impl(&query, &mut query_description_stream, &mut query_cached_stream); add_query_desc_cached_impl(&query, &mut query_description_stream, &mut query_cached_stream);
} }
@ -363,7 +378,11 @@ macro_rules! rustc_query_append {
} }
} }
} }
macro_rules! rustc_feedable_queries {
( $macro:ident! ) => {
$macro!(#feedable_queries);
}
}
pub mod descs { pub mod descs {
use super::*; use super::*;
#query_description_stream #query_description_stream

View File

@ -165,6 +165,7 @@
} }
cache_on_disk_if { key.is_local() } cache_on_disk_if { key.is_local() }
separate_provide_extern separate_provide_extern
feedable
} }
query collect_trait_impl_trait_tys(key: DefId) query collect_trait_impl_trait_tys(key: DefId)

View File

@ -85,6 +85,11 @@ pub struct TyCtxtEnsure<'tcx> {
pub tcx: TyCtxt<'tcx>, pub tcx: TyCtxt<'tcx>,
} }
/// A transparent wrapper around `TyCtxt`, returned by `TyCtxt::feed`.
/// The methods on this type (generated by `define_feedable!`, one per
/// `feedable` query) record a value directly into the query cache as if
/// the query had been executed.
#[derive(Copy, Clone)]
pub struct TyCtxtFeed<'tcx> {
// The type context whose query caches receive the fed values.
pub tcx: TyCtxt<'tcx>,
}
impl<'tcx> TyCtxt<'tcx> { impl<'tcx> TyCtxt<'tcx> {
/// Returns a transparent wrapper for `TyCtxt`, which ensures queries /// Returns a transparent wrapper for `TyCtxt`, which ensures queries
/// are executed instead of just returning their results. /// are executed instead of just returning their results.
@ -93,6 +98,12 @@ pub fn ensure(self) -> TyCtxtEnsure<'tcx> {
TyCtxtEnsure { tcx: self } TyCtxtEnsure { tcx: self }
} }
/// Returns a transparent wrapper for `TyCtxt`, for setting a result into a query.
///
/// The returned `TyCtxtFeed` exposes one method per query marked `feedable`;
/// calling such a method stores the given value in that query's cache (and
/// records a dep-graph node for it), instead of computing it via the provider.
#[inline(always)]
pub fn feed(self) -> TyCtxtFeed<'tcx> {
TyCtxtFeed { tcx: self }
}
/// Returns a transparent wrapper for `TyCtxt` which uses /// Returns a transparent wrapper for `TyCtxt` which uses
/// `span` as the location of queries performed through it. /// `span` as the location of queries performed through it.
#[inline(always)] #[inline(always)]
@ -175,6 +186,18 @@ macro_rules! opt_remap_env_constness {
}; };
} }
// Selects the result-hashing function for a query from its modifier list:
// the default `dep_graph::hash_result`, or `None` when the query is marked
// `no_hash`. Works by recursively "munching" one modifier token tree at a
// time until a base case matches.
macro_rules! hash_result {
// Base case: no modifiers left — use the default result hasher.
([]) => {{
Some(dep_graph::hash_result)
}};
// `no_hash` found: the query's result must not be fingerprinted.
([(no_hash) $($rest:tt)*]) => {{
None
}};
// Any other modifier: skip it and keep scanning the remaining tokens.
([$other:tt $($modifiers:tt)*]) => {
hash_result!([$($modifiers)*])
};
}
macro_rules! define_callbacks { macro_rules! define_callbacks {
( (
$($(#[$attr:meta])* $($(#[$attr:meta])*
@ -327,6 +350,50 @@ fn $name(
}; };
} }
// Generates the inherent methods of `TyCtxtFeed`, one per `feedable` query
// (this macro is invoked through `rustc_feedable_queries!`). Each generated
// method records `value` as the query's result for `key`, creating a
// dep-graph node for it as if the query had actually run.
macro_rules! define_feedable {
($($(#[$attr:meta])* [$($modifiers:tt)*] fn $name:ident($($K:tt)*) -> $V:ty,)*) => {
impl<'tcx> TyCtxtFeed<'tcx> {
$($(#[$attr])*
#[inline(always)]
pub fn $name(
self,
key: query_helper_param_ty!($($K)*),
value: $V,
) -> query_stored::$name<'tcx> {
let key = key.into_query_param();
// Apply the same key normalization as a normal query call.
opt_remap_env_constness!([$($modifiers)*][key]);
let tcx = self.tcx;
let cache = &tcx.query_caches.$name;
// If a result is already recorded for this key, feeding is only
// legal when the new value agrees with it; return the cached one.
let cached = try_get_cached(tcx, cache, &key, copy);
match cached {
Ok(old) => {
assert_eq!(
value, old,
"Trying to feed an already recorded value for query {} key={key:?}",
stringify!($name),
);
return old;
}
Err(()) => (),
}
// Create a dep-graph node for the fed value so incremental
// compilation tracks it like a normally-executed query.
// NOTE(review): the `unwrap()` below panics if the query is
// marked `no_hash` — presumably feedable queries are required
// to have hashable results; confirm against the query list.
let dep_node = dep_graph::DepNode::construct(tcx, dep_graph::DepKind::$name, &key);
let dep_node_index = tcx.dep_graph.with_feed_task(
dep_node,
tcx,
key,
&value,
hash_result!([$($modifiers)*]).unwrap(),
);
// Store the value and its dep-node index in the query cache.
cache.complete(key, value, dep_node_index)
})*
}
}
}
// Each of these queries corresponds to a function pointer field in the // Each of these queries corresponds to a function pointer field in the
// `Providers` struct for requesting a value of that type, and a method // `Providers` struct for requesting a value of that type, and a method
// on `tcx: TyCtxt` (and `tcx.at(span)`) for doing that request in a way // on `tcx: TyCtxt` (and `tcx.at(span)`) for doing that request in a way
@ -340,6 +407,7 @@ fn $name(
// as they will raise an fatal error on query cycles instead. // as they will raise an fatal error on query cycles instead.
rustc_query_append! { define_callbacks! } rustc_query_append! { define_callbacks! }
rustc_feedable_queries! { define_feedable! }
mod sealed { mod sealed {
use super::{DefId, LocalDefId, OwnerId}; use super::{DefId, LocalDefId, OwnerId};

View File

@ -489,6 +489,88 @@ pub fn read_index(&self, dep_node_index: DepNodeIndex) {
} }
} }
/// Create a node when we force-feed a value into the query cache.
/// This is used to remove cycles during type-checking const generic parameters.
///
/// As usual in the query system, we consider the current state of the calling query
/// only depends on the list of dependencies up to now. As a consequence, the value
/// that this query gives us can only depend on those dependencies too. Therefore,
/// it is sound to use the current dependency set for the created node.
///
/// During replay, the order of the nodes is relevant in the dependency graph.
/// So the unchanged replay will mark the caller query before trying to mark this one.
/// If there is a change to report, the caller query will be re-executed before this one.
///
/// FIXME: If the code is changed enough for this node to be marked before requiring the
/// caller's node, we suppose that those changes will be enough to mark this node red and
/// force a recomputation using the "normal" way.
pub fn with_feed_task<Ctxt: DepContext<DepKind = K>, A: Debug, R: Debug>(
&self,
node: DepNode<K>,
cx: Ctxt,
key: A,
result: &R,
hash_result: fn(&mut StableHashingContext<'_>, &R) -> Fingerprint,
) -> DepNodeIndex {
// `data` is `Some` only when incremental compilation is enabled.
if let Some(data) = self.data.as_ref() {
// Fast path: the node already exists in the current graph (the value
// was fed or computed earlier in this session) — reuse its index.
if let Some(dep_node_index) = self.dep_node_index_of_opt(&node) {
#[cfg(debug_assertions)]
{
// In debug builds, re-hash the fed result and record the edge
// so an inconsistent re-feed can be caught.
let hashing_timer = cx.profiler().incr_result_hashing();
let current_fingerprint =
cx.with_stable_hashing_context(|mut hcx| hash_result(&mut hcx, result));
hashing_timer.finish_with_query_invocation_id(dep_node_index.into());
data.current.record_edge(dep_node_index, node, current_fingerprint);
}
return dep_node_index;
}
// The fed node's dependencies are exactly the reads recorded so far by
// the calling query (see the doc comment above for why this is sound).
// Feeding outside a dependency-tracking context is a bug.
let mut edges = SmallVec::new();
K::read_deps(|task_deps| match task_deps {
TaskDepsRef::Allow(deps) => edges.extend(deps.lock().reads.iter().copied()),
TaskDepsRef::Ignore | TaskDepsRef::Forbid => {
panic!("Cannot summarize when dependencies are not recorded.")
}
});
// Fingerprint the fed value; used below when interning the node.
let hashing_timer = cx.profiler().incr_result_hashing();
let current_fingerprint =
cx.with_stable_hashing_context(|mut hcx| hash_result(&mut hcx, result));
let print_status = cfg!(debug_assertions) && cx.sess().opts.unstable_opts.dep_tasks;
// Intern the new `DepNode` with the dependencies up-to-now.
let (dep_node_index, prev_and_color) = data.current.intern_node(
cx.profiler(),
&data.previous,
node,
edges,
Some(current_fingerprint),
print_status,
);
hashing_timer.finish_with_query_invocation_id(dep_node_index.into());
// If the node also existed in the previous session's graph, record its
// color; each previous-session node may be colored at most once.
if let Some((prev_index, color)) = prev_and_color {
debug_assert!(
data.colors.get(prev_index).is_none(),
"DepGraph::with_task() - Duplicate DepNodeColor insertion for {key:?}",
);
data.colors.insert(prev_index, color);
}
dep_node_index
} else {
// Incremental compilation is turned off. We just execute the task
// without tracking. We still provide a dep-node index that uniquely
// identifies the task so that we have a cheap way of referring to
// the query for self-profiling.
self.next_virtual_depnode_index()
}
}
#[inline] #[inline]
pub fn dep_node_index_of(&self, dep_node: &DepNode<K>) -> DepNodeIndex { pub fn dep_node_index_of(&self, dep_node: &DepNode<K>) -> DepNodeIndex {
self.dep_node_index_of_opt(dep_node).unwrap() self.dep_node_index_of_opt(dep_node).unwrap()