use proc_macro::TokenStream;
use syn::{
    Token, Ident, Type, Attribute, ReturnType, Expr, Block, Error,
    braced, parenthesized, parse_macro_input,
};
use syn::spanned::Spanned;
use syn::parse::{Result, Parse, ParseStream};
use syn::punctuated::Punctuated;
use syn;
use quote::quote;
use itertools::Itertools;

#[allow(non_camel_case_types)]
mod kw {
    syn::custom_keyword!(query);
}

/// Ident or a wildcard `_`.
struct IdentOrWild(Ident);

impl Parse for IdentOrWild {
    fn parse(input: ParseStream<'_>) -> Result<Self> {
        Ok(if input.peek(Token![_]) {
            let underscore = input.parse::<Token![_]>()?;
            IdentOrWild(Ident::new("_", underscore.span()))
        } else {
            IdentOrWild(input.parse()?)
        })
    }
}
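
// Both forms occur in the key position (illustrative): `query foo(key: DefId) -> ...`
// binds the key as `key`, while `query foo(_: DefId) -> ...` stores an `Ident`
// literally named `_`, so either can be spliced into the generated code below.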

/// A modifier for a query
enum QueryModifier {
    /// The description of the query.
    Desc(Option<Ident>, Punctuated<Expr, Token![,]>),

    /// Cache the query to disk if the `Expr` returns true.
    Cache(Option<Ident>, Expr),

    /// Custom code to load the query from disk.
    LoadCached(Ident, Ident, Block),

    /// A cycle error for this query aborts the compilation with a fatal error.
    FatalCycle,

    /// Don't hash the result; instead just mark the query red if it runs.
    NoHash,

    /// Don't force the query.
    NoForce,

    /// Generate a dep node based on the dependencies of the query.
    Anon,

    /// Always evaluate the query, ignoring its dependencies.
    EvalAlways,
}
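
// Illustrative example of how these modifiers appear in a query body, pieced
// together from the parsing code below (the query itself is just a stand-in):
//
//     query type_of(key: DefId) -> Ty<'tcx> {
//         desc { |tcx| "computing the type of `{}`", tcx.item_path(key) }
//         cache { |tcx| key.is_local() }
//         fatal_cycle
//     }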

impl Parse for QueryModifier {
    fn parse(input: ParseStream<'_>) -> Result<Self> {
        let modifier: Ident = input.parse()?;
        if modifier == "desc" {
            // Parse a description modifier like:
            // `desc { |tcx| "foo {}", tcx.item_path(key) }`
            let attr_content;
            braced!(attr_content in input);
            let tcx = if attr_content.peek(Token![|]) {
                attr_content.parse::<Token![|]>()?;
                let tcx = attr_content.parse()?;
                attr_content.parse::<Token![|]>()?;
                Some(tcx)
            } else {
                None
            };
            let desc = attr_content.parse_terminated(Expr::parse)?;
            Ok(QueryModifier::Desc(tcx, desc))
        } else if modifier == "cache" {
            // Parse a cache modifier like:
            // `cache { |tcx| key.is_local() }`
            let attr_content;
            braced!(attr_content in input);
            let tcx = if attr_content.peek(Token![|]) {
                attr_content.parse::<Token![|]>()?;
                let tcx = attr_content.parse()?;
                attr_content.parse::<Token![|]>()?;
                Some(tcx)
            } else {
                None
            };
            let expr = attr_content.parse()?;
            Ok(QueryModifier::Cache(tcx, expr))
        } else if modifier == "load_cached" {
            // Parse a load_cached modifier like:
            // `load_cached(tcx, id) { tcx.queries.on_disk_cache.try_load_query_result(tcx, id) }`
            let args;
            parenthesized!(args in input);
            let tcx = args.parse()?;
            args.parse::<Token![,]>()?;
            let id = args.parse()?;
            let block = input.parse()?;
            Ok(QueryModifier::LoadCached(tcx, id, block))
        } else if modifier == "fatal_cycle" {
            Ok(QueryModifier::FatalCycle)
        } else if modifier == "no_hash" {
            Ok(QueryModifier::NoHash)
        } else if modifier == "no_force" {
            Ok(QueryModifier::NoForce)
        } else if modifier == "anon" {
            Ok(QueryModifier::Anon)
        } else if modifier == "eval_always" {
            Ok(QueryModifier::EvalAlways)
        } else {
            Err(Error::new(modifier.span(), "unknown query modifier"))
        }
    }
}

/// Ensures only doc comment attributes are used
fn check_attributes(attrs: Vec<Attribute>) -> Result<()> {
    for attr in attrs {
        if !attr.path.is_ident("doc") {
            return Err(Error::new(attr.span(), "attributes not supported on queries"));
        }
    }
    Ok(())
}

/// A compiler query. `query ... { ... }`
struct Query {
    modifiers: List<QueryModifier>,
    name: Ident,
    key: IdentOrWild,
    arg: Type,
    result: ReturnType,
}

impl Parse for Query {
    fn parse(input: ParseStream<'_>) -> Result<Self> {
        check_attributes(input.call(Attribute::parse_outer)?)?;

        // Parse the query declaration. Like `query type_of(key: DefId) -> Ty<'tcx>`
        input.parse::<kw::query>()?;
        let name: Ident = input.parse()?;
        let arg_content;
        parenthesized!(arg_content in input);
        let key = arg_content.parse()?;
        arg_content.parse::<Token![:]>()?;
        let arg = arg_content.parse()?;
        let result = input.parse()?;

        // Parse the query modifiers
        let content;
        braced!(content in input);
        let modifiers = content.parse()?;

        Ok(Query {
            modifiers,
            name,
            key,
            arg,
            result,
        })
    }
}

/// A type used to greedily parse another type until the input is empty.
struct List<T>(Vec<T>);

impl<T: Parse> Parse for List<T> {
    fn parse(input: ParseStream<'_>) -> Result<Self> {
        let mut list = Vec::new();
        while !input.is_empty() {
            list.push(input.parse()?);
        }
        Ok(List(list))
    }
}

/// A named group containing queries.
struct Group {
    name: Ident,
    queries: List<Query>,
}

impl Parse for Group {
    fn parse(input: ParseStream<'_>) -> Result<Self> {
        let name: Ident = input.parse()?;
        let content;
        braced!(content in input);
        Ok(Group {
            name,
            queries: content.parse()?,
        })
    }
}
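
// The full macro input is a `List<Group>` (see `rustc_queries` below), so the
// expected shape is roughly the following sketch, with a hypothetical group
// name and query:
//
//     Other {
//         query type_of(key: DefId) -> Ty<'tcx> {
//             desc { |tcx| "computing the type of `{}`", tcx.item_path(key) }
//         }
//     }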

struct QueryModifiers {
    /// The description of the query.
    desc: Option<(Option<Ident>, Punctuated<Expr, Token![,]>)>,

    /// Cache the query to disk if the `Expr` returns true.
    cache: Option<(Option<Ident>, Expr)>,

    /// Custom code to load the query from disk.
    load_cached: Option<(Ident, Ident, Block)>,

    /// A cycle error for this query aborts the compilation with a fatal error.
    fatal_cycle: bool,

    /// Don't hash the result; instead just mark the query red if it runs.
    no_hash: bool,

    /// Don't force the query.
    no_force: bool,

    /// Generate a dep node based on the dependencies of the query.
    anon: bool,

    /// Always evaluate the query, ignoring its dependencies.
    eval_always: bool,
}

/// Process query modifiers into a struct, erroring on duplicates
fn process_modifiers(query: &mut Query) -> QueryModifiers {
    let mut load_cached = None;
    let mut cache = None;
    let mut desc = None;
    let mut fatal_cycle = false;
    let mut no_hash = false;
    let mut no_force = false;
    let mut anon = false;
    let mut eval_always = false;
    for modifier in query.modifiers.0.drain(..) {
        match modifier {
            QueryModifier::LoadCached(tcx, id, block) => {
                if load_cached.is_some() {
                    panic!("duplicate modifier `load_cached` for query `{}`", query.name);
                }
                load_cached = Some((tcx, id, block));
            }
            QueryModifier::Cache(tcx, expr) => {
                if cache.is_some() {
                    panic!("duplicate modifier `cache` for query `{}`", query.name);
                }
                cache = Some((tcx, expr));
            }
            QueryModifier::Desc(tcx, list) => {
                if desc.is_some() {
                    panic!("duplicate modifier `desc` for query `{}`", query.name);
                }
                desc = Some((tcx, list));
            }
            QueryModifier::FatalCycle => {
                if fatal_cycle {
                    panic!("duplicate modifier `fatal_cycle` for query `{}`", query.name);
                }
                fatal_cycle = true;
            }
            QueryModifier::NoHash => {
                if no_hash {
                    panic!("duplicate modifier `no_hash` for query `{}`", query.name);
                }
                no_hash = true;
            }
            QueryModifier::NoForce => {
                if no_force {
                    panic!("duplicate modifier `no_force` for query `{}`", query.name);
                }
                no_force = true;
            }
            QueryModifier::Anon => {
                if anon {
                    panic!("duplicate modifier `anon` for query `{}`", query.name);
                }
                anon = true;
            }
            QueryModifier::EvalAlways => {
                if eval_always {
                    panic!("duplicate modifier `eval_always` for query `{}`", query.name);
                }
                eval_always = true;
            }
        }
    }
    QueryModifiers {
        load_cached,
        cache,
        desc,
        fatal_cycle,
        no_hash,
        no_force,
        anon,
        eval_always,
    }
}

/// Add the impl of QueryDescription for the query to `impls` if one is requested
fn add_query_description_impl(
    query: &Query,
    modifiers: QueryModifiers,
    impls: &mut proc_macro2::TokenStream,
) {
    let name = &query.name;
    let arg = &query.arg;
    let key = &query.key.0;

    // Find out if we should cache the query on disk
    let cache = modifiers.cache.as_ref().map(|(tcx, expr)| {
        let try_load_from_disk = if let Some((tcx, id, block)) = modifiers.load_cached.as_ref() {
            // Use custom code to load the query from disk
            quote! {
                #[inline]
                fn try_load_from_disk(
                    #tcx: TyCtxt<'_, 'tcx, 'tcx>,
                    #id: SerializedDepNodeIndex
                ) -> Option<Self::Value> {
                    #block
                }
            }
        } else {
            // Use the default code to load the query from disk
            quote! {
                #[inline]
                fn try_load_from_disk(
                    tcx: TyCtxt<'_, 'tcx, 'tcx>,
                    id: SerializedDepNodeIndex
                ) -> Option<Self::Value> {
                    tcx.queries.on_disk_cache.try_load_query_result(tcx, id)
                }
            }
        };

        let tcx = tcx.as_ref().map(|t| quote! { #t }).unwrap_or(quote! { _ });
        quote! {
            #[inline]
            fn cache_on_disk(#tcx: TyCtxt<'_, 'tcx, 'tcx>, #key: Self::Key) -> bool {
                #expr
            }

            #try_load_from_disk
        }
    });

    if cache.is_none() && modifiers.load_cached.is_some() {
        panic!("load_cached modifier on query `{}` without a cache modifier", name);
    }

    let desc = modifiers.desc.as_ref().map(|(tcx, desc)| {
        let tcx = tcx.as_ref().map(|t| quote! { #t }).unwrap_or(quote! { _ });
        quote! {
            fn describe(
                #tcx: TyCtxt<'_, '_, '_>,
                #key: #arg,
            ) -> Cow<'static, str> {
                format!(#desc).into()
            }
        }
    });

    if desc.is_some() || cache.is_some() {
        let cache = cache.unwrap_or(quote! {});
        let desc = desc.unwrap_or(quote! {});

        impls.extend(quote! {
            impl<'tcx> QueryDescription<'tcx> for queries::#name<'tcx> {
                #desc
                #cache
            }
        });
    }
}
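
// For a query carrying both `desc` and `cache` modifiers, the function above
// emits roughly the following impl (a sketch; `type_of` and `DefId` are
// stand-ins for the query name and key type):
//
//     impl<'tcx> QueryDescription<'tcx> for queries::type_of<'tcx> {
//         fn describe(tcx: TyCtxt<'_, '_, '_>, key: DefId) -> Cow<'static, str> { ... }
//
//         #[inline]
//         fn cache_on_disk(tcx: TyCtxt<'_, 'tcx, 'tcx>, key: Self::Key) -> bool { ... }
//
//         #[inline]
//         fn try_load_from_disk(
//             tcx: TyCtxt<'_, 'tcx, 'tcx>,
//             id: SerializedDepNodeIndex
//         ) -> Option<Self::Value> { ... }
//     }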

pub fn rustc_queries(input: TokenStream) -> TokenStream {
    let groups = parse_macro_input!(input as List<Group>);

    let mut query_stream = quote! {};
    let mut query_description_stream = quote! {};
    let mut dep_node_def_stream = quote! {};
    let mut dep_node_force_stream = quote! {};
    let mut no_force_queries = Vec::new();

    for group in groups.0 {
        let mut group_stream = quote! {};
        for mut query in group.queries.0 {
            let modifiers = process_modifiers(&mut query);
            let name = &query.name;
            let arg = &query.arg;
            let result_full = &query.result;
            let result = match query.result {
                ReturnType::Default => quote! { -> () },
                _ => quote! { #result_full },
            };

            let mut attributes = Vec::new();

            // Pass on the fatal_cycle modifier
            if modifiers.fatal_cycle {
                attributes.push(quote! { fatal_cycle });
            };
            // Pass on the no_hash modifier
            if modifiers.no_hash {
                attributes.push(quote! { no_hash });
            };

            let mut attribute_stream = quote! {};

            for e in attributes.into_iter().intersperse(quote! {,}) {
                attribute_stream.extend(e);
            }

            // Add the query to the group
            group_stream.extend(quote! {
                [#attribute_stream] fn #name: #name(#arg) #result,
            });

            let mut attributes = Vec::new();

            // Pass on the anon modifier
            if modifiers.anon {
                attributes.push(quote! { anon });
            };
            // Pass on the eval_always modifier
            if modifiers.eval_always {
                attributes.push(quote! { eval_always });
            };

            let mut attribute_stream = quote! {};
            for e in attributes.into_iter().intersperse(quote! {,}) {
                attribute_stream.extend(e);
            }
            // Create a dep node for the query
            dep_node_def_stream.extend(quote! {
                [#attribute_stream] #name(#arg),
            });

            if modifiers.no_force {
                no_force_queries.push(name.clone());
            } else {
                // Add a match arm to force the query given the dep node
                dep_node_force_stream.extend(quote! {
                    DepKind::#name => {
                        if let Some(key) = RecoverKey::recover($tcx, $dep_node) {
                            force_ex!($tcx, #name, key);
                        } else {
                            return false;
                        }
                    }
                });
            }

            add_query_description_impl(&query, modifiers, &mut query_description_stream);
        }
        let name = &group.name;
        query_stream.extend(quote! {
            #name { #group_stream },
        });
    }

    // Add an arm for the no force queries to panic when trying to force them
    for query in no_force_queries {
        dep_node_force_stream.extend(quote! {
            DepKind::#query |
        });
    }
    dep_node_force_stream.extend(quote! {
        DepKind::Null => {
            bug!("Cannot force dep node: {:?}", $dep_node)
        }
    });

    TokenStream::from(quote! {
        macro_rules! rustc_query_append {
            ([$($macro:tt)*][$($other:tt)*]) => {
                $($macro)* {
                    $($other)*

                    #query_stream

                }
            }
        }
        macro_rules! rustc_dep_node_append {
            ([$($macro:tt)*][$($other:tt)*]) => {
                $($macro)*(
                    $($other)*

                    #dep_node_def_stream
                );
            }
        }
        macro_rules! rustc_dep_node_force {
            ([$dep_node:expr, $tcx:expr] $($other:tt)*) => {
                match $dep_node.kind {
                    $($other)*

                    #dep_node_force_stream
                }
            }
        }
        #query_description_stream
    })
}
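
// The emitted `rustc_query_append!`, `rustc_dep_node_append!` and
// `rustc_dep_node_force!` macros splice the collected token streams into a
// macro chosen by the caller. For instance (the caller-side macro name and
// arguments here are hypothetical):
//
//     rustc_query_append!([define_queries!][<'tcx>]);
//
// expands to `define_queries! { <'tcx> /* every group and its queries */ }`.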