use proc_macro::TokenStream;
use proc_macro2::{Delimiter, TokenTree};
use quote::quote;
use syn::parse::{Parse, ParseStream, Result};
use syn::punctuated::Punctuated;
use syn::spanned::Spanned;
use syn::{
    braced, parenthesized, parse_macro_input, parse_quote, AttrStyle, Attribute, Block, Error,
    Expr, Ident, ReturnType, Token, Type,
};

mod kw {
    syn::custom_keyword!(query);
}

/// Ident or a wildcard `_`.
struct IdentOrWild(Ident);

impl Parse for IdentOrWild {
    fn parse(input: ParseStream<'_>) -> Result<Self> {
        Ok(if input.peek(Token![_]) {
            let underscore = input.parse::<Token![_]>()?;
            IdentOrWild(Ident::new("_", underscore.span()))
        } else {
            IdentOrWild(input.parse()?)
        })
    }
}

/// A modifier for a query.
enum QueryModifier {
    /// The description of the query.
    Desc(Option<Ident>, Punctuated<Expr, Token![,]>),

    /// Use this type for the in-memory cache.
    Storage(Type),

    /// Cache the query to disk if the `Block` returns true.
    Cache(Option<(IdentOrWild, IdentOrWild)>, Block),

    /// Custom code to load the query from disk.
    LoadCached(Ident, Ident, Block),

    /// A cycle error for this query aborts compilation with a fatal error.
    FatalCycle,

    /// A cycle error results in a `delay_bug` call.
    CycleDelayBug,

    /// Don't hash the result; instead just mark the query red if it runs.
    NoHash,

    /// Generate a dep node based on the dependencies of the query.
    Anon,

    /// Always evaluate the query, ignoring its dependencies.
    EvalAlways,
}

impl Parse for QueryModifier {
    fn parse(input: ParseStream<'_>) -> Result<Self> {
        let modifier: Ident = input.parse()?;
        if modifier == "desc" {
            // Parse a description modifier like:
            // `desc { |tcx| "foo {}", tcx.item_path(key) }`
            let attr_content;
            braced!(attr_content in input);
            let tcx = if attr_content.peek(Token![|]) {
                attr_content.parse::<Token![|]>()?;
                let tcx = attr_content.parse()?;
                attr_content.parse::<Token![|]>()?;
                Some(tcx)
            } else {
                None
            };
            let desc = attr_content.parse_terminated(Expr::parse)?;
            Ok(QueryModifier::Desc(tcx, desc))
        } else if modifier == "cache_on_disk_if" {
            // Parse a cache modifier like:
            // `cache_on_disk_if(tcx, value) { |tcx| key.is_local() }`
            let has_args = if let TokenTree::Group(group) = input.fork().parse()? {
                group.delimiter() == Delimiter::Parenthesis
            } else {
                false
            };
            let args = if has_args {
                let args;
                parenthesized!(args in input);
                let tcx = args.parse()?;
                args.parse::<Token![,]>()?;
                let value = args.parse()?;
                Some((tcx, value))
            } else {
                None
            };
            let block = input.parse()?;
            Ok(QueryModifier::Cache(args, block))
        } else if modifier == "load_cached" {
            // Parse a load_cached modifier like:
            // `load_cached(tcx, id) { tcx.on_disk_cache.try_load_query_result(tcx, id) }`
            let args;
            parenthesized!(args in input);
            let tcx = args.parse()?;
            args.parse::<Token![,]>()?;
            let id = args.parse()?;
            let block = input.parse()?;
            Ok(QueryModifier::LoadCached(tcx, id, block))
        } else if modifier == "storage" {
            let args;
            parenthesized!(args in input);
            let ty = args.parse()?;
            Ok(QueryModifier::Storage(ty))
        } else if modifier == "fatal_cycle" {
            Ok(QueryModifier::FatalCycle)
        } else if modifier == "cycle_delay_bug" {
            Ok(QueryModifier::CycleDelayBug)
        } else if modifier == "no_hash" {
            Ok(QueryModifier::NoHash)
        } else if modifier == "anon" {
            Ok(QueryModifier::Anon)
        } else if modifier == "eval_always" {
            Ok(QueryModifier::EvalAlways)
        } else {
            Err(Error::new(modifier.span(), "unknown query modifier"))
        }
    }
}

/// Ensures only doc-comment attributes are used.
fn check_attributes(attrs: Vec<Attribute>) -> Result<Vec<Attribute>> {
    let inner = |attr: Attribute| {
        if !attr.path.is_ident("doc") {
            Err(Error::new(attr.span(), "attributes not supported on queries"))
        } else if attr.style != AttrStyle::Outer {
            Err(Error::new(
                attr.span(),
                "attributes must be outer attributes (`///`), not inner attributes",
            ))
        } else {
            Ok(attr)
        }
    };
    attrs.into_iter().map(inner).collect()
}

/// A compiler query. `query ... { ... }`
struct Query {
    doc_comments: Vec<Attribute>,
    modifiers: List<QueryModifier>,
    name: Ident,
    key: IdentOrWild,
    arg: Type,
    result: ReturnType,
}

impl Parse for Query {
    fn parse(input: ParseStream<'_>) -> Result<Self> {
        let doc_comments = check_attributes(input.call(Attribute::parse_outer)?)?;

        // Parse the query declaration, like `query type_of(key: DefId) -> Ty<'tcx>`.
        input.parse::<kw::query>()?;
        let name: Ident = input.parse()?;
        let arg_content;
        parenthesized!(arg_content in input);
        let key = arg_content.parse()?;
        arg_content.parse::<Token![:]>()?;
        let arg = arg_content.parse()?;
        let result = input.parse()?;

        // Parse the query modifiers.
        let content;
        braced!(content in input);
        let modifiers = content.parse()?;

        Ok(Query { doc_comments, modifiers, name, key, arg, result })
    }
}
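
// For orientation, a declaration accepted by the parser above looks roughly like the
// following (illustrative only: the query name, key type, and modifier bodies are
// placeholders assembled from the parse comments in this file, not a real definition):
//
//     /// Computes the type of a definition.
//     query type_of(key: DefId) -> Ty<'tcx> {
//         desc { |tcx| "computing type of `{}`", tcx.def_path_str(key) }
//         cache_on_disk_if { key.is_local() }
//     }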

/// A type used to greedily parse another type until the input is empty.
struct List<T>(Vec<T>);

impl<T: Parse> Parse for List<T> {
    fn parse(input: ParseStream<'_>) -> Result<Self> {
        let mut list = Vec::new();
        while !input.is_empty() {
            list.push(input.parse()?);
        }
        Ok(List(list))
    }
}

struct QueryModifiers {
    /// The description of the query.
    desc: (Option<Ident>, Punctuated<Expr, Token![,]>),

    /// Use this type for the in-memory cache.
    storage: Option<Type>,

    /// Cache the query to disk if the `Block` returns true.
    cache: Option<(Option<(IdentOrWild, IdentOrWild)>, Block)>,

    /// Custom code to load the query from disk.
    load_cached: Option<(Ident, Ident, Block)>,

    /// A cycle error for this query aborts compilation with a fatal error.
    fatal_cycle: bool,

    /// A cycle error results in a `delay_bug` call.
    cycle_delay_bug: bool,

    /// Don't hash the result; instead just mark the query red if it runs.
    no_hash: bool,

    /// Generate a dep node based on the dependencies of the query.
    anon: bool,

    /// Always evaluate the query, ignoring its dependencies.
    eval_always: bool,
}

/// Process query modifiers into a struct, erroring on duplicates.
fn process_modifiers(query: &mut Query) -> QueryModifiers {
    let mut load_cached = None;
    let mut storage = None;
    let mut cache = None;
    let mut desc = None;
    let mut fatal_cycle = false;
    let mut cycle_delay_bug = false;
    let mut no_hash = false;
    let mut anon = false;
    let mut eval_always = false;
    for modifier in query.modifiers.0.drain(..) {
        match modifier {
            QueryModifier::LoadCached(tcx, id, block) => {
                if load_cached.is_some() {
                    panic!("duplicate modifier `load_cached` for query `{}`", query.name);
                }
                load_cached = Some((tcx, id, block));
            }
            QueryModifier::Storage(ty) => {
                if storage.is_some() {
                    panic!("duplicate modifier `storage` for query `{}`", query.name);
                }
                storage = Some(ty);
            }
            QueryModifier::Cache(args, expr) => {
                if cache.is_some() {
                    panic!("duplicate modifier `cache` for query `{}`", query.name);
                }
                cache = Some((args, expr));
            }
            QueryModifier::Desc(tcx, list) => {
                if desc.is_some() {
                    panic!("duplicate modifier `desc` for query `{}`", query.name);
                }
                // If there are no doc-comments, give at least some idea of what
                // it does by showing the query description.
                if query.doc_comments.is_empty() {
                    use ::syn::*;
                    let mut list = list.iter();
                    let format_str: String = match list.next() {
                        Some(&Expr::Lit(ExprLit { lit: Lit::Str(ref lit_str), .. })) => {
                            lit_str.value().replace("`{}`", "{}") // We add them back later anyway, for consistency.
                        }
                        _ => panic!("Expected a string literal"),
                    };
                    let mut fmt_fragments = format_str.split("{}");
                    let mut doc_string = fmt_fragments.next().unwrap().to_string();
                    list.map(::quote::ToTokens::to_token_stream).zip(fmt_fragments).for_each(
                        |(tts, next_fmt_fragment)| {
                            use ::core::fmt::Write;
                            write!(
                                &mut doc_string,
                                " `{}` {}",
                                tts.to_string().replace(" . ", "."),
                                next_fmt_fragment,
                            )
                            .unwrap();
                        },
                    );
                    let doc_string = format!(
                        "[query description - consider adding a doc-comment!] {}",
                        doc_string
                    );
                    let comment = parse_quote! {
                        #[doc = #doc_string]
                    };
                    query.doc_comments.push(comment);
                }
                desc = Some((tcx, list));
            }
            QueryModifier::FatalCycle => {
                if fatal_cycle {
                    panic!("duplicate modifier `fatal_cycle` for query `{}`", query.name);
                }
                fatal_cycle = true;
            }
            QueryModifier::CycleDelayBug => {
                if cycle_delay_bug {
                    panic!("duplicate modifier `cycle_delay_bug` for query `{}`", query.name);
                }
                cycle_delay_bug = true;
            }
            QueryModifier::NoHash => {
                if no_hash {
                    panic!("duplicate modifier `no_hash` for query `{}`", query.name);
                }
                no_hash = true;
            }
            QueryModifier::Anon => {
                if anon {
                    panic!("duplicate modifier `anon` for query `{}`", query.name);
                }
                anon = true;
            }
            QueryModifier::EvalAlways => {
                if eval_always {
                    panic!("duplicate modifier `eval_always` for query `{}`", query.name);
                }
                eval_always = true;
            }
        }
    }
    let desc = desc.unwrap_or_else(|| {
        panic!("no description provided for query `{}`", query.name);
    });
    QueryModifiers {
        load_cached,
        storage,
        cache,
        desc,
        fatal_cycle,
        cycle_delay_bug,
        no_hash,
        anon,
        eval_always,
    }
}
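
// Rough sketch of the doc-comment fallback above, assuming the illustrative modifier
// `desc { |tcx| "computing type of `{}`", tcx.def_path_str(key) }` on a query with no
// doc-comments: the format string is split on `{}` and the remaining description
// arguments are spliced back in between backticks, yielding an attribute approximately like
// `#[doc = "[query description - consider adding a doc-comment!] computing type of `tcx.def_path_str(key)`"]`.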

/// Add the impl of `QueryDescription` for the query to `impls` if one is requested.
fn add_query_description_impl(
    query: &Query,
    modifiers: QueryModifiers,
    impls: &mut proc_macro2::TokenStream,
) {
    let name = &query.name;
    let key = &query.key.0;

    // Find out if we should cache the query on disk
    let cache = if let Some((args, expr)) = modifiers.cache.as_ref() {
        let try_load_from_disk = if let Some((tcx, id, block)) = modifiers.load_cached.as_ref() {
            // Use custom code to load the query from disk
            quote! {
                #[inline]
                fn try_load_from_disk(
                    #tcx: QueryCtxt<'tcx>,
                    #id: SerializedDepNodeIndex
                ) -> Option<Self::Value> {
                    #block
                }
            }
        } else {
            // Use the default code to load the query from disk
            quote! {
                #[inline]
                fn try_load_from_disk(
                    tcx: QueryCtxt<'tcx>,
                    id: SerializedDepNodeIndex
                ) -> Option<Self::Value> {
                    tcx.on_disk_cache.as_ref()?.try_load_query_result(*tcx, id)
                }
            }
        };

        let tcx = args
            .as_ref()
            .map(|t| {
                let t = &(t.0).0;
                quote! { #t }
            })
            .unwrap_or(quote! { _ });
        let value = args
            .as_ref()
            .map(|t| {
                let t = &(t.1).0;
                quote! { #t }
            })
            .unwrap_or(quote! { _ });
        // expr is a `Block`, meaning that `{ #expr }` gets expanded
        // to `{ { stmts... } }`, which triggers the `unused_braces` lint.
        quote! {
            #[inline]
            #[allow(unused_variables, unused_braces)]
            fn cache_on_disk(
                #tcx: QueryCtxt<'tcx>,
                #key: &Self::Key,
                #value: Option<&Self::Value>
            ) -> bool {
                #expr
            }

            #try_load_from_disk
        }
    } else {
        if modifiers.load_cached.is_some() {
            panic!("load_cached modifier on query `{}` without a cache modifier", name);
        }
        quote! {}
    };

    let (tcx, desc) = modifiers.desc;
    let tcx = tcx.as_ref().map_or(quote! { _ }, |t| quote! { #t });

    let desc = quote! {
        #[allow(unused_variables)]
        fn describe(tcx: QueryCtxt<'tcx>, key: Self::Key) -> String {
            let (#tcx, #key) = (*tcx, key);
            ::rustc_middle::ty::print::with_no_trimmed_paths(|| format!(#desc).into())
        }
    };

    impls.extend(quote! {
        impl<'tcx> QueryDescription<QueryCtxt<'tcx>> for queries::#name<'tcx> {
            #desc
            #cache
        }
    });
}

pub fn rustc_queries(input: TokenStream) -> TokenStream {
    let queries = parse_macro_input!(input as List<Query>);

    let mut query_stream = quote! {};
    let mut query_description_stream = quote! {};
    let mut dep_node_def_stream = quote! {};
    let mut cached_queries = quote! {};

    for mut query in queries.0 {
        let modifiers = process_modifiers(&mut query);
        let name = &query.name;
        let arg = &query.arg;
        let result_full = &query.result;
        let result = match query.result {
            ReturnType::Default => quote! { -> () },
            _ => quote! { #result_full },
        };

        if modifiers.cache.is_some() {
            cached_queries.extend(quote! {
                #name,
            });
        }

        let mut attributes = Vec::new();

        // Pass on the fatal_cycle modifier
        if modifiers.fatal_cycle {
            attributes.push(quote! { fatal_cycle });
        };
        // Pass on the storage modifier
        if let Some(ref ty) = modifiers.storage {
            attributes.push(quote! { storage(#ty) });
        };
        // Pass on the cycle_delay_bug modifier
        if modifiers.cycle_delay_bug {
            attributes.push(quote! { cycle_delay_bug });
        };
        // Pass on the no_hash modifier
        if modifiers.no_hash {
            attributes.push(quote! { no_hash });
        };
        // Pass on the anon modifier
        if modifiers.anon {
            attributes.push(quote! { anon });
        };
        // Pass on the eval_always modifier
        if modifiers.eval_always {
            attributes.push(quote! { eval_always });
        };

        let attribute_stream = quote! {#(#attributes),*};
        let doc_comments = query.doc_comments.iter();
        // Add the query to the group
        query_stream.extend(quote! {
            #(#doc_comments)*
            [#attribute_stream] fn #name(#arg) #result,
        });

        // Create a dep node for the query
        dep_node_def_stream.extend(quote! {
            [#attribute_stream] #name(#arg),
        });

        add_query_description_impl(&query, modifiers, &mut query_description_stream);
    }

    TokenStream::from(quote! {
        macro_rules! rustc_query_append {
            ([$($macro:tt)*][$($other:tt)*]) => {
                $($macro)* {
                    $($other)*

                    #query_stream

                }
            }
        }
        macro_rules! rustc_dep_node_append {
            ([$($macro:tt)*][$($other:tt)*]) => {
                $($macro)*(
                    $($other)*

                    #dep_node_def_stream
                );
            }
        }
        macro_rules! rustc_cached_queries {
            ($($macro:tt)*) => {
                $($macro)*(#cached_queries);
            }
        }
        macro_rules! rustc_query_description {
            () => { #query_description_stream }
        }
    })
}
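
// The emitted `macro_rules!` items act as callbacks: a caller (hypothetical name shown;
// the real invocations live elsewhere in the compiler) such as
// `rustc_query_append!([define_queries!][<'tcx>])` expands to
// `define_queries! { <'tcx> /* #query_stream spliced here */ }`, and
// `rustc_cached_queries!` hands `#cached_queries` to whatever macro it is given.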