2018-12-02 18:14:35 -06:00
|
|
|
use proc_macro::TokenStream;
|
|
|
|
use proc_macro2::Span;
|
|
|
|
use syn::{
|
2019-03-18 02:19:23 -05:00
|
|
|
Token, Ident, Type, Attribute, ReturnType, Expr, Block, Error,
|
2018-12-02 18:14:35 -06:00
|
|
|
braced, parenthesized, parse_macro_input,
|
|
|
|
};
|
2019-03-18 02:19:23 -05:00
|
|
|
use syn::spanned::Spanned;
|
2018-12-02 18:14:35 -06:00
|
|
|
use syn::parse::{Result, Parse, ParseStream};
|
|
|
|
use syn::punctuated::Punctuated;
|
2019-03-18 02:19:23 -05:00
|
|
|
use syn;
|
2018-12-02 18:14:35 -06:00
|
|
|
use quote::quote;
|
|
|
|
|
2019-03-18 02:19:23 -05:00
|
|
|
#[allow(non_camel_case_types)]
mod kw {
    // Declares the `query` keyword that introduces each query declaration,
    // e.g. `query type_of(key: DefId) -> Ty<'tcx>`. `custom_keyword!`
    // generates a lowercase type name, hence the `non_camel_case_types` allow.
    syn::custom_keyword!(query);
}
|
|
|
|
|
|
|
|
/// Ident or a wildcard `_`.
///
/// A wildcard is stored as a literal `_` ident so downstream code can treat
/// both cases uniformly (see the `Parse` impl).
struct IdentOrWild(Ident);
|
|
|
|
|
|
|
|
impl Parse for IdentOrWild {
|
|
|
|
fn parse(input: ParseStream<'_>) -> Result<Self> {
|
|
|
|
Ok(if input.peek(Token![_]) {
|
|
|
|
input.parse::<Token![_]>()?;
|
|
|
|
IdentOrWild(Ident::new("_", Span::call_site()))
|
|
|
|
} else {
|
|
|
|
IdentOrWild(input.parse()?)
|
|
|
|
})
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-03-18 02:19:23 -05:00
|
|
|
/// A modifier for a query
enum QueryModifier {
    /// The description of the query.
    // Optional `|tcx|` binder ident plus the `format!`-style argument list.
    Desc(Option<Ident>, Punctuated<Expr, Token![,]>),

    /// Cache the query to disk if the `Expr` returns true.
    // Optional `|tcx|` binder ident plus the predicate expression.
    Cache(Option<Ident>, Expr),

    /// Custom code to load the query from disk.
    // The `(tcx, id)` parameter idents and the loader body block.
    LoadCached(Ident, Ident, Block),

    /// A cycle error for this query aborting the compilation with a fatal error.
    FatalCycle,
}
|
|
|
|
|
2019-03-18 02:19:23 -05:00
|
|
|
impl Parse for QueryModifier {
|
2018-12-02 18:14:35 -06:00
|
|
|
fn parse(input: ParseStream<'_>) -> Result<Self> {
|
2019-03-18 02:19:23 -05:00
|
|
|
let modifier: Ident = input.parse()?;
|
|
|
|
if modifier == "desc" {
|
|
|
|
// Parse a description modifier like:
|
|
|
|
// `desc { |tcx| "foo {}", tcx.item_path(key) }`
|
2018-12-02 18:14:35 -06:00
|
|
|
let attr_content;
|
|
|
|
braced!(attr_content in input);
|
|
|
|
let tcx = if attr_content.peek(Token![|]) {
|
|
|
|
attr_content.parse::<Token![|]>()?;
|
|
|
|
let tcx = attr_content.parse()?;
|
|
|
|
attr_content.parse::<Token![|]>()?;
|
|
|
|
Some(tcx)
|
|
|
|
} else {
|
|
|
|
None
|
|
|
|
};
|
|
|
|
let desc = attr_content.parse_terminated(Expr::parse)?;
|
2019-03-18 02:19:23 -05:00
|
|
|
Ok(QueryModifier::Desc(tcx, desc))
|
|
|
|
} else if modifier == "cache" {
|
|
|
|
// Parse a cache modifier like:
|
|
|
|
// `cache { |tcx| key.is_local() }`
|
2018-12-02 18:14:35 -06:00
|
|
|
let attr_content;
|
|
|
|
braced!(attr_content in input);
|
|
|
|
let tcx = if attr_content.peek(Token![|]) {
|
|
|
|
attr_content.parse::<Token![|]>()?;
|
|
|
|
let tcx = attr_content.parse()?;
|
|
|
|
attr_content.parse::<Token![|]>()?;
|
|
|
|
Some(tcx)
|
|
|
|
} else {
|
|
|
|
None
|
|
|
|
};
|
|
|
|
let expr = attr_content.parse()?;
|
2019-03-18 02:19:23 -05:00
|
|
|
Ok(QueryModifier::Cache(tcx, expr))
|
|
|
|
} else if modifier == "load_cached" {
|
|
|
|
// Parse a load_cached modifier like:
|
|
|
|
// `load_cached(tcx, id) { tcx.queries.on_disk_cache.try_load_query_result(tcx, id) }`
|
2019-03-17 01:00:46 -05:00
|
|
|
let args;
|
|
|
|
parenthesized!(args in input);
|
|
|
|
let tcx = args.parse()?;
|
|
|
|
args.parse::<Token![,]>()?;
|
|
|
|
let id = args.parse()?;
|
|
|
|
let block = input.parse()?;
|
2019-03-18 02:19:23 -05:00
|
|
|
Ok(QueryModifier::LoadCached(tcx, id, block))
|
|
|
|
} else if modifier == "fatal_cycle" {
|
|
|
|
Ok(QueryModifier::FatalCycle)
|
2018-12-02 18:14:35 -06:00
|
|
|
} else {
|
2019-03-18 02:19:23 -05:00
|
|
|
Err(Error::new(modifier.span(), "unknown query modifier"))
|
2018-12-02 18:14:35 -06:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-03-18 02:19:23 -05:00
|
|
|
/// Ensures only doc comment attributes are used
|
|
|
|
fn check_attributes(attrs: Vec<Attribute>) -> Result<()> {
|
|
|
|
for attr in attrs {
|
|
|
|
if !attr.path.is_ident("doc") {
|
|
|
|
return Err(Error::new(attr.span(), "attributes not supported on queries"));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
|
|
|
/// A compiler query. `query ... { ... }`
struct Query {
    /// Modifiers parsed from the braced block following the signature.
    attrs: List<QueryModifier>,
    /// The name of the query, e.g. `type_of`.
    name: Ident,
    /// The key binding (an ident or `_`).
    key: IdentOrWild,
    /// The type of the query key.
    arg: Type,
    /// The declared return type; `ReturnType::Default` means `()`.
    result: ReturnType,
}
|
|
|
|
|
|
|
|
impl Parse for Query {
    /// Parses a full query declaration plus its modifier block:
    /// `query name(key: KeyTy) -> RetTy { <modifiers> }`.
    fn parse(input: ParseStream<'_>) -> Result<Self> {
        // Only doc comments may precede a query; anything else is rejected.
        check_attributes(input.call(Attribute::parse_outer)?)?;

        // Parse the query declaration. Like `query type_of(key: DefId) -> Ty<'tcx>`
        input.parse::<kw::query>()?;
        let name: Ident = input.parse()?;
        let arg_content;
        parenthesized!(arg_content in input);
        let key = arg_content.parse()?;
        arg_content.parse::<Token![:]>()?;
        let arg = arg_content.parse()?;
        // `ReturnType` parses either nothing (unit) or `-> Ty`.
        let result = input.parse()?;

        // Parse the query modifiers
        let content;
        braced!(content in input);
        let attrs = content.parse()?;

        Ok(Query {
            attrs,
            name,
            key,
            arg,
            result,
        })
    }
}
|
|
|
|
|
2019-03-18 02:19:23 -05:00
|
|
|
/// A type used to greedily parse another type until the input is empty.
struct List<T>(Vec<T>);
|
|
|
|
|
|
|
|
impl<T: Parse> Parse for List<T> {
|
|
|
|
fn parse(input: ParseStream<'_>) -> Result<Self> {
|
|
|
|
let mut list = Vec::new();
|
|
|
|
while !input.is_empty() {
|
|
|
|
list.push(input.parse()?);
|
|
|
|
}
|
|
|
|
Ok(List(list))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-03-18 02:19:23 -05:00
|
|
|
/// A named group containing queries.
struct Group {
    /// The group's name, used as the key in the generated query list.
    name: Ident,
    /// The queries declared inside the group's braces.
    queries: List<Query>,
}
|
|
|
|
|
|
|
|
impl Parse for Group {
|
|
|
|
fn parse(input: ParseStream<'_>) -> Result<Self> {
|
|
|
|
let name: Ident = input.parse()?;
|
|
|
|
let content;
|
|
|
|
braced!(content in input);
|
|
|
|
Ok(Group {
|
|
|
|
name,
|
|
|
|
queries: content.parse()?,
|
|
|
|
})
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-03-18 02:19:23 -05:00
|
|
|
/// Add the impl of QueryDescription for the query to `impls` if one is requested
fn add_query_description_impl(query: &Query, impls: &mut proc_macro2::TokenStream) {
    let name = &query.name;
    let arg = &query.arg;
    let key = &query.key.0;

    // Find custom code to load the query from disk
    let load_cached = query.attrs.0.iter().find_map(|attr| match attr {
        QueryModifier::LoadCached(tcx, id, block) => Some((tcx, id, block)),
        _ => None,
    });

    // Find out if we should cache the query on disk
    let cache = query.attrs.0.iter().find_map(|attr| match attr {
        QueryModifier::Cache(tcx, expr) => Some((tcx, expr)),
        _ => None,
    }).map(|(tcx, expr)| {
        // A `cache` modifier implies a `try_load_from_disk` method: either the
        // user-provided `load_cached` body, or the default on-disk-cache lookup.
        let try_load_from_disk = if let Some((tcx, id, block)) = load_cached {
            quote! {
                #[inline]
                fn try_load_from_disk(
                    #tcx: TyCtxt<'_, 'tcx, 'tcx>,
                    #id: SerializedDepNodeIndex
                ) -> Option<Self::Value> {
                    #block
                }
            }
        } else {
            quote! {
                #[inline]
                fn try_load_from_disk(
                    tcx: TyCtxt<'_, 'tcx, 'tcx>,
                    id: SerializedDepNodeIndex
                ) -> Option<Self::Value> {
                    tcx.queries.on_disk_cache.try_load_query_result(tcx, id)
                }
            }
        };

        // If no `|tcx|` binder was given, bind the parameter as `_`.
        let tcx = tcx.as_ref().map(|t| quote! { #t }).unwrap_or(quote! { _ });
        quote! {
            #[inline]
            fn cache_on_disk(#tcx: TyCtxt<'_, 'tcx, 'tcx>, #key: Self::Key) -> bool {
                #expr
            }

            #try_load_from_disk
        }
    });

    // `load_cached` is only meaningful alongside a `cache` modifier, since the
    // loader is emitted inside the cache block above.
    if cache.is_none() && load_cached.is_some() {
        panic!("load_cached modifier on query `{}` without a cache modifier", name);
    }

    // Build `describe` from a `desc` modifier, if present.
    let desc = query.attrs.0.iter().find_map(|attr| match attr {
        QueryModifier::Desc(tcx, desc) => Some((tcx, desc)),
        _ => None,
    }).map(|(tcx, desc)| {
        // If no `|tcx|` binder was given, bind the parameter as `_`.
        let tcx = tcx.as_ref().map(|t| quote! { #t }).unwrap_or(quote! { _ });
        quote! {
            fn describe(
                #tcx: TyCtxt<'_, '_, '_>,
                #key: #arg,
            ) -> Cow<'static, str> {
                format!(#desc).into()
            }
        }
    });

    // Emit the impl only when at least one relevant modifier was present.
    if desc.is_some() || cache.is_some() {
        let cache = cache.unwrap_or(quote! {});
        let desc = desc.unwrap_or(quote! {});

        impls.extend(quote! {
            impl<'tcx> QueryDescription<'tcx> for queries::#name<'tcx> {
                #desc
                #cache
            }
        });
    }
}
|
|
|
|
|
2018-12-02 18:14:35 -06:00
|
|
|
/// Expands a `rustc_queries! { group { query ... } ... }` invocation into the
/// helper macros (`rustc_query_append!`, `rustc_dep_node_append!`,
/// `rustc_dep_node_force!`) and the `QueryDescription` impls.
pub fn rustc_queries(input: TokenStream) -> TokenStream {
    let groups = parse_macro_input!(input as List<Group>);

    // Token streams accumulated across all groups and queries.
    let mut query_stream = quote! {};
    let mut query_description_stream = quote! {};
    let mut dep_node_def_stream = quote! {};
    let mut dep_node_force_stream = quote! {};

    for group in groups.0 {
        let mut group_stream = quote! {};
        for query in &group.queries.0 {
            let name = &query.name;
            let arg = &query.arg;
            let result_full = &query.result;
            // Spell out the implicit unit return type as `-> ()`.
            let result = match query.result {
                ReturnType::Default => quote! { -> () },
                _ => quote! { #result_full },
            };

            // Look for a fatal_cycle modifier to pass on
            let fatal_cycle = query.attrs.0.iter().find_map(|attr| match attr {
                QueryModifier::FatalCycle => Some(()),
                _ => None,
            }).map(|_| quote! { fatal_cycle }).unwrap_or(quote! {});

            // Add the query to the group
            group_stream.extend(quote! {
                [#fatal_cycle] fn #name: #name(#arg) #result,
            });

            add_query_description_impl(query, &mut query_description_stream);

            // Create a dep node for the query
            dep_node_def_stream.extend(quote! {
                [] #name(#arg),
            });

            // Add a match arm to force the query given the dep node
            dep_node_force_stream.extend(quote! {
                DepKind::#name => {
                    if let Some(key) = RecoverKey::recover($tcx, $dep_node) {
                        force_ex!($tcx, #name, key);
                    } else {
                        return false;
                    }
                }
            });
        }
        let name = &group.name;
        query_stream.extend(quote! {
            #name { #group_stream },
        });
    }
    // The generated macros splice the accumulated streams after whatever
    // tokens the caller passes in the `[$($other:tt)*]` slot.
    TokenStream::from(quote! {
        macro_rules! rustc_query_append {
            ([$($macro:tt)*][$($other:tt)*]) => {
                $($macro)* {
                    $($other)*

                    #query_stream

                }
            }
        }
        macro_rules! rustc_dep_node_append {
            ([$($macro:tt)*][$($other:tt)*]) => {
                $($macro)*(
                    $($other)*

                    #dep_node_def_stream
                );
            }
        }
        macro_rules! rustc_dep_node_force {
            ([$dep_node:expr, $tcx:expr] $($other:tt)*) => {
                match $dep_node.kind {
                    $($other)*

                    #dep_node_force_stream
                }
            }
        }
        #query_description_stream
    })
}
|