//! This module adds real-world mbe examples for benchmark tests
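//!
//! Both benchmarks bail out early via `skip_slow_tests()`, so they only run
//! when slow tests are enabled for the test run.
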
use rustc_hash::FxHashMap;
use syntax::{
    ast::{self, HasName},
    AstNode, SmolStr,
};
use test_utils::{bench, bench_fixture, skip_slow_tests};

use crate::{
    parser::{MetaVarKind, Op, RepeatKind, Separator},
    syntax_node_to_token_tree, DeclarativeMacro,
};

#[test]
fn benchmark_parse_macro_rules() {
    if skip_slow_tests() {
        return;
    }
    let rules = macro_rules_fixtures_tt();
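    // The sum of rule counts doubles as a checksum: it verifies that every
    // fixture parsed and still contains the expected number of rules.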
    let hash: usize = {
        let _pt = bench("mbe parse macro rules");
        rules.values().map(|it| DeclarativeMacro::parse_macro_rules(it).unwrap().rules.len()).sum()
    };
    assert_eq!(hash, 1144);
}

#[test]
fn benchmark_expand_macro_rules() {
    if skip_slow_tests() {
        return;
    }
    let rules = macro_rules_fixtures();
    let invocations = invocation_fixtures(&rules);

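    // Every expansion must succeed; the summed token-tree lengths serve as a
    // checksum over the expanded output.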
    let hash: usize = {
        let _pt = bench("mbe expand macro rules");
        invocations
            .into_iter()
            .map(|(id, tt)| {
                let res = rules[&id].expand(&tt);
                assert!(res.err.is_none());
                res.value.token_trees.len()
            })
            .sum()
    };
    assert_eq!(hash, 69413);
}

fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro> {
    macro_rules_fixtures_tt()
        .into_iter()
        .map(|(id, tt)| (id, DeclarativeMacro::parse_macro_rules(&tt).unwrap()))
        .collect()
}

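/// Extracts every `macro_rules!` definition from the benchmark fixture and
/// converts its body into a token tree, keyed by the macro's name.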
fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree> {
    let fixture = bench_fixture::numerous_macro_rules();
    let source_file = ast::SourceFile::parse(&fixture).ok().unwrap();

    source_file
        .syntax()
        .descendants()
        .filter_map(ast::MacroRules::cast)
        .map(|rule| {
            let id = rule.name().unwrap().to_string();
            let (def_tt, _) = syntax_node_to_token_tree(rule.token_tree().unwrap().syntax());
            (id, def_tt)
        })
        .collect()
}

/// Generate random invocation fixtures from rules
fn invocation_fixtures(rules: &FxHashMap<String, DeclarativeMacro>) -> Vec<(String, tt::Subtree)> {
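    // A fixed seed keeps the "random" fixtures deterministic, so the checksums
    // asserted in the benchmarks above stay stable across runs.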
    let mut seed = 123456789;
    let mut res = Vec::new();

    for (name, it) in rules {
        for rule in &it.rules {
            // Generate two invocation fixtures for each rule.
            for _ in 0..2 {
                // The inputs are generated by filling each `Op` with random tokens.
                // However, some of the generated inputs are ambiguous to expand, for example:
                // ```rust
                // macro_rules! m {
                //    ($($t:ident),* as $ty:ident) => {}
                // }
                // m!(as u32); // error: local ambiguity: multiple parsing options: built-in NTs ident ('t') or 1 other option.
                // ```
                //
                // So we just skip any case that fails to expand and try again.
                let mut try_cnt = 0;
                loop {
                    let mut subtree = tt::Subtree::default();
                    for op in rule.lhs.iter() {
                        collect_from_op(op, &mut subtree, &mut seed);
                    }
                    if it.expand(&subtree).err.is_none() {
                        res.push((name.clone(), subtree));
                        break;
                    }
                    try_cnt += 1;
                    if try_cnt > 100 {
panic!("invocaton fixture {} cannot be generated.\n", name);
|
|
|
|
}
|
2021-02-05 05:57:32 -06:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return res;
|
|
|
|
|
|
|
|
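    // Fill `parent` with tokens that a matcher for `op` would accept: each
    // metavariable kind gets a minimal token of the right fragment (e.g. `pub`
    // for `vis`, `'a` for `lifetime`), repeats run a random number of times,
    // and subtrees recurse.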
    fn collect_from_op(op: &Op, parent: &mut tt::Subtree, seed: &mut usize) {
        return match op {
            Op::Var { kind, .. } => match kind.as_ref() {
                Some(MetaVarKind::Ident) => parent.token_trees.push(make_ident("foo")),
                Some(MetaVarKind::Ty) => parent.token_trees.push(make_ident("Foo")),
                Some(MetaVarKind::Tt) => parent.token_trees.push(make_ident("foo")),
                Some(MetaVarKind::Vis) => parent.token_trees.push(make_ident("pub")),
                Some(MetaVarKind::Pat) => parent.token_trees.push(make_ident("foo")),
                Some(MetaVarKind::Path) => parent.token_trees.push(make_ident("foo")),
                Some(MetaVarKind::Literal) => parent.token_trees.push(make_literal("1")),
                Some(MetaVarKind::Expr) => parent.token_trees.push(make_ident("foo")),
                Some(MetaVarKind::Lifetime) => {
                    parent.token_trees.push(make_punct('\''));
                    parent.token_trees.push(make_ident("a"));
                }
                Some(MetaVarKind::Block) => {
                    parent.token_trees.push(make_subtree(tt::DelimiterKind::Brace, None))
                }
                Some(MetaVarKind::Item) => {
                    parent.token_trees.push(make_ident("fn"));
                    parent.token_trees.push(make_ident("foo"));
                    parent.token_trees.push(make_subtree(tt::DelimiterKind::Parenthesis, None));
                    parent.token_trees.push(make_subtree(tt::DelimiterKind::Brace, None));
                }
                Some(MetaVarKind::Meta) => {
                    parent.token_trees.push(make_ident("foo"));
                    parent.token_trees.push(make_subtree(tt::DelimiterKind::Parenthesis, None));
                }

                None => (),
                Some(kind) => panic!("Unhandled kind {:?}", kind),
            },
            Op::Leaf(leaf) => parent.token_trees.push(leaf.clone().into()),
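            // Emit a random number of repetitions consistent with the repeat kind,
            // re-emitting the separator (if any) between consecutive iterations.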
            Op::Repeat { tokens, kind, separator } => {
                let max = 10;
                let cnt = match kind {
                    RepeatKind::ZeroOrMore => rand(seed) % max,
                    RepeatKind::OneOrMore => 1 + rand(seed) % max,
                    RepeatKind::ZeroOrOne => rand(seed) % 2,
                };
                for i in 0..cnt {
                    for it in tokens.iter() {
                        collect_from_op(it, parent, seed);
                    }
                    if i + 1 != cnt {
                        if let Some(sep) = separator {
                            match sep {
                                Separator::Literal(it) => {
                                    parent.token_trees.push(tt::Leaf::Literal(it.clone()).into())
                                }
                                Separator::Ident(it) => {
                                    parent.token_trees.push(tt::Leaf::Ident(it.clone()).into())
                                }
                                Separator::Puncts(puncts) => {
                                    for it in puncts {
                                        parent.token_trees.push(tt::Leaf::Punct(*it).into())
                                    }
                                }
                            };
                        }
                    }
                }
            }
            Op::Subtree { tokens, delimiter } => {
                let mut subtree = tt::Subtree { delimiter: *delimiter, token_trees: Vec::new() };
                tokens.iter().for_each(|it| {
                    collect_from_op(it, &mut subtree, seed);
                });
                parent.token_trees.push(subtree.into());
            }
            Op::Ignore { .. } | Op::Index { .. } => {}
        };

        // Simple linear congruential generator for deterministic results.
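        // `a` and `c` below are widely used LCG parameters (the "Numerical
        // Recipes" constants); wrapping arithmetic keeps the update well-defined
        // on overflow.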
        fn rand(seed: &mut usize) -> usize {
            let a = 1664525;
            let c = 1013904223;
            *seed = usize::wrapping_add(usize::wrapping_mul(*seed, a), c);
            *seed
        }
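        // Constructors for synthetic tokens. Spans don't matter for the
        // benchmark, so every token gets `tt::TokenId::unspecified()`.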
        fn make_ident(ident: &str) -> tt::TokenTree {
            tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), text: SmolStr::new(ident) })
                .into()
        }
        fn make_punct(char: char) -> tt::TokenTree {
            tt::Leaf::Punct(tt::Punct {
                id: tt::TokenId::unspecified(),
                char,
                spacing: tt::Spacing::Alone,
            })
            .into()
        }
        fn make_literal(lit: &str) -> tt::TokenTree {
            tt::Leaf::Literal(tt::Literal {
                id: tt::TokenId::unspecified(),
                text: SmolStr::new(lit),
            })
            .into()
        }
        fn make_subtree(
            kind: tt::DelimiterKind,
            token_trees: Option<Vec<tt::TokenTree>>,
        ) -> tt::TokenTree {
            tt::Subtree {
                delimiter: Some(tt::Delimiter { id: tt::TokenId::unspecified(), kind }),
                token_trees: token_trees.unwrap_or_default(),
            }
            .into()
        }
    }
}